Add WebRTC support using libdatachannel

WebRTC is exposed via the `/video.html` endpoint
and is enabled by default as long as an H264 stream
is available.
This commit is contained in:
Kamil Trzcinski 2022-08-31 19:04:54 +02:00
parent ff81088824
commit 5ee0bee59f
19 changed files with 644 additions and 87 deletions

3
.gitignore vendored
View File

@ -1,7 +1,8 @@
tmp/ tmp/
*.o *.o
*.d *.d
html/*.c *.html.c
*.js.c
/camera-streamer /camera-streamer
/test_* /test_*
.vscode/settings.json .vscode/settings.json

4
.gitmodules vendored Normal file
View File

@ -0,0 +1,4 @@
[submodule "third_party/libdatachannel"]
path = third_party/libdatachannel
url = https://github.com/paullouisageneau/libdatachannel.git
ignore = dirty

View File

@ -4,6 +4,8 @@
"name": "Linux", "name": "Linux",
"includePath": [ "includePath": [
"${workspaceFolder}/**", "${workspaceFolder}/**",
"${workspaceFolder}/third_party/libdatachannel/include",
"${workspaceFolder}/third_party/libdatachannel/deps/json/include",
"/usr/include/libcamera", "/usr/include/libcamera",
"/usr/include/liveMedia", "/usr/include/liveMedia",
"/usr/include/groupsock", "/usr/include/groupsock",
@ -13,7 +15,8 @@
"defines": [ "defines": [
"USE_LIBCAMERA=1", "USE_LIBCAMERA=1",
"USE_FFMPEG=1", "USE_FFMPEG=1",
"USE_RTSP=1" "USE_RTSP=1",
"USE_LIBDATACHANNEL=1"
], ],
"compilerPath": "/usr/bin/gcc", "compilerPath": "/usr/bin/gcc",
"cStandard": "gnu17", "cStandard": "gnu17",

View File

@ -24,5 +24,5 @@ ADD / /src
WORKDIR /src WORKDIR /src
RUN git clean -ffdx RUN git clean -ffdx
RUN git submodule update --init --recursive --recommend-shallow RUN git submodule update --init --recursive --recommend-shallow
RUN git submodule foreach git clean -ffdx RUN git submodule foreach --recursive git clean -ffdx
RUN make -j$(nproc) RUN make -j$(nproc)

View File

@ -10,9 +10,12 @@ ifneq (x,x$(shell which ccache))
CCACHE ?= ccache CCACHE ?= ccache
endif endif
LIBDATACHANNEL_PATH ?= third_party/libdatachannel
USE_FFMPEG ?= $(shell pkg-config libavutil libavformat libavcodec && echo 1) USE_FFMPEG ?= $(shell pkg-config libavutil libavformat libavcodec && echo 1)
USE_LIBCAMERA ?= $(shell pkg-config libcamera && echo 1) USE_LIBCAMERA ?= $(shell pkg-config libcamera && echo 1)
USE_RTSP ?= $(shell pkg-config live555 && echo 1) USE_RTSP ?= $(shell pkg-config live555 && echo 1)
USE_LIBDATACHANNEL ?= $(shell [ -e $(LIBDATACHANNEL_PATH)/CMakeLists.txt ] && echo 1)
ifeq (1,$(DEBUG)) ifeq (1,$(DEBUG))
CFLAGS += -g CFLAGS += -g
@ -33,6 +36,19 @@ CFLAGS += -DUSE_RTSP $(shell pkg-config --cflags live555)
LDLIBS += $(shell pkg-config --libs live555) LDLIBS += $(shell pkg-config --libs live555)
endif endif
ifeq (1,$(USE_LIBDATACHANNEL))
CFLAGS += -DUSE_LIBDATACHANNEL
CFLAGS += -I$(LIBDATACHANNEL_PATH)/include
CFLAGS += -I$(LIBDATACHANNEL_PATH)/deps/json/include
LDLIBS += -L$(LIBDATACHANNEL_PATH)/build -ldatachannel-static
LDLIBS += -L$(LIBDATACHANNEL_PATH)/build/deps/usrsctp/usrsctplib -lusrsctp
LDLIBS += -L$(LIBDATACHANNEL_PATH)/build/deps/libsrtp -lsrtp2
LDLIBS += -L$(LIBDATACHANNEL_PATH)/build/deps/libjuice -ljuice-static
LDLIBS += -lcrypto -lssl
camera-streamer: $(LIBDATACHANNEL_PATH)/build/libdatachannel-static.a
endif
HTML_SRC = $(addsuffix .c,$(HTML)) HTML_SRC = $(addsuffix .c,$(HTML))
OBJS = $(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(SRC) $(HTML_SRC))) OBJS = $(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(SRC) $(HTML_SRC)))
@ -40,7 +56,7 @@ OBJS = $(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(SRC) $(HTML_SRC)))
all: $(TARGET) all: $(TARGET)
%: cmd/% $(OBJS) %: cmd/% $(filter-out third_party/%, $(OBJS))
$(CCACHE) $(CXX) $(CFLAGS) -o $@ $(filter-out cmd/%, $^) $(filter $</%, $^) $(LDLIBS) $(CCACHE) $(CXX) $(CFLAGS) -o $@ $(filter-out cmd/%, $^) $(filter $</%, $^) $(LDLIBS)
install: $(TARGET) install: $(TARGET)
@ -64,3 +80,7 @@ headers:
html/%.c: html/% html/%.c: html/%
xxd -i $< > $@.tmp xxd -i $< > $@.tmp
mv $@.tmp $@ mv $@.tmp $@
$(LIBDATACHANNEL_PATH)/build/libdatachannel-static.a: $(LIBDATACHANNEL_PATH)
[ -e $</build/Makefile ] || cmake -S $< -B $</build
$(MAKE) -C $</build datachannel-static

View File

@ -43,7 +43,10 @@ reboot
## Compile ## Compile
```bash ```bash
apt-get -y install libavformat-dev libavutil-dev libavcodec-dev libcamera-dev liblivemedia-dev v4l-utils pkg-config xxd build-essential git clone https://github.com/ayufan-research/camera-streamer.git --recursive
apt-get -y install libavformat-dev libavutil-dev libavcodec-dev libcamera-dev liblivemedia-dev v4l-utils pkg-config xxd build-essential cmake libssl-dev
cd camera-streamer/
make make
sudo make install sudo make install
``` ```
@ -263,6 +266,14 @@ device/buffer_lock.c: http_jpeg: Captured buffer JPEG:capture:mplane:buf1 (refs=
device/buffer_lock.c: http_jpeg: Captured buffer JPEG:capture:mplane:buf2 (refs=2), frame=159/0, processing_ms=18.5, frame_ms=8.3 device/buffer_lock.c: http_jpeg: Captured buffer JPEG:capture:mplane:buf2 (refs=2), frame=159/0, processing_ms=18.5, frame_ms=8.3
``` ```
## WebRTC support
WebRTC is accessible via `http://<ip>:8080/video` by default and is available whenever H264 output is generated.
WebRTC support is implemented using awesome [libdatachannel](https://github.com/paullouisageneau/libdatachannel/) library.
The support will be compiled by default when doing `make`.
## License ## License
GNU General Public License v3.0 GNU General Public License v3.0

View File

@ -2,6 +2,7 @@
#include "util/opts/opts.h" #include "util/opts/opts.h"
#include "util/opts/log.h" #include "util/opts/log.h"
#include "util/opts/fourcc.h" #include "util/opts/fourcc.h"
#include "output/webrtc/webrtc.h"
#include "device/camera/camera.h" #include "device/camera/camera.h"
#include "output/output.h" #include "output/output.h"
#include "output/rtsp/rtsp.h" #include "output/rtsp/rtsp.h"
@ -10,8 +11,6 @@ extern unsigned char html_index_html[];
extern unsigned int html_index_html_len; extern unsigned int html_index_html_len;
extern unsigned char html_video_html[]; extern unsigned char html_video_html[];
extern unsigned int html_video_html_len; extern unsigned int html_video_html_len;
extern unsigned char html_jmuxer_min_js[];
extern unsigned int html_jmuxer_min_js_len;
extern camera_t *camera; extern camera_t *camera;
void *camera_http_set_option(http_worker_t *worker, FILE *stream, const char *key, const char *value, void *headersp) void *camera_http_set_option(http_worker_t *worker, FILE *stream, const char *key, const char *value, void *headersp)
@ -81,8 +80,8 @@ http_method_t http_methods[] = {
{ "GET /video.h264?", http_h264_video }, { "GET /video.h264?", http_h264_video },
{ "GET /video.mkv?", http_mkv_video }, { "GET /video.mkv?", http_mkv_video },
{ "GET /video.mp4?", http_mp4_video }, { "GET /video.mp4?", http_mp4_video },
{ "POST /video?", http_webrtc_offer },
{ "GET /option?", camera_http_option }, { "GET /option?", camera_http_option },
{ "GET /jmuxer.min.js?", http_content, "text/javascript", html_jmuxer_min_js, 0, &html_jmuxer_min_js_len },
{ "GET /?", http_content, "text/html", html_index_html, 0, &html_index_html_len }, { "GET /?", http_content, "text/html", html_index_html, 0, &html_index_html_len },
{ } { }
}; };

View File

@ -1,9 +1,9 @@
#include "util/http/http.h" #include "util/http/http.h"
#include "util/opts/opts.h" #include "util/opts/opts.h"
#include "util/opts/log.h" #include "util/opts/log.h"
#include "util/opts/fourcc.h"
#include "device/camera/camera.h" #include "device/camera/camera.h"
#include "output/rtsp/rtsp.h" #include "output/rtsp/rtsp.h"
#include "output/webrtc/webrtc.h"
#include <signal.h> #include <signal.h>
#include <unistd.h> #include <unistd.h>
@ -46,6 +46,8 @@ int main(int argc, char *argv[])
goto error; goto error;
} }
webrtc_server();
while (true) { while (true) {
camera = camera_open(&camera_options); camera = camera_open(&camera_options);
if (camera) { if (camera) {

View File

@ -40,7 +40,7 @@
<br> <br>
<li> <li>
<a href="video"><b>/video</b></a><br> <a href="video"><b>/video</b></a><br>
Get a live video (H264) stream.<br> Get a live WebRTC (H264) stream.<br>
<br> <br>
<ul> <ul>
<li><a href="video.mp4"><b>/video.mp4</b></a><br> get a live video stream in MP4 format (if FFMPEG enabled).</li> <li><a href="video.mp4"><b>/video.mp4</b></a><br> get a live video stream in MP4 format (if FFMPEG enabled).</li>

1
html/jmuxer.min.js vendored

File diff suppressed because one or more lines are too long

View File

@ -1,26 +1,25 @@
<!doctype html> <!doctype html>
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title></title> <title></title>
<link rel="icon" href="data:;base64,iVBORw0KGgo="> <link rel="icon" href="data:;base64,iVBORw0KGgo=">
<style> <style>
body { body {
margin:0; margin:0;
padding:0; padding:0;
background-color:#303030; background-color:#303030;
} }
#streamStage { #streamStage {
position:fixed; position:fixed;
top:0; top:0;
left:0; left:0;
width:100%; width:100%;
height:100%; height:100%;
} }
#streamStage:before { #streamStage:before {
content: ''; content: '';
box-sizing: border-box; box-sizing: border-box;
position: absolute; position: absolute;
@ -30,53 +29,113 @@ body {
height: 2rem; height: 2rem;
margin-top: -1rem; margin-top: -1rem;
margin-left: -1rem; margin-left: -1rem;
} }
#stream { #stream {
max-height: 100%; max-height: 100%;
max-width: 100%; max-width: 100%;
margin: auto; margin: auto;
position: absolute; position: absolute;
top: 0; left: 0; bottom: 0; right: 0; top: 0; left: 0; bottom: 0; right: 0;
} }
</style> </style>
<script src="/jmuxer.min.js"></script>
</head> </head>
<body> <body>
<div id="streamStage"> <div id="streamStage">
<video controls autoplay muted id="stream"></video> <video controls autoplay muted id="stream"></video>
</div> </div>
<body>
<script>
function startWebRTC() {
var config = {
sdpSemantics: 'unified-plan'
};
if (!document.getElementById('use-stun') || document.getElementById('use-stun').checked) {
config.iceServers = [{urls: ['stun:stun.l.google.com:19302']}];
}
pc = new RTCPeerConnection(config);
pc.addTransceiver('video', {direction: 'recvonly'});
//pc.addTransceiver('audio', {direction: 'recvonly'});
pc.addEventListener('track', function(evt) {
console.log("track event " + evt.track.kind);
if (evt.track.kind == 'video') {
if (document.getElementById('stream'))
document.getElementById('stream').srcObject = evt.streams[0];
} else {
if (document.getElementById('audio'))
document.getElementById('audio').srcObject = evt.streams[0];
}
});
const urlSearchParams = new URLSearchParams(window.location.search);
const params = Object.fromEntries(urlSearchParams.entries());
fetch('/video', {
body: JSON.stringify({
type: 'request',
res: params.res
}),
headers: {
'Content-Type': 'application/json'
},
method: 'POST'
}).then(function(response) {
return response.json();
}).then(function(answer) {
pc.remote_pc_id = answer.id;
return pc.setRemoteDescription(answer);
}).then(function() {
return pc.createAnswer();
}).then(function(answer) {
return pc.setLocalDescription(answer);
}).then(function() {
// wait for ICE gathering to complete
return new Promise(function(resolve) {
if (pc.iceGatheringState === 'complete') {
resolve();
} else {
function checkState() {
if (pc.iceGatheringState === 'complete') {
pc.removeEventListener('icegatheringstatechange', checkState);
resolve();
}
}
pc.addEventListener('icegatheringstatechange', checkState);
}
});
}).then(function(answer) {
var offer = pc.localDescription;
return fetch('/video', {
body: JSON.stringify({
type: offer.type,
id: pc.remote_pc_id,
sdp: offer.sdp,
}),
headers: {
'Content-Type': 'application/json'
},
method: 'POST'
})
}).then(function(response) {
return response.json();
}).catch(function(e) {
alert(e);
});
}
function stopWebRTC() {
setTimeout(function() {
pc.close();
}, 500);
}
</script>
<script> <script>
window.onload = function() { window.onload = function() {
var jmuxer = new JMuxer({ startWebRTC();
node: 'stream',
mode: 'video',
flushingTime: 0,
fps: 30,
debug: false
});
fetch('/video.h264' + window.location.search).then(function(response) {
console.log(response);
const reader = response.body.getReader();
function go() {
reader.read().then(function(result) {
if (!result.done) {
if (!document.hidden){
jmuxer.feed({
video: new Uint8Array(result.value)
});
}
go ();
}
})
}
go ();
})
} }
</script> </script>
</body> </body>

View File

@ -35,11 +35,17 @@ bool h264_is_key_frame(buffer_t *buf)
{ {
unsigned char *data = buf->start; unsigned char *data = buf->start;
static const int N = 8;
char buffer [3*N+1];
buffer[sizeof(buffer)-1] = 0;
for(int j = 0; j < N; j++)
sprintf(&buffer[sizeof(buffer)/N*j], "%02X ", data[j]);
if (buf->flags.is_keyframe) { if (buf->flags.is_keyframe) {
LOG_DEBUG(buf, "Got key frame (from V4L2)!"); LOG_DEBUG(buf, "Got key frame (from V4L2)!: %s", buffer);
return true; return true;
} else if (buf->used >= 5 && (data[4] & 0x1F) == 0x07) { } else if (buf->used >= 5 && (data[4] & 0x1F) == 0x07) {
LOG_DEBUG(buf, "Got key frame (from buffer)!"); LOG_DEBUG(buf, "Got key frame (from buffer)!: %s", buffer);
return true; return true;
} }

426
output/webrtc/webrtc.cc Normal file
View File

@ -0,0 +1,426 @@
extern "C" {
#include "webrtc.h"
#include "device/buffer.h"
#include "device/buffer_list.h"
#include "device/buffer_lock.h"
#include "device/device.h"
#include "output/output.h"
};
#ifdef USE_LIBDATACHANNEL
#include <string>
#include <memory>
#include <optional>
#include <condition_variable>
#include <atomic>
#include <chrono>
#include <set>
#include <nlohmann/json.hpp>
#include <rtc/peerconnection.hpp>
#include <rtc/rtcpsrreporter.hpp>
#include <rtc/h264rtppacketizer.hpp>
#include <rtc/h264packetizationhandler.hpp>
#include <rtc/rtcpnackresponder.hpp>
using namespace std::chrono_literals;
class Client;
static std::set<std::shared_ptr<Client> > webrtc_clients;
static std::mutex webrtc_clients_lock;
static const auto webrtc_client_lock_timeout = 3 * 1000ms;
static const auto webrtc_client_max_json_body = 10 * 1024;
static const auto webrtc_client_video_payload_type = 102; // H264
static const rtc::Configuration webrtc_configuration = {
.iceServers = { rtc::IceServer("stun:stun.l.google.com:19302") },
.disableAutoNegotiation = true
};
// Per-client outgoing video track together with the RTCP Sender-Report
// generator attached to it.
struct ClientTrackData
{
  std::shared_ptr<rtc::Track> track;
  std::shared_ptr<rtc::RtcpSrReporter> sender;

  // Anchor the RTP clock at the current time (1970 epoch) and begin
  // producing RTCP sender reports.
  void startStreaming()
  {
    double now_s = get_monotonic_time_us(NULL, NULL) / (1000.0 * 1000.0);
    sender->rtpConfig->setStartTime(now_s, rtc::RtpPacketizationConfig::EpochStart::T1970);
    sender->startRecording();
  }

  // Advance the RTP timestamp to "now" and request a fresh sender
  // report roughly once per second.
  void sendTime()
  {
    double now_s = get_monotonic_time_us(NULL, NULL) / (1000.0 * 1000.0);

    auto config = sender->rtpConfig;
    uint32_t elapsed = config->secondsToTimestamp(now_s);
    config->timestamp = config->startTimestamp + elapsed;

    auto sinceLastReport = config->timestamp - sender->previousReportedTimestamp;
    if (config->timestampToSeconds(sinceLastReport) > 1) {
      sender->setNeedsToReport();
    }
  }

  // True when the track exists and its transport is open for sending.
  bool wantsFrame() const
  {
    return track && track->isOpen();
  }
};
// One connected WebRTC viewer: the peer connection, its video track and
// the key-frame gating state.
class Client
{
public:
  // `pc_` is the freshly created peer connection; a random "rtc-...."
  // id is generated so the browser can address this client in its
  // follow-up signaling requests.
  Client(std::shared_ptr<rtc::PeerConnection> pc_)
    : pc(pc_), had_key_frame(false), use_low_res(false) // was uninitialized: reading it in pushFrame() was UB
  {
    id.resize(20);
    for (auto & c : id) {
      c = 'a' + (rand() % 26);
    }
    id = "rtc-" + id;
    name = strdup(id.c_str()); // plain C copy for the LOG_* macros; freed in the dtor
  }

  ~Client()
  {
    free(name);
  }

  // Non-copyable: `name` is a raw owned pointer and a copy would
  // double-free it (instances are only ever held via shared_ptr).
  Client(const Client &) = delete;
  Client &operator=(const Client &) = delete;

  // A frame can be delivered only once the connection reached the
  // Connected state and the video track transport is open.
  bool wantsFrame() const
  {
    if (!pc || !video)
      return false;
    if (pc->state() != rtc::PeerConnection::State::Connected)
      return false;
    return video->wantsFrame();
  }

  // Forward one H264 buffer to this client. Frames are dropped (and a
  // key frame is forced on the device) until the first key frame was
  // seen, and when the buffer's resolution does not match the client's
  // requested resolution.
  void pushFrame(buffer_t *buf, bool low_res)
  {
    auto self = this;

    if (!video || !video->track) {
      return;
    }

    if (use_low_res != low_res) {
      return;
    }

    if (!had_key_frame) {
      if (!h264_is_key_frame(buf)) {
        device_video_force_key(buf->buf_list->dev);
        LOG_VERBOSE(self, "Skipping as key frame was not yet sent.");
        return;
      }
      had_key_frame = true;
    }

    rtc::binary data((std::byte*)buf->start, (std::byte*)buf->start + buf->used);
    video->sendTime();
    video->track->send(data);
  }

public:
  char *name;        // owned C copy of `id` for the LOG_* macros
  std::string id;    // public client identifier ("rtc-....")
  std::shared_ptr<rtc::PeerConnection> pc;
  std::shared_ptr<ClientTrackData> video;
  std::mutex lock;
  std::condition_variable wait_for_complete;
  bool had_key_frame; // first key frame already forwarded?
  bool use_low_res;   // client asked for the low-resolution stream
};
std::shared_ptr<Client> findClient(std::string id)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client && client->id == id) {
return client;
}
}
return std::shared_ptr<Client>();
}
// Drop the client from the global registry; the last shared_ptr owner
// releases the peer connection.
void removeClient(const std::shared_ptr<Client> &client, const char *reason)
{
  std::unique_lock lk(webrtc_clients_lock);
  webrtc_clients.erase(client);
  LOG_INFO(client.get(), "Client removed: %s.", reason);
}
// Create a send-only H264 track on `pc` and wire up its RTP pipeline:
// H264 packetizer -> RTCP SR reporter -> RTCP NACK responder.
std::shared_ptr<ClientTrackData> addVideo(const std::shared_ptr<rtc::PeerConnection> pc, const uint8_t payloadType, const uint32_t ssrc, const std::string cname, const std::string msid)
{
  auto media = rtc::Description::Video(cname, rtc::Description::Direction::SendOnly);
  media.addH264Codec(payloadType);
  media.setBitrate(1000); // bitrate hint advertised in the SDP
  media.addSSRC(ssrc, cname, msid, cname);

  auto track = pc->addTrack(media);

  auto rtpConfig = std::make_shared<rtc::RtpPacketizationConfig>(ssrc, cname, payloadType, rtc::H264RtpPacketizer::defaultClockRate);
  auto packetizer = std::make_shared<rtc::H264RtpPacketizer>(rtc::H264RtpPacketizer::Separator::LongStartSequence, rtpConfig);
  auto chain = std::make_shared<rtc::H264PacketizationHandler>(packetizer);

  auto srReporter = std::make_shared<rtc::RtcpSrReporter>(rtpConfig);
  chain->addToChain(srReporter);
  chain->addToChain(std::make_shared<rtc::RtcpNackResponder>());

  track->setMediaHandler(chain);

  return std::shared_ptr<ClientTrackData>(new ClientTrackData{track, srReporter});
}
// Build a PeerConnection, attach logging/lifecycle callbacks and store
// the new client in the global registry. All callbacks capture the
// client weakly so a closed connection can be released.
std::shared_ptr<Client> createPeerConnection(const rtc::Configuration &config)
{
  auto pc = std::make_shared<rtc::PeerConnection>(config);
  auto client = std::make_shared<Client>(pc);
  auto weak = std::weak_ptr(client);

  pc->onTrack([weak](std::shared_ptr<rtc::Track> track) {
    if (auto client = weak.lock()) {
      LOG_DEBUG(client.get(), "onTrack: %s", track->mid().c_str());
    }
  });

  pc->onLocalDescription([weak](rtc::Description description) {
    if (auto client = weak.lock()) {
      LOG_DEBUG(client.get(), "onLocalDescription: %s", description.typeString().c_str());
    }
  });

  pc->onSignalingStateChange([weak](rtc::PeerConnection::SignalingState state) {
    if (auto client = weak.lock()) {
      LOG_DEBUG(client.get(), "onSignalingStateChange: %d", (int)state);
    }
  });

  pc->onStateChange([weak](rtc::PeerConnection::State state) {
    if (auto client = weak.lock()) {
      LOG_DEBUG(client.get(), "onStateChange: %d", (int)state);

      switch (state) {
      case rtc::PeerConnection::State::Disconnected:
      case rtc::PeerConnection::State::Failed:
      case rtc::PeerConnection::State::Closed:
        // terminal states: forget the client
        removeClient(client, "stream closed");
        break;
      default:
        break;
      }
    }
  });

  pc->onGatheringStateChange([weak](rtc::PeerConnection::GatheringState state) {
    if (auto client = weak.lock()) {
      LOG_DEBUG(client.get(), "onGatheringStateChange: %d", (int)state);

      if (state == rtc::PeerConnection::GatheringState::Complete) {
        // wake the HTTP worker waiting in http_webrtc_request/offer
        client->wait_for_complete.notify_all();
      }
    }
  });

  std::unique_lock lk(webrtc_clients_lock);
  webrtc_clients.insert(client);
  return client;
}
// buffer_lock callback: H264 capture should keep running while at
// least one WebRTC client is able to receive frames.
static bool webrtc_h264_needs_buffer(buffer_lock_t *buf_lock)
{
  std::unique_lock lk(webrtc_clients_lock);

  for (const auto &client : webrtc_clients) {
    if (client->wantsFrame())
      return true;
  }

  return false;
}
// buffer_lock callback: fan a full-resolution H264 frame out to all
// ready clients. When no dedicated low-resolution stream exists, the
// same frame also serves clients that requested the low-res variant.
static void webrtc_h264_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
  std::unique_lock lk(webrtc_clients_lock);

  for (const auto &client : webrtc_clients) {
    if (!client->wantsFrame())
      continue;

    client->pushFrame(buf, false);
    if (!http_h264_lowres.buf_list) {
      client->pushFrame(buf, true);
    }
  }
}
// buffer_lock callback: deliver a low-resolution H264 frame to every
// ready client that asked for the low-res stream.
static void webrtc_h264_low_res_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
  std::unique_lock lk(webrtc_clients_lock);

  for (const auto &client : webrtc_clients) {
    if (!client->wantsFrame())
      continue;

    client->pushFrame(buf, true);
  }
}
// Handle {"type": "request"}: create a new peer connection, generate a
// local SDP offer (waiting for ICE gathering to complete) and return
// it together with the client id the browser uses in its follow-up
// "answer" message.
static void http_webrtc_request(http_worker_t *worker, FILE *stream, const nlohmann::json &message)
{
  auto client = createPeerConnection(webrtc_configuration);
  LOG_INFO(client.get(), "Stream requested.");

  client->video = addVideo(client->pc, webrtc_client_video_payload_type, rand(), "video", "");
  if (message.contains("res")) {
    client->use_low_res = (message["res"] == "low");
  }

  try {
    {
      std::unique_lock lock(client->lock);
      client->pc->setLocalDescription();
      // woken by onGatheringStateChange() on Complete; bounded by a
      // timeout so a stalled gathering cannot hang the HTTP worker
      client->wait_for_complete.wait_for(lock, webrtc_client_lock_timeout);
    }

    if (client->pc->gatheringState() == rtc::PeerConnection::GatheringState::Complete) {
      auto description = client->pc->localDescription();

      // named `response` (not `message`) to avoid shadowing the parameter
      nlohmann::json response;
      response["id"] = client->id;
      response["type"] = description->typeString();
      response["sdp"] = std::string(description.value());
      http_write_response(stream, "200 OK", "application/json", response.dump().c_str(), 0);
      LOG_VERBOSE(client.get(), "Local SDP Offer: %s", std::string(response["sdp"]).c_str());
    } else {
      http_500(stream, "Not complete");
    }
  } catch(const std::exception &e) {
    http_500(stream, e.what());
    removeClient(client, e.what());
  }
}
// Handle {"type": "answer"}: apply the browser's SDP answer to the
// pending peer connection identified by "id" and start streaming.
static void http_webrtc_answer(http_worker_t *worker, FILE *stream, const nlohmann::json &message)
{
  if (!message.contains("id") || !message.contains("sdp")) {
    http_400(stream, "no sdp or id");
    return;
  }

  auto client = findClient(message["id"]);
  if (!client) {
    http_404(stream, "No client found");
    return;
  }

  LOG_INFO(client.get(), "Answer received.");
  LOG_VERBOSE(client.get(), "Remote SDP Answer: %s", std::string(message["sdp"]).c_str());

  try {
    auto answer = rtc::Description(std::string(message["sdp"]), std::string(message["type"]));
    client->pc->setRemoteDescription(answer);
    client->video->startStreaming();
    http_write_response(stream, "200 OK", "application/json", "{}", 0);
  } catch(const std::exception &e) {
    http_500(stream, e.what());
    removeClient(client, e.what());
  }
}
// Handle {"type": "offer"}: the browser supplied its own SDP offer;
// create a peer connection, answer the offer and return the local SDP
// answer once ICE gathering completed.
static void http_webrtc_offer(http_worker_t *worker, FILE *stream, const nlohmann::json &message)
{
  if (!message.contains("sdp")) {
    http_400(stream, "no sdp");
    return;
  }

  auto offer = rtc::Description(std::string(message["sdp"]), std::string(message["type"]));
  auto client = createPeerConnection(webrtc_configuration);
  LOG_INFO(client.get(), "Offer received.");
  LOG_VERBOSE(client.get(), "Remote SDP Offer: %s", std::string(message["sdp"]).c_str());

  try {
    client->video = addVideo(client->pc, webrtc_client_video_payload_type, rand(), "video", "");
    client->video->startStreaming();

    {
      std::unique_lock lock(client->lock);
      client->pc->setRemoteDescription(offer);
      client->pc->setLocalDescription();
      // woken by onGatheringStateChange(); bounded so the worker never hangs
      client->wait_for_complete.wait_for(lock, webrtc_client_lock_timeout);
    }

    if (client->pc->gatheringState() == rtc::PeerConnection::GatheringState::Complete) {
      auto description = client->pc->localDescription();

      // named `response` (not `message`) to avoid shadowing the parameter
      nlohmann::json response;
      response["type"] = description->typeString();
      response["sdp"] = std::string(description.value());
      http_write_response(stream, "200 OK", "application/json", response.dump().c_str(), 0);
      LOG_VERBOSE(client.get(), "Local SDP Answer: %s", std::string(response["sdp"]).c_str());
    } else {
      http_500(stream, "Not complete");
    }
  } catch(const std::exception &e) {
    http_500(stream, e.what());
    removeClient(client, e.what());
  }
}
// Read the request body (up to Content-Length, capped at
// webrtc_client_max_json_body) and parse it as JSON.
// Throws nlohmann::json::parse_error on malformed input.
nlohmann::json http_parse_json_body(http_worker_t *worker, FILE *stream)
{
  size_t n = webrtc_client_max_json_body;

  // content_length is -1 when the header was absent. The previous
  // `(size_t)length < 0` check was dead code: after the unsigned cast
  // it could never be true. Clamp before casting instead.
  if (worker->content_length >= 0 && (size_t)worker->content_length < n) {
    n = (size_t)worker->content_length;
  }

  std::string text;
  text.resize(n);

  size_t i = 0;
  // also stop on a stream error; fread() returning 0 without EOF would
  // otherwise spin forever
  while (i < n && !feof(stream) && !ferror(stream)) {
    i += fread(&text[i], 1, n - i, stream);
  }
  text.resize(i);

  return nlohmann::json::parse(text);
}
// Entry point for "POST /video": parse the JSON body and dispatch on
// its "type" field ("request", "answer" or "offer").
extern "C" void http_webrtc_offer(http_worker_t *worker, FILE *stream)
{
  nlohmann::json message;

  // a malformed body throws; letting that escape an extern "C"
  // function would terminate the process — answer 400 instead
  try {
    message = http_parse_json_body(worker, stream);
  } catch(const std::exception &e) {
    http_400(stream, e.what());
    return;
  }

  if (!message.contains("type")) {
    http_400(stream, "missing 'type'");
    return;
  }

  std::string type = message["type"];
  LOG_DEBUG(worker, "Received: '%s'", type.c_str()); // fixed typo: "Recevied"

  if (type == "request") {
    http_webrtc_request(worker, stream, message);
  } else if (type == "answer") {
    http_webrtc_answer(worker, stream, message);
  } else if (type == "offer") {
    http_webrtc_offer(worker, stream, message);
  } else {
    http_400(stream, (std::string("Not expected: " + type)).c_str());
  }
}
// Hook WebRTC into the H264 buffer pipelines: capture keeps running
// while any client wants frames, and every captured buffer (full and
// low resolution) is fanned out to the connected clients.
extern "C" void webrtc_server()
{
  buffer_lock_register_check_streaming(&http_h264, webrtc_h264_needs_buffer);
  buffer_lock_register_check_streaming(&http_h264_lowres, webrtc_h264_needs_buffer);

  buffer_lock_register_notify_buffer(&http_h264, webrtc_h264_capture);
  buffer_lock_register_notify_buffer(&http_h264_lowres, webrtc_h264_low_res_capture);
}
#else // USE_LIBDATACHANNEL
// Stubs compiled when libdatachannel is unavailable: signaling requests
// answer 404 and server registration is a no-op.
extern "C" void http_webrtc_offer(http_worker_t *worker, FILE *stream)
{
  http_404(stream, NULL);
}
extern "C" void webrtc_server()
{
}
#endif // USE_LIBDATACHANNEL

11
output/webrtc/webrtc.h Normal file
View File

@ -0,0 +1,11 @@
#pragma once
#include "util/http/http.h"
#include "util/opts/log.h"
#include "util/opts/fourcc.h"
#include "util/opts/control.h"
#include "device/buffer.h"
// WebRTC
// HTTP handler for "POST /video": accepts the JSON signaling messages
// ({"type": "request"|"offer"|"answer"}) used to negotiate a stream.
void http_webrtc_offer(http_worker_t *worker, FILE *stream);
// Registers the WebRTC output on the H264 buffer pipelines; no-op when
// compiled without libdatachannel.
void webrtc_server();

1
third_party/libdatachannel vendored Submodule

@ -0,0 +1 @@
Subproject commit 04cf4738961f55ba3f0aa39b4a61342f66bb3781

View File

@ -14,6 +14,9 @@
#include "http.h" #include "http.h"
#include "util/opts/log.h" #include "util/opts/log.h"
#define HEADER_RANGE "Range:"
#define HEADER_CONTENT_LENGTH "Content-Length:"
static int http_listen(int port, int maxcons) static int http_listen(int port, int maxcons)
{ {
struct sockaddr_in server = {0}; struct sockaddr_in server = {0};
@ -92,6 +95,7 @@ static void http_process(http_worker_t *worker, FILE *stream)
} }
worker->range_header[0] = 0; worker->range_header[0] = 0;
worker->content_length = -1;
// Consume headers // Consume headers
for(int i = 0; i < 50; i++) { for(int i = 0; i < 50; i++) {
@ -101,9 +105,12 @@ static void http_process(http_worker_t *worker, FILE *stream)
if (line[0] == '\r' && line[1] == '\n') if (line[0] == '\r' && line[1] == '\n')
break; break;
if (strcasestr(line, "Range:") == line) { if (strcasestr(line, HEADER_RANGE) == line) {
strcpy(worker->range_header, line); strcpy(worker->range_header, line);
} }
if (strcasestr(line, HEADER_CONTENT_LENGTH) == line) {
worker->content_length = atoi(line + strlen(HEADER_CONTENT_LENGTH));
}
} }
worker->current_method = NULL; worker->current_method = NULL;

View File

@ -31,6 +31,7 @@ typedef struct http_worker_s {
pthread_t thread; pthread_t thread;
int client_fd; int client_fd;
int content_length;
struct sockaddr_in client_addr; struct sockaddr_in client_addr;
char *client_host; char *client_host;
char client_method[BUFSIZE]; char client_method[BUFSIZE];
@ -46,7 +47,9 @@ typedef struct http_server_options_s {
int http_server(http_server_options_t *options, http_method_t *methods); int http_server(http_server_options_t *options, http_method_t *methods);
void http_content(http_worker_t *worker, FILE *stream); void http_content(http_worker_t *worker, FILE *stream);
void http_write_response(FILE *stream, const char *status, const char *content_type, const char *body, unsigned content_length);
void http_200(FILE *stream, const char *data); void http_200(FILE *stream, const char *data);
void http_400(FILE *stream, const char *data);
void http_404(FILE *stream, const char *data); void http_404(FILE *stream, const char *data);
void http_500(FILE *stream, const char *data); void http_500(FILE *stream, const char *data);
void *http_enum_params(http_worker_t *worker, FILE *stream, http_param_fn fn, void *opaque); void *http_enum_params(http_worker_t *worker, FILE *stream, http_param_fn fn, void *opaque);

View File

@ -3,7 +3,7 @@
#include "http.h" #include "http.h"
static void http_write_response( void http_write_response(
FILE *stream, FILE *stream,
const char *status, const char *status,
const char *content_type, const char *content_type,
@ -46,6 +46,11 @@ void http_200(FILE *stream, const char *data)
http_write_response(stream, "200 OK", NULL, data ? data : "Nothing here.\n", 0); http_write_response(stream, "200 OK", NULL, data ? data : "Nothing here.\n", 0);
} }
void http_400(FILE *stream, const char *data)
{
http_write_response(stream, "400 Bad Request", NULL, data ? data : "Nothing here.\n", 0);
}
void http_404(FILE *stream, const char *data) void http_404(FILE *stream, const char *data)
{ {
http_write_response(stream, "404 Not Found", NULL, data ? data : "Nothing here.\n", 0); http_write_response(stream, "404 Not Found", NULL, data ? data : "Nothing here.\n", 0);