Add -snapshot.height, -stream.height and -video.height

This commit is contained in:
Kamil Trzciński 2022-10-29 14:30:43 +02:00 committed by Kamil Trzcinski
parent c18561c9f8
commit ca08adb62f
29 changed files with 553 additions and 514 deletions

View File

@ -40,6 +40,22 @@ apt-get dist-upgrade
reboot reboot
``` ```
Ensure that your `/boot/config.txt` has enough of GPU memory (required for JPEG re-encoding):
```
# Example for IMX519
dtoverlay=vc4-kms-v3d,cma-128
gpu_mem=128 # preferred 160 or 256MB
dtoverlay=imx519
# Example for Arducam 64MP
gpu_mem=128
dtoverlay=arducam_64mp,media-controller=1
# Example for USB cam
gpu_mem=128
```
## Compile ## Compile
```bash ```bash
@ -137,8 +153,13 @@ All streams are exposed over very simple HTTP server, providing different stream
Camera capture and resolution exposed is controlled by threee parameters: Camera capture and resolution exposed is controlled by threee parameters:
- `-camera-width` and `-camera-height` define capture resolution - `-camera-width` and `-camera-height` define capture resolution
- (ISP mode only) `-camera-high_res_factor` a default resolution exposed via HTTP (`exposed_width = camera_width / factor, exposed_height = camera_height / factor`) - `-camera-video.height` - define height for an aspect ratio scaled resolution for `/video` and `/webrtc` (H264) output - this might require rescaller and might not always work
- (ISP mode only) `-camera-low_res_factor` a low-resolution exposed via HTTP when `?res=low` is added (ex. `http://<ip>:8080/snapshot`) - `-camera-stream.height` - define height for an aspect ratio scaled resolution for `/stream` (MJPEG) output - this might require rescaller and might not always work
- `-camera-snapshot.height` - define height for an aspect ratio scaled resolution for `/snapshot` (JPEG) output - this might require rescaller and might not always work
Any `video`, `stream` and `snapshot` might not work as this requires usually decoding, scaling, and encoding to achieve the desired resolution.
This works ONLY BEST when using `libcamera`, the support for `USB` will varry and might require configuring `/boot/config.txt` to set enough of GPU memory to be able to re-encode JPEG.
## RTSP server ## RTSP server
@ -205,6 +226,7 @@ and enabled in `imx519`. Focus can be manually controlled via `i2c-tools`:
```shell ```shell
# /boot/config.txt # /boot/config.txt
dtoverlay=imx519,media-controller=0 dtoverlay=imx519,media-controller=0
gpu_mem=160 # at least 128
# /etc/modules-load.d/modules.conf # /etc/modules-load.d/modules.conf
i2c-dev i2c-dev

View File

@ -16,6 +16,36 @@ extern rtsp_options_t rtsp_options;
camera_t *camera; camera_t *camera;
void deprecations()
{
if (camera_options.high_res_factor > 0) {
printf("Using deprecated `-camera-high_res_factor`. Use `-camera-snapshot.height` instead.");
if (!camera_options.snapshot.height)
camera_options.snapshot.height = camera_options.height / camera_options.high_res_factor;
}
if (camera_options.low_res_factor > 0) {
printf("Using deprecated `-camera-low_res_factor`. Use `-camera-stream.height` or `-camera-video.height` instead.");
if (!camera_options.stream.height)
camera_options.stream.height = camera_options.height / camera_options.low_res_factor;
if (!camera_options.video.height)
camera_options.video.height = camera_options.height / camera_options.low_res_factor;
}
}
void inherit()
{
if (!camera_options.snapshot.height || camera_options.snapshot.height > camera_options.height)
camera_options.snapshot.height = camera_options.height;
if (!camera_options.video.height || camera_options.video.height > camera_options.snapshot.height)
camera_options.video.height = camera_options.snapshot.height;
if (!camera_options.stream.height || camera_options.stream.height > camera_options.video.height)
camera_options.stream.height = camera_options.video.height;
}
int main(int argc, char *argv[]) int main(int argc, char *argv[])
{ {
int http_fd = -1; int http_fd = -1;
@ -25,6 +55,9 @@ int main(int argc, char *argv[])
return -1; return -1;
} }
deprecations();
inherit();
if (camera_options.list_options) { if (camera_options.list_options) {
camera = camera_open(&camera_options); camera = camera_open(&camera_options);
if (camera) { if (camera) {

View File

@ -4,6 +4,7 @@
#include "util/opts/fourcc.h" #include "util/opts/fourcc.h"
#include "device/camera/camera.h" #include "device/camera/camera.h"
#include "output/rtsp/rtsp.h" #include "output/rtsp/rtsp.h"
#include "output/output.h"
camera_options_t camera_options = { camera_options_t camera_options = {
.path = "", .path = "",
@ -13,13 +14,19 @@ camera_options_t camera_options = {
.nbufs = 3, .nbufs = 3,
.fps = 30, .fps = 30,
.allow_dma = true, .allow_dma = true,
.high_res_factor = 1.0, .high_res_factor = 0.0,
.low_res_factor = 0.0, .low_res_factor = 0.0,
.auto_reconnect = 0, .auto_reconnect = 0,
.auto_focus = true, .auto_focus = true,
.options = "", .options = "",
.list_options = false, .list_options = false,
.h264 = { .snapshot = {
.options = "compression_quality=80"
},
.stream = {
.options = "compression_quality=80"
},
.video = {
.options = .options =
"video_bitrate_mode=0" OPTION_VALUE_LIST_SEP "video_bitrate_mode=0" OPTION_VALUE_LIST_SEP
"video_bitrate=2000000" OPTION_VALUE_LIST_SEP "video_bitrate=2000000" OPTION_VALUE_LIST_SEP
@ -29,10 +36,7 @@ camera_options_t camera_options = {
"h264_profile=4" OPTION_VALUE_LIST_SEP "h264_profile=4" OPTION_VALUE_LIST_SEP
"h264_minimum_qp_value=16" OPTION_VALUE_LIST_SEP "h264_minimum_qp_value=16" OPTION_VALUE_LIST_SEP
"h264_maximum_qp_value=32" "h264_maximum_qp_value=32"
}, }
.jpeg = {
.options = "compression_quality=80"
},
}; };
http_server_options_t http_options = { http_server_options_t http_options = {
@ -93,8 +97,18 @@ option_t all_options[] = {
DEFINE_OPTION_DEFAULT(camera, hflip, bool, "1", "Do horizontal image flip (does not work with all camera)."), DEFINE_OPTION_DEFAULT(camera, hflip, bool, "1", "Do horizontal image flip (does not work with all camera)."),
DEFINE_OPTION_PTR(camera, isp.options, list, "Set the ISP processing options. List all available options with `-camera-list_options`."), DEFINE_OPTION_PTR(camera, isp.options, list, "Set the ISP processing options. List all available options with `-camera-list_options`."),
DEFINE_OPTION_PTR(camera, jpeg.options, list, "Set the JPEG compression options. List all available options with `-camera-list_options`."),
DEFINE_OPTION_PTR(camera, h264.options, list, "Set the H264 encoding options. List all available options with `-camera-list_options`."), DEFINE_OPTION_PTR(camera, snapshot.options, list, "Set the JPEG compression options. List all available options with `-camera-list_options`."),
DEFINE_OPTION(camera, snapshot.height, uint, "Override the snapshot height and maintain aspect ratio."),
DEFINE_OPTION_DEFAULT(camera, stream.disabled, bool, "1", "Disable stream."),
DEFINE_OPTION_PTR(camera, stream.options, list, "Set the JPEG compression options. List all available options with `-camera-list_options`."),
DEFINE_OPTION(camera, stream.height, uint, "Override the stream height and maintain aspect ratio."),
DEFINE_OPTION_DEFAULT(camera, video.disabled, bool, "1", "Disable video."),
DEFINE_OPTION_PTR(camera, video.options, list, "Set the H264 encoding options. List all available options with `-camera-list_options`."),
DEFINE_OPTION(camera, video.height, uint, "Override the video height and maintain aspect ratio."),
DEFINE_OPTION_DEFAULT(camera, list_options, bool, "1", "List all available options and exit."), DEFINE_OPTION_DEFAULT(camera, list_options, bool, "1", "List all available options and exit."),
DEFINE_OPTION(http, port, uint, "Set the HTTP web-server port."), DEFINE_OPTION(http, port, uint, "Set the HTTP web-server port."),

View File

@ -43,13 +43,15 @@ void camera_close(camera_t **camerap)
for (int i = MAX_DEVICES; i-- > 0; ) { for (int i = MAX_DEVICES; i-- > 0; ) {
link_t *link = &camera->links[i]; link_t *link = &camera->links[i];
if (link->callbacks.on_buffer) { for (int j = 0; j < link->n_callbacks; j++) {
link->callbacks.on_buffer(NULL); if (link->callbacks[j].on_buffer) {
link->callbacks.on_buffer = NULL; link->callbacks[j].on_buffer(NULL);
} link->callbacks[j].on_buffer = NULL;
if (link->callbacks.buf_lock) { }
buffer_lock_capture(link->callbacks.buf_lock, NULL); if (link->callbacks[j].buf_lock) {
link->callbacks.buf_lock = NULL; buffer_lock_capture(link->callbacks[j].buf_lock, NULL);
link->callbacks[j].buf_lock = NULL;
}
} }
} }
@ -86,10 +88,10 @@ void camera_capture_add_output(camera_t *camera, buffer_list_t *capture, buffer_
link->sinks[nsinks] = output; link->sinks[nsinks] = output;
} }
void camera_capture_set_callbacks(camera_t *camera, buffer_list_t *capture, link_callbacks_t callbacks) void camera_capture_add_callbacks(camera_t *camera, buffer_list_t *capture, link_callbacks_t callbacks)
{ {
link_t *link = camera_ensure_capture(camera, capture); link_t *link = camera_ensure_capture(camera, capture);
link->callbacks = callbacks; link->callbacks[link->n_callbacks++] = callbacks;
if (callbacks.buf_lock) { if (callbacks.buf_lock) {
callbacks.buf_lock->buf_list = capture; callbacks.buf_lock->buf_list = capture;
@ -107,12 +109,10 @@ int camera_set_params(camera_t *camera)
} }
// Set some defaults // Set some defaults
for (int i = 0; i < 2; i++) { device_set_option_list(camera->codec_snapshot, camera->options.snapshot.options);
device_set_option_list(camera->legacy_isp[i], camera->options.isp.options); device_set_option_list(camera->codec_stream, camera->options.stream.options);
device_set_option_list(camera->codec_jpeg[i], camera->options.jpeg.options); device_set_option_string(camera->codec_video, "repeat_sequence_header", "1"); // required for force key support
device_set_option_string(camera->codec_h264[i], "repeat_sequence_header", "1"); // required for force key support device_set_option_list(camera->codec_video, camera->options.video.options);
device_set_option_list(camera->codec_h264[i], camera->options.h264.options);
}
return 0; return 0;
} }

View File

@ -4,6 +4,7 @@
#include "device/device.h" #include "device/device.h"
#define MAX_DEVICES 20 #define MAX_DEVICES 20
#define MAX_RESCALLERS 4
#define MAX_HTTP_METHODS 20 #define MAX_HTTP_METHODS 20
#define CAMERA_DEVICE_CAMERA 0 #define CAMERA_DEVICE_CAMERA 0
@ -14,6 +15,12 @@ typedef enum {
CAMERA_LIBCAMERA CAMERA_LIBCAMERA
} camera_type_t; } camera_type_t;
typedef struct camera_output_options_s {
bool disabled;
unsigned height;
char options[CAMERA_OPTIONS_LENGTH];
} camera_output_options_t;
typedef struct camera_options_s { typedef struct camera_options_s {
char path[256]; char path[256];
unsigned width, height, format; unsigned width, height, format;
@ -40,13 +47,9 @@ typedef struct camera_options_s {
char options[CAMERA_OPTIONS_LENGTH]; char options[CAMERA_OPTIONS_LENGTH];
} isp; } isp;
struct { camera_output_options_t snapshot;
char options[CAMERA_OPTIONS_LENGTH]; camera_output_options_t stream;
} jpeg; camera_output_options_t video;
struct {
char options[CAMERA_OPTIONS_LENGTH];
} h264;
} camera_options_t; } camera_options_t;
typedef struct camera_s { typedef struct camera_s {
@ -60,9 +63,10 @@ typedef struct camera_s {
device_t *camera; device_t *camera;
device_t *decoder; // decode JPEG/H264 into YUVU device_t *decoder; // decode JPEG/H264 into YUVU
device_t *isp; device_t *isp;
device_t *legacy_isp[2]; device_t *rescallers[3];
device_t *codec_jpeg[2]; // encode YUVU into JPEG device_t *codec_snapshot;
device_t *codec_h264[2]; // encode YUVU into H264 device_t *codec_stream;
device_t *codec_video;
}; };
}; };
@ -81,12 +85,13 @@ int camera_run(camera_t *camera);
link_t *camera_ensure_capture(camera_t *camera, buffer_list_t *capture); link_t *camera_ensure_capture(camera_t *camera, buffer_list_t *capture);
void camera_capture_add_output(camera_t *camera, buffer_list_t *capture, buffer_list_t *output); void camera_capture_add_output(camera_t *camera, buffer_list_t *capture, buffer_list_t *output);
void camera_capture_set_callbacks(camera_t *camera, buffer_list_t *capture, link_callbacks_t callbacks); void camera_capture_add_callbacks(camera_t *camera, buffer_list_t *capture, link_callbacks_t callbacks);
int camera_configure_input(camera_t *camera); int camera_configure_input(camera_t *camera);
int camera_configure_decoder(camera_t *camera, buffer_list_t *src_capture); int camera_configure_pipeline(camera_t *camera, buffer_list_t *camera_capture);
int camera_configure_output_rescaler(camera_t *camera, buffer_list_t *src_capture, float high_div, float low_div);
int camera_configure_output(camera_t *camera, buffer_list_t *src_capture, int res);
int camera_configure_isp(camera_t *camera, buffer_list_t *src, float high_div, float low_div); buffer_list_t *camera_configure_isp(camera_t *camera, buffer_list_t *src_capture);
int camera_configure_legacy_isp(camera_t *camera, buffer_list_t *src, float div, int res); buffer_list_t *camera_configure_decoder(camera_t *camera, buffer_list_t *src_capture);
unsigned camera_rescaller_align_size(unsigned target_height);
buffer_list_t *camera_configure_rescaller(camera_t *camera, buffer_list_t *src_capture, const char *name, unsigned target_height, unsigned formats[]);
int camera_configure_output(camera_t *camera, const char *name, unsigned target_height, unsigned formats[], link_callbacks_t callbacks, device_t **device);

View File

@ -0,0 +1,81 @@
#include "camera.h"
#include "device/buffer.h"
#include "device/buffer_list.h"
#include "device/device.h"
#include "device/device_list.h"
#include "device/links.h"
#include "util/opts/log.h"
#include "util/opts/fourcc.h"
#include "device/buffer_list.h"
#include "util/http/http.h"
#include "output/rtsp/rtsp.h"
#include "output/output.h"
static unsigned decoder_formats[] =
{
// best quality
V4L2_PIX_FMT_YUYV,
// medium quality
V4L2_PIX_FMT_YUV420,
V4L2_PIX_FMT_NV12,
// low quality
V4L2_PIX_FMT_YVU420,
V4L2_PIX_FMT_NV21,
0
};
static void decoder_debug_on_buffer(buffer_t *buf)
{
if (!buf) {
return;
}
static int index = 0;
char path[256];
sprintf(path, "/tmp/decoder_capture.%d.%s", index++ % 10, fourcc_to_string(buf->buf_list->fmt.format).buf);
FILE *fp = fopen(path, "wb");
if (!fp) {
return;
}
fwrite(buf->start, 1, buf->used, fp);
fclose(fp);
}
static link_callbacks_t decoder_debug_callbacks = {
.name = "DECODER-DEBUG-CAPTURE",
.on_buffer = decoder_debug_on_buffer
};
buffer_list_t *camera_configure_decoder(camera_t *camera, buffer_list_t *src_capture)
{
unsigned chosen_format = 0;
device_info_t *device = device_list_find_m2m_formats(camera->device_list, src_capture->fmt.format, decoder_formats, &chosen_format);
if (!device) {
LOG_INFO(camera, "Cannot find '%s' decoder", fourcc_to_string(src_capture->fmt.format).buf);
return NULL;
}
device_video_force_key(camera->camera);
camera->decoder = device_v4l2_open("DECODER", device->path);
buffer_list_t *decoder_output = device_open_buffer_list_output(
camera->decoder, src_capture);
buffer_list_t *decoder_capture = device_open_buffer_list_capture(
camera->decoder, NULL, decoder_output, 0, 0, chosen_format, true);
if (getenv("CAMERA_DECODER_DEBUG")) {
camera_capture_add_callbacks(camera, decoder_capture, decoder_debug_callbacks);
}
camera_capture_add_output(camera, src_capture, decoder_output);
return decoder_capture;
}

View File

@ -32,47 +32,13 @@ static int camera_configure_input_v4l2(camera_t *camera)
camera->camera->opts.allow_dma = false; camera->camera->opts.allow_dma = false;
} }
buffer_list_t *camera_capture = device_open_buffer_list(camera->camera, true, camera->options.width, camera->options.height, camera->options.format, 0, camera->options.nbufs, true); buffer_list_t *camera_capture = device_open_buffer_list(camera->camera, true,
camera->options.width, camera->options.height, camera->options.format, 0, camera->options.nbufs, true);
if (!camera_capture) { if (!camera_capture) {
return -1; return -1;
} }
camera_capture->do_timestamps = true;
if (camera->options.fps > 0) { return camera_configure_pipeline(camera, camera_capture);
camera_capture->fmt.interval_us = 1000 * 1000 / camera->options.fps;
}
switch (camera_capture->fmt.format) {
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_VYUY:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB24:
if (camera->options.high_res_factor > 1) {
// Use ISP, as there are two resolutions
return camera_configure_isp(camera, camera_capture,
camera->options.high_res_factor, camera->options.low_res_factor);
} else {
// Use direct approach, as there's likely low frequently used low resolution
return camera_configure_output_rescaler(camera, camera_capture,
camera->options.high_res_factor, camera->options.low_res_factor);
}
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_H264:
return camera_configure_decoder(camera, camera_capture);
case V4L2_PIX_FMT_SRGGB10P:
return camera_configure_isp(camera, camera_capture,
camera->options.high_res_factor, camera->options.low_res_factor);
default:
LOG_INFO(camera, "Unsupported camera format=%s",
fourcc_to_string(camera_capture->fmt.format).buf);
return -1;
}
} }
static int camera_configure_input_libcamera(camera_t *camera) static int camera_configure_input_libcamera(camera_t *camera)
@ -89,8 +55,8 @@ static int camera_configure_input_libcamera(camera_t *camera)
buffer_list_t *camera_capture = device_open_buffer_list( buffer_list_t *camera_capture = device_open_buffer_list(
camera->camera, camera->camera,
true, true,
camera->options.width / camera->options.high_res_factor, camera->options.width,
camera->options.height / camera->options.high_res_factor, camera->options.height,
camera->options.format, camera->options.format,
0, 0,
camera->options.nbufs, camera->options.nbufs,
@ -99,14 +65,8 @@ static int camera_configure_input_libcamera(camera_t *camera)
if (!camera_capture) { if (!camera_capture) {
return -1; return -1;
} }
camera_capture->do_timestamps = true;
if (camera->options.fps > 0) { return camera_configure_pipeline(camera, camera_capture);
camera_capture->fmt.interval_us = 1000 * 1000 / camera->options.fps;
}
return camera_configure_output_rescaler(camera, camera_capture,
1.0, camera->options.low_res_factor / camera->options.high_res_factor);
} }
int camera_configure_input(camera_t *camera) int camera_configure_input(camera_t *camera)

View File

@ -10,67 +10,16 @@
#include "device/buffer_list.h" #include "device/buffer_list.h"
#include "util/http/http.h" #include "util/http/http.h"
int camera_configure_output_rescaler2(camera_t *camera, buffer_list_t *src_capture, float div, int res); buffer_list_t *camera_configure_isp(camera_t *camera, buffer_list_t *src_capture)
int camera_configure_isp(camera_t *camera, buffer_list_t *src_capture, float high_div, float low_div)
{ {
camera->isp = device_v4l2_open("ISP", "/dev/video13"); camera->isp = device_v4l2_open("ISP", "/dev/video13");
buffer_list_t *isp_output = device_open_buffer_list_output( buffer_list_t *isp_output = device_open_buffer_list_output(
camera->isp, src_capture); camera->isp, src_capture);
buffer_list_t *isp_capture = device_open_buffer_list_capture2(
camera->isp, "/dev/video14", isp_output, high_div, V4L2_PIX_FMT_YUYV, true);
camera_capture_add_output(camera, src_capture, isp_output);
if (camera_configure_output(camera, isp_capture, 0) < 0) {
return -1;
}
#if 1
return camera_configure_output_rescaler2(camera, isp_capture, low_div, 1);
#else
if (low_div > 1) {
// TODO: Currently we cannot pull the data at the same time from /dev/video14 and /dev/video15
// if only one path (consuming /dev/video15) is activated
buffer_list_t *isp_lowres_capture = device_open_buffer_list_capture2(
camera->isp, "/dev/video15", isp_output, low_div, V4L2_PIX_FMT_YUYV, true);
if (camera_configure_output(camera, isp_lowres_capture, 1) < 0) {
return -1;
}
}
return 0;
#endif
}
static const char *isp_names[2] = {
"ISP",
"ISP-LOW"
};
int camera_configure_legacy_isp(camera_t *camera, buffer_list_t *src_capture, float div, int res)
{
device_info_t *device = device_list_find_m2m_format(camera->device_list, src_capture->fmt.format, V4L2_PIX_FMT_YUYV);
if (!device) {
LOG_INFO(camera, "Cannot find ISP to scale from '%s' to 'YUYV'", fourcc_to_string(src_capture->fmt.format).buf);
return -1;
}
camera->legacy_isp[res] = device_v4l2_open(isp_names[res], device->path);
buffer_list_t *isp_output = device_open_buffer_list_output(
camera->legacy_isp[res], src_capture);
buffer_list_t *isp_capture = device_open_buffer_list_capture( buffer_list_t *isp_capture = device_open_buffer_list_capture(
camera->legacy_isp[res], isp_output, div, V4L2_PIX_FMT_YUYV, true); camera->isp, "/dev/video14", isp_output, 0, 0, V4L2_PIX_FMT_YUYV, true);
camera_capture_add_output(camera, src_capture, isp_output); camera_capture_add_output(camera, src_capture, isp_output);
if (camera_configure_output(camera, isp_capture, res) < 0) { return isp_capture;
return -1;
}
return 0;
} }

View File

@ -12,196 +12,94 @@
#include "output/rtsp/rtsp.h" #include "output/rtsp/rtsp.h"
#include "output/output.h" #include "output/output.h"
static const char *jpeg_names[2] = { static bool camera_output_matches_capture(buffer_list_t *capture, unsigned target_height, unsigned formats[])
"JPEG",
"JPEG-LOW"
};
static link_callbacks_t jpeg_callbacks[2] = {
{ .name = "JPEG-CAPTURE", .buf_lock = &http_jpeg },
{ .name = "JPEG-LOW-CAPTURE", .buf_lock = &http_jpeg_lowres }
};
static const char *h264_names[2] = {
"H264",
"H264-LOW"
};
static link_callbacks_t h264_callbacks[2] = {
{ .name = "H264-CAPTURE", .buf_lock = &http_h264 },
{ .name = "H264-LOW-CAPTURE", .buf_lock = &http_h264_lowres }
};
static int camera_configure_h264_output(camera_t *camera, buffer_list_t *src_capture, int res)
{ {
if (src_capture->fmt.format == V4L2_PIX_FMT_H264) { if (target_height && capture->fmt.height != target_height && capture->fmt.height != camera_rescaller_align_size(target_height)) {
camera_capture_set_callbacks(camera, src_capture, h264_callbacks[res]); return false;
return 0;
} }
device_info_t *device = device_list_find_m2m_format(camera->device_list, src_capture->fmt.format, V4L2_PIX_FMT_H264); for (int i = 0; formats[i]; i++) {
if (formats[i] == capture->fmt.format)
if (!device) { return true;
LOG_INFO(camera, "Cannot find H264 encoder to convert from '%s'", fourcc_to_string(src_capture->fmt.format).buf);
return -1;
} }
camera->codec_h264[res] = device_v4l2_open(h264_names[res], device->path); return false;
buffer_list_t *output = device_open_buffer_list_output(camera->codec_h264[res], src_capture);
buffer_list_t *capture = device_open_buffer_list_capture(camera->codec_h264[res], output, 1.0, V4L2_PIX_FMT_H264, true);
if (!capture) {
return -1;
}
camera_capture_add_output(camera, src_capture, output);
camera_capture_set_callbacks(camera, capture, h264_callbacks[res]);
return 0;
} }
static int camera_configure_jpeg_output(camera_t *camera, buffer_list_t *src_capture, int res) static buffer_list_t *camera_find_capture(camera_t *camera, unsigned target_height, unsigned formats[])
{ {
if (src_capture->fmt.format == V4L2_PIX_FMT_MJPEG || src_capture->fmt.format == V4L2_PIX_FMT_JPEG) { for (int i = 0; i < MAX_DEVICES; i++) {
camera_capture_set_callbacks(camera, src_capture, jpeg_callbacks[res]); if (!camera->devices[i])
return 0; continue;
}
device_info_t *device = device_list_find_m2m_format(camera->device_list, src_capture->fmt.format, V4L2_PIX_FMT_JPEG); device_t *device = camera->devices[i];
for (int j = 0; j < device->n_capture_list; j++) {
buffer_list_t *capture_list = device->capture_lists[j];
if (!device) { if (camera_output_matches_capture(capture_list, target_height, formats)) {
device = device_list_find_m2m_format(camera->device_list, src_capture->fmt.format, V4L2_PIX_FMT_MJPEG); return capture_list;
} }
if (!device) {
LOG_INFO(camera, "Cannot find JPEG encoder to convert from '%s'", fourcc_to_string(src_capture->fmt.format).buf);
return -1;
}
camera->codec_jpeg[res] = device_v4l2_open(jpeg_names[res], device->path);
buffer_list_t *output = device_open_buffer_list_output(camera->codec_jpeg[res], src_capture);
buffer_list_t *capture = device_open_buffer_list_capture(camera->codec_jpeg[res], output, 1.0, V4L2_PIX_FMT_JPEG, true);
if (!capture) {
return -1;
}
camera_capture_add_output(camera, src_capture, output);
camera_capture_set_callbacks(camera, capture, jpeg_callbacks[res]);
return 0;
}
int camera_configure_output(camera_t *camera, buffer_list_t *src_capture, int res)
{
if (camera_configure_h264_output(camera, src_capture, res) < 0 ||
camera_configure_jpeg_output(camera, src_capture, res) < 0) {
return -1;
} }
}
return 0; return NULL;
} }
int camera_configure_output_rescaler2(camera_t *camera, buffer_list_t *src_capture, float div, int res) static unsigned rescalled_formats[] =
{ {
if (div > 1) { // best quality
return camera_configure_legacy_isp(camera, src_capture, div, res); V4L2_PIX_FMT_YUYV,
} else if (div > 0) {
return camera_configure_output(camera, src_capture, 0);
} else {
return 0;
}
}
int camera_configure_output_rescaler(camera_t *camera, buffer_list_t *src_capture, float high_div, float low_div) // medium quality
{ V4L2_PIX_FMT_YUV420,
if (camera_configure_output_rescaler2(camera, src_capture, high_div, 0) < 0 || V4L2_PIX_FMT_NV12,
camera_configure_output_rescaler2(camera, src_capture, low_div, 1) < 0) {
return -1;
}
return 0; // low quality
} V4L2_PIX_FMT_NV21,
V4L2_PIX_FMT_YVU420,
static void decoder_debug_on_buffer(buffer_t *buf) { 0
if (!buf) {
return;
}
static int index = 0;
char path[256];
sprintf(path, "/tmp/decoder_capture.%d.%s", index++ % 10, fourcc_to_string(buf->buf_list->fmt.format).buf);
FILE *fp = fopen(path, "wb");
if (!fp) {
return;
}
fwrite(buf->start, 1, buf->used, fp);
fclose(fp);
}
static link_callbacks_t decoder_debug_callbacks = {
.name = "DECODER-DEBUG-CAPTURE",
.on_buffer = decoder_debug_on_buffer
}; };
int camera_configure_decoder(camera_t *camera, buffer_list_t *src_capture) int camera_configure_output(camera_t *camera, const char *name, unsigned target_height, unsigned formats[], link_callbacks_t callbacks, device_t **device)
{ {
unsigned decode_formats[] = { buffer_list_t *src_capture = camera_find_capture(camera, target_height, formats);
// best quality if (src_capture) {
V4L2_PIX_FMT_YUYV, camera_capture_add_callbacks(camera, src_capture, callbacks);
return 0;
}
// medium quality src_capture = camera_find_capture(camera, target_height, rescalled_formats);
V4L2_PIX_FMT_YUV420, if (!src_capture) {
V4L2_PIX_FMT_NV12, // Try to find re-scallabe output
src_capture = camera_find_capture(camera, 0, rescalled_formats);
if (src_capture) {
src_capture = camera_configure_rescaller(camera, src_capture, name, target_height, rescalled_formats);
}
}
if (!src_capture) {
return -1;
}
// low quality
V4L2_PIX_FMT_NV21,
V4L2_PIX_FMT_YVU420,
0
};
unsigned chosen_format = 0; unsigned chosen_format = 0;
device_info_t *device = device_list_find_m2m_formats(camera->device_list, src_capture->fmt.format, decode_formats, &chosen_format); device_info_t *device_info = device_list_find_m2m_formats(camera->device_list, src_capture->fmt.format, formats, &chosen_format);
if (!device) { if (!device_info) {
LOG_INFO(camera, "Cannot find '%s' decoder", fourcc_to_string(src_capture->fmt.format).buf); LOG_INFO(camera, "Cannot find encoder to convert from '%s'", fourcc_to_string(src_capture->fmt.format).buf);
return -1; return -1;
} }
device_video_force_key(camera->camera); *device = device_v4l2_open(name, device_info->path);
camera->decoder = device_v4l2_open("DECODER", device->path); buffer_list_t *output = device_open_buffer_list_output(*device, src_capture);
buffer_list_t *capture = device_open_buffer_list_capture(*device, NULL, output, 0, 0, chosen_format, true);
buffer_list_t *decoder_output = device_open_buffer_list_output( if (!capture) {
camera->decoder, src_capture);
buffer_list_t *decoder_capture = device_open_buffer_list_capture(
camera->decoder, decoder_output, 1.0, chosen_format, true);
if (getenv("CAMERA_DECODER_DEBUG")) {
camera_capture_set_callbacks(camera, decoder_capture, decoder_debug_callbacks);
}
camera_capture_add_output(camera, src_capture, decoder_output);
if (camera->options.high_res_factor <= 1 && (src_capture->fmt.format == V4L2_PIX_FMT_JPEG || src_capture->fmt.format == V4L2_PIX_FMT_MJPEG)) {
camera_capture_set_callbacks(camera, src_capture, jpeg_callbacks[0]);
if (camera_configure_h264_output(camera, decoder_capture, 0) < 0)
return -1;
} else if (camera->options.high_res_factor <= 1 && src_capture->fmt.format == V4L2_PIX_FMT_H264) {
camera_capture_set_callbacks(camera, src_capture, h264_callbacks[0]);
if (camera_configure_jpeg_output(camera, decoder_capture, 0) < 0)
return -1;
} else if (camera_configure_output_rescaler2(camera, decoder_capture, camera->options.high_res_factor, 0) < 0) {
return -1;
}
if (camera->options.low_res_factor > 1 && camera_configure_output_rescaler2(camera, decoder_capture, camera->options.low_res_factor, 1) < 0) {
return -1; return -1;
} }
camera_capture_add_output(camera, src_capture, output);
camera_capture_add_callbacks(camera, capture, callbacks);
return 0; return 0;
} }

View File

@ -0,0 +1,85 @@
#include "camera.h"
#include "device/buffer.h"
#include "device/buffer_list.h"
#include "device/device.h"
#include "device/device_list.h"
#include "device/links.h"
#include "util/opts/log.h"
#include "util/opts/fourcc.h"
#include "device/buffer_list.h"
#include "util/http/http.h"
#include "output/output.h"
static unsigned snapshot_formats[] =
{
V4L2_PIX_FMT_MJPEG,
V4L2_PIX_FMT_JPEG,
0
};
static link_callbacks_t snapshot_callbacks =
{
.name = "SNAPSHOT-CAPTURE",
.buf_lock = &snapshot_lock
};
static link_callbacks_t stream_callbacks =
{
.name = "STREAM-CAPTURE",
.buf_lock = &stream_lock
};
static unsigned video_formats[] =
{
V4L2_PIX_FMT_H264,
0
};
static link_callbacks_t video_callbacks =
{
.name = "VIDEO-CAPTURE",
.buf_lock = &video_lock
};
int camera_configure_pipeline(camera_t *camera, buffer_list_t *capture)
{
if (capture) {
capture->do_timestamps = true;
if (camera->options.fps > 0) {
capture->fmt.interval_us = 1000 * 1000 / camera->options.fps;
}
switch (capture->fmt.format) {
case V4L2_PIX_FMT_SRGGB10P:
case V4L2_PIX_FMT_SGRBG10P:
case V4L2_PIX_FMT_SRGGB10:
case V4L2_PIX_FMT_SGRBG10:
camera_configure_isp(camera, capture);
break;
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_H264:
camera_configure_decoder(camera, capture);
break;
}
}
if (!camera->options.snapshot.disabled && camera_configure_output(
camera, "SNAPSHOT", camera->options.snapshot.height, snapshot_formats, snapshot_callbacks, &camera->codec_snapshot) < 0) {
return -1;
}
if (!camera->options.stream.disabled && camera_configure_output(
camera, "STREAM", camera->options.stream.height, snapshot_formats, stream_callbacks, &camera->codec_stream) < 0) {
return -1;
}
if (!camera->options.video.disabled && camera_configure_output(
camera, "VIDEO", camera->options.video.height, video_formats, video_callbacks, &camera->codec_video) < 0) {
return -1;
}
return 0;
}

View File

@ -0,0 +1,77 @@
#include "camera.h"
#include "device/buffer.h"
#include "device/buffer_list.h"
#include "device/device.h"
#include "device/device_list.h"
#include "device/links.h"
#include "util/opts/log.h"
#include "util/opts/fourcc.h"
#include "device/buffer_list.h"
#include "util/http/http.h"
unsigned camera_rescaller_align_size(unsigned size)
{
return (size + 31) / 32 * 32;
}
// Try to build one rescaller stage converting `src_capture` to
// `target_format` scaled down to `target_height` (width derived from the
// source aspect ratio, both dimensions aligned to the 32-pixel block size).
//
// Returns the rescaller device's capture buffer list on success, or NULL
// when no matching M2M converter exists, the request would be an upscale,
// or any open step fails. On success the rescaller's output is linked as a
// sink of `src_capture`.
buffer_list_t *camera_try_rescaller(camera_t *camera, buffer_list_t *src_capture, const char *name, unsigned target_height, unsigned target_format)
{
  device_info_t *device_info = device_list_find_m2m_format(camera->device_list, src_capture->fmt.format, target_format);
  if (!device_info) {
    return NULL;
  }

  // Only down-scaling is supported.
  if (target_height > src_capture->fmt.height) {
    LOG_INFO(src_capture, "Upscaling from %dp to %dp does not make sense.",
      src_capture->fmt.height, target_height);
    return NULL;
  }

  target_height = camera_rescaller_align_size(target_height);
  unsigned target_width = target_height * src_capture->fmt.width / src_capture->fmt.height;
  target_width = camera_rescaller_align_size(target_width);

  char name2[256];
  // snprintf: `name` is caller-supplied and may not fit in the buffer;
  // the unbounded sprintf used previously could overflow the stack.
  snprintf(name2, sizeof(name2), "RESCALLER:%s", name);

  device_t *device = device_v4l2_open(name2, device_info->path);
  if (!device) {
    return NULL;
  }

  buffer_list_t *rescaller_output = device_open_buffer_list_output(
    device, src_capture);
  buffer_list_t *rescaller_capture = device_open_buffer_list_capture(
    device, NULL, rescaller_output,
    target_width, target_height, target_format, true);

  if (!rescaller_capture) {
    device_close(device);
    return NULL;
  }

  camera_capture_add_output(camera, src_capture, rescaller_output);
  return rescaller_capture;
}
// Attach a rescaller to `src_capture` that produces `target_height` output
// in the first entry of `formats` (zero-terminated list) for which an M2M
// converter can be opened.
//
// Returns the rescaller's capture buffer list, or NULL when every rescaller
// slot is already taken or no listed format could be converted. On success
// the rescaller's device is recorded in the camera's rescaller table.
buffer_list_t *camera_configure_rescaller(camera_t *camera, buffer_list_t *src_capture, const char *name, unsigned target_height, unsigned formats[])
{
  // Find a free slot in the fixed-size rescaller table.
  int rescallers = 0;
  for ( ; rescallers < MAX_RESCALLERS && camera->rescallers[rescallers]; rescallers++);
  if (rescallers == MAX_RESCALLERS) {
    return NULL;
  }

  buffer_list_t *rescaller_capture = NULL;

  for (int i = 0; !rescaller_capture && formats[i]; i++) {
    rescaller_capture = camera_try_rescaller(camera, src_capture, name, target_height, formats[i]);
  }

  if (!rescaller_capture) {
    // Report the actual request; the previous message hard-coded 'YUYV'
    // even though any format from `formats[]` may have been attempted.
    LOG_INFO(src_capture, "Cannot find rescaller to scale '%s' to %dp",
      fourcc_to_string(src_capture->fmt.format).buf, target_height);
    return NULL;
  }

  camera->rescallers[rescallers] = rescaller_capture->dev;
  return rescaller_capture;
}

View File

@ -121,19 +121,14 @@ buffer_list_t *device_open_buffer_list_output(device_t *dev, buffer_list_t *capt
capture_list->dev->opts.allow_dma ? !capture_list->do_mmap : true); capture_list->dev->opts.allow_dma ? !capture_list->do_mmap : true);
} }
buffer_list_t *device_open_buffer_list_capture(device_t *dev, buffer_list_t *output_list, float div, unsigned format, bool do_mmap) buffer_list_t *device_open_buffer_list_capture(device_t *dev, const char *path, buffer_list_t *output_list, unsigned width, unsigned height, unsigned format, bool do_mmap)
{
return device_open_buffer_list_capture2(dev, NULL, output_list, div, format, do_mmap);
}
buffer_list_t *device_open_buffer_list_capture2(device_t *dev, const char *path, buffer_list_t *output_list, float div, unsigned format, bool do_mmap)
{ {
if (!dev || !output_list) { if (!dev || !output_list) {
return NULL; return NULL;
} }
return device_open_buffer_list2(dev, path, true, return device_open_buffer_list2(dev, path, true,
output_list->fmt.width / div, output_list->fmt.height / div, width ? width : output_list->fmt.width, height ? height : output_list->fmt.height,
format, 0, output_list->nbufs, do_mmap); format, 0, output_list->nbufs, do_mmap);
} }

View File

@ -60,8 +60,7 @@ void device_close(device_t *dev);
buffer_list_t *device_open_buffer_list(device_t *dev, bool do_capture, unsigned width, unsigned height, unsigned format, unsigned bytesperline, int nbufs, bool do_mmap); buffer_list_t *device_open_buffer_list(device_t *dev, bool do_capture, unsigned width, unsigned height, unsigned format, unsigned bytesperline, int nbufs, bool do_mmap);
buffer_list_t *device_open_buffer_list2(device_t *dev, const char *path, bool do_capture, unsigned width, unsigned height, unsigned format, unsigned bytesperline, int nbufs, bool do_mmap); buffer_list_t *device_open_buffer_list2(device_t *dev, const char *path, bool do_capture, unsigned width, unsigned height, unsigned format, unsigned bytesperline, int nbufs, bool do_mmap);
buffer_list_t *device_open_buffer_list_output(device_t *dev, buffer_list_t *capture_list); buffer_list_t *device_open_buffer_list_output(device_t *dev, buffer_list_t *capture_list);
buffer_list_t *device_open_buffer_list_capture(device_t *dev, buffer_list_t *output_list, float div, unsigned format, bool do_mmap); buffer_list_t *device_open_buffer_list_capture(device_t *dev, const char *path, buffer_list_t *output_list, unsigned width, unsigned height, unsigned format, bool do_mmap);
buffer_list_t *device_open_buffer_list_capture2(device_t *dev, const char *path, buffer_list_t *output_list, float div, unsigned format, bool do_mmap);
int device_consume_event(device_t *dev); int device_consume_event(device_t *dev);
int device_set_stream(device_t *dev, bool do_on); int device_set_stream(device_t *dev, bool do_on);

View File

@ -34,12 +34,14 @@ int _build_fds(link_t *all_links, struct pollfd *fds, link_t **links, buffer_lis
bool paused = true; bool paused = true;
if (link->callbacks.check_streaming && link->callbacks.check_streaming()) { for (int j = 0; j < link->n_callbacks; j++) {
paused = false; if (link->callbacks[j].check_streaming && link->callbacks[j].check_streaming()) {
} paused = false;
}
if (link->callbacks.buf_lock && buffer_lock_needs_buffer(link->callbacks.buf_lock)) { if (link->callbacks[j].buf_lock && buffer_lock_needs_buffer(link->callbacks[j].buf_lock)) {
paused = false; paused = false;
}
} }
for (int j = 0; link->sinks[j]; j++) { for (int j = 0; link->sinks[j]; j++) {
@ -127,9 +129,11 @@ int links_enqueue_from_source(buffer_list_t *buf_list, link_t *link)
LOG_ERROR(buf_list, "No buffer dequeued from source?"); LOG_ERROR(buf_list, "No buffer dequeued from source?");
} }
if (link->callbacks.validate_buffer && !link->callbacks.validate_buffer(link, buf)) { for (int j = 0; j < link->n_callbacks; j++) {
LOG_DEBUG(buf_list, "Buffer rejected by validation"); if (link->callbacks[j].validate_buffer && !link->callbacks[j].validate_buffer(link, buf)) {
return 0; LOG_DEBUG(buf_list, "Buffer rejected by validation");
return 0;
}
} }
for (int j = 0; link->sinks[j]; j++) { for (int j = 0; link->sinks[j]; j++) {
@ -142,12 +146,14 @@ int links_enqueue_from_source(buffer_list_t *buf_list, link_t *link)
buffer_list_enqueue(link->sinks[j], buf); buffer_list_enqueue(link->sinks[j], buf);
} }
if (link->callbacks.on_buffer) { for (int j = 0; j < link->n_callbacks; j++) {
link->callbacks.on_buffer(buf); if (link->callbacks[j].on_buffer) {
} link->callbacks[j].on_buffer(buf);
}
if (link->callbacks.buf_lock) { if (link->callbacks[j].buf_lock) {
buffer_lock_capture(link->callbacks.buf_lock, buf); buffer_lock_capture(link->callbacks[j].buf_lock, buf);
}
} }
return 0; return 0;
@ -346,10 +352,10 @@ void links_dump(link_t *all_links)
links_dump_buf_list(line, link->sinks[j]); links_dump_buf_list(line, link->sinks[j]);
} }
if (link->callbacks.name) { for (int j = 0; j < link->n_callbacks; j++) {
if (link->sinks[0]) if (link->sinks[0] || j > 0)
strcat(line, ", "); strcat(line, ", ");
strcat(line, link->callbacks.name); strcat(line, link->callbacks[j].name);
} }
strcat(line, "]"); strcat(line, "]");

View File

@ -25,7 +25,8 @@ typedef struct link_callbacks_s {
typedef struct link_s { typedef struct link_s {
buffer_list_t *source; // capture_list buffer_list_t *source; // capture_list
buffer_list_t *sinks[10]; buffer_list_t *sinks[10];
link_callbacks_t callbacks; link_callbacks_t callbacks[10];
int n_callbacks;
} link_t; } link_t;
int links_init(link_t *all_links); int links_init(link_t *all_links);

View File

@ -59,26 +59,24 @@ int v4l2_buffer_list_open(buffer_list_t *buf_list)
v4l2_fmt.type = buf_list->v4l2->type; v4l2_fmt.type = buf_list->v4l2->type;
buffer_format_t fmt = buf_list->fmt; buffer_format_t fmt = buf_list->fmt;
unsigned block_size = 1; unsigned block_width = 1, block_height = 1;
// JPEG is in 16x16 blocks (shrink image to fit) (but adapt to 32x32) if (buf_list->do_capture && strstr(buf_list->name, "RESCALLER")) {
// And ISP output block_width = 32;
if (strstr(buf_list->name, "JPEG")) { block_height = 32;
block_size = 32;
} else if (buf_list->do_capture && strstr(buf_list->name, "ISP")) {
block_size = 32;
} else if (strstr(buf_list->name, "H264")) {
// TODO: even though H264 encoder on RPI requires 32x32
// it appears that it breaks encoding creating a bar at top
// block_size = 32;
} }
if (block_size > 1) { LOG_DEBUG(buf_list, "Get current format ...");
ERR_IOCTL(buf_list, buf_list->v4l2->dev_fd, VIDIOC_G_FMT, &v4l2_fmt, "Can't get format");
retry_resolution_set:
if (block_width > 1 || block_height > 1) {
buffer_format_t org_fmt = buf_list->fmt; buffer_format_t org_fmt = buf_list->fmt;
fmt.width = shrink_to_block(fmt.width, block_size); fmt.width = shrink_to_block(fmt.width, block_width);
fmt.height = shrink_to_block(fmt.height, block_size); fmt.height = shrink_to_block(fmt.height, block_height);
LOG_VERBOSE(buf_list, "Adapting size to %dx%d block: %dx%d shrunk to %dx%d", LOG_VERBOSE(buf_list, "Adapting size to %dx%d block: %dx%d shrunk to %dx%d",
block_size, block_size, block_width, block_height,
org_fmt.width, org_fmt.height, fmt.width, fmt.height); org_fmt.width, org_fmt.height, fmt.width, fmt.height);
} }
@ -86,9 +84,6 @@ int v4l2_buffer_list_open(buffer_list_t *buf_list)
fmt.bytesperline = 0; fmt.bytesperline = 0;
} }
LOG_DEBUG(buf_list, "Get current format ...");
ERR_IOCTL(buf_list, buf_list->v4l2->dev_fd, VIDIOC_G_FMT, &v4l2_fmt, "Can't set format");
if (buf_list->v4l2->do_mplanes) { if (buf_list->v4l2->do_mplanes) {
v4l2_fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_JPEG; v4l2_fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_JPEG;
if (fmt.width) if (fmt.width)
@ -131,9 +126,17 @@ int v4l2_buffer_list_open(buffer_list_t *buf_list)
if (buf_list->fmt.width != fmt.width || buf_list->fmt.height != fmt.height) { if (buf_list->fmt.width != fmt.width || buf_list->fmt.height != fmt.height) {
if (fmt.bytesperline) { if (fmt.bytesperline) {
LOG_ERROR(buf_list, "Requested resolution=%ux%u is unavailable. Got %ux%u. " LOG_INFO(buf_list, "Requested resolution=%ux%u is unavailable. Got %ux%u.",
"Consider using the `-camera-high_res_factor=2` or `-camera-low_res_factor=3`",
fmt.width, fmt.height, buf_list->fmt.width, buf_list->fmt.height); fmt.width, fmt.height, buf_list->fmt.width, buf_list->fmt.height);
if (block_height > 1) {
goto error;
}
// Try to shrink resolution
block_width = 32;
block_height = 32;
goto retry_resolution_set;
} else { } else {
LOG_INFO(buf_list, "Requested resolution=%ux%u is unavailable. Got %ux%u. Accepted", LOG_INFO(buf_list, "Requested resolution=%ux%u is unavailable. Got %ux%u. Accepted",
fmt.width, fmt.height, buf_list->fmt.width, buf_list->fmt.height); fmt.width, fmt.height, buf_list->fmt.width, buf_list->fmt.height);

View File

@ -11,44 +11,56 @@
<ul> <ul>
<br> <br>
<li> <li>
<a href="snapshot"><b>/snapshot</b></a><br> <a href="snapshot"><b>/snapshot</b></a> (JPEG image)<br>
Get a current actual image from the server.<br>
<br> <br>
<ul> <ul>
<li><a href="snapshot?res=low">/snapshot?res=low</a> get a low resolution stream (if -camera-low_res_factor=X is configured).</li> <li>Get a high-resolution snapshot image from the server.</li>
<li>Uses resolution specified by <i>-camera-snapshot.height=</i>.</li>
</ul> </ul>
</li> </li>
<br> <br>
<li> <li>
<a href="stream"><b>/stream</b></a> (MJPEG stream)</b><br> <a href="stream"><b>/stream</b></a> (MJPEG stream)</b><br>
Get a live stream. Works everywhere, but consumes a ton of bandwidth.<br>
<br> <br>
<ul> <ul>
<li><a href="stream?res=low">/stream?res=low</a> get a low resolution stream (if -camera-low_res_factor=X is configured).</li> <li>Get a live stream. Works everywhere, but consumes a ton of bandwidth.</li>
<li>Uses resolution specified by <i>-camera-stream.height=</i>.</li>
</ul> </ul>
</li> </li>
<br> <br>
<li> <li>
<a href="webrtc"><b>/webrtc</b></a> (HTTP page / iframe)<br> <a href="webrtc"><b>/webrtc</b></a> (HTTP page / iframe)<br>
Get a live video using WebRTC (low-latency streaming with latency of around 100ms).<br>
<br> <br>
<ul> <ul>
<li><a href="webrtc?res=low">/webrtc?res=low</a> get a low resolution WebRTC stream (if -camera-low_res_factor=X is configured).</li> <li>Get a live video using WebRTC (low-latency streaming with latency of around 100ms).</li>
<li>Uses resolution specified by <i>-camera-video.height=</i>.</li>
</ul> </ul>
</li> </li>
<br> <br>
<li> <li>
<a href="video"><b>/video</b></a> (IP Camera)<br> <a href="video"><b>/video</b></a> (IP Camera)<br>
Get a live (H264) video stream best suited to current browser in a maximum compatibility mode choosing automatically between one of the below formats.<br>
<br> <br>
<ul> <ul>
<li>Get a live (H264) video stream best suited to current browser in a maximum compatibility mode choosing automatically between one of the below formats.</li>
<li>Uses resolution specified by <i>-camera-video.height=</i>.</li>
<br>
<li><a href="video.mp4"><b>/video.mp4</b></a><br> get a live video stream in MP4 format (Firefox, with latency of around 1s if FFMPEG enabled).</li> <li><a href="video.mp4"><b>/video.mp4</b></a><br> get a live video stream in MP4 format (Firefox, with latency of around 1s if FFMPEG enabled).</li>
<br> <br>
<li><a href="video.mkv"><b>/video.mkv</b></a><br> get a live video stream in MKV format (Chrome, with latency of around 2s if FFMPEG enabled).</li> <li><a href="video.mkv"><b>/video.mkv</b></a><br> get a live video stream in MKV format (Chrome, with latency of around 2s if FFMPEG enabled).</li>
<br> <br>
<li><a href="video.m3u8"><b>/video.m3u8</b></a><br> get a live video stream in HLS format (Safari, with latency of around 1s).</li> <li><a href="video.m3u8"><b>/video.m3u8</b></a><br> get a live video stream in HLS format (Safari, with latency of around 1s).</li>
</ul>
</li>
<br>
<li>
<a href="option"><b>/option</b></a><br>
<br>
<ul>
<li>See all configurable camera options.</li>
<br> <br>
<li><a href="video?res=low">/video?res=low</a> get a low resolution stream (if -camera-low_res_factor=X is configured).</li> <li><a href="option?key=value">/option?key=value</a> set <i>key</i> to <i>value</i>.</li>
<br>
<li><a href="option?AfTrigger=1">/option?AfTrigger=1</a> trigger auto focus for ArduCams.</li>
</ul> </ul>
</li> </li>
<br> <br>
@ -57,19 +69,9 @@
<br> <br>
<ul> <ul>
<li><a href="?action=snapshot">/?action=snapshot</a> as alias to the <a href="snapshot">/snapshot</a>.</li> <li><a href="?action=snapshot">/?action=snapshot</a> as alias to the <a href="snapshot">/snapshot</a>.</li>
<br>
<li><a href="?action=stream">/?action=stream</a> as alias to the <a href="stream">/stream</a>.</li> <li><a href="?action=stream">/?action=stream</a> as alias to the <a href="stream">/stream</a>.</li>
</ul> </ul>
</li> </li>
<br>
<li>
The commands available on some cameras:<br>
<br>
<ul>
<li><a href="option">/option</a> see all options.</li>
<li><a href="option?AfTrigger=1">/option?AfTrigger=1</a> trigger auto focus.</li>
</ul>
</li>
</ul> </ul>
<br> <br>
<hr> <hr>

View File

@ -10,8 +10,6 @@
#include "device/device.h" #include "device/device.h"
#include "util/ffmpeg/remuxer.h" #include "util/ffmpeg/remuxer.h"
buffer_lock_t *http_h264_buffer_for_res(http_worker_t *worker);
static const char *const VIDEO_HEADER = static const char *const VIDEO_HEADER =
"HTTP/1.0 200 OK\r\n" "HTTP/1.0 200 OK\r\n"
"Access-Control-Allow-Origin: *\r\n" "Access-Control-Allow-Origin: *\r\n"
@ -135,7 +133,7 @@ static void http_ffmpeg_video(http_worker_t *worker, FILE *stream, const char *c
#endif #endif
int n = buffer_lock_write_loop( int n = buffer_lock_write_loop(
http_h264_buffer_for_res(worker), &video_lock,
0, 0,
0, 0,
(buffer_write_fn)http_ffmpeg_video_buf_part, (buffer_write_fn)http_ffmpeg_video_buf_part,

View File

@ -16,14 +16,6 @@ static const char *const VIDEO_HEADER =
"Content-Type: application/octet-stream\r\n" "Content-Type: application/octet-stream\r\n"
"\r\n"; "\r\n";
buffer_lock_t *http_h264_buffer_for_res(http_worker_t *worker)
{
if (strstr(worker->request_params, HTTP_LOW_RES_PARAM) && http_h264_lowres.buf_list)
return &http_h264_lowres;
else
return &http_h264;
}
typedef struct { typedef struct {
FILE *stream; FILE *stream;
bool wrote_header; bool wrote_header;
@ -60,7 +52,7 @@ void http_h264_video(http_worker_t *worker, FILE *stream)
{ {
http_video_status_t status = { stream }; http_video_status_t status = { stream };
int n = buffer_lock_write_loop(http_h264_buffer_for_res(worker), 0, 0, (buffer_write_fn)http_video_buf_part, &status); int n = buffer_lock_write_loop(&video_lock, 0, 0, (buffer_write_fn)http_video_buf_part, &status);
if (status.wrote_header) { if (status.wrote_header) {
return; return;

View File

@ -20,14 +20,6 @@ static const char *const STREAM_PART = "Content-Type: " CONTENT_TYPE "\r\n" CONT
static const char *const STREAM_BOUNDARY = "\r\n" static const char *const STREAM_BOUNDARY = "\r\n"
"--" PART_BOUNDARY "\r\n"; "--" PART_BOUNDARY "\r\n";
buffer_lock_t *http_jpeg_buffer_for_res(http_worker_t *worker)
{
if (strstr(worker->request_params, HTTP_LOW_RES_PARAM) && http_jpeg_lowres.buf_list)
return &http_jpeg_lowres;
else
return &http_jpeg;
}
int http_snapshot_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FILE *stream) int http_snapshot_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FILE *stream)
{ {
fprintf(stream, "HTTP/1.1 200 OK\r\n"); fprintf(stream, "HTTP/1.1 200 OK\r\n");
@ -40,7 +32,7 @@ int http_snapshot_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FI
void http_snapshot(http_worker_t *worker, FILE *stream) void http_snapshot(http_worker_t *worker, FILE *stream)
{ {
int n = buffer_lock_write_loop(http_jpeg_buffer_for_res(worker), 1, 0, (buffer_write_fn)http_snapshot_buf_part, stream); int n = buffer_lock_write_loop(&snapshot_lock, 1, 0, (buffer_write_fn)http_snapshot_buf_part, stream);
if (n <= 0) { if (n <= 0) {
http_500(stream, NULL); http_500(stream, NULL);
@ -68,7 +60,7 @@ int http_stream_buf_part(buffer_lock_t *buf_lock, buffer_t *buf, int frame, FILE
void http_stream(http_worker_t *worker, FILE *stream) void http_stream(http_worker_t *worker, FILE *stream)
{ {
int n = buffer_lock_write_loop(http_jpeg_buffer_for_res(worker), 0, 0, (buffer_write_fn)http_stream_buf_part, stream); int n = buffer_lock_write_loop(&stream_lock, 0, 0, (buffer_write_fn)http_stream_buf_part, stream);
if (n == 0) { if (n == 0) {
http_500(stream, NULL); http_500(stream, NULL);

View File

@ -1,7 +1,5 @@
#include "device/buffer_lock.h" #include "device/buffer_lock.h"
DEFINE_BUFFER_LOCK(http_h264, 0); DEFINE_BUFFER_LOCK(snapshot_lock, 1000);
DEFINE_BUFFER_LOCK(http_h264_lowres, 0); DEFINE_BUFFER_LOCK(stream_lock, 0);
DEFINE_BUFFER_LOCK(video_lock, 0);
DEFINE_BUFFER_LOCK(http_jpeg, 1000);
DEFINE_BUFFER_LOCK(http_jpeg_lowres, 1000);

View File

@ -5,11 +5,9 @@
struct http_worker_s; struct http_worker_s;
struct buffer_s; struct buffer_s;
extern struct buffer_lock_s http_h264; extern struct buffer_lock_s snapshot_lock;
extern struct buffer_lock_s http_h264_lowres; extern struct buffer_lock_s stream_lock;
extern struct buffer_lock_s video_lock;
extern struct buffer_lock_s http_jpeg;
extern struct buffer_lock_s http_jpeg_lowres;
// M-JPEG // M-JPEG
void http_snapshot(struct http_worker_s *worker, FILE *stream); void http_snapshot(struct http_worker_s *worker, FILE *stream);

View File

@ -26,13 +26,12 @@ static pthread_mutex_t rtsp_lock = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP;
static class DynamicH264Stream *rtsp_streams; static class DynamicH264Stream *rtsp_streams;
static const char *stream_name = "stream.h264"; static const char *stream_name = "stream.h264";
static const char *stream_low_res_name = "stream_low_res.h264";
class DynamicH264Stream : public FramedSource class DynamicH264Stream : public FramedSource
{ {
public: public:
DynamicH264Stream(UsageEnvironment& env, Boolean lowResMode) DynamicH264Stream(UsageEnvironment& env)
: FramedSource(env), fHaveStartedReading(False), fLowResMode(lowResMode) : FramedSource(env), fHaveStartedReading(False)
{ {
} }
@ -65,16 +64,12 @@ public:
pthread_mutex_unlock(&rtsp_lock); pthread_mutex_unlock(&rtsp_lock);
} }
void receiveData(buffer_t *buf, bool lowResMode) void receiveData(buffer_t *buf)
{ {
if (!isCurrentlyAwaitingData()) { if (!isCurrentlyAwaitingData()) {
return; // we're not ready for the data yet return; // we're not ready for the data yet
} }
if (fLowResMode != lowResMode) {
return;
}
if (buf->flags.is_keyframe) { if (buf->flags.is_keyframe) {
fHadKeyFrame = true; fHadKeyFrame = true;
} }
@ -109,7 +104,6 @@ public:
Boolean fHaveStartedReading; Boolean fHaveStartedReading;
Boolean fHadKeyFrame; Boolean fHadKeyFrame;
Boolean fRequestedKeyFrame; Boolean fRequestedKeyFrame;
Boolean fLowResMode;
DynamicH264Stream *pNextStream; DynamicH264Stream *pNextStream;
}; };
@ -117,23 +111,21 @@ public:
class DynamicH264VideoFileServerMediaSubsession : public OnDemandServerMediaSubsession class DynamicH264VideoFileServerMediaSubsession : public OnDemandServerMediaSubsession
{ {
public: public:
DynamicH264VideoFileServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource, Boolean lowResMode) DynamicH264VideoFileServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource)
: OnDemandServerMediaSubsession(env, reuseFirstSource), fLowResMode(lowResMode) : OnDemandServerMediaSubsession(env, reuseFirstSource)
{ {
} }
virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{ {
estBitrate = 500; // kbps, estimate estBitrate = 500; // kbps, estimate
return H264VideoStreamFramer::createNew(envir(), new DynamicH264Stream(envir(), fLowResMode)); return H264VideoStreamFramer::createNew(envir(), new DynamicH264Stream(envir()));
} }
virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/)
{ {
return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
} }
Boolean fLowResMode;
}; };
class DynamicRTSPServer: public RTSPServerSupportingHTTPStreaming class DynamicRTSPServer: public RTSPServerSupportingHTTPStreaming
@ -164,13 +156,8 @@ protected:
protected: // redefined virtual functions protected: // redefined virtual functions
virtual ServerMediaSession* lookupServerMediaSession(char const* streamName, Boolean isFirstLookupInSession) virtual ServerMediaSession* lookupServerMediaSession(char const* streamName, Boolean isFirstLookupInSession)
{ {
bool lowResMode = false;
if (strcmp(streamName, stream_name) == 0) { if (strcmp(streamName, stream_name) == 0) {
LOG_INFO(NULL, "Requesting %s stream...", streamName); LOG_INFO(NULL, "Requesting %s stream...", streamName);
} else if (strcmp(streamName, stream_low_res_name) == 0) {
LOG_INFO(NULL, "Requesting %s stream (low resolution mode)...", streamName);
lowResMode = true;
} else { } else {
LOG_INFO(NULL, "No stream available: '%s'", streamName); LOG_INFO(NULL, "No stream available: '%s'", streamName);
return NULL; return NULL;
@ -188,7 +175,7 @@ protected: // redefined virtual functions
sms = ServerMediaSession::createNew(envir(), streamName, streamName, "streamed by the LIVE555 Media Server");; sms = ServerMediaSession::createNew(envir(), streamName, streamName, "streamed by the LIVE555 Media Server");;
OutPacketBuffer::maxSize = 2000000; // allow for some possibly large H.264 frames OutPacketBuffer::maxSize = 2000000; // allow for some possibly large H.264 frames
auto subsession = new DynamicH264VideoFileServerMediaSubsession(envir(), false, lowResMode); auto subsession = new DynamicH264VideoFileServerMediaSubsession(envir(), false);
sms->addSubsession(subsession); sms->addSubsession(subsession);
addServerMediaSession(sms); addServerMediaSession(sms);
return sms; return sms;
@ -208,8 +195,7 @@ static bool rtsp_h264_needs_buffer(buffer_lock_t *buf_lock)
pthread_mutex_lock(&rtsp_lock); pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) { for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
if (!stream->fLowResMode) needsBuffer = true;
needsBuffer = true;
} }
pthread_mutex_unlock(&rtsp_lock); pthread_mutex_unlock(&rtsp_lock);
return needsBuffer; return needsBuffer;
@ -219,33 +205,7 @@ static void rtsp_h264_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{ {
pthread_mutex_lock(&rtsp_lock); pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) { for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
stream->receiveData(buf, false); stream->receiveData(buf);
if (!http_h264_lowres.buf_list) {
stream->receiveData(buf, true);
}
}
pthread_mutex_unlock(&rtsp_lock);
}
static bool rtsp_h264_low_res_needs_buffer(buffer_lock_t *buf_lock)
{
bool needsBuffer = false;
pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
if (stream->fLowResMode)
needsBuffer = true;
}
pthread_mutex_unlock(&rtsp_lock);
return needsBuffer;
}
static void rtsp_h264_low_res_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
pthread_mutex_lock(&rtsp_lock);
for (DynamicH264Stream *stream = rtsp_streams; stream; stream = stream->pNextStream) {
stream->receiveData(buf, true);
} }
pthread_mutex_unlock(&rtsp_lock); pthread_mutex_unlock(&rtsp_lock);
} }
@ -280,10 +240,8 @@ extern "C" int rtsp_server(rtsp_options_t *options)
// LOG_INFO(NULL, "The RTSP-over-HTTP is not available."); // LOG_INFO(NULL, "The RTSP-over-HTTP is not available.");
// } // }
buffer_lock_register_check_streaming(&http_h264, rtsp_h264_needs_buffer); buffer_lock_register_check_streaming(&video_lock, rtsp_h264_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264, rtsp_h264_capture); buffer_lock_register_notify_buffer(&video_lock, rtsp_h264_capture);
buffer_lock_register_check_streaming(&http_h264_lowres, rtsp_h264_low_res_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264_lowres, rtsp_h264_low_res_capture);
pthread_create(&rtsp_thread, NULL, rtsp_server_thread, env); pthread_create(&rtsp_thread, NULL, rtsp_server_thread, env);
return 0; return 0;
@ -299,4 +257,4 @@ extern "C" int rtsp_server(rtsp_options_t *options)
return 0; return 0;
} }
#endif // USE_RTSP #endif // USE_RTSP

View File

@ -76,7 +76,7 @@ class Client
{ {
public: public:
Client(std::shared_ptr<rtc::PeerConnection> pc_) Client(std::shared_ptr<rtc::PeerConnection> pc_)
: pc(pc_), use_low_res(false) : pc(pc_)
{ {
id.resize(20); id.resize(20);
for (auto & c : id) { for (auto & c : id) {
@ -91,18 +91,16 @@ public:
free(name); free(name);
} }
bool wantsFrame(bool low_res) const bool wantsFrame() const
{ {
if (!pc || !video) if (!pc || !video)
return false; return false;
if (pc->state() != rtc::PeerConnection::State::Connected) if (pc->state() != rtc::PeerConnection::State::Connected)
return false; return false;
if (use_low_res != low_res)
return false;
return video->wantsFrame(); return video->wantsFrame();
} }
void pushFrame(buffer_t *buf, bool low_res) void pushFrame(buffer_t *buf)
{ {
auto self = this; auto self = this;
@ -110,10 +108,6 @@ public:
return; return;
} }
if (use_low_res != low_res) {
return;
}
if (!had_key_frame) { if (!had_key_frame) {
if (!buf->flags.is_keyframe) { if (!buf->flags.is_keyframe) {
device_video_force_key(buf->buf_list->dev); device_video_force_key(buf->buf_list->dev);
@ -136,7 +130,6 @@ public:
std::mutex lock; std::mutex lock;
std::condition_variable wait_for_complete; std::condition_variable wait_for_complete;
bool had_key_frame; bool had_key_frame;
bool use_low_res;
}; };
std::shared_ptr<Client> findClient(std::string id) std::shared_ptr<Client> findClient(std::string id)
@ -232,9 +225,7 @@ static bool webrtc_h264_needs_buffer(buffer_lock_t *buf_lock)
{ {
std::unique_lock lk(webrtc_clients_lock); std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) { for (auto client : webrtc_clients) {
if (client->wantsFrame(false)) if (client->wantsFrame())
return true;
if (!http_h264_lowres.buf_list && client->wantsFrame(true))
return true; return true;
} }
@ -245,31 +236,8 @@ static void webrtc_h264_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{ {
std::unique_lock lk(webrtc_clients_lock); std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) { for (auto client : webrtc_clients) {
if (client->wantsFrame(false)) if (client->wantsFrame())
client->pushFrame(buf, false); client->pushFrame(buf);
if (!http_h264_lowres.buf_list && client->wantsFrame(true))
client->pushFrame(buf, true);
}
}
static bool webrtc_h264_low_res_needs_buffer(buffer_lock_t *buf_lock)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client->wantsFrame(true))
return true;
}
return false;
}
static void webrtc_h264_low_res_capture(buffer_lock_t *buf_lock, buffer_t *buf)
{
std::unique_lock lk(webrtc_clients_lock);
for (auto client : webrtc_clients) {
if (client->wantsFrame(true)) {
client->pushFrame(buf, true);
}
} }
} }
@ -279,9 +247,6 @@ static void http_webrtc_request(http_worker_t *worker, FILE *stream, const nlohm
LOG_INFO(client.get(), "Stream requested."); LOG_INFO(client.get(), "Stream requested.");
client->video = addVideo(client->pc, webrtc_client_video_payload_type, rand(), "video", ""); client->video = addVideo(client->pc, webrtc_client_video_payload_type, rand(), "video", "");
if (message.contains("res")) {
client->use_low_res = (message["res"] == "low");
}
try { try {
{ {
@ -418,10 +383,8 @@ extern "C" void http_webrtc_offer(http_worker_t *worker, FILE *stream)
extern "C" void webrtc_server() extern "C" void webrtc_server()
{ {
buffer_lock_register_check_streaming(&http_h264, webrtc_h264_needs_buffer); buffer_lock_register_check_streaming(&video_lock, webrtc_h264_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264, webrtc_h264_capture); buffer_lock_register_notify_buffer(&video_lock, webrtc_h264_capture);
buffer_lock_register_check_streaming(&http_h264_lowres, webrtc_h264_low_res_needs_buffer);
buffer_lock_register_notify_buffer(&http_h264_lowres, webrtc_h264_low_res_capture);
} }
#else // USE_LIBDATACHANNEL #else // USE_LIBDATACHANNEL

View File

@ -13,10 +13,12 @@ ExecStart=/usr/local/bin/camera-streamer \
-camera-fps=15 \ -camera-fps=15 \
; use two memory buffers to optimise usage ; use two memory buffers to optimise usage
-camera-nbufs=2 \ -camera-nbufs=2 \
; the high-res is 1552x1165 ; the snapshot is 1438x1080
-camera-high_res_factor=1.5 \ -camera-snapshot.height=1080 \
; the low-res is 776x582 ; the video/webrtc is 958x720
-camera-low_res_factor=3.0 \ -camera-video.height=720 \
; the stream is 639x480
-camera-stream.height=480 \
; bump brightness slightly ; bump brightness slightly
-camera-options=brightness=0.1 \ -camera-options=brightness=0.1 \
; disable auto-focus ; disable auto-focus

View File

@ -15,10 +15,12 @@ ExecStart=/usr/local/bin/camera-streamer \
-camera-fps=30 \ -camera-fps=30 \
; use two memory buffers to optimise usage ; use two memory buffers to optimise usage
-camera-nbufs=2 \ -camera-nbufs=2 \
; the high-res is 1552x1165 ; the snapshot is 1438x1080
-camera-high_res_factor=1.5 \ -camera-snapshot.height=1080 \
; the low-res is 776x582 ; the video/webrtc is 958x720
-camera-low_res_factor=3.0 \ -camera-video.height=720 \
; the stream is 639x480
-camera-stream.height=480 \
; bump brightness slightly ; bump brightness slightly
-camera-options=brightness=0.1 \ -camera-options=brightness=0.1 \
; disable auto-focus ; disable auto-focus

View File

@ -16,10 +16,12 @@ ExecStart=/usr/local/bin/camera-streamer \
-camera-fps=30 \ -camera-fps=30 \
; use two memory buffers to optimise usage ; use two memory buffers to optimise usage
-camera-nbufs=2 \ -camera-nbufs=2 \
; the high-res is 1640x1232 ; the snapshot is 1438x1080
-camera-high_res_factor=2 \ -camera-snapshot.height=1080 \
; the low-res is 820x616 ; the video/webrtc is 958x720
-camera-low_res_factor=4 \ -camera-video.height=720 \
; the stream is 639x480
-camera-stream.height=480 \
; bump brightness slightly ; bump brightness slightly
-camera-options=brightness=0.1 \ -camera-options=brightness=0.1 \
-rtsp-port -rtsp-port

View File

@ -16,10 +16,12 @@ ExecStart=/usr/local/bin/camera-streamer \
-camera-fps=30 \ -camera-fps=30 \
; use two memory buffers to optimise usage ; use two memory buffers to optimise usage
-camera-nbufs=2 \ -camera-nbufs=2 \
; the high-res is 1920x1080 ; the snapshot is 1920x1080
-camera-high_res_factor=1.2 \ -camera-snapshot.height=1080 \
; the low-res is 1280x720 ; the video/webrtc is 1280x720
-camera-low_res_factor=1.8 \ -camera-video.height=720 \
; the stream is 853x480
-camera-stream.height=480 \
-rtsp-port -rtsp-port
DynamicUser=yes DynamicUser=yes

View File

@ -12,9 +12,11 @@ ExecStart=/usr/local/bin/camera-streamer \
; use two memory buffers to optimise usage ; use two memory buffers to optimise usage
-camera-nbufs=2 \ -camera-nbufs=2 \
; the high-res is 1920x1080 ; the high-res is 1920x1080
-camera-high_res_factor=1.0 \ -camera-snapshot.height=1080 \
; the low-res is 960x540 ; the video/webrtc is 1280x720
-camera-low_res_factor=2.0 \ -camera-video.height=720 \
; the stream is 853x480
-camera-stream.height=480 \
-rtsp-port -rtsp-port
DynamicUser=yes DynamicUser=yes