이벤트 녹화¶
녹화 중에 이벤트가 발생하면 녹화 객체 생성 시 전달한 파라미터(event-pre-recording-seconds와 event-post-recording-seconds) 값으로 지정된 전후 시간만큼 이벤트 녹화를 시작합니다.
이벤트 녹화는 아래 API를 사용합니다.
| 구분 | 녹화 시작 | 녹화 종료 |
|---|---|---|
| 통합 녹화 | startEventRecording | stopEventRecording |
| 개별 녹화 | startMultiChannelEventRecording | stopMultiChannelEventRecording |
이벤트 녹화는 stopEventRecording 또는 stopMultiChannelEventRecording 을 호출하여 강제로 종료할 수 있습니다. 보통의 경우, 이벤트 녹화가 끝나면 자동으로 종료됩니다.
이벤트 녹화 진행 중 발생하는 이벤트 콜백 함수는 아래와 같습니다.
| 구분 | 인터페이스 | 녹화 시작 | 녹화 종료 |
|---|---|---|---|
| 통합 녹화 | MediaObserver | onEventRecordingStarted | onEventRecordingCompleted |
| 개별 녹화 | MediaObserver2 | onEventRecordingStarted | onEventRecordingCompleted |
통합 녹화¶
통합 녹화의 경우, 아래의 startEventRecording API를 사용합니다.
- int32_t startEventRecording(RecorderRef recorder, const char * event_recording_file_path);
event_recording_file_path에 응용 프로그램은 이벤트 녹화가 저장될 파일의 절대 경로를 지정합니다. 이벤트 녹화 파일이 위치할 폴더는 녹화 객체 생성 시 전달한 파라미터(event-folder-path) 값으로 지정합니다. 다른 폴더 경로를 사용해도 무방합니다.
- int32_t startEventRecording(RecorderRef recorder, const char * event_recording_file_path, void * stream_data_header, size_t stream_data_header_size);
- int32_t startEventRecording(RecorderRef recorder, const char * event_recording_file_path, void * stream_data_header, size_t stream_data_header_size, bool do_single_recording);
do_single_recording이 true이면 현재 녹화를 중지하고 이벤트 녹화를 시작합니다. 이벤트 녹화가 끝나면 다시 현재 녹화를 시작합니다.
개별 녹화¶
개별 녹화의 경우, 아래의 startMultiChannelEventRecording API를 사용합니다.
- int32_t startMultiChannelEventRecording(MultiChannelRecorderRef multi_channel_recorder, const std::list<MultiChannelRecorderFilePath> & file_paths, void * stream_data_header, size_t stream_data_header_size, bool do_single_recording);
file_paths에 각 채널별 녹화파일 경로를 추가하여 채널 개수 크기의 목록을 만들어 전달합니다.
개별 녹화의 경우는 채널을 지정하여 이벤트 녹화를 시작할 수 있습니다.
- int32_t startMultiChannelEventRecording(MultiChannelRecorderRef multi_channel_recorder, int32_t channel_id, const char * event_recording_file_path, void * stream_data_header, size_t stream_data_header_size, bool is_type_of_motion);
is_type_of_motion이 true이면 모션 녹화를 진행합니다. 이벤트 녹화와의 차이점은 녹화 객체 생성 시 지정된 전후 시간이 motion-pre-recording-seconds와 motion-post-recording-seconds 값을 따른다는 점입니다.
- int32_t startMultiChannelEventRecording(MultiChannelRecorderRef multi_channel_recorder, int32_t channel_id, const char * event_recording_file_path, void * stream_data_header, size_t stream_data_header_size, bool is_type_of_motion, bool do_single_recording);
- int32_t startMultiChannelEventRecording(MultiChannelRecorderRef multi_channel_recorder, int32_t channel_id, const char * event_recording_file_path, void * stream_data_header, size_t stream_data_header_size, bool do_single_recording);
예제¶
통합 이벤트 녹화¶
아래는 2개의 영상 채널에 대한 통합 녹화 예입니다. 녹화 도중 사용자가 엔터키를 입력하면 이벤트 녹화를 진행합니다.
printf("Ctrl+C to exit...\n");
do {
c = getch("Press Enter key to start event recording...");
if(c < 0 || continue_recording == false) {
break;
}
if(c != '\n') {
fprintf(stdout, "%c", c);
}
fprintf(stdout, "\n");
if(c == 10) {
//start event recording
if (isRecorderRecording(recorder) && isEventRunning(recorder) == false) {
time_t t;
struct tm tm_t;
char path[PATH_MAX];
time(&t);
localtime_r(&t, &tm_t);
#if USE_OFFS
sprintf(path, "/mnt/sd/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
#else
sprintf(path, "/tmp/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
#endif
startEventRecording(recorder, path);
} else {
fprintf(stdout, "Recorder not started or busy in recording an event.\n");
}
}
} while (continue_recording == true);
로그 출력을 위해 MediaObserver 에서 유도된 사용자 정의 인터페이스에 아래와 같이 콜백 함수를 정의합니다.
virtual void onEventRecordingStarted(void* user_data, media_report_reason_t reason, const char* file_path) {
DLOG0(DLOG_THIS, "Event Recording started @ \"%s\" (reason: %d %s).\r\n", file_path, reason, mediaReportReasonString(reason));
}
virtual void onEventRecordingCompleted(void* user_data, media_report_reason_t reason, const char* file_path) {
DLOG0(DLOG_THIS, "Event Recording completed @ \"%s\" (reason: %d %s).\r\n", file_path, reason, mediaReportReasonString(reason));
}
출력된 로그 내용입니다.
[2000/06/27 04:02:35.233130] Event Recording started @ "/tmp/EVENT/EVT_2000-06-27_04-02-35.mp4" (reason: 0 NoError).
[2000/06/27 04:02:42.338952] Event Recording completed @ "/tmp/EVENT/EVT_2000-06-27_04-02-35.mp4" (reason: 0 NoError).
전체 코드입니다.
#include "OasisAPI.h"
#include "OasisLog.h"
#include "OasisMedia.h"
#include "OasisFS.h"
#include "OasisUtil.h"
#include <thread>
#include <mutex>
#include <memory>
#include <condition_variable>
#include <signal.h>
#define DLOG_THIS 0x00010000
#define DLOG_RECORD 0x00020000
#define DLOG_FRAME 0x00040000
#define DLOG_TRACE 0x00080000
#undef DLOG_FLAGS
#define DLOG_FLAGS (DLOG_FLAGS_DEFAULT|DLOG_RECORD|DLOG_TRACE|DLOG_THIS/*|DLOG_FRAME*/)
#undef DLOG_TAG
#define DLOG_TAG "RECORDER"
#define OFFS_QUEUE_SIZE_KBYTES (20*1024)
#define OFFS_CACHE_SIZE_KBYTES (1024)
#define OFFS_WRITE_ALIGNMENT_BYTES 8192
#define MEDIA_CACHE_SIZE_KBYTES (40*1024)
#define USE_OFFS 0
extern "C" int getch(const char *prompt);
using namespace oasis;
static bool timer_running = true;
static bool continue_recording = true;
static void bytesToString(size_t size, char *size_string)
{
if(size >= 1024*1024) {
sprintf(size_string, "%.2fMB", (double)size/1048576);
} else if(size >= 1024) {
sprintf(size_string, "%.2fKB", (double)size/1024);
} else {
sprintf(size_string, "%dB", size);
}
}
static uint8_t strd_data_header_[24] = {0, };
class MyRecordingObserver : public MediaObserver {
public:
virtual void onStarted(void* user_data, const char* file_path) {
DLOG(DLOG_RECORD|DLOG_INFO, "Recording started, file path \"%s\"\r\n", file_path);
}
virtual void onStopped(void* user_data, media_report_reason_t reason, void* details) {
DLOG(DLOG_RECORD|DLOG_INFO, "Recording stopped, reason: %d(%s)\r\n", reason, mediaReportReasonString(reason));
if(reason != kMediaReportNoError) {
continue_recording = false;
}
}
virtual void onFileChanged(void* user_data, const char* new_file_path) {
DLOG(DLOG_RECORD|DLOG_INFO, "New file used: \"%s\"\r\n", new_file_path);
}
virtual void onError(void* user_data, media_report_reason_t reason, void* details) {
if(reason == kRecordingErrorMediaDataTimeout) {
int32_t camera_id = (intptr_t)details;
DLOG(DLOG_RECORD|DLOG_ERROR, "Recording error, reason: %d(%s), camera<%d>\r\n", reason, mediaReportReasonString(reason), camera_id);
} else if(reason == kRecordingErrorMediaCacheReadErrorBegin) {
DLOG(DLOG_RECORD|DLOG_ERROR, "Recording error, reason: %d(%s) ===> stopping\r\n", reason, mediaReportReasonString(reason));
} else if(reason == kRecordingErrorMediaCacheReadErrorEnd) {
DLOG(DLOG_RECORD|DLOG_ERROR, "Recording error, reason: %d(%s)\r\n", reason, mediaReportReasonString(reason));
}
continue_recording = false;
}
virtual void onInfo(void* user_data, MediaInfo* info) {
DLOG(DLOG_RECORD, "Recording duration %lld.%06lld sec\r\n", info->durationUs / 1000000ll, info->durationUs % 1000000ll);
}
virtual void onInfoEx(void* user_data, MediaInfoEx* info) {
int h, m, s, u;
char normal_qsize[128], event_qsize[128], in_size[128], out_size[128], cache_in_size[128];
char video1_size[128], video2_size[128], meta_size[128], audio_size[128], file_size[128];
parseUsec(info->durationUs, h, m, s, u);
DLOG0(DLOG_RECORD, "Recording duration \033[33m%02d:%02d:%02d.%06d\033[0m, wq#%d(%d), eq#%d(%d), meq#%d(%d), nohits#%u, mb.avail#%d/%d (free mem %.3fMB, cpu %.2f%%)\r\n", h, m, s, u, info->writerStat.curSamples, info->writerStat.maxSamples, info->timeshiftStat.curSamples, info->timeshiftStat.maxSamples, info->writerStat.motionStat.curSamples, info->writerStat.motionStat.maxSamples, info->mediaCacheStat.nohits_, info->mediaCacheStat.free_buffer_count_, info->mediaCacheStat.total_buffer_count_, (double)oasis::getFreeMemorySize()/1024.0/1024.0, getCPUUsage());
//offs state
bytesToString(info->offsStat.qNormalSize, normal_qsize);
bytesToString(info->offsStat.qEventSize, event_qsize);
bytesToString(info->offsStat.inSize, in_size);
bytesToString(info->offsStat.outSize, out_size);
bytesToString(info->mediaCacheStat.in_size_, cache_in_size);
TRACE0(" cache: during %d msec, in %s\r\n", info->mediaCacheStat.check_duration_, cache_in_size);
TRACE0(" offs: n#%d, e#%d, nz#%s, ez#%s, during %d msec: in %s out %s elapsed %d msec \r\n", info->offsStat.qNormalCount, info->offsStat.qEventCount, normal_qsize, event_qsize, info->offsStat.checkDuration, in_size, out_size, info->offsStat.elapsedSum);
//TRACE0(" cache: hits#%u, nohits#%u, free#%u, gets#%u, puts#%u\r\n", info->mediaCacheStat.hits_, info->mediaCacheStat.nohits_, info->mediaCacheStat.free_buffer_count_, info->mediaCacheStat.get_buffer_count_, info->mediaCacheStat.put_buffer_count_);
//print recording stat in details
bytesToString(info->writerStat.video1Length, video1_size);
bytesToString(info->writerStat.video2Length, video2_size);
bytesToString(info->writerStat.metaLength, meta_size);
bytesToString(info->writerStat.audioLength, audio_size);
bytesToString(info->writerStat.fileLength, file_size);
TRACE0(" %s: video1 %s, video2 %s, meta %s, audio %s, file %s\n", info->sniffing?"sniffing":"recording", video1_size, video2_size, meta_size, audio_size, file_size);
if(info->writerStat.motionStat.recording) {
bytesToString(info->writerStat.motionStat.video1Length, video1_size);
bytesToString(info->writerStat.motionStat.video2Length, video2_size);
bytesToString(info->writerStat.motionStat.metaLength, meta_size);
bytesToString(info->writerStat.motionStat.audioLength, audio_size);
bytesToString(info->writerStat.motionStat.fileLength, file_size);
TRACE0(" motion: video1 %s, video2 %s, meta %s, audio %s, file %s\n", video1_size, video2_size, meta_size, audio_size, file_size);
}
if(info->timeshiftStat.recording) {
bytesToString(info->timeshiftStat.video1Length, video1_size);
bytesToString(info->timeshiftStat.video2Length, video2_size);
bytesToString(info->timeshiftStat.metaLength, meta_size);
bytesToString(info->timeshiftStat.audioLength, audio_size);
bytesToString(info->timeshiftStat.fileLength, file_size);
TRACE0(" event: video1 %s, video2 %s, meta %s, audio %s, file %s\n", video1_size, video2_size, meta_size, audio_size, file_size);
}
}
//player paused and resumed
virtual void onPaused(void *user_data, MediaInfo *info) {}
virtual void onResumed(void *user_data, MediaInfo *info) {}
virtual void onEventRecordingStarted(void* user_data, media_report_reason_t reason, const char* file_path) {
DLOG0(DLOG_THIS, "Event Recording started @ \"%s\" (reason: %d %s).\r\n", file_path, reason, mediaReportReasonString(reason));
}
virtual void onEventRecordingCompleted(void* user_data, media_report_reason_t reason, const char* file_path) {
DLOG0(DLOG_THIS, "Event Recording completed @ \"%s\" (reason: %d %s).\r\n", file_path, reason, mediaReportReasonString(reason));
}
virtual void onMotionRecordingStarted(void *user_data, media_report_reason_t reason, const char *file_path) {
}
virtual void onMotionRecordingCompleted(void *user_data, media_report_reason_t reason, const char *file_path) {
}
virtual void queryNewFilePath(void* user_data, oasis::recording_mode_t file_type, bool rear_camera_on, bool sound_on, std::string& file_path) {
// use as it is
}
virtual void onSnapshotCompleted(void *user_data, uint32_t snapshot_id, int32_t camera_id, int error, const std::vector<char> &image_data, const struct timeval ×tamp) {}
};
void print_usage(const char *pname)
{
DLOG0(DLOG_INFO, "USAGE: %s\n", pname);
}
void cancel_handler(int signum)
{
timer_running = false;
continue_recording = false;
}
#if 0
#define SND_PATH "default"
#else
#define SND_PATH "hw:0,0"
#endif
int main(int argc, char* argv[])
{
int32_t c, err;
std::thread t2;
oasis::key_value_map_t parameters;
srand(time(NULL));
signal(SIGINT, cancel_handler);
////////////////////////////////////////////////////////////////////////////////////////////
// init
bool offs_disabled = false;
if(!offs_disabled) {
parameters["offs-qsize-max"] = std::to_string(OFFS_QUEUE_SIZE_KBYTES);
parameters["offs-overwrite-if-exist"] = "1";
parameters["offs-cache-size"] = std::to_string(OFFS_CACHE_SIZE_KBYTES);
} else {
parameters["offs-disable"] = "1";
}
parameters["media-cache-size"] = std::to_string(MEDIA_CACHE_SIZE_KBYTES);
//parameters["oasis-log-flags"] = std::to_string(OASIS_LOG_DEBUG/*|OASIS_LOG_ENCODE_BITRATE*/);
enableLogLocalTime(true);
if(oasis::initialize(parameters) < 0) {
DLOG(DLOG_ERROR, "Oasis init failed\n");
return -1;
}
//non-offs, use system fs
#if !USE_OFFS
fs::offsConfigLocalFormatInfo("/tmp/DRIVING", 20*1024*1024);
fs::offsConfigLocalFormatInfo("/tmp/EVENT", 20*1024*1024);
#endif
////////////////////////////////////////////////////////////////////////////////////////////
// audio
parameters.clear();
parameters["types"]="source,sink";
parameters["path"]=SND_PATH;
parameters["always-on"]="0";
parameters["channels"]="2";
parameters["aec-disabled"]="1";
parameters["snd-input-channels"]="2";
parameters["snd-input-sample-size"]="16";
parameters["snd-input-sampling-duration-msec"]="40";
parameters["snd-input-sampling-rate"]="48000";
//parameters["snd-input-sampling-rate"]="22050";
createAudioDevice(parameters);
////////////////////////////////////////////////////////////////////////////////////////////
// sources
parameters.clear();
parameters["source-count"] = "2";
parameters["source1-camera-id"] = "0";
parameters["source1-isp-id"] = "0";
parameters["source1-isp-wdr-mode"] = "0";
parameters["source1-capture-format"] = "YUV420";
parameters["source1-capture-buffers"] = "5";
parameters["source1-fps"] = "30";
parameters["source1-subchannel-rotation"] = "0";
parameters["source1-loc"] = "front";
parameters["source1-capture-resolution"] = "2160p";
parameters["source1-sensor-config"] = "./Resource_678/VIC/2/imx678_3840x2160_ch2.cfg";
parameters["source1-autoscene-config"] = "./Resource_678/AutoScene/autoscene_conf.cfg";
parameters["source1-resource-dir"] = "./Resource_678/";
parameters["source2-camera-id"] = "1";
parameters["source2-isp-id"] = "0";
parameters["source2-isp-wdr-mode"] = "0";
parameters["source2-capture-format"] = "YUV420";
parameters["source2-capture-buffers"] = "5";
parameters["source2-fps"] = "30";
parameters["source2-subchannel-rotation"] = "0";
parameters["source2-loc"] = "right";
parameters["source2-capture-resolution"] = "1080p";
parameters["source2-sensor-config"] = "./Resource_tp2863/VIC/0/tp2863_1920x1080_ch0.cfg";
parameters["source2-autoscene-config"] = "./Resource_tp2863/AutoScene/autoscene_conf.cfg";
parameters["source2-resource-dir"] = "./Resource_tp2863/";
configCameras(parameters);
////////////////////////////////////////////////////////////////////////////////////////////
//recorer settings
parameters.clear();
parameters["file-prefix"] = "oasis-";
parameters["file-extension"] = "mp4";
parameters["file-duration-secs"] = "60";
#if USE_OFFS
parameters["normal-folder-path"] = "/mnt/sd/DRIVING";
parameters["event-folder-path"] = "/mnt/sd/EVENT";
parameters["motion-folder-path"] = "/mnt/sd/EVENT";
#else
parameters["normal-folder-path"] = "/tmp/DRIVING";
parameters["event-folder-path"] = "/tmp/EVENT";
parameters["motion-folder-path"] = "/tmp/EVENT";
#endif
parameters["event-pre-recording-seconds"] = "10";
parameters["event-post-recording-seconds"] = "10";
parameters["motion-pre-recording-seconds"] = "10";
parameters["motion-post-recording-seconds"] = "30";
parameters["disable-event-recording"] = "0";
parameters["disable-offs-recording"] = "0";
#if USE_OFFS
parameters["max-files"] = "0";
#else
parameters["max-files"] = "5";
#endif
parameters["delete-oldest-file-on-max-files"] = "1";
parameters["recording-size-limit-threshold-seconds"] = "1";
parameters["report-media-info-ex"] = "1";
parameters["avi-strd-size-max"] = "65536";
parameters["mp4-udta-size-max"] = "65536";
parameters["enable-persistent-cache"] = "0";
parameters["recording-file-header-write-interval-secs"] = "1";
parameters["snd-path"] = "hw:0,0";
parameters["snd-input-channels"] = "2";
parameters["snd-input-sample-size"] = "16";
parameters["snd-input-sampling-duration-msec"] = "120";
parameters["snd-input-sampling-rate"] = "44100";
//parameters["snd-input-sampling-rate"] = "48000";
parameters["aencoder-type"] = "aac"; //aac, raw, mp3
parameters["aencoder-bitrate"] = "128000";
parameters["osd-font-size"] = "0"; // disable (default)
parameters["osd-font-face"] = "Consolas";
parameters["osd-text-color"] = "255,255,255";
parameters["osd-use-text-color"] = "1";
parameters["osd-use-bg-color"] = "0";
parameters["osd-bg-color"] = "0,0,0";
parameters["osd-use-outline-color"] = "1";
parameters["osd-outline-color"] = "255,255,255";
parameters["osd-horz-align"] = "left";
parameters["osd-vert-align"] = "bottom";
parameters["osd-font-path"] = "/mnt/flash/leipzig/consola.ttf";
parameters["osd-use-fixed-size"] = "0";
parameters["channel-count"] = "2";
parameters["channel1-camera-id"] = "0";
parameters["channel1-ise-id"] = "-1";
parameters["channel1-resolution"] = "2160p";
parameters["channel1-bitrate"] = "8000000";
parameters["channel1-fps"] = "30";
parameters["channel1-file-framerate"] = "30";
parameters["channel1-vencoder-type"] = "h264";
parameters["channel1-venc-framerate"] = "30";
parameters["channel1-venc-keyframe-interval"] = "30";
parameters["channel1-h264-profile"] = "high";
parameters["channel1-h264-level"] = "level51";
parameters["channel1-h264-enable-cabac"] = "1";
parameters["channel1-h264-min-qp"] = "10";
parameters["channel1-h264-max-qp"] = "31";
parameters["channel1-h264-enable-fixqp"] = "0";
parameters["channel1-h264-fix-iqp"] = "10";
parameters["channel1-h264-fix-pqp"] = "20";
parameters["channel1-media-wait-timeout-secs"] = "3";
parameters["channel1-media-wait-timeout-notify-oneshot"] = "1";
parameters["channel1-osd-font-size"] = "12";
parameters["channel2-camera-id"] = "1";
parameters["channel2-ise-id"] = "-1";
parameters["channel2-resolution"] = "1080p";
parameters["channel2-bitrate"] = "8000000";
parameters["channel2-fps"] = "30";
parameters["channel2-file-framerate"] = "30";
parameters["channel2-vencoder-type"] = "h264";
parameters["channel2-venc-framerate"] = "30";
parameters["channel2-venc-keyframe-interval"] = "30";
parameters["channel2-h264-profile"] = "high";
parameters["channel2-h264-level"] = "level51";
parameters["channel2-h264-enable-cabac"] = "1";
parameters["channel2-h264-min-qp"] = "10";
parameters["channel2-h264-max-qp"] = "31";
parameters["channel2-h264-enable-fixqp"] = "0";
parameters["channel2-h264-fix-iqp"] = "10";
parameters["channel2-h264-fix-pqp"] = "20";
parameters["channel2-media-wait-timeout-secs"] = "3";
parameters["channel2-media-wait-timeout-notify-oneshot"] = "1";
parameters["channel2-osd-font-size"] = "12";
std::shared_ptr<MyRecordingObserver> recording_observer = std::make_shared<MyRecordingObserver>();
//dumpParameters("recorder", recorder_parameters);
RecorderRef recorder = createRecorder(parameters);
if(recorder == nullptr) {
DLOG(DLOG_ERROR, "recorder creation failed!\r\n");
goto done;
}
memset(&strd_data_header_, 0, sizeof(strd_data_header_));
////////////////////////////////////////////////////////////////////////////////////////////
// timer thread
t2 = std::thread([&]() {
time_t t;
struct tm tm_t;
uint64_t usec, preview_usec;
std::string title;
uint32_t count = 0;
char stream_data[512] = { 0, };
preview_usec = systemTime();
do {
usec = systemTime();
time(&t);
localtime_r(&t, &tm_t);
if (isRecorderRecording(recorder)) {
sprintf(stream_data, "Main %4d/%02d/%02d %02d:%02d:%02d", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
addRecordingVideoStreamData(recorder, (void*)stream_data, strlen(stream_data) + 1);
}
if(/*count == 0 &&*/ isRecorderRecording(recorder)) {
title = oasis::format("OASIS %4d/%02d/%02d %02d:%02d:%02d\n", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
setRecordingText(recorder, kTextTrackOsd, title, usec);
count++;
}
//30 msec
usleep(40000);
} while (timer_running);
});
err = startRecording(recorder, recording_observer, nullptr, false, &strd_data_header_, sizeof(strd_data_header_));
if(err < 0) {
DLOG(DLOG_ERROR, "start recording failed\r\n");
goto done;
}
printf("Ctrl+C to exit...\n");
do {
c = getch("Press Enter key to start event recording...");
if(c < 0 || continue_recording == false) {
break;
}
if(c != '\n') {
fprintf(stdout, "%c", c);
}
fprintf(stdout, "\n");
if(c == 10) {
//start event recording
if (isRecorderRecording(recorder) && isEventRunning(recorder) == false) {
time_t t;
struct tm tm_t;
char path[PATH_MAX];
time(&t);
localtime_r(&t, &tm_t);
#if USE_OFFS
sprintf(path, "/mnt/sd/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
#else
sprintf(path, "/tmp/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
#endif
startEventRecording(recorder, path);
} else {
fprintf(stdout, "Recorder not started or busy in recording an event.\n");
}
}
} while (continue_recording == true);
done:
timer_running = false;
if (t2.joinable()) {
t2.join();
}
if(recorder) {
destroyRecorder(recorder);
}
oasis::finalize();
printf("goodbye.\n");
return 0;
}
개별 이벤트 녹화¶
아래는 2개의 영상 채널에 대한 개별 녹화 예입니다. 녹화 도중 사용자가 엔터키를 입력하면 이벤트 녹화를 진행합니다.
printf("Ctrl+C to exit...\n");
do {
c = getch("Press Enter key to start event recording...");
if(c < 0 || continue_recording == false) {
break;
}
if(c != '\n') {
fprintf(stdout, "%c", c);
}
fprintf(stdout, "\n");
if(c == 10) {
//start event recording
if (isMultiChannelRecorderRecording(recorder) && isMultiChannelEventRunning(recorder) == false) {
std::list<MultiChannelRecorderFilePath> file_paths;
time_t t;
struct tm tm_t;
char path[PATH_MAX];
time(&t);
localtime_r(&t, &tm_t);
for(int32_t ch=1; ch<=2; ch++) {
#if USE_OFFS
sprintf(path, "/mnt/sd/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d-CH%d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec, ch);
#else
sprintf(path, "/tmp/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d-CH%d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec, ch);
#endif
file_paths.emplace_back(ch, path);
}
startMultiChannelEventRecording(recorder, file_paths, nullptr, 0, false);
} else {
fprintf(stdout, "Recorder not started or busy in recording an event.\n");
}
}
} while (continue_recording == true);
로그 출력을 위해 MediaObserver2 에서 유도된 사용자 정의 인터페이스에 아래와 같이 콜백 함수를 정의합니다.
virtual void onEventRecordingStarted(int32_t channel_id, void *user_data, media_report_reason_t reason, const char *file_path) {
DLOG0(DLOG_THIS, "Event<%d> Recording started @ \"%s\" (reason: %d).\r\n", channel_id, file_path, reason);
}
virtual void onEventRecordingCompleted(int32_t channel_id, void *user_data, media_report_reason_t reason, const char *file_path) {
DLOG0(DLOG_THIS, "Event<%d> Recording completed @ \"%s\" (reason: %d).\r\n", channel_id, file_path, reason);
}
출력된 로그 내용입니다.
[264.401025] [2000/06/27 04:06:13.770276] Event<1> Recording started @ "/tmp/EVENT/EVT_2000-06-27_04-06-13-CH1.mp4" (reason: 0).
[264.401372] [2000/06/27 04:06:13.770622] Event<2> Recording started @ "/tmp/EVENT/EVT_2000-06-27_04-06-13-CH2.mp4" (reason: 0).
[279.613067] [2000/06/27 04:06:28.982317] Event<2> Recording completed @ "/tmp/EVENT/EVT_2000-06-27_04-06-13-CH2.mp4" (reason: 0).
[279.616044] [2000/06/27 04:06:28.985295] Event<1> Recording completed @ "/tmp/EVENT/EVT_2000-06-27_04-06-13-CH1.mp4" (reason: 0).
전체 코드입니다.
#include "OasisAPI.h"
#include "OasisLog.h"
#include "OasisMedia.h"
#include "OasisFS.h"
#include "OasisUtil.h"
#include <thread>
#include <mutex>
#include <memory>
#include <condition_variable>
#include <signal.h>
#define DLOG_THIS 0x00010000
#define DLOG_RECORD 0x00020000
#define DLOG_FRAME 0x00040000
#define DLOG_TRACE 0x00080000
#undef DLOG_FLAGS
#define DLOG_FLAGS (DLOG_FLAGS_DEFAULT|DLOG_RECORD|DLOG_TRACE|DLOG_THIS/*|DLOG_FRAME*/)
#undef DLOG_TAG
#define DLOG_TAG "RECORDER"
#define OFFS_QUEUE_SIZE_KBYTES (20*1024)
#define OFFS_CACHE_SIZE_KBYTES (1024)
#define OFFS_WRITE_ALIGNMENT_BYTES 8192
#define MEDIA_CACHE_SIZE_KBYTES (40*1024)
#define USE_OFFS 0
extern "C" int getch(const char *prompt);
using namespace oasis;
static bool timer_running = true;
static bool continue_recording = true;
static void bytesToString(size_t size, char *size_string)
{
if(size >= 1024*1024) {
sprintf(size_string, "%.2fMB", (double)size/1048576);
} else if(size >= 1024) {
sprintf(size_string, "%.2fKB", (double)size/1024);
} else {
sprintf(size_string, "%dB", size);
}
}
static uint8_t strd_data_header_[24] = {0, };
class MyRecordingObserver : public oasis::MediaObserver2
{
public:
MyRecordingObserver() {}
virtual ~MyRecordingObserver() {}
//recording started and stopped
virtual void onStarted(int32_t channel_id, void *user_data, const char *file_path) {
DLOG(DLOG_THIS, "Recording<%d> started, file path \"%s\"\r\n", channel_id, file_path);
}
virtual void onStopped(int32_t channel_id, void *user_data, media_report_reason_t reason, void *details) {
DLOG(DLOG_THIS, "Recording%d> stopped, reason: %d(%s)\r\n", channel_id, reason, mediaReportReasonString(reason));
if(reason != kMediaReportNoError) {
continue_recording = false;
}
}
//recording a new file
virtual void onFileChanged(int32_t channel_id, void *user_data, const char *new_file_path) {
DLOG(DLOG_THIS, "Recording<%d> new file used: \"%s\"\r\n", channel_id, new_file_path);
}
virtual void onError(int32_t channel_id, void *user_data, media_report_reason_t reason, void *details) {
if(reason == kRecordingErrorMediaDataTimeout) {
int32_t camera_id = (intptr_t)details;
DLOG(DLOG_ERROR, "Recording<%d> error, reason: %d(%s), camera<%d>\r\n", channel_id, reason, mediaReportReasonString(reason), camera_id);
} else if(reason == kRecordingErrorMediaCacheReadErrorBegin) {
DLOG(DLOG_ERROR, "Recording<%d> error, reason: %d(%s) ===> stopping!!!\r\n", channel_id, reason, mediaReportReasonString(reason));
} else if(reason == kRecordingErrorMediaCacheReadErrorEnd) {
DLOG(DLOG_ERROR, "Recording<%d> error, reason: %d(%s)\r\n", channel_id, reason, mediaReportReasonString(reason));
}
continue_recording = false;
}
//periodic report every 1 second.
virtual void onInfo(int32_t channel_id, void *user_data, MediaInfo *info) {
DLOG(DLOG_THIS, "Recording<%d> duration %lld.%06lld sec\r\n", channel_id, info->durationUs / 1000000ll, info->durationUs % 1000000ll);
}
virtual void onInfoEx(int32_t channel_id, void *user_data, MediaInfoEx *info) {
#if 0
int h, m, s, u;
char normal_qsize[128], event_qsize[128], in_size[128], out_size[128], cache_in_size[128];
char video1_size[128], video2_size[128], meta_size[128], audio_size[128], file_size[128];
parseUsec(info->durationUs, h, m, s, u);
TRACE0("Recording<%d> duration \033[33m%02d:%02d:%02d.%06d\033[0m, wq#%d(%d), eq#%d(%d), meq#%d(%d), nohits#%u, mb.avail#%d/%d (free mem %.3fMB, cpu %.2f%%)\r\n", channel_id, h, m, s, u, info->writerStat.curSamples, info->writerStat.maxSamples, info->timeshiftStat.curSamples, info->timeshiftStat.maxSamples, info->writerStat.motionStat.curSamples, info->writerStat.motionStat.maxSamples, info->mediaCacheStat.nohits_, info->mediaCacheStat.free_buffer_count_, info->mediaCacheStat.total_buffer_count_, (double)oasis::getFreeMemorySize()/1024.0/1024.0, getCPUUsage());
//offs state
bytesToString(info->offsStat.qNormalSize, normal_qsize);
bytesToString(info->offsStat.qEventSize, event_qsize);
bytesToString(info->offsStat.inSize, in_size);
bytesToString(info->offsStat.outSize, out_size);
bytesToString(info->mediaCacheStat.in_size_, cache_in_size);
TRACE0(" cache: during %d msec, in %s\r\n", info->mediaCacheStat.check_duration_, cache_in_size);
TRACE0(" offs: n#%d, e#%d, nz#%s, ez#%s, during %d msec: in %s out %s elapsed %d msec \r\n", info->offsStat.qNormalCount, info->offsStat.qEventCount, normal_qsize, event_qsize, info->offsStat.checkDuration, in_size, out_size, info->offsStat.elapsedSum);
//TRACE0(" cache: hits#%u, nohits#%u, free#%u, gets#%u, puts#%u\r\n", info->mediaCacheStat.hits_, info->mediaCacheStat.nohits_, info->mediaCacheStat.free_buffer_count_, info->mediaCacheStat.get_buffer_count_, info->mediaCacheStat.put_buffer_count_);
//print recording stat in details
bytesToString(info->writerStat.video1Length, video1_size);
bytesToString(info->writerStat.video2Length, video2_size);
bytesToString(info->writerStat.metaLength, meta_size);
bytesToString(info->writerStat.audioLength, audio_size);
bytesToString(info->writerStat.fileLength, file_size);
TRACE0(" %s: video1 %s, video2 %s, meta %s, audio %s, file %s\n", info->sniffing?"sniffing":"recording", video1_size, video2_size, meta_size, audio_size, file_size);
if(info->writerStat.motionStat.recording) {
bytesToString(info->writerStat.motionStat.video1Length, video1_size);
bytesToString(info->writerStat.motionStat.video2Length, video2_size);
bytesToString(info->writerStat.motionStat.metaLength, meta_size);
bytesToString(info->writerStat.motionStat.audioLength, audio_size);
bytesToString(info->writerStat.motionStat.fileLength, file_size);
TRACE0(" motion: video1 %s, video2 %s, meta %s, audio %s, file %s\n", video1_size, video2_size, meta_size, audio_size, file_size);
}
if(info->timeshiftStat.recording) {
bytesToString(info->timeshiftStat.video1Length, video1_size);
bytesToString(info->timeshiftStat.video2Length, video2_size);
bytesToString(info->timeshiftStat.metaLength, meta_size);
bytesToString(info->timeshiftStat.audioLength, audio_size);
bytesToString(info->timeshiftStat.fileLength, file_size);
TRACE0(" event: video1 %s, video2 %s, meta %s, audio %s, file %s\n", video1_size, video2_size, meta_size, audio_size, file_size);
}
#endif
}
//event recording started and completed(aborted)
virtual void onEventRecordingStarted(int32_t channel_id, void *user_data, media_report_reason_t reason, const char *file_path) {
DLOG0(DLOG_THIS, "Event<%d> Recording started @ \"%s\" (reason: %d).\r\n", channel_id, file_path, reason);
}
virtual void onEventRecordingCompleted(int32_t channel_id, void *user_data, media_report_reason_t reason, const char *file_path) {
DLOG0(DLOG_THIS, "Event<%d> Recording completed @ \"%s\" (reason: %d).\r\n", channel_id, file_path, reason);
}
//motion recording started and completed(aborted)
virtual void onMotionRecordingStarted(int32_t channel_id, void *user_data, media_report_reason_t reason, const char *file_path) {
}
virtual void onMotionRecordingCompleted(int32_t channel_id, void *user_data, media_report_reason_t reason, const char *file_path) {
}
virtual void queryNewFilePaths(void *user_data, recording_mode_t file_type, bool sound_on, std::list<MultiChannelRecorderFilePath> &file_paths) {
DLOG(DLOG_THIS, "queryNewFilePath: mode: %d, sound on %d:\n", file_type, sound_on);
for(auto it = file_paths.begin(); it != file_paths.end(); it++) {
DLOG(DLOG_THIS, " Recording<%d> path %s\n", (*it).channel_id_, (*it).file_path_.c_str());
}
}
virtual void onSnapshotCompleted(int32_t channel_id, void *user_data, uint32_t snapshot_id, int error, const std::vector<char> &jpeg_image_data, const struct timeval ×tamp) {
DLOG(DLOG_THIS, "Snapshot<%d> completed: id %d, error %d, size %zd bytes @ %d sec\n", channel_id, snapshot_id, error, jpeg_image_data.size(), timestamp.tv_sec);
}
};
void print_usage(const char *pname)
{
DLOG0(DLOG_INFO, "USAGE: %s\n", pname);
}
void cancel_handler(int signum)
{
timer_running = false;
continue_recording = false;
}
#if 0
#define SND_PATH "default"
#else
#define SND_PATH "hw:0,0"
#endif
// Sample driver: initializes the oasis SDK, configures audio + two camera
// sources, creates a 2-channel recorder, starts continuous recording, and lets
// the user trigger an event recording with the Enter key. Ctrl+C exits.
// Fixes vs. original: bounded snprintf instead of sprintf (3 sites), corrected
// stale "//30 msec" comment (the sleep is 40 ms), fixed "recorer" typo,
// silenced unused argc/argv.
int main(int argc, char* argv[])
{
(void)argc; // no command-line options are parsed yet (see print_usage)
(void)argv;
int32_t c, err;
std::thread t2;
oasis::key_value_map_t parameters;
srand(time(NULL));
signal(SIGINT, cancel_handler);
////////////////////////////////////////////////////////////////////////////////////////////
// init
bool offs_disabled = false;
if(!offs_disabled) {
parameters["offs-qsize-max"] = std::to_string(OFFS_QUEUE_SIZE_KBYTES);
parameters["offs-overwrite-if-exist"] = "1";
parameters["offs-cache-size"] = std::to_string(OFFS_CACHE_SIZE_KBYTES);
} else {
parameters["offs-disable"] = "1";
}
parameters["media-cache-size"] = std::to_string(MEDIA_CACHE_SIZE_KBYTES);
//parameters["oasis-log-flags"] = std::to_string(OASIS_LOG_DEBUG/*|OASIS_LOG_ENCODE_BITRATE*/);
enableLogLocalTime(true);
if(oasis::initialize(parameters) < 0) {
DLOG(DLOG_ERROR, "Oasis init failed\n");
return -1;
}
//non-offs, use system fs
#if !USE_OFFS
fs::offsConfigLocalFormatInfo("/tmp/DRIVING", 20*1024*1024);
fs::offsConfigLocalFormatInfo("/tmp/EVENT", 20*1024*1024);
#endif
////////////////////////////////////////////////////////////////////////////////////////////
// audio
parameters.clear();
parameters["types"]="source,sink";
parameters["path"]=SND_PATH;
parameters["always-on"]="0";
parameters["channels"]="2";
parameters["aec-disabled"]="1";
parameters["snd-input-channels"]="2";
parameters["snd-input-sample-size"]="16";
parameters["snd-input-sampling-duration-msec"]="40";
parameters["snd-input-sampling-rate"]="48000";
//parameters["snd-input-sampling-rate"]="22050";
createAudioDevice(parameters);
////////////////////////////////////////////////////////////////////////////////////////////
// sources
parameters.clear();
parameters["source-count"] = "2";
parameters["source1-camera-id"] = "0";
parameters["source1-isp-id"] = "0";
parameters["source1-isp-wdr-mode"] = "0";
parameters["source1-capture-format"] = "YUV420";
parameters["source1-capture-buffers"] = "5";
parameters["source1-fps"] = "30";
parameters["source1-subchannel-rotation"] = "0";
parameters["source1-loc"] = "front";
parameters["source1-capture-resolution"] = "2160p";
parameters["source1-sensor-config"] = "./Resource_678/VIC/2/imx678_3840x2160_ch2.cfg";
parameters["source1-autoscene-config"] = "./Resource_678/AutoScene/autoscene_conf.cfg";
parameters["source1-resource-dir"] = "./Resource_678/";
parameters["source2-camera-id"] = "1";
parameters["source2-isp-id"] = "0";
parameters["source2-isp-wdr-mode"] = "0";
parameters["source2-capture-format"] = "YUV420";
parameters["source2-capture-buffers"] = "5";
parameters["source2-fps"] = "30";
parameters["source2-subchannel-rotation"] = "0";
parameters["source2-loc"] = "right";
parameters["source2-capture-resolution"] = "1080p";
parameters["source2-sensor-config"] = "./Resource_tp2863/VIC/0/tp2863_1920x1080_ch0.cfg";
parameters["source2-autoscene-config"] = "./Resource_tp2863/AutoScene/autoscene_conf.cfg";
parameters["source2-resource-dir"] = "./Resource_tp2863/";
configCameras(parameters);
////////////////////////////////////////////////////////////////////////////////////////////
// recorder settings
parameters.clear();
parameters["file-prefix"] = "oasis-";
parameters["file-extension"] = "mp4";
parameters["file-duration-secs"] = "60";
#if USE_OFFS
parameters["normal-folder-path"] = "/mnt/sd/DRIVING";
parameters["event-folder-path"] = "/mnt/sd/EVENT";
parameters["motion-folder-path"] = "/mnt/sd/EVENT";
#else
parameters["normal-folder-path"] = "/tmp/DRIVING";
parameters["event-folder-path"] = "/tmp/EVENT";
parameters["motion-folder-path"] = "/tmp/EVENT";
#endif
// Event clips cover 10 s before and 10 s after the trigger (see SDK docs on
// event-pre/post-recording-seconds).
parameters["event-pre-recording-seconds"] = "10";
parameters["event-post-recording-seconds"] = "10";
parameters["motion-pre-recording-seconds"] = "10";
parameters["motion-post-recording-seconds"] = "30";
parameters["disable-event-recording"] = "0";
parameters["disable-offs-recording"] = "0";
#if USE_OFFS
parameters["max-files"] = "0";
#else
parameters["max-files"] = "15";
#endif
parameters["delete-oldest-file-on-max-files"] = "1";
parameters["recording-size-limit-threshold-seconds"] = "1";
parameters["report-media-info-ex"] = "1";
parameters["avi-strd-size-max"] = "65536";
parameters["mp4-udta-size-max"] = "65536";
parameters["enable-persistent-cache"] = "0";
parameters["recording-file-header-write-interval-secs"] = "1";
parameters["snd-path"] = "hw:0,0";
parameters["snd-input-channels"] = "2";
parameters["snd-input-sample-size"] = "16";
parameters["snd-input-sampling-duration-msec"] = "120";
parameters["snd-input-sampling-rate"] = "44100";
//parameters["snd-input-sampling-rate"] = "48000";
parameters["aencoder-type"] = "aac"; //aac, raw, mp3
parameters["aencoder-bitrate"] = "128000";
parameters["osd-font-size"] = "0"; // disable (default)
parameters["osd-font-face"] = "Consolas";
parameters["osd-text-color"] = "255,255,255";
parameters["osd-use-text-color"] = "1";
parameters["osd-use-bg-color"] = "0";
parameters["osd-bg-color"] = "0,0,0";
parameters["osd-use-outline-color"] = "1";
parameters["osd-outline-color"] = "255,255,255";
parameters["osd-horz-align"] = "left";
parameters["osd-vert-align"] = "bottom";
parameters["osd-font-path"] = "/mnt/flash/leipzig/consola.ttf";
parameters["osd-use-fixed-size"] = "0";
parameters["channel-count"] = "2";
parameters["channel1-camera-id"] = "0";
parameters["channel1-ise-id"] = "-1";
parameters["channel1-resolution"] = "2160p";
parameters["channel1-bitrate"] = "8000000";
parameters["channel1-fps"] = "30";
parameters["channel1-file-framerate"] = "30";
parameters["channel1-vencoder-type"] = "h264";
parameters["channel1-venc-framerate"] = "30";
parameters["channel1-venc-keyframe-interval"] = "30";
parameters["channel1-h264-profile"] = "high";
parameters["channel1-h264-level"] = "level51";
parameters["channel1-h264-enable-cabac"] = "1";
parameters["channel1-h264-min-qp"] = "10";
parameters["channel1-h264-max-qp"] = "31";
parameters["channel1-h264-enable-fixqp"] = "0";
parameters["channel1-h264-fix-iqp"] = "10";
parameters["channel1-h264-fix-pqp"] = "20";
parameters["channel1-media-wait-timeout-secs"] = "3";
parameters["channel1-media-wait-timeout-notify-oneshot"] = "1";
parameters["channel1-osd-font-size"] = "12";
parameters["channel2-camera-id"] = "1";
parameters["channel2-ise-id"] = "-1";
parameters["channel2-resolution"] = "1080p";
parameters["channel2-bitrate"] = "8000000";
parameters["channel2-fps"] = "30";
parameters["channel2-file-framerate"] = "30";
parameters["channel2-vencoder-type"] = "h264";
parameters["channel2-venc-framerate"] = "30";
parameters["channel2-venc-keyframe-interval"] = "30";
parameters["channel2-h264-profile"] = "high";
parameters["channel2-h264-level"] = "level51";
parameters["channel2-h264-enable-cabac"] = "1";
parameters["channel2-h264-min-qp"] = "10";
parameters["channel2-h264-max-qp"] = "31";
parameters["channel2-h264-enable-fixqp"] = "0";
parameters["channel2-h264-fix-iqp"] = "10";
parameters["channel2-h264-fix-pqp"] = "20";
parameters["channel2-media-wait-timeout-secs"] = "3";
parameters["channel2-media-wait-timeout-notify-oneshot"] = "1";
parameters["channel2-osd-font-size"] = "12";
std::shared_ptr<MyRecordingObserver> recording_observer = std::make_shared<MyRecordingObserver>();
//dumpParameters("recorder", recorder_parameters);
RecorderRef recorder = createMultiChannelRecorder(parameters);
if(recorder == nullptr) {
DLOG(DLOG_ERROR, "recorder creation failed!\r\n");
goto done;
}
memset(&strd_data_header_, 0, sizeof(strd_data_header_));
////////////////////////////////////////////////////////////////////////////////////////////
// timer thread: periodically injects a timestamp into the per-frame stream
// data and the OSD text track while recording is active.
t2 = std::thread([&]() {
time_t t;
struct tm tm_t;
uint64_t usec, preview_usec;
std::string title;
uint32_t count = 0;
char stream_data[512] = { 0, };
preview_usec = systemTime();
do {
usec = systemTime();
time(&t);
localtime_r(&t, &tm_t);
if (isMultiChannelRecorderRecording(recorder)) {
// snprintf: bounded write into the fixed 512-byte buffer (sprintf had no bound).
snprintf(stream_data, sizeof(stream_data), "Main %4d/%02d/%02d %02d:%02d:%02d", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
addMultiChannelRecordingVideoStreamData(recorder, (void*)stream_data, strlen(stream_data) + 1);
}
if(/*count == 0 &&*/ isMultiChannelRecorderRecording(recorder)) {
title = oasis::format("OASIS %4d/%02d/%02d %02d:%02d:%02d\n", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec);
setMultiChannelRecordingText(recorder, kTextTrackOsd, title, usec);
count++;
}
// ~40 msec tick between updates
usleep(40000);
} while (timer_running);
});
err = startMultiChannelRecording(recorder, recording_observer, nullptr, false, &strd_data_header_, sizeof(strd_data_header_));
if(err < 0) {
DLOG(DLOG_ERROR, "start recording failed\r\n");
goto done;
}
printf("Ctrl+C to exit...\n");
do {
c = getch("Press Enter key to start event recording...");
if(c < 0 || continue_recording == false) {
break;
}
if(c != '\n') {
fprintf(stdout, "%c", c);
}
fprintf(stdout, "\n");
if(c == 10) {
// Enter pressed: start an event recording across both channels unless one
// is already in progress.
if (isMultiChannelRecorderRecording(recorder) && isMultiChannelEventRunning(recorder) == false) {
std::list<MultiChannelRecorderFilePath> file_paths;
time_t t;
struct tm tm_t;
char path[PATH_MAX];
time(&t);
localtime_r(&t, &tm_t);
for(int32_t ch=1; ch<=2; ch++) {
#if USE_OFFS
snprintf(path, sizeof(path), "/mnt/sd/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d-CH%d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec, ch);
#else
snprintf(path, sizeof(path), "/tmp/EVENT/EVT_%04d-%02d-%02d_%02d-%02d-%02d-CH%d.mp4", tm_t.tm_year + 1900, tm_t.tm_mon + 1, tm_t.tm_mday, tm_t.tm_hour, tm_t.tm_min, tm_t.tm_sec, ch);
#endif
file_paths.emplace_back(ch, path);
}
startMultiChannelEventRecording(recorder, file_paths, nullptr, 0, false);
} else {
fprintf(stdout, "Recorder not started or busy in recording an event.\n");
}
}
} while (continue_recording == true);
done:
timer_running = false;
if (t2.joinable()) {
t2.join();
}
if(recorder) {
destroyMultiChannelRecorder(recorder);
}
oasis::finalize();
printf("goodbye.\n");
return 0;
}