diff --git a/lib/libesp32/rtsp/CRtspSession.cpp b/lib/libesp32/rtsp/CRtspSession.cpp
index 9d5718091..a14610038 100755
--- a/lib/libesp32/rtsp/CRtspSession.cpp
+++ b/lib/libesp32/rtsp/CRtspSession.cpp
@@ -34,8 +34,8 @@ void CRtspSession::Init()
 
 bool CRtspSession::ParseRtspRequest(char const * aRequest, unsigned aRequestSize)
 {
-    char CmdName[RTSP_PARAM_STRING_MAX];
-    static char CurRequest[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
+    // char CmdName[RTSP_PARAM_STRING_MAX];
+    //char CurRequest[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
     unsigned CurRequestSize;
 
     Init();
@@ -45,7 +45,7 @@ bool CRtspSession::ParseRtspRequest(char const * aRequest, unsigned aRequestSize
     // check whether the request contains information about the RTP/RTCP UDP client ports (SETUP command)
     char * ClientPortPtr;
     char * TmpPtr;
-    static char CP[1024];
+    char CP[128]; //static char CP[1024];
     char * pCP;
 
     ClientPortPtr = strstr(CurRequest,"client_port");
@@ -230,7 +230,7 @@ RTSP_CMD_TYPES CRtspSession::Handle_RtspRequest(char const * aRequest, unsigned
 
 void CRtspSession::Handle_RtspOPTION()
 {
-    static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
+    //static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
 
     snprintf(Response,sizeof(Response),
         "RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
@@ -241,9 +241,9 @@ void CRtspSession::Handle_RtspOPTION()
 
 void CRtspSession::Handle_RtspDESCRIBE()
 {
-    static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
-    static char SDPBuf[1024];
-    static char URLBuf[1024];
+    //static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
+    char SDPBuf[128]; //static char SDPBuf[1024];
+    char URLBuf[128]; //static char URLBuf[1024];
 
     // check whether we know a stream with the URL which is requested
     m_StreamID = -1;        // invalid URL
@@ -261,7 +261,7 @@ void CRtspSession::Handle_RtspDESCRIBE()
     };
 
     // simulate DESCRIBE server response
-    static char OBuf[256];
+    // static char OBuf[256];
     char * ColonPtr;
     strcpy(OBuf,m_URLHostPort);
     ColonPtr = strstr(OBuf,":");
@@ -305,8 +305,8 @@ void CRtspSession::Handle_RtspDESCRIBE()
 
 void CRtspSession::Handle_RtspSETUP()
 {
-    static char Response[1024];
-    static char Transport[255];
+    //static char Response[1024];
+    //static char Transport[255];
 
     // init RTP streamer transport type (UDP or TCP) and ports for UDP transport
     m_Streamer->InitTransport(m_ClientRTPPort,m_ClientRTCPPort,m_TcpTransport);
@@ -336,7 +336,7 @@ void CRtspSession::Handle_RtspSETUP()
 
 void CRtspSession::Handle_RtspPLAY()
 {
-    static char Response[1024];
+    //static char Response[1024];
 
     // simulate SETUP server response
     snprintf(Response,sizeof(Response),
@@ -354,10 +354,10 @@ void CRtspSession::Handle_RtspPLAY()
 
 char const * CRtspSession::DateHeader()
 {
-    static char buf[200];
+    //static char buf[200];
     time_t tt = time(NULL);
-    strftime(buf, sizeof buf, "Date: %a, %b %d %Y %H:%M:%S GMT", gmtime(&tt));
-    return buf;
+    strftime(session_buf, sizeof(session_buf), "Date: %a, %b %d %Y %H:%M:%S GMT", gmtime(&tt));
+    return session_buf;
 }
 
 int CRtspSession::GetStreamID()
@@ -375,7 +375,7 @@ bool CRtspSession::handleRequests(uint32_t readTimeoutMs)
     if(m_stopped)
         return false; // Already closed down
 
-    static char RecvBuf[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
+    //char RecvBuf[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
     memset(RecvBuf,0x00,sizeof(RecvBuf));
 
     int res = socketread(m_RtspClient,RecvBuf,sizeof(RecvBuf), readTimeoutMs);
diff --git a/lib/libesp32/rtsp/CRtspSession.h b/lib/libesp32/rtsp/CRtspSession.h
index 298bd6a15..79700bd60 100755
--- a/lib/libesp32/rtsp/CRtspSession.h
+++ b/lib/libesp32/rtsp/CRtspSession.h
@@ -70,4 +70,11 @@ private:
     char        m_CSeq[RTSP_PARAM_STRING_MAX];      // RTSP command sequence number
     char        m_URLHostPort[MAX_HOSTNAME_LEN];    // host:port part of the URL
     unsigned    m_ContentLength;                    // SDP string size
+    char        CurRequest[RTSP_BUFFER_SIZE];
+    char        RecvBuf[RTSP_BUFFER_SIZE];
+    char        session_buf[128];
+    char        CmdName[RTSP_PARAM_STRING_MAX];
+    char        Transport[255];
+    char        Response[1024];
+    char        OBuf[256];
 };
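The pattern behind both files above (and the CStreamer change below) is the same: buffers that used to be function-local `static`s become members of the owning object, so each CRtspSession instance carries its own copies instead of every instance sharing one RAM-resident buffer for the whole uptime. A minimal, self-contained sketch of the difference this makes; illustration only, not code from this PR:

```
// Illustration of "function-local static buffer" vs "per-instance member buffer".
#include <cstdio>

struct SessionWithStatic {
  const char *Hello(const char *name) {
    static char buf[32];                       // one buffer shared by ALL sessions
    snprintf(buf, sizeof(buf), "Hello %s", name);
    return buf;
  }
};

struct SessionWithMember {
  char buf[32];                                // one buffer PER session (like Response/OBuf above)
  const char *Hello(const char *name) {
    snprintf(buf, sizeof(buf), "Hello %s", name);
    return buf;
  }
};

int main() {
  SessionWithStatic a, b;
  const char *pa = a.Hello("A");
  b.Hello("B");                                // overwrites the shared static buffer
  printf("%s\n", pa);                          // prints "Hello B": A's result was clobbered

  SessionWithMember c, d;
  const char *pc = c.Hello("C");
  d.Hello("D");
  printf("%s\n", pc);                          // prints "Hello C": each session keeps its own data
  return 0;
}
```

The trade-off is that the buffers now add to the size of every session object, which is presumably why several of them are also shrunk in the same change (for example CP, SDPBuf and URLBuf go from 1024 to 128 bytes).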
diff --git a/lib/libesp32/rtsp/CStreamer.cpp b/lib/libesp32/rtsp/CStreamer.cpp
index 26322c0fa..af4519ec7 100755
--- a/lib/libesp32/rtsp/CStreamer.cpp
+++ b/lib/libesp32/rtsp/CStreamer.cpp
@@ -48,7 +48,7 @@ int CStreamer::SendRtpPacket(unsigned const char * jpeg, int jpegLen, int fragme
     bool includeQuantTbl = quant0tbl && quant1tbl && fragmentOffset == 0;
     uint8_t q = includeQuantTbl ? 128 : 0x5e;
 
-    static char RtpBuf[2048]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
+    //static char RtpBuf[2048]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
     int RtpPacketSize = fragmentLen + KRtpHeaderSize + KJpegHeaderSize + (includeQuantTbl ? (4 + 64 * 2) : 0);
 
     memset(RtpBuf,0x00,sizeof(RtpBuf));
diff --git a/lib/libesp32/rtsp/CStreamer.h b/lib/libesp32/rtsp/CStreamer.h
index 78d112b5b..58a9983e2 100755
--- a/lib/libesp32/rtsp/CStreamer.h
+++ b/lib/libesp32/rtsp/CStreamer.h
@@ -39,6 +39,7 @@ private:
     u_short m_width;                       // image data info
     u_short m_height;
+    char RtpBuf[2048];
 };
diff --git a/lib/libesp32/rtsp/LICENSE b/lib/libesp32/rtsp/LICENSE
new file mode 100755
index 000000000..136bbd239
--- /dev/null
+++ b/lib/libesp32/rtsp/LICENSE
@@ -0,0 +1,7 @@
+Copyright 2018 S. Kevin Hester-Chow, kevinh@geeksville.com (MIT License)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/lib/libesp32/rtsp/README.md b/lib/libesp32/rtsp/README.md
new file mode 100755
index 000000000..37d6aec65
--- /dev/null
+++ b/lib/libesp32/rtsp/README.md
@@ -0,0 +1,93 @@
+# Micro-RTSP
+
+This is a small library which can be used to serve up RTSP streams from
+resource constrained MCUs. It lets you trivially make a $10 open source
+RTSP video stream camera.
+
+# Usage
+
+This library works for ESP32/arduino targets but also for most any posixish platform.
+
+## Example arduino/ESP32 usage
+
+This library will work standalone, but it is _super_ easy to use if your app is platform.io based.
+Just "pio lib install Micro-RTSP" to pull the latest version from the PlatformIO library server. If you want to use the OV2640
+camera support you'll need to be targeting the espressif32 platform in your project.
+
+See the [example platform.io app](/examples). It should build and run on virtually any of the $10
+ESP32-CAM boards (such as M5CAM). The relevant bit of the code is included below. In short:
+1. Listen for a TCP connection on the RTSP port with accept()
+2. When a connection comes in, create CRtspSession and OV2640Streamer camera streamer objects.
+3. While the connection remains, call session->handleRequests(0) to handle any incoming client requests.
+4. Every 100ms or so call session->broadcastCurrentFrame() to send new frames to any clients.
+
+```
+void loop()
+{
+    uint32_t msecPerFrame = 100;
+    static uint32_t lastimage = millis();
+
+    // If we have an active client connection, just service that until gone
+    // (FIXME - support multiple simultaneous clients)
+    if(session) {
+        session->handleRequests(0); // we don't use a timeout here,
+        // instead we send only if we have new enough frames
+
+        uint32_t now = millis();
+        if(now > lastimage + msecPerFrame || now < lastimage) { // handle clock rollover
+            session->broadcastCurrentFrame(now);
+            lastimage = now;
+
+            // check if we are overrunning our max frame rate
+            now = millis();
+            if(now > lastimage + msecPerFrame)
+                printf("warning exceeding max frame rate of %d ms\n", now - lastimage);
+        }
+
+        if(session->m_stopped) {
+            delete session;
+            delete streamer;
+            session = NULL;
+            streamer = NULL;
+        }
+    }
+    else {
+        client = rtspServer.accept();
+
+        if(client) {
+            //streamer = new SimStreamer(&client, true);    // our streamer for UDP/TCP based RTP transport
+            streamer = new OV2640Streamer(&client, cam);    // our streamer for UDP/TCP based RTP transport
+
+            session = new CRtspSession(&client, streamer);  // our threads RTSP session and state
+        }
+    }
+}
+```
+## Example posix/linux usage
+
+There is a small standalone example [here](/test/RTSPTestServer.cpp). You can build it by following [these](/test/README.md) directions. The usage of the two key classes (CRtspSession and SimStreamer) is very similar to the ESP32 usage.
+
+## Supporting new camera devices
+
+Supporting new camera devices is quite simple. See OV2640Streamer for an example and implement streamImage()
+by reading a frame from your camera.
+
+# Structure and design notes
+
+# Issues and sending pull requests
+
+Please report issues and send pull requests. I'll happily reply. ;-)
+
+# Credits
+
+The server code was initially based on a great 2013 [tutorial](https://www.medialan.de/usecase0001.html) by Medialan.
+
+# License
+
+Copyright 2018 S. Kevin Hester-Chow, kevinh@geeksville.com (MIT License)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
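The README's "Supporting new camera devices" section is the intended extension point: derive from CStreamer and push JPEG frames into it. A rough sketch of such a subclass follows; the constructor arguments and the streamImage()/streamFrame() signatures are paraphrased from how OV2640Streamer is used and should be checked against CStreamer.h in this tree, and my_sensor_capture_jpeg() is a hypothetical stand-in for a real camera driver:

```
#include <stdint.h>
#include <stddef.h>
#include "CStreamer.h"

// Hypothetical capture hook for your own sensor driver (not part of Micro-RTSP):
// returns a pointer to one complete JPEG frame and its length.
extern const uint8_t *my_sensor_capture_jpeg(size_t *len);

class MyCamStreamer : public CStreamer
{
public:
    // Assumed constructor shape: hand the client connection and frame size to the base class.
    MyCamStreamer(WiFiClient *client) : CStreamer(client, 640, 480) {}

    virtual void streamImage(uint32_t curMsec)
    {
        size_t len = 0;
        const uint8_t *jpeg = my_sensor_capture_jpeg(&len);   // grab one JPEG frame
        if (jpeg && len) {
            // The base class fragments the frame into RTP/JPEG packets and sends them
            // over the transport negotiated during SETUP (UDP or TCP).
            streamFrame(jpeg, len, curMsec);
        }
    }
};
```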
diff --git a/lib/libesp32/rtsp/library.properties b/lib/libesp32/rtsp/library.properties
new file mode 100755
index 000000000..03b5dc224
--- /dev/null
+++ b/lib/libesp32/rtsp/library.properties
@@ -0,0 +1,9 @@
+name=Micro-RTSP
+version=0.1.6
+author=Kevin Hester
+maintainer=Kevin Hester
+sentence=Mikro RTSP server for mikros
+paragraph=A small/efficient RTSP server for ESP32 and other micros
+category=Data Storage
+url=https://github.com/geeksville/Micro-RTSP.git
+architectures=*
diff --git a/tasmota/xdrv_81_webcam.ino b/tasmota/xdrv_81_webcam.ino
index 179655dd4..f3d44710e 100755
--- a/tasmota/xdrv_81_webcam.ino
+++ b/tasmota/xdrv_81_webcam.ino
@@ -44,6 +44,8 @@
  * WcSaturation   = Set picture Saturation -2 ... +2
  * WcBrightness   = Set picture Brightness -2 ... +2
  * WcContrast     = Set picture Contrast -2 ... +2
+ * WcInit         = Init Camera Interface
+ * WcRtsp         = Control RTSP Server, 0=disable, 1=enable (forces restart) (if defined ENABLE_RTSPSERVER)
  *
  * Only boards with PSRAM should be used. To enable PSRAM board should be se set to esp32cam in common32 of platform_override.ini
  * board = esp32cam
@@ -54,6 +56,7 @@
  * not tolerate any capictive load
  * flash led = gpio 4
  * red led = gpio 33
+ * optional rtsp url: rtsp://xxx.xxx.xxx.xxx:8554/mjpeg/1
  */
 
/*********************************************************************************************/
@@ -69,11 +72,8 @@ bool HttpCheckPriviledgedAccess(bool);
 
 extern ESP8266WebServer *Webserver;
 
-ESP8266WebServer *CamServer;
 #define BOUNDARY "e8b8c539-047d-4777-a985-fbba6edff11e"
 
-WiFiClient client;
-
 // CAMERA_MODEL_AI_THINKER default template pins
 
 #define PWDN_GPIO_NUM     32
@@ -94,29 +94,50 @@ WiFiClient client;
 #define HREF_GPIO_NUM     23
 #define PCLK_GPIO_NUM     22
 
-struct {
-  uint8_t  up;
-  uint16_t width;
-  uint16_t height;
-  uint8_t  stream_active;
-#ifdef USE_FACE_DETECT
-  uint8_t  faces;
-  uint16_t face_detect_time;
+#ifndef MAX_PICSTORE
+#define MAX_PICSTORE 4
 #endif
-} Wc;
+struct PICSTORE {
+  uint8_t *buff;
+  uint32_t len;
+};
 
 #ifdef ENABLE_RTSPSERVER
 #include <OV2640.h>
 #include <SimStreamer.h>
 #include <OV2640Streamer.h>
 #include <CRtspSession.h>
-WiFiServer rtspServer(8554);
-CStreamer *rtsp_streamer;
-CRtspSession *rtsp_session;
-WiFiClient rtsp_client;
-uint8_t rtsp_start;
-OV2640 cam;
-#endif
+#ifndef RTSP_FRAME_TIME
+#define RTSP_FRAME_TIME 100
+#endif // RTSP_FRAME_TIME
+#endif // ENABLE_RTSPSERVER
+
+struct {
+  uint8_t  up;
+  uint16_t width;
+  uint16_t height;
+  uint8_t  stream_active;
+  WiFiClient client;
+  ESP8266WebServer *CamServer;
+  struct PICSTORE picstore[MAX_PICSTORE];
+#ifdef USE_FACE_DETECT
+  uint8_t  faces;
+  uint16_t face_detect_time;
+  uint32_t face_ltime;
+  mtmn_config_t mtmn_config = {0};
+#endif // USE_FACE_DETECT
+#ifdef ENABLE_RTSPSERVER
+  WiFiServer *rtspp;
+  CStreamer *rtsp_streamer;
+  CRtspSession *rtsp_session;
+  WiFiClient rtsp_client;
+  uint8_t rtsp_start;
+  OV2640 cam;
+  uint32_t rtsp_lastframe_time;
+#endif // ENABLE_RTSPSERVER
+} Wc;
+
+
 /*********************************************************************************************/
@@ -336,18 +357,21 @@ uint32_t WcGetHeight(void) {
 
/*********************************************************************************************/
 
+struct WC_Motion {
 uint16_t motion_detect;
 uint32_t motion_ltime;
 uint32_t motion_trigger;
 uint32_t motion_brightness;
 uint8_t *last_motion_buffer;
+} wc_motion;
+
 
 uint32_t WcSetMotionDetect(int32_t value) {
-  if (value >= 0) { motion_detect = value; }
+  if (value >= 0) { wc_motion.motion_detect = value; }
   if (-1 == value) {
-    return motion_trigger;
+    return wc_motion.motion_trigger;
   } else {
-    return motion_brightness;
+    return wc_motion.motion_brightness;
   }
 }
@@ -356,22 +380,22 @@ void WcDetectMotion(void) {
   camera_fb_t *wc_fb;
   uint8_t *out_buf = 0;
 
-  if ((millis()-motion_ltime) > motion_detect) {
-    motion_ltime = millis();
+  if ((millis()-wc_motion.motion_ltime) > wc_motion.motion_detect) {
+    wc_motion.motion_ltime = millis();
     wc_fb = esp_camera_fb_get();
     if (!wc_fb) { return; }
 
-    if (!last_motion_buffer) {
-      last_motion_buffer=(uint8_t *)heap_caps_malloc((wc_fb->width*wc_fb->height)+4, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
+    if (!wc_motion.last_motion_buffer) {
+      wc_motion.last_motion_buffer = (uint8_t *)heap_caps_malloc((wc_fb->width*wc_fb->height) + 4, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
     }
-    if (last_motion_buffer) {
+    if (wc_motion.last_motion_buffer) {
      if (PIXFORMAT_JPEG == wc_fb->format) {
        out_buf = (uint8_t *)heap_caps_malloc((wc_fb->width*wc_fb->height*3)+4, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
        if (out_buf) {
          fmt2rgb888(wc_fb->buf, wc_fb->len, wc_fb->format, out_buf);
          uint32_t x, y;
          uint8_t *pxi = out_buf;
-          uint8_t *pxr = last_motion_buffer;
+          uint8_t *pxr = wc_motion.last_motion_buffer;
          // convert to bw
          uint64_t accu = 0;
          uint64_t bright = 0;
@@ -386,8 +410,8 @@ void WcDetectMotion(void) {
              bright += gray;
            }
          }
-          motion_trigger = accu / ((wc_fb->height * wc_fb->width) / 100);
-          motion_brightness = bright / ((wc_fb->height * wc_fb->width) / 100);
+          wc_motion.motion_trigger = accu / ((wc_fb->height * wc_fb->width) / 100);
+          wc_motion.motion_brightness = bright / ((wc_fb->height * wc_fb->width) / 100);
          free(out_buf);
        }
      }
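For reference on the two formulas kept in the hunk above: `accu` and `bright` are sums taken over every pixel of the frame (the per-pixel loop itself sits outside this hunk; the accumulation of the difference against the previous frame is assumed here), and dividing by `(width*height)/100` turns each sum into an average per pixel scaled by 100. A standalone sketch of that normalisation:

```
// Normalisation used by WcDetectMotion(): sum over all pixels, then divide by
// 1% of the pixel count, i.e. report "average per-pixel value x 100".
#include <cstdint>
#include <cstdio>
#include <cstdlib>

int main() {
  const uint32_t width = 320, height = 240;
  uint64_t accu = 0, bright = 0;
  for (uint32_t i = 0; i < width * height; i++) {
    uint8_t gray = rand() & 0xFF;           // stand-in for the current grayscale pixel
    uint8_t last = rand() & 0xFF;           // stand-in for the stored previous frame
    accu  += abs((int)gray - (int)last);    // assumed frame-difference accumulation
    bright += gray;
  }
  uint32_t motion_trigger    = accu  / ((height * width) / 100);
  uint32_t motion_brightness = bright / ((height * width) / 100);
  // e.g. an average difference of 12 per pixel reports motion_trigger of about 1200
  printf("trigger=%u brightness=%u\n", (unsigned)motion_trigger, (unsigned)motion_brightness);
  return 0;
}
```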
@@ -400,22 +424,20 @@ void WcDetectMotion(void) {
 
 #ifdef USE_FACE_DETECT
 
-static mtmn_config_t mtmn_config = {0};
-
 void fd_init(void) {
-  mtmn_config.type = FAST;
-  mtmn_config.min_face = 80;
-  mtmn_config.pyramid = 0.707;
-  mtmn_config.pyramid_times = 4;
-  mtmn_config.p_threshold.score = 0.6;
-  mtmn_config.p_threshold.nms = 0.7;
-  mtmn_config.p_threshold.candidate_number = 20;
-  mtmn_config.r_threshold.score = 0.7;
-  mtmn_config.r_threshold.nms = 0.7;
-  mtmn_config.r_threshold.candidate_number = 10;
-  mtmn_config.o_threshold.score = 0.7;
-  mtmn_config.o_threshold.nms = 0.7;
-  mtmn_config.o_threshold.candidate_number = 1;
+  Wc.mtmn_config.type = FAST;
+  Wc.mtmn_config.min_face = 80;
+  Wc.mtmn_config.pyramid = 0.707;
+  Wc.mtmn_config.pyramid_times = 4;
+  Wc.mtmn_config.p_threshold.score = 0.6;
+  Wc.mtmn_config.p_threshold.nms = 0.7;
+  Wc.mtmn_config.p_threshold.candidate_number = 20;
+  Wc.mtmn_config.r_threshold.score = 0.7;
+  Wc.mtmn_config.r_threshold.nms = 0.7;
+  Wc.mtmn_config.r_threshold.candidate_number = 10;
+  Wc.mtmn_config.o_threshold.score = 0.7;
+  Wc.mtmn_config.o_threshold.nms = 0.7;
+  Wc.mtmn_config.o_threshold.candidate_number = 1;
 }
 
 #define FACE_COLOR_WHITE 0x00FFFFFF
@@ -473,8 +495,6 @@ uint32_t WcSetFaceDetect(int32_t value) {
   return Wc.faces;
 }
 
-uint32_t face_ltime;
-
 uint32_t WcDetectFace(void);
 
 uint32_t WcDetectFace(void) {
@@ -486,8 +506,8 @@ uint32_t WcDetectFace(void) {
   int face_id = 0;
   camera_fb_t *fb;
 
-  if ((millis() - face_ltime) > Wc.face_detect_time) {
-    face_ltime = millis();
+  if ((millis() - Wc.face_ltime) > Wc.face_detect_time) {
+    Wc.face_ltime = millis();
     fb = esp_camera_fb_get();
     if (!fb) { return ESP_FAIL; }
 
@@ -511,7 +531,7 @@ uint32_t WcDetectFace(void) {
       return ESP_FAIL;
     }
 
-    box_array_t *net_boxes = face_detect(image_matrix, &mtmn_config);
+    box_array_t *net_boxes = face_detect(image_matrix, &Wc.mtmn_config);
     if (net_boxes){
       detected = true;
       Wc.faces = net_boxes->len;
@@ -536,15 +556,6 @@ uint32_t WcDetectFace(void) {
 
/*********************************************************************************************/
 
-#ifndef MAX_PICSTORE
-#define MAX_PICSTORE 4
-#endif
-struct PICSTORE {
-  uint8_t *buff;
-  uint32_t len;
-};
-
-struct PICSTORE picstore[MAX_PICSTORE];
 
 #ifdef COPYFRAME
 struct PICSTORE tmp_picstore;
@@ -552,8 +563,8 @@ struct PICSTORE tmp_picstore;
 
 uint32_t WcGetPicstore(int32_t num, uint8_t **buff) {
   if (num<0) { return MAX_PICSTORE; }
-  *buff = picstore[num].buff;
-  return picstore[num].len;
+  *buff = Wc.picstore[num].buff;
+  return Wc.picstore[num].len;
 }
 
 uint32_t WcGetFrame(int32_t bnum) {
@@ -566,8 +577,8 @@ uint32_t WcGetFrame(int32_t bnum) {
     if (bnum < -MAX_PICSTORE) { bnum=-1; }
     bnum = -bnum;
     bnum--;
-    if (picstore[bnum].buff) { free(picstore[bnum].buff); }
-    picstore[bnum].len = 0;
+    if (Wc.picstore[bnum].buff) { free(Wc.picstore[bnum].buff); }
+    Wc.picstore[bnum].len = 0;
     return 0;
   }
@@ -608,18 +619,18 @@ uint32_t WcGetFrame(int32_t bnum) {
 
 pcopy:
   if ((bnum < 1) || (bnum > MAX_PICSTORE)) { bnum = 1; }
   bnum--;
-  if (picstore[bnum].buff) { free(picstore[bnum].buff); }
-  picstore[bnum].buff = (uint8_t *)heap_caps_malloc(_jpg_buf_len+4, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
-  if (picstore[bnum].buff) {
-    memcpy(picstore[bnum].buff, _jpg_buf, _jpg_buf_len);
-    picstore[bnum].len = _jpg_buf_len;
+  if (Wc.picstore[bnum].buff) { free(Wc.picstore[bnum].buff); }
+  Wc.picstore[bnum].buff = (uint8_t *)heap_caps_malloc(_jpg_buf_len+4, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
+  if (Wc.picstore[bnum].buff) {
+    memcpy(Wc.picstore[bnum].buff, _jpg_buf, _jpg_buf_len);
+    Wc.picstore[bnum].len = _jpg_buf_len;
   } else {
     AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Can't allocate picstore"));
-    picstore[bnum].len = 0;
+    Wc.picstore[bnum].len = 0;
   }
   if (wc_fb) { esp_camera_fb_return(wc_fb); }
   if (jpeg_converted) { free(_jpg_buf); }
-  if (!picstore[bnum].buff) { return 0; }
+  if (!Wc.picstore[bnum].buff) { return 0; }
 
   return _jpg_buf_len;
 }
@@ -657,11 +668,11 @@ void HandleImage(void) {
     if (wc_fb) { esp_camera_fb_return(wc_fb); }
   } else {
     bnum--;
-    if (!picstore[bnum].len) {
+    if (!Wc.picstore[bnum].len) {
       AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: No image #: %d"), bnum);
       return;
     }
-    client.write((char *)picstore[bnum].buff, picstore[bnum].len);
+    client.write((char *)Wc.picstore[bnum].buff, Wc.picstore[bnum].len);
   }
   client.stop();
@@ -674,7 +685,7 @@ void HandleImageBasic(void) {
   AddLog_P(LOG_LEVEL_DEBUG_MORE, PSTR(D_LOG_HTTP "Capture image"));
 
   if (Settings.webcam_config.stream) {
-    if (!CamServer) {
+    if (!Wc.CamServer) {
       WcStreamControl();
     }
   }
@@ -717,7 +728,7 @@ void HandleWebcamMjpeg(void) {
//  if (!Wc.stream_active) {
// always restart stream
     Wc.stream_active = 1;
-    client = CamServer->client();
+    Wc.client = Wc.CamServer->client();
     AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Create client"));
//  }
 }
@@ -731,15 +742,15 @@ void HandleWebcamMjpegTask(void) {
   uint32_t tlen;
   bool jpeg_converted = false;
 
-  if (!client.connected()) {
+  if (!Wc.client.connected()) {
     AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Client fail"));
     Wc.stream_active = 0;
   }
   if (1 == Wc.stream_active) {
-    client.flush();
-    client.setTimeout(3);
+    Wc.client.flush();
+    Wc.client.setTimeout(3);
     AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Start stream"));
-    client.print("HTTP/1.1 200 OK\r\n"
+    Wc.client.print("HTTP/1.1 200 OK\r\n"
       "Content-Type: multipart/x-mixed-replace;boundary=" BOUNDARY "\r\n"
       "\r\n");
     Wc.stream_active = 2;
@@ -764,17 +775,17 @@ void HandleWebcamMjpegTask(void) {
       _jpg_buf = wc_fb->buf;
     }
 
-    client.printf("Content-Type: image/jpeg\r\n"
+    Wc.client.printf("Content-Type: image/jpeg\r\n"
       "Content-Length: %d\r\n"
      "\r\n", static_cast<int>(_jpg_buf_len));
-    tlen = client.write(_jpg_buf, _jpg_buf_len);
+    tlen = Wc.client.write(_jpg_buf, _jpg_buf_len);
/*
    if (tlen!=_jpg_buf_len) {
      esp_camera_fb_return(wc_fb);
      Wc.stream_active=0;
      AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Send fail"));
    }*/
-    client.print("\r\n--" BOUNDARY "\r\n");
+    Wc.client.print("\r\n--" BOUNDARY "\r\n");
 
#ifdef COPYFRAME
    if (tmp_picstore.buff) { free(tmp_picstore.buff); }
@@ -793,15 +804,15 @@ void HandleWebcamMjpegTask(void) {
   }
   if (0 == Wc.stream_active) {
     AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Stream exit"));
-    client.flush();
-    client.stop();
+    Wc.client.flush();
+    Wc.client.stop();
   }
 }
 
 void HandleWebcamRoot(void) {
//  CamServer->redirect("http://" + String(ip) + ":81/cam.mjpeg");
-  CamServer->sendHeader("Location", WiFi.localIP().toString() + ":81/cam.mjpeg");
-  CamServer->send(302, "", "");
+  Wc.CamServer->sendHeader("Location", WiFi.localIP().toString() + ":81/cam.mjpeg");
+  Wc.CamServer->send(302, "", "");
   AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Root called"));
 }
@@ -813,20 +824,20 @@ uint32_t WcSetStreamserver(uint32_t flag) {
 
   Wc.stream_active = 0;
 
   if (flag) {
-    if (!CamServer) {
-      CamServer = new ESP8266WebServer(81);
-      CamServer->on("/", HandleWebcamRoot);
-      CamServer->on("/cam.mjpeg", HandleWebcamMjpeg);
-      CamServer->on("/cam.jpg", HandleWebcamMjpeg);
-      CamServer->on("/stream", HandleWebcamMjpeg);
+    if (!Wc.CamServer) {
+      Wc.CamServer = new ESP8266WebServer(81);
+      Wc.CamServer->on("/", HandleWebcamRoot);
+      Wc.CamServer->on("/cam.mjpeg", HandleWebcamMjpeg);
+      Wc.CamServer->on("/cam.jpg", HandleWebcamMjpeg);
+      Wc.CamServer->on("/stream", HandleWebcamMjpeg);
       AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Stream init"));
-      CamServer->begin();
+      Wc.CamServer->begin();
     }
   } else {
-    if (CamServer) {
-      CamServer->stop();
-      delete CamServer;
-      CamServer = NULL;
+    if (Wc.CamServer) {
+      Wc.CamServer->stop();
+      delete Wc.CamServer;
+      Wc.CamServer = NULL;
       AddLog_P(LOG_LEVEL_DEBUG, PSTR("CAM: Stream exit"));
     }
   }
@@ -839,61 +850,58 @@ void WcStreamControl() {
 }
 
/*********************************************************************************************/
-#ifdef ENABLE_RTSPSERVER
-static uint32_t rtsp_lastframe_time;
-#ifndef RTSP_FRAME_TIME
-#define RTSP_FRAME_TIME 100
-#endif
-#endif
+
 
 void WcLoop(void) {
-  if (CamServer) {
-    CamServer->handleClient();
+  if (Wc.CamServer) {
+    Wc.CamServer->handleClient();
     if (Wc.stream_active) { HandleWebcamMjpegTask(); }
   }
-  if (motion_detect) { WcDetectMotion(); }
+  if (wc_motion.motion_detect) { WcDetectMotion(); }
#ifdef USE_FACE_DETECT
   if (Wc.face_detect_time) { WcDetectFace(); }
#endif
 
#ifdef ENABLE_RTSPSERVER
+  if (Settings.webcam_config.rtsp && !TasmotaGlobal.global_state.wifi_down && Wc.up) {
+    if (!Wc.rtsp_start) {
+      Wc.rtspp = new WiFiServer(8554);
+      Wc.rtspp->begin();
+      Wc.rtsp_start = 1;
+      AddLog_P(LOG_LEVEL_INFO, PSTR("CAM: RTSP init"));
+      Wc.rtsp_lastframe_time = millis();
+    }
-  if (!rtsp_start && !TasmotaGlobal.global_state.wifi_down && Wc.up) {
-    rtspServer.begin();
-    rtsp_start = 1;
-    AddLog_P(LOG_LEVEL_INFO, PSTR("CAM: RTSP init"));
-    rtsp_lastframe_time = millis();
-  }
-
-  // If we have an active client connection, just service that until gone
-  if (rtsp_session) {
-    rtsp_session->handleRequests(0); // we don't use a timeout here,
+    // If we have an active client connection, just service that until gone
+    if (Wc.rtsp_session) {
+      Wc.rtsp_session->handleRequests(0); // we don't use a timeout here,
     // instead we send only if we have new enough frames
 
     uint32_t now = millis();
-    if ((now-rtsp_lastframe_time) > RTSP_FRAME_TIME) {
-      rtsp_session->broadcastCurrentFrame(now);
-      rtsp_lastframe_time = now;
+      if ((now-Wc.rtsp_lastframe_time) > RTSP_FRAME_TIME) {
+        Wc.rtsp_session->broadcastCurrentFrame(now);
+        Wc.rtsp_lastframe_time = now;
//        AddLog_P(LOG_LEVEL_INFO, PSTR("CAM: RTSP session frame"));
     }
 
-    if (rtsp_session->m_stopped) {
-      delete rtsp_session;
-      delete rtsp_streamer;
-      rtsp_session = NULL;
-      rtsp_streamer = NULL;
+      if (Wc.rtsp_session->m_stopped) {
+        delete Wc.rtsp_session;
+        delete Wc.rtsp_streamer;
+        Wc.rtsp_session = NULL;
+        Wc.rtsp_streamer = NULL;
        AddLog_P(LOG_LEVEL_INFO, PSTR("CAM: RTSP stopped"));
      }
-  }
-  else {
-    rtsp_client = rtspServer.accept();
-    if (rtsp_client) {
-      rtsp_streamer = new OV2640Streamer(&rtsp_client, cam);        // our streamer for UDP/TCP based RTP transport
-      rtsp_session = new CRtspSession(&rtsp_client, rtsp_streamer); // our threads RTSP session and state
+    }
+    else {
+      Wc.rtsp_client = Wc.rtspp->accept();
+      if (Wc.rtsp_client) {
+        Wc.rtsp_streamer = new OV2640Streamer(&Wc.rtsp_client, Wc.cam);           // our streamer for UDP/TCP based RTP transport
+        Wc.rtsp_session = new CRtspSession(&Wc.rtsp_client, Wc.rtsp_streamer);    // our threads RTSP session and state
        AddLog_P(LOG_LEVEL_INFO, PSTR("CAM: RTSP stream created"));
      }
+    }
   }
-#endif
+#endif // ENABLE_RTSPSERVER
 }
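A note on the pacing test used in WcLoop above: `millis()` returns a uint32_t that wraps after roughly 49.7 days, and writing the check as a subtraction, `(now - Wc.rtsp_lastframe_time) > RTSP_FRAME_TIME`, stays correct across that wrap because unsigned arithmetic is modulo 2^32. (The README example instead guards the wrap explicitly with `now < lastimage`; both forms work.) A tiny demonstration:

```
// Rollover-safe elapsed-time check with unsigned 32-bit millisecond counters.
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t RTSP_FRAME_TIME = 100;
  uint32_t last = 0xFFFFFFF0u;          // shortly before the counter wraps
  uint32_t now  = 0x00000060u;          // shortly after the wrap
  uint32_t elapsed = now - last;        // 0x70 = 112 ms, despite now < last
  printf("elapsed=%u send=%d\n", (unsigned)elapsed, (int)(elapsed > RTSP_FRAME_TIME));
  return 0;
}
```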
 
 void WcPicSetup(void) {
@@ -904,12 +912,12 @@ void WcPicSetup(void) {
 
 void WcShowStream(void) {
   if (Settings.webcam_config.stream) {
-//    if (!CamServer || !Wc.up) {
-    if (!CamServer) {
+//    if (!Wc.CamServer || !Wc.up) {
+    if (!Wc.CamServer) {
       WcStreamControl();
       delay(50);   // Give the webcam webserver some time to prepare the stream
     }
-    if (CamServer && Wc.up) {
+    if (Wc.CamServer && Wc.up) {
       WSContentSend_P(PSTR("<p></p><center><img src='http://%s:81/stream' alt='Webcam stream'></center><p></p>"),
         WiFi.localIP().toString().c_str());
     }
@@ -941,24 +949,39 @@ void WcInit(void) {
 #define D_CMND_WC_BRIGHTNESS "Brightness"
 #define D_CMND_WC_CONTRAST "Contrast"
 #define D_CMND_WC_INIT "Init"
+#define D_CMND_RTSP "Rtsp"
 
 const char kWCCommands[] PROGMEM = D_PRFX_WEBCAM "|"  // Prefix
   "|" D_CMND_WC_STREAM "|" D_CMND_WC_RESOLUTION "|" D_CMND_WC_MIRROR "|" D_CMND_WC_FLIP "|"
   D_CMND_WC_SATURATION "|" D_CMND_WC_BRIGHTNESS "|" D_CMND_WC_CONTRAST "|" D_CMND_WC_INIT
+#ifdef ENABLE_RTSPSERVER
+  "|" D_CMND_RTSP
+#endif // ENABLE_RTSPSERVER
   ;
 
 void (* const WCCommand[])(void) PROGMEM = {
   &CmndWebcam, &CmndWebcamStream, &CmndWebcamResolution, &CmndWebcamMirror, &CmndWebcamFlip,
   &CmndWebcamSaturation, &CmndWebcamBrightness, &CmndWebcamContrast, &CmndWebcamInit
+#ifdef ENABLE_RTSPSERVER
+  , &CmndWebRtsp
+#endif // ENABLE_RTSPSERVER
   };
 
 void CmndWebcam(void) {
   Response_P(PSTR("{\"" D_PRFX_WEBCAM "\":{\"" D_CMND_WC_STREAM "\":%d,\"" D_CMND_WC_RESOLUTION "\":%d,\"" D_CMND_WC_MIRROR "\":%d,\"" D_CMND_WC_FLIP "\":%d,\""
-    D_CMND_WC_SATURATION "\":%d,\"" D_CMND_WC_BRIGHTNESS "\":%d,\"" D_CMND_WC_CONTRAST "\":%d}}"),
+    D_CMND_WC_SATURATION "\":%d,\"" D_CMND_WC_BRIGHTNESS "\":%d,\"" D_CMND_WC_CONTRAST "\":%d"
+#ifdef ENABLE_RTSPSERVER
+    ",\"" D_CMND_RTSP "\":%d"
+#endif // ENABLE_RTSPSERVER
+    "}}"),
     Settings.webcam_config.stream, Settings.webcam_config.resolution, Settings.webcam_config.mirror, Settings.webcam_config.flip,
-    Settings.webcam_config.saturation -2, Settings.webcam_config.brightness -2, Settings.webcam_config.contrast -2);
+    Settings.webcam_config.saturation -2, Settings.webcam_config.brightness -2, Settings.webcam_config.contrast -2
+#ifdef ENABLE_RTSPSERVER
+    , Settings.webcam_config.rtsp
+#endif // ENABLE_RTSPSERVER
+    );
 }
 
 void CmndWebcamStream(void) {
@@ -1022,6 +1045,17 @@ void CmndWebcamInit(void) {
   ResponseCmndDone();
 }
 
+#ifdef ENABLE_RTSPSERVER
+void CmndWebRtsp(void) {
+  if ((XdrvMailbox.payload >= 0) && (XdrvMailbox.payload <= 1)) {
+    Settings.webcam_config.rtsp = XdrvMailbox.payload;
+    TasmotaGlobal.restart_flag = 2;
+  }
+  ResponseCmndStateText(Settings.webcam_config.rtsp);
+}
+#endif // ENABLE_RTSPSERVER
+
+
/*********************************************************************************************\
 * Interface
\*********************************************************************************************/