add AAC support

parent 2561a07d86
commit afc7c86eac
@@ -8,66 +8,78 @@
 #include "zm_rtsp_server_server_media_subsession.h"
 #include "zm_rtsp_server_device_source.h"
+#include "zm_rtsp_server_adts_source.h"
 
 // ---------------------------------
 // BaseServerMediaSubsession
 // ---------------------------------
 FramedSource* BaseServerMediaSubsession::createSource(
-    UsageEnvironment& env, FramedSource* videoES, const std::string& format)
+    UsageEnvironment& env, FramedSource* inputSource, const std::string& format)
 {
   FramedSource* source = nullptr;
   if ( format == "video/MP2T" ) {
-    source = MPEG2TransportStreamFramer::createNew(env, videoES);
+    source = MPEG2TransportStreamFramer::createNew(env, inputSource);
   } else if ( format == "video/H264" ) {
-    source = H264VideoStreamDiscreteFramer::createNew(env, videoES);
+    source = H264VideoStreamDiscreteFramer::createNew(env, inputSource);
   }
 #if LIVEMEDIA_LIBRARY_VERSION_INT > 1414454400
   else if ( format == "video/H265" ) {
-    source = H265VideoStreamDiscreteFramer::createNew(env, videoES);
+    source = H265VideoStreamDiscreteFramer::createNew(env, inputSource);
   }
 #endif
 #if 0
   else if (format == "video/JPEG") {
-    source = MJPEGVideoSource::createNew(env, videoES);
+    source = MJPEGVideoSource::createNew(env, inputSource);
   }
 #endif
   else {
-    source = videoES;
+    source = inputSource;
   }
-  Error("Source %p %s", source, format.c_str());
   return source;
 }
 
+/* source is generally a replica */
 RTPSink* BaseServerMediaSubsession::createSink(
     UsageEnvironment& env,
     Groupsock* rtpGroupsock,
     unsigned char rtpPayloadTypeIfDynamic,
-    const std::string& format
+    const std::string& format,
+    FramedSource *source
     ) {
-  RTPSink* videoSink = nullptr;
+  RTPSink* sink = nullptr;
   if ( format == "video/MP2T" ) {
-    videoSink = SimpleRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic, 90000, "video", "MP2T", 1, True, False);
+    sink = SimpleRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic, 90000, "video", "MP2T", 1, true, false);
   } else if ( format == "video/H264" ) {
-    videoSink = H264VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
+    sink = H264VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
   } else if ( format == "video/VP8" ) {
-    videoSink = VP8VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
+    sink = VP8VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
   }
 #if LIVEMEDIA_LIBRARY_VERSION_INT > 1414454400
   else if ( format == "video/VP9" ) {
-    videoSink = VP9VideoRTPSink::createNew(env, rtpGroupsock,rtpPayloadTypeIfDynamic);
+    sink = VP9VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
   } else if ( format == "video/H265" ) {
-    videoSink = H265VideoRTPSink::createNew(env, rtpGroupsock,rtpPayloadTypeIfDynamic);
+    sink = H265VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
 #endif
+  } else if ( format == "audio/AAC" ) {
+    ADTS_ZoneMinderDeviceSource *adts_source = (ADTS_ZoneMinderDeviceSource *)(m_replicator->inputSource());
+    sink = MPEG4GenericRTPSink::createNew(env, rtpGroupsock,
+        rtpPayloadTypeIfDynamic,
+        adts_source->samplingFrequency(),
+        "audio", "AAC-hbr",
+        adts_source->configStr(),
+        adts_source->numChannels()
+        );
   } else {
-    std::cerr << "unknown format\n";
+    Error("unknown format");
   }
 #if 0
   else if (format == "video/JPEG") {
-    videoSink = JPEGVideoRTPSink::createNew (env, rtpGroupsock);
+    sink = JPEGVideoRTPSink::createNew (env, rtpGroupsock);
   }
 #endif
-  Error("Sink %p %s", videoSink, format.c_str());
-  return videoSink;
+  return sink;
 }
 
 char const* BaseServerMediaSubsession::getAuxLine(
@@ -86,7 +98,7 @@ char const* BaseServerMediaSubsession::getAuxLine(
       os << "a=x-dimensions:" << width << "," << height << "\r\n";
     }
     auxLine = strdup(os.str().c_str());
-    Error("auxLine: %s", auxLine);
+    Debug(1, "auxLine: %s", auxLine);
   } else {
     Error("No source auxLine: ");
   }
|
|
|
@@ -16,23 +16,30 @@
 #include <iostream>
 #include <fstream>
 
-// live555
 #include <liveMedia.hh>
 
-// forward declaration
 class ZoneMinderDeviceSource;
 
-// ---------------------------------
-// BaseServerMediaSubsession
-// ---------------------------------
 class BaseServerMediaSubsession {
   public:
-    BaseServerMediaSubsession(StreamReplicator* replicator): m_replicator(replicator) {};
+    BaseServerMediaSubsession(StreamReplicator* replicator):
+      m_replicator(replicator) {};
 
-  public:
-    static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, const std::string& format);
-    static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format);
-    char const* getAuxLine(ZoneMinderDeviceSource* source, unsigned char rtpPayloadType);
+    FramedSource* createSource(
+        UsageEnvironment& env,
+        FramedSource * videoES,
+        const std::string& format);
+
+    RTPSink * createSink(
+        UsageEnvironment& env,
+        Groupsock * rtpGroupsock,
+        unsigned char rtpPayloadTypeIfDynamic,
+        const std::string& format,
+        FramedSource *source);
+
+    char const* getAuxLine(
+        ZoneMinderDeviceSource* source,
+        unsigned char rtpPayloadType);
 
   protected:
     StreamReplicator* m_replicator;
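
Note on the new audio/AAC branch in createSink(): it assumes the replicator's input source is an ADTS_ZoneMinderDeviceSource and queries it for the three values MPEG4GenericRTPSink needs to describe the stream (sampling frequency, the hex AudioSpecificConfig string, and channel count). A minimal sketch of that assumed interface follows; the three accessor names come from the calls in the diff, while the fields and everything else are illustrative and may differ from the real class in zm_rtsp_server_adts_source.h.

#include <liveMedia.hh>

// Sketch only: inferred from the calls in createSink(); the real
// ADTS_ZoneMinderDeviceSource in zm_rtsp_server_adts_source.h may differ.
class ADTS_ZoneMinderDeviceSource : public FramedSource {
 public:
  // Sampling rate taken from the ADTS header (e.g. 44100 or 48000 Hz),
  // passed to MPEG4GenericRTPSink as the RTP timestamp frequency.
  unsigned samplingFrequency() const { return m_frequency; }

  // Channel count from the ADTS header (1 = mono, 2 = stereo).
  unsigned numChannels() const { return m_channels; }

  // AudioSpecificConfig as a hex string; it ends up in the SDP
  // "a=fmtp:<pt> ... config=<value>" attribute that clients use to set up the decoder.
  const char *configStr() const { return m_configStr; }

 protected:
  explicit ADTS_ZoneMinderDeviceSource(UsageEnvironment &env) : FramedSource(env) {}
  void doGetNextFrame() override {}  // delivery of ADTS frames omitted in this sketch

  unsigned m_frequency = 0;
  unsigned m_channels = 0;
  char m_configStr[5] = {0};  // typically 2 config bytes rendered as 4 hex chars for AAC-LC
};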
|
|