//================================================================
// testH264VideoStreamer.cpp: program that reads a H.264 Transport Stream live video source
// and streams it using RTP.
//=================================================================
#include "stdafx.h"
#include <WinSock2.h>
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include "grabber.h"
#include "lm555interface.h"
#include <MPEG4VideoStreamDiscreteFramer.hh>
#include <DeviceSourceFICard.hh>
#include <DeviceSource.hh>
/*#pragma comment(lib, "libliveMedia.lib" )
#pragma comment(lib, "libgroupsock.lib" )
#pragma comment(lib, "libBasicUsageEnvironment.lib")
#pragma comment(lib, "libUsageEnvironment.lib")*/
#define TRANSPORT_PACKET_SIZE 188
#define TRANSPORT_PACKETS_PER_NETWORK_PACKET 7
#define IMPLEMENT_RTSP_SERVER 1
// To stream using "source-specific multicast" (SSM), uncomment the following:
//#define USE_SSM 1
#ifdef USE_SSM
Boolean const isSSM = True;
#else
Boolean const isSSM = False;
#endif
// Global Variables:
UsageEnvironment* env;
char const* inputFileName = "test.ts";
char const* OutFileName = "testOut.ts";
FramedSource* videoSource;
RTPSink* videoSink;
DeviceSourceFICard* fileSource;
FICardDeviceParameters fi_params;
void initPlay();
void play(); // forward
HANDLE g_hRtpComThread;
DWORD g_dwRtpComThreadID;
//================================================================
// initLm555Settings(): Initialize live media 555 related settings.
// Create video device input source and RTP sink. Initialize RTP
// server and start playing the input source.
//=================================================================
int initLm555Settings(void) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
// Create 'groupsocks' for RTP and RTCP:
char const* destinationAddressStr = "192.168.15.196";
struct in_addr destinationAddress;
destinationAddress.s_addr = our_inet_addr(destinationAddressStr);
//destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
// Note: This is a multicast address.
const unsigned short rtpPortNum = 18888;
const unsigned short rtcpPortNum = rtpPortNum+1;
const unsigned char ttl = 7; //255;
const Port rtpPort(rtpPortNum);
const Port rtcpPort(rtcpPortNum);
Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
rtpGroupsock.multicastSendOnly(); // we're a SSM source
Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
rtcpGroupsock.multicastSendOnly(); // we're a SSM source
// Create an appropriate 'RTP sink' from the RTP 'groupsock':
videoSink =
SimpleRTPSink::createNew(*env, &rtpGroupsock, 33, 90000, "video", "MP2T",
1, True, False /*no 'M' bit*/);
// Create (and start) a 'RTCP instance' for this RTP sink:
const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share
const unsigned maxCNAMElen = 100;
unsigned char CNAME[maxCNAMElen+1];
gethostname((char*)CNAME, maxCNAMElen);
CNAME[maxCNAMElen] = '\0'; // just in case
#ifdef IMPLEMENT_RTSP_SERVER
RTCPInstance* rtcp =
#endif
RTCPInstance::createNew(*env, &rtcpGroupsock,
estimatedSessionBandwidth, CNAME,
videoSink, NULL /* we're a server */, isSSM);
// Note: This starts RTCP running automatically
#ifdef IMPLEMENT_RTSP_SERVER
RTSPServer* rtspServer = RTSPServer::createNew(*env);
// Note that this (attempts to) start a server on the default RTSP server
// port: 554. To use a different port number, add it as an extra
// (optional) parameter to the "RTSPServer::createNew()" call above.
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
ServerMediaSession* sms
= ServerMediaSession::createNew(*env, "testStream", inputFileName,
"Session streamed by \"testMPEG2TransportStreamer\"",
isSSM);
sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
rtspServer->addServerMediaSession(sms);
char* url = rtspServer->rtspURL(sms);
*env << "Play this stream using the URL \"" << url << "\"\n";
//delete[] url;
// Start the streaming:
*env << "Beginning streaming...\n";
#endif
// initPlay();
play();
env->taskScheduler().doEventLoop(); // does not return
return 0; // only to prevent compiler warning
}
//================================================================
// afterPlaying(): Called when end of the video is detected.
//=================================================================
void afterPlaying(void* /*clientData*/) {
*env << "...done reading from file\n";
videoSink->stopPlaying();
// Note that this also closes the input file that this source read from.
Medium::close(videoSource);
// Start playing once again:
play();
}
//================================================================
// play(): Play the input source.
//=================================================================
void play() {
// Open the input file as a 'byte-stream file source':
fi_params.nFICardFrameSize = TRANSPORT_PACKETS_PER_NETWORK_PACKET * TRANSPORT_PACKET_SIZE;//Here i get the data
fi_params.p_lm_lock_fn = lm_lock_fn;
fi_params.p_lm_unlock_fn = lm_unlock_fn;
DeviceParameters temp;
fileSource = DeviceSourceFICard::createNew(*env, fi_params, temp);
if (fileSource == NULL) {
*env << "Unable to open file \"" << inputFileName
<< "\" as a byte-stream file source\n";
exit(1);
}
FramedSource* videoES = fileSource;
// Create a framer for the Video Elementary Stream:
videoSource = MPEG1or2VideoStreamDiscreteFramer::createNew(*env, videoES);
// Finally, start playing:
*env << "Beginning to read from file...\n";
videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
//================================================================
// initPlay(): Initialize device source parameter before playing
//the input source.
//=================================================================
void initPlay(){
// Open the input source
FICardDeviceParameters fi_params;
fi_params.nFICardFrameSize = TRANSPORT_PACKETS_PER_NETWORK_PACKET * TRANSPORT_PACKET_SIZE;
fi_params.p_lm_lock_fn = lm_lock_fn;
fi_params.p_lm_unlock_fn = lm_unlock_fn;
}
//================================================================
// StartRTPProcess(): Invoke the thread which manages the livemedia
// communication..
//=================================================================
void StartRTPProcess(void)
{
g_hRtpComThread = CreateThread((LPSECURITY_ATTRIBUTES) NULL, 0,
(LPTHREAD_START_ROUTINE)initLm555Settings, 0, 0, &g_dwRtpComThreadID);
if(g_hRtpComThread) SetThreadPriority(g_hRtpComThread, THREAD_PRIORITY_LOWEST/*THREAD_PRIORITY_NORMAL*/);
}