/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2004 Live Networks, Inc. All rights reserved.
// A simplified version of "MPEG1or2VideoStreamFramer" that takes only
// complete, discrete frames (rather than an arbitrary byte stream) as input.
// This avoids the parsing and data copying overhead of the full
// "MPEG1or2VideoStreamFramer".
// Implementation
#include "MPEG1or2VideoStreamDiscreteFramer.hh"
MPEG1or2VideoStreamDiscreteFramer*
MPEG1or2VideoStreamDiscreteFramer::createNew(UsageEnvironment& env,
                                             FramedSource* inputSource,
                                             Boolean iFramesOnly,
                                             double vshPeriod) {
  // Need to add source type checking here??? #####
  return new MPEG1or2VideoStreamDiscreteFramer(env, inputSource,
                                               iFramesOnly, vshPeriod);
}
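// A minimal usage sketch (hypothetical names: "env" is an existing
// UsageEnvironment, and "videoES" is a FramedSource that delivers one
// complete MPEG-1/2 video frame per delivery; the parameter values shown
// are examples only):
//
//   MPEG1or2VideoStreamDiscreteFramer* framer
//     = MPEG1or2VideoStreamDiscreteFramer::createNew(env, videoES,
//                                                    False /*iFramesOnly*/,
//                                                    5.0 /*vshPeriod*/);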
MPEG1or2VideoStreamDiscreteFramer
::MPEG1or2VideoStreamDiscreteFramer(UsageEnvironment& env,
                                    FramedSource* inputSource,
                                    Boolean iFramesOnly, double vshPeriod)
  : MPEG1or2VideoStreamFramer(env, inputSource, iFramesOnly, vshPeriod,
                              False/*don't create a parser*/),
    fLastNonBFrameTemporal_reference(0) {
  fLastNonBFramePresentationTime.tv_sec = 0;
  fLastNonBFramePresentationTime.tv_usec = 0;
}
MPEG1or2VideoStreamDiscreteFramer::~MPEG1or2VideoStreamDiscreteFramer() {
}
void MPEG1or2VideoStreamDiscreteFramer::doGetNextFrame() {
  // Arrange to read data (which should be a complete MPEG-1 or 2 video frame)
  // from our data source, directly into the client's input buffer.
  // After reading this, we'll do some parsing on the frame.
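  // (The frame goes into "fTo", the client's buffer, and is limited to
  // "fMaxSize" bytes; any excess is expected to be reported by the upstream
  // source via "numTruncatedBytes", which we pass through to the client
  // below.)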
  fInputSource->getNextFrame(fTo, fMaxSize,
                             afterGettingFrame, this,
                             FramedSource::handleClosure, this);
}
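// Static 'after getting' callback: recover the object pointer from
// "clientData", then continue in the corresponding member function: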
void MPEG1or2VideoStreamDiscreteFramer
::afterGettingFrame(void* clientData, unsigned frameSize,
                    unsigned numTruncatedBytes,
                    struct timeval presentationTime,
                    unsigned durationInMicroseconds) {
  MPEG1or2VideoStreamDiscreteFramer* source
    = (MPEG1or2VideoStreamDiscreteFramer*)clientData;
  source->afterGettingFrame1(frameSize, numTruncatedBytes,
                             presentationTime, durationInMicroseconds);
}
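// Frame rates, indexed by the 4-bit 'frame_rate_code' from the MPEG-1/2
// video_sequence_header (codes 1-8 are defined by the specs; code 0 is
// forbidden, and codes 9-15 are reserved):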
static double const frameRateFromCode[] = {
  0.0,          // forbidden
  24000/1001.0, // approx 23.976
  24.0,
  25.0,
  30000/1001.0, // approx 29.97
  30.0,
  50.0,
  60000/1001.0, // approx 59.94
  60.0,
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0           // reserved
};
void MPEG1or2VideoStreamDiscreteFramer
::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes,
                     struct timeval presentationTime,
                     unsigned durationInMicroseconds) {
  // Check that the frame begins with a start code (0x00 0x00 0x01, followed
  // by a start-code byte):
  if (frameSize >= 4 && fTo[0] == 0 && fTo[1] == 0 && fTo[2] == 1) {
    fPictureEndMarker = True; // Assume that we have a complete 'picture' here
    u_int8_t nextCode = fTo[3];
    if (nextCode == 0xB3) { // VIDEO_SEQUENCE_HEADER_START_CODE
      // Note the following 'frame rate' code:
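      // ('frame_rate_code' is the low 4 bits of the 8th byte of the sequence
      // header, following the 12-bit horizontal_size, 12-bit vertical_size,
      // and 4-bit aspect ratio fields.)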
      if (frameSize >= 8) {
        u_int8_t frame_rate_code = fTo[7]&0x0F;
        fFrameRate = frameRateFromCode[frame_rate_code];
      }
    }
    unsigned i = 3;
    if (nextCode == 0xB3 /*VIDEO_SEQUENCE_HEADER_START_CODE*/ ||
        nextCode == 0xB8 /*GROUP_START_CODE*/) {
      // Skip to the following PICTURE_START_CODE (if any):
      for (i += 4; i < frameSize; ++i) {
        if (fTo[i] == 0x00 /*PICTURE_START_CODE*/
            && fTo[i-1] == 1 && fTo[i-2] == 0 && fTo[i-3] == 0) {
          nextCode = fTo[i];
          break;
        }
      }
    }
    if (nextCode == 0x00 /*PICTURE_START_CODE*/ && i+2 < frameSize) {
      // Get the 'temporal_reference' and 'picture_coding_type' from the
      // following 2 bytes:
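      // (In the MPEG-1/2 picture header, the 10 bits after the start code
      // are 'temporal_reference' - the picture's display order within its
      // GOP - followed by the 3-bit 'picture_coding_type': 1 = I, 2 = P,
      // 3 = B.)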
      ++i;
      unsigned short temporal_reference = (fTo[i]<<2)|(fTo[i+1]>>6);
      unsigned char picture_coding_type = (fTo[i+1]&0x38)>>3;

      // If this is a "B" frame, then we have to tweak "presentationTime":
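      // (B pictures arrive after the I or P picture that they precede in
      // display order, so the presentation time supplied with them needs
      // adjusting: we step back from the last non-B picture's presentation
      // time by the 'temporal_reference' difference, scaled by the frame
      // period.)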
      if (picture_coding_type == 3/*B*/
          && (fLastNonBFramePresentationTime.tv_usec > 0 ||
              fLastNonBFramePresentationTime.tv_sec > 0)) {
        int trIncrement
          = fLastNonBFrameTemporal_reference - temporal_reference;
        if (trIncrement < 0) trIncrement += 1024; // field is 10 bits in size
        unsigned const MILLION = 1000000;
        unsigned usIncrement = fFrameRate == 0.0 ? 0
          : (unsigned)((trIncrement*MILLION)/fFrameRate);
        unsigned secondsToSubtract = usIncrement/MILLION;
        unsigned uSecondsToSubtract = usIncrement%MILLION;
        presentationTime = fLastNonBFramePresentationTime;
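        // Subtract (secondsToSubtract, uSecondsToSubtract) from
        // "presentationTime", borrowing from tv_sec when needed, and
        // clamping the result at zero: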
        if ((unsigned)presentationTime.tv_usec < uSecondsToSubtract) {
          presentationTime.tv_usec += MILLION;
          if (presentationTime.tv_sec > 0) --presentationTime.tv_sec;
        }
        presentationTime.tv_usec -= uSecondsToSubtract;
        if ((unsigned)presentationTime.tv_sec > secondsToSubtract) {
          presentationTime.tv_sec -= secondsToSubtract;
        } else {
          presentationTime.tv_sec = presentationTime.tv_usec = 0;
        }
      } else {
        fLastNonBFramePresentationTime = presentationTime;
        fLastNonBFrameTemporal_reference = temporal_reference;
      }
    }
  }

  // ##### Later:
  // - do "iFramesOnly" if requested
  // - handle "vshPeriod"

  // Complete delivery to the client:
  fFrameSize = frameSize;
  fNumTruncatedBytes = numTruncatedBytes;
  fPresentationTime = presentationTime;
  fDurationInMicroseconds = durationInMicroseconds;
  afterGetting(this);
}