/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SampleIterator.h"
#include <algorithm>
#include <limits>
#include "BufferReader.h"
#include "MP4Interval.h"
#include "MP4Metadata.h"
#include "MediaDataDemuxer.h"
#include "SinfParser.h"
#include "mozilla/RefPtr.h"
using namespace mozilla::media;
namespace mozilla {
// Linear, stateful lookup into a normalised set of byte ranges. Callers probe
// ranges in roughly increasing order, so remembering where the previous query
// matched is cheaper than a binary search per query.
class MOZ_STACK_CLASS RangeFinder {
 public:
  // aRanges must be normalised for this to work, and must outlive this
  // object (only a reference is kept).
  explicit RangeFinder(const MediaByteRangeSet& aRanges)
      : mRanges(aRanges), mIndex(0) {}

  // Returns true if aByteRange is entirely contained in one of mRanges.
  bool Contains(const MediaByteRange& aByteRange);

 private:
  const MediaByteRangeSet& mRanges;  // Not owned.
  size_t mIndex;  // Index of the range the previous query landed on.
};
// Linear scan outwards from the last matching range; see class comment for
// why this beats a binary search for our access pattern.
bool RangeFinder::Contains(const MediaByteRange& aByteRange) {
  if (mRanges.IsEmpty()) {
    return false;
  }

  auto matchesCurrent = [&]() {
    return mRanges[mIndex].ContainsStrict(aByteRange);
  };

  if (matchesCurrent()) {
    return true;
  }

  if (aByteRange.mStart < mRanges[mIndex].mStart) {
    // Target starts before the cached range: scan backwards.
    while (mIndex) {
      --mIndex;
      if (matchesCurrent()) {
        return true;
      }
      if (aByteRange.mStart >= mRanges[mIndex].mStart) {
        return false;
      }
    }
    return false;
  }

  // Otherwise scan forwards while the target extends past the cached range.
  while (aByteRange.mEnd > mRanges[mIndex].mEnd) {
    if (mIndex + 1 == mRanges.Length()) {
      return false;
    }
    ++mIndex;
    if (matchesCurrent()) {
      return true;
    }
  }
  return false;
}
// Construct an iterator positioned at the first sample of aIndex.
SampleIterator::SampleIterator(MP4SampleIndex* aIndex)
    : mIndex(aIndex), mCurrentMoof(0), mCurrentSample(0) {
  // Register so the index can re-base mCurrentMoof when moofs are evicted
  // (see MP4SampleIndex::UpdateMoofIndex).
  mIndex->RegisterIterator(this);
}
// Detach from the owning index so it no longer adjusts this iterator.
SampleIterator::~SampleIterator() { mIndex->UnregisterIterator(this); }
// True when a sample is available at the current iterator position.
bool SampleIterator::HasNext() { return !Get().isErr(); }
// Like GetNext() but only fills in the sample metadata (timing, offset,
// keyframe flag) without reading the sample payload from the source.
// Returns nullptr when no complete sample is available.
already_AddRefed<MediaRawData> SampleIterator::GetNextHeader() {
  auto entry = Get();
  if (entry.isErr()) {
    return nullptr;
  }
  Sample* sampleInfo = entry.unwrap();

  int64_t resourceLength = std::numeric_limits<int64_t>::max();
  mIndex->mSource->Length(&resourceLength);
  if (sampleInfo->mByteRange.mEnd > resourceLength) {
    // The sample's byte range extends past the resource: we don't have this
    // complete sample.
    return nullptr;
  }

  RefPtr<MediaRawData> header = new MediaRawData();
  header->mTimecode = sampleInfo->mDecodeTime;
  header->mTime = sampleInfo->mCompositionRange.start;
  header->mDuration = sampleInfo->mCompositionRange.Length();
  header->mOffset = sampleInfo->mByteRange.mStart;
  header->mKeyframe = sampleInfo->mSync;

  Next();
  return header.forget();
}
// Demux the sample at the current position: read its payload from the
// source, attach timing metadata and (for fragmented files) any crypto
// information, then advance the iterator. On failure returns a MediaResult
// describing why demuxing failed; the iterator is not advanced.
Result<already_AddRefed<MediaRawData>, MediaResult> SampleIterator::GetNext() {
  auto current = Get();
  if (current.isErr()) {
    return current.propagateErr();
  }
  Sample* s = current.unwrap();
  int64_t length = std::numeric_limits<int64_t>::max();
  mIndex->mSource->Length(&length);
  if (s->mByteRange.mEnd > length) {
    return Err(MediaResult::Logged(
        NS_ERROR_DOM_MEDIA_RANGE_ERR,
        RESULT_DETAIL("Sample data byte range beyond end of resource"),
        gMediaDemuxerLog));
  }
  RefPtr<MediaRawData> sample = new MediaRawData();
  sample->mTimecode = s->mDecodeTime;
  sample->mTime = s->mCompositionRange.start;
  sample->mDuration = s->mCompositionRange.Length();
  sample->mOffset = s->mByteRange.mStart;
  sample->mKeyframe = s->mSync;
  UniquePtr<MediaRawDataWriter> writer(sample->CreateWriter());
  // Do the blocking read
  if (!writer->SetSize(s->mByteRange.Length())) {
    return Err(MediaResult::Logged(NS_ERROR_OUT_OF_MEMORY, __func__,
                                   gMediaDemuxerLog));
  }
  size_t bytesRead;
  nsresult rv = mIndex->mSource->ReadAt(sample->mOffset, writer->Data(),
                                        sample->Size(), &bytesRead);
  if (NS_FAILED(rv) || bytesRead != sample->Size()) {
    return Err(MediaResult::Logged(
        // Fewer bytes read means end of stream, or the bytes are not
        // available because a network error has occurred.
        // A sample range extending past the end of stream is a bad range.
        NS_FAILED(rv) ? rv : NS_ERROR_DOM_MEDIA_RANGE_ERR,
        RESULT_DETAIL("Sample data read failed"), gMediaDemuxerLog));
  }
  MoofParser* moofParser = mIndex->mMoofParser.get();
  if (!moofParser) {
    // File is not fragmented, we can't have crypto, just early return.
    Next();
    return sample.forget();
  }
  const nsTArray<Moof>& moofs = moofParser->Moofs();
  const Moof* currentMoof = &moofs[mCurrentMoof];
  // We need to check if this moof has init data the CDM expects us to surface.
  // This should happen when handling the first sample, even if that sample
  // isn't encrypted (samples later in the moof may be).
  if (mCurrentSample == 0) {
    if (!currentMoof->mPsshes.IsEmpty()) {
      // This Moof contained crypto init data. Report that. We only report
      // the init data on the Moof's first sample, to avoid reporting it more
      // than once per Moof.
      writer->mCrypto.mInitDatas.AppendElements(currentMoof->mPsshes);
      writer->mCrypto.mInitDataType = u"cenc"_ns;
    }
  }
  auto cryptoSchemeResult = GetEncryptionScheme();
  if (cryptoSchemeResult.isErr()) {
    return Err(MediaResult::Logged(NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
                                   cryptoSchemeResult.unwrapErr(),
                                   gMediaDemuxerLog));
  }
  CryptoScheme cryptoScheme = cryptoSchemeResult.unwrap();
  if (cryptoScheme == CryptoScheme::None) {
    // No crypto to handle, early return.
    Next();
    return sample.forget();
  }
  writer->mCrypto.mCryptoScheme = cryptoScheme;
  MOZ_ASSERT(writer->mCrypto.mCryptoScheme != CryptoScheme::None,
             "Should have early returned if we don't have a crypto scheme!");
  MOZ_ASSERT(writer->mCrypto.mKeyId.IsEmpty(),
             "Sample should not already have a key ID");
  MOZ_ASSERT(writer->mCrypto.mConstantIV.IsEmpty(),
             "Sample should not already have a constant IV");
  const CencSampleEncryptionInfoEntry* sampleInfo = GetSampleEncryptionEntry();
  if (sampleInfo) {
    // Use sample group information if present, this supersedes track level
    // information.
    writer->mCrypto.mKeyId.AppendElements(sampleInfo->mKeyId);
    writer->mCrypto.mIVSize = sampleInfo->mIVSize;
    writer->mCrypto.mCryptByteBlock = sampleInfo->mCryptByteBlock;
    writer->mCrypto.mSkipByteBlock = sampleInfo->mSkipByteBlock;
    // (sic: the field really is spelled mConsantIV on
    // CencSampleEncryptionInfoEntry.)
    writer->mCrypto.mConstantIV.AppendElements(sampleInfo->mConsantIV);
  } else {
    // Use the crypto info from track metadata
    writer->mCrypto.mKeyId.AppendElements(moofParser->mSinf.mDefaultKeyID, 16);
    writer->mCrypto.mIVSize = moofParser->mSinf.mDefaultIVSize;
    writer->mCrypto.mCryptByteBlock = moofParser->mSinf.mDefaultCryptByteBlock;
    writer->mCrypto.mSkipByteBlock = moofParser->mSinf.mDefaultSkipByteBlock;
    writer->mCrypto.mConstantIV.AppendElements(
        moofParser->mSinf.mDefaultConstantIV);
  }
  if ((writer->mCrypto.mIVSize == 0 && writer->mCrypto.mConstantIV.IsEmpty()) ||
      (writer->mCrypto.mIVSize != 0 &&
       (s->mCencRange.IsEmpty() && !currentMoof->SencIsValid()))) {
    // If mIVSize == 0, this indicates that a constant IV is in use, thus we
    // should have a non empty constant IV. Alternatively if IV size is non
    // zero, we should have an IV for this sample, which we need to look up
    // in mCencRange (which must then be non empty). If neither of these are
    // true we have bad crypto data, so bail.
    return Err(MediaResult::Logged(NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
                                   RESULT_DETAIL("Crypto IV size inconsistent"),
                                   gMediaDemuxerLog));
  }
  // Retrieve encryption information
  // This information might come from two places: the senc box, or the
  // auxiliary data (indicated by saio and saiz boxes)
  // Try to use senc information first, and fallback to auxiliary data if not
  // present
  if (currentMoof->SencIsValid()) {
    if (writer->mCrypto.mIVSize != s->mIV.Length()) {
      return Err(MediaResult::Logged(
          NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
          RESULT_DETAIL("Inconsistent crypto IV size"), gMediaDemuxerLog));
    }
    writer->mCrypto.mIV = s->mIV;
    writer->mCrypto.mPlainSizes = s->mPlainSizes;
    writer->mCrypto.mEncryptedSizes = s->mEncryptedSizes;
  } else if (!s->mCencRange.IsEmpty()) {
    // The size comes from an 8 bit field
    AutoTArray<uint8_t, 256> cencAuxInfo;
    cencAuxInfo.SetLength(s->mCencRange.Length());
    // Sample Auxiliary Information may be stored anywhere in the file, but
    // encryption is supported in only fragmented mp4, so the offsets are
    // assumed in the traf or a subsequent box.
    rv = mIndex->mSource->ReadAt(s->mCencRange.mStart, cencAuxInfo.Elements(),
                                 cencAuxInfo.Length(), &bytesRead);
    if (NS_FAILED(rv) || bytesRead != cencAuxInfo.Length()) {
      return Err(MediaResult::Logged(
          // Unless pref "eme.mse-only" is set to false, encryption is supported
          // only in MSE, where fewer bytes means end of stream. mCencRange
          // extending past the end of stream means an error with the range.
          NS_FAILED(rv) ? rv : NS_ERROR_DOM_MEDIA_RANGE_ERR,
          RESULT_DETAIL("cenc Sample Auxiliary Information read failed"),
          gMediaDemuxerLog));
    }
    BufferReader reader(cencAuxInfo);
    if (!reader.ReadArray(writer->mCrypto.mIV, writer->mCrypto.mIVSize)) {
      return Err(MediaResult::Logged(
          NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
          RESULT_DETAIL("sample InitializationVector error"),
          gMediaDemuxerLog));
    }
    // Parse the auxiliary information for subsample information
    auto res = reader.ReadU16();
    if (res.isOk() && res.unwrap() > 0) {
      uint16_t count = res.unwrap();
      for (size_t i = 0; i < count; i++) {
        auto res_16 = reader.ReadU16();
        auto res_32 = reader.ReadU32();
        if (res_16.isErr() || res_32.isErr()) {
          return Err(MediaResult::Logged(
              NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
              // Note: string was previously split as "...for"
              // "Cenc...", yielding "forCenc" with no space.
              RESULT_DETAIL("cenc subsample_count too large for "
                            "CencSampleAuxiliaryDataFormat"),
              gMediaDemuxerLog));
        }
        writer->mCrypto.mPlainSizes.AppendElement(res_16.unwrap());
        writer->mCrypto.mEncryptedSizes.AppendElement(res_32.unwrap());
      }
    } else {
      // No subsample information means the entire sample is encrypted.
      writer->mCrypto.mPlainSizes.AppendElement(0);
      writer->mCrypto.mEncryptedSizes.AppendElement(sample->Size());
    }
  }
  Next();
  return sample.forget();
}
// Looks up the sample description entry referenced by the current moof's
// tfhd. Returns nullptr when the (1-based) index doesn't address a parsed
// entry, which indicates a malformed mp4.
SampleDescriptionEntry* SampleIterator::GetSampleDescriptionEntry() {
  nsTArray<Moof>& moofs = mIndex->mMoofParser->Moofs();
  const uint32_t oneBasedIndex =
      moofs[mCurrentMoof].mTfhd.mDefaultSampleDescriptionIndex;
  FallibleTArray<SampleDescriptionEntry>& descriptions =
      mIndex->mMoofParser->mSampleDescriptions;
  // Mp4 indices start at 1; 0 is invalid, as is anything past the array.
  if (oneBasedIndex == 0 || oneBasedIndex > descriptions.Length()) {
    return nullptr;
  }
  return &descriptions[oneBasedIndex - 1];
}
// Fetches the per-sample-group encryption info entry for the current sample
// from the moof parser; may be nullptr when the sample has no group override.
const CencSampleEncryptionInfoEntry* SampleIterator::GetSampleEncryptionEntry()
    const {
  MoofParser* parser = mIndex->mMoofParser.get();
  return parser->GetSampleEncryptionEntry(mCurrentMoof, mCurrentSample);
}
// Determines which encryption scheme, if any, applies to the current sample.
// An error indicates the file's crypto metadata is missing or internally
// inconsistent. See ISO/IEC 23001-7 for information on the metadata checked.
Result<CryptoScheme, nsCString> SampleIterator::GetEncryptionScheme() {
  MoofParser* parser = mIndex->mMoofParser.get();
  if (!parser) {
    // Only fragmented mp4s can be encrypted.
    return CryptoScheme::None;
  }

  SampleDescriptionEntry* description = GetSampleDescriptionEntry();
  if (!description) {
    // For the file to be valid the tfhd must reference a sample description
    // entry. If we encounter this error often, we may consider using the
    // first sample description entry if the index is out of bounds.
    return mozilla::Err(RESULT_DETAIL(
        "Could not determine encryption scheme due to bad index for sample "
        "description entry."));
  }
  if (!description->mIsEncryptedEntry) {
    return CryptoScheme::None;
  }

  if (!parser->mSinf.IsValid()) {
    // The entry claims encryption but there is no valid sinf box. As the
    // sinf is part of the sample description entry this suggests a
    // malformed file, so bail.
    return mozilla::Err(RESULT_DETAIL(
        "Could not determine encryption scheme. Sample description entry "
        "indicates encryption, but could not find associated sinf box."));
  }

  const CencSampleEncryptionInfoEntry* groupInfo = GetSampleEncryptionEntry();
  if (groupInfo && !groupInfo->mIsEncrypted) {
    // Sample group encryption info, when present, must agree with the rest
    // of the metadata.
    return mozilla::Err(RESULT_DETAIL(
        "Could not determine encryption scheme. Sample description entry "
        "indicates encryption, but sample encryption entry indicates sample is "
        "not encrypted. These should be consistent."));
  }

  if (parser->mSinf.mDefaultEncryptionType == AtomType("cenc")) {
    return CryptoScheme::Cenc;
  }
  if (parser->mSinf.mDefaultEncryptionType == AtomType("cbcs")) {
    return CryptoScheme::Cbcs;
  }
  return mozilla::Err(RESULT_DETAIL(
      "Could not determine encryption scheme. Sample description entry "
      "reports sample is encrypted, but no scheme, or an unsupported scheme "
      "is in use."));
}
// Returns the Sample at the current position, blocking-reading further moofs
// on demand for fragmented files. Errors with end-of-stream (or a read
// failure code) when no sample is available.
Result<Sample*, nsresult> SampleIterator::Get() {
  if (!mIndex->mMoofParser) {
    // Unfragmented file: samples live in the flat mIndex array.
    MOZ_ASSERT(!mCurrentMoof);
    if (mCurrentSample >= mIndex->mIndex.Length()) {
      return Err(NS_ERROR_DOM_MEDIA_END_OF_STREAM);
    }
    return &mIndex->mIndex[mCurrentSample];
  }

  nsTArray<Moof>& moofs = mIndex->mMoofParser->Moofs();
  for (;;) {
    if (mCurrentMoof == moofs.Length()) {
      // Ran past the parsed moofs; read the next one (blocking).
      nsresult rv = mIndex->mMoofParser->BlockingReadNextMoof();
      if (NS_FAILED(rv)) {
        return Err(rv);
      }
      MOZ_ASSERT(mCurrentMoof < moofs.Length());
    }
    if (mCurrentSample < moofs[mCurrentMoof].mIndex.Length()) {
      // Current moof still has a sample at our position.
      return &moofs[mCurrentMoof].mIndex[mCurrentSample];
    }
    // Exhausted this moof; continue from the start of the next one.
    mCurrentSample = 0;
    ++mCurrentMoof;
  }
}
// Advance within the current moof; Get() handles rolling into the next moof.
void SampleIterator::Next() { mCurrentSample += 1; }
// Positions the iterator on the sync sample to decode from in order to reach
// aTime. With SyncSampleMode::First we stop on the first sync sample found;
// otherwise we keep the last sync sample at or before aTime. Falls back to
// the very first sample (0, 0) if no sync sample is found.
void SampleIterator::Seek(const TimeUnit& aTime, SyncSampleMode aMode) {
  size_t syncMoof = 0;
  size_t syncSample = 0;
  mCurrentMoof = 0;
  mCurrentSample = 0;
  // Scan forward from the beginning; a Get() error (treated as nullptr)
  // terminates the scan.
  while (Sample* sample = Get().unwrapOr(nullptr)) {
    if (sample->mCompositionRange.start > aTime) {
      // Went past the target time: use the last recorded sync point.
      break;
    }
    if (sample->mSync) {
      // Remember this sync sample as the current best seek target.
      syncMoof = mCurrentMoof;
      syncSample = mCurrentSample;
      if (aMode == SyncSampleMode::First) {
        break;
      }
    }
    if (sample->mCompositionRange.start == aTime) {
      // Exact match; no need to scan further.
      break;
    }
    Next();
  }
  mCurrentMoof = syncMoof;
  mCurrentSample = syncSample;
}
// Returns the composition time of the next keyframe at or after the current
// position, without moving this iterator. Invalid when none is found.
TimeUnit SampleIterator::GetNextKeyframeTime() {
  // Scan on a copy so this iterator's position is untouched.
  SampleIterator scan(*this);
  for (Sample* sample = scan.Get().unwrapOr(nullptr); sample;
       sample = scan.Get().unwrapOr(nullptr)) {
    if (sample->mSync) {
      return sample->mCompositionRange.start;
    }
    scan.Next();
  }
  return TimeUnit::Invalid();
}
// Builds the sample index for a track. With no indices the file is treated
// as fragmented and a MoofParser is created instead; otherwise a flat sample
// index plus per-keyframe (or per-128-audio-samples) data-offset blocks are
// built to speed up byte-range -> time-range conversion.
MP4SampleIndex::MP4SampleIndex(const IndiceWrapper& aIndices,
                               ByteStream* aSource, uint32_t aTrackId,
                               bool aIsAudio, uint32_t aTimeScale)
    : mSource(aSource), mIsAudio(aIsAudio) {
  if (!aIndices.Length()) {
    mMoofParser =
        MakeUnique<MoofParser>(aSource, AsVariant(aTrackId), aIsAudio);
  } else {
    if (!mIndex.SetCapacity(aIndices.Length(), fallible)) {
      // OOM.
      return;
    }
    media::IntervalSet<TimeUnit> intervalTime;
    MediaByteRange intervalRange;
    bool haveSync = false;
    bool progressive = true;
    int64_t lastOffset = 0;
    for (size_t i = 0; i < aIndices.Length(); i++) {
      Indice indice{};
      // NOTE(review): on this branch mMoofParser was never created, so this
      // ternary always picks aTimeScale; the mMoofParser arm looks
      // defensive/dead — confirm before relying on it.
      int64_t timescale =
          mMoofParser ? AssertedCast<int64_t>(mMoofParser->mMvhd.mTimescale)
                      : aTimeScale;
      if (!aIndices.GetIndice(i, indice)) {
        // Out of index?
        return;
      }
      // Audio is treated as all-sync; skip leading samples until the first
      // sync point so every indexed sample is decodable.
      if (indice.sync || mIsAudio) {
        haveSync = true;
      }
      if (!haveSync) {
        continue;
      }
      Sample sample;
      sample.mByteRange =
          MediaByteRange(indice.start_offset, indice.end_offset);
      sample.mCompositionRange = MP4Interval<media::TimeUnit>(
          TimeUnit(indice.start_composition, timescale),
          TimeUnit(indice.end_composition, timescale));
      sample.mDecodeTime = TimeUnit(indice.start_decode, timescale);
      sample.mSync = indice.sync || mIsAudio;
      // FIXME: Make this infallible after bug 968520 is done.
      MOZ_ALWAYS_TRUE(mIndex.AppendElement(sample, fallible));
      if (indice.start_offset < lastOffset) {
        NS_WARNING("Chunks in MP4 out of order, expect slow down");
        progressive = false;
      }
      lastOffset = indice.end_offset;
      // Pack audio samples in group of 128.
      if (sample.mSync && progressive && (!mIsAudio || !(i % 128))) {
        // Close out the previous data-offset block with the byte/time span
        // accumulated since the last boundary.
        if (mDataOffset.Length()) {
          auto& last = mDataOffset.LastElement();
          last.mEndOffset = intervalRange.mEnd;
          NS_ASSERTION(intervalTime.Length() == 1,
                       "Discontinuous samples between keyframes");
          last.mTime.start = intervalTime.GetStart();
          last.mTime.end = intervalTime.GetEnd();
        }
        if (!mDataOffset.AppendElement(
                MP4DataOffset(mIndex.Length() - 1, indice.start_offset),
                fallible)) {
          // OOM.
          return;
        }
        // Start accumulating the new block's span from scratch.
        intervalTime = media::IntervalSet<TimeUnit>();
        intervalRange = MediaByteRange();
      }
      intervalTime += media::Interval<TimeUnit>(sample.mCompositionRange.start,
                                                sample.mCompositionRange.end);
      intervalRange = intervalRange.Span(sample.mByteRange);
    }
    if (mDataOffset.Length() && progressive) {
      // Close the final block using the last indice's end offset.
      Indice indice;
      if (!aIndices.GetIndice(aIndices.Length() - 1, indice)) {
        return;
      }
      auto& last = mDataOffset.LastElement();
      last.mEndOffset = indice.end_offset;
      last.mTime =
          MP4Interval<TimeUnit>(intervalTime.GetStart(), intervalTime.GetEnd());
    } else {
      // Non-progressive files can't use the data-offset fast path.
      mDataOffset.Clear();
    }
  }
}
// Defined out-of-line; presumably so UniquePtr members destruct where their
// types are complete — confirm against the header.
MP4SampleIndex::~MP4SampleIndex() = default;
// Convenience overload: rebuild the moof index without allowing eviction.
void MP4SampleIndex::UpdateMoofIndex(const MediaByteRangeSet& aByteRanges) {
  UpdateMoofIndex(aByteRanges, /* aCanEvict = */ false);
}
// Rebuilds the fragmented index over aByteRanges. When aCanEvict is true and
// every registered iterator has either finished all moofs or is on the last
// one, already-parsed moofs may be trimmed; iterators are then re-based onto
// the single retained moof.
void MP4SampleIndex::UpdateMoofIndex(const MediaByteRangeSet& aByteRanges,
                                     bool aCanEvict) {
  if (!mMoofParser) {
    return;
  }
  size_t moofs = mMoofParser->Moofs().Length();
  bool canEvict = aCanEvict && moofs > 1;
  if (canEvict) {
    // Check that we can trim the mMoofParser. We can only do so if all
    // iterators have demuxed all possible samples.
    for (const SampleIterator* iterator : mIterators) {
      if ((iterator->mCurrentSample == 0 && iterator->mCurrentMoof == moofs) ||
          iterator->mCurrentMoof == moofs - 1) {
        continue;
      }
      canEvict = false;
      break;
    }
  }
  // RebuildFragmentedIndex may clear canEvict if it didn't actually trim.
  mMoofParser->RebuildFragmentedIndex(aByteRanges, &canEvict);
  if (canEvict) {
    // The moofparser got trimmed. Adjust all registered iterators.
    for (SampleIterator* iterator : mIterators) {
      iterator->mCurrentMoof -= moofs - 1;
    }
  }
}
// Maps buffered byte ranges to the time ranges they make playable. Uses the
// precomputed mDataOffset blocks when available (progressive unfragmented
// files), otherwise walks the moof/sample indices. The result is cached per
// input byte-range set.
// Fix: the moof loop previously used a signed `int` index compared against
// the unsigned Length(), a signed/unsigned mismatch; it now uses size_t.
TimeIntervals MP4SampleIndex::ConvertByteRangesToTimeRanges(
    const MediaByteRangeSet& aByteRanges) {
  if (aByteRanges == mLastCachedRanges) {
    return mLastBufferedRanges;
  }
  mLastCachedRanges = aByteRanges;
  if (mDataOffset.Length()) {
    // Fast path: binary-search the data-offset blocks.
    TimeIntervals timeRanges;
    for (const auto& range : aByteRanges) {
      uint32_t start = mDataOffset.IndexOfFirstElementGt(range.mStart - 1);
      if (!mIsAudio && start == mDataOffset.Length()) {
        continue;
      }
      uint32_t end = mDataOffset.IndexOfFirstElementGt(
          range.mEnd, MP4DataOffset::EndOffsetComparator());
      if (!mIsAudio && end < start) {
        continue;
      }
      if (mIsAudio && start &&
          range.Intersects(MediaByteRange(mDataOffset[start - 1].mStartOffset,
                                          mDataOffset[start - 1].mEndOffset))) {
        // Check if previous audio data block contains some available samples.
        for (size_t i = mDataOffset[start - 1].mIndex; i < mIndex.Length();
             i++) {
          if (range.ContainsStrict(mIndex[i].mByteRange)) {
            timeRanges += TimeInterval(mIndex[i].mCompositionRange.start,
                                       mIndex[i].mCompositionRange.end);
          }
        }
      }
      if (end > start) {
        // Whole blocks fully contained in the byte range.
        for (uint32_t i = start; i < end; i++) {
          timeRanges += TimeInterval(mDataOffset[i].mTime.start,
                                     mDataOffset[i].mTime.end);
        }
      }
      if (end < mDataOffset.Length()) {
        // Find samples in partial block contained in the byte range.
        for (size_t i = mDataOffset[end].mIndex;
             i < mIndex.Length() && range.ContainsStrict(mIndex[i].mByteRange);
             i++) {
          timeRanges += TimeInterval(mIndex[i].mCompositionRange.start,
                                     mIndex[i].mCompositionRange.end);
        }
      }
    }
    mLastBufferedRanges = timeRanges;
    return timeRanges;
  }
  RangeFinder rangeFinder(aByteRanges);
  nsTArray<MP4Interval<media::TimeUnit>> timeRanges;
  nsTArray<FallibleTArray<Sample>*> indexes;
  if (mMoofParser) {
    // We take the index out of the moof parser and move it into a local
    // variable so we don't get concurrency issues. It gets freed when we
    // exit this function.
    for (size_t i = 0; i < mMoofParser->Moofs().Length(); i++) {
      Moof& moof = mMoofParser->Moofs()[i];
      // We need the entire moof in order to play anything
      if (rangeFinder.Contains(moof.mRange)) {
        if (rangeFinder.Contains(moof.mMdatRange)) {
          MP4Interval<media::TimeUnit>::SemiNormalAppend(timeRanges,
                                                         moof.mTimeRange);
        } else {
          indexes.AppendElement(&moof.mIndex);
        }
      }
    }
  } else {
    indexes.AppendElement(&mIndex);
  }
  bool hasSync = false;
  for (size_t i = 0; i < indexes.Length(); i++) {
    FallibleTArray<Sample>* index = indexes[i];
    for (size_t j = 0; j < index->Length(); j++) {
      const Sample& sample = (*index)[j];
      if (!rangeFinder.Contains(sample.mByteRange)) {
        // We process the index in decode order so we clear hasSync when we hit
        // a range that isn't buffered.
        hasSync = false;
        continue;
      }
      hasSync |= sample.mSync;
      if (!hasSync) {
        continue;
      }
      MP4Interval<media::TimeUnit>::SemiNormalAppend(timeRanges,
                                                     sample.mCompositionRange);
    }
  }
  // This fixes up when the compositon order differs from the byte range order
  nsTArray<MP4Interval<TimeUnit>> timeRangesNormalized;
  MP4Interval<media::TimeUnit>::Normalize(timeRanges, &timeRangesNormalized);
  // convert timeRanges.
  media::TimeIntervals ranges;
  for (size_t i = 0; i < timeRangesNormalized.Length(); i++) {
    ranges += media::TimeInterval(timeRangesNormalized[i].start,
                                  timeRangesNormalized[i].end);
  }
  mLastBufferedRanges = ranges;
  return ranges;
}
// Returns the lowest byte offset that must be retained so data at or after
// aTime can still be demuxed; everything before it is safe to evict.
// Returns UINT64_MAX when nothing needs to be kept.
// Fix: the moof loop previously used a signed `int` index compared against
// the unsigned Length(), a signed/unsigned mismatch; it now uses size_t.
uint64_t MP4SampleIndex::GetEvictionOffset(const TimeUnit& aTime) {
  uint64_t offset = std::numeric_limits<uint64_t>::max();
  if (mMoofParser) {
    // We need to keep the whole moof if we're keeping any of it because the
    // parser doesn't keep parsed moofs.
    for (size_t i = 0; i < mMoofParser->Moofs().Length(); i++) {
      Moof& moof = mMoofParser->Moofs()[i];
      if (!moof.mTimeRange.Length().IsZero() && moof.mTimeRange.end > aTime) {
        offset = std::min(offset, uint64_t(std::min(moof.mRange.mStart,
                                                    moof.mMdatRange.mStart)));
      }
    }
  } else {
    // We've already parsed and stored the moov so we don't need to keep it.
    // All we need to keep is the sample data itself.
    for (size_t i = 0; i < mIndex.Length(); i++) {
      const Sample& sample = mIndex[i];
      if (aTime >= sample.mCompositionRange.end) {
        offset = std::min(offset, uint64_t(sample.mByteRange.mEnd));
      }
    }
  }
  return offset;
}
// Tracks aIterator so UpdateMoofIndex() can adjust it after moof eviction.
// Called from the SampleIterator constructor.
void MP4SampleIndex::RegisterIterator(SampleIterator* aIterator) {
  mIterators.AppendElement(aIterator);
}
// Stops tracking aIterator. Called from the SampleIterator destructor.
void MP4SampleIndex::UnregisterIterator(SampleIterator* aIterator) {
  mIterators.RemoveElement(aIterator);
}
} // namespace mozilla