/***
Olive - Non-Linear Video Editor
Copyright (C) 2019 Olive Team
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
***/
#include "clip.h"
OLIVE_NAMESPACE_ENTER
/// Constructs a clip block and registers its single buffer input.
///
/// The buffer input carries the rendered frame from the connected media
/// node; it is marked non-keyframable because a buffer cannot be
/// interpolated between keyframes.
ClipBlock::ClipBlock()
{
  NodeInput* buffer_input = new NodeInput("buffer_in", NodeInput::kBuffer);
  buffer_input->set_is_keyframable(false);

  texture_input_ = buffer_input;
  AddInput(texture_input_);
}
/// Creates a fresh, default-constructed ClipBlock.
///
/// Per the Node::copy() convention, only the node itself is duplicated
/// here; inputs/values are transferred by the caller.
Node *ClipBlock::copy() const
{
  ClipBlock* duplicate = new ClipBlock();
  return duplicate;
}
/// Identifies this block as a clip within the Block type enumeration.
Block::Type ClipBlock::type() const
{
  return Block::kClip;
}
/// Returns the human-readable, translatable display name of this node.
QString ClipBlock::Name() const
{
  return tr("Clip");
}
/// Returns the unique, non-translatable identifier used for serialization.
QString ClipBlock::id() const
{
  return QStringLiteral("org.olivevideoeditor.Olive.clip");
}
/// Returns a one-line, translatable description shown in the UI.
QString ClipBlock::Description() const
{
  return tr("A time-based node that represents a media source.");
}
/// Accessor for the buffer ("texture") input created in the constructor.
NodeInput *ClipBlock::texture_input() const
{
  return texture_input_;
}
void ClipBlock::InvalidateCache(const TimeRange &range, NodeInput *from, NodeInput *source)
{
// If signal is from texture input, transform all times from media time to sequence time
if (from == texture_input_) {
// Adjust range from media time to sequence time
rational start = MediaToSequenceTime(range.in());
rational end = MediaToSequenceTime(range.out());
Block::InvalidateCache(TimeRange(start, end), from, source);
} else {
// Otherwise, pass signal along normally
Block::InvalidateCache(range, from, source);
}
}
/// Converts a requested time range from sequence time into the timebase
/// expected by a given input.
///
/// The texture input expects media time, so both endpoints are remapped
/// via SequenceToMediaTime(); all other inputs defer to the base class.
TimeRange ClipBlock::InputTimeAdjustment(NodeInput *input, const TimeRange &input_time) const
{
  if (input != texture_input_) {
    return Block::InputTimeAdjustment(input, input_time);
  }

  return TimeRange(SequenceToMediaTime(input_time.in()),
                   SequenceToMediaTime(input_time.out()));
}
/// Converts a time range from an input's timebase back into sequence time
/// (the inverse of InputTimeAdjustment()).
///
/// The texture input operates in media time, so both endpoints are
/// remapped via MediaToSequenceTime(); all other inputs defer to the base
/// class.
TimeRange ClipBlock::OutputTimeAdjustment(NodeInput *input, const TimeRange &input_time) const
{
  if (input == texture_input_) {
    return TimeRange(MediaToSequenceTime(input_time.in()), MediaToSequenceTime(input_time.out()));
  }

  // FIX: the fallback previously called Block::InputTimeAdjustment(),
  // which maps in the opposite direction (sequence -> input). The output
  // adjustment must delegate to the base class's OUTPUT adjustment,
  // mirroring the structure of InputTimeAdjustment() above.
  return Block::OutputTimeAdjustment(input, input_time);
}
/// Builds this node's output table, keeping only the received buffer.
///
/// Every other value in the database is deliberately discarded; if no
/// buffer arrived (type is kNone), an empty table is returned.
NodeValueTable ClipBlock::Value(NodeValueDatabase &value) const
{
  NodeValueTable table;

  NodeValue buffer = value[texture_input()].GetWithMeta(NodeParam::kBuffer);
  const bool received_buffer = (buffer.type() != NodeParam::kNone);

  if (received_buffer) {
    table.Push(buffer);
  }

  return table;
}
/// Refreshes all user-visible strings after a language change.
void ClipBlock::Retranslate()
{
  // Let the base class retranslate its own inputs first.
  Block::Retranslate();

  texture_input_->set_name(tr("Buffer"));
}
/// Folds the upstream node's state at `time` into `hash`.
///
/// The sequence time is first converted to media time via
/// InputTimeAdjustment() so the connected node hashes the frame it will
/// actually produce. A disconnected texture input contributes nothing.
void ClipBlock::Hash(QCryptographicHash &hash, const rational &time) const
{
  if (!texture_input_->is_connected()) {
    return;
  }

  const TimeRange instant(time, time);
  const rational media_time = InputTimeAdjustment(texture_input_, instant).in();

  texture_input_->get_connected_node()->Hash(hash, media_time);
}
OLIVE_NAMESPACE_EXIT