tracktion-engine 3.0-10-g034fdde4aa5
Tracktion Engine — High level data model for audio applications

« « « Anklang Documentation
Loading...
Searching...
No Matches
tracktion_CombiningNode.cpp
Go to the documentation of this file.
1 /*
2 ,--. ,--. ,--. ,--.
3 ,-' '-.,--.--.,--,--.,---.| |,-.,-' '-.`--' ,---. ,--,--, Copyright 2024
4 '-. .-'| .--' ,-. | .--'| /'-. .-',--.| .-. || \ Tracktion Software
5 | | | | \ '-' \ `--.| \ \ | | | |' '-' '| || | Corporation
6 `---' `--' `--`--'`---'`--'`--' `---' `--' `---' `--''--' www.tracktion.com
7
8 Tracktion Engine uses a GPL/commercial licence - see LICENCE.md for details.
9*/
10
12
13#define USE_PARTITION_INSERTION 1
14
15namespace tracktion { inline namespace engine
16{
17
namespace combining_node_utils
{
    // How much extra time to give a track before it gets cut off - to allow for plugins
    // that ring on (e.g. reverb/delay tails) after the clip itself has ended.
    static constexpr BeatDuration decayTimeAllowance { 8_bd };

    // Width (in seconds) of each "group" bucket used to quickly find the
    // TimedNodes that may intersect a given playback position.
    static constexpr int secondsPerGroup = 8;

    // Maps an edit time position to the index of the group bucket containing it.
    // NOTE(review): integer truncation means negative times don't map to distinct
    // negative buckets cleanly - callers clamp the result with std::max (0, ...).
    static inline constexpr int timeToGroupIndex (TimePosition t) noexcept
    {
        return static_cast<int> (t.inSeconds()) / secondsPerGroup;
    }
}
30
31//==============================================================================
33{
34 TimedNode (std::unique_ptr<Node> sourceNode, BeatRange t)
35 : time (t), node (std::move (sourceNode))
36 {
37 for (auto n = node.get();;)
38 {
39 nodesToProcess.insert (nodesToProcess.begin(), n);
40 auto inputNodes = n->getDirectInputNodes();
41
42 if (inputNodes.empty())
43 break;
44
45 // This doesn't work with parallel input Nodes
46 assert (inputNodes.size() == 1);
47 n = inputNodes.front();
48 }
49 }
50
    /** Returns the Nodes of this chain in processing order (leaf first, source last).
        Note this returns a copy of the internal vector.
    */
    std::vector<Node*> getNodes() const
    {
        return nodesToProcess;
    }
55
    /** Initialises every Node in the chain for playback.
        The supplied view acts as a shared scratch buffer: the allocator callback
        is overridden so each Node "allocates" a sub-view of it instead of heap
        memory, letting all of the CombiningNode's inputs share one buffer.
        @param info  the playback info to pass on (copied and modified locally)
        @param view  the shared scratch buffer the Nodes will render into
    */
    void prepareToPlay (const tracktion::graph::PlaybackInitialisationInfo& info,
                        choc::buffer::ChannelArrayView<float> view)
    {
        auto info2 = info;
        info2.allocateAudioBuffer = [view] (choc::buffer::Size size) -> tracktion::graph::NodeBuffer
        {
            // Requests must fit inside the shared scratch buffer
            jassert (size.numFrames == view.getNumFrames());
            jassert (size.numChannels <= view.getNumChannels());

            return { view.getFirstChannels (size.numChannels), {} };
        };
        info2.deallocateAudioBuffer = nullptr; // Nothing to free - the view is owned elsewhere

        for (auto n : nodesToProcess)
            n->initialise (info2);
    }
72
    /** Returns true if the leaf Node of the chain is ready to process.
        NOTE(review): only the first (leaf) Node is checked - presumably the rest
        of the chain is ready once its input is; confirm against Node semantics.
    */
    bool isReadyToProcess() const
    {
        return nodesToProcess.front()->isReadyToProcess();
    }
77
    /** Prepares every Node in the chain for the next block.
        Must be called once per block before process() (checked in debug builds).
    */
    void prefetchBlock (juce::Range<int64_t> referenceSampleRange)
    {
        for (auto n : nodesToProcess)
            n->prepareForNextBlock (referenceSampleRange);

       #if JUCE_DEBUG
        hasPrefetched = true;
       #endif
    }
87
    /** Processes the whole chain for one block and merges the source Node's
        output into the ProcessContext's buffers.
        prefetchBlock() must have been called for this block first.
    */
    void process (ProcessContext& pc)
    {
        jassert (hasPrefetched);

        // Process all the Nodes
        for (auto n : nodesToProcess)
            n->process (pc.numSamples, pc.referenceSampleRange);

        // Then get the output from the source Node
        auto nodeOutput = node->getProcessedOutput();
        const auto numDestChannels = pc.buffers.audio.getNumChannels();
        const auto numChannelsToAdd = std::min (nodeOutput.audio.getNumChannels(), numDestChannels);

        // Sum (not copy) into the destination so overlapping inputs mix together
        if (numChannelsToAdd > 0)
            add (pc.buffers.audio.getFirstChannels (numChannelsToAdd),
                 nodeOutput.audio.getFirstChannels (numChannelsToAdd));

        pc.buffers.midi.mergeFrom (nodeOutput.midi);

       #if JUCE_DEBUG
        hasPrefetched = false; // Require a fresh prefetch before the next process call
       #endif
    }
111
112 size_t getAllocatedBytes() const
113 {
114 size_t size = 0;
115
116 for (auto n : nodesToProcess)
117 size += n->getAllocatedBytes();
118
119 return size;
120 }
121
    const BeatRange time;  // Edit beat range this chain is active for (includes decay allowance)

private:
    const std::unique_ptr<Node> node;   // Source (output) Node of the chain
    std::vector<Node*> nodesToProcess;  // Whole chain in processing order, leaf first
   #if JUCE_DEBUG
    bool hasPrefetched = false;         // Guards against process() without a prior prefetchBlock()
   #endif
130
132};
133
134//==============================================================================
CombiningNode::CombiningNode (EditItemID id, ProcessState& ps)
    : TracktionEngineNode (ps),
      itemID (id)
{
    // A tempo sequence is required later to convert between time and beats in addInput()
    jassert (getProcessState().getTempoSequence());

    // Mix the edit item's ID into the node hash so different items produce different IDs
    hash_combine (nodeProperties.nodeID, itemID);
}
142
143CombiningNode::~CombiningNode() {}
144
146{
147 jassert (time.getEnd() <= Edit::getMaximumEditEnd());
148 addInput (std::move (input), toBeats (*getProcessState().getTempoSequence(), time));
149}
150
void CombiningNode::addInput (std::unique_ptr<Node> input, BeatRange beatRange)
{
    assert (input != nullptr);

    // An empty range can never produce any output, so skip it entirely
    if (beatRange.isEmpty())
        return;

    auto props = input->getNodeProperties();

    // Fold the new input's properties into this Node's aggregate properties
    nodeProperties.hasAudio |= props.hasAudio;
    nodeProperties.hasMidi |= props.hasMidi;
    nodeProperties.numberOfChannels = std::max (nodeProperties.numberOfChannels, props.numberOfChannels);
    nodeProperties.latencyNumSamples = std::max (nodeProperties.latencyNumSamples, props.latencyNumSamples);
    hash_combine (nodeProperties.nodeID, props.nodeID);
    hash_combine (nodeProperties.nodeID, beatRange);

    // Find the insertion index that keeps `inputs` sorted by start time
   #if USE_PARTITION_INSERTION
    // inputs is already sorted by start, so a binary search finds the slot in O(log n)
    const auto lower = std::partition_point (inputs.begin(), inputs.end(),
                                             [&] (const auto& i)
                                             {
                                                 return i->time.getStart() < beatRange.getStart();
                                             });
    int i = static_cast<int> (std::distance (inputs.begin(), lower));
   #else
    int i;
    for (i = 0; i < inputs.size(); ++i)
        if (inputs.getUnchecked (i)->time.getStart() >= beatRange.getStart())
            break;
   #endif

    // Extend the range so plugins that ring on (reverb/delay tails) aren't cut off abruptly
    beatRange = BeatRange (beatRange.getStart(), beatRange.getLength() + combining_node_utils::decayTimeAllowance);
    auto tan = inputs.insert (i, new TimedNode (std::move (input), beatRange));

    // add the node to any groups it's near to.
    const auto& ts = *getProcessState().getTempoSequence();
    const auto overlapTime = TimeDuration::fromSeconds (combining_node_utils::secondsPerGroup / 2 + 2);
    const auto timeRange = toTime (ts, beatRange).expanded (overlapTime);
    const auto start = std::max (0, combining_node_utils::timeToGroupIndex (timeRange.getStart()));
    const auto end = std::max (0, combining_node_utils::timeToGroupIndex (timeRange.getEnd()));

    // Make sure enough group buckets exist to cover the end of the range
    while (groups.size() <= end)
        groups.add (new juce::Array<TimedNode*>());

    // Insert the TimedNode into every bucket it (plus its overlap margin) touches,
    // again keeping each bucket sorted by start time
    for (i = start; i <= end; ++i)
    {
        auto g = groups.getUnchecked (i);

       #if USE_PARTITION_INSERTION
        const auto lowerGroup = std::partition_point (g->begin(), g->end(),
                                                      [&] (auto in)
                                                      {
                                                          return in->time.getStart() < beatRange.getStart();
                                                      });
        const int j = static_cast<int> (std::distance (g->begin(), lowerGroup));
       #else
        int j;
        for (j = 0; j < g->size(); ++j)
            if (g->getUnchecked (j)->time.getStart() >= beatRange.getStart())
                break;
       #endif

        jassert (tan != nullptr);
        g->insert (j, tan);
    }
}
216
218{
219 return inputs.size();
220}
221
223{
224 std::vector<Node*> leafNodes;
225
226 for (auto i : inputs)
227 for (auto n : i->getNodes())
228 leafNodes.push_back (n);
229
230 return leafNodes;
231}
232
237
242
244{
245 isReadyToProcessBlock.store (true, std::memory_order_release);
246 tempAudioBuffer.resize (choc::buffer::Size::create ((choc::buffer::ChannelCount) nodeProperties.numberOfChannels,
247 (choc::buffer::FrameCount) info.blockSize));
248
249 for (auto& i : inputs)
250 {
251 i->prepareToPlay (info, tempAudioBuffer.getView());
252
253 if (! i->isReadyToProcess())
254 isReadyToProcessBlock.store (false, std::memory_order_release);
255 }
256
257 // Inspect the old graph to find clips that need to be killed
258 if (info.nodeGraphToReplace != nullptr)
259 {
260 if (auto oldNode = findNode<CombiningNode> (*info.nodeGraphToReplace,
261 [itemID = itemID] (auto& cn) { return cn.itemID == itemID; }))
262 {
263 queueNoteOffsForClipsNoLongerPresent (*oldNode);
264 }
265 }
266}
267
269{
270 return isReadyToProcessBlock.load (std::memory_order_acquire);
271}
272
274{
275 SCOPED_REALTIME_CHECK
276
277 const auto editTime = getEditTimeRange();
278 prefetchGroup (referenceSampleRange, editTime, getEditBeatRange());
279
280 // Update ready to process state based on nodes intersecting this time
281 isReadyToProcessBlock.store (true, std::memory_order_release);
282
283 if (auto g = groups[combining_node_utils::timeToGroupIndex (editTime.getStart())])
284 {
285 for (auto tan : *g)
286 {
287 if (! tan->isReadyToProcess())
288 {
289 isReadyToProcessBlock.store (false, std::memory_order_release);
290 break;
291 }
292 }
293 }
294}
295
297{
298 const auto editBeats = getEditBeatRange();
299
300 SCOPED_REALTIME_CHECK
301 const auto initialEvents = pc.buffers.midi.size();
302
303 // Merge any note-offs from clips that have been deleted
304 pc.buffers.midi.mergeFromAndClear (noteOffEventsToSend);
305
306 // Then process the list
307 if (auto g = groups[combining_node_utils::timeToGroupIndex (getEditTimeRange().getStart())])
308 {
309 for (auto tan : *g)
310 {
311 if (tan->time.getEnd() > editBeats.getStart())
312 {
313 if (tan->time.getStart() >= editBeats.getEnd())
314 break;
315
316 // Clear the allocated storage
317 tempAudioBuffer.clear();
318
319 // Then process the buffer.
320 // This will use the local buffer for the Nodes in the TimedNode and put the result in pc.buffers
321 tan->process (pc);
322 }
323 }
324 }
325
326 if (pc.buffers.midi.size() > initialEvents)
327 pc.buffers.midi.sortByTimestamp();
328}
329
330size_t CombiningNode::getAllocatedBytes() const
331{
332 size_t size = tempAudioBuffer.getView().data.getBytesNeeded (tempAudioBuffer.getSize());
333
334 for (const auto& i : inputs)
335 size += i->getAllocatedBytes();
336
337 return size;
338}
339
340void CombiningNode::prefetchGroup (juce::Range<int64_t> referenceSampleRange, TimeRange editTime, BeatRange editBeats)
341{
342 if (auto g = groups[combining_node_utils::timeToGroupIndex (editTime.getStart())])
343 {
344 for (auto tan : *g)
345 {
346 if (tan->time.getEnd() > editBeats.getStart())
347 {
348 if (tan->time.getStart() >= editBeats.getEnd())
349 break;
350
351 tan->prefetchBlock (referenceSampleRange);
352 }
353 }
354 }
355}
356
/** Compares the old graph's inputs with this node's and queues MIDI note-offs
    for any LoopingMidiNodes that are no longer present, so notes started by a
    deleted clip don't hang when the graph is swapped.
*/
void CombiningNode::queueNoteOffsForClipsNoLongerPresent (const CombiningNode& oldCombiningNode)
{
    // Find any LoopingMidiNodes that are no longer present
    // Add note-offs for any note-ons they have
    std::vector<EditItemID> currentNodeIDs;

    // Collect the item IDs of every MIDI clip node still present in this graph
    for (auto timedNode : inputs)
        for (auto node : timedNode->getNodes())
            if (auto loopingMidiNode = dynamic_cast<LoopingMidiNode*> (node))
                currentNodeIDs.push_back (loopingMidiNode->getItemID());

    for (auto oldTimedNode : oldCombiningNode.inputs)
    {
        for (auto oldNode : oldTimedNode->getNodes())
        {
            if (auto oldLoopingMidiNode = dynamic_cast<LoopingMidiNode*> (oldNode))
            {
                // If this old clip's ID isn't in the new graph, silence its active notes
                if (std::find (currentNodeIDs.begin(), currentNodeIDs.end(), oldLoopingMidiNode->getItemID())
                        == currentNodeIDs.end())
                {
                    oldLoopingMidiNode->getActiveNoteList()->iterate ([this, mpeSourceID = oldLoopingMidiNode->getMPESourceID()]
                                                                      (int chan, int note)
                                                                      {
                                                                          noteOffEventsToSend.addMidiMessage (juce::MidiMessage::noteOff (chan, note), mpeSourceID);
                                                                      });
                }
            }
        }
    }
}
387
388}} // namespace tracktion { inline namespace engine
assert
T begin(T... args)
void process(ProcessContext &) override
Called when the node is to be processed.
void addInput(std::unique_ptr< Node >, TimeRange)
Adds an input node to be played at a given time range.
int getNumInputs() const
Returns the number of inputs added.
void prefetchBlock(juce::Range< int64_t >) override
Called before once on all Nodes before they are processed.
tracktion::graph::NodeProperties getNodeProperties() override
Should return the properties of the node.
std::vector< Node * > getDirectInputNodes() override
Should return all the inputs directly feeding in to this node.
void prepareToPlay(const tracktion::graph::PlaybackInitialisationInfo &) override
Called once before playback begins for each node.
bool isReadyToProcess() override
Should return true when this node is ready to be processed.
std::vector< Node * > getInternalNodes() override
Returns the inputs that have been added.
Base class for Nodes that provides information about the current process call.
TimeRange getEditTimeRange() const
Returns the edit time range of the current process block.
ProcessState & getProcessState()
Returns the ProcessState in use.
BeatRange getEditBeatRange() const
Returns the edit beat range of the current process block.
Struct to describe a single iteration of a process call.
T distance(T... args)
T end(T... args)
T find(T... args)
T is_pointer_v
#define jassert(expression)
#define JUCE_DECLARE_NON_COPYABLE(className)
T load(T... args)
T max(T... args)
T min(T... args)
BeatPosition toBeats(TimePosition tp, const TempoSequence &ts)
Converts a TimePosition to a BeatPosition given a TempoSequence.
TimePosition toTime(BeatPosition bp, const TempoSequence &ts)
Converts a BeatPosition to a TimePosition given a TempoSequence.
std::vector< Node * > getNodes(Node &node, VertexOrdering vertexOrdering)
Returns all the nodes in a Node graph in the order given by vertexOrdering.
T partition_point(T... args)
T push_back(T... args)
T store(T... args)
ID for objects of type EditElement - e.g.
Holds the state of a process call.
const tempo::Sequence * getTempoSequence() const
Returns the tempo::Sequence this state has been initialised with one.
Holds a view over some data and optionally some storage for that data.
Holds some really basic properties of a node.
Passed into Nodes when they are being initialised, to give them useful contextual information that th...
tan
time