tracktion-engine 3.0-10-g034fdde4aa5
Tracktion Engine — High level data model for audio applications

« « « Anklang Documentation
Loading...
Searching...
No Matches
tracktion_EditPlaybackContext.cpp
Go to the documentation of this file.
1 /*
2 ,--. ,--. ,--. ,--.
3 ,-' '-.,--.--.,--,--.,---.| |,-.,-' '-.`--' ,---. ,--,--, Copyright 2024
4 '-. .-'| .--' ,-. | .--'| /'-. .-',--.| .-. || \ Tracktion Software
5 | | | | \ '-' \ `--.| \ \ | | | |' '-' '| || | Corporation
6 `---' `--' `--`--'`---'`--'`--' `---' `--' `---' `--''--' www.tracktion.com
7
8 Tracktion Engine uses a GPL/commercial licence - see LICENCE.md for details.
9*/
10
11namespace tracktion { inline namespace engine
12{
13
14//==============================================================================
15//==============================================================================
16namespace EditPlaybackContextInternal
17{
18 inline int& getThreadPoolStrategyType()
19 {
20 static int type = static_cast<int> (tracktion::graph::ThreadPoolStrategy::lightweightSemHybrid);
21 return type;
22 }
23
24 inline bool& getPooledMemoryFlag()
25 {
26 static bool usePool = false;
27 return usePool;
28 }
29
30 inline bool& getNodeMemorySharingFlag()
31 {
32 static bool useSharing = false;
33 return useSharing;
34 }
35
36 inline bool& getAudioWorkgroupFlag()
37 {
38 static bool useAudioWorkgroup = false;
39 return useAudioWorkgroup;
40 }
41
42 inline juce::AudioWorkgroup getAudioWorkgroupIfEnabled (Engine& e)
43 {
44 if (! getAudioWorkgroupFlag())
45 return {};
46
47 return e.getDeviceManager().deviceManager.getDeviceAudioWorkgroup();
48 }
49
50 inline size_t getMaxNumThreadsToUse (Edit& edit)
51 {
52 if (edit.getIsPreviewEdit())
53 return 0;
54
55 auto wg = getAudioWorkgroupIfEnabled (edit.engine);
56 return wg ? wg.getMaxParallelThreadCount() - 1
57 : static_cast<size_t> (juce::SystemStats::getNumCpus() - 1);
58 }
59}
60
61
62//==============================================================================
63//==============================================================================
{
    //==============================================================================
    ContextSyncroniser() = default;

    //==============================================================================
    // Action for the follower context to take after comparing the two playheads.
    // NOTE(review): the enum body appears truncated in this view — getSyncAction
    // below also returns rollInToLoop and breakSync; confirm against the full file.
    enum class SyncAction
    {
        none,
    };

    // Result of a sync comparison: the action plus the timeline position to apply.
    // NOTE(review): the struct's declaration line (SyncAndPosition, used by
    // getSyncAction below) is elided in this view.
    {
        SyncAction action;
        TimePosition position;
    };
83 //==============================================================================
84 SyncAndPosition getSyncAction (tracktion::graph::PlayHead& sourcePlayHead, tracktion::graph::PlayHead& destPlayHead,
85 double sampleRate)
86 {
87 if (! isValid)
88 return { SyncAction::none, {} };
89
90 const auto sourceTimelineTime = TimePosition::fromSamples (sourcePlayHead.getPosition(), sampleRate);
91 const auto millisecondsSinceEpoch = std::chrono::duration_cast<std::chrono::milliseconds> (sourcePlayHead.getLastUserInteractionTime().time_since_epoch()).count();
92 const juce::Time sourceLastInteractionTime (static_cast<int64_t> (millisecondsSinceEpoch));
93
94 const auto destTimelineTime = TimePosition::fromSamples (destPlayHead.getPosition(), sampleRate);
95 const auto destLoopDuration = TimeDuration::fromSamples (destPlayHead.getLoopRange().getLength(), sampleRate);
96
97 return getSyncAction (sourceTimelineTime, sourcePlayHead.isPlaying(), sourceLastInteractionTime,
98 destTimelineTime, destPlayHead.isLooping(), destLoopDuration);
99 }
100
101 void reset (TimePosition previousBarTime_, TimeDuration syncInterval_)
102 {
103 hasSynced = false;
104 previousBarTime = previousBarTime_;
105 syncInterval = syncInterval_;
106 isValid = true;
107 }
108
private:
    //==============================================================================
    TimePosition previousBarTime;           // bar start used as the phase reference
    TimeDuration syncInterval;              // length of a sync unit, e.g. one bar
    TimePosition lastSourceTimelineTime;    // source position seen on the previous call
    bool hasSynced = false, isValid = false;
115
    /** Core decision logic, in timeline terms. Outcomes:
          - first call since reset(): compute a roll-in position so the follower
            lands on the next sync-interval boundary of the source;
          - source stopped or jumped by > 0.2s: either break the sync (recent user
            interaction or stopped) or re-align to the source's phase;
          - otherwise: no action.
    */
    SyncAndPosition getSyncAction (const TimePosition sourceTimelineTime, const bool sourceIsPlaying, const juce::Time sourceLastInteractionTime,
                                   const TimePosition destTimelineTime, const bool destIsLooping, const TimeDuration destLoopDuration)
    {
        // On scope exit, remember the source position seen on this call
        const juce::ScopedValueSetter<TimePosition> lastSourceTimelineTimeUpdater (lastSourceTimelineTime, lastSourceTimelineTime,
                                                                                   sourceTimelineTime);

        if (! hasSynced)
        {
            // Phase of the source within the current sync interval
            const auto sourceDurationSinceLastBarStart = TimeDuration::fromSeconds (std::fmod ((sourceTimelineTime - previousBarTime).inSeconds(), syncInterval.inSeconds()));
            jassert (sourceTimelineTime - previousBarTime >= TimeDuration());
            jassert (sourceDurationSinceLastBarStart > TimeDuration());

            // Start "before zero" so playback reaches 0 exactly on the next boundary
            auto newTimelineTime = sourceDurationSinceLastBarStart - syncInterval;

            // If the next bar is too far away, start playing now
            if (destIsLooping && newTimelineTime < -syncInterval)
                newTimelineTime = sourceDurationSinceLastBarStart;

            newTimelineTime = TimeDuration::fromSeconds (std::fmod (newTimelineTime.inSeconds(), destLoopDuration.inSeconds()));
            hasSynced = true;

            return { SyncAction::rollInToLoop, toPosition (newTimelineTime) };
        }

        // Source stopped, or its position jumped discontinuously (> 0.2s)
        if (! sourceIsPlaying || std::abs (lastSourceTimelineTime.inSeconds() - sourceTimelineTime.inSeconds()) > 0.2)
        {
            auto rt = juce::Time::getCurrentTime() - sourceLastInteractionTime;

            if (! sourceIsPlaying || rt.inSeconds() < 0.2)
            {
                // user has moved or stopped the playhead -- break the sync
                isValid = false;

                return { SyncAction::breakSync, {} };
            }
            else
            {
                // This +0.1 is here to prevent rounding errors causing the playhead to jump back by a sync interval each loop iteration
                const auto sourceDurationSinceLastBarStart = TimeDuration::fromSeconds (std::fmod ((sourceTimelineTime - previousBarTime).inSeconds(), syncInterval.inSeconds()));
                auto newTimelineTime = syncInterval * std::floor ((destTimelineTime.inSeconds() + 0.1) / syncInterval.inSeconds()) + sourceDurationSinceLastBarStart;
                newTimelineTime = TimeDuration::fromSeconds (std::fmod (newTimelineTime.inSeconds(), destLoopDuration.inSeconds()));

                return { SyncAction::rollInToLoop, toPosition (newTimelineTime) };
            }
        }

        return { SyncAction::none, {} };
    }
164};
165
166
167//==============================================================================
168//==============================================================================
{
    // Builds the lock-free playback engine for an EditPlaybackContext.
    // NOTE(review): one of the player's constructor arguments is elided in this
    // view (between processState and the workgroup) — confirm against the full file.
    NodePlaybackContext (EditPlaybackContext& epc, size_t numThreads, size_t maxNumThreadsToUse)
        : editPlaybackContext (epc),
          player (processState,
                  EditPlaybackContextInternal::getAudioWorkgroupIfEnabled (tempoSequence.edit.engine)),
          maxNumThreads (maxNumThreadsToUse)
    {
        // Keep modifier timers, the MIDI dispatcher (and Ableton Link, if enabled)
        // in step with the playback position whenever continuity updates
        processState.onContinuityUpdated = [this]
        {
            const auto syncRange = processState.getSyncRange();
            const auto editTime = syncRange.start.time;
            editPlaybackContext.edit.updateModifierTimers (editTime, static_cast<int> (getNumSamples (syncRange)));
            editPlaybackContext.midiDispatcher.masterTimeUpdate (editTime);

           #if TRACKTION_ENABLE_ABLETON_LINK
            editPlaybackContext.edit.getAbletonLink().syncronise (editTime);
           #endif
        };

        setNumThreads (numThreads);
        player.enablePooledMemoryAllocations (EditPlaybackContextInternal::getPooledMemoryFlag());
        player.enableNodeMemorySharing (EditPlaybackContextInternal::getNodeMemorySharingFlag());
    }
194
    // Sets the worker thread count, clamped to the limit fixed at construction.
    void setNumThreads (size_t numThreads)
    {
        player.setNumThreads (std::min (numThreads, maxNumThreads));
    }
200
201 void setNode (std::unique_ptr<Node> node, double sampleRate, int blockSize)
202 {
203 jassert (sampleRate > 0.0);
204 jassert (blockSize > 0);
205 blockSize = juce::roundToInt (blockSize * (1.0 + (10.0 * 0.01))); // max speed comp
206 player.setNode (std::move (node), sampleRate, blockSize);
207
208 if (auto currentNode = player.getNode())
209 latencySamples = currentNode->getNodeProperties().latencyNumSamples;
210 }
211
    // Removes the current graph; playback becomes silent until a new node is set.
    void clearNode()
    {
        player.clearNode();
    }
216
    // Latency (in samples) of the currently installed node, cached by setNode().
    int getLatencySamples() const
    {
        return latencySamples;
    }
221
222 void postPosition (TimePosition positionToJumpTo, std::optional<TimePosition> whenToJump)
223 {
224 pendingPosition.store (positionToJumpTo.inSeconds(), std::memory_order_release);
225
226 if (whenToJump)
227 {
228 pendingPositionJumpTime.store (whenToJump->inSeconds(), std::memory_order_release);
229 pendingPositionJumpTimeValid.store (true, std::memory_order_release);
230 }
231 else
232 {
233 pendingPositionJumpTimeValid.store (false, std::memory_order_release);
234 }
235
236 pendingRollInToLoop.store (false, std::memory_order_release);
237 positionUpdatePending = true;
238 }
239
240 std::optional<TimePosition> getPendingPositionChange() const
241 {
242 if (! positionUpdatePending.load (std::memory_order_relaxed))
243 return {};
244
245 return TimePosition::fromSeconds (pendingPosition.load (std::memory_order_relaxed));
246 }
247
248 void postRollInToLoop (double newPosition)
249 {
250 pendingPosition.store (newPosition, std::memory_order_release);
251 pendingRollInToLoop.store (true, std::memory_order_release);
252 positionUpdatePending = true;
253 }
254
255 void setSpeedCompensation (double plusOrMinus)
256 {
257 speedCompensation = juce::jlimit (-10.0, 10.0, plusOrMinus);
258 }
259
260 void setTempoAdjustment (double plusOrMinusProportion)
261 {
262 blockLengthScaleFactor = 1.0 + std::clamp (plusOrMinusProportion, -0.5, 0.5);
263 }
264
    // If the tempo sequence has changed since the last processed block (detected
    // by hash), remap the last processed beat position through the new sequence
    // and move the playhead there so musical position is preserved.
    // tempoState itself is refreshed at the end of process().
    void checkForTempoSequenceChanges()
    {
        const auto& internalSequence = tempoSequence.getInternalSequence();

        if (internalSequence.hash() == tempoState.hash)
            return;

        const auto lastPositionRemapped = internalSequence.toTime (tempoState.lastBeatPosition);
        const auto lastSampleRemapped = toSamples (lastPositionRemapped, getSampleRate());
        playHead.overridePosition (lastSampleRemapped);
    }
276
277 void updateReferenceSampleRange (int numSamples)
278 {
279 if (speedCompensation != 0.0)
280 numSamples = juce::roundToInt (numSamples * (1.0 + (speedCompensation * 0.01)));
281
282 double sampleDuration = static_cast<double> (numSamples);
283
284 if (blockLengthScaleFactor != 1.0)
285 sampleDuration *= blockLengthScaleFactor;
286
287 referenceStreamRange = juce::Range<double>::withStartAndLength (referenceStreamRange.getEnd(), sampleDuration);
288 playHead.setReferenceSampleRange (getReferenceSampleRange());
289 numSamplesToProcess = static_cast<choc::buffer::FrameCount> (numSamples);
290 processState.setPlaybackSpeedRatio (blockLengthScaleFactor);
291
292 // This needs to be called after the playhead reference range has been set above
293 checkForTempoSequenceChanges();
294 }
295
296 void resyncToReferenceSampleRange (juce::Range<int64_t> newReferenceSampleRange)
297 {
298 const double sampleRate = getSampleRate();
299 const auto currentPos = tracktion::graph::sampleToTime (playHead.getPosition(), sampleRate);
300 referenceStreamRange = juce::Range<double> (static_cast<double> (newReferenceSampleRange.getStart()),
301 static_cast<double> (newReferenceSampleRange.getEnd()));
302 playHead.setReferenceSampleRange (getReferenceSampleRange());
303 playHead.setPosition (tracktion::graph::timeToSample (currentPos, sampleRate));
304 }
305
    /** Renders the next block into the supplied channel pointers.
        Applies any cross-thread position change first, then either processes
        directly into the output or, when the requested size differs from the
        compensated size, renders into a scratch buffer and resamples.
        Runs on the audio thread.
    */
    void process (float* const* allChannels, int numChannels, int destNumSamples)
    {
        const auto referenceSampleRange = getReferenceSampleRange();
        const double sampleRate = getSampleRate();

        // Apply a position change posted via postPosition()/postRollInToLoop()
        if (positionUpdatePending.load (std::memory_order_acquire))
        {
            bool shouldPerformPositionChange = true;

            // A deferred jump only fires when its target time falls inside this block
            if (pendingPositionJumpTimeValid)
            {
                const auto currentTimeSeconds = sampleToTime (juce::Range<int64_t>::withStartAndLength (playHead.getPosition(), referenceSampleRange.getLength()), sampleRate);
                const auto jumpTimeSeconds = pendingPositionJumpTime.load (std::memory_order_acquire);

                // Check if loop end time is in this block and if it is, cancel the jump
                const bool loopEndIsInThisBlock = playHead.isLooping()
                                                    && currentTimeSeconds.contains (sampleToTime (playHead.getLoopRange().getEnd(), sampleRate));

                if (loopEndIsInThisBlock)
                {
                    pendingPositionJumpTimeValid = false;
                    pendingPositionJumpTime = 0.0;

                    pendingPosition = 0.0;
                    positionUpdatePending = false;

                    shouldPerformPositionChange = false;
                }

                if (currentTimeSeconds.contains (jumpTimeSeconds))
                {
                    pendingPositionJumpTimeValid = false;
                    pendingPositionJumpTime = 0.0;
                }
                else
                {
                    // Target time not reached yet; keep the jump queued
                    shouldPerformPositionChange = false;
                }
            }

            if (shouldPerformPositionChange)
            {
                // exchange() guarantees a single consumer applies the update
                if (positionUpdatePending.exchange (false))
                {
                    const auto samplePos = timeToSample (pendingPosition.load (std::memory_order_acquire), sampleRate);

                    if (pendingRollInToLoop.load (std::memory_order_acquire))
                        playHead.setRollInToLoop (samplePos);
                    else
                        playHead.setPosition (samplePos);
                }
            }
        }

        scratchMidiBuffer.clear();

        // Once the interpolator path has been entered we stay on it, so the
        // interpolators' filter state remains continuous between blocks
        if (isUsingInterpolator || destNumSamples != (int) numSamplesToProcess)
        {
            // Initialise interpolators
            isUsingInterpolator = true;
            ensureNumInterpolators (numChannels);

            // Process required num samples
            scratchAudioBuffer.setSize (numChannels, (int) numSamplesToProcess, false, false, true);
            scratchAudioBuffer.clear();

            tracktion::graph::Node::ProcessContext pc { numSamplesToProcess, referenceSampleRange, { tracktion::graph::toBufferView (scratchAudioBuffer), scratchMidiBuffer } };
            player.process (pc);

            // Then resample them to the dest num samples
            const double ratio = numSamplesToProcess / (double) destNumSamples;

            for (int channel = 0; channel < numChannels; ++channel)
            {
                const auto src = scratchAudioBuffer.getReadPointer (channel);
                const auto dest = allChannels[channel];

                interpolators[(size_t) channel]->processAdding (ratio, src, dest, destNumSamples, 1.0f);
            }
        }
        else
        {
            // Sizes match: render straight into the destination buffers
            auto audioView = choc::buffer::createChannelArrayView (allChannels,
                                                                   (choc::buffer::ChannelCount) numChannels,
                                                                   numSamplesToProcess);
            tracktion::graph::Node::ProcessContext pc { numSamplesToProcess, referenceSampleRange, { audioView, scratchMidiBuffer } };
            player.process (pc);
        }

        // Record the tempo hash and beat position this block ended at, for
        // checkForTempoSequenceChanges() to compare against next block
        tempoState = { tempoSequence.getInternalSequence().hash(),
                       processState.editBeatRange.getEnd() };
    }
398
    // Sample rate the player is currently prepared with.
    double getSampleRate() const
    {
        return player.getSampleRate();
    }
403
    // Snapshot of the current reference/timeline sync point from the process state.
    SyncPoint getSyncPoint() const
    {
        return processState.getSyncPoint();
    }
408
    EditPlaybackContext& editPlaybackContext;   // owning context (outlives this object)
    const TempoSequence& tempoSequence { editPlaybackContext.edit.tempoSequence };
    // NOTE(review): the PlayHead member declaration itself is elided in this view,
    // but playHeadState below references it — confirm against the full file.
    tracktion::graph::PlayHeadState playHeadState { playHead };
    ProcessState processState { playHeadState, tempoSequence };

private:
    juce::AudioBuffer<float> scratchAudioBuffer;    // used only on the interpolator path
    MidiMessageArray scratchMidiBuffer;
    TracktionNodePlayer player;
    const size_t maxNumThreads;                     // upper bound for setNumThreads()

    int latencySamples = 0;
    choc::buffer::FrameCount numSamplesToProcess = 0;
    juce::Range<double> referenceStreamRange;       // fractional, to carry scale-factor remainders
    std::atomic<double> pendingPosition { 0.0 }, pendingPositionJumpTime { 0.0 };
    std::atomic<bool> positionUpdatePending { false }, pendingRollInToLoop { false }, pendingPositionJumpTimeValid { false };
    double speedCompensation = 0.0, blockLengthScaleFactor = 1.0;
    // NOTE(review): a member declaration is elided here in this view — presumably
    // the interpolators container used by process()/ensureNumInterpolators().
    bool isUsingInterpolator = false;

    // Tempo-sequence snapshot taken at the end of each processed block.
    struct TempoState
    {
        size_t hash = 0;                // hash of the internal tempo sequence
        BeatPosition lastBeatPosition;  // beat position the block ended at
    };

    TempoState tempoState;
437
438 juce::Range<int64_t> getReferenceSampleRange() const
439 {
440 return { static_cast<int64_t> (std::llround (referenceStreamRange.getStart())),
441 static_cast<int64_t> (std::llround (referenceStreamRange.getEnd())) };
442 }
443
    // Grows the interpolator pool so there is one per channel.
    // NOTE(review): the loop body (presumably appending a new interpolator to
    // the container) is elided in this view — confirm against the full file.
    void ensureNumInterpolators (int numRequired)
    {
        for (size_t i = interpolators.size(); i < (size_t) numRequired; ++i)
    }
449};
450
451//==============================================================================
452EditPlaybackContext::ScopedDeviceListReleaser::ScopedDeviceListReleaser (EditPlaybackContext& e, bool reallocate)
453 : owner (e), shouldReallocate (reallocate)
454{
455 // Do this here to avoid creating the audio nodes upon reallocation causing a deadlock upon plugin initialisation
456 if (reallocate)
457 owner.isAllocated = false;
458
459 owner.releaseDeviceList();
460}
461
// Rebuilds the device list on destruction when reallocation was requested.
EditPlaybackContext::ScopedDeviceListReleaser::~ScopedDeviceListReleaser()
{
    if (shouldReallocate)
        owner.rebuildDeviceList();
}
467
468//==============================================================================
// Creates the playback context for a TransportControl's Edit. Builds the node
// playback engine and syncroniser only when the Edit is allowed to play.
EditPlaybackContext::EditPlaybackContext (TransportControl& tc)
    : edit (tc.edit), transport (tc)
{
    if (edit.isRendering())
    {
        // A playback context shouldn't exist during an offline render
        TRACKTION_LOG_ERROR("EditPlaybackContext created whilst rendering");
    }

    if (edit.shouldPlay())
    {
        nodePlaybackContext = std::make_unique<NodePlaybackContext> (*this,
                                                                     edit.engine.getEngineBehaviour().getNumberOfCPUsToUseForAudio(),
                                                                     EditPlaybackContextInternal::getMaxNumThreadsToUse (edit));
        contextSyncroniser = std::make_unique<ContextSyncroniser>();

        // This ensures the referenceSampleRange of the new context has been synced
        edit.engine.getDeviceManager().addContext (this);

        // Set the playhead position as early as possible so it doesn't revert to 0 in the TransportControl
        nodePlaybackContext->playHead.setPosition (toSamples (transport.getPosition(),
                                                              edit.engine.getDeviceManager().getSampleRate()));
    }

    rebuildDeviceList();
}
495
// Releases all device instances and deregisters from the device manager.
EditPlaybackContext::~EditPlaybackContext()
{
    TRACKTION_ASSERT_MESSAGE_THREAD
    releaseDeviceList();
    edit.engine.getDeviceManager().removeContext (this);
}
502
// Tears down all input/output device instances, preserving the isAllocated
// flag across the clearNodes() call so a later rebuild can restore it.
void EditPlaybackContext::releaseDeviceList()
{
    TRACKTION_ASSERT_MESSAGE_THREAD

    const juce::ScopedValueSetter<bool> alocateSetter (isAllocated, isAllocated);
    clearNodes();
    midiDispatcher.setMidiDeviceList (juce::OwnedArray<MidiOutputDeviceInstance>());

    // Clear the outputs before the inputs as the midi inputs will be referenced by the MidiDeviceInstanceBase::Consumer
    waveOutputs.clear();
    midiOutputs.clear();
    waveInputs.clear();
    midiInputs.clear();
}
518
// Creates instances for every enabled hardware device plus the per-track
// virtual input devices, then reallocates the graph if it was allocated before.
// Must be called with the device lists empty (see releaseDeviceList()).
void EditPlaybackContext::rebuildDeviceList()
{
    TRACKTION_ASSERT_MESSAGE_THREAD

    jassert (waveInputs.isEmpty() && midiInputs.isEmpty()
              && midiOutputs.isEmpty() && waveOutputs.isEmpty());

    auto& dm = edit.engine.getDeviceManager();

    for (auto mo : dm.midiOutputs)
        if (mo->isEnabled())
            midiOutputs.add (mo->createInstance (*this));

    for (auto mi : dm.midiInputs)
        if (mi->isEnabled() && mi->isAvailableToEdit())
            midiInputs.add (mi->createInstance (*this));

    for (auto wo : dm.waveOutputs)
        if (wo->isEnabled())
            waveOutputs.add (wo->createInstance (*this));

    for (auto wi : dm.waveInputs)
        if (wi->isEnabled() && wi->isAvailableToEdit())
            waveInputs.add (wi->createInstance (*this));

    // Per-track virtual input devices (track wave/MIDI inputs)
    for (auto* at : getAudioTracks (edit))
    {
        auto& twid = at->getWaveInputDevice();
        auto& tmid = at->getMidiInputDevice();

        if (twid.isEnabled())   waveInputs.add (twid.createInstance (*this));
        if (tmid.isEnabled())   midiInputs.add (tmid.createInstance (*this));
    }

    midiDispatcher.setMidiDeviceList (midiOutputs);

    if (isAllocated)
        reallocate();
}
559
// Detaches any instances belonging to the given device and reallocates the
// graph if needed.
void EditPlaybackContext::removeInstanceForDevice (InputDevice& device)
{
    // Keep the instances alive until after reallocating because the nodes will refer to them

    for (int i = midiInputs.size(); --i >= 0;)
        if (&midiInputs.getUnchecked (i)->owner == &device)
            removedInstances.add (midiInputs.removeAndReturn (i));

    for (int i = waveInputs.size(); --i >= 0;)
        if (&waveInputs.getUnchecked (i)->owner == &device)
            removedInstances.add (waveInputs.removeAndReturn (i));

    if (! removedInstances.isEmpty() && isAllocated)
        reallocate();
}
576
// NOTE(review): the function signature line is elided in this view — from the
// parallel MIDI version below this is presumably
// void EditPlaybackContext::addWaveInputDeviceInstance (InputDevice& device).
{
    // The device must not already have an instance in this context
    jassert (getInputFor (&device) == nullptr);

    if (waveInputs.add (device.createInstance (*this)) != nullptr && isAllocated)
        reallocate();
}
584
585void EditPlaybackContext::addMidiInputDeviceInstance (InputDevice& device)
586{
587 jassert (getInputFor (&device) == nullptr);
588
589 if (midiInputs.add (device.createInstance (*this)) != nullptr && isAllocated)
590 reallocate();
591}
592
// Stops all outputs, discards any in-progress recordings and tears down the
// playback graph.
void EditPlaybackContext::clearNodes()
{
    for (auto mo : midiOutputs)
        mo->stop();

    {
        // Abandon any active recordings rather than keeping their takes
        InputDeviceInstance::StopRecordingParameters params;
        params.discardRecordings = true;

        for (auto mi : midiInputs)
            mi->stopRecording (params);

        for (auto wi : waveInputs)
            wi->stopRecording (params);
    }

    priorityBooster = nullptr;
    isAllocated = false;

    // Because the nodePlaybackContext is lock-free, it doesn't immediately delete its current node
    // so we need to explicity call clearNode
    if (nodePlaybackContext)
    {
        nodePlaybackContext->playHead.stop();
        nodePlaybackContext->clearNode();
        nodePlaybackContext->setNumThreads (0);
    }
}
623
// Builds a fresh playback graph for the Edit from the current device settings
// and installs it in the node player. Bails out (clearing everything) when the
// device manager reports an invalid sample rate or block size.
void EditPlaybackContext::createNode()
{
    // Reset this until it's updated by the play graph
    audiblePlaybackTime = transport.getPosition().inSeconds();

    isAllocated = true;

    auto& dm = edit.engine.getDeviceManager();
    CreateNodeParams cnp { nodePlaybackContext->processState };
    cnp.sampleRate = dm.getSampleRate();
    cnp.blockSize = dm.getBlockSize();

    if (cnp.sampleRate <= 0.0 || cnp.blockSize <= 0)
    {
        clearNodes();
        return;
    }

    auto& engineBehaviour = edit.engine.getEngineBehaviour();
    cnp.includeBypassedPlugins = ! engineBehaviour.shouldBypassedPluginsBeRemovedFromPlaybackGraph();
    cnp.allowClipSlots = engineBehaviour.areClipSlotsEnabled();
    cnp.readAheadTimeStretchNodes = engineBehaviour.enableReadAheadForTimeStretchNodes();
    auto editNode = createNodeForEdit (*this, audiblePlaybackTime, cnp);

    nodePlaybackContext->setNode (std::move (editNode), cnp.sampleRate, cnp.blockSize);
    updateNumCPUs();
}
652
// Builds the playback graph then starts playback from the given time.
void EditPlaybackContext::createPlayAudioNodes (TimePosition startTime)
{
    createNode();
    startPlaying (startTime);
}
658
659void EditPlaybackContext::createPlayAudioNodesIfNeeded (TimePosition startTime)
660{
661 if (! isAllocated)
662 createPlayAudioNodes (startTime);
663}
664
// Rebuilds the playback graph, resuming from the current playhead position.
void EditPlaybackContext::reallocate()
{
    createPlayAudioNodes (getPosition());
}
669
670void EditPlaybackContext::startPlaying (TimePosition start)
671{
672 prepareOutputDevices (start);
673
674 if (priorityBooster == nullptr)
675 priorityBooster = std::make_unique<ProcessPriorityBooster> (edit.engine);
676
677 for (auto mo : midiOutputs)
678 mo->start();
679}
680
/** Prepares every armed input for recording, then starts playback and the
    recordings together.
    @param start    timeline position recording starts at
    @param punchIn  punch-in position for the default recording parameters
    @returns        ok, or a failure describing why recording couldn't start
*/
juce::Result EditPlaybackContext::startRecording (TimePosition start, TimePosition punchIn)
{
    // Pairs an input with the context it returned from prepareToRecord()
    struct InputAndContext
    {
        InputDeviceInstance* input = nullptr;
        InputDeviceInstance::PreparedContext preparedContext;
    };

    std::vector<InputAndContext> inputAndContexts;

    auto anyContextHasErrors = [&inputAndContexts]
    {
        for (auto& inputAndContext : inputAndContexts)
            if (hasErrors (inputAndContext.preparedContext))
                return true;

        return false;
    };

    // Prepare each armed input, stopping early as soon as any reports an error
    for (int i = waveInputs.size(); --i >= 0 && ! anyContextHasErrors();)
        if (auto wi = waveInputs.getUnchecked (i))
            if (wi->isRecordingActive())
                inputAndContexts.push_back ({ wi, wi->prepareToRecord (getDefaultRecordingParameters (*this, start, punchIn)) });

    for (int i = midiInputs.size(); --i >= 0 && ! anyContextHasErrors();)
        if (auto mi = midiInputs.getUnchecked (i))
            if (mi->isRecordingActive())
                inputAndContexts.push_back ({ mi, mi->prepareToRecord (getDefaultRecordingParameters (*this, start, punchIn)) });

    // Check if any devices started
    {
        bool anyContexts = false;

        for (auto& inputAndContext : inputAndContexts)
            if (! inputAndContext.preparedContext.empty())
                anyContexts = true;

        if (! anyContexts)
            return juce::Result::fail (TRANS("Failed to start recording: No input devices"));
    }

    // Check if any had errors
    for (auto& inputAndContext : inputAndContexts)
        for (auto& res : inputAndContext.preparedContext)
            if (! res)
                return juce::Result::fail (res.error());

    // Now start the recordings
    startPlaying (start);

    for (auto& inputAndContext : inputAndContexts)
    {
        if (! inputAndContext.input->isRecordingActive())
            continue;

        [[ maybe_unused ]] auto [preparedContext, error] = extract (std::move (inputAndContext.preparedContext));
        [[ maybe_unused ]] auto contextsLeft = inputAndContext.input->startRecording (std::move (preparedContext));
        jassert (contextsLeft.empty());
    }

    return juce::Result::ok();
}
743
// Prepares all output device instances and the MIDI dispatcher for playback.
void EditPlaybackContext::prepareOutputDevices (TimePosition start)
{
    auto& dm = edit.engine.getDeviceManager();
    double sampleRate = dm.getSampleRate();
    int blockSize = dm.getBlockSize();

    // TODO: This feels wrong now as the global stream time should be a TimePosition...
    // It looks like start already is an EditTime
    // It also looks like MidiOutputDeviceInstance::prepareToPlay doesn't actually use the
    // time so is likely to be a wrong but unused step
    start = globalStreamTimeToEditTime (start.inSeconds());

    for (auto wo : waveOutputs)
        wo->prepareToPlay (sampleRate, blockSize);

    for (auto mo : midiOutputs)
        mo->prepareToPlay (start, true);

    midiDispatcher.prepareToPlay (start);
}
765
// Ensures the playback graph exists before playback begins.
void EditPlaybackContext::prepareForPlaying (TimePosition startTime)
{
    createPlayAudioNodesIfNeeded (startTime);
}
770
// Ensures the playback graph exists, then starts recording.
// NOTE(review): startRecording()'s juce::Result is discarded here, so callers
// can't observe preparation failures — confirm whether this is intentional.
void EditPlaybackContext::prepareForRecording (TimePosition startTime, TimePosition punchIn)
{
    createPlayAudioNodesIfNeeded (startTime);
    startRecording (startTime, punchIn);
}
776
777static SelectionManager* findAppropriateSelectionManager (Edit& ed)
778{
779 SelectionManager* found = nullptr;
780
781 for (SelectionManager::Iterator iter; iter.next();)
782 if (auto sm = iter.get())
783 if (sm->getEdit() == &ed)
784 if (found == nullptr || found->editViewID == -1)
785 found = sm;
786
787 return found;
788}
789
// Stops recording on a single input, ending it at the current unlooped
// position; returns the created clips or an error message.
tl::expected<Clip::Array, juce::String> EditPlaybackContext::stopRecording (InputDeviceInstance& in, bool discardRecordings)
{
    TRACKTION_ASSERT_MESSAGE_THREAD

    InputDeviceInstance::StopRecordingParameters params;
    params.unloopedTimeToEndRecording = getUnloopedPosition();
    params.isLooping = transport.looping;
    params.markedRange = transport.getLoopRange();
    params.discardRecordings = discardRecordings;
    return in.stopRecording (params);
}
802
// Stops recording on every input at the given unlooped end time, collecting
// all created clips. If any input fails, the (last) error is returned instead.
tl::expected<Clip::Array, juce::String> EditPlaybackContext::stopRecording (TimePosition unloopedEnd, bool discardRecordings)
{
    TRACKTION_ASSERT_MESSAGE_THREAD
    Clip::Array clips;
    juce::String error;

    InputDeviceInstance::StopRecordingParameters params;
    params.unloopedTimeToEndRecording = unloopedEnd;
    params.isLooping = transport.looping;
    params.markedRange = transport.getLoopRange();
    params.discardRecordings = discardRecordings;

    for (auto in : getAllInputs())
        in->stopRecording (params)
            .map ([&] (auto c) { clips.addArray (std::move (c)); })
            .map_error ([&] (auto err) { error = err; });

    if (! error.isEmpty())
        return tl::unexpected (error);

    return clips;
}
826
/** Turns the inputs' retrospective buffers into clips.
    @param clips     if non-null, receives the created clips (which are then selected)
    @param armedOnly when true, only record-armed inputs participate
    @returns         ok, or a failure when no inputs are assigned or all buffers were empty
*/
juce::Result EditPlaybackContext::applyRetrospectiveRecord (juce::Array<Clip*>* clips, bool armedOnly)
{
    TRACKTION_ASSERT_MESSAGE_THREAD

    bool inputAssigned = false;

    for (auto in : getAllInputs())
    {
        if (isAttached (*in) && (! armedOnly || in->isRecordingActive()))
        {
            inputAssigned = true;
            break;
        }
    }

    if (! inputAssigned)
        return juce::Result::fail (TRANS("Unable to perform retrospective record, no inputs are assigned to a track"));

    // Lock the retrospective buffers so they aren't modified while we read them
    InputDevice::setRetrospectiveLock (edit.engine, getAllInputs(), true);

    bool clipCreated = false;

    for (auto in : getAllInputs())
    {
        for (auto clip : in->applyRetrospectiveRecord (armedOnly))
        {
            if (clips != nullptr)
                clips->add (clip);

            clipCreated = true;
        }
    }

    InputDevice::setRetrospectiveLock (edit.engine, getAllInputs(), false);

    if (! clipCreated)
        return juce::Result::fail (TRANS("Unable to perform retrospective record, all input buffers are empty"));

    if (clips != nullptr)
    {
        // Show the new clips to the user
        if (auto sm = findAppropriateSelectionManager (edit))
        {
            sm->select (*clips);
            sm->keepSelectedObjectsOnScreen();
        }
    }

    return juce::Result::ok();
}
877
// Returns all wave and MIDI input instances as one array.
// NOTE(review): a line is elided here in this view — presumably clearing (or
// locally declaring) allInputs before repopulating; confirm against the full
// file, otherwise entries would accumulate on every call.
juce::Array<InputDeviceInstance*> EditPlaybackContext::getAllInputs()
{
    allInputs.addArray (waveInputs);
    allInputs.addArray (midiInputs);

    return allInputs;
}
886
887//==============================================================================
// Audio-callback entry point: advances the reference range, applies any
// master-context synchronisation, renders the next block and dispatches
// pending MIDI. Runs on the audio thread.
void EditPlaybackContext::fillNextNodeBlock (float* const* allChannels, int numChannels, int numSamples)
{
    if (edit.isRendering())
        return;

    SCOPED_REALTIME_CHECK
    if (! nodePlaybackContext)
        return;

    nodePlaybackContext->updateReferenceSampleRange (numSamples);

    // Sync this playback context with a master context
    if (nodeContextToSyncTo && nodePlaybackContext->playHead.isPlaying() && nodeContextToSyncTo->getNodePlayHead() != nullptr)
    {
        jassert (contextSyncroniser);
        jassert (nodeContextToSyncTo->getNodePlayHead() != nullptr);
        const auto[action, newTimelineTime] = contextSyncroniser->getSyncAction (*nodeContextToSyncTo->getNodePlayHead(),
                                                                                 nodePlaybackContext->playHead,
                                                                                 nodeContextToSyncTo->getSampleRate());

        // NOTE(review): the case labels are elided in this view — from the body
        // order these are presumably SyncAction::none, SyncAction::rollInToLoop
        // and SyncAction::breakSync; confirm against the full file.
        switch (action)
        {
            {
                break;
            }
            {
                nodePlaybackContext->postRollInToLoop (newTimelineTime.inSeconds());
                break;
            }
            {
                nodeContextToSyncTo = nullptr;
                break;
            }
        }
    }

    nodePlaybackContext->process (allChannels, numChannels, numSamples);

    // Dispatch any MIDI messages that have been injected in to the MidiOutputDeviceInstances by the Node
    auto editTime = nodePlaybackContext->processState.getSyncRange().start.time;
    midiDispatcher.dispatchPendingMessagesForDevices (editTime);
}
935
936InputDeviceInstance* EditPlaybackContext::getInputFor (InputDevice* d) const
937{
938 TRACKTION_ASSERT_MESSAGE_THREAD
939
940 for (auto i : waveInputs)
941 if (&i->owner == d)
942 return i;
943
944 for (auto i : midiInputs)
945 if (&i->owner == d)
946 return i;
947
948 return {};
949}
950
951OutputDeviceInstance* EditPlaybackContext::getOutputFor (OutputDevice* d) const
952{
953 TRACKTION_ASSERT_MESSAGE_THREAD
954
955 for (auto i : waveOutputs)
956 if (&i->owner == d)
957 return i;
958
959 for (auto i : midiOutputs)
960 if (&i->owner == d)
961 return i;
962
963 return {};
964}
965
966void EditPlaybackContext::syncToContext (EditPlaybackContext* newContextToSyncTo,
967 TimePosition newPreviousBarTime, TimeDuration newSyncInterval)
968{
969 contextToSyncTo = newContextToSyncTo;
970 previousBarTime = newPreviousBarTime;
971 syncInterval = newSyncInterval;
972 hasSynced = false;
973
974 if (newContextToSyncTo != nullptr && newContextToSyncTo->getNodePlayHead() != nullptr)
975 {
976 nodeContextToSyncTo = newContextToSyncTo;
977 contextSyncroniser->reset (previousBarTime, syncInterval);
978 }
979}
980
// Cache-invalidation flag shared with setAddAntiDenormalisationNoise() below.
static bool hasCheckedDenormNoise = false;

// Returns the stored anti-denormal-noise setting, reading it from property
// storage only once until the setter invalidates the cache.
// NOTE(review): the lazy init of these statics isn't synchronised — confirm
// this is only ever called from a single (message) thread.
bool EditPlaybackContext::shouldAddAntiDenormalisationNoise (Engine& e)
{
    static bool shouldAdd;

    if (! hasCheckedDenormNoise)
    {
        shouldAdd = e.getPropertyStorage().getProperty (SettingID::addAntiDenormalNoise, false);
        hasCheckedDenormNoise = true;
    }

    return shouldAdd;
}
995
// Persists the anti-denormal-noise setting and invalidates the cached value
// used by shouldAddAntiDenormalisationNoise().
void EditPlaybackContext::setAddAntiDenormalisationNoise (Engine& e, bool b)
{
    e.getPropertyStorage().setProperty (SettingID::addAntiDenormalNoise, b);
    hasCheckedDenormNoise = false;
}
1001
1002//==============================================================================
1003tracktion::graph::PlayHead* EditPlaybackContext::getNodePlayHead() const
1004{
1005 //TODO can this be removed?
1006 return nodePlaybackContext ? &nodePlaybackContext->playHead
1007 : nullptr;
1008}
1009
1010void EditPlaybackContext::blockUntilSyncPointChange()
1011{
1012 if (const auto startSyncPoint = getSyncPoint())
1013 {
1014 for (const auto startTime = std::chrono::steady_clock::now();;)
1015 {
1016 const auto syncPointNow = getSyncPoint();
1017
1018 if (! syncPointNow)
1019 break;
1020
1021 if (syncPointNow->referenceSamplePosition != startSyncPoint->referenceSamplePosition)
1022 break;
1023
1024 // This probably means something has gone wrong with the audio device and it's not playing
1025 // back anymore but appears valid so we don't want to block indefinitely
1026 if ((std::chrono::steady_clock::now() - startTime) > 100ms)
1027 break;
1028
1030 }
1031 }
1032}
1033
1034bool EditPlaybackContext::isPlaying() const
1035{
1036 return nodePlaybackContext ? nodePlaybackContext->playHead.isPlaying()
1037 : false;
1038}
1039
1040bool EditPlaybackContext::isLooping() const
1041{
1042 return nodePlaybackContext->playHead.isLooping();
1043}
1044
1045bool EditPlaybackContext::isDragging() const
1046{
1047 return nodePlaybackContext->playHead.isUserDragging();
1048}
1049
1050TimePosition EditPlaybackContext::getPosition() const
1051{
1052 return TimePosition::fromSamples (nodePlaybackContext->playHead.getPosition(),
1053 nodePlaybackContext->getSampleRate());
1054}
1055
1056TimePosition EditPlaybackContext::getUnloopedPosition() const
1057{
1058 return TimePosition::fromSamples (nodePlaybackContext->playHead.getUnloopedPosition(),
1059 nodePlaybackContext->getSampleRate());
1060}
1061
1062TimeRange EditPlaybackContext::getLoopTimes() const
1063{
1064 return tracktion::timeRangeFromSamples (nodePlaybackContext->playHead.getLoopRange(),
1065 nodePlaybackContext->getSampleRate());
1066}
1067
1068int EditPlaybackContext::getLatencySamples() const
1069{
1070 return nodePlaybackContext ? nodePlaybackContext->getLatencySamples()
1071 : 0;
1072}
1073
1074TimePosition EditPlaybackContext::getAudibleTimelineTime()
1075{
1076 return nodePlaybackContext ? TimePosition::fromSeconds (audiblePlaybackTime.load())
1077 : transport.getPosition();
1078}
1079
1080double EditPlaybackContext::getSampleRate() const
1081{
1082 return nodePlaybackContext ? nodePlaybackContext->getSampleRate()
1083 : 44100.0;
1084}
1085
1086void EditPlaybackContext::updateNumCPUs()
1087{
1088 if (nodePlaybackContext)
1089 nodePlaybackContext->setNumThreads ((size_t) edit.engine.getEngineBehaviour().getNumberOfCPUsToUseForAudio() - 1);
1090}
1091
1092void EditPlaybackContext::setSpeedCompensation (double plusOrMinus)
1093{
1094 if (nodePlaybackContext)
1095 nodePlaybackContext->setSpeedCompensation (plusOrMinus);
1096}
1097
1098void EditPlaybackContext::setTempoAdjustment (double plusOrMinusProportion)
1099{
1100 if (nodePlaybackContext)
1101 nodePlaybackContext->setTempoAdjustment (plusOrMinusProportion);
1102}
1103
1104void EditPlaybackContext::postPosition (TimePosition positionToJumpTo, std::optional<TimePosition> whenToJump)
1105{
1106 if (nodePlaybackContext)
1107 {
1108 if (whenToJump && *whenToJump == positionToJumpTo)
1109 nodePlaybackContext->postPosition (positionToJumpTo, {});
1110 else
1111 nodePlaybackContext->postPosition (positionToJumpTo, whenToJump);
1112 }
1113}
1114
1115std::optional<TimePosition> EditPlaybackContext::getPendingPositionChange() const
1116{
1117 if (nodePlaybackContext)
1118 return nodePlaybackContext->getPendingPositionChange();
1119
1120 return {};
1121}
1122
1123void EditPlaybackContext::play()
1124{
1125 if (nodePlaybackContext)
1126 nodePlaybackContext->playHead.play();
1127}
1128
1129void EditPlaybackContext::stop()
1130{
1131 if (nodePlaybackContext)
1132 nodePlaybackContext->playHead.stop();
1133}
1134
1135std::optional<SyncPoint> EditPlaybackContext::getSyncPoint() const
1136{
1137 if (nodePlaybackContext)
1138 return nodePlaybackContext->getSyncPoint();
1139
1140 return {};
1141}
1142
1143TimePosition EditPlaybackContext::globalStreamTimeToEditTime (double globalStreamTime) const
1144{
1145 if (! nodePlaybackContext)
1146 return TimePosition();
1147
1148 const auto sampleRate = getSampleRate();
1149 const auto globalSamplePos = tracktion::graph::timeToSample (globalStreamTime, sampleRate);
1150 const auto timelinePosition = nodePlaybackContext->playHead.referenceSamplePositionToTimelinePosition (globalSamplePos);
1151
1152 return TimePosition::fromSamples (timelinePosition, sampleRate);
1153}
1154
1155TimePosition EditPlaybackContext::globalStreamTimeToEditTimeUnlooped (double globalStreamTime) const
1156{
1157 if (! nodePlaybackContext)
1158 return TimePosition();
1159
1160 const auto sampleRate = getSampleRate();
1161 const auto globalSamplePos = tracktion::graph::timeToSample (globalStreamTime, sampleRate);
1162 const auto timelinePosition = nodePlaybackContext->playHead.referenceSamplePositionToTimelinePositionUnlooped (globalSamplePos);
1163
1164 return TimePosition::fromSamples (timelinePosition, sampleRate);
1165}
1166
1167void EditPlaybackContext::resyncToGlobalStreamTime (juce::Range<double> globalStreamTime, double sampleRate)
1168{
1169 if (! nodePlaybackContext)
1170 return;
1171
1172 const auto globalSampleRange = tracktion::graph::timeToSample (globalStreamTime, sampleRate);
1173 nodePlaybackContext->resyncToReferenceSampleRange (globalSampleRange);
1174}
1175
1176void EditPlaybackContext::setThreadPoolStrategy (int type)
1177{
1178 type = juce::jlimit (static_cast<int> (tracktion::graph::ThreadPoolStrategy::conditionVariable),
1179 static_cast<int> (tracktion::graph::ThreadPoolStrategy::lightweightSemHybrid),
1180 type);
1181
1182 EditPlaybackContextInternal::getThreadPoolStrategyType() = type;
1183}
1184
1185int EditPlaybackContext::getThreadPoolStrategy()
1186{
1187 const int type = juce::jlimit (static_cast<int> (tracktion::graph::ThreadPoolStrategy::conditionVariable),
1188 static_cast<int> (tracktion::graph::ThreadPoolStrategy::lightweightSemHybrid),
1189 EditPlaybackContextInternal::getThreadPoolStrategyType());
1190
1191 return type;
1192}
1193
1194void EditPlaybackContext::enablePooledMemory (bool enable)
1195{
1196 EditPlaybackContextInternal::getPooledMemoryFlag() = enable;
1197}
1198
1199void EditPlaybackContext::enableNodeMemorySharing (bool enable)
1200{
1201 EditPlaybackContextInternal::getNodeMemorySharingFlag() = enable;
1202}
1203
1204void EditPlaybackContext::enableAudioWorkgroup (bool enable)
1205{
1206 EditPlaybackContextInternal::getAudioWorkgroupFlag() = enable;
1207}
1208
/** Returns the number of input device instances currently recording.
    Acquire ordering pairs with the acq_rel increments/decrements below so the
    reader observes writes made before the counter changed. */
int EditPlaybackContext::getNumActivelyRecordingDevices() const
{
    return activelyRecordingInputDevices.load (std::memory_order_acquire);
}
1213
/** Atomically increments the actively-recording-device counter.
    acq_rel ordering keeps the counter consistent across threads. */
void EditPlaybackContext::incrementNumActivelyRecordingDevices()
{
    activelyRecordingInputDevices.fetch_add (1, std::memory_order_acq_rel);
}
1218
/** Atomically decrements the actively-recording-device counter.
    Must balance a prior incrementNumActivelyRecordingDevices() call. */
void EditPlaybackContext::decrementNumActivelyRecordingDevices()
{
    activelyRecordingInputDevices.fetch_sub (1, std::memory_order_acq_rel);
}
1223
1224//==============================================================================
1225static int numHighPriorityPlayers = 0, numRealtimeDefeaters = 0;
1226
1227inline void updateProcessPriority (Engine& engine)
1228{
1229 int level = 0;
1230
1231 if (numHighPriorityPlayers > 0)
1232 level = numRealtimeDefeaters == 0 && engine.getPropertyStorage().getProperty (SettingID::useRealtime, false) ? 2 : 1;
1233
1234 engine.getEngineBehaviour().setProcessPriority (level);
1235}
1236
1237EditPlaybackContext::ProcessPriorityBooster::ProcessPriorityBooster (Engine& e) : engine (e) { ++numHighPriorityPlayers; updateProcessPriority (engine); }
1238EditPlaybackContext::ProcessPriorityBooster::~ProcessPriorityBooster() { --numHighPriorityPlayers; updateProcessPriority (engine); }
1239EditPlaybackContext::RealtimePriorityDisabler::RealtimePriorityDisabler (Engine& e) : engine (e) { ++numRealtimeDefeaters; updateProcessPriority (engine); }
1240EditPlaybackContext::RealtimePriorityDisabler::~RealtimePriorityDisabler() { --numRealtimeDefeaters; updateProcessPriority (engine); }
1241
1242}} // namespace tracktion { inline namespace engine
T clamp(T... args)
void addArray(const Type *elementsToAdd, int numElementsToAdd)
void add(const ElementType &newElement)
void setSize(int newNumChannels, int newNumSamples, bool keepExistingContent=false, bool clearExtraSpace=false, bool avoidReallocating=false)
void clear() noexcept
const Type * getReadPointer(int channelNumber) const noexcept
bool isEmpty() const noexcept
ObjectClass * add(ObjectClass *newObject)
static Range withStartAndLength(const ValueType startValue, const ValueType length) noexcept
constexpr ValueType getStart() const noexcept
constexpr ValueType getEnd() const noexcept
constexpr ValueType getLength() const noexcept
static Result fail(const String &errorMessage) noexcept
static Result ok() noexcept
bool isEmpty() const noexcept
static int getNumCpus() noexcept
static Time JUCE_CALLTYPE getCurrentTime() noexcept
void addWaveInputDeviceInstance(InputDevice &)
Note this doesn't check for device enablement.
std::optional< SyncPoint > getSyncPoint() const
Returns the last reference sample position and the edit time and beat that it corresponded to.
AbletonLink & getAbletonLink() const noexcept
Returns the AbletonLink object.
TempoSequence tempoSequence
The global TempoSequence of this Edit.
void updateModifierTimers(TimePosition editTime, int numSamples) const
Updates all the ModifierTimers with a given edit time and number of samples.
Represents an input device.
Holds a list of TempoSetting objects, to form a sequence of tempo changes.
const tempo::Sequence & getInternalSequence() const
N.B.
Plays back a Node with PlayHeadState and ProcessState.
void setNumThreads(size_t numThreads)
Sets the number of threads to use for rendering.
void clearNode()
Clears the Node currently playing.
double getSampleRate() const
Returns the current sample rate.
int process(const tracktion::graph::Node::ProcessContext &pc)
Processes a block of audio and MIDI data.
Controls the transport of an Edit's playback.
TimePosition getPosition() const
Returns the current transport position.
TimeRange getLoopRange() const noexcept
Returns the loop range.
Struct to describe a single iteration of a process call.
Determines how this block releates to other previous render blocks and if the play head has jumped in...
Converts a monotonically increasing reference range in to a timeline range.
std::chrono::system_clock::time_point getLastUserInteractionTime() const
Returns the time of the last user interaction, either a setPosition or setUserIsDragging call.
int64_t getPosition() const
Returns the current timeline position.
void overridePosition(int64_t newPosition)
Adjust position without triggering a 'user interaction' change.
void setPosition(int64_t newPosition)
Sets the timeline position of the play head and if it is different logs a user interaction.
juce::Range< int64_t > getLoopRange() const noexcept
Returns the looped playback range.
void setReferenceSampleRange(juce::Range< int64_t > sampleRange)
Sets the reference sample count, adjusting the timeline if the play head is playing.
void setRollInToLoop(int64_t playbackPosition)
Puts the play head in to roll in to loop mode.
bool isPlaying() const noexcept
Returns true is the play head is currently playing.
bool isLooping() const noexcept
Returns true is the play head is in loop mode.
T empty(T... args)
T exchange(T... args)
T fetch_add(T... args)
T fetch_sub(T... args)
T floor(T... args)
T fmod(T... args)
T is_pointer_v
#define TRANS(stringLiteral)
#define jassert(expression)
#define jassertfalse
T load(T... args)
typedef double
T min(T... args)
Type jlimit(Type lowerLimit, Type upperLimit, Type valueToConstrain) noexcept
int roundToInt(const FloatType value) noexcept
bool isAttached(InputDeviceInstance &instance)
Returns true if this input is assigned to a target.
juce::Array< AudioTrack * > getAudioTracks(const Edit &edit)
Returns all the AudioTracks in an Edit.
std::unique_ptr< tracktion::graph::Node > createNodeForEdit(EditPlaybackContext &epc, std::atomic< double > &audibleTimeToUpdate, const CreateNodeParams &params)
Creates a Node to play back an Edit with live inputs and outputs.
bool hasErrors(const InputDeviceInstance::PreparedContext &pc)
Returns true if all the targets were fully prepared.
std::pair< std::vector< std::unique_ptr< InputDeviceInstance::RecordingContext > >, juce::StringArray > extract(InputDeviceInstance::PreparedContext &&pc)
Splits the PreparedContext in to valid RecordingContexts and an array of error messages.
TimePosition time
The Edit timeline time.
InputDeviceInstance::RecordingParameters getDefaultRecordingParameters(const EditPlaybackContext &context, TimePosition playStart, TimePosition punchIn)
Returns the default set of recording parameters.
Holds a reference sample position and the Edit time and beat that it corresponds to.
LockFreeMultiThreadedNodePlayer::ThreadPoolCreator getPoolCreatorFunction(ThreadPoolStrategy poolType)
Returns a function to create a ThreadPool for the given stategy.
TimeRange timeRangeFromSamples(juce::Range< int64_t > sampleRange, double sampleRate)
Creates a TimeRange from a range of samples.
ThreadPoolStrategy
Available strategies for thread pools.
constexpr TimePosition toPosition(TimeDuration)
Converts a TimeDuration to a TimePosition.
RangeType< TimePosition > TimeRange
A RangeType based on real time (i.e.
constexpr double sampleToTime(IntType samplePosition, double sampleRate)
Converts an integer sample number to a time in seconds.
T push_back(T... args)
T llround(T... args)
T size(T... args)
T sleep_for(T... args)
typedef int64_t
T store(T... args)
Represents a position in beats.
Represents a duration in real-life time.
Represents a position in real-life time.
constexpr double inSeconds() const
Returns the TimePosition as a number of seconds.
@ rollInToLoop
Set the dest playhead to roll in to the loop.
Holds the state of a process call.
std::function< void()> onContinuityUpdated
Callback which can be set to be called when the continuity changes.
SyncPoint getSyncPoint() const
Returns the end of the SyncRange.
SyncRange getSyncRange() const
Returns the SyncRange for the current audio block.
void setPlaybackSpeedRatio(double newRatio)
Sets a playback speed ratio.
typedef size_t
#define CRASH_TRACER
This macro adds the current location to a stack which gets logged if a crash happens.