15#define USE_DYNAMIC_OFFSET_CONTAINER_CLIP 1
18namespace tracktion {
inline namespace engine
31 template<
typename PluginType>
39 for (
auto p : edit.getPluginCache().getPlugins())
40 if (auto pt = dynamic_cast<PluginType*> (p))
41 if (pt->state.getParent().isValid() && pt->state.getRoot() == edit.state)
47 using namespace tracktion::graph;
49 int getSidechainBusID (
EditItemID sidechainSourceID)
51 constexpr size_t sidechainMagicNum = 0xb2275e7216a2;
52 return static_cast<int> (
hash (sidechainMagicNum, sidechainSourceID.getRawID()));
// Rack input bus ID: salts the rack's raw ID so sends into a rack can't
// collide with other bus-ID spaces. The magic constant 0x7261636b496e is
// the ASCII bytes "rackIn".
// NOTE(review): this view of the file is line-sampled; the enclosing
// function's signature (presumably `int getRackInputBusID (EditItemID rackID)`,
// given the use of `rackID` below) lies outside the visible lines — confirm.
57 constexpr size_t rackInputMagicNum = 0x7261636b496e;
58 return static_cast<int> (
hash (rackInputMagicNum, rackID.getRawID()));
// Rack output bus ID: mirrors the rack-input scheme with a distinct salt.
// The magic constant 0x7261636b4f7574 is the ASCII bytes "rackOut".
// NOTE(review): line-sampled view — the enclosing function signature
// (presumably `int getRackOutputBusID (EditItemID rackID)`) is not visible.
63 constexpr size_t rackOutputMagicNum = 0x7261636b4f7574;
64 return static_cast<int> (
hash (rackOutputMagicNum, rackID.getRawID()));
67 int getWaveInputDeviceBusID (
EditItemID trackItemID)
69 constexpr size_t waveMagicNum = 0xc1abde;
70 return static_cast<int> (
hash (waveMagicNum, trackItemID.getRawID()));
73 int getMidiInputDeviceBusID (
EditItemID trackItemID)
75 constexpr size_t midiMagicNum = 0x9a2762;
76 return static_cast<int> (
hash (midiMagicNum, trackItemID.getRawID()));
// Returns whether any plugin in the edit names this track as its sidechain
// source (i.e. whether a SendNode must be created for it).
// NOTE(review): this view of the file is line-sampled — the loop body's
// `return true;` and the function's trailing `return false;` are not visible
// here, so the fragment below cannot be safely rewritten; confirm against the
// full source.
79 bool isSidechainSource (
Track& t)
81 const auto itemID = t.itemID;
// Scan every plugin in the edit's cache for one whose sidechain source
// matches this track's item ID.
83 for (
auto p : t.edit.getPluginCache().getPlugins())
84 if (p->getSidechainSourceID() == itemID)
90 constexpr int getTrackNumChannels()
97 for (
auto mapping : channelMap)
98 if (mapping.first != mapping.second)
107 if (&t->getWaveInputDevice() == &device)
116 if (&t->getMidiInputDevice() == &device)
125 return waveDevice->getChannelSet().size();
134 for (
auto ri : getAllPluginsOfType<
RackInstance> (type.edit))
135 if (ri->type.get() == &type)
143 auto instances = getInstancesForRack (type);
144 instances.
removeIf ([] (
auto instance) {
return ! instance->isEnabled(); });
163 for (
auto allowedTrack : *params.allowedTracks)
164 if (t.isAChildOf (*allowedTrack))
175 auto tracks = originalTracks;
181 for (
auto track : originalTracks)
184 st !=
nullptr && st->isSubmixFolder())
186 bool shouldSkip =
false;
189 for (
auto potentialParent : originalTracks)
191 if (track->isAChildOf (*potentialParent))
199 for (
auto potentialChild : originalTracks)
201 if (potentialChild->isAChildOf (*track))
212 for (
auto childTrack : st->getAllSubTracks (true))
213 tracks.addIfNotAlreadyThere (childTrack);
226 const auto clipPos = clip.getPosition();
230 desc.inTimeRange =
TimeRange (clipPos.getStart(), clip.getFadeIn());
231 desc.fadeInType = clip.getFadeInType();
240 desc.outTimeRange =
TimeRange (clipPos.getEnd() - clip.getFadeOut(), clip.getFadeOut());
241 desc.fadeOutType = clip.getFadeOutType();
256 if (! clip.getWarpTime())
261 for (
auto m : clip.getWarpTimeManager().getMarkers())
262 map.push_back ({ m->sourceTime, m->warpTime });
272 if (! clip.getAutoPitch())
278 if (
auto pg = clip.getPatternGenerator())
281 const auto clipRootKey = clip.getLoopInfo().getRootNote() % 12;
282 const auto clipTransposeSemitones = clip.getTransposeSemiTones (
false);
283 const auto scale =
static_cast<int> (pg->scaleType.get());
287 pg->getFlattenedChordProgression (progression,
true);
291 auto editTempoSequencePosition =
createPosition (clip.edit.tempoSequence);
294 for (
auto p : progression)
297 editTempoSequencePosition.set (beatPos);
298 const auto editKey = editTempoSequencePosition.getKey();
300 const int scaleNote = editKey.pitch % 12;
301 int chordTrackPitchDelta = 0;
304 if (p->chordName.get().isNotEmpty())
306 const int chordNote = p->getRootNote (scaleNote,
Scale (
static_cast<Scale::ScaleType
> (editKey.scale)));
307 chordTrackPitchDelta = chordNote - scaleNote;
311 int transposeBase = scaleNote - clipRootKey;
313 while (transposeBase > 6) transposeBase -= 12;
314 while (transposeBase < -6) transposeBase += 12;
317 transposeBase += p->octave * 12;
320 const int finalPitch = transposeBase + chordTrackPitchDelta + clipTransposeSemitones;
321 keyChanges.
push_back ({ beatPos, { finalPitch, scale } });
323 beatPos = beatPos + p->lengthInBeats;
331 for (
auto ts : clip.edit.tempoSequence.getTempos())
332 tempoChanges.push_back ({ ts->startBeatNumber.get(), ts->bpm.get(), ts->curve.get() });
334 for (
auto ts : clip.edit.tempoSequence.getTimeSigs())
335 timeSigChanges.push_back ({ ts->startBeatNumber.get(), ts->numerator.get(), ts->denominator.get(), ts->triplets.get() });
338 const bool useDenominator = clip.edit.engine.getEngineBehaviour().lengthOfOneBeatDependsOnTimeSignature();
339 tempo::Sequence seq (std::move (tempoChanges), std::move (timeSigChanges), std::move (keyChanges),
340 useDenominator ? tempo::LengthOfOneBeat::dependsOnTimeSignature
351 switch (instance.
owner.getMonitorMode())
382 auto fIn = clip.getFadeIn();
383 auto fOut = clip.getFadeOut();
385 if (fIn > 0_td || fOut > 0_td)
390 auto pos =
toTime (clipTimeRangeToUse, clip.edit.tempoSequence);
391 node = makeNode<FadeInOutNode> (std::move (node), params.
processState,
392 speedIn ?
TimeRange (pos.getStart(), pos.getStart() +
juce::jmin (TimeDuration::fromSeconds (0.003), fIn))
393 :
TimeRange (pos.getStart(), pos.getStart() + fIn),
395 :
TimeRange (pos.getEnd() - fOut, pos.getEnd()),
396 clip.getFadeInType(), clip.getFadeOutType(),
407 auto& playHeadState = params.
processState.playHeadState;
408 const AudioFile playFile (clip.getPlaybackFile());
410 if (playFile.isNull())
414 if (clip.setupARA (
false))
416 jassert (clip.melodyneProxy !=
nullptr);
419 return makeNode<MelodyneNode> (clip, playHeadState.playHead, params.
forRendering);
424 clip.melodyneProxy =
nullptr;
427 auto original = clip.getAudioFile();
430 clip.beginRenderingNewProxyIfNeeded();
434 if (clip.canUseProxy())
436 assert (role != ClipRole::launcher);
442 if (! clip.usesTimeStretchedProxy())
444 nodeOffset = clip.getPosition().getOffset();
445 loopRange = clip.getLoopRange();
446 speed = clip.getSpeedRatio();
453 const auto clipPos = clip.getPosition();
457 desc.inTimeRange =
TimeRange (clipPos.getStart(), clip.getFadeIn());
458 desc.fadeInType = clip.getFadeInType();
467 desc.outTimeRange =
TimeRange (clipPos.getEnd() - clip.getFadeOut(), clip.getFadeOut());
468 desc.fadeOutType = clip.getFadeOutType();
475 node = tracktion::graph::makeNode<SpeedRampWaveNode> (playFile,
476 toTime (clipTimeRangeToUse, clip.edit.tempoSequence),
479 clip.getLiveClipLevel(),
481 clip.getActiveChannels(),
490 node = tracktion::graph::makeNode<WaveNode> (playFile,
491 toTime (clipTimeRangeToUse, clip.edit.tempoSequence),
494 clip.getLiveClipLevel(),
496 clip.getActiveChannels(),
505 const auto timeStretcherMode = clip.getActualTimeStretchMode();
506 const auto timeStretcherOpts = clip.elastiqueProOptions.get();
508 : WaveNodeRealTime::ReadAhead::no;
510 const auto speedFadeDesc = getSpeedFadeDescription (clip);
511 auto warpMap = getWarpMap (clip);
514 if (clip.getAutoTempo() || clip.getAutoPitch() || role == ClipRole::launcher)
520 auto syncTempo = WaveNodeRealTime::SyncTempo::no;
521 auto syncPitch = WaveNodeRealTime::SyncPitch::no;
523 auto wi = clip.getWaveInfo();
524 auto& li = clip.getLoopInfo();
526 if (clip.getAutoTempo() && li.getNumBeats() > 0 && wi.hashCode != 0)
528 tempos.
push_back ({ 0_bp, li.getBpm (wi), 1.0 });
529 timeSigs.
push_back ({ 0_bp, li.getNumerator(), li.getDenominator(),
false });
530 syncTempo = WaveNodeRealTime::SyncTempo::yes;
535 timeSigs.
push_back ({ 0_bp, 4, 4,
false });
538 if (clip.getAutoPitch() && li.getRootNote() != -1)
540 keyChanges.
push_back ({ 0_bp, { li.getRootNote(), 0 } });
541 syncPitch = WaveNodeRealTime::SyncPitch::yes;
544 tempo::Sequence seq (std::move (tempos),
545 std::move (timeSigs),
546 std::move (keyChanges),
547 clip.edit.engine.getEngineBehaviour().lengthOfOneBeatDependsOnTimeSignature() ? tempo::LengthOfOneBeat::dependsOnTimeSignature
550 if (role == ClipRole::launcher)
552 node = makeNode<WaveNodeRealTime> (playFile,
553 timeStretcherMode, timeStretcherOpts,
555 clip.getOffsetInBeats(),
556 clip.getLoopRangeBeats(),
557 clip.getLiveClipLevel(),
558 clip.getActiveChannels(),
563 clip.getResamplingQuality(),
564 speedFadeDesc, std::move (editTempoPosition),
566 seq, syncTempo, syncPitch,
567 getChordTrackSequenceIfRequired (clip),
568 clip.getPitchChange(),
573 node = makeNode<WaveNodeRealTime> (playFile,
574 timeStretcherMode, timeStretcherOpts,
575 toBeats (clipTimeRangeToUse, clip.edit.tempoSequence),
576 clip.getOffsetInBeats(),
577 BeatRange (clip.getLoopStartBeats(), clip.getLoopLengthBeats()),
578 clip.getLiveClipLevel(),
579 clip.getActiveChannels(),
584 clip.getResamplingQuality(),
585 speedFadeDesc, std::move (editTempoPosition),
587 seq, syncTempo, syncPitch,
588 getChordTrackSequenceIfRequired (clip),
589 clip.getPitchChange(),
595 assert (role != ClipRole::launcher);
597 node = makeNode<WaveNodeRealTime> (playFile,
598 toTime (clipTimeRangeToUse, clip.edit.tempoSequence),
599 clip.getPosition().getOffset(),
601 clip.getLiveClipLevel(),
602 clip.getSpeedRatio(),
603 clip.getActiveChannels(),
608 clip.getResamplingQuality(),
609 speedFadeDesc, std::move (editTempoPosition),
610 timeStretcherMode, timeStretcherOpts,
611 clip.getPitchChange(),
619 if (
auto pluginList = clip.getPluginList())
621 for (
auto p : *pluginList)
622 p->initialiseFully();
624 node = createPluginNodeForList (*pluginList,
nullptr, std::move (node), playHeadState, params);
629 if (role != ClipRole::launcher)
630 node = createFadeNodeForClip (clip, clipTimeRangeToUse, std::move (node), params);
638 if (clip.canUseProxy())
640 assert (role == ClipRole::arranger);
641 return createNodeForAudioClip (clip, clip.itemID, clip.getEditTimeRange(), includeMelodyne, params, role);
644 if (clip.getAutoTempo() || clip.getAutoPitch() || role == ClipRole::launcher)
645 return createNodeForAudioClip (clip, clip.itemID, clip.getEditBeatRange(), includeMelodyne, params, role);
647 assert (role == ClipRole::arranger);
648 return createNodeForAudioClip (clip, clip.itemID, clip.getEditTimeRange(), includeMelodyne, params, role);
655 const bool generateMPE = clip.getMPEMode();
656 const auto timeBase = clip.canUseProxy() ? MidiList::TimeBase::seconds
660 :
juce::Range<
int>::withStartAndLength (clip.getMidiChannel().getChannelNumber(), 1);
665 sequences.
emplace_back (clip.getSequence().exportToPlaybackMidiSequence (clip, timeBase, generateMPE));
667 :
BeatRange (clip.getStartBeat(), clip.getEndBeat());
669 return graph::makeNode<LoopingMidiNode> (std::move (sequences),
673 clip.getLoopRangeBeats(),
674 clip.getOffsetInBeats(),
675 clip.getLiveClipLevel(),
678 clip.getQuantisation(),
679 clip.edit.engine.getGrooveTemplateManager().getTemplateByName (clip.getGrooveTemplate()),
680 clip.getGrooveStrength(),
683 if (! trackMuteState.shouldTrackBeAudible())
684 return ! trackMuteState.shouldTrackMidiBeProcessed();
691 assert (role != ClipRole::launcher);
692 const auto clipTimeRange = clip.getEditTimeRange();
696 sequences.
emplace_back (clip.getSequenceLooped().exportToPlaybackMidiSequence (clip, timeBase, generateMPE));
698 return graph::makeNode<MidiNode> (std::move (sequences),
703 clip.getLiveClipLevel(),
708 if (! trackMuteState.shouldTrackBeAudible())
709 return ! trackMuteState.shouldTrackMidiBeProcessed();
722 if (role == ClipRole::launcher)
726 for (
int i = clip.usesProbability() ? 64 : 1; --i >= 0;)
730 node = graph::makeNode<LoopingMidiNode> (std::move (sequences),
734 clip.getLoopRangeBeats(),
735 clip.getOffsetInBeats(),
736 clip.getLiveClipLevel(),
754 for (
int i = clip.usesProbability() ? 64 : 1; --i >= 0;)
757 clip.generateMidiSequence (sequence);
761 const auto clipRange = clip.getEditTimeRange ();
762 const juce::Range<double> editTimeRange (clipRange.getStart ().inSeconds (), clipRange.getEnd ().inSeconds ());
763 node = graph::makeNode<MidiNode> (std::move (sequences),
764 MidiList::TimeBase::seconds,
768 clip.getLiveClipLevel(),
773 if (!trackMuteState.shouldTrackBeAudible ())
774 return !trackMuteState.shouldTrackMidiBeProcessed ();
780 if (node && ! clip.getListeners().isEmpty())
781 node = makeNode<LiveMidiOutputNode> (clip, std::move (node));
790 const auto& clips = clip.getClips();
795 #if USE_DYNAMIC_OFFSET_CONTAINER_CLIP
805 assert (! acb->canUseProxy());
806 assert (acb->getAutoTempo());
808 if (
auto clipNode = createNodeForAudioClip (*acb,
false, params, ClipRole::arranger))
813 assert (
false &&
"Only WaveAudioClips supported at the moment");
820 : clip.getEditBeatRange(),
821 clip.getOffsetInBeats(),
822 clip.getLoopRangeBeats(),
824 node = std::move (offsetNode);
830 auto node = makeNode<ContainerClipNode> (params.
processState,
832 BeatRange (clip.getStartBeat(), clip.getEndBeat()),
833 clip.getOffsetInBeats(),
834 clip.getLoopRangeBeats(),
835 createNodeForClips (clip.itemID, clips, trackMuteState, params));
841 if (
auto pluginList = clip.getPluginList())
843 for (
auto p : *pluginList)
844 p->initialiseFully();
846 node = createPluginNodeForList (*pluginList,
nullptr, std::move (node), params.
processState.playHeadState, params);
851 if (role != ClipRole::launcher)
852 return createFadeNodeForClip (clip, clip.getEditTimeRange(), std::move (node), params);
864 if (
auto containerClip =
dynamic_cast<ContainerClip*
> (&clip))
865 return createNodeForContainerClip (*containerClip, trackMuteState, params, role);
868 return createNodeForAudioClip (*audioClip,
false, params, role);
870 if (
auto midiClip =
dynamic_cast<MidiClip*
> (&clip))
871 return createNodeForMidiClip (*midiClip, trackMuteState, params, role);
873 if (
auto stepClip =
dynamic_cast<StepClip*
> (&clip))
874 return createNodeForStepClip (*stepClip, trackMuteState, params, role);
884 if (clips.
size() == 0)
887 const bool clipsHaveLatency = [&]
890 for (
auto clip : clips)
891 if (params.allowedClips == nullptr || params.allowedClips->contains (clip))
892 if (auto pluginList = clip->getPluginList())
893 for (auto p : *pluginList)
894 if (p->getLatencySeconds() > 0.0)
903 if (clipsHaveLatency)
907 for (
auto clip : clips)
908 if (params.allowedClips == nullptr || params.allowedClips->contains (clip))
909 if (auto clipNode = createNodeForClip (*clip, trackMuteState, params, ClipRole::arranger))
910 combiner->addInput (
std::
move (clipNode));
915 if (clips.
size() == 1)
923 if (
auto clipNode = createNodeForClip (*clip, trackMuteState, params, ClipRole::arranger))
924 combiner->addInput (std::move (clipNode), clip->getPosition().time);
933 for (
auto clip : clips)
934 if (params.allowedClips == nullptr || params.allowedClips->contains (clip))
935 if (auto clipNode = createNodeForClip (*clip, trackMuteState, params, ClipRole::arranger))
936 combiner->addInput (
std::
move (clipNode), clip->getPosition().
time);
946 for (
auto slot : slotList.getClipSlots())
948 auto clip = slot->getClip();
955 if (
auto clipNode = createNodeForClip (*clip, trackMuteState, params, ClipRole::launcher))
960 launchHandle = acb->getLaunchHandle();
961 else if (
auto mc =
dynamic_cast<MidiClip*
> (clip))
962 launchHandle = mc->getLaunchHandle();
963 else if (
auto sc =
dynamic_cast<StepClip*
> (clip))
964 launchHandle = sc->getLaunchHandle();
969 : clip->getLengthInBeats();
971 switch (clip->followActionDurationType.get())
974 if (
auto afterBeats = clip->followActionBeats.get(); afterBeats > 0_bd)
975 clipDuration = afterBeats;
979 if (clip->isLooping())
981 if (
auto afterLoops = clip->followActionNumLoops.get(); afterLoops > 0.0)
982 clipDuration = (clip->getLoopLengthBeats() * afterLoops) - clip->getOffsetInBeats();
986 clipDuration = clip->getLengthInBeats() - clip->getOffsetInBeats();
992 std::move (launchHandle),
994 createFollowAction (*clip),
996 std::move (clipNode));
998 nodes.
push_back (std::move (controlNode));
1012 const bool processMidiWhenMuted = track.state.getProperty (IDs::processMidiWhenMuted,
false);
1014 auto node = tracktion::graph::makeNode<WaveNode> (
AudioFile (track.edit.engine, TemporaryFileManager::getFreezeFileForTrack (track)),
1024 node = createPluginNodeForTrack (track, *trackMuteState, std::move (node), playHeadState, params);
1026 if (isSidechainSource (track))
1027 node = makeNode<SendNode> (std::move (node), getSidechainBusID (track.itemID));
1029 node = makeNode<TrackMutingNode> (std::move (trackMuteState), std::move (node),
false);
1038 for (
auto clip : clips)
1039 if (params.allowedClips == nullptr || params.allowedClips->contains (clip))
1041 if (acb->isUsingMelodyne() && acb->melodyneProxy != nullptr)
1044 if (araClips.
size() == 0)
1049 for (
auto araClip : araClips)
1050 if (auto araNode = createNodeForAudioClip (*araClip, true, params, ClipRole::arranger))
1051 nodes.push_back (createFadeNodeForClip (*araClip, araClip->getEditTimeRange(),
std::
move (araNode), params));
1053 if (nodes.
size() == 1)
1054 return std::move (nodes.
front());
1063 const auto trackID = at.
itemID;
1066 if (
auto clipsNode = createNodeForClips (trackID, clips, trackMuteState, params))
1067 arrangerNodes.
push_back (std::move (clipsNode));
1069 if (
auto araNode = createARAClipsNode (clips, trackMuteState, params))
1070 arrangerNodes.
push_back (std::move (araNode));
1074 if (arrangerNodes.
empty())
1077 if (arrangerNodes.
size() == 1)
1078 return std::move (arrangerNodes.
front());
1083 auto launcherNodes = createNodeForLauncherClips (at.
getClipSlotList(), trackMuteState, params);
1085 if (arrangerNodes.
empty() && launcherNodes.empty())
1090 if (arrangerNodes.
size() == 1)
1091 arrangerNode = std::move (arrangerNodes.
front());
1092 else if (arrangerNodes.
size() > 1)
1095 return makeNode<ArrangerLauncherSwitchingNode> (params.
processState, at, std::move (arrangerNode), std::move (launcherNodes));
1103 if (midiDevice->isTrackDevice())
1104 if (
auto sourceTrack = getTrackContainingTrackDevice (inputDeviceInstance.
edit, *midiDevice))
1105 return makeNode<TrackMidiInputDeviceNode> (*midiDevice, makeNode<ReturnNode> (getMidiInputDeviceBusID (sourceTrack->itemID)), params.
processState,
1106 shouldMonitorTrackDevice (inputDeviceInstance));
1109 return makeNode<HostedMidiInputDeviceNode> (inputDeviceInstance, *midiDevice, midiDevice->getMPESourceID(), playHeadState, params.
processState);
1111 return makeNode<MidiInputDeviceNode> (inputDeviceInstance, *midiDevice, midiDevice->getMPESourceID(), playHeadState, trackID);
1115 if (waveDevice->isTrackDevice())
1116 if (
auto sourceTrack = getTrackContainingTrackDevice (inputDeviceInstance.
edit, *waveDevice))
1117 return makeNode<TrackWaveInputDeviceNode> (params.
processState,
1119 makeNode<ReturnNode> (getWaveInputDeviceBusID (sourceTrack->itemID)),
1120 shouldMonitorTrackDevice (inputDeviceInstance));
1123 return makeNode<WaveInputDeviceNode> (inputDeviceInstance, *waveDevice,
1135 if (
auto context = track.edit.getCurrentPlaybackContext())
1136 for (
auto in : context->getAllInputs())
1137 if ((in->isLivePlayEnabled (track) || in->getInputDevice().isTrackDevice()) && in->getTargets().contains (track.itemID))
1138 if (auto node = createLiveInputNodeForDevice (*in, playHeadState, params, track.itemID))
1139 nodes.push_back (
std::
move (node));
1144 if (nodes.
size() == 1)
1145 return std::move (nodes.
front());
1152 const auto sidechainSourceID = plugin.getSidechainSourceID();
1153 const bool usesSidechain = ! plugin.
isMissing() && sidechainSourceID.
isValid();
1155 if (! usesSidechain)
1163 for (
int i = 0; i < plugin.getNumWires(); ++i)
1165 if (
auto w = plugin.getWire (i))
1167 const int sourceIndex = w->sourceChannelIndex;
1168 const int destIndex = w->destChannelIndex;
1170 if (sourceIndex < getTrackNumChannels())
1171 directChannelMap.emplace_back (sourceIndex, destIndex);
1173 sidechainChannelMap.emplace_back (sourceIndex - getTrackNumChannels(), destIndex);
1177 if (directChannelMap.empty() && sidechainChannelMap.empty())
1180 auto directInput = std::move (node);
1182 if (! isUnityChannelMap (directChannelMap))
1183 directInput = makeNode<ChannelRemappingNode> (std::move (directInput), directChannelMap,
true);
1185 auto sidechainInput = makeNode<ReturnNode> (getSidechainBusID (sidechainSourceID));
1186 sidechainInput = makeNode<ChannelRemappingNode> (std::move (sidechainInput), std::move (sidechainChannelMap),
false);
1188 if (directChannelMap.empty())
1189 return sidechainInput;
1191 auto sumNode = makeSummingNode ({ directInput.release(), sidechainInput.release() });
1207 int maxNumChannels = -1;
1211 if (! plugin.getSidechainSourceID().isValid())
1214 node = createSidechainInputNodeForPlugin (plugin, std::move (node));
1215 node = tracktion::graph::makeNode<PluginNode> (std::move (node),
1229 if (! rackInstance.isEnabled())
1232 const auto rackInputID = getRackInputBusID (rackInstance.rackTypeID);
1233 const auto rackOutputID = getRackOutputBusID (rackInstance.rackTypeID);
1236 auto* inputNode = node.
get();
1240 RackInstanceNode::ChannelMap sendChannelMap;
1241 sendChannelMap[0] = { 0, rackInstance.leftInputGoesTo - 1, rackInstance.leftInDb };
1242 sendChannelMap[1] = { 1, rackInstance.rightInputGoesTo - 1, rackInstance.rightInDb };
1243 node = makeNode<RackInstanceNode> (std::move (node), std::move (sendChannelMap));
1244 node = makeNode<SendNode> (std::move (node), rackInputID);
1245 node = makeNode<ReturnNode> (makeNode<SinkNode> (std::move (node)), rackOutputID);
1248 RackInstanceNode::ChannelMap returnChannelMap;
1249 returnChannelMap[0] = { rackInstance.leftOutputComesFrom - 1, 0, rackInstance.leftOutDb };
1250 returnChannelMap[1] = { rackInstance.rightOutputComesFrom - 1, 1, rackInstance.rightOutDb };
1251 node = makeNode<RackInstanceNode> (std::move (node), std::move (returnChannelMap));
1253 return makeNode<RackReturnNode> (std::move (node),
1254 [wetGain = rackInstance.wetGain] { return wetGain->getCurrentValue(); },
1256 [dryGain = rackInstance.dryGain] { return dryGain->getCurrentValue(); });
1269 node = makeNode<LevelMeasurerProcessingNode> (std::move (node), *meterPlugin);
1271 else if (
auto sendPlugin =
dynamic_cast<AuxSendPlugin*
> (p))
1273 if (sendPlugin->isEnabled())
1274 node = makeNode<AuxSendNode> (std::move (node), sendPlugin->busNumber, *sendPlugin,
1275 playHeadState, trackMuteState,
1280 if (returnPlugin->isEnabled())
1281 node = makeNode<ReturnNode> (std::move (node), returnPlugin->busNumber);
1283 else if (
auto rackInstance =
dynamic_cast<RackInstance*
> (p))
1285 node = createNodeForRackInstance (*rackInstance, std::move (node));
1287 else if (
auto insertPlugin =
dynamic_cast<InsertPlugin*
> (p))
1289 if (! insertPlugin->isEnabled())
1292 if (
auto insertReturnNode = createInsertReturnNode (*insertPlugin, playHeadState, params))
1293 node = makeNode<InsertNode> (std::move (node), *insertPlugin, std::move (insertReturnNode),
1298 node = createNodeForPlugin (*p, trackMuteState, std::move (node), params);
1312 if (list !=
nullptr)
1314 for (
auto& modifier : list->getModifiers())
1316 if (modifier->getProcessingPosition() != position)
1319 node = makeNode<ModifierNode> (std::move (node), modifier, params.
sampleRate, params.
blockSize,
1334 &trackMuteState, std::move (node), playHeadState, params);
1337 node = createPluginNodeForList (t.pluginList, &trackMuteState, std::move (node), playHeadState, params);
1340 &trackMuteState, std::move (node), playHeadState, params);
1350 if (! track->isPartOfSubmix() && track != &at && track->getOutput().outputsToDestTrack (at))
1351 inputTracks.add (track);
1354 if (! track->isPartOfSubmix() && track->getOutput() != nullptr && track->getOutput()->outputsToDestTrack (at))
1355 inputTracks.add (track);
1362 if (at.getCompGroup() == -1)
1368 const auto crossfadeTime = TimeDuration::fromSeconds (
static_cast<double> (crossfadeTimeMs) / 1000.0);
1370 const auto nonMuteTimes = tc->getNonMuteTimes (at, crossfadeTime);
1371 const auto muteTimes = TrackCompManager::TrackComp::getMuteTimes (nonMuteTimes);
1373 if (muteTimes.isEmpty())
1376 node = makeNode<TimedMutingNode> (std::move (node), std::move (muteTimes), params.
processState.playHeadState);
1378 for (
auto r : nonMuteTimes)
1380 auto fadeIn = r.withLength (crossfadeTime) - 0.0001s;
1381 auto fadeOut = fadeIn.movedToEndAt (r.getEnd() + 0.0001s);
1383 if (! (fadeIn.isEmpty() && fadeOut.isEmpty()))
1384 node = makeNode<FadeInOutNode> (std::move (node),
1386 TimeRange { fadeIn.getStart(), fadeIn.getEnd() },
1387 TimeRange { fadeOut.getStart(), fadeOut.getEnd() },
1388 AudioFadeCurve::convex,
1389 AudioFadeCurve::convex,
false);
1400 auto& playHeadState = params.
processState.playHeadState;
1403 return createNodeForFrozenAudioTrack (at, playHeadState, params);
1405 auto inputTracks = getDirectInputTracks (at);
1406 const bool processMidiWhenMuted = at.
state.
getProperty (IDs::processMidiWhenMuted,
false);
1415 node = makeNode<TrackMutingNode> (std::move (clipsMuteState), std::move (node),
true);
1417 node = createTrackCompNode (at, std::move (node), params);
1420 auto liveInputNode = createLiveInputsNode (at, playHeadState, params);
1423 node = makeNode<LiveMidiOutputNode> (at, std::move (node));
1426 node = makeNode<LiveMidiInjectingNode> (at, std::move (node));
1428 if (node ==
nullptr && inputTracks.
isEmpty() && liveInputNode ==
nullptr)
1431 for (
auto plugin : at.pluginList)
1433 if (plugin->producesAudioWhenNoAudioInput())
1435 node = makeNode<SilentNode> (2);
1449 sumNode->addInput (std::move (node));
1450 sumNode->addInput (std::move (liveInputNode));
1451 node = std::move (sumNode);
1455 node = std::move (liveInputNode);
1464 sumNode->addInput (std::move (node));
1466 for (
auto inputTrack : inputTracks)
1467 if (auto n = createNodeForTrack (*inputTrack, params))
1468 sumNode->addInput (
std::
move (n));
1470 node = std::move (sumNode);
1473 node = createPluginNodeForTrack (at, *trackMuteState, std::move (node), playHeadState, params);
1475 if (isSidechainSource (at))
1476 node = makeNode<SendNode> (std::move (node), getSidechainBusID (at.
itemID));
1478 node = makeNode<TrackMutingNode> (std::move (trackMuteState), std::move (node),
false);
1482 if (at.getWaveInputDevice().isEnabled())
1483 node = makeNode<SendNode> (std::move (node), getWaveInputDeviceBusID (at.
itemID));
1485 if (at.getMidiInputDevice().isEnabled())
1486 node = makeNode<SendNode> (std::move (node), getMidiInputDeviceBusID (at.
itemID));
1496 jassert (submixTrack.isSubmixFolder());
1501 for (
auto t : submixTrack.getAllSubTracks (false))
1504 subAudioTracks.
add (ft);
1507 subFolderTracks.
add (ft);
1514 sumNode->setDoubleProcessingPrecision (submixTrack.edit.
engine.
getPropertyStorage().getProperty (SettingID::use64Bit,
false));
1517 for (
auto ft : subFolderTracks)
1522 if (! ft->isProcessing (
true))
1525 if (ft->isSubmixFolder())
1527 if (
auto node = createNodeForSubmixTrack (*ft, params))
1528 sumNode->addInput (std::move (node));
1532 for (
auto at : ft->getAllAudioSubTracks (false))
1533 if (params.allowedTracks == nullptr || params.allowedTracks->contains (at))
1534 if (auto node = createNodeForAudioTrack (*at, params))
1535 sumNode->addInput (
std::
move (node));
1540 for (
auto at : subAudioTracks)
1541 if (params.allowedTracks == nullptr || params.allowedTracks->contains (at))
1542 if (at->isProcessing (true))
1543 if (auto node = createNodeForAudioTrack (*at, params))
1544 sumNode->addInput (
std::
move (node));
1546 if (sumNode->getDirectInputNodes().empty())
1553 node = createPluginNodeForTrack (submixTrack, *trackMuteState, std::move (node), params.
processState.playHeadState, params);
1555 node = makeNode<TrackMutingNode> (std::move (trackMuteState), std::move (node),
false);
1563 if (
auto t =
dynamic_cast<AudioTrack*
> (&track))
1565 if (! t->isProcessing (
true))
1568 if (! t->createsOutput())
1571 if (t->isPartOfSubmix() && ! shouldRenderTrackInSubmix (*t, params))
1577 return createNodeForAudioTrack (*t, params);
1582 if (! t->isSubmixFolder())
1585 if (t->isPartOfSubmix() && ! shouldRenderTrackInSubmix (*t, params))
1588 if (t->getOutput() ==
nullptr)
1591 return createNodeForSubmixTrack (*t, params);
1600 const auto rackInputID = getRackInputBusID (rackType.rackID);
1601 const auto rackOutputID = getRackOutputBusID (rackType.rackID);
1603 auto rackInputNode = makeNode<ReturnNode> (rackInputID);
1604 auto rackNode = RackNodeBuilder::createRackNode (rackType, params.
sampleRate, params.
blockSize, std::move (rackInputNode),
1606 auto rackOutputNode = makeNode<SendNode> (std::move (rackNode), rackOutputID);
1608 return makeNode<SinkNode> (std::move (rackOutputNode));
1616 for (
auto rackType : rackTypeList.getTypes())
1617 if (getEnabledInstancesForRack (*rackType).
size() > 0)
1618 if (auto rackNode = createNodeForRackType (*rackType, params))
1619 nodes.push_back (
std::
move (rackNode));
1629 auto rackNodes = createNodesForRacks (rackTypeList, params);
1631 if (rackNodes.empty())
1635 sumNode->addInput (std::move (input));
1636 input = std::move (sumNode);
1646 if (insert.getReturnDeviceType() != InsertPlugin::noDevice)
1647 for (
auto i : insert.edit.getAllInputDevices())
1648 if (i->owner.
getName() == insert.inputDevice)
1649 return createLiveInputNodeForDevice (*i, playHeadState, params,
EditItemID());
1656 if (insert.outputDevice != device.getName())
1659 return makeNode<InsertSendNode> (insert);
1671 const auto outId = TemporaryFileManager::getDeviceIDFromFreezeFile (edit, freezeFile);
1673 if (device.getDeviceID() == outId)
1676 const auto length = TimeDuration::fromSeconds (af.getLength());
1681 auto node = tracktion::graph::makeNode<WaveNode> (af, TimeRange (0.0s, length),
1687 EditItemID::fromRawID ((uint64_t) device.getName().hash()),
1706 int sourceIndex = 0;
1708 for (
const auto& channel : waveDevice->getChannels())
1710 if (channel.indexInDevice != -1)
1711 channelMap.push_back (
std::make_pair (sourceIndex, channel.indexInDevice));
1716 return tracktion::graph::makeNode<ChannelRemappingNode> (std::move (node), channelMap,
false);
1720 return tracktion::graph::makeNode<MidiOutputDeviceInstanceInjectingNode> (*midiInstance, std::move (node),
1721 playHeadState.playHead);
1736 auto tempoModList = tempoTrack !=
nullptr ? tempoTrack->
getModifierList() :
nullptr;
1739 nullptr, std::move (node), playHeadState, params);
1742 auto masterModList = masterTrack !=
nullptr ? masterTrack->
getModifierList() :
nullptr;
1745 nullptr, std::move (node), playHeadState, params);
1747 node = createPluginNodeForList (edit.
getMasterPluginList(),
nullptr, std::move (node), playHeadState, params);
1750 nullptr, std::move (node), playHeadState, params);
1752 nullptr, std::move (node), playHeadState, params);
1755 node = createNodeForPlugin (*masterVolPlugin,
nullptr, std::move (node), params);
1770 return makeNode<FadeInOutNode> (std::move (node), params.
processState,
1771 TimeRange { 0_tp, edit.masterFadeIn },
1772 TimeRange { length - edit.masterFadeOut, length },
1786 Edit& edit = epc.edit;
1787 auto& playHeadState = params.
processState.playHeadState;
1788 auto insertPlugins = getAllPluginsOfType<InsertPlugin> (edit);
1801 if (
auto device = output->getOutputDevice (
false))
1803 if (! device->isEnabled())
1808 if (
std::find (devicesWithFrozenNodes.
begin(), devicesWithFrozenNodes.
end(), device)
1809 != devicesWithFrozenNodes.
end())
1812 if (
auto node = createGroupFreezeNodeForDevice (edit, *device, params.
processState))
1814 deviceNodes[device].push_back (std::move (node));
1815 devicesWithFrozenNodes.
push_back (device);
1818 else if (
auto node = createNodeForTrack (*t, params))
1820 deviceNodes[device].push_back (std::move (node));
1827 for (
auto ins : insertPlugins)
1829 if (ins->getSendDeviceType() != InsertPlugin::noDevice)
1833 auto& trackNodeVector = deviceNodes[device];
1848 auto& trackNodeVector = deviceNodes[device];
1859 const bool isSendingMidi = device->isSendingClock()
1860 || device->isSendingTimecode()
1861 || device->isSendingControllerMidiClock();
1863 if (! isSendingMidi)
1866 auto& trackNodeVector = deviceNodes[device];
1875 for (
auto& deviceAndTrackNode : deviceNodes)
1877 auto device = deviceAndTrackNode.first;
1879 auto tracksVector = std::move (deviceAndTrackNode.second);
1885 bool deviceIsBeingUsedAsInsert =
false;
1887 for (
auto ins : insertPlugins)
1889 if (ins->isFrozen())
1892 if (ins->outputDevice != device->getName())
1895 if (
auto sendNode = createInsertSendNode (*ins, *device))
1897 sumNode->addInput (std::move (sendNode));
1898 deviceIsBeingUsedAsInsert =
true;
1904 if (! deviceIsBeingUsedAsInsert)
1907 node = createMasterPluginsNode (edit, playHeadState, std::move (node), params);
1909 node = createMasterFadeInOutNode (edit, std::move (node), params);
1914 node = makeNode<SharedLevelMeasuringNode> (std::move (previewMeasurer), std::move (node));
1919 auto clickAndTracksNode = makeSummingNode ({ node.
release(),
1920 makeNode<ClickNode> (edit, getNumChannelsFromDevice (*device),
1921 device->isMidi(), playHeadState.playHead).release() });
1922 node = std::move (clickAndTracksNode);
1925 if (
auto outputDeviceNode = createNodeForDevice (epc, *device, playHeadState, std::move (node)))
1926 outputNode->addInput (std::move (outputDeviceNode));
1930 finalNode = makeNode<LevelMeasuringNode> (std::move (finalNode), epc.masterLevels);
1931 finalNode = createRackNode (std::move (finalNode), edit.
getRackList(), params);
1932 finalNode = makeNode<PlayHeadPositionNode> (params.
processState, std::move (finalNode), audibleTimeToUpdate);
1940 auto params = originalParams;
1941 auto& playHeadState = params.
processState.playHeadState;
1954 if (output->getDestinationTrack() !=
nullptr)
1962 if (
auto node = createNodeForTrack (*t, params))
1963 trackNodes.
push_back (std::move (node));
1970 node = createMasterPluginsNode (edit, playHeadState, std::move (node), params);
1971 node = createMasterFadeInOutNode (edit, std::move (node), params);
1972 node = createRackNode (std::move (node), edit.
getRackList(), params);
int removeIf(PredicateType &&predicate)
bool isEmpty() const noexcept
int size() const noexcept
ElementType getFirst() const noexcept
void add(const ElementType &newElement)
bool contains(ParameterType elementToLookFor) const
static AudioChannelSet JUCE_CALLTYPE stereo()
static AudioChannelSet JUCE_CALLTYPE canonicalChannelSet(int numChannels)
Type get() const noexcept
bool isValid() const noexcept
constexpr ValueType getStart() const noexcept
const var & getProperty(const Identifier &name) const noexcept
Base class for Clips that produce some kind of audio e.g.
@ speedRamp
Fade is a change of playback speed for tape start/stop effects.
@ chordTrackMono
Clip tracks the chord track with a monophonic pitch change.
ClipSlotList & getClipSlotList()
Returns the ClipSlotList for this track.
juce::ListenerList< Listener > & getListeners()
Returns the listener list so Nodes can manually call them.
bool isFrozen(FreezeType) const override
Returns true if this track is frozen using the given type.
const juce::Array< Clip * > & getClips() const
Returns the clips this owner contains.
A list of the ClipSlots on a Track.
@ beats
A number of beats.
@ loops
A number of loops.
A clip that can contain multiple other clips and mix their output together.
const EditItemID itemID
Every EditItem has an ID which is unique within the edit.
The Tracktion Edit class!
VolumeAndPanPlugin::Ptr getMasterVolumePlugin() const
Returns the master VolumeAndPanPlugin.
juce::CachedValue< TimeDuration > masterFadeIn
The duration in seconds of the fade in.
TimeDuration getLength() const
Returns the end time of last clip.
TrackCompManager & getTrackCompManager() const noexcept
Returns the TrackCompManager for the Edit.
juce::CachedValue< TimeDuration > masterFadeOut
The duration in seconds of the fade out.
MasterTrack * getMasterTrack() const
Returns the global MasterTrack.
SharedLevelMeasurer::Ptr getPreviewLevelMeasurer()
Returns a previously set SharedLevelMeasurer.
TempoTrack * getTempoTrack() const
Returns the global TempoTrack.
juce::CachedValue< AudioFadeCurve::Type > masterFadeInType
The curve type of the fade in.
bool getIsPreviewEdit() const noexcept
Returns true if this Edit is a temporary Edit for previewing files/clips etc.
PluginList & getMasterPluginList() const noexcept
Returns the master PluginList.
bool isClickTrackDevice(OutputDevice &) const
Returns true if the given OutputDevice is being used as the click track output.
juce::CachedValue< AudioFadeCurve::Type > masterFadeOutType
The curve type of the fade out.
RackTypeList & getRackList() const noexcept
Returns the RackTypeList which contains all the RackTypes for the Edit.
Engine & engine
A reference to the Engine.
virtual bool shouldProcessAuxSendWhenTrackIsMuted(AuxSendPlugin &)
Whether or not to include muted track contents in aux send plugins.
PropertyStorage & getPropertyStorage() const
Returns the PropertyStorage user settings customisable XML file.
DeviceManager & getDeviceManager() const
Returns the DeviceManager instance for handling audio / MIDI devices.
EngineBehaviour & getEngineBehaviour() const
Returns the EngineBehaviour instance.
static bool isHostedMidiInputDevice(const MidiInputDevice &)
Returns true if the MidiInput device is a HostedMidiInputDevice.
@ beatsRaw
Event times will be in beats relative to the Edit timeline.
Holds a list of Modifiers that have been added to a Track.
Base class for audio or midi output devices, to which a track's output can be sent.
Holds a sequence of plugins.
virtual bool isDisabled()
Plugins can be disabled to avoid them crashing Edits.
virtual bool isMissing()
for things like VSTs where the DLL is missing.
Clip * getOwnerClip() const
Returns the clip if that's what it's in.
Track * getOwnerTrack() const
Returns the track if it's a track or clip plugin.
Holds info about where temp files should go, and tidies up old ones when needed.
Holds the state of a Track and if its contents/plugins should be played or not.
bool shouldTrackBeAudible() const
Returns true if the track's mix bus should be audible.
bool shouldTrackMidiBeProcessed() const
Returns true if the track's MIDI should be processed to avoid breaks in long notes.
Base class for tracks which contain clips and plugins and can be added to Edit[s].
bool isProcessing(bool includeParents) const
Returns true if this track should be included in playback.
@ individualFreeze
Freezes a track in to a single audio file.
@ groupFreeze
Freezes multiple tracks together in to a single file.
juce::ValueTree state
The state of this Track.
ModifierList * getModifierList() const
Returns the ModifierList for this track, if it has one.
A (virtual) audio output device.
Determines how this block relates to other previous render blocks and if the play head has jumped in...
T emplace_back(T... args)
constexpr Type jmin(Type a, Type b)
void ignoreUnused(Types &&...) noexcept
bool includePlugins
Whether to include track plugins.
TrackOutput * getTrackOutput(Track &track)
Returns the TrackOutput if the given track has one.
juce::Array< TrackType * > getTracksOfType(const Edit &, bool recursive)
Returns the tracks of a given type in an Edit.
juce::String getName(LaunchQType t)
Returns the name of a LaunchQType for display purposes.
ProcessState & processState
The process state of the graph.
bool includeBypassedPlugins
If false, bypassed plugins will be completely omitted from the graph.
double sampleRate
The sample rate to use.
juce::Array< Track * > getAllTracks(const Edit &edit)
Returns all the tracks in an Edit.
bool forRendering
If the node is for rendering or not.
juce::Array< AudioTrack * > getAudioTracks(const Edit &edit)
Returns all the AudioTracks in an Edit.
tempo::Sequence::Position createPosition(const TempoSequence &s)
Creates a Position to iterate over the given TempoSequence.
std::unique_ptr< tracktion::graph::Node > createNodeForEdit(EditPlaybackContext &epc, std::atomic< double > &audibleTimeToUpdate, const CreateNodeParams ¶ms)
Creates a Node to play back an Edit with live inputs and outputs.
bool includeMasterPlugins
Whether to include master plugins, fades and volume.
int blockSize
The block size to use.
BeatPosition toBeats(TimePosition tp, const TempoSequence &ts)
Converts a TimePosition to a BeatPosition given a TempoSequence.
juce::Array< Track * > * allowedTracks
The tracks to include.
const juce::Array< Clip * > * allowedClips
The clips to include.
bool readAheadTimeStretchNodes
TEMPORARY: If true, real-time time-stretch Nodes will use a larger buffer and background thread to re...
bool implicitlyIncludeSubmixChildTracks
If true, child track in submixes will be included regardless of the allowedTracks param.
TimePosition toTime(BeatPosition bp, const TempoSequence &ts)
Converts a BeatPosition to a TimePosition given a TempoSequence.
bool allowClipSlots
If true, track's clip slots will be included, set to false to disable these (which will use a slightl...
Contains options for Edit Node content creation.
RangeType< BeatPosition > BeatRange
A RangeType based on beats.
constexpr TimePosition toPosition(TimeDuration)
Converts a TimeDuration to a TimePosition.
RangeType< TimePosition > TimeRange
A RangeType based on real time (i.e.
size_t hash(size_t seed, const T &v)
Hashes a type with a given seed and returns the new hash value.
Represents a position in beats.
Represents a duration in real-life time.
Represents a position in real-life time.
ID for objects of type EditElement - e.g.
static std::function< std::unique_ptr< graph::Node >(std::unique_ptr< tracktion::graph::Node >)> insertOptionalLastStageNode
If set, this will be called to give an opportunity to add an additional final node which could be use...
Represents a time range in an Edit stored as either time or beats.
bool isBeats() const
Returns true if the time is stored as beats, false if stored as a TimePosition.
Provides a thread-safe way to share a clip's levels with an audio engine without worrying about the C...
ProcessingPosition
Determines the position in the FX chain where the modifier should be processed.
@ preFX
The Modifier is processed before the plugin chain.
@ postFX
The Modifier is processed after the plugin chain.
Holds the state of a process call.
Describes the time and type of the speed fade in/outs.
#define CRASH_TRACER
This macro adds the current location to a stack which gets logged if a crash happens.
LengthOfOneBeat
Used to determine the length of a beat in beat <-> time conversions.
@ isAlwaysACrotchet
Signifies the length of one beat always depends only the current BPM at that point in the edit,...