11namespace tracktion {
inline namespace engine
40 editPosition (editTime),
41 loopSection (
TimePosition::fromSeconds (loop.getStart().inSeconds() * speed),
42 TimePosition::fromSeconds (loop.getEnd().inSeconds() * speed)),
44 originalSpeedRatio (speed),
45 editItemID (itemIDToUse),
46 isOfflineRender (isRendering),
47 speedFadeDescription (speedFadeDescriptionToUse),
50 channelsToUse (channelSetToUse),
51 destChannels (destChannelsToFill)
54 assert ((! speedFadeDescription.inTimeRange.isEmpty())
55 || (! speedFadeDescription.outTimeRange.isEmpty()));
61 props.hasAudio =
true;
62 props.hasMidi =
false;
63 props.numberOfChannels = destChannels.
size();
64 props.nodeID = (
size_t) editItemID.getRawID();
72 outputSampleRate = info.sampleRate;
73 editPositionInSamples = tracktion::toSamples ({ editPosition.getStart(), editPosition.getEnd() }, outputSampleRate);
74 updateFileSampleRate();
78 if (reader !=
nullptr)
79 for (
int i =
std::max (channelsToUse.
size(), reader->getNumChannels()); --i >= 0;)
86 if (! isOfflineRender)
91 if (audioFile.isNull())
94 if (reader ==
nullptr)
98 if (reader ==
nullptr)
102 if (audioFileSampleRate == 0.0 && ! updateFileSampleRate())
110 SCOPED_REALTIME_CHECK
120 editTime =
juce::jlimit (speedFadeDescription.inTimeRange.getStart(),
121 speedFadeDescription.outTimeRange.getEnd(),
124 if (! speedFadeDescription.inTimeRange.isEmpty()
125 && speedFadeDescription.inTimeRange.containsInclusive (editTime))
127 const auto timeFromStart = editTime - speedFadeDescription.inTimeRange.getStart();
128 const double proportionOfFade = timeFromStart.inSeconds() / speedFadeDescription.inTimeRange.getLength().inSeconds();
129 const double rescaledProportion = rescale (speedFadeDescription.fadeInType, proportionOfFade,
true);
131 editTime = speedFadeDescription.inTimeRange.getStart()
132 + TimeDuration::fromSeconds (rescaledProportion * speedFadeDescription.inTimeRange.getLength().inSeconds());
134 jassert (speedFadeDescription.inTimeRange.containsInclusive (editTime));
136 else if (! speedFadeDescription.outTimeRange.isEmpty()
137 && speedFadeDescription.outTimeRange.containsInclusive (editTime))
139 const auto timeFromStart = editTime - speedFadeDescription.outTimeRange.getStart();
140 const double proportionOfFade = timeFromStart.inSeconds() / speedFadeDescription.outTimeRange.getLength().inSeconds();
141 const double rescaledProportion = rescale (speedFadeDescription.fadeOutType, proportionOfFade,
false);
143 editTime = speedFadeDescription.outTimeRange.getStart()
144 + TimeDuration::fromSeconds (rescaledProportion * speedFadeDescription.outTimeRange.getLength().inSeconds());
146 jassert (speedFadeDescription.outTimeRange.containsInclusive (editTime));
149 return (int64_t) ((editTime - (editPosition.getStart() - offset)).inSeconds()
150 * originalSpeedRatio * audioFileSampleRate + 0.5);
153bool SpeedRampWaveNode::updateFileSampleRate()
155 using namespace tracktion::graph;
157 if (reader ==
nullptr)
160 audioFileSampleRate = reader->getSampleRate();
162 if (audioFileSampleRate <= 0)
165 if (! loopSection.isEmpty())
176 if (reader ==
nullptr
177 || sectionEditTime.getEnd() <= editPosition.getStart()
178 || sectionEditTime.getStart() >= editPosition.getEnd())
181 SCOPED_REALTIME_CHECK
183 if (audioFileSampleRate == 0.0 && ! updateFileSampleRate())
186 const auto fileStart = editTimeToFileSample (sectionEditTime.getStart());
187 const auto fileEnd = editTimeToFileSample (sectionEditTime.getEnd());
188 const auto numFileSamples = (
int) (fileEnd - fileStart);
190 if (numFileSamples <= 3)
192 playedLastBlock =
false;
196 reader->setReadPosition (fileStart);
198 auto destBuffer = pc.buffers.audio;
199 auto numSamples = destBuffer.getNumFrames();
201 auto numChannels = (choc::buffer::ChannelCount) destBufferChannels.size();
202 assert (pc.buffers.audio.getNumChannels() == numChannels);
209 SCOPED_REALTIME_CHECK
211 if (reader->readSamples (numFileSamples + 2, fileData.buffer, destBufferChannels, 0,
213 isOfflineRender ? 5000 : 3))
220 lastSampleFadeLength =
std::min (numSamples, 40u);
221 fileData.buffer.clear();
228 if (numChannels == 2)
239 auto ratio = numFileSamples / (
double) numSamples;
244 jassert ((
int) numChannels <= channelState.size());
246 for (choc::buffer::ChannelCount channel = 0; channel < numChannels; ++channel)
248 if (channel < (choc::buffer::ChannelCount) channelState.size())
250 const auto src = fileData.buffer.getReadPointer ((
int) channel);
251 const auto dest = destBuffer.getChannel (channel).data.data;
253 auto& state = *channelState.getUnchecked ((
int) channel);
254 state.resampler.processAdding (ratio, src, dest, (
int) numSamples, gains[channel & 1]);
256 if (lastSampleFadeLength > 0)
258 for (uint32_t i = 0; i < lastSampleFadeLength; ++i)
260 auto alpha = i / (
float) lastSampleFadeLength;
261 dest[i] = alpha * dest[i] + state.lastSample * (1.0f - alpha);
265 state.lastSample = dest[numSamples - 1];
269 destBuffer.getChannel (channel).clear();
277 if (! playedLastBlock)
283 auto bufferRef = tracktion::graph::toAudioBuffer (destBuffer);
284 bufferRef.applyGainRamp (0, bufferRef.getNumSamples(),
287 playedLastBlock =
false;
292 if (! playedLastBlock)
294 auto bufferRef = tracktion::graph::toAudioBuffer (destBuffer);
295 bufferRef.applyGainRamp (0, bufferRef.getNumSamples(),
299 playedLastBlock =
true;
306 auto numSamplesToClearAtStart = editPositionInSamples.
getStart() - timelineRange.
getStart();
307 auto numSamplesToClearAtEnd = timelineRange.
getEnd() - editPositionInSamples.
getEnd();
309 if (numSamplesToClearAtStart > 0)
310 destBuffer.getStart ((choc::buffer::FrameCount) numSamplesToClearAtStart).clear();
312 if (numSamplesToClearAtEnd > 0)
313 destBuffer.getEnd ((choc::buffer::FrameCount) numSamplesToClearAtEnd).clear();
323 case AudioFadeCurve::convex:
325 : 1.0 - ((-2.0 *
std::
cos ((
juce::MathConstants<
double>::pi * (proportion - 1.0)) / 2.0)) /
juce::MathConstants<
double>::pi + 1.0);
327 case AudioFadeCurve::concave:
331 case AudioFadeCurve::sCurve:
335 case AudioFadeCurve::linear:
338 : ((-
juce::
square (proportion - 1.0)) * 0.5) + 0.5;
int size() const noexcept
static AudioChannelSet JUCE_CALLTYPE canonicalChannelSet(int numChannels)
constexpr ValueType getStart() const noexcept
constexpr ValueType getEnd() const noexcept
Reader::Ptr createReader(const AudioFile &)
Creates a Reader to read an AudioFile.
An audio scratch buffer that has pooled storage.
AudioFileManager & getAudioFileManager() const
Returns the AudioFileManager instance.
tracktion::graph::NodeProperties getNodeProperties() override
Should return the properties of the node.
bool isReadyToProcess() override
Should return true when this node is ready to be processed.
SpeedRampWaveNode(const AudioFile &, TimeRange editTime, TimeDuration offset, TimeRange loopSection, LiveClipLevel, double speedRatio, const juce::AudioChannelSet &sourceChannelsToUse, const juce::AudioChannelSet &destChannelsToFill, ProcessState &, EditItemID, bool isOfflineRender, SpeedFadeDescription)
offset is a time added to the start of the file, e.g.
void process(ProcessContext &) override
Called when the node is to be processed.
void prepareToPlay(const tracktion::graph::PlaybackInitialisationInfo &) override
Called once before playback begins for each node.
Base class for Nodes that provides information about the current process call.
juce::Range< int64_t > getTimelineSampleRange() const
Returns the timeline sample range of the current process block.
double getSampleRate() const
Returns the sample rate of the current process block.
tracktion::graph::PlayHeadState & getPlayHeadState()
Returns the PlayHeadState in use.
tracktion::graph::PlayHead & getPlayHead()
Returns the PlayHead in use.
Struct to describe a single iteration of a process call.
Type jlimit(Type lowerLimit, Type upperLimit, Type valueToConstrain) noexcept
constexpr NumericType square(NumericType n) noexcept
Interpolators::Lagrange LagrangeInterpolator
TimeRange timeRangeFromSamples(juce::Range< int64_t > sampleRange, double sampleRate)
Creates a TimeRange from a range of samples.
constexpr int64_t toSamples(TimePosition, double sampleRate)
Converts a TimePosition to a number of samples.
Represents a duration in real-life time.
Represents a position in real-life time.
Type
A enumeration of the curve classes available.
ID for objects of type EditElement - e.g.
Provides a thread-safe way to share a clip's levels with an audio engine without worrying about the C...
float getGainIncludingMute() const noexcept
Returns the clip's gain if the clip is not muted.
void getLeftAndRightGains(float &left, float &right) const noexcept
194 Returns the left and right gains, taking into account mute and pan values.
Holds the state of a process call.
Describes the time and type of the speed fade in/outs.
Holds some really basic properties of a node.
Passed into Nodes when they are being initialised, to give them useful contextual information that th...