11namespace tracktion {
inline namespace engine
24 engine (e), sourceFile (file)
40 if (reader ==
nullptr)
43 auto numChannels = (
int) reader->numChannels;
44 auto numSamples = reader->lengthInSamples;
45 auto sampleRate = reader->sampleRate;
53 const int blockSize = 65536;
54 const bool useRightChan = numChannels > 1;
59 auto numLeft = numSamples;
60 SampleCount startSample = 0;
67 auto numThisTime = (
int)
std::min ((SampleCount) numLeft, (SampleCount) blockSize);
68 reader->read (&buffer, 0, numThisTime, startSample,
true, useRightChan);
71 startSample += numThisTime;
72 numLeft -= numThisTime;
73 progress = numLeft / (
float) numSamples;
91 bool isSensible =
false;
104 : GeneratorJob (p), engine (acb.edit.
engine), original (o)
108 if (renderTimestretched)
122 bool render()
override
126 AudioFile tempFile (engine, proxy.getFile()
130 bool ok = render (tempFile);
134 ok = proxy.deleteFile();
137 ok = tempFile.getFile().moveFileTo (proxy.getFile());
141 tempFile.deleteFile();
153 if (sourceInfo.metadata.getValue (
"MetaDataSource",
"None") ==
"AIFF")
154 sourceInfo.metadata.clear();
157 sourceInfo.numChannels, sourceInfo.sampleRate,
158 std::max (16, sourceInfo.bitsPerSample),
159 sourceInfo.metadata, 0);
162 && (proxyInfo !=
nullptr ? proxyInfo->render (engine, original, writer,
this, progress)
163 : renderNormalSpeed (writer));
171 if (reader ==
nullptr)
174 SampleCount sourceSample = 0;
175 auto samplesToDo = (SampleCount) reader->lengthInSamples;
179 auto numThisTime = (
int)
std::min (samplesToDo, (SampleCount) 65536);
181 if (numThisTime <= 0)
187 samplesToDo -= numThisTime;
188 sourceSample += numThisTime;
190 progress =
juce::jlimit (0.0f, 1.0f, (
float) (sourceSample / (
double) reader->lengthInSamples));
201 :
Clip (v, targetParent, id, t),
202 loopInfo (edit.engine, state.getOrCreateChildWithName (IDs::LOOPINFO, getUndoManager()), getUndoManager()),
204 lastProxy (edit.engine)
208 level->dbGain.referTo (
state, IDs::gain, um);
209 level->pan.referTo (
state, IDs::pan, um);
210 level->mute.referTo (
state, IDs::mute, um);
213 if (channels.
get().isEmpty())
216 fadeIn.referTo (
state, IDs::fadeIn, um);
217 fadeOut.referTo (
state, IDs::fadeOut, um);
219 fadeInType.
referTo (
state, IDs::fadeInType, um, AudioFadeCurve::linear);
220 fadeOutType.
referTo (
state, IDs::fadeOutType, um, AudioFadeCurve::linear);
226 loopStart.referTo (
state, IDs::loopStart, um);
227 loopLength.referTo (
state, IDs::loopLength, um);
229 loopStartBeats.referTo (
state, IDs::loopStartBeats, um);
230 loopLengthBeats.referTo (
state, IDs::loopLengthBeats, um);
233 proxyAllowed.
referTo (
state, IDs::proxyAllowed, um,
true);
237 beatSensitivity.
referTo (
state, IDs::beatSensitivity, um, 0.5f);
239 timeStretchMode.
referTo (
state, IDs::elastiqueMode, um);
253 autoDetectBeats.
referTo (
state, IDs::autoDetectBeats, um);
255 level->pan =
juce::jlimit (-1.0f, 1.0f,
static_cast<float> (level->pan.get()));
258 useClipLaunchQuantisation.
referTo (
state, IDs::useClipLaunchQuantisation, um);
260 clipEffectsVisible.
referTo (
state, IDs::effectsVisible,
nullptr);
261 updateClipEffectsState();
263 updateLeftRightChannelActivenessFlags();
265 pluginList.setTrackAndClip (
getTrack(),
this);
266 pluginList.initialise (
state);
268 asyncFunctionCaller.
addFunction (updateCrossfadesFlag, [
this] { updateAutoCrossfades (
false); });
269 asyncFunctionCaller.
addFunction (updateCrossfadesOverlappedFlag, [
this] { updateAutoCrossfades (
true); });
281 if (renderJob !=
nullptr)
282 renderJob->removeListener (
this);
294 if (shouldAttemptRender())
298 if (currentSourceFile != audioFile.getFile())
302 if (!
edit.getUndoManager().isPerformingUndoRedo())
314 level->dbGain .setValue (other->level->dbGain,
nullptr);
315 level->pan .setValue (other->level->pan,
nullptr);
316 level->mute .setValue (other->level->mute,
nullptr);
317 channels .
setValue (other->channels,
nullptr);
318 fadeIn .setValue (other->fadeIn,
nullptr);
319 fadeOut .setValue (other->fadeOut,
nullptr);
320 fadeInType .
setValue (other->fadeInType,
nullptr);
321 fadeOutType .
setValue (other->fadeOutType,
nullptr);
322 autoCrossfade .
setValue (other->autoCrossfade,
nullptr);
323 fadeInBehaviour .
setValue (other->fadeInBehaviour,
nullptr);
324 fadeOutBehaviour .
setValue (other->fadeOutBehaviour,
nullptr);
325 loopStart .setValue (other->loopStart,
nullptr);
326 loopLength .setValue (other->loopLength,
nullptr);
327 loopStartBeats .setValue (other->loopStartBeats,
nullptr);
328 loopLengthBeats .setValue (other->loopLengthBeats,
nullptr);
329 transpose .
setValue (other->transpose,
nullptr);
330 pitchChange .
setValue (other->pitchChange,
nullptr);
331 beatSensitivity .
setValue (other->beatSensitivity,
nullptr);
332 timeStretchMode .
setValue (other->timeStretchMode,
nullptr);
334 autoPitch .
setValue (other->autoPitch,
nullptr);
335 autoPitchMode .
setValue (other->autoPitchMode,
nullptr);
336 autoTempo .
setValue (other->autoTempo,
nullptr);
337 isReversed .
setValue (other->isReversed,
nullptr);
338 autoDetectBeats .
setValue (other->autoDetectBeats,
nullptr);
339 warpTime .
setValue (other->warpTime,
nullptr);
340 proxyAllowed .
setValue (other->proxyAllowed,
nullptr);
341 resamplingQuality .
setValue (other->resamplingQuality,
nullptr);
343 copyValueTree (loopInfo.state, other->loopInfo.state,
nullptr);
354void AudioClipBase::updateLeftRightChannelActivenessFlags()
370 if (clipEffects !=
nullptr)
371 clipEffects->flushStateToValueTree();
379 jassert (patternGenerator !=
nullptr);
380 return patternGenerator.get();
384void AudioClipBase::setParent (
ClipOwner* co)
386 Clip::setParent (co);
388 pluginList.setTrackAndClip (
nullptr,
this);
398 clearCachedAudioSegmentList();
409 return juce::Colours::red.
withHue (0.0f);
438 level->pan = std::abs (p) < 0.01 ? 0.0f
447 auto set = activeChannels;
461 channels = set.getSpeakerArrangementAsString();
474 auto set = activeChannels;
488 channels = set.getSpeakerArrangementAsString();
504 if (in + fadeOut > len)
506 const double scale = len / (in + fadeOut);
508 fadeOut = fadeOut * scale;
510 else if (fadeIn != in)
524 if (fadeIn + out > len)
526 const double scale = len / (fadeIn + out);
527 fadeIn = fadeIn * scale;
528 fadeOut = out * scale;
530 else if (fadeOut != out)
548 if (fadeIn + fadeOut > len)
549 return TimeDuration::fromSeconds (fadeIn * len.inSeconds() / (fadeIn + fadeOut));
563 if (fadeIn + fadeOut > len)
564 return TimeDuration::fromSeconds (fadeOut * len.inSeconds() / (fadeIn + fadeOut));
579 if (t != fadeOutType)
588 return timeStretchMode;
611 wi =
AudioFile (
edit.engine, sourceFileReference.getFile()).getInfo();
613 if (wi.lengthInSamples == 0)
621 auto bps =
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart());
636 auto n = TimeRange::between (toPosition (sourceEnd - o.getEnd()),
637 toPosition (sourceEnd - o.getStart()));
645 numLoops += clipOffset / o.getLength().
inSeconds();
646 numLoops = numLoops - (
int) numLoops;
648 auto posAtEnd = o.getStart() + (o.getLength() * numLoops);
649 auto newOffset = sourceEnd - (posAtEnd - n.getStart());
664 const SampleCount newOut = loopInfo.
getInMarker() == 0 ? -1 : (wi.lengthInSamples - loopInfo.
getInMarker());
682 if (fadeIn + fadeOut > len)
684 const double scale = len / (fadeIn + fadeOut);
685 fadeIn = fadeIn * scale;
686 fadeOut = fadeOut * scale;
690 if (autoFadeIn + autoFadeOut > len)
692 const double scale = len / (autoFadeIn + autoFadeOut);
693 autoFadeIn = autoFadeIn * scale;
694 autoFadeOut = autoFadeOut * scale;
704 const auto& clips = ct->getClips();
705 auto ourIndex = clips.indexOf (
const_cast<AudioClipBase*
> (
this));
707 if (direction == ClipDirection::next)
709 for (
int i = ourIndex + 1; i < clips.size(); ++i)
715 else if (direction == ClipDirection::previous)
717 for (
int i = ourIndex; --i >= 0;)
730 asyncFunctionCaller.
updateAsync (updateOverlapped ? updateCrossfadesOverlappedFlag
731 : updateCrossfadesFlag);
734void AudioClipBase::updateAutoCrossfades (
bool updateOverlapped)
741 if (updateOverlapped)
743 if (prevClip !=
nullptr)
744 prevClip->updateAutoCrossfades (
false);
746 if (nextClip !=
nullptr)
747 nextClip->updateAutoCrossfades (
false);
752 autoFadeIn = (prevClip !=
nullptr) ? (prevClip->getPosition().getEnd() -
getPosition().
getStart()) : fadeIn;
753 autoFadeOut = (nextClip !=
nullptr) ? (
getPosition().
getEnd() - nextClip->getPosition().getStart()) : fadeOut;
761 const auto fade = TimeDuration::fromSeconds (0.005);
778 auto& ui =
edit.engine.getUIBehaviour();
780 if (fadeTime.isEmpty())
782 ui.showWarningMessage (
TRANS(
"Could not create automation.")
784 +
TRANS(
"No fade found for this clip"));
795 if (
auto vol = at->getVolumePlugin())
796 param = vol->volParam;
798 if (param ==
nullptr)
800 ui.showWarningMessage (
TRANS(
"Could not create automation.")
802 +
TRANS(
"No volume plguin was found for this track, please insert one and try again"));
806 auto& oldCurve = param->getCurve();
808 if (oldCurve.countPointsInRegion (fadeTime) > 0)
810 if (! ui.showOkCancelAlertBox (
TRANS(
"Overwrite Existing Automation?"),
811 TRANS(
"There is already automation in this region, applying the curve will overwrite it. Is this OK?")))
816 curve.setOwnerParameter (param.get());
819 auto startValue = useFadeIn ? 0.0f : oldCurve.getValueAt (fadeTime.getStart());
820 auto endValue = useFadeIn ? oldCurve.getValueAt (fadeTime.getEnd()) : 0.0f;
825 case AudioFadeCurve::convex:
826 case AudioFadeCurve::concave:
827 case AudioFadeCurve::sCurve:
829 for (
int i = 0; i < 10; ++i)
831 auto alpha = i / 9.0f;
832 auto time = toPosition (fadeTime.getLength()) * alpha;
835 alpha = 1.0f - alpha;
838 auto value = valueLimits.getStart() + (volCurveGain * valueLimits.getLength());
839 curve.addPoint (
time, (
float) value, 0.0f);
845 case AudioFadeCurve::linear:
848 curve.addPoint (
TimePosition(), useFadeIn ? valueLimits.getStart() : valueLimits.getLength(), 0.0f);
849 curve.addPoint (toPosition (fadeTime.getLength()), useFadeIn ? valueLimits.getLength() : valueLimits.getStart(), 0.0f);
857 if (useFadeIn && (oldCurve.countPointsInRegion ({ {}, fadeTime.getStart() + (fadeTime.getLength() * 0.09) }) == 2))
858 oldCurve.removePoint (0);
868 at->setCurrentlyShownAutoParam (param);
871void AudioClipBase::setLoopInfo (
const LoopInfo& loopInfo_)
873 loopInfo = loopInfo_;
889 auto& ts =
edit.tempoSequence;
890 auto newStart = BeatPosition::fromBeats (pos.getOffset().inSeconds() * ts.getBeatsPerSecondAt (pos.getStart()));
896 setLoopRange ({ toPosition (pos.getOffset()), toPosition (pos.getOffset()) + len });
910 pos.offset = toDuration (
getTimeOfRelativeBeat (toDuration (loopStartBeats.get())) - toDuration (pos.getStart()));
914 pos.time = pos.time.withEnd (pos.time.getStart() + loopLength.get());
915 pos.offset = toDuration (loopStart);
928 return { loopStart, loopStart + loopLength };
930 auto bps =
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart());
932 return { TimePosition::fromSeconds (loopStartBeats.get().inBeats() / bps),
933 TimePosition::fromSeconds ((loopStartBeats + loopLengthBeats).inBeats() / bps) };
947 return TimePosition::fromSeconds (loopStartBeats.get().inBeats() /
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart()));
955 return TimeDuration::fromSeconds (loopLengthBeats.get().inBeats() /
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart()));
961 return loopStartBeats;
963 return BeatPosition::fromBeats (loopStart.get().inSeconds() *
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart()));
969 return loopLengthBeats;
971 return BeatDuration::fromBeats (loopLength.get().inSeconds() *
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart()));
979 auto& ts =
edit.tempoSequence;
980 auto newStart = BeatPosition::fromBeats (newRange.getStart().inSeconds() * ts.getBeatsPerSecondAt (pos.getStart()));
981 auto newLength = ts.toBeats (pos.getStart() + newRange.getLength()) - ts.toBeats (pos.getStart());
991 const double maxMultiplesOfSourceLengthForLooping = 50.0;
994 auto newLength =
juce::jlimit (0_td, sourceLen * maxMultiplesOfSourceLengthForLooping /
getSpeedRatio(), newRange.getLength());
996 if (loopStart != newStart || loopLength != newLength)
998 loopStart = newStart;
999 loopLength = newLength;
1007 auto newStartBeat =
juce::jlimit (0_bp, BeatPosition::fromBeats (loopInfo.
getNumBeats()), newRangeBeats.getStart());
1008 auto newLengthBeat =
juce::jlimit (0_bd, BeatDuration::fromBeats (loopInfo.
getNumBeats() * 2), newRangeBeats.getLength());
1010 if (loopStartBeats != newStartBeat || loopLengthBeats != newLengthBeat)
1012 Clip::setSpeedRatio (1.0);
1015 loopStartBeats = newStartBeat;
1016 loopLengthBeats = newLengthBeat;
1020void AudioClipBase::setAutoDetectBeats (
bool b)
1022 autoDetectBeats = b;
1026void AudioClipBase::setBeatSensitivity (
float s)
1028 beatSensitivity = s;
1034 clearCachedAudioSegmentList();
1041void AudioClipBase::clearCachedAudioSegmentList()
1043 if (!
edit.isLoading())
1044 TRACKTION_ASSERT_MESSAGE_THREAD
1046 audioSegmentList.reset();
1051 if (!
edit.isLoading())
1052 TRACKTION_ASSERT_MESSAGE_THREAD
1054 if (audioSegmentList ==
nullptr)
1055 audioSegmentList = AudioSegmentList::create (*
this,
false,
false);
1057 return *audioSegmentList;
1062 resamplingQuality = rq;
1067 return resamplingQuality;
1079 Clip::setSpeedRatio (r);
1080 setLoopRange ({ newLoopStart, newLoopStart + newLoopLen });
1084bool AudioClipBase::isUsingMelodyne()
const
1086 return TimeStretcher::isMelodyne (timeStretchMode);
1089void AudioClipBase::loadMelodyneState()
1094void AudioClipBase::showMelodyneWindow()
1100void AudioClipBase::hideMelodyneWindow()
1106void AudioClipBase::melodyneConvertToMIDI()
1112 if (m.getNumEvents() > 0)
1127 auto& ts =
edit.tempoSequence;
1129 for (
int i = 0; i < m.getNumEvents(); ++i)
1131 auto& e = *m.getEventPointer (i);
1133 if (e.noteOffObject !=
nullptr)
1136 note.setProperty (
"p", e.message.getNoteNumber(), um);
1137 note.setProperty (
"v", e.message.getVelocity(), um);
1138 note.setProperty (
"b", ts.toBeats (TimePosition::fromSeconds (e.message.getTimeStamp())).inBeats(), um);
1139 note.setProperty (
"l", (ts.toBeats (TimePosition::fromSeconds (e.noteOffObject->message.getTimeStamp()))
1140 - ts.toBeats (TimePosition::fromSeconds (e.message.getTimeStamp()))).inBeats(), um);
1152 edit.engine.getUIBehaviour().showWarningMessage (
TRANS(
"No MIDI notes were found by the plugin!"));
1159 timeStretchMode = TimeStretcher::checkModeIsAvailable (mode);
1167 if (warpTimeManager ==
nullptr)
1172 jassert (warpTimeManager !=
nullptr);
1175 return *warpTimeManager;
1178int AudioClipBase::getTransposeSemiTones (
bool includeAutoPitch)
const
1180 if (autoPitch && includeAutoPitch)
1182 int pitch =
edit.pitchSequence.getPitchAt (
getPosition().getStart() + TimeDuration::fromSeconds (0.0001)).getPitch();
1185 while (transposeBase > 6) transposeBase -= 12;
1186 while (transposeBase < -6) transposeBase += 12;
1188 return transpose + transposeBase;
1194LoopInfo AudioClipBase::autoDetectBeatMarkers (
const LoopInfo& current,
bool autoBeat,
float sens)
const
1207 const auto end = (loopInfo.
getOutMarker() == -1) ? reader->lengthInSamples
1211 detect.setSensitivity (sens);
1212 detect.setSampleRate (reader->sampleRate);
1214 if ((end - start) > reader->sampleRate)
1216 auto blockLength = detect.getBlockSize();
1217 auto blockSize = choc::buffer::Size::create (reader->numChannels, blockLength);
1220 choc::buffer::ChannelArrayBuffer<float> buffer (blockSize);
1222 while (pos + blockLength < end)
1224 if (! reader->read (buffer.getView().data.channels,
1225 (
int) reader->numChannels, pos, (
int) blockLength))
1228 detect.audioProcess (buffer);
1232 for (
auto beat : detect.getBeats())
1241bool AudioClipBase::performTempoDetect()
1245 edit.engine.getUIBehaviour().runTaskWithProgressBar (tempoDetectTask);
1261 for (
int i = 0; i < 12; ++i)
1262 s.
add (Pitch::getPitchAsString (e, i));
1275 for (
int i = numSemitones; i >= 1; i--)
1280 for (
int i = 1; i <= numSemitones; ++i)
1285 const int base = autoPitch ?
edit.pitchSequence.getPitchAt (
getPosition().getStart()).getPitch()
1288 for (
int i = numSemitones; i >= 1; i--)
1291 s.
add (
"0 : " + Pitch::getPitchAsString (
edit.engine, base));
1293 for (
int i = 1; i <= numSemitones; ++i)
1300void AudioClipBase::enableEffects (
bool enable,
bool warn)
1311 clipEffectsVisible =
true;
1316 if (! warn ||
edit.engine.getUIBehaviour().showOkCancelAlertBox (
TRANS(
"Remove Clip Effects"),
1317 TRANS(
"Are you sure you want to remove all clip effects?")))
1348 return TimePosition::fromSeconds (b.inBeats() / loopInfo.
getBeatsPerSecond (getAudioFile().getInfo()));
1353 return TimePosition::fromSeconds (b.inBeats() / loopInfo.
getBeatsPerSecond (getAudioFile().getInfo()));
1369void AudioClipBase::addMark (TimePosition relCursorPos)
1371 if (
auto sourceItem = sourceFileReference.getSourceProjectItem())
1373 auto marks = sourceItem->getMarkedPoints();
1374 marks.add (clipTimeToSourceFileTime (relCursorPos));
1375 sourceItem->setMarkedPoints (marks);
1379void AudioClipBase::moveMarkTo (TimePosition relCursorPos)
1381 if (
auto sourceItem = sourceFileReference.getSourceProjectItem())
1383 auto marks = sourceItem->getMarkedPoints();
1389 int indexOfNearest = -1;
1390 auto nearestDiff = Edit::getMaximumEditEnd();
1392 for (
int i = rescaled.
size(); --i >= 0;)
1394 auto diff = TimePosition::fromSeconds (std::abs ((rescaled[i] - toDuration (relCursorPos)).inSeconds()));
1396 if (diff < nearestDiff)
1399 indexOfNearest = index[i];
1403 if (indexOfNearest != -1)
1405 marks.set (indexOfNearest, clipTimeToSourceFileTime (relCursorPos));
1406 sourceItem->setMarkedPoints (marks);
1411void AudioClipBase::deleteMark (TimePosition relCursorPos)
1413 if (
auto sourceItem = sourceFileReference.getSourceProjectItem())
1415 auto marks = sourceItem->getMarkedPoints();
1421 int indexOfNearest = -1;
1422 auto nearestDiff = Edit::getMaximumEditEnd();
1424 for (
int i = rescaled.
size(); --i >= 0;)
1426 auto diff = TimePosition::fromSeconds (std::abs ((rescaled[i] - toDuration (relCursorPos)).inSeconds()));
1428 if (diff < nearestDiff)
1431 indexOfNearest = index[i];
1435 if (indexOfNearest != -1)
1437 marks.remove (indexOfNearest);
1438 sourceItem->setMarkedPoints (marks);
1443bool AudioClipBase::canSnapToOriginalBWavTime()
1448void AudioClipBase::snapToOriginalBWavTime()
1455 auto t = TimePosition::fromSeconds (bwavTime.
getLargeIntValue() / f.getSampleRate());
1464 auto results = Clip::getReferencedItems();
1467 item.firstTimeUsed = 0;
1468 item.lengthUsed = 0;
1470 if (! getAutoTempo())
1481 item.firstTimeUsed = (
getLoopStart() * speed).inSeconds();
1490 item.itemID = takeID;
1494 jassert (! results.isEmpty());
1498 item.itemID = ProjectItemID (sourceFileReference.source.
get());
1504 for (
auto& ref : results)
1506 auto wi =
edit.engine.getAudioFileManager().getAudioFile (
ref.itemID).getInfo();
1508 if (wi.sampleRate > 0)
1510 ref.firstTimeUsed = 0;
1511 ref.lengthUsed = wi.getLengthInSeconds();
1520 ProjectItemID newItemID,
double newStartTime)
1522 Clip::reassignReferencedItem (item, newItemID, newStartTime);
1531 loopStart = loopStart - TimeDuration::fromSeconds ((newStartTime /
getSpeedRatio()));
1551 return launchHandle;
1556 if (! launchQuantisation)
1559 return launchQuantisation.get();
1564 if (! followActions)
1567 return followActions.
get();
1576 if (! p->canBeAddedToClip())
1577 return TRANS(
"Can't add this kind of plugin to a clip!");
1579 if (pluginList.size() >=
edit.engine.getEngineBehaviour().getEditLimits().maxPluginsOnClip)
1580 return TRANS(
"Can't add any more plugins to this clip!");
1590 pluginList.insertPlugin (p, -1, &sm);
1599 return pluginList.getPlugins();
1604 pluginList.sendMirrorUpdateToAllPlugins (p);
1608bool AudioClipBase::setupARA (
bool dontPopupErrorMessages)
1610 TRACKTION_ASSERT_MESSAGE_THREAD
1611 static bool araReentrancyCheck =
false;
1613 if (araReentrancyCheck)
1618 #if TRACKTION_ENABLE_ARA
1623 TRACKTION_LOG (
"Created ARA reader!");
1630 if (! dontPopupErrorMessages)
1632 TRACKTION_LOG_ERROR (
"Failed setting up ARA for audio clip!");
1634 if (TimeStretcher::isMelodyne (timeStretchMode)
1635 &&
edit.engine.getPluginManager().getARACompatiblePlugDescriptions().size() <= 0)
1637 TRACKTION_LOG_ERROR (
"No ARA-compatible plugins were found!");
1639 edit.engine.getUIBehaviour().showWarningMessage (
TRANS (
"This audio clip is setup with Melodyne's time-stretching, but there aren't any ARA-compatible plugins available!")
1641 +
TRANS (
"If you know you have ARA-compatible plugins installed, they must be scanned and part of the list of known plugins!"));
1657void AudioClipBase::markAsDirty()
1659 lastRenderJobFailed =
false;
1663void AudioClipBase::updateSourceFile()
1667 if (! isInitialised)
1670 TRACKTION_ASSERT_MESSAGE_THREAD
1674 const AudioFile audioFile (RenderManager::getAudioFileForHash (
edit.engine,
edit.getTempDirectory (
false),
getHash()));
1679 if (renderJob !=
nullptr && renderJob->proxy == audioFile && (! renderJob->shouldExit()))
1682 if (! (audioFile.getFile().existsAsFile() || audioFile.isValid()))
1686 else if (renderJob !=
nullptr)
1688 renderJob->removeListener (
this);
1689 renderJob =
nullptr;
1693void AudioClipBase::renderSource()
1695 TRACKTION_ASSERT_MESSAGE_THREAD
1699 const bool isValid = audioFile.isValid();
1701 if (audioFile.getFile().existsAsFile() && isValid)
1707 bool needsToChangeJob = renderJob ==
nullptr
1708 || (renderJob !=
nullptr && (renderJob->proxy != audioFile));
1710 if (needsToChangeJob)
1712 if (renderJob !=
nullptr)
1713 renderJob->removeListener (
this);
1715 renderJob = getRenderJob (audioFile);
1717 if (renderJob !=
nullptr)
1718 renderJob->addListener (
this);
1726 if (renderJob !=
nullptr)
1728 renderJob->removeListener (
this);
1729 renderJob =
nullptr;
1736void AudioClipBase::renderComplete()
1738 TRACKTION_ASSERT_MESSAGE_THREAD
1742 if (clipEffects !=
nullptr)
1743 clipEffects->notifyListenersOfRenderCompletion();
1757 if (
auto sourceItem = sourceFileReference.getSourceProjectItem())
1761 auto beats = sourceItem->getMarkedPoints();
1762 auto afi = getAudioFile().getInfo();
1764 for (
int i = 0; i <
beats.size(); ++i)
1773 for (
int i = 0; i <
beats.size(); ++i)
1786 for (
int i = 0; i <
beats.size(); ++i)
1788 auto newB = BeatDuration::fromBeats (
beats[i].inSeconds() + (toPosition (b) -
getLoopStartBeats()).inBeats());
1790 if (newB >= b && newB < b + loopLen)
1802 for (
int i = 0; i <
beats.size(); ++i)
1806 if (newT >= TimePosition())
1818 auto origTimes = sourceItem->getMarkedPoints();
1820 for (
int i = origTimes.size(); --i >= 0;)
1827 for (
int i = 0; i < origTimes.size(); ++i)
1829 if (origTimes[i] >= TimePosition() && origTimes[i] < toPosition (t))
1831 times.add(origTimes[i]);
1838 for (
int i = 0; i < origTimes.size(); ++i)
1842 if (newT >= t && newT < t + loopLen)
1844 times.add(toPosition (newT));
1854 times = sourceItem->getMarkedPoints();
1856 for (
int i = 0; i <
times.size(); ++i)
1872 if (clipEffects !=
nullptr)
1873 return clipEffects->isUsingFile (af);
1878void AudioClipBase::setUsesProxy (
bool canUseProxy)
noexcept
1880 proxyAllowed = canUseProxy;
1884bool AudioClipBase::usesTimeStretchedProxy()
const
1893 && TimeStretcher::canProcessFor (timeStretchMode));
1899AudioFile AudioClipBase::getProxyFileToCreate (
bool renderTimestretched)
1901 if (renderTimestretched)
1902 return TemporaryFileManager::getFileForCachedClipRender (*
this,
getProxyHash());
1904 return TemporaryFileManager::getFileForCachedFileRender (
edit,
getHash());
1910 static constexpr int maxNumChannels = 8;
1916 fileInfo (file.getInfo()),
1917 crossfadeSamples ((
int) tracktion::toSamples (info.audioSegmentList->getCrossfadeLength(), sampleRate)),
1918 numChannelsToUse (
juce::jlimit (1, maxNumChannels, fileInfo.numChannels))
1923 if (reader !=
nullptr)
1925 auto sampleRange = segment.getSampleRange();
1927 if (segment.isFollowedBySilence())
1929 reader->setReadPosition (sampleRange.getStart());
1933 reader->setLoopRange (sampleRange);
1934 reader->setReadPosition (0);
1937 timestretcher.
initialise (fileInfo.sampleRate, outputBufferSize, numChannelsToUse,
1938 info.mode, info.options,
false);
1942 segment.getTranspose());
1948 if (reader ==
nullptr)
1953 auto loopRange = segment.getRange();
1955 if (! editTime.overlaps (loopRange))
1960 if (loopRange.getEnd() < editTime.getEnd())
1961 numSamples =
std::max (0, (
int) (numSamples * (loopRange.getEnd() - editTime.getStart()).inSeconds()
1962 / editTime.getLength().inSeconds()));
1964 if (loopRange.getStart() > editTime.getStart())
1966 auto skip =
juce::jlimit (0, numSamples, (
int) (numSamples * (loopRange.getStart() - editTime.getStart()).inSeconds() / editTime.getLength().inSeconds()));
1971 while (numSamples > 0)
1973 auto numReady =
std::min (numSamples, readySamplesEnd - readySamplesStart);
1978 buffer.
addFrom (i, start, fifo,
1980 readySamplesStart, numReady);
1982 readySamplesStart += numReady;
1984 numSamples -= numReady;
1988 auto blockSize = fillNextBlock();
1989 renderFades (blockSize);
1991 readySampleOutputPos += blockSize;
1999 float* outs[maxNumChannels] = {};
2001 for (
int i = 0; i < numChannelsToUse; ++i)
2012 auto sourceChannelsToUse = bufferChannels;
2017 jassert (reader->readSamples (needed, scratch.
buffer, bufferChannels, 0, sourceChannelsToUse, 5000));
2019 reader->readSamples (needed, scratch.
buffer, bufferChannels, 0, sourceChannelsToUse, 5000);
2023 const float* ins[maxNumChannels] = {};
2025 for (
int i = 0; i < numChannelsToUse; ++i)
2028 numRead = timestretcher.
processData (ins, needed, outs);
2033 numRead = timestretcher.
flush (outs);
2036 readySamplesStart = 0;
2037 readySamplesEnd = numRead;
2042 void renderFades (
int numSamples)
2045 auto renderedEnd = readySampleOutputPos + numSamples;
2047 if (segment.hasFadeIn())
2048 if (readySampleOutputPos < crossfadeSamples)
2049 renderFade (0, crossfadeSamples,
false, numSamples);
2051 if (segment.hasFadeOut())
2053 auto fadeOutStart = (SampleCount) (segment.getSampleRange().
getLength() / segment.getStretchRatio()) - crossfadeSamples;
2055 if (renderedEnd > fadeOutStart)
2056 renderFade (fadeOutStart, fadeOutStart + crossfadeSamples + 2,
true, numSamples);
2060 void renderFade (SampleCount start, SampleCount end,
bool isFadeOut,
int numSamples)
2062 float alpha1 = 0.0f, alpha2 = 1.0f;
2063 auto renderedEnd = readySampleOutputPos + numSamples;
2065 if (end > renderedEnd)
2067 alpha2 = (renderedEnd - start) / (
float) (end - start);
2071 if (start < readySampleOutputPos)
2073 alpha1 = alpha2 * (readySampleOutputPos - start) / (
float) (end - start);
2074 start = readySampleOutputPos;
2081 alpha1 = 1.0f - alpha1;
2082 alpha2 = 1.0f - alpha2;
2085 AudioFadeCurve::applyCrossfadeSection (fifo,
2086 (
int) (start - readySampleOutputPos),
2087 (
int) (end - start),
2088 AudioFadeCurve::convex, alpha1, alpha2);
2098 const int outputBufferSize = 1024;
2099 int readySamplesStart = 0, readySamplesEnd = 0;
2100 SampleCount readySampleOutputPos = 0;
2101 const int crossfadeSamples, numChannelsToUse;
2111 p->audioSegmentList = AudioSegmentList::create (*
this,
true,
true);
2114 p->mode = (timeStretchMode != TimeStretcher::disabled && timeStretchMode != TimeStretcher::melodyne)
2116 : TimeStretcher::defaultMode;
2127 if (audioSegmentList->getSegments().isEmpty() || ! sourceFile.isValid())
2132 auto sampleRate = sourceFile.getSampleRate();
2134 for (
auto& segment : audioSegmentList->getSegments())
2135 segments.
add (
new StretchSegment (engine, sourceFile, *
this, sampleRate, segment));
2137 const int samplesPerBlock = 1024;
2141 auto numBlocks = 1 + (
int) (clipTime.getLength().inSeconds() * sampleRate / samplesPerBlock);
2143 for (
int i = 0; i < numBlocks; ++i)
2150 auto endTime =
time + samplesPerBlock / sampleRate;
2151 const auto editTime = TimeRange (TimePosition::fromSeconds (
time), TimePosition::fromSeconds (endTime));
2154 for (
auto s : segments)
2155 s->renderNextBlock (buffer, editTime, samplesPerBlock);
2160 progress = i / (
float) numBlocks;
2175 if (timestretched || af.getInfo().needsCachedProxy)
2188 if (
auto sourceItem = sourceFileReference.getSourceProjectItem())
2189 return AudioFile (
edit.engine, sourceItem->getSourceFile()).getInfo();
2194HashCode AudioClipBase::getProxyHash()
2201 ^
static_cast<HashCode
> (timeStretchMode.
get())
2203 ^ (7342847 *
static_cast<HashCode
> (pitchChange * 199.0))
2204 ^
static_cast<HashCode
> (clipPos.getLength().inSeconds() * 10005.0)
2205 ^
static_cast<HashCode
> (clipPos.getOffset().inSeconds() * 9997.0)
2217 for (
auto& segment : segmentList.getSegments())
2218 hash ^=
static_cast<HashCode
> (segment.getHashCode() * (i++ + 0.1));
2224void AudioClipBase::beginRenderingNewProxyIfNeeded()
2237 if (playFile.isNull())
2242 if (shouldAttemptRender() && ! original.isValid())
2246 if (playFile.getSampleRate() <= 0.0)
2253 TRACKTION_ASSERT_MESSAGE_THREAD
2255 if (&job == renderJob.
get())
2257 lastRenderJobFailed = ! completedOk;
2258 renderJob->removeListener (
this);
2259 renderJob =
nullptr;
2266void AudioClipBase::createNewProxyAsync()
2272void AudioClipBase::cancelCurrentRender()
2274 if (renderJob !=
nullptr)
2275 renderJob->cancelJob();
2280 if (
edit.isLoading()
2281 || !
edit.getTransport().isAllowedToReallocate())
2285 if (shouldAttemptRender())
2289 if (! getAudioFile().isValid())
2291 createNewProxyAsync();
2298 if (! canUseProxy())
2301 const bool isTimeStretched = usesTimeStretchedProxy();
2303 const AudioFile originalFile (getAudioFile());
2304 const AudioFile newProxy (getPlaybackFile());
2306 const bool proxyChanged = lastProxy != newProxy;
2308 if (proxyChanged || ! newProxy.getFile().exists())
2311 && lastProxy != originalFile
2312 && lastProxy.getFile().
isAChildOf (
edit.getTempDirectory (
false))
2313 && !
edit.areAnyClipsUsingFile (lastProxy))
2316 lastProxy = newProxy;
2318 if (isTimeStretched || newProxy != originalFile)
2321 .beginJob (
new ProxyGeneratorJob (getAudioFile(), newProxy, *
this, isTimeStretched));
2324 if (proxyChanged || newProxy.getFile().exists())
2326 Selectable::changed();
2327 edit.restartPlayback();
2336 if (
id == IDs::fadeInType ||
id == IDs::fadeOutType
2337 ||
id == IDs::fadeInBehaviour ||
id == IDs::fadeOutBehaviour
2338 ||
id == IDs::fadeIn ||
id == IDs::fadeOut
2339 ||
id == IDs::loopStart ||
id == IDs::loopLength
2340 ||
id == IDs::loopStartBeats ||
id == IDs::loopLengthBeats
2341 ||
id == IDs::transpose ||
id == IDs::pitchChange
2342 ||
id == IDs::elastiqueMode ||
id == IDs::autoPitch
2343 ||
id == IDs::elastiqueOptions ||
id == IDs::warpTime
2344 ||
id == IDs::effectsVisible ||
id == IDs::autoPitchMode
2345 ||
id == IDs::resamplingQuality
2346 ||
id == IDs::launchQuantisation ||
id == IDs::useClipLaunchQuantisation)
2348 if (
id == IDs::warpTime)
2354 if (shouldAttemptRender())
2363 else if (
id == IDs::gain)
2367 else if (
id == IDs::pan ||
id == IDs::mute
2368 ||
id == IDs::autoCrossfade)
2372 if (
id == IDs::mute)
2375 if (
auto f = track->getParentFolderTrack())
2378 else if (
id == IDs::autoCrossfade)
2383 else if (
id == IDs::autoTempo)
2388 updateAutoTempoState();
2391 else if (
id == IDs::isReversed)
2394 updateReversedState();
2396 else if (
id == IDs::channels)
2399 updateLeftRightChannelActivenessFlags();
2402 else if (
id == IDs::proxyAllowed)
2404 propertiesChanged();
2411 else if (tree.
hasType (IDs::WARPMARKER))
2413 if (
id == IDs::warpTime ||
id == IDs::sourceTime)
2416 else if (tree.
hasType (IDs::LOOPINFO))
2429 if (parentState ==
state)
2431 if (child.
hasType (IDs::PLUGIN))
2432 Selectable::changed();
2433 else if (child.
hasType (IDs::EFFECTS))
2434 updateClipEffectsState();
2435 else if (child.
hasType (IDs::PATTERNGENERATOR))
2437 else if (child.
hasType (IDs::LOOPINFO) && isInitialised)
2438 loopInfo.state = child;
2440 else if (parentState.
hasType (IDs::LOOPINFO) || child.
hasType (IDs::WARPMARKER))
2452 if (parentState ==
state)
2454 if (child.
hasType (IDs::PLUGIN))
2455 Selectable::changed();
2456 else if (child.
hasType (IDs::EFFECTS))
2457 updateClipEffectsState();
2458 else if (child.
hasType (IDs::PATTERNGENERATOR))
2459 patternGenerator =
nullptr;
2460 else if (child.
hasType (IDs::LOOPINFO) && isInitialised)
2461 copyValueTree (loopInfo.state, LoopInfo (
edit.engine).state,
nullptr);
2463 else if (parentState.
hasType (IDs::LOOPINFO) || child.
hasType (IDs::WARPMARKER))
2485void AudioClipBase::updateReversedState()
2496 SelectionManager::refreshAllPropertyPanels();
2499void AudioClipBase::updateAutoTempoState()
2503 auto bps =
edit.tempoSequence.getBeatsPerSecondAt (
getPosition().getStart());
2508 loopStartBeats = BeatPosition::fromBeats (loopStart.get().inSeconds() * bps);
2509 loopLengthBeats = BeatDuration::fromBeats (loopLength.get().inSeconds() * bps);
2517 loopStart = TimePosition::fromSeconds (loopStartBeats.get().inBeats() / bps);
2518 loopLength = TimeDuration::fromSeconds (loopLengthBeats.get().inBeats() / bps);
2520 loopStartBeats = 0_bp;
2521 loopLengthBeats = 0_bd;
2528void AudioClipBase::updateClipEffectsState()
2534 if (clipEffects ==
nullptr)
2540 else if (clipEffects !=
nullptr)
2542 clipEffects =
nullptr;
2544 if (
auto sourceItem = sourceFileReference.getSourceProjectItem())
int size() const noexcept
Type * getWritePointer(int channelNumber) noexcept
int getNumChannels() const noexcept
void addFrom(int destChannel, int destStartSample, const AudioBuffer &source, int sourceChannel, int sourceStartSample, int numSamples, Type gainToApplyToSource=Type(1)) noexcept
const Type * getReadPointer(int channelNumber) const noexcept
int size() const noexcept
static AudioChannelSet JUCE_CALLTYPE disabled()
static AudioChannelSet JUCE_CALLTYPE stereo()
void addChannel(ChannelType newChannelType)
String getSpeakerArrangementAsString() const
static AudioChannelSet JUCE_CALLTYPE canonicalChannelSet(int numChannels)
int getChannelIndexForType(ChannelType type) const noexcept
void forceUpdateOfCachedValue()
void setValue(const Type &newValue, UndoManager *undoManagerToUse)
void referTo(ValueTree &tree, const Identifier &property, UndoManager *um)
Type get() const noexcept
Colour withHue(float newHue) const noexcept
String getFileExtension() const
File getSiblingFile(StringRef siblingFileName) const
File withFileExtension(StringRef newExtension) const
bool isAChildOf(const File &potentialParentDirectory) const
bool isValid() const noexcept
ObjectClass * add(ObjectClass *newObject)
int64 nextInt64() noexcept
static constexpr Range between(const ValueType position1, const ValueType position2) noexcept
constexpr ValueType getLength() const noexcept
ReferencedType * get() const noexcept
void add(String stringToAdd)
bool isEmpty() const noexcept
static String toHexString(IntegerType number)
int64 getLargeIntValue() const noexcept
bool isNotEmpty() const noexcept
bool shouldExit() const noexcept
void stopTimer() noexcept
bool isTimerRunning() const noexcept
void startTimer(int intervalInMilliseconds) noexcept
virtual void timerCallback()=0
virtual void valueTreeChildRemoved(ValueTree &parentTree, ValueTree &childWhichHasBeenRemoved, int indexFromWhichChildWasRemoved)
virtual void valueTreeChildOrderChanged(ValueTree &parentTreeWhoseChildrenHaveMoved, int oldIndex, int newIndex)
virtual void valueTreeParentChanged(ValueTree &treeWhoseParentHasChanged)
virtual void valueTreePropertyChanged(ValueTree &treeWhosePropertyHasChanged, const Identifier &property)
virtual void valueTreeChildAdded(ValueTree &parentTree, ValueTree &childWhichHasBeenAdded)
bool hasType(const Identifier &typeName) const noexcept
void removeChild(const ValueTree &child, UndoManager *undoManager)
bool isValid() const noexcept
ValueTree & setProperty(const Identifier &name, const var &newValue, UndoManager *undoManager)
void addChild(const ValueTree &child, int index, UndoManager *undoManager)
const var & getProperty(const Identifier &name) const noexcept
ValueTree getChildWithName(const Identifier &type) const
ValueTree getOrCreateChildWithName(const Identifier &type, UndoManager *undoManager)
void removeProperty(const Identifier &name, UndoManager *undoManager)
Performs a tempo detection task on a background thread.
TempoDetectTask(Engine &e, const juce::File &file)
Creates a task for a given file.
float getBpm()
Returns the bpm after a successful detection.
bool isResultSensible()
Returns true if the result was within a sensible range.
JobStatus runJob() override
Performs the actual detection.
float getCurrentTaskProgress() override
Returns the current progress.
Base class for Clips that produce some kind of audio e.g.
@ gainFade
Fade is a volume/gain ramp.
virtual bool needsRender() const
Subclasses should override this to return true if they need the rest of the render callbacks.
std::unique_ptr< ProxyRenderingInfo > createProxyRenderingInfo()
Creates a ProxyRenderingInfo object to describe the stretch segments of this clip.
void reverseLoopPoints()
Reverses the loop points to expose the same section of the source file but reversed.
virtual juce::File getOriginalFile() const =0
Must return the file that the source ProjectItemID refers to.
TimeDuration getFadeOut() const
Returns the fade out duration in seconds.
virtual TimeDuration getSourceLength() const =0
Must return the length in seconds of the source material e.g.
virtual bool canHaveEffects() const
Returns true if this clip can have ClipEffects added to it.
LoopInfo autoDetectBeatMarkers(const LoopInfo &current, bool autoBeat, float sensitivity) const
Scans the current source file for any beats and adds them to the LoopInfo returned.
void updateSourceFile()
Checks the current source file to see if it's up to date and then triggers a source render if needed.
virtual void renderComplete()
Callback to indicate that the render has completed.
juce::String canAddClipPlugin(const Plugin::Ptr &) const
Returns an empty string if this plugin can be added, otherwise an error message due to the clip plugi...
bool isRightChannelActive() const
Returns whether the right channel of the clip is enabled.
const AudioSegmentList & getAudioSegmentList()
Returns an AudioSegmentList describing this file if it is using auto-tempo.
juce::Colour getDefaultColour() const override
Returns the default colour for this clip.
void setLeftChannelActive(bool)
Enables the left channel of the clip.
void setGainDB(float dB)
Sets the gain of the clip in dB.
TimeDuration getFadeIn() const
Returns the fade in duration in seconds.
AudioClipBase * getOverlappingClip(ClipDirection) const
Returns the previous/next overlapping clip if one exists.
void updateAutoCrossfadesAsync(bool updateOverlapped)
Triggers an update of the auto-crossfades.
void setRightChannelActive(bool)
Enables the right channel of the clip.
void setPan(float pan)
Sets the pan of the clip.
virtual AudioFile getAudioFile() const
Returns the file used to play back the source and will get proxied etc.
TimeStretcher::Mode getActualTimeStretchMode() const noexcept
Returns the time-stretch mode that is in use.
bool setFadeIn(TimeDuration length)
Sets the fade in duration in seconds.
void disableLooping() override
Disables all looping.
virtual HashCode getHash() const =0
Must return a unique hash for this clip's source.
void setFadeInType(AudioFadeCurve::Type)
Sets the curve shape for the fade in to use.
void flushStateToValueTree() override
Can be overridden to ensure any state (e.g.
AudioFile getProxyFileToCreate(bool renderTimestretched)
Returns the AudioFile to create to play this clip back.
virtual void setLoopDefaults()=0
Override this to fill in the LoopInfo structure as best fits the source.
void setLoopRange(TimeRange) override
Sets the loop range the clip should use in seconds.
float getPitchChange() const
Returns the number of semitones to transpose the clip by.
bool getWarpTime() const
Returns true if warp time is enabled.
virtual juce::Array< ProjectItemID > getTakes() const
Returns the ProjectItemID of the clip's takes.
bool isLooping() const override
Returns true if this clip is currently looping.
bool getAutoTempo() const
Returns true if auto-tempo has been set.
bool isUsingMelodyne() const
Returns true if this clip is using Melodyne.
void setFadeOutType(AudioFadeCurve::Type newType)
Sets the curve shape for the fade out to use.
TimeRange getLoopRange() const
Returns the loop range in seconds.
~AudioClipBase() override
Destructor.
void createNewProxyAsync()
Triggers a source or proxy render after a timeout.
void checkFadeLengthsForOverrun()
Trims the fade in out lengths to avoid any overlap between them.
TimeStretcher::Mode getTimeStretchMode() const noexcept
Returns the time-stretch mode that has been set.
virtual AudioFileInfo getWaveInfo()
Returns the WaveInfo for a clip.
AudioFadeCurve::Type getFadeOutType() const
Returns the curve shape for the fade out to use.
bool usesTimeStretchedProxy() const
Returns true if this clip uses a proxy file due to timestretching.
bool isLeftChannelActive() const
Returns whether the left channel of the clip is enabled.
void copyFadeToAutomation(bool fadeIn, bool removeClipFade)
Copies the fade in curve to a volume automation curve.
bool setFadeOut(TimeDuration length)
Sets the fade out duration in seconds.
void initialise() override
Initialises the Clip.
AudioFadeCurve::Type getFadeInType() const
Returns the curve shape for the fade in to use.
bool getAutoPitch() const
Returns true if auto-pitch has been set.
void markAsDirty()
Resets the dirty flag so that a new render will be attempted.
TimeDuration getMaximumLength() override
Returns the maximum length for this clip.
void setLoopInfo(const LoopInfo &)
Sets a LoopInfo to describe this clip's tempo, time sig etc.
void applyEdgeFades()
Sets the fade in/out lengths to be 0.03s to avoid any clicks at the start/end of the clip.
juce::ReferenceCountedObjectPtr< MelodyneFileReader > melodyneProxy
The MelodyneFileReader proxy if this clip is using Melodyne.
ClipDirection
Defines a previous/next direction.
bool canUseProxy() const noexcept
Returns true if this clip can use a proxy file.
bool canBeAddedTo(ClipOwner &) override
Tests whether this clip can go on the given parent.
HashCode getProxyHash()
Returns a hash identifying the proxy settings.
void changed() override
This should be called to send a change notification to any SelectableListeners that are registered wi...
void cloneFrom(Clip *) override
Clones the given clip to this clip.
AudioClipBase(const juce::ValueTree &, EditItemID, Type, ClipOwner &)
Creates a basic AudioClip.
void setUsesProxy(bool canUseProxy) noexcept
Can be used to disable proxy file generation for this clip.
AudioFile getPlaybackFile()
Returns the current AudioFile being used by the Clip, either the original source or a proxy.
PatternGenerator * getPatternGenerator() override
Returns the PatternGenerator for this clip if it has one.
void setAutoTempo(bool shouldUseAutoTempo)
Enables/disables auto-tempo.
juce::CachedValue< TimeStretcher::ElastiqueProOptions > elastiqueProOptions
The ElastiqueProOptions for fine tuning Elastique (if available).
Reader::Ptr createReader(const AudioFile &)
Creates a Reader to read an AudioFile.
Smart wrapper for writing to an audio file.
bool appendBuffer(juce::AudioBuffer< float > &buffer, int numSamples)
Appends an AudioBuffer to the file.
bool writeFromAudioReader(juce::AudioFormatReader &, SampleCount startSample, SampleCount numSamples)
Appends a block of samples to the file from an audio format reader.
bool isOpen() const noexcept
Returns true if the file is open and ready to write to.
An audio scratch buffer that has pooled storage.
juce::AudioBuffer< float > & buffer
The buffer to use.
Holds a list of audio regions for playback of things like warp time.
Base class for items that can contain clips.
virtual void disableLooping()
Disables all looping.
virtual juce::Array< TimePosition > getRescaledMarkPoints() const
Returns the mark points relative to the start of the clip, rescaled to the current speed.
virtual TimePosition getLoopStart() const
Returns the start time of the loop start point.
virtual Plugin::Array getAllPlugins()
Returns all the plugins on the clip.
juce::Array< ReferencedItem > getReferencedItems() override
Returns an array of any ReferencedItem[s] e.g.
virtual bool beatBasedLooping() const
Returns true if this clip's looping is based on beats or false if absolute time.
virtual void sendMirrorUpdateToAllPlugins(Plugin &) const
Sends an update to all plugins mirroring the one passed in.
ClipTrack * getClipTrack() const
Returns the parent ClipTrack this clip is on (if any).
virtual juce::String getName() const override
Returns the name of the clip.
void changed() override
This should be called to send a change notification to any SelectableListeners that are registered wi...
void setOffset(TimeDuration newOffset)
Sets the offset of the clip, i.e.
virtual bool hasAnyTakes() const
Returns true if this clip has any takes.
virtual bool isLooping() const
Returns true if this clip is currently looping.
virtual void pitchTempoTrackChanged()
Called when there are pitch or tempo changes made which might require clips to adjust timing informat...
juce::ValueTree state
The ValueTree of the Clip state.
virtual void flushStateToValueTree()
Can be overridden to ensure any state (e.g.
virtual TimeDuration getMaximumLength()
Returns the maximum length this clip can have.
void setStart(TimePosition newStart, bool preserveSync, bool keepLength)
Sets the start time of the clip.
Track * getTrack() const override
Returns the parent Track this clip is on (if any).
void reassignReferencedItem(const ReferencedItem &, ProjectItemID, double) override
Should be implemented to change the underlying source to a new ProjectItemID.
virtual void initialise()
Initialises the Clip.
ClipOwner * getParent() const
Returns the parent ClipOwner this clip is on.
virtual void setLoopRange(TimeRange)
Sets the loop range the clip should use in seconds.
virtual std::shared_ptr< LaunchHandle > getLaunchHandle()
Some clip types can be launched, if that's possible, this returns a handle to trigger starting/stoppi...
virtual bool canLoop() const
Returns true if this clip is capable of looping.
virtual void cloneFrom(Clip *)
Clones the given clip to this clip.
void setLength(TimeDuration newLength, bool preserveSync)
Sets the length of the clip.
virtual BeatPosition getLoopStartBeats() const
Returns the beat position of the loop start point.
double getSpeedRatio() const noexcept
Returns the speed ratio i.e.
virtual void setNumberOfLoops(int)
Sets the clip looping a number of times.
virtual TimeDuration getLoopLength() const
Returns the length of loop in seconds.
virtual void setLoopRangeBeats(BeatRange)
Sets the loop range the clip should use in beats.
virtual void setSpeedRatio(double)
Sets a speed ratio i.e.
TimeRange getLoopRange() const
Returns the loop range in seconds.
juce::File getCurrentSourceFile() const
Returns the current source file, this is different to the SourceFileReference as it could be a tempor...
virtual FollowActions * getFollowActions()
Some clip types can be launched, if that's possible, this can be used to determine the action to perf...
virtual LaunchQuantisation * getLaunchQuantisation()
Some clip types can be launched, if that's possible, this returns a quantisation that can be used for...
juce::UndoManager * getUndoManager() const
Returns the UndoManager.
ClipPosition getPosition() const override
Returns the ClipPosition on the parent Track.
virtual bool addClipPlugin(const Plugin::Ptr &, SelectionManager &)
Adds a plugin to the clip.
@ beats
A number of beats.
void setCurrentSourceFile(const juce::File &)
Sets a new source file for this clip.
virtual BeatDuration getLoopLengthBeats() const
Returns the length of loop in beats.
void setPosition(ClipPosition newPosition)
Sets the position of the clip.
static TimeDuration getMaximumLength()
Returns the maximum length an Edit can be.
Engine & engine
A reference to the Engine.
The Engine is the central class for all tracktion sessions.
AudioFileFormatManager & getAudioFileFormatManager() const
Returns the AudioFileFormatManager that maintains a list of available audio file formats.
AudioFileManager & getAudioFileManager() const
Returns the AudioFileManager instance.
Represents a launch quantisation.
Holds tempo/beat information about an audio file.
SampleCount getOutMarker() const
Returns the sample number used as the end point in the file.
void setInMarker(SampleCount)
Sets the sample number to be used as the start point in the file.
void setBpm(double newBpm, const AudioFileInfo &)
Sets the tempo of the object.
void addLoopPoint(SampleCount, LoopPointType)
Adds a loop point at the given position.
bool isLoopable() const
Returns true if this can be looped.
LoopPoint getLoopPoint(int index) const
Returns the loop points at the given index.
int getRootNote() const
Returns the root note of the object.
void changeLoopPoint(int index, SampleCount, LoopPointType)
Sets the loop point at the given index to a new position and type.
int getNumLoopPoints() const
Returns the number of loop points in the object.
@ automatic
An automatic loop point.
SampleCount getInMarker() const
Returns the sample number used as the start point in the file.
double getNumBeats() const
Returns the number of beats.
void setOutMarker(SampleCount)
Sets the sample number to be used as the end point in the file.
void deleteLoopPoint(int index)
Removes the loop point at the given index.
double getBeatsPerSecond(const AudioFileInfo &) const
Returns the tempo of the object.
The base class that all generator jobs derive from.
static AudioFile getAudioFileForHash(Engine &, const juce::File &directory, HashCode hash)
Returns the AudioFile for a particular hash.
virtual void changed()
This should be called to send a change notification to any SelectableListeners that are registered wi...
Manages a list of items that are currently selected.
void setToProjectFileReference(const juce::File &, bool updateProjectItem)
Points this source at a new file via a project item.
Uses the SoundTouch BPMDetect class to guess the tempo of some audio.
float finishAndDetect()
Completes the detection process and returns the BPM.
void processSection(juce::AudioBuffer< float > &buffer, int numSamplesToProcess)
Processes a non-interleaved buffer section.
void prepareForJobDeletion()
Call this in your sub-class destructor to remove it from the manager queue before this class's des...
void setName(const juce::String &newName)
Sets the job's name but also updates the manager so the list will reflect it.
Handles time/pitch stretching using various supported libraries.
void initialise(double sourceSampleRate, int samplesPerBlock, int numChannels, Mode, ElastiqueProOptions, bool realtime)
Initialises the TimeStretcher ready to perform timestretching.
int getFramesNeeded() const
Returns the expected number of frames required to generate some output.
bool isInitialised() const
Returns true if this has been fully initialised.
int processData(const float *const *inChannels, int numSamples, float *const *outChannels)
Processes some input frames and fills some output frames with the applied speed ratio and pitch shift...
int flush(float *const *outChannels)
Flushes the end of the stream when input data is exhausted but there is still output data available.
static Mode checkModeIsAvailable(Mode)
Checks if the given mode is available for use.
bool setSpeedAndPitch(float speedRatio, float semitones)
Sets the timestretch speed ratio and semitones pitch shift.
Mode
Holds the various algorithms to which can be used (if enabled).
@ defaultMode
Default mode.
@ disabled
No algorithm enabled.
Type
Defines the types of item that can live on Track[s].
BeatPosition getStartBeat() const
Returns the start beat in the Edit of this item.
BeatDuration getLengthInBeats() const
Returns the duration in beats the of this item.
TimePosition getTimeOfRelativeBeat(BeatDuration) const
Returns an Edit time point for a given number of beats from the start of this item.
BeatDuration getOffsetInBeats() const
Returns an the offset of this item in beats.
TimeRange getEditTimeRange() const
Returns the time range of this item.
BeatPosition getBeatOfRelativeTime(TimeDuration) const
Returns an Edit beat point for a given number of seconds from the start of this item.
A WarpTimeManager contains a list of WarpMarkers and some source material and maps times from a linea...
#define TRANS(stringLiteral)
Type jlimit(Type lowerLimit, Type upperLimit, Type valueToConstrain) noexcept
void ignoreUnused(Types &&...) noexcept
bool canContainAudio(const ClipOwner &co)
Returns true if this Track can contain WaveAudioClip[s].
ResamplingQuality
Specifies a number of resampling qualities that can be used.
@ lagrange
Lagrange interpolation.
juce::AudioChannelSet channelSetFromSpeakerArrangmentString(const juce::String &arrangement)
Creates an AudioChannelSet from a list of abbreviated channel names.
constexpr TimeDuration toDuration(TimePosition)
Converts a TimePosition to a TimeDuration.
Represents a duration in beats.
Represents a position in beats.
Represents a duration in real-life time.
constexpr double inSeconds() const
Returns the TimeDuration as a number of seconds.
Represents a position in real-life time.
constexpr double inSeconds() const
Returns the TimePosition as a number of seconds.
void updateAsync(int functionID)
Triggers an asynchronous call to one of the functions.
void handleUpdateNowIfNeeded()
If an update has been triggered and is pending, this will invoke it synchronously.
void addFunction(int functionID, const std::function< void()> &f)
Adds a function and associates a functionID with it.
Holds information about how to render a proxy for this clip.
bool render(Engine &, const AudioFile &, AudioFileWriter &, juce::ThreadPoolJob *const &, std::atomic< float > &progress) const
Renders this audio segment list to an AudioFile.
~ProxyRenderingInfo()
Destructor.
ProxyRenderingInfo()
Constructor.
Type
A enumeration of the curve classes available.
static float alphaToGainForType(Type type, float alpha) noexcept
Converts an alpha position along the curve (0 to 1.0) into the gain at that point.
TimePosition getEnd() const
Returns the end time.
TimePosition getStart() const
Returns the start time.
TimeRange time
The TimeRange this ClipPosition occupies.
TimeDuration getOffset() const
Returns the offset.
TimeDuration getLength() const
Returns the length.
ID for objects of type EditElement - e.g.
Provides a thread-safe way to share a clip's levels with an audio engine without worrying about the C...
bool isAutomatic() const
Returns true if this is an automatic loop point.
#define CRASH_TRACER
This macro adds the current location to a stack which gets logged if a crash happens.