tracktion-engine 3.0-10-g034fdde4aa5
Tracktion Engine — High level data model for audio applications

« « « Anklang Documentation
Loading...
Searching...
No Matches
tracktion_AudioSegmentList.cpp
Go to the documentation of this file.
1 /*
2 ,--. ,--. ,--. ,--.
3 ,-' '-.,--.--.,--,--.,---.| |,-.,-' '-.`--' ,---. ,--,--, Copyright 2024
4 '-. .-'| .--' ,-. | .--'| /'-. .-',--.| .-. || \ Tracktion Software
5 | | | | \ '-' \ `--.| \ \ | | | |' '-' '| || | Corporation
6 `---' `--' `--`--'`---'`--'`--' `---' `--' `---' `--''--' www.tracktion.com
7
8 Tracktion Engine uses a GPL/commercial licence - see LICENCE.md for details.
9*/
10
11namespace tracktion { inline namespace engine
12{
13
14inline void dumpSegments (const juce::Array<AudioSegmentList::Segment>& segments)
15{
16
17 DBG ("******************************************");
18 for (auto& s : segments)
19 {
20 juce::String text;
21
22 text += "Start: " + juce::String (s.start.inSeconds()) + "(" + juce::String (s.startSample) + ")\n";
23 text += "Length: " + juce::String (s.length.inSeconds()) + "(" + juce::String (s.lengthSample) + ")\n";
24 text += "Transpose: " + juce::String (s.transpose) + "\n";
25 text += "===============================================";
26
27 DBG(text);
28 }
29}
30
31//==============================================================================
// Edit-timeline range this segment occupies.
TimeRange AudioSegmentList::Segment::getRange() const { return { start, start + length }; }
// Range of source-file samples this segment plays.
SampleRange AudioSegmentList::Segment::getSampleRange() const { return { startSample, startSample + lengthSample }; }

float AudioSegmentList::Segment::getStretchRatio() const { return stretchRatio; }
float AudioSegmentList::Segment::getTranspose() const { return transpose; }

bool AudioSegmentList::Segment::hasFadeIn() const { return fadeIn; }
bool AudioSegmentList::Segment::hasFadeOut() const { return fadeOut; }

// True if no adjacent segment directly follows this one (i.e. there's a gap).
bool AudioSegmentList::Segment::isFollowedBySilence() const { return followedBySilence; }
42
// Cheap hash combining the fields that affect rendered audio; used as a cache key.
// Note the float fields are scaled then truncated to integers, so only changes
// larger than ~1/1000 semitone / ratio units alter the hash.
HashCode AudioSegmentList::Segment::getHashCode() const
{
    return startSample
             ^ (lengthSample * 127)
             ^ (followedBySilence ? 1234 : 5432)
             ^ static_cast<HashCode> (stretchRatio * 1003.0f)
             ^ static_cast<HashCode> (transpose * 117.0f);
}
51
// Field-wise equality.
// NOTE(review): followedBySilence is deliberately (?) not compared here even
// though getHashCode() mixes it in — presumably because it's derived from the
// neighbouring segments rather than intrinsic state. Confirm this is intended.
bool AudioSegmentList::Segment::operator== (const Segment& other) const
{
    return (start == other.start &&
            length == other.length &&
            startSample == other.startSample &&
            lengthSample == other.lengthSample &&
            stretchRatio == other.stretchRatio &&
            transpose == other.transpose &&
            fadeIn == other.fadeIn &&
            fadeOut == other.fadeOut);
}
63
64bool AudioSegmentList::Segment::operator!= (const Segment& other) const
65{
66 return ! operator== (other);
67}
68
69//==============================================================================
//==============================================================================
// Constructs an empty list; used by the warp-time create() overloads, which
// populate the segments themselves rather than calling build().
AudioSegmentList::AudioSegmentList (AudioClipBase& acb) : clip (acb)
{
}
73
// Constructs and immediately builds the segment list for a clip.
// relTime:          if true, segment start times are made relative to the list start
// shouldCrossfade:  if true, adjacent segments get a short crossfade whose
//                   length comes from the crossfadeBlock engine property
//                   (default 12ms).
AudioSegmentList::AudioSegmentList (AudioClipBase& acb, bool relTime, bool shouldCrossfade)
    : clip (acb), relativeTime (relTime)
{
    if (shouldCrossfade)
        crossfadeTime = TimeDuration::fromSeconds (static_cast<double> (clip.edit.engine.getPropertyStorage().getProperty (SettingID::crossfadeBlock, 12.0 / 1000.0)));

    auto& pm = acb.edit.engine.getProjectManager();

    // Only worth building if at least one take's source file actually exists.
    auto anyTakesValid = [&]
    {
        for (ProjectItemID m : clip.getTakes())
            if (pm.findSourceFile (m).existsAsFile())
                return true;

        return false;
    };

    // Sanity-check that the project manager and the clip agree on the source file.
   #if JUCE_DEBUG
    auto f = pm.findSourceFile (clip.getSourceFileReference().getSourceProjectItemID());
    jassert (f == juce::File() || f == clip.getSourceFileReference().getFile());
   #endif

    if (clip.getCurrentSourceFile().existsAsFile() || anyTakesValid())
        build (shouldCrossfade);
}
99
100static float calcStretchRatio (const AudioSegmentList::Segment& seg, double sampleRate)
101{
102 double srcSamples = sampleRate * seg.getRange().getLength().inSeconds();
103
104 if (srcSamples > 0)
105 return (float) (seg.getSampleRange().getLength() / srcSamples);
106
107 return 1.0f;
108}
109
110std::unique_ptr<AudioSegmentList> AudioSegmentList::create (AudioClipBase& acb, bool relativeTime, bool crossFade)
111{
112 return std::unique_ptr<AudioSegmentList> (new AudioSegmentList (acb, relativeTime, crossFade));
113}
114
// Convenience overload: builds a warp-time segment list from the clip's own
// warp manager, wave info and loop info.
std::unique_ptr<AudioSegmentList> AudioSegmentList::create (AudioClipBase& acb)
{
    return create (acb, acb.getWarpTimeManager(), acb.getWaveInfo(), acb.getLoopInfo());
}
119
120std::unique_ptr<AudioSegmentList> AudioSegmentList::create (AudioClipBase& acb, const WarpTimeManager& wtm, const AudioFile& af)
121{
122 auto wi = af.getInfo();
123 return create (acb, wtm, wi, wi.loopInfo);
124}
125
// Builds a segment list from a set of warp-time regions: each warped region
// becomes one segment mapping a span of source samples (between the loop
// in/out markers) onto a span of edit time, with the stretch ratio derived
// from the two lengths.
std::unique_ptr<AudioSegmentList> AudioSegmentList::create (AudioClipBase& acb, const WarpTimeManager& wtm, const AudioFileInfo& wi, const LoopInfo& li)
{
    std::unique_ptr<AudioSegmentList> asl (new AudioSegmentList (acb));

    // Source sample range delimited by the loop markers; -1 out-marker means "to the end".
    auto in = li.getInMarker();
    auto out = (li.getOutMarker() == -1) ? wi.lengthInSamples : li.getOutMarker();
    jassert (in <= out);

    if (in <= out)
    {
        TimeRange region (std::max (TimePosition(), wtm.getWarpedStart()),
                          wtm.getWarpEndMarkerTime());

        // getWarpTimeRegions is fetched on the message thread.
        juce::Array<TimeRange> warpTimeRegions;
        callBlocking ([&] { warpTimeRegions = wtm.getWarpTimeRegions (region); });
        auto position = warpTimeRegions.size() > 0 ? warpTimeRegions.getUnchecked (0).getStart() : TimePosition();

        for (auto warpRegion : warpTimeRegions)
        {
            // Un-warp the region back to source time to find which samples it covers.
            TimeRange sourceRegion (wtm.warpTimeToSourceTime (warpRegion.getStart()),
                                    wtm.warpTimeToSourceTime (warpRegion.getEnd()));

            Segment seg;

            seg.startSample = tracktion::toSamples (sourceRegion.getStart(), wi.sampleRate) + in;
            seg.lengthSample = tracktion::toSamples (sourceRegion.getEnd(), wi.sampleRate) + in - seg.startSample;
            seg.start = position;                   // segments are laid out contiguously
            seg.length = warpRegion.getLength();
            seg.stretchRatio = calcStretchRatio (seg, wi.sampleRate);
            seg.fadeIn = false;
            seg.fadeOut = false;
            seg.transpose = 0.0f;

            position = position + warpRegion.getLength();
            jassert (seg.startSample >= in);
            jassert (seg.startSample + seg.lengthSample <= out);

            asl->segments.add (seg);
        }

        // Warp segments always get a fixed 10ms crossfade between them.
        asl->crossfadeTime = 0.01s;
        asl->crossFadeSegments();
    }

    return asl;
}
173
174bool AudioSegmentList::operator== (const AudioSegmentList& other) const noexcept
175{
176 return crossfadeTime == other.crossfadeTime
177 && relativeTime == other.relativeTime
178 && segments == other.segments;
179}
180
181bool AudioSegmentList::operator!= (const AudioSegmentList& other) const noexcept
182{
183 return ! operator== (other);
184}
185
// Top-level build: picks the auto-tempo or normal path, then optionally shifts
// all segments so the first one starts at time zero.
void AudioSegmentList::build (bool crossfade)
{
    // Chord-track pitch following needs the flattened chord progression up front.
    if (clip.getAutoPitch() && clip.getAutoPitchMode() == AudioClipBase::chordTrackMono)
        if (auto pg = clip.getPatternGenerator())
            pg->getFlattenedChordProgression (progression, true);

    if (clip.getAutoTempo())
        buildAutoTempo (crossfade);
    else
        buildNormal (crossfade);

    if (relativeTime)
    {
        // Rebase all segment starts relative to the list's start time.
        auto offset = toDuration (getStart());

        for (auto& s : segments)
            s.start = s.start - offset;
    }
}
205
// Splits `seg` at time `at` into two crossfaded segments, inserting the new
// tail segment at `insertPos`. The sample split point is placed proportionally
// to the time split, and the original segment keeps the head portion.
void AudioSegmentList::chopSegment (Segment& seg, TimePosition at, int insertPos)
{
    Segment newSeg;

    newSeg.start = at;
    newSeg.length = seg.getRange().getEnd() - newSeg.getRange().getStart();

    // Sample the pitch just inside the new segment to avoid landing exactly on
    // the change boundary.
    newSeg.transpose = getPitchAt (newSeg.start + 0.0001s);
    newSeg.stretchRatio = (float) clip.getSpeedRatio();

    newSeg.fadeIn = true;
    newSeg.fadeOut = seg.fadeOut;

    // Split the sample range in the same proportion as the time range.
    newSeg.lengthSample = juce::roundToInt (seg.lengthSample * newSeg.length.inSeconds() / seg.length.inSeconds());
    newSeg.startSample = seg.getSampleRange().getEnd() - newSeg.lengthSample;

    // Shrink the original to cover only the head portion.
    seg.length = seg.length - newSeg.length;
    seg.lengthSample = newSeg.startSample - seg.startSample;

    seg.fadeOut = true;             // crossfades into the new tail segment
    seg.followedBySilence = false;

    jassert (newSeg.length > 0.01s);
    jassert (seg.length > 0.01s);

    segments.insert (insertPos, newSeg);
}
233
// Builds segments for a clip that isn't following the edit tempo: one segment
// per loop repetition (trimmed at the clip edges), or a single segment for an
// unlooped clip, then chops at pitch/chord changes and applies crossfades.
void AudioSegmentList::buildNormal (bool crossfade)
{
    auto wi = clip.getWaveInfo();

    // No valid source file — nothing to build.
    if (wi.sampleRate == 0.0)
        return;

    // Source samples consumed per second of edit time.
    auto rate = clip.getSpeedRatio() * wi.sampleRate;
    auto clipPos = clip.getPosition();

    if (clip.isLooping())
    {
        auto clipLoopLen = clip.getLoopLength();

        if (clipLoopLen <= 0s)
            return;

        // The looped portion of the source, in source samples.
        auto startSamp = std::max ((SampleCount) 0, (SampleCount) (rate * clip.getLoopStart().inSeconds()));
        auto lengthSamp = std::max ((SampleCount) 0, (SampleCount) (rate * clipLoopLen.inSeconds()));

        // Lay out one segment per loop repetition until we pass the clip end.
        for (int i = 0; ; ++i)
        {
            auto startTime = clipPos.getStart() + clipLoopLen * i - clipPos.getOffset();

            if (startTime >= clipPos.getEnd())
                break;

            auto end = startTime + clipLoopLen;

            // Repetition lies entirely before the clip starts — skip it.
            if (end < clipPos.getStart())
                continue;

            Segment seg;

            seg.startSample = startSamp;
            seg.lengthSample = lengthSamp;

            // Trim a repetition that straddles the clip start.
            if (startTime < clipPos.getStart())
            {
                auto diff = (SampleCount) ((clipPos.getStart() - startTime).inSeconds() * rate);

                seg.startSample += diff;
                seg.lengthSample -= diff;
                startTime = clipPos.getStart();
            }

            // Trim a repetition that straddles the clip end.
            if (end > clipPos.getEnd())
            {
                auto diff = (SampleCount) ((end - clipPos.getEnd()).inSeconds() * rate);
                seg.lengthSample -= diff;
                end = clipPos.getEnd();
            }

            if (seg.lengthSample <= 0)
                continue;

            seg.start = startTime;
            seg.length = end - startTime;

            // Sample pitch just inside the segment to avoid the boundary itself.
            seg.transpose = getPitchAt (startTime + 0.0001s);
            seg.stretchRatio = (float) clip.getSpeedRatio();

            seg.fadeIn = true;
            seg.fadeOut = true;
            seg.followedBySilence = true;

            // If this repetition butts up against the previous one (within 10ms),
            // the previous one isn't followed by silence.
            if (! segments.isEmpty())
            {
                auto& prev = segments.getReference (segments.size() - 1);

                if (tracktion::abs (prev.getRange().getEnd() - seg.getRange().getStart()) < 0.01s)
                    prev.followedBySilence = false;
            }

            segments.add (seg);
        }

        // No fade needed on the outer edges of the whole clip.
        if (! segments.isEmpty())
        {
            segments.getReference (0).fadeIn = false;
            segments.getReference (segments.size() - 1).fadeOut = false;
        }
    }
    else
    {
        // not looped — the whole clip is a single segment
        Segment seg;

        seg.start = clipPos.getStart();
        seg.length = clipPos.getLength();

        seg.startSample = juce::jlimit ((SampleCount) 0, wi.lengthInSamples, (SampleCount) (clipPos.getOffset().inSeconds() * rate));
        seg.lengthSample = juce::jlimit ((SampleCount) 0, wi.lengthInSamples, (SampleCount) (clipPos.getLength().inSeconds() * rate));

        seg.transpose = getPitchAt (clipPos.getStart() + 0.0001s);
        seg.stretchRatio = (float) clip.getSpeedRatio();

        seg.fadeIn = false;
        seg.fadeOut = false;

        seg.followedBySilence = true;

        if (seg.length > 0s)
            segments.add (seg);
    }

    // chop up any segments that have pitch changes in them
    if (clip.getAutoPitch())
    {
        auto& ps = clip.edit.pitchSequence;

        for (int i = 0; i < ps.getNumPitches(); ++i)
        {
            auto* pitch = ps.getPitch(i);
            jassert (pitch != nullptr);

            auto pitchTm = pitch->getPosition().getStart();

            // Only consider changes that fall comfortably inside the list.
            if (pitchTm > getStart() + 0.01s && pitchTm < getEnd() - 0.01s)
            {
                for (int j = 0; j < segments.size(); ++j)
                {
                    auto& seg = segments.getReference (j);

                    // Chop only if the pitch actually differs across the boundary.
                    if (seg.getRange().reduced (0.01s).contains (pitchTm)
                         && std::abs (getPitchAt (pitchTm) - getPitchAt (seg.getRange().getStart())) > 0.0001)
                    {
                        chopSegment (seg, pitchTm, j + 1);
                        break;
                    }
                }
            }
        }

        chopSegmentsForChords();
    }

    if (crossfade)
        crossFadeSegments();
}
375
// In chord-track mono mode, chops segments at every chord boundary of the
// flattened progression so each segment can take its own transpose.
void AudioSegmentList::chopSegmentsForChords()
{
    if (clip.getAutoPitchMode() == AudioClipBase::chordTrackMono && progression.size() > 0)
    {
        auto& ts = clip.edit.tempoSequence;

        // Running beat position of each chord's start within the progression.
        BeatPosition pos;

        for (auto& p : progression)
        {
            auto chordTime = ts.toTime (pos);

            // Only chop at boundaries comfortably inside the list.
            if (chordTime > getStart() + 0.01s && chordTime < getEnd() - 0.01s)
            {
                for (int j = 0; j < segments.size(); ++j)
                {
                    auto& seg = segments.getReference (j);

                    if (seg.getRange().reduced (0.01s).contains (chordTime))
                    {
                        chopSegment (seg, chordTime, j + 1);
                        break;
                    }
                }

            }

            pos = pos + p->lengthInBeats;
        }
    }
}
407
408static juce::Array<SampleCount> findSyncSamples (const LoopInfo& loopInfo, SampleRange range)
409{
410 juce::Array<SampleCount> syncSamples;
411 auto numLoopPoints = loopInfo.getNumLoopPoints();
412
413 if (numLoopPoints == 0)
414 {
415 const auto numBeats = (int) std::ceil (loopInfo.getNumBeats());
416 syncSamples.ensureStorageAllocated (numBeats);
417
418 for (int i = 0; i < numBeats; ++i)
419 syncSamples.add ((SampleCount) (range.getLength() / (double) numBeats * i + range.getStart() + 0.5));
420 }
421 else
422 {
423 for (int i = 0; i < numLoopPoints; ++i)
424 {
425 auto pos = loopInfo.getLoopPoint (i).pos;
426
427 if (range.contains (pos))
428 syncSamples.add (pos);
429 }
430 }
431
432 if (! syncSamples.contains (range.getStart()))
433 syncSamples.add (range.getStart());
434
435 std::sort (syncSamples.begin(), syncSamples.end());
436 return syncSamples;
437}
438
439static juce::Array<SampleCount> trimInitialSyncSamples (const juce::Array<SampleCount>& samples, SampleCount start)
440{
442 result.add (start);
443
444 for (auto& s : samples)
445 if (s > start)
446 result.add (s);
447
448 return result;
449}
450
// Fills in a segment's time range, stretch ratio, fades and transpose from a
// beat range. The caller must have set startSample/lengthSample first, since
// calcStretchRatio() reads the sample range.
void AudioSegmentList::initialiseSegment (Segment& seg, BeatPosition startBeat, BeatPosition endBeat, double sampleRate)
{
    auto& ts = clip.edit.tempoSequence;
    seg.start = ts.toTime (startBeat);
    seg.length = ts.toTime (endBeat) - seg.start;
    seg.stretchRatio = calcStretchRatio (seg, sampleRate);
    seg.fadeIn = false;
    seg.fadeOut = false;
    // Sample pitch just inside the segment to avoid the boundary itself.
    seg.transpose = getPitchAt (seg.start + 0.0001s);
}
461
// Drops segments entirely outside the clip and trims those that straddle the
// clip's start or end, rescaling their sample lengths proportionally.
void AudioSegmentList::removeExtraSegments()
{
    // Iterate backwards so removal doesn't disturb the indices still to visit.
    for (int i = segments.size(); --i >= 0;)
    {
        auto& seg = segments.getReference (i);
        auto segTime = seg.getRange();
        auto clipTime = clip.getPosition().time;

        if (! segTime.overlaps (clipTime))
        {
            // Entirely outside the clip — remove.
            segments.remove(i);
        }
        else if (segTime.getStart() < clipTime.getEnd() && segTime.getEnd() > clipTime.getEnd())
        {
            // Straddles the clip end: shorten, scaling the sample count to match.
            auto oldLen = seg.length;
            seg.length = getEnd() - seg.start;
            auto ratio = oldLen / seg.length;
            seg.lengthSample = static_cast<SampleCount> (seg.lengthSample / ratio + 0.5);
        }
        else if (segTime.getStart() < clipTime.getStart() && segTime.getEnd() > clipTime.getStart())
        {
            // Straddles the clip start: move the start forwards, keeping the
            // segment's sample end fixed and scaling the sample count.
            auto oldLen = seg.length;
            auto delta = getStart() - segTime.getStart();
            seg.start = seg.start + delta;
            seg.length = seg.length - delta;
            auto ratio = oldLen / segTime.getLength();
            auto oldEndSamp = seg.getSampleRange().getEnd();
            seg.lengthSample = static_cast<SampleCount> (seg.lengthSample / ratio + 0.5);
            seg.startSample = oldEndSamp - seg.lengthSample;
        }
    }
}
494
// Merges adjacent segments that are contiguous in both edit time and source
// samples and share (within tolerance) the same stretch ratio and transpose.
void AudioSegmentList::mergeSegments (double sampleRate)
{
    // Iterate backwards so removing s2 doesn't disturb the pairs still to visit.
    for (int i = segments.size() - 1; i >= 1; --i)
    {
        auto& s1 = segments.getReference (i - 1);
        auto& s2 = segments.getReference (i);

        if (std::abs (s1.stretchRatio - s2.stretchRatio) < 0.0001
             && std::abs (s1.transpose - s2.transpose) < 0.0001
             && tracktion::abs (s1.start + s1.length - s2.start) < 0.0001s
             && s1.startSample + s1.lengthSample == s2.startSample)
        {
            // Absorb s2 into s1 and recompute the combined stretch ratio.
            s1.length = s1.length + s2.length;
            s1.lengthSample += s2.lengthSample;
            s1.stretchRatio = calcStretchRatio (s1, sampleRate);

            segments.remove (i);
        }
    }
}
515
// Marks fades between abutting segments: each segment whose end touches the
// next segment's start is extended by crossfadeTime (with its sample length
// scaled to match) and fades out; the following segment then fades in.
void AudioSegmentList::crossFadeSegments()
{
    for (int i = 0; i < segments.size(); ++i)
    {
        auto& s = segments.getReference(i);

        // fade out
        if (i < segments.size() - 1
             && (tracktion::abs (s.getRange().getEnd() - segments.getReference (i + 1).start) < 0.0001s))
        {
            // Extend into the next segment by the crossfade length, keeping the
            // sample count proportional to the new time length.
            auto oldLen = s.length;
            s.fadeOut = true;
            s.length = s.length + crossfadeTime;
            auto ratio = oldLen / s.length;
            s.lengthSample = static_cast<SampleCount> (s.lengthSample / ratio + 0.5);
            s.followedBySilence = false;
        }
        else
        {
            s.followedBySilence = true;
        }

        // fade in — whenever the previous segment fades out into this one
        if (i > 0 && segments.getReference (i - 1).fadeOut)
            s.fadeIn = true;
    }
}
543
// Builds segments for a clip following the edit tempo: the source is split at
// its sync points (loop points or beats) and each slice is mapped onto the
// tempo sequence. For looping clips, the first pass handles the (possibly
// offset) initial pass through the loop, then whole-loop passes repeat until
// the clip's length in beats is covered.
// NOTE(review): the two inner loops below are near-duplicates differing only
// in how the slice's beat length is computed — candidate for extraction once
// covered by tests.
void AudioSegmentList::buildAutoTempo (bool crossfade)
{
    auto wi = clip.getWaveInfo();
    auto& li = clip.getLoopInfo();

    // Source sample range delimited by the loop markers; -1 out-marker means "to the end".
    SampleRange range (li.getInMarker(),
                       li.getOutMarker() == -1 ? wi.lengthInSamples
                                               : li.getOutMarker());

    if (range.isEmpty())
        return;

    auto& ts = clip.edit.tempoSequence;
    auto syncSamples = findSyncSamples (li, range);
    auto clipStartBeat = clip.getStartBeat();

    if (clip.isLooping())
    {
        auto loopLengthBeats = clip.getLoopLengthBeats();

        if (loopLengthBeats == BeatDuration())
            return;

        // Reduce the clip offset to within one loop length.
        auto offsetBeat = clip.getOffsetInBeats();

        while (offsetBeat > loopLengthBeats)
            offsetBeat = offsetBeat - loopLengthBeats;

        if (tracktion::abs (offsetBeat).inBeats() < 0.00001)
            offsetBeat = BeatDuration();

        auto loopStartBeat = clip.getLoopStartBeats() + offsetBeat;

        // First pass: starts partway through the loop if there's an offset.
        auto offsetTime = TimePosition::fromSeconds (loopStartBeat.inBeats() / li.getBeatsPerSecond (wi));
        auto offsetSample = tracktion::toSamples (offsetTime, wi.sampleRate) + range.getStart();

        auto syncSamplesSubset = trimInitialSyncSamples (syncSamples, offsetSample);

        BeatPosition beatPos;
        BeatPosition loopEndBeat = toPosition (loopLengthBeats) - offsetBeat;

        for (int i = 0; i < syncSamplesSubset.size(); ++i)
        {
            Segment seg;

            // Each slice runs from one sync point to the next (or the range end).
            seg.startSample = syncSamplesSubset[i];
            seg.lengthSample = ((i == syncSamplesSubset.size() - 1) ? (range.getEnd() - seg.startSample)
                                                                    : (syncSamplesSubset[i + 1]) - seg.startSample);

            auto startBeat = beatPos;
            beatPos = beatPos + BeatDuration::fromBeats (TimeDuration::fromSamples (seg.lengthSample, wi.sampleRate).inSeconds() * li.getBeatsPerSecond (wi));
            auto endBeat = beatPos;

            initialiseSegment (seg, clipStartBeat + toDuration (startBeat), clipStartBeat + toDuration (endBeat), wi.sampleRate);

            if (startBeat >= loopEndBeat)
                break;

            // Final slice of the pass: trim it to end exactly at the loop end.
            if (endBeat > loopEndBeat)
            {
                auto oldLength = endBeat - startBeat;
                auto newLength = loopEndBeat - startBeat;

                seg.length = ts.toTime (clipStartBeat + toDuration (loopEndBeat)) - seg.start;
                seg.lengthSample = static_cast<SampleCount> (seg.lengthSample * (newLength / oldLength) + 0.5);

                jassert (seg.startSample >= range.getStart());
                jassert (seg.startSample + seg.lengthSample <= range.getEnd());
                segments.add (seg);
                break;
            }

            jassert (seg.startSample >= range.getStart());
            jassert (seg.startSample + seg.lengthSample <= range.getEnd());
            segments.add (seg);
        }

        // Subsequent passes: full loops starting at the loop start (no offset).
        loopStartBeat = clip.getLoopStartBeats();

        offsetTime = TimePosition::fromSeconds (loopStartBeat.inBeats() / li.getBeatsPerSecond (wi));
        offsetSample = tracktion::toSamples (offsetTime, wi.sampleRate);

        syncSamplesSubset = trimInitialSyncSamples (syncSamples, offsetSample);

        beatPos = loopEndBeat;
        loopEndBeat = beatPos + loopLengthBeats;

        while (beatPos < toPosition (clip.getLengthInBeats()))
        {
            for (int i = 0; i < syncSamplesSubset.size(); ++i)
            {
                Segment seg;

                seg.startSample = syncSamplesSubset[i];
                seg.lengthSample = ((i == syncSamplesSubset.size() - 1) ? (range.getEnd() - seg.startSample)
                                                                        : (syncSamplesSubset[i + 1]) - seg.startSample);

                auto startBeat = beatPos;
                beatPos = beatPos + BeatDuration::fromBeats ((seg.lengthSample / wi.sampleRate) * li.getBeatsPerSecond (wi));
                auto endBeat = beatPos;

                initialiseSegment (seg, clipStartBeat + toDuration (startBeat), clipStartBeat + toDuration (endBeat), wi.sampleRate);

                if (startBeat >= loopEndBeat)
                    break;

                if (endBeat > loopEndBeat)
                {
                    auto oldLength = endBeat - startBeat;
                    auto newLength = loopEndBeat - startBeat;

                    seg.length = ts.toTime (clipStartBeat + toDuration (loopEndBeat)) - seg.start;
                    seg.lengthSample = static_cast<SampleCount> (seg.lengthSample * (newLength / oldLength) + 0.5);

                    jassert (seg.startSample >= range.getStart());
                    jassert (seg.startSample + seg.lengthSample <= range.getEnd());
                    segments.add (seg);
                    break;
                }

                jassert (seg.startSample >= range.getStart());
                jassert (seg.startSample + seg.lengthSample <= range.getEnd());
                segments.add (seg);
            }

            // Advance to the next full loop pass.
            beatPos = loopEndBeat;
            loopEndBeat = beatPos + loopLengthBeats;
        }
    }
    else
    {
        // Unlooped: a single pass through the source starting at the clip offset.
        auto offsetTime = TimeDuration::fromSeconds (clip.getOffsetInBeats().inBeats() / li.getBeatsPerSecond (wi));
        auto offsetSample = tracktion::toSamples (offsetTime, wi.sampleRate) + range.getStart();
        BeatPosition beatPos;

        syncSamples = trimInitialSyncSamples (syncSamples, offsetSample);

        for (int i = 0; i < syncSamples.size(); ++i)
        {
            Segment seg;

            seg.startSample = syncSamples[i];
            seg.lengthSample = ((i == syncSamples.size() - 1) ? (range.getEnd() - seg.startSample)
                                                              : (syncSamples[i + 1]) - seg.startSample);

            auto startBeat = beatPos;
            beatPos = beatPos + BeatDuration::fromBeats ((seg.lengthSample / wi.sampleRate) * li.getBeatsPerSecond (wi));
            auto endBeat = beatPos;

            initialiseSegment (seg, clipStartBeat + toDuration (startBeat), clipStartBeat + toDuration (endBeat), wi.sampleRate);

            jassert (seg.startSample >= range.getStart());
            jassert (seg.startSample + seg.lengthSample <= range.getEnd());
            segments.add (seg);
        }
    }

    // Tidy up: chord chops, clip-boundary trimming, merging, crossfades.
    chopSegmentsForChords();
    removeExtraSegments();
    mergeSegments (wi.sampleRate);

    if (crossfade)
        crossFadeSegments();
}
709
710TimePosition AudioSegmentList::getStart() const
711{
712 if (! segments.isEmpty())
713 return segments.getReference (0).getRange().getStart();
714
715 return 0.0s;
716}
717
718TimePosition AudioSegmentList::getEnd() const
719{
720 if (! segments.isEmpty())
721 return segments.getReference (segments.size() - 1).getRange().getEnd();
722
723 return 0.0s;
724}
725
// Returns the transpose (in semitones) to apply at time t, depending on the
// clip's auto-pitch mode:
//  - chordTrackMono with a progression: transpose towards the chord active at t
//  - plain auto-pitch: transpose towards the pitch-sequence pitch at t
//  - otherwise: the clip's fixed pitch change.
float AudioSegmentList::getPitchAt (TimePosition t)
{
    if (clip.getAutoPitch() && clip.getAutoPitchMode() == AudioClipBase::chordTrackMono && progression.size() > 0)
    {
        auto& ts = clip.edit.tempoSequence;

        auto& ps = clip.edit.pitchSequence;
        auto& pitchSetting = ps.getPitchAt (t);

        auto beat = ts.toBeats (t);
        BeatPosition pos;

        // Walk the progression to find the chord containing `beat`.
        for (auto& p : progression)
        {
            if (beat >= pos && beat < pos + p->lengthInBeats)
            {
                int key = pitchSetting.getPitch() % 12;

                auto scale = pitchSetting.getScale();

                // Chords without a name fall through to the plain auto-pitch path below.
                if (p->chordName.get().isNotEmpty())
                {
                    int scaleNote = key;
                    int chordNote = p->getRootNote (key, scale);

                    int delta = chordNote - scaleNote;

                    int transposeBase = scaleNote - (clip.getLoopInfo().getRootNote() % 12);

                    // Fold the transposition into the range [-6, +6] semitones.
                    while (transposeBase > 6) transposeBase -= 12;
                    while (transposeBase < -6) transposeBase += 12;

                    transposeBase += p->octave * 12;

                    return (float) (transposeBase + delta + clip.getTransposeSemiTones (false));
                }
            }

            // NOTE(review): this uses lengthInBeats.get() whereas the comparison
            // above uses lengthInBeats directly — presumably equivalent; confirm.
            pos = pos + p->lengthInBeats.get();
        }
    }

    if (clip.getAutoPitch())
    {
        auto& ps = clip.edit.pitchSequence;
        auto& pitchSetting = ps.getPitchAt (t);

        int pitch = pitchSetting.getPitch();
        int transposeBase = pitch - clip.getLoopInfo().getRootNote();

        // Fold the transposition into the range [-6, +6] semitones.
        while (transposeBase > 6) transposeBase -= 12;
        while (transposeBase < -6) transposeBase += 12;

        return (float) (transposeBase + clip.getTransposeSemiTones (false));
    }

    return clip.getPitchChange();
}
784
785}} // namespace tracktion { inline namespace engine
T ceil(T... args)
ElementType getUnchecked(int index) const
void ensureStorageAllocated(int minNumElements)
int size() const noexcept
ElementType * begin() noexcept
ElementType * end() noexcept
void add(const ElementType &newElement)
bool contains(ParameterType elementToLookFor) const
T end(T... args)
T is_pointer_v
#define jassert(expression)
#define DBG(textToWrite)
typedef int
typedef float
T max(T... args)
Type jlimit(Type lowerLimit, Type upperLimit, Type valueToConstrain) noexcept
int roundToInt(const FloatType value) noexcept
constexpr int64_t toSamples(TimePosition, double sampleRate)
Converts a TimePosition to a number of samples.
constexpr TimePosition toPosition(TimeDuration)
Converts a TimeDuration to a TimePosition.
RangeType< TimePosition > TimeRange
A RangeType based on real time (i.e.
constexpr TimeDuration toDuration(TimePosition)
Converts a TimePosition to a TimeDuration.
TimePosition abs(TimePosition)
Returns the absolute of this TimePosition.
T prev(T... args)
T sort(T... args)
#define CRASH_TRACER
This macro adds the current location to a stack which gets logged if a crash happens.