Skip to content

Commit a7a96cf

Browse files
authored
Merge pull request #110 from mihai8804858/main
Allow passing `AVAsset` to waveform analyzer
2 parents 5f1ce68 + f8a2ec3 commit a7a96cf

1 file changed

Lines changed: 13 additions & 3 deletions

File tree

Sources/DSWaveformImage/WaveformAnalyzer.swift

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,16 @@ public struct WaveformAnalyzer: Sendable {
3232
/// Calculates the amplitude envelope of the audio file at the given URL, downsampled to the required `count` amount of samples.
/// - Parameter audioAssetURL: URL of the audio file to process.
/// - Parameter count: amount of samples to be calculated. Downsamples.
/// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on.
public func samples(fromAudioAt audioAssetURL: URL, count: Int, qos: DispatchQoS.QoSClass = .userInitiated) async throws -> [Float] {
    try await Task(priority: taskPriority(qos: qos)) {
        // Precise duration/timing is required so the sample count maps accurately onto the asset's timeline.
        let asset = AVURLAsset(url: audioAssetURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
        // Delegate to the AVAsset-based overload, which performs the actual analysis.
        return try await samples(fromAsset: asset, count: count, qos: qos)
    }.value
}
38+
39+
/// Calculates the amplitude envelope of the initialized audio asset, downsampled to the required `count` amount of samples.
40+
/// - Parameter audioAsset: asset of the audio file to process.
41+
/// - Parameter count: amount of samples to be calculated. Downsamples.
42+
/// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on.
43+
public func samples(fromAsset audioAsset: AVAsset, count: Int, qos: DispatchQoS.QoSClass = .userInitiated) async throws -> [Float] {
44+
try await Task(priority: taskPriority(qos: qos)) {
3545
let assetReader = try AVAssetReader(asset: audioAsset)
3646

3747
guard let assetTrack = try await audioAsset.loadTracks(withMediaType: .audio).first else {
@@ -124,7 +134,7 @@ fileprivate extension WaveformAnalyzer {
124134
}
125135
CMSampleBufferInvalidate(nextSampleBuffer)
126136

127-
let processedSamples = process(sampleBuffer, from: assetReader, downsampleTo: samplesPerPixel)
137+
let processedSamples = process(sampleBuffer, downsampleTo: samplesPerPixel)
128138
outputSamples += processedSamples
129139

130140
if processedSamples.count > 0 {
@@ -150,15 +160,15 @@ fileprivate extension WaveformAnalyzer {
150160
let backfillPaddingSampleCount16 = backfillPaddingSampleCount * MemoryLayout<Int16>.size
151161
let backfillPaddingSamples = [UInt8](repeating: 0, count: backfillPaddingSampleCount16)
152162
sampleBuffer.append(backfillPaddingSamples, count: backfillPaddingSampleCount16)
153-
let processedSamples = process(sampleBuffer, from: assetReader, downsampleTo: samplesPerPixel)
163+
let processedSamples = process(sampleBuffer, downsampleTo: samplesPerPixel)
154164
outputSamples += processedSamples
155165
}
156166

157167
let targetSamples = Array(outputSamples[0..<targetSampleCount])
158168
return WaveformAnalysis(amplitudes: normalize(targetSamples), fft: outputFFT)
159169
}
160170

161-
private func process(_ sampleBuffer: Data, from assetReader: AVAssetReader, downsampleTo samplesPerPixel: Int) -> [Float] {
171+
private func process(_ sampleBuffer: Data, downsampleTo samplesPerPixel: Int) -> [Float] {
162172
var downSampledData = [Float]()
163173
let sampleLength = sampleBuffer.count / MemoryLayout<Int16>.size
164174

0 commit comments

Comments
 (0)