iOS Quickstart
Build a simple pitch detector in 5 minutes.
What You'll Build
A minimal SwiftUI app that:
- Records audio from the microphone
- Detects pitch in real-time
- Displays the detected note
Prerequisites
- Xcode 14 or later
- VoxaTrace installed (see Installation)
- Microphone permission configured in Info.plist
Step 1: Configure Info.plist
Add microphone usage description:
<key>NSMicrophoneUsageDescription</key>
<string>We need microphone access to detect pitch from your voice.</string>
Step 2: Create the Pitch Detector View
import SwiftUI
import VoxaTrace
/// Minimal pitch-readout screen: big note label, frequency in Hz,
/// a confidence bar, and a start/stop toggle button.
struct PitchDetectorView: View {
    @StateObject private var viewModel = PitchDetectorViewModel()

    var body: some View {
        VStack(spacing: 20) {
            // Detected note (e.g. "A4"), oversized for at-a-glance reading.
            Text(viewModel.note)
                .font(.system(size: 72, weight: .bold))

            // Raw frequency, truncated to whole Hz.
            Text("\(Int(viewModel.frequency)) Hz")
                .font(.title2)

            // Confidence in 0...1 drives the bar directly.
            ProgressView(value: viewModel.confidence)
                .frame(width: 200)

            Button(viewModel.isRecording ? "Stop" : "Start") {
                // Not recording yet: kick off the async start and bail.
                guard viewModel.isRecording else {
                    Task { await viewModel.start() }
                    return
                }
                // Already recording: stop synchronously.
                viewModel.stop()
            }
            .buttonStyle(.borderedProminent)
        }
        .padding()
    }
}
Step 3: Create the View Model
import Foundation
import VoxaTrace
import Combine
@MainActor
class PitchDetectorViewModel: ObservableObject {
    /// Latest detected note label (e.g. "A4"), or "--" when no pitch is found.
    @Published var note: String = "--"
    /// Latest detected frequency in Hz; 0 when no pitch is found.
    @Published var frequency: Float = 0
    /// Detection confidence in 0...1, fed straight into a ProgressView.
    @Published var confidence: Float = 0
    /// Whether a recording session is currently active.
    @Published var isRecording: Bool = false

    private var recorder: SonixRecorder?
    private var detector: CalibraPitch.Detector?
    private var task: Task<Void, Never>?

    /// Starts recording and streams pitch updates into the published properties.
    func start() async {
        // Guard against double-start (e.g. rapid taps): a second start would
        // leak the first recorder/detector pair without stopping them.
        guard !isRecording else { return }

        // Create recorder with voice settings and a pitch detector.
        recorder = SonixRecorder.createTemporary(config: .voice)
        detector = CalibraPitch.createDetector()

        // Start recording.
        recorder?.start()
        isRecording = true

        // Process audio buffers. Capture self weakly: a strong capture here is
        // a retain cycle that keeps the view model alive for as long as the
        // stream runs, so deinit would never fire. The Task inherits this
        // class's @MainActor isolation, so published properties can be
        // assigned directly — no MainActor.run hop is needed.
        task = Task { [weak self] in
            guard let stream = self?.recorder?.audioBuffersStream() else { return }
            for await buffer in stream {
                // Re-acquire self each iteration; stop if we were cancelled
                // or the view model has gone away.
                guard !Task.isCancelled, let self else { break }
                guard let point = self.detector?.detect(
                    samples: buffer.samples,
                    sampleRate: Int(buffer.sampleRate)
                ) else { continue }

                if point.pitch > 0 {
                    self.note = CalibraMusic.hzToNoteLabel(point.pitch)
                    self.frequency = point.pitch
                    self.confidence = point.confidence
                } else {
                    // Silence / unvoiced frame: reset the readout.
                    self.note = "--"
                    self.frequency = 0
                    self.confidence = 0
                }
            }
        }
    }

    /// Cancels the processing task and releases the recorder and detector.
    func stop() {
        task?.cancel()
        task = nil
        recorder?.stop()
        recorder?.release()
        recorder = nil
        detector?.close()
        detector = nil
        isRecording = false
    }

    deinit {
        // deinit is nonisolated, so it must NOT call the @MainActor-isolated
        // stop() (a compile error under Swift 6 strict concurrency).
        // Cancelling the task is sufficient here: the [weak self] capture lets
        // the stream loop end, and the recorder/detector are released with the
        // instance. Accessing a stored property from deinit is permitted.
        task?.cancel()
    }
}
What You'll See
When you sing into the microphone, the on-screen readout updates in real time with values like:
Note: A4, Frequency: 440 Hz, Confidence: 92%
Note: A4, Frequency: 441 Hz, Confidence: 89%
Note: B4, Frequency: 494 Hz, Confidence: 87%
Note: --, Frequency: 0 Hz, Confidence: 0% ← breath/silence
Note: C5, Frequency: 523 Hz, Confidence: 91%
The app is:
- Recording audio buffers from the microphone (~50ms chunks)
- Running pitch detection on each buffer
- Converting frequency to a musical note name using `CalibraMusic.hzToNoteLabel()`
- Showing confidence (how certain the detection is)
Troubleshooting:
- Seeing lots of `--` entries? Make sure microphone permission is granted in Settings
- Low confidence values? Sing closer to the device, reduce background noise
- App crashes on launch? Check that `NSMicrophoneUsageDescription` is in Info.plist
Step 4: Request Microphone Permission
The system will automatically prompt for permission when you start recording. However, you can request it proactively:
import AVFoundation
/// Asynchronously asks the user for microphone access.
///
/// Bridges the callback-based `requestRecordPermission` API into
/// async/await via a checked continuation (resumed exactly once).
/// - Returns: `true` if the user granted permission.
func requestMicrophonePermission() async -> Bool {
    let session = AVAudioSession.sharedInstance()
    return await withCheckedContinuation { continuation in
        session.requestRecordPermission { continuation.resume(returning: $0) }
    }
}
Use it in your view:
/// Permission-gated wrapper: shows a "grant access" button until the
/// microphone permission is in hand, then the pitch detector UI.
struct PitchDetectorView: View {
    @StateObject private var viewModel = PitchDetectorViewModel()
    @State private var hasPermission = false

    var body: some View {
        Group {
            if !hasPermission {
                // Permission not yet granted: offer to request it proactively.
                Button("Grant Microphone Access") {
                    Task {
                        hasPermission = await requestMicrophonePermission()
                    }
                }
            } else {
                // ... pitch detector UI
            }
        }
        .task {
            // Seed the flag from the current system permission state so a
            // returning user skips the button entirely.
            hasPermission = AVAudioSession.sharedInstance().recordPermission == .granted
        }
    }
}
Complete UIKit Example
If you're using UIKit:
import UIKit
import VoxaTrace
/// UIKit equivalent of the SwiftUI example: starts pitch detection when the
/// view appears and tears it down when the view disappears.
class PitchDetectorViewController: UIViewController {
    private let noteLabel = UILabel()
    private let frequencyLabel = UILabel()
    private let confidenceBar = UIProgressView()

    private var recorder: SonixRecorder?
    private var detector: CalibraPitch.Detector?
    private var task: Task<Void, Never>?

    // MARK: - Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        Task {
            await startPitchDetection()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        stopPitchDetection()
    }

    // MARK: - UI

    /// Builds the note / frequency / confidence stack, centered in the view.
    private func setupUI() {
        view.backgroundColor = .systemBackground
        noteLabel.font = .systemFont(ofSize: 72, weight: .bold)
        noteLabel.textAlignment = .center
        noteLabel.text = "--"
        frequencyLabel.font = .systemFont(ofSize: 24)
        frequencyLabel.textAlignment = .center
        frequencyLabel.text = "0 Hz"
        confidenceBar.progressViewStyle = .default
        let stack = UIStackView(arrangedSubviews: [noteLabel, frequencyLabel, confidenceBar])
        stack.axis = .vertical
        stack.spacing = 16
        stack.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(stack)
        NSLayoutConstraint.activate([
            stack.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            stack.centerYAnchor.constraint(equalTo: view.centerYAnchor),
            confidenceBar.widthAnchor.constraint(equalToConstant: 200)
        ])
    }

    // MARK: - Pitch detection

    /// Creates the recorder/detector pair and starts the streaming loop.
    private func startPitchDetection() async {
        // viewDidAppear can fire more than once (e.g. returning from a pushed
        // screen); don't stack a second session on top of a live one.
        guard recorder == nil else { return }

        recorder = SonixRecorder.createTemporary(config: .voice)
        detector = CalibraPitch.createDetector()
        recorder?.start()

        // [weak self]: a strong capture would retain the view controller for
        // the lifetime of the stream. UIViewController is @MainActor, so this
        // Task inherits main-actor isolation and the labels can be updated
        // directly — no MainActor.run hop is needed.
        task = Task { [weak self] in
            guard let stream = self?.recorder?.audioBuffersStream() else { return }
            for await buffer in stream {
                // Re-acquire self each iteration; stop on cancellation or
                // after the controller is deallocated.
                guard !Task.isCancelled, let self else { break }
                guard let point = self.detector?.detect(
                    samples: buffer.samples,
                    sampleRate: Int(buffer.sampleRate)
                ) else { continue }

                if point.pitch > 0 {
                    self.noteLabel.text = CalibraMusic.hzToNoteLabel(point.pitch)
                    self.frequencyLabel.text = "\(Int(point.pitch)) Hz"
                    self.confidenceBar.progress = point.confidence
                } else {
                    // Silence / unvoiced frame: reset the readout.
                    self.noteLabel.text = "--"
                    self.frequencyLabel.text = "0 Hz"
                    self.confidenceBar.progress = 0
                }
            }
        }
    }

    /// Cancels the streaming loop and releases the recorder and detector.
    private func stopPitchDetection() {
        task?.cancel()
        task = nil
        recorder?.stop()
        recorder?.release()
        // Nil everything out so a later viewDidAppear starts a fresh session
        // instead of being blocked by (or leaking) stale released objects.
        recorder = nil
        detector?.close()
        detector = nil
    }
}
Next Steps
- Detecting Pitch Guide - Deep dive into pitch detection options
- Recording Audio Guide - Learn about recording features
- Live Evaluation Guide - Score singing against reference