
I am trying to combine the MicrophoneAnalysis and Recorder examples. It keeps crashing at the line try recorder.record().

2018-01-08 21:21:48.507019-0800 Music Practice[90266:18761122] [avae] AVAEInternal.h:70:_AVAE_Check: required condition is false: [AVAEGraphNode.mm:804:CreateRecordingTap: (nullptr == Tap())]
2018-01-08 21:21:48.527443-0800 Music Practice[90266:18761122] *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: nullptr == Tap()'
*** First throw call stack:

// -----------

import AudioKit

import AudioKitUI

import UIKit

class SecondViewController: UIViewController {

    @IBOutlet private var inputPlot: AKNodeOutputPlot!
    @IBOutlet weak var tempViewForRecordingAndPlay: UIView!
    @IBOutlet weak var outputWavePlot: AKOutputWaveformPlot!

    // for microphone Analysis
    @IBOutlet weak var frequencyLabel: UILabel!
    @IBOutlet weak var amplitudeLabel: UILabel!
    @IBOutlet weak var noteNameWithSharpsLabel: UILabel!
    @IBOutlet weak var noteNameWithFlatsLabel: UILabel!
    @IBOutlet private var audioInputPlot: EZAudioPlot!


    var micMixer: AKMixer!
    var recorder: AKNodeRecorder!
    var player: AKAudioPlayer!
    var tape: AKAudioFile!
    var micBooster: AKBooster!
    var moogLadder: AKMoogLadder!
    var delay: AKDelay!
    var mainMixer: AKMixer!

    let mic = AKMicrophone()

    var state = State.readyToRecord


    @IBOutlet private weak var infoLabel: UILabel!
    @IBOutlet private weak var resetButton: UIButton!
    @IBOutlet private weak var RecordOrPlay_Btn: UIButton!
    @IBOutlet private weak var frequencySlider: AKSlider!
    @IBOutlet private weak var resonanceSlider: AKSlider!
    @IBOutlet private weak var loopButton: UIButton!
    @IBOutlet private weak var moogLadderTitle: UILabel!

    enum State {
        case readyToRecord
        case recording
        case readyToPlay
        case playing

    }
    var plot:  AKNodeOutputPlot!
    var micNew: AKMicrophone!
    var tracker: AKFrequencyTracker!
    var silence: AKBooster!

    let noteFrequencies = [16.35, 17.32, 18.35, 19.45, 20.6, 21.83, 23.12, 24.5, 25.96, 27.5, 29.14, 30.87]
    let noteNamesWithSharps = ["C", "C♯", "D", "D♯", "E", "F", "F♯", "G", "G♯", "A", "A♯", "B"]
    let noteNamesWithFlats = ["C", "D♭", "D", "E♭", "E", "F", "G♭", "G", "A♭", "A", "B♭", "B"]


    @objc func updateUI() {
        if tracker.amplitude > 0.1 {
            frequencyLabel.text = String(format: "%0.1f", tracker.frequency)

            var frequency = Float(tracker.frequency)
            while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
                frequency /= 2.0
            }
            while frequency < Float(noteFrequencies[0]) {
                frequency *= 2.0
            }

            var minDistance: Float = 10_000.0
            var index = 0

            for i in 0..<noteFrequencies.count {
                let distance = fabsf(Float(noteFrequencies[i]) - frequency)
                if distance < minDistance {
                    index = i
                    minDistance = distance
                }
            }
            let octave = Int(log2f(Float(tracker.frequency) / frequency))
            noteNameWithSharpsLabel.text = "\(noteNamesWithSharps[index])\(octave)"
            noteNameWithFlatsLabel.text = "\(noteNamesWithFlats[index])\(octave)"
        }
        amplitudeLabel.text = String(format: "%0.2f", tracker.amplitude)
    }

    #if NOT_USED
    func setupPlot() {
        plot = AKNodeOutputPlot(micNew, frame: audioInputPlot.bounds)
        plot.plotType = .rolling
        plot.shouldFill = true
        plot.shouldMirror = true
        plot.color = UIColor.blue
        audioInputPlot.addSubview(plot)
    }
    #endif

    func setupPlot_forMic() {
        plot = AKNodeOutputPlot(micMixer, frame: audioInputPlot.bounds)
        plot.plotType = .rolling
        plot.shouldFill = true
        plot.shouldMirror = true
        plot.color = UIColor.red
        audioInputPlot.addSubview(plot)
    }


    func execute_viewDidAppear_micAnalysis() {
        //AudioKit.output = silence
        //AudioKit.start()
        setupPlot_forMic()
        Timer.scheduledTimer(timeInterval: 0.1,
                             target: self,
                             selector: #selector(SecondViewController.updateUI),
                             userInfo: nil,
                             repeats: true)
    }


    //view DID APPEAR
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        execute_viewDidAppear_micAnalysis()
    }

    // View DID LOAD
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        self.tempViewForRecordingAndPlay.addSubview(inputPlot);
        self.tempViewForRecordingAndPlay.addSubview(outputWavePlot);
        //parentView.bringSubview(toFront: childView)
        tempViewForRecordingAndPlay.bringSubview(toFront: inputPlot);
        tempViewForRecordingAndPlay.bringSubview(toFront: outputWavePlot);

        recorderPlayerSettings()

    #if TEMP

    #else
       micAnalysisSettings()
    #endif

    }


    func recorderPlayerSettings() {

        setupButtonNames()

        // Clean tempFiles!
        AKAudioFile.cleanTempDirectory()

        // Session settings
        AKSettings.bufferLength = .medium

        do {
            try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
        } catch {
            AKLog("Could not set session category.")
        }

        AKSettings.defaultToSpeaker = true

        // Patching
        inputPlot.node = mic
        micMixer = AKMixer(mic)
        micBooster = AKBooster(micMixer)

        //play(from: innerTime, to: endTime, when: 0)
       // passing 0 for endTime will use the duration.

        // Will set the level of microphone monitoring
        micBooster.gain = 0
        recorder = try? AKNodeRecorder(node: micMixer)
        if let file = recorder.audioFile {
            player = try? AKAudioPlayer(file: file)
        }
        player.looping = true
        player.completionHandler = playingEnded

        moogLadder = AKMoogLadder(player)

        mainMixer = AKMixer(moogLadder, micBooster)

        AudioKit.output = mainMixer
        AudioKit.start()
        setupUIForRecording()

    }

    func micAnalysisSettings() {
        AKSettings.audioInputEnabled = true
        //micNew = AKMicrophone()
        tracker = AKFrequencyTracker(micMixer)
        //tracker = AKFrequencyTracker(mic)
        silence = AKBooster(tracker, gain: 0)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // CallBack triggered when playing has ended
    // Must be dispatched on the main queue as completionHandler
    // will be triggered by a background thread
    func playingEnded() {
        DispatchQueue.main.async {
            self.setupUIForPlaying ()
        }
    }

    @IBAction func RecordOrPlay_BtnTouched(_ sender: UIButton) {

        switch state {
        case .readyToRecord :
            infoLabel.text = "Recording"
            RecordOrPlay_Btn.setTitle("Stop", for: .normal)
            state = .recording
            // microphone will be monitored while recording
            // only if headphones are plugged
            if AKSettings.headPhonesPlugged {
                micBooster.gain = 1
            }
            do {
                try recorder.record()
            } catch { print("Errored recording.") }

        case .recording :
            // Microphone monitoring is muted
            micBooster.gain = 0
            do {
                try player.reloadFile()
            } catch { print("Errored reloading.") }

            let recordedDuration = player != nil ? player.audioFile.duration  : 0
            if recordedDuration > 0.0 {
                recorder.stop()
                player.audioFile.exportAsynchronously(name: "TempTestFile.m4a",
                                                      baseDir: .documents,
                                                      exportFormat: .m4a) {_, exportError in
                                                        if let error = exportError {
                                                            print("Export Failed \(error)")
                                                        } else {
                                                            print("Export succeeded")
                                                        }
                }
                setupUIForPlaying ()
            }
        case .readyToPlay :
            player.play()
            infoLabel.text = "Playing..."
            RecordOrPlay_Btn.setTitle("Stop", for: .normal)
            state = .playing
        case .playing :
            player.stop()
            setupUIForPlaying()
        }
    }

    struct Constants {
        static let empty = ""
    }

    func setupButtonNames() {
        resetButton.setTitle(Constants.empty, for: UIControlState.disabled)
        RecordOrPlay_Btn.setTitle(Constants.empty, for: UIControlState.disabled)
        loopButton.setTitle(Constants.empty, for: UIControlState.disabled)
    }

    func setupUIForRecording () {
        state = .readyToRecord
        infoLabel.text = "Ready to record"
        RecordOrPlay_Btn.setTitle("Record", for: .normal)
        resetButton.isEnabled = false
        resetButton.isHidden = true
        micBooster.gain = 0
        setSliders(active: false)
    }

    func setupUIForPlaying () {
        let recordedDuration = player != nil ? player.audioFile.duration  : 0
        infoLabel.text = "Recorded: \(String(format: "%0.1f", recordedDuration)) seconds"
        RecordOrPlay_Btn.setTitle("Play", for: .normal)
        state = .readyToPlay
        resetButton.isHidden = false
        resetButton.isEnabled = true
        setSliders(active: true)
        frequencySlider.value = moogLadder.cutoffFrequency
        resonanceSlider.value = moogLadder.resonance
    }

    func setSliders(active: Bool) {
        loopButton.isEnabled = active
        moogLadderTitle.isEnabled = active
        frequencySlider.callback = updateFrequency
        frequencySlider.isHidden = !active
        resonanceSlider.callback = updateResonance
        resonanceSlider.isHidden = !active
        frequencySlider.range = 10 ... 2_000
        moogLadderTitle.text = active ? "Moog Ladder Filter" : Constants.empty
    }

    @IBAction func loopButtonTouched(_ sender: UIButton) {

        if player.looping {
            player.looping = false
            sender.setTitle("Loop is Off", for: .normal)
        } else {
            player.looping = true
            sender.setTitle("Loop is On", for: .normal)

        }

    }


    func updateFrequency(value: Double) {
        moogLadder.cutoffFrequency = value
        frequencySlider.property = "Frequency"
        frequencySlider.format = "%0.0f"
    }

    func updateResonance(value: Double) {
        moogLadder.resonance = value
        resonanceSlider.property = "Resonance"
        resonanceSlider.format = "%0.3f"
    }

    @IBAction func resetEverything(_ sender: Any) {
        player.stop()
        do {
            try recorder.reset()
        } catch { print("Errored resetting.") }

        //try? player.replaceFile((recorder.audioFile)!)
        setupUIForRecording()
    }



    // convert to a generic view animate function and dissociate from the button
    @IBAction func animateButton(_ sender: UIButton) {

        UIView.animate(withDuration: 0.5, delay: 0.0, options: UIViewAnimationOptions.curveEaseIn, animations: {
            //Frame Option 1:
            self.tempViewForRecordingAndPlay.frame = CGRect(x: self.tempViewForRecordingAndPlay.frame.origin.x, y: 20, width: self.tempViewForRecordingAndPlay.frame.width, height: self.tempViewForRecordingAndPlay.frame.height)

            //Frame Option 2:
            self.tempViewForRecordingAndPlay.center = CGPoint(x: self.view.frame.width / 2, y: self.view.frame.height / 4)
            //self.tempViewForRecordingAndPlay.backgroundColor = .blue

        },completion: { finish in
           /*
            UIView.animate(withDuration: 1, delay: 0.25,options: UIViewAnimationOptions.curveEaseOut,animations: {
                self.tempViewForRecordingAndPlay.backgroundColor = .orange
                self.tempViewForRecordingAndPlay.transform = CGAffineTransform(scaleX: 0.25, y: 0.25)

                //self.animationButton.isEnabled = false // If you want to restrict the button not to repeat animation..You can enable by setting into true

            },completion: nil)})
           */

            UIView.animate(withDuration: 1.0, delay: 0.25, usingSpringWithDamping:
            0.6, initialSpringVelocity: 0.3, options:
                UIViewAnimationOptions.allowAnimatedContent, animations: { () -> Void in
                //do actual move
                self.tempViewForRecordingAndPlay.center = self.tempViewForRecordingAndPlay.center
        }, completion:  nil)})

    }
}
  • While debugging this problem I found that commenting out execute_viewDidAppear_micAnalysis() in viewDidAppear() stops the crash, but with that call commented out the microphone analysis functions are disabled as well. Any thoughts? – user2876642 Jan 10 '18 at 04:59

1 Answer


You probably already have a tap on the bus, and you cannot have another one on the same bus.

Try calling micMixer.outputNode.removeTap(onBus: 0) before you call recorder.record().
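
For example, in the question's RecordOrPlay_BtnTouched(_:) handler the .readyToRecord branch could look roughly like this. This is only a sketch: it assumes micMixer is the node the AKNodeRecorder was created from, and that the earlier tap comes from the plot/analysis setup in the question.

case .readyToRecord :
    infoLabel.text = "Recording"
    RecordOrPlay_Btn.setTitle("Stop", for: .normal)
    state = .recording
    // microphone will be monitored while recording
    // only if headphones are plugged in
    if AKSettings.headPhonesPlugged {
        micBooster.gain = 1
    }
    // Remove the tap that is already installed on bus 0 of the mixer's
    // output node (most likely by the AKNodeOutputPlot created in
    // setupPlot_forMic()) so recorder.record() can install its own tap.
    micMixer.outputNode.removeTap(onBus: 0)
    do {
        try recorder.record()
    } catch { print("Errored recording.") }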

HereTrix
  • A whole week of debugging ended with this single line, thank you very much. Can you please help me understand what it means to "already have a tap on the bus and not be able to have another one"? Thanks a lot! – Dima Gimburg Oct 07 '18 at 20:30
  • As I understand it, a tap is a mid-level interface for reading or processing audio data. The restriction of only one tap per bus on a node is documented by Apple ( https://developer.apple.com/documentation/avfoundation/avaudionode/1387122-installtap ) – HereTrix Oct 08 '18 at 10:26
  • Thanks, so as I understand it, AudioKit attaches a tap onto the microphone mixer node whenever the record method is called on a recorder whose input is the microphone mixer's output? – Dima Gimburg Oct 08 '18 at 11:20
  • Yep. If you check the record method (https://github.com/AudioKit/AudioKit/blob/master/AudioKit/Common/Internals/AKNodeRecorder.swift) you can see that a tap is added each time recording starts (see the sketch after these comments) – HereTrix Oct 09 '18 at 11:06
  • @HereTrix Thanks, this "solved" my problem. But now it records an empty file with no sound. I am trying to record the final sound with all effects, while keeping the option to loop with the dry mic input, like the "Recorder" example, so I use two recorders. Is that possible? Thanks – Spring Apr 19 '19 at 13:36
  • Hi, thanks and upvoted for the great answer. I use the same code and recording works, but the analysis does not: the analysis timer never delivers any data at the same time. Do you have any idea? @HereTrix – Nitin Gohel Nov 04 '19 at 06:25
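
To make the tap discussion in these comments concrete, here is a small, self-contained AVFoundation sketch (hypothetical code, not taken from the question's project or from AudioKit's internals): a node accepts only one tap per bus, and a recorder-style tap is just another tap competing for that bus.

import AVFoundation

let engine = AVAudioEngine()
let input = engine.inputNode
let format = input.outputFormat(forBus: 0)

// First tap on bus 0, e.g. for plotting or frequency analysis.
input.installTap(onBus: 0, bufferSize: 1_024, format: format) { buffer, _ in
    // read or analyze the buffer here
}

// A node accepts only one tap per bus: installing a second tap on bus 0
// without removing the first one trips the same "nullptr == Tap()"
// assertion shown in the question, so remove the existing tap first.
input.removeTap(onBus: 0)

// Now a recorder-style tap can be installed on the same bus; writing the
// incoming buffers to a file is roughly what a node recorder does.
let url = FileManager.default.temporaryDirectory.appendingPathComponent("tap.caf")
if let file = try? AVAudioFile(forWriting: url, settings: format.settings) {
    input.installTap(onBus: 0, bufferSize: 1_024, format: format) { buffer, _ in
        try? file.write(from: buffer)
    }
}

// (The engine would still need to be prepared and started, and microphone
// permission granted, before any buffers actually flow.)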