首页 > 解决方案 > 为什么我的多通道映射不能正常工作?

问题描述

我最近发布了这个关于在 iOS 中使用多路由的问题,我以为我解决了它,但是我发现它不太有效:AVAudioEngine Multichannel mapping

我遇到的问题是多路由仅适用于前两个输出通道。我正在尝试使其适用于 4 通道音频接口。

我已经设法使用 AVAudioPlayer 将音频路由到 USB 接口的每个输出:

var avplayer = AVAudioPlayer()

@IBAction func avAudioPlayerPlay(_ sender: Any)
{
    let audioSession = AVAudioSession.sharedInstance()
    let route = audioSession.currentRoute

    // set the session category
    do
    {
        //try audioSession.setCategory(.multiRoute)
        try audioSession.setCategory(.multiRoute, options: .mixWithOthers)
    }
    catch
    {
        print("unable to set category", error)
        return
    }

    // activate the audio session - turns on multiroute I believe
    do
    {
        try audioSession.setActive(true)
        //try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    }
    catch
    {
        print("unable to set active", error)
        return
    }

    //audio interface + headphone jack
    let outputs:[AVAudioSessionChannelDescription] = [
        route.outputs[0].channels![2], // 3rd channel on Audio Interface
        route.outputs[1].channels![1]  // Right Channel of Headphones
    ]

    guard let filePath: String = Bundle.main.path(forResource: "audio", ofType: "m4a") else { return }
    let fileURL: URL = URL(fileURLWithPath: filePath)

    do
    {
        avplayer = try AVAudioPlayer(contentsOf: fileURL)
    }
    catch
    {
        print("play error", error)
        return
    }

    avplayer.channelAssignments = outputs

    let result = avplayer.play()
    print(result)
}

但我无法使用 AVAudioEngine 让它工作:

private func getOutputChannelMapIndices(_ names:[String?]) -> [Int]
{
    let session = AVAudioSession.sharedInstance()
    let route = session.currentRoute
    let outputPorts = route.outputs

    var channelMapIndices:[Int] = []

    for name in names
    {
        var chIndex = 0
        for outputPort in outputPorts
        {
            guard let channels = outputPort.channels else
            {
                continue
            }
            for channel in channels
            {
                print(channel.channelName)
                if channel.channelName == name
                {
                    if names.count > channelMapIndices.count
                    {
                        channelMapIndices.append(chIndex)
                    }
                }
                chIndex += 1
            }
        }
    }
    return channelMapIndices
}

@IBAction func nodesPlay(_ sender: Any)
{
    let channelNames = [
        "UMC204HD 192k 3",
        "Headphones Left",
        "Headphones Right",
        nil
    ]

    let audioSession = AVAudioSession.sharedInstance()

    // set the session category
    do
    {
        //try audioSession.setCategory(.multiRoute)
        try audioSession.setCategory(.multiRoute, options: .mixWithOthers)
    }
    catch
    {
        print("unable to set category", error)
        return
    }

    // activate the audio session - turns on multiroute I believe
    do
    {
        try audioSession.setActive(true)
        //try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    }
    catch
    {
        print("unable to set active", error)
        return
    }

    let channelMapIndices = getOutputChannelMapIndices(channelNames)

    print("channelMapIndices: ", channelMapIndices)

    engine = AVAudioEngine()
    output = engine.outputNode
    mixer = engine.mainMixerNode

    player = AVAudioPlayerNode()

    engine.attach(player)

    guard let filePath: String = Bundle.main.path(forResource: "audio", ofType: "m4a") else { return }
    let fileURL: URL = URL(fileURLWithPath: filePath)
    let file = try! AVAudioFile(forReading: fileURL)

    let outputNumChannels = output.outputFormat(forBus: 0).channelCount
    print("outputNumChannels:" , outputNumChannels)

    var outputChannelMap:[Int] = Array(repeating: -1, count: Int(outputNumChannels))

    let numberOfSourceChannels = file.processingFormat.channelCount
    print("numberOfSourceChannels: ", numberOfSourceChannels)

    var sourceChIndex = 0
    for chIndex in channelMapIndices
    {
        if chIndex < outputNumChannels && sourceChIndex < numberOfSourceChannels
        {
            outputChannelMap[chIndex] = sourceChIndex
            sourceChIndex += 1
        }
    }

    print("outputChannelMap: ", outputChannelMap)

    if let au = output.audioUnit
    {
        let propSize = UInt32(MemoryLayout.size(ofValue: outputChannelMap))
        print("propSize:", propSize)
        let result = AudioUnitSetProperty(au, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Global, 0, &outputChannelMap, propSize)
        print("result: ", result)
    }

    let channelLayout = AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_DiscreteInOrder | UInt32(numberOfSourceChannels))
    let format = AVAudioFormat(streamDescription: file.processingFormat.streamDescription, channelLayout: channelLayout)

    engine.connect(player, to: mixer, format:format)
    engine.connect(mixer, to: output, format:format)

    player.scheduleFile(file, at: nil, completionHandler: nil)

    do
    {
        try engine.start()
    }
    catch
    {
        print("can't start", error)
        return
    }

    player.play()
}

如果有人能解释为什么我似乎无法播放任何音频来输出 3 或 4,我将不胜感激。

请注意,这里有很多代码是从这里翻译过来的:https ://forums.developer.apple.com/thread/15416

标签: iosswiftavaudioplayeravaudiosessionavaudioengine

解决方案


我相信问题是线路

let propSize = UInt32(MemoryLayout.size(ofValue: outputChannelMap))

这是给你数组对象的大小,本质上是指针的大小,而不是数组中对象的大小。请参阅Apple 文档中的讨论

属性的大小应该是数组中包含的通道数乘以 的大小Int32,因为AudioUnitSetProperty它是一个 C API,这将是相应 C 数组的大小。

let propSize = UInt32(MemoryLayout<Int32>.stride * outputChannelMap.count)

您还应该声明outputChannelMap为一个数组,Int32因为这是预期的类型kAudioOutputUnitProperty_ChannelMap

var outputChannelMap:[Int32] = Array(repeating: -1, count: Int(outputNumChannels))

推荐阅读