Since CoreDispatcher has been deprecated in the WinUI 3 Windows App SDK and replaced with DispatcherQueue, how can the following code be converted to use DispatcherQueue properly?
fileInputNode.FileCompleted += FileInputNodeOnFileCompleted;
private async void FileInputNodeOnFileCompleted(AudioFileInputNode sender, object args)
{
await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
sender.Stop();
sender.Reset();
});
}
Another reason to update the code to use DispatcherQueue is that when using CoreDispatcher in the code, the following error occurs:
Object reference not set to an instance of an object.
EDIT
Per YangXiaoPo-MSFT's request, here is a reproducible sample of the test app that contains the problem at hand.
The following is the XAML pertaining to it:
<Window
x:Class="Test_3_Take_2.MainWindow"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:local="using:Test_3_Take_2"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
mc:Ignorable="d">
<StackPanel Orientation="Vertical" HorizontalAlignment="Center" VerticalAlignment="Center">
<Button x:Name="recordStopButton" Content="Record" Click="recordStopButton_Click"/>
<Button x:Name="playButton" Content="Play Recorded File" Margin="0,50,0,0" IsEnabled="False" Click="playButton_Click"/>
</StackPanel>
</Window>
And the following is the code behind:
using Microsoft.UI.Xaml;
using System;
using System.Threading.Tasks;
using Windows.Media.Audio;
using Windows.Media.MediaProperties;
using Windows.Storage;
using Windows.Media.Transcoding;
using Windows.Media.Capture;
using Windows.Media.Render;
using Microsoft.UI.Dispatching;
namespace Test_3_Take_2
{
public sealed partial class MainWindow : Window
{
private AudioGraph graph, graph2;
private AudioFileInputNode fileInputNode;
private AudioFileOutputNode fileOutputNode;
private AudioDeviceOutputNode deviceOutputNode, deviceOutputNode2;
private AudioDeviceInputNode deviceInputNode;
private StorageFolder temporaryFolder = ApplicationData.Current.TemporaryFolder;
private StorageFile file;
public MainWindow()
{
this.InitializeComponent();
this.Title = "Test 3 Take 2";
var root = this.Content as FrameworkElement;
if (root != null)
root.Loaded += async (s, e) => await CreateAudioGraph();
var root2 = this.Content as FrameworkElement;
if (root2 != null)
root2.Loaded += async (s, e) => await CreateAudioGraph2();
}
private void playButton_Click(object sender, RoutedEventArgs e)
{
if (file != null)
{
TogglePlay();
}
}
private async void TogglePlay()
{
//Toggle playback
if (playButton.Content.Equals("Play Recorded File"))
{
if (fileInputNode != null) {
fileInputNode.Dispose();
}
CreateAudioFileInputNodeResult fileInputResult = await graph2.CreateFileInputNodeAsync(file);
if (AudioFileNodeCreationStatus.Success != fileInputResult.Status)
{
// Cannot read input file
return;
}
fileInputNode = fileInputResult.FileInputNode;
fileInputNode.AddOutgoingConnection(deviceOutputNode2);
fileInputNode.StartTime = TimeSpan.FromSeconds(0);
graph2.Start();
playButton.Content = "Stop Playing Recorded File";
fileInputNode.FileCompleted += FileInputNodeOnFileCompleted;
if (fileInputNode.Position == TimeSpan.FromSeconds(0))
{
graph2.Stop();
playButton.Content = "Play Recorded File";
}
}
else
{
graph2.Stop();
playButton.Content = "Play Recorded File";
}
}
private async void FileInputNodeOnFileCompleted(AudioFileInputNode sender, object args) {
await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
sender.Stop();
sender.Reset();
}); }
private async void recordStopButton_Click(object sender, RoutedEventArgs e)
{
await ToggleRecordStop();
}
private async Task ToggleRecordStop()
{
if (recordStopButton.Content.Equals("Record"))
{
playButton.Content = "Play Recorded File";
playButton.IsEnabled = false;
if (deviceInputNode != null)
{
deviceInputNode.Dispose();
CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
{
// Cannot create device input node
return;
}
deviceInputNode = deviceInputNodeResult.DeviceInputNode;
}
file = await temporaryFolder.CreateFileAsync("file1.mp3",
CreationCollisionOption.ReplaceExisting);
// File can be null if cancel is hit in the file picker
if (file == null)
{
return;
}
MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
// Operate node at the graph format, but save file at the specified format
CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);
if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
{
// FileOutputNode creation failed
return;
}
fileOutputNode = fileOutputNodeResult.FileOutputNode;
// Connect the input node to both output nodes
deviceInputNode.AddOutgoingConnection(fileOutputNode);
deviceInputNode.AddOutgoingConnection(deviceOutputNode);
graph.Start();
recordStopButton.Content = "Stop";
}
else if (recordStopButton.Content.Equals("Stop"))
{
// Good idea to stop the graph to avoid data loss
graph.Stop();
TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
if (finalizeResult != TranscodeFailureReason.None)
{
// Finalization of file failed. Check result code to see why
return;
}
recordStopButton.Content = "Record";
playButton.IsEnabled = true;
}
}
private async Task CreateAudioGraph()
{
AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
if (result.Status != AudioGraphCreationStatus.Success)
{
// Cannot create graph
return;
}
graph = result.Graph;
// Create a device output node
CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
{
// Cannot create device output node
return;
}
deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
// Create a device input node using the default audio input device
CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
{
// Cannot create device input node
return;
}
deviceInputNode = deviceInputNodeResult.DeviceInputNode;
}
private async Task CreateAudioGraph2()
{
// Create an AudioGraph with default settings
AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
if (result.Status != AudioGraphCreationStatus.Success)
{
// Cannot create graph
return;
}
graph2 = result.Graph;
// Create a device output node
CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph2.CreateDeviceOutputNodeAsync();
if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
{
return;
}
deviceOutputNode2 = deviceOutputNodeResult.DeviceOutputNode;
}
}
}
