
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
using Windows.UI.Core;
using Windows.Media.Capture;
using Windows.ApplicationModel;
using System.Threading.Tasks;
using Windows.System.Display;
using Windows.Graphics.Display;
using Windows.Media.Capture.Frames;
using Windows.Media.Devices;
using Windows.Devices.Enumeration;
using Windows.Media.Audio;
using Windows.Media.Render;
using Windows.Media.MediaProperties;
using Windows.Graphics.Imaging;
using System.Threading;
using Windows.UI.Xaml.Media.Imaging;
using System.Diagnostics;
using System.Collections.Concurrent;
// The Blank Page item template is documented at https://go.microsoft.com/fwlink/?LinkId=402352&clcid=0x409
namespace CameraRendererCS
{
/// <summary>
/// The main page: renders the selected camera's video stream and passes its audio through to the default output device.
/// </summary>
public sealed partial class MainPage : Page
{
MediaCapture mediaCapture;
MediaFrameReader frameReader;
bool isStreamRunning = false;
bool calcPerfStats = true;
DisplayRequest displayRequest = new DisplayRequest();
bool _taskRunning = false;
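// Holds the most recently converted frame; the frame-arrived handler swaps new
// frames in atomically and the UI thread drains it before rendering.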
private SoftwareBitmap _backBuffer = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 1, 1);
AudioGraph graph;
// Device lists populated by FindVideoSources and FindAudioSources
DeviceInformationCollection audioDevices;
IReadOnlyList<MediaFrameSourceGroup> videoDevices;
public MainPage()
{
this.InitializeComponent();
visualDecayTimer = new Timer((t) =>
{
fadeWindow();
});
}
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
// Grab the list of video media sources and show them to the user
await FindVideoSources();
// Grab the audio sources too
await FindAudioSources();
}
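/// <summary>
/// Enumerates every MediaFrameSourceGroup on the system and lists it in videoComboBox.
/// </summary>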
private async Task FindVideoSources()
{
videoComboBox.Items.Clear();
videoDevices = await MediaFrameSourceGroup.FindAllAsync();
videoComboBox.Items.Add("-- Pick input device --");
foreach (MediaFrameSourceGroup device in videoDevices)
{
videoComboBox.Items.Add(device.DisplayName);
}
if (videoComboBox.SelectedIndex == -1 && videoComboBox.Items.Count > 0) videoComboBox.SelectedIndex = 0;
}
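/// <summary>
/// Enumerates the audio capture devices and lists them in audioComboBox.
/// </summary>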
private async Task FindAudioSources()
{
audioComboBox.Items.Clear();
audioDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector());
audioComboBox.Items.Add("-- Pick input device --");
foreach (DeviceInformation device in audioDevices)
{
audioComboBox.Items.Add(device.Name);
}
if (audioComboBox.SelectedIndex == -1 && audioComboBox.Items.Count > 0) audioComboBox.SelectedIndex = 0;
}
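/// <summary>
/// Starts the video frame reader and the audio passthrough graph, and requests
/// that the display stay active while the stream runs.
/// </summary>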
private async void startStreamButton_Click(object sender, RoutedEventArgs e)
{
await CreateVideoReader();
await CreateAudioGraph();
displayRequest.RequestActive();
}
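/// <summary>
/// Builds a passthrough AudioGraph: the selected capture device feeds directly
/// into the default render device, so the camera's audio is heard live.
/// </summary>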
private async Task CreateAudioGraph()
{
// Guard against the "-- Pick input device --" placeholder being selected;
// indexing audioDevices with SelectedIndex - 1 would throw otherwise.
if (audioComboBox.SelectedIndex < 1)
{
return;
}
AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
{
// QuantumSizeSelectionMode.LowestLatency could be requested here to shrink buffer sizes.
PrimaryRenderDevice = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default))
};
CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
if (result.Status != AudioGraphCreationStatus.Success)
{
return;
}
graph = result.Graph;
CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
{
return;
}
CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other, graph.EncodingProperties, audioDevices[audioComboBox.SelectedIndex - 1]);
if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
{
return;
}
// Route the capture device's samples straight into the render device.
deviceInputNodeResult.DeviceInputNode.AddOutgoingConnection(deviceOutputNodeResult.DeviceOutputNode);
graph.Start();
}
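/// <summary>
/// Creates and starts a MediaFrameReader for the currently selected source and
/// format, wiring up FrameReader_FrameArrived as the per-frame callback.
/// </summary>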
private async Task CreateVideoReader()
{
FrameFormatModel model = videoTypeComboBox.SelectedItem as FrameFormatModel;
MediaFrameSource source = videoStreamComboBox.SelectedItem as MediaFrameSource;
if (model == null || source == null)
{
// No stream or format has been selected yet.
return;
}
MediaFrameFormat format = model.Format;
// Look for a format which the frame renderer can render.
string requestedSubtype = GetSubtypeForFrameReader(source.Info.SourceKind, format);
if (requestedSubtype == null)
{
// This source kind/format combination cannot be rendered.
return;
}
// Tell the source to use the format we can render.
await source.SetFormatAsync(format);
try
{
frameReader = await mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);
// Realtime mode drops stale frames instead of queueing them, keeping latency low.
frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
frameReader.FrameArrived += FrameReader_FrameArrived;
lastFrameTime = Stopwatch.GetTimestamp();
MediaFrameReaderStartStatus status = await frameReader.StartAsync();
if (status != MediaFrameReaderStartStatus.Success)
{
Debug.WriteLine("Frame reader failed to start: {0}", status);
return;
}
isStreamRunning = true;
if (!mouseInside)
{
_ = visualDecayTimer.Change(15000, Timeout.Infinite);
}
}
catch (Exception e)
{
Debug.WriteLine(e.Message);
Debug.WriteLine("Media Source Info: {0}", source.Info.ToString());
}
}
long lastFrameTime = 0;
double averageFrameTime = -1.0;
double lastFrameTimeMaximum = 0;
// Time taken to process the frame, in delta-ticks
long frameProcessingTime = 0;
TaskFactory perfTaskFactory = new TaskFactory(TaskCreationOptions.None, TaskContinuationOptions.None);
// Use a concurrent queue to ensure synchronisation
ConcurrentQueue<double> previousFrameTimes = new ConcurrentQueue<double>();
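/// <summary>
/// Per-frame callback: optionally updates the rolling frame-time statistics on a
/// worker task, converts the frame to a displayable Bgra8 bitmap, and swaps it
/// into _backBuffer for the UI thread to render.
/// </summary>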
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
long newTime = Stopwatch.GetTimestamp();
if (calcPerfStats)
{
// Build the new task, to help fire off frames quickly
_ = perfTaskFactory.StartNew(() =>
{
// The stats run on a worker task so the FrameArrived handler returns quickly;
// the handler's remaining cost (~6 ms) is likely dominated by the bitmap conversion below.
if (previousFrameTimes.Count >= 180)
{
_ = previousFrameTimes.TryDequeue(out _);
}
string deltaTime = "";
if (averageFrameTime < 0)
{
averageFrameTime = (newTime - lastFrameTime) / (double)TimeSpan.TicksPerMillisecond;
}
else
{
double newVal = (newTime - lastFrameTime) / (double)TimeSpan.TicksPerMillisecond;
deltaTime = (averageFrameTime - newVal).ToString("N1");
previousFrameTimes.Enqueue(newVal);
double localMaxIn = previousFrameTimes.Max();
_ = Interlocked.Exchange(ref lastFrameTimeMaximum, localMaxIn);
double sampledAverage = previousFrameTimes.Average();
_ = Interlocked.Exchange(ref averageFrameTime, sampledAverage);
}
string time = averageFrameTime.ToString("N1");
_ = Interlocked.Exchange(ref lastFrameTime, newTime);
var task = renderTarget.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
timelabel.Text = time;
timelabel_delta.Text = deltaTime;
maxTimeLabel.Text = lastFrameTimeMaximum.ToString("N2");
// Delegate the double-precision division till this function
frameProcessingTimeLabel.Text = (frameProcessingTime / (double)TimeSpan.TicksPerMillisecond).ToString("N2");
});
});
}
// TryAcquireLatestFrame will return the latest frame that has not yet been acquired.
// This can return null if there is no such frame, or if the reader is not in the
// "Started" state. The latter can occur if a FrameArrived event was in flight
// when the reader was stopped.
using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
{
if (frame != null)
{
SoftwareBitmap result = null;
using (SoftwareBitmap inputBitmap = frame.VideoMediaFrame.SoftwareBitmap)
{
if (inputBitmap != null)
{
// XAML requires Bgra8 with premultiplied alpha.
// We requested Bgra8 from the MediaFrameReader, so all that's
// left is fixing the alpha channel if necessary.
if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8)
{
// Non-Bgra8 frames (e.g. depth or infrared subtypes) would need a custom
// pixel transform to display; leave result null so the frame is skipped.
}
else if (inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
{
// Already in the correct format.
result = SoftwareBitmap.Copy(inputBitmap);
}
else
{
// Convert to premultiplied alpha, as XAML requires.
result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
}
}
}
if (result != null)
{
// Swap the processed frame to _backBuffer and trigger UI thread to render it
result = Interlocked.Exchange(ref _backBuffer, result);
// The UI thread always resets _backBuffer before using it; dispose the bitmap that was displaced.
result?.Dispose();
// Changes to the XAML Image element must happen on the UI thread, via its Dispatcher.
var task = renderTarget.Dispatcher.RunAsync(CoreDispatcherPriority.High,
async () =>
{
// Don't let two copies of this task run at the same time.
if (_taskRunning)
{
return;
}
_taskRunning = true;
// Keep draining frames from the backbuffer until the backbuffer is empty.
SoftwareBitmap latestBitmap;
// About to push a frame; get the delta at this point
long thisFrameProcessTime = Stopwatch.GetTimestamp() - newTime;
_ = Interlocked.Exchange(ref frameProcessingTime, thisFrameProcessTime);
while ((latestBitmap = Interlocked.Exchange(ref _backBuffer, null)) != null)
{
await ((SoftwareBitmapSource)renderTarget.Source).SetBitmapAsync(latestBitmap);
latestBitmap.Dispose();
}
_taskRunning = false;
});
}
}
}
}
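/// <summary>
/// Maps a frame source kind and format to a subtype this page can render,
/// or null if the format is unsupported.
/// </summary>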
public static string GetSubtypeForFrameReader(MediaFrameSourceKind kind, MediaFrameFormat format)
{
// Note that media encoding subtypes may differ in case.
// https://docs.microsoft.com/en-us/uwp/api/Windows.Media.MediaProperties.MediaEncodingSubtypes
string subtype = format.Subtype;
switch (kind)
{
// For color sources, we accept anything and request that it be converted to Bgra8.
case MediaFrameSourceKind.Color:
return MediaEncodingSubtypes.Bgra8;
// The only depth format we can render is D16.
case MediaFrameSourceKind.Depth:
return String.Equals(subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase) ? subtype : null;
// The only infrared formats we can render are Nv12, L8, and L16.
case MediaFrameSourceKind.Infrared:
return (String.Equals(subtype, MediaEncodingSubtypes.Nv12, StringComparison.OrdinalIgnoreCase) ||
String.Equals(subtype, MediaEncodingSubtypes.L8, StringComparison.OrdinalIgnoreCase) ||
String.Equals(subtype, MediaEncodingSubtypes.L16, StringComparison.OrdinalIgnoreCase)) ? subtype : null;
// No other source kinds are supported by this class.
default:
return null;
}
}
bool flyOutState = true;
private void CollapseButton_Click(object sender, RoutedEventArgs e)
{
if (flyOutState)
{
FlyoutGrid.RowDefinitions[0].Height = new GridLength(0);
FlyoutGrid.Width = CollapseButton.Width;
CollapseButton.Content = "";
flyOutState = false;
}
else
{
FlyoutGrid.RowDefinitions[0].Height = GridLength.Auto;
CollapseButton.Content = "";
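// Setting Width to double.NaN restores automatic sizing.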
FlyoutGrid.Width = double.NaN;
flyOutState = true;
}
}
// Fires after a period of pointer inactivity to fade out the flyout UI
Timer visualDecayTimer;
bool mouseInside = false;
private void Page_PointerMoved(object sender, PointerRoutedEventArgs e)
{
if (isStreamRunning && !mouseInside)
{
var task = renderTarget.Dispatcher.RunAsync(CoreDispatcherPriority.Low, () =>
{
ScalarTransition fade = new ScalarTransition
{
Duration = TimeSpan.FromSeconds(0)
};
FlyoutGrid.OpacityTransition = fade;
FlyoutGrid.Opacity = 1.0;
});
visualDecayTimer.Change(5000, Timeout.Infinite);
}
}
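// Fades the flyout to near-transparency over five seconds.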
private void fadeWindow()
{
var task = renderTarget.Dispatcher.RunAsync(CoreDispatcherPriority.Low, () =>
{
ScalarTransition fade = new ScalarTransition
{
Duration = TimeSpan.FromSeconds(5)
};
FlyoutGrid.OpacityTransition = fade;
FlyoutGrid.Opacity = 0.01;
});
}
private void FlyoutGrid_PointerEntered(object sender, PointerRoutedEventArgs e)
{
mouseInside = true;
}
private void FlyoutGrid_PointerExited(object sender, PointerRoutedEventArgs e)
{
mouseInside = false;
}
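/// <summary>
/// Tears down any running reader and capture, then initialises MediaCapture for
/// the newly selected source group and populates the stream and format pickers.
/// </summary>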
private async void videoComboBox_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
// Deregister and destroy any existing frame reader
if (frameReader != null)
{
frameReader.FrameArrived -= FrameReader_FrameArrived;
frameReader.Dispose();
frameReader = null;
}
// reset items
mediaCapture?.Dispose();
mediaCapture = null;
if (videoComboBox.SelectedIndex > 0)
{
// Blank the existing canvas
renderTarget.Source = new SoftwareBitmapSource();
// Initialise a new MediaCapture
mediaCapture = new MediaCapture();
MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings
{
SourceGroup = videoDevices[videoComboBox.SelectedIndex - 1],
// Take exclusive control of the device so the stream format can be changed.
SharingMode = MediaCaptureSharingMode.ExclusiveControl,
// Only stream video and don't initialize audio capture devices.
StreamingCaptureMode = StreamingCaptureMode.Video,
// Set to CPU to ensure frames always contain CPU SoftwareBitmap images
// instead of preferring GPU D3DSurface images.
MemoryPreference = MediaCaptureMemoryPreference.Cpu
};
await mediaCapture.InitializeAsync(settings);
// Grab the kinds of media source we have
HashSet<MediaFrameSourceKind> startedKinds = new HashSet<MediaFrameSourceKind>();
IEnumerable<MediaFrameSource> mediaFrameSources = mediaCapture.FrameSources.Values.ToArray();
videoStreamComboBox.ItemsSource = mediaFrameSources;
MediaFrameSource selectedSource;
if (mediaCapture.FrameSources.Values.Count() > 1)
{
// Multiple streams: default to the first and reveal the picker so the
// user can choose a specific one.
videoStreamComboBox.SelectedIndex = 0;
selectedSource = mediaCapture.FrameSources.Values.First();
videoStreamLabel.Visibility = Visibility.Visible;
videoStreamComboBox.Visibility = Visibility.Visible;
}
else if (mediaCapture.FrameSources.Values.Count() == 0)
{
// Error, as camera has no streams
return;
}
else
{
// Just the one, continue
selectedSource = mediaCapture.FrameSources.Values.First();
videoStreamComboBox.SelectedIndex = 0;
}
// Source selected; enumerate formats
IEnumerable<FrameFormatModel> streamFormats = selectedSource.SupportedFormats
.Where(format => GetSubtypeForFrameReader(selectedSource.Info.SourceKind, format) != null)
.Select(format => new FrameFormatModel(format));
videoTypeComboBox.ItemsSource = streamFormats;
}
}
/// <summary>
/// View model for MediaFrameFormat used in XAML ContentControl.
/// </summary>
public class FrameFormatModel
{
public MediaFrameFormat Format { get; }
public string DisplayName { get; }
public FrameFormatModel(MediaFrameFormat format)
{
this.Format = format;
this.DisplayName = string.Format("{0} | {1} | {2} x {3} | {4:#.##}fps",
format.MajorType,
format.Subtype,
format.VideoFormat?.Width,
format.VideoFormat?.Height,
Math.Round((double)format.FrameRate.Numerator / format.FrameRate.Denominator, 2));
}
public override string ToString()
{
return this.DisplayName;
}
/// <summary>
/// Compares the Format contained by this view model to the given format for equivalency.
/// </summary>
/// <param name="otherFormat">The MediaFrameFormat to compare to the MediaFrameFormat in this view model.</param>
/// <returns>true if the formats match on all compared properties; otherwise false.</returns>
public bool HasSameFormat(MediaFrameFormat otherFormat)
{
if (otherFormat == null)
{
return (Format == null);
}
return this.Format.MajorType == otherFormat.MajorType &&
this.Format.Subtype == otherFormat.Subtype &&
this.Format.FrameRate.Numerator == otherFormat.FrameRate.Numerator &&
this.Format.FrameRate.Denominator == otherFormat.FrameRate.Denominator &&
this.Format.VideoFormat?.Width == otherFormat.VideoFormat?.Width &&
this.Format.VideoFormat?.Height == otherFormat.VideoFormat?.Height;
}
}
private void PerfStatsView_Unchecked(object sender, RoutedEventArgs e)
{
calcPerfStats = false;
_ = renderTarget.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
StatisticsPane.Visibility = Visibility.Collapsed;
});
}
private void PerfStatsView_Checked(object sender, RoutedEventArgs e)
{
calcPerfStats = true;
_ = renderTarget.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
{
StatisticsPane.Visibility = Visibility.Visible;
timelabel.Text = "0.0";
timelabel_delta.Text = "0.0";
maxTimeLabel.Text = "";
frameProcessingTimeLabel.Text = "";
});
}
}
}