So I added the Coding4Fun and Microsoft references and did exactly what the Kinect SDK sample does to display a Kinect image frame on screen, but for some reason nothing is displayed in my application, even though it works when I run the SDK sample.
The code I wrote:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Microsoft.Research.Kinect.Nui;
using Coding4Fun.Kinect.Wpf;
using System.Net.Mail;
using System.IO;
using System.ComponentModel;

namespace WpfApplication1
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        public MainWindow()
        {
            InitializeComponent();
        }

        Runtime nui;
        int PersonDetected = 0;
        int totalFrames = 0;
        int lastFrames = 0;
        DateTime lastTime = DateTime.MaxValue;

        Dictionary<JointID, Brush> jointColors = new Dictionary<JointID, Brush>() {
            {JointID.HipCenter, new SolidColorBrush(Color.FromRgb(169, 176, 155))},
            {JointID.Spine, new SolidColorBrush(Color.FromRgb(169, 176, 155))},
            {JointID.ShoulderCenter, new SolidColorBrush(Color.FromRgb(168, 230, 29))},
            {JointID.Head, new SolidColorBrush(Color.FromRgb(200, 0, 0))},
            {JointID.ShoulderLeft, new SolidColorBrush(Color.FromRgb(79, 84, 33))},
            {JointID.ElbowLeft, new SolidColorBrush(Color.FromRgb(84, 33, 42))},
            {JointID.WristLeft, new SolidColorBrush(Color.FromRgb(255, 126, 0))},
            {JointID.HandLeft, new SolidColorBrush(Color.FromRgb(215, 86, 0))},
            {JointID.ShoulderRight, new SolidColorBrush(Color.FromRgb(33, 79, 84))},
            {JointID.ElbowRight, new SolidColorBrush(Color.FromRgb(33, 33, 84))},
            {JointID.WristRight, new SolidColorBrush(Color.FromRgb(77, 109, 243))},
            {JointID.HandRight, new SolidColorBrush(Color.FromRgb(37, 69, 243))},
            {JointID.HipLeft, new SolidColorBrush(Color.FromRgb(77, 109, 243))},
            {JointID.KneeLeft, new SolidColorBrush(Color.FromRgb(69, 33, 84))},
            {JointID.AnkleLeft, new SolidColorBrush(Color.FromRgb(229, 170, 122))},
            {JointID.FootLeft, new SolidColorBrush(Color.FromRgb(255, 126, 0))},
            {JointID.HipRight, new SolidColorBrush(Color.FromRgb(181, 165, 213))},
            {JointID.KneeRight, new SolidColorBrush(Color.FromRgb(71, 222, 76))},
            {JointID.AnkleRight, new SolidColorBrush(Color.FromRgb(245, 228, 156))},
            {JointID.FootRight, new SolidColorBrush(Color.FromRgb(77, 109, 243))}
        };

        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            if (Runtime.Kinects.Count == 0)
            {
                MessageBox.Show("No Kinect Detected");
            }
            else
            {
                nui = Runtime.Kinects[0];
                nui.Initialize(RuntimeOptions.UseColor | RuntimeOptions.UseSkeletalTracking);
                lastTime = DateTime.Now;
                nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_VideoFrameReady);
                nui.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(nui_SkeletonFrameReady);
                //nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady);
                nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                //nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution640x480, ImageType.DepthAndPlayerIndex);
            }
        }

        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            //KinectImage.Source = e.ImageFrame.ToBitmapSource();
            PlanarImage Image = e.ImageFrame.Image;
            ++totalFrames;
            {
                string bb1 = Convert.ToString(DateTime.Now);
                string filename = "C:\\Kinected\\Kinect1_Image " + bb1 + ".jpg";
                KinectImage.Source = BitmapSource.Create(
                    Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
                BitmapSource image = BitmapSource.Create(
                    Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
                if (PersonDetected == 1)
                {
                    image.Save(filename, Coding4Fun.Kinect.Wpf.ImageFormat.Jpeg);
                    SendNotificationEmail();
                    PersonDetected = 0;
                }
            }
        }
The skeletal viewer sample code:
using System;
using System.Net.Mail;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Microsoft.Research.Kinect.Nui;
using Coding4Fun.Kinect.WinForm;
using Coding4Fun.Kinect.Wpf;
using System.IO;
using System.ComponentModel;

namespace SkeletalViewer
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        public MainWindow()
        {
            InitializeComponent();
        }

        Runtime nui;
        // public int ElevationAngle { get; set; }
        // public static readonly int ElevationMaximum;
        // public static readonly int ElevationMinimum;
        //public static readonly int ElevationMedian;
        //int ElevationMedian = ElevationAngle == 0;
        int totalFrames = 0;
        int totalFrames2 = 0;
        int lastFrames = 0;
        int PersonDetected = 0;
        DateTime lastTime = DateTime.MaxValue;

        // We want to control how depth data gets converted into false-color data
        // for more intuitive visualization, so we keep 32-bit color frame buffer versions of
        // these, to be updated whenever we receive and process a 16-bit frame.
        const int RED_IDX = 2;
        const int GREEN_IDX = 1;
        const int BLUE_IDX = 0;
        byte[] depthFrame32 = new byte[320 * 240 * 4];

        Dictionary<JointID, Brush> jointColors = new Dictionary<JointID, Brush>() {
            {JointID.HipCenter, new SolidColorBrush(Color.FromRgb(169, 176, 155))},
            {JointID.Spine, new SolidColorBrush(Color.FromRgb(169, 176, 155))},
            {JointID.ShoulderCenter, new SolidColorBrush(Color.FromRgb(168, 230, 29))},
            {JointID.Head, new SolidColorBrush(Color.FromRgb(200, 0, 0))},
            {JointID.ShoulderLeft, new SolidColorBrush(Color.FromRgb(79, 84, 33))},
            {JointID.ElbowLeft, new SolidColorBrush(Color.FromRgb(84, 33, 42))},
            {JointID.WristLeft, new SolidColorBrush(Color.FromRgb(255, 126, 0))},
            {JointID.HandLeft, new SolidColorBrush(Color.FromRgb(215, 86, 0))},
            {JointID.ShoulderRight, new SolidColorBrush(Color.FromRgb(33, 79, 84))},
            {JointID.ElbowRight, new SolidColorBrush(Color.FromRgb(33, 33, 84))},
            {JointID.WristRight, new SolidColorBrush(Color.FromRgb(77, 109, 243))},
            {JointID.HandRight, new SolidColorBrush(Color.FromRgb(37, 69, 243))},
            {JointID.HipLeft, new SolidColorBrush(Color.FromRgb(77, 109, 243))},
            {JointID.KneeLeft, new SolidColorBrush(Color.FromRgb(69, 33, 84))},
            {JointID.AnkleLeft, new SolidColorBrush(Color.FromRgb(229, 170, 122))},
            {JointID.FootLeft, new SolidColorBrush(Color.FromRgb(255, 126, 0))},
            {JointID.HipRight, new SolidColorBrush(Color.FromRgb(181, 165, 213))},
            {JointID.KneeRight, new SolidColorBrush(Color.FromRgb(71, 222, 76))},
            {JointID.AnkleRight, new SolidColorBrush(Color.FromRgb(245, 228, 156))},
            {JointID.FootRight, new SolidColorBrush(Color.FromRgb(77, 109, 243))}
        };

        private void Window_Loaded(object sender, EventArgs e)
        {
            nui = new Runtime();
            try
            {
                nui.Initialize(RuntimeOptions.UseDepthAndPlayerIndex | RuntimeOptions.UseSkeletalTracking | RuntimeOptions.UseColor);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
                return;
            }
            try
            {
                nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Failed to open stream. Please make sure to specify a supported image type and resolution.");
                return;
            }
            if (Runtime.Kinects.Count < 2)
            {
                lastTime = DateTime.Now;
                nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady);
                nui.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(nui_SkeletonFrameReady);
                nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_ColorFrameReady2);
            }
            if (Runtime.Kinects.Count == 2)
            {
                lastTime = DateTime.Now;
                nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady);
                nui.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(nui_SkeletonFrameReady);
                nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_ColorFrameReady2);
                nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_ColorFrameReady3);
            }
        }

        // Converts a 16-bit grayscale depth frame which includes player indexes into a 32-bit frame
        // that displays different players in different colors
        byte[] convertDepthFrame(byte[] depthFrame16)
        {
            for (int i16 = 0, i32 = 0; i16 < depthFrame16.Length && i32 < depthFrame32.Length; i16 += 2, i32 += 4)
            {
                //
                // System.Windows.MessageBox.Show(depthFrame16);
                int player = depthFrame16[i16] & 0x07;
                int realDepth = (depthFrame16[i16 + 1] << 5) | (depthFrame16[i16] >> 3);
                // transform 13-bit depth information into an 8-bit intensity appropriate
                // for display (we disregard information in most significant bit)
                byte intensity = (byte)(255 - (255 * realDepth / 0x0fff));
                depthFrame32[i32 + RED_IDX] = 0;
                depthFrame32[i32 + GREEN_IDX] = 0;
                depthFrame32[i32 + BLUE_IDX] = 0;
                // choose different display colors based on player
                switch (player)
                {
                    case 0:
                        depthFrame32[i32 + RED_IDX] = (byte)(intensity / 2);
                        depthFrame32[i32 + GREEN_IDX] = (byte)(intensity / 2);
                        depthFrame32[i32 + BLUE_IDX] = (byte)(intensity / 2);
                        break;
                    case 1:
                        depthFrame32[i32 + RED_IDX] = intensity;
                        break;
                    case 2:
                        depthFrame32[i32 + GREEN_IDX] = intensity;
                        break;
                    case 3:
                        depthFrame32[i32 + RED_IDX] = (byte)(intensity / 4);
                        depthFrame32[i32 + GREEN_IDX] = (byte)(intensity);
                        depthFrame32[i32 + BLUE_IDX] = (byte)(intensity);
                        break;
                    case 4:
                        depthFrame32[i32 + RED_IDX] = (byte)(intensity);
                        depthFrame32[i32 + GREEN_IDX] = (byte)(intensity);
                        depthFrame32[i32 + BLUE_IDX] = (byte)(intensity / 4);
                        break;
                    case 5:
                        depthFrame32[i32 + RED_IDX] = (byte)(intensity);
                        depthFrame32[i32 + GREEN_IDX] = (byte)(intensity / 4);
                        depthFrame32[i32 + BLUE_IDX] = (byte)(intensity);
                        break;
                    case 6:
                        depthFrame32[i32 + RED_IDX] = (byte)(intensity / 2);
                        depthFrame32[i32 + GREEN_IDX] = (byte)(intensity / 2);
                        depthFrame32[i32 + BLUE_IDX] = (byte)(intensity);
                        break;
                    case 7:
                        depthFrame32[i32 + RED_IDX] = (byte)(255 - intensity);
                        depthFrame32[i32 + GREEN_IDX] = (byte)(255 - intensity);
                        depthFrame32[i32 + BLUE_IDX] = (byte)(255 - intensity);
                        break;
                }
                // file_dist.Close();
            }
            // Console.WriteLine(depthFrame32);
            return depthFrame32;
        }

        void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // 32-bit per pixel, RGBA image
            PlanarImage Image = e.ImageFrame.Image;
            ++totalFrames;
            string bb1 = Convert.ToString(totalFrames);
            // string file_name_3 = "C:\\Research\\Kinect\\Proposal\\Depth_Img" + bb1 + ".jpg"; xxx
            string file_name_4 = "C:\\temp\\Video_Img" + bb1 + ".jpg";
            video.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
            BitmapSource image4 = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
            // image4.Save(file_name_4, Coding4Fun.Kinect.Wpf.ImageFormat.Jpeg);
        }

        void nui_ColorFrameReady2(object sender, ImageFrameReadyEventArgs e)
        {
            // 32-bit per pixel, RGBA image xxx
            PlanarImage Image = e.ImageFrame.Image;
            //int deltaFrames = totalFrames - lastFrameWithMotion;
            //if (totalFrames2 <= stopFrameNumber & deltaFrames > 300)
            {
                ++totalFrames2;
                string bb1 = Convert.ToString(totalFrames2);
                // string file_name_3 = "C:\\Research\\Kinect\\Proposal\\Depth_Img" + bb1 + ".jpg"; xxx
                string file_name_4 = "C:\\Kinected\\Kinect1_Img" + bb1 + ".jpg";
                video.Source = BitmapSource.Create(
                    Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
                BitmapSource image4 = BitmapSource.Create(
                    Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
                if (PersonDetected == 1)
                {
                    if (totalFrames2 % 10 == 0)
                    {
                        image4.Save(file_name_4, Coding4Fun.Kinect.Wpf.ImageFormat.Jpeg);
                        SendNotificationEmail();
                        PersonDetected = 0;
                        // lastFrameWithMotion = totalFrames;
                        // topFrameNumber += 100;
                    }
                }
            }
        }
Any ideas why it isn't displaying the image in my application?
Best Answer
Following on from what benjgorman said: if you are using an Xbox Kinect, it will still work with the new SDK, which you can get here. The code feels a little different from the old programs, but you get used to it quickly. You can get the tutorials I used from here, or use the code I've provided below. Make sure you add a reference to Microsoft.Kinect, and also add the Microsoft.Samples.Kinect.WpfViewers project from Kinect Explorer (downloaded with the SDK) and reference it as well. I'd recommend working through the tutorials, as they explain the classes and what you need from the SDK. The code-behind below expects a couple of named elements in MainWindow.xaml; a sketch of that markup follows the code.
Code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Microsoft.Kinect;
using System.Net.Mail;
using Coding4Fun.Kinect.Wpf;
using Microsoft.Samples.Kinect.WpfViewers;
using System.Diagnostics;

namespace WpfApplication1
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        public MainWindow()
        {
            InitializeComponent();
        }

        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            kinectSensorChooser1.KinectSensorChanged += new DependencyPropertyChangedEventHandler(kinectSensorChooser1_KinectSensorChanged);
        }

        void kinectSensorChooser1_KinectSensorChanged(object sender, DependencyPropertyChangedEventArgs e)
        {
            KinectSensor oldSensor = (KinectSensor)e.OldValue;
            StopKinect(oldSensor);

            KinectSensor newSensor = (KinectSensor)e.NewValue;
            if (newSensor == null)
            {
                // The chooser can raise this event with a null sensor (e.g. when the Kinect is unplugged).
                return;
            }
            newSensor.ColorStream.Enable();
            newSensor.DepthStream.Enable();
            newSensor.SkeletonStream.Enable();
            newSensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(newSensor_AllFramesReady);

            try
            {
                newSensor.Start();
            }
            catch (System.IO.IOException)
            {
                kinectSensorChooser1.AppConflictOccurred();
            }
        }

        void StopKinect(KinectSensor sensor)
        {
            if (sensor != null)
            {
                sensor.Stop();
                sensor.AudioSource.Stop();
            }
        }

        private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            StopKinect(kinectSensorChooser1.Kinect);
        }

        void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                byte[] pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);

                int stride = colorFrame.Width * 4;
                video.Source = BitmapSource.Create(
                    colorFrame.Width, colorFrame.Height, 96, 96,
                    PixelFormats.Bgr32, null, pixels, stride);
            }
        }
    }
}
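The code-behind above expects two named elements in MainWindow.xaml: the KinectSensorChooser control from Microsoft.Samples.Kinect.WpfViewers (named kinectSensorChooser1) and an Image named video, with the window's Loaded and Closing events wired to the handlers shown. The original answer doesn't include that markup, so the XAML below is only a rough sketch of what it could look like; the xmlns mapping and the layout are assumed rather than taken from the answer.

<Window x:Class="WpfApplication1.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        xmlns:my="clr-namespace:Microsoft.Samples.Kinect.WpfViewers;assembly=Microsoft.Samples.Kinect.WpfViewers"
        Title="MainWindow" Height="480" Width="640"
        Loaded="Window_Loaded" Closing="Window_Closing">
    <Grid>
        <!-- Color stream output: newSensor_AllFramesReady assigns video.Source every frame -->
        <Image Name="video" Stretch="Uniform" />
        <!-- Sensor chooser control from the WpfViewers sample project (placement is an assumption) -->
        <my:KinectSensorChooser Name="kinectSensorChooser1"
                                HorizontalAlignment="Center"
                                VerticalAlignment="Top" />
    </Grid>
</Window>

If the element names in the XAML don't match the names referenced in the code-behind, the project won't even compile, so check Name="video" and Name="kinectSensorChooser1" against the handlers above.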
Hope this helps.
About "c# - Kinect not displaying color image": a similar question was found on Stack Overflow: https://stackoverflow.com/questions/10004397/