gpt4 book ai didi

c# - 从 VideoPlayer 获取当前帧纹理

转载 作者:行者123 更新时间:2023-12-05 00:54:22 27 4
gpt4 key购买 nike

在这篇文章中说明了Using new Unity VideoPlayer and VideoClip API to play video可以“根据需要检索每一帧的纹理”

请问将当前帧作为 Texture2D 的正确方法是什么?

编辑:

回答后我这样做了,但它不起作用:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

/// <summary>
/// Plays a VideoClip on this object's Renderer and, for every decoded frame,
/// averages the frame's pixels and applies that color to <see cref="lSource"/>.
/// </summary>
public class AverageColorFromTexture : MonoBehaviour {

    public VideoClip videoToPlay;   // clip assigned in the Inspector
    public Light lSource;           // light tinted with the frame's average color

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    // Reusable CPU-side frame buffer. Allocated once and resized on demand so
    // we do not create a new Texture2D (and garbage) for every video frame.
    private Texture2D videoFrame;

    void Start()
    {
        videoFrame = new Texture2D(2, 2);
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    /// <summary>
    /// Sets up the VideoPlayer + AudioSource, waits for preparation, then
    /// plays the clip while logging the playback time each frame.
    /// </summary>
    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        // Disable Play on Awake for both Video and Audio.
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        // Set video to play, then prepare to prevent buffering.
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        // Wait until the video is prepared.
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        // Assign the texture from the video to the material texture.
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        // Enable the new-frame event (costly — only enable when each frame is needed).
        videoPlayer.sendFrameReadyEvents = true;

        // Subscribe to the new-frame event.
        videoPlayer.frameReady += OnNewFrame;

        // Play video.
        videoPlayer.Play();

        // Play sound.
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        // BUG FIX: VideoPlayer.texture is a RenderTexture at runtime, so the
        // original direct cast to Texture2D threw InvalidCastException.
        // Copy the GPU frame into a Texture2D via ReadPixels instead.
        RenderTexture renderTexture = source.texture as RenderTexture;
        if (renderTexture == null)
        {
            return; // no readable frame yet
        }

        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }

        // Preserve whatever RenderTexture was active so other rendering
        // code is not disturbed by the readback.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = previous;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    void OnDestroy()
    {
        // Unsubscribe so the player does not call back into a destroyed component.
        if (videoPlayer != null)
        {
            videoPlayer.frameReady -= OnNewFrame;
        }
    }

    /// <summary>
    /// Returns the per-channel mean of all pixels in <paramref name="tex"/>.
    /// </summary>
    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;
        if (total == 0)
        {
            return new Color32(0, 0, 0, 255); // empty texture: opaque black
        }

        float r = 0;
        float g = 0;
        float b = 0;

        for (int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }
        // BUG FIX: alpha was 0 (fully transparent); return an opaque color.
        return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 255);
    }
}

最佳答案

您可以通过三个步骤正确地做到这一点:

  1. 将 VideoPlayer.sendFrameReadyEvents 设置为 true,以启用“新帧就绪”事件

  2. 订阅VideoPlayer.frameReady事件

  3. 您注册到 VideoPlayer.frameReady 事件的函数会在每个新帧可用时被调用。只需在该函数中访问参数传入的 VideoPlayer,将其 VideoPlayer.texture 强制转换为 Texture2D 即可获取视频帧。

就是这样。


在代码中:

在 videoPlayer.Play() 之前添加这些:

// Enable new frame Event
// (per Unity docs this has a performance cost — only enable it when you
// genuinely need a callback for every decoded frame)
videoPlayer.sendFrameReadyEvents = true;

// Subscribe to the new frame Event
videoPlayer.frameReady += OnNewFrame;

这是您的 OnNewFrame 函数签名。

// Handler invoked by VideoPlayer whenever a new frame becomes available.
// NOTE(review): this direct cast assumes source.texture is a Texture2D;
// at runtime it is typically a RenderTexture, so the cast can throw —
// the ReadPixels-based conversion shown later in this post avoids that.
void OnNewFrame(VideoPlayer source, long frameIdx)
{
Texture2D videoFrame = (Texture2D)source.texture;
// Do anything with the videoFrame Texture.
}

值得注意的是,启用该事件的成本很高。在执行此操作之前,请确保您需要每一帧。

Texture2D videoFrame = (Texture2D)source.texture;Texture2D videoFrame = source.texture as Texture2D; 都失败了。

我把 Debug.Log(source.texture); 放在 OnNewFrame 函数里面得到:

TempBuffer 294 320x240 (UnityEngine.RenderTexture)

所以,看起来 VideoPlayer.texture 属性在运行时实际返回的是 RenderTexture 类型,而不是可以直接强制转换的 Texture2D 类型。

我们必须将 RenderTexture 转换为 Texture2D

void Start()
{
    // Allocate the reusable frame buffer once; it is resized to match the
    // video's dimensions the first time a frame is read back.
    videoFrame = new Texture2D(2, 2);
    // BUG FIX: removed a stray ']' that made this snippet a syntax error,
    // and turned the '...' placeholder into a comment so the snippet parses.
    // ... rest of the original Start() initialization ...
}

// Reused CPU-side copy of the current video frame (created in Start).
Texture2D videoFrame;

// frameReady handler: copies the player's RenderTexture into videoFrame
// via ReadPixels, then drives the light from the frame's average color.
void OnNewFrame(VideoPlayer source, long frameIdx)
{
    var rt = source.texture as RenderTexture;

    // Keep the buffer's dimensions in sync with the incoming frame.
    bool sizeChanged = videoFrame.width != rt.width
                    || videoFrame.height != rt.height;
    if (sizeChanged)
    {
        videoFrame.Resize(rt.width, rt.height);
    }

    // Read the GPU frame back to the CPU-side texture.
    RenderTexture.active = rt;
    var frameRect = new Rect(0, 0, rt.width, rt.height);
    videoFrame.ReadPixels(frameRect, 0, 0);
    videoFrame.Apply();
    RenderTexture.active = null;

    targetColor = CalculateAverageColorFromTexture(videoFrame);
    lSource.color = targetColor;
}

您问题的完整代码:

/// <summary>
/// Plays a VideoClip on this object's Renderer; each decoded frame is read
/// back to a Texture2D, averaged, and the result tints <see cref="lSource"/>.
/// </summary>
public class AverageColorFromTexture : MonoBehaviour
{
    public VideoClip videoToPlay;   // clip assigned in the Inspector
    public Light lSource;           // light tinted with the frame's average color

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    // Reusable CPU-side frame buffer; allocated once in Start and resized on
    // demand so no garbage is produced per frame.
    Texture2D videoFrame;

    void Start()
    {
        videoFrame = new Texture2D(2, 2);
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    /// <summary>
    /// Sets up the VideoPlayer + AudioSource, waits for preparation, then
    /// plays the clip while logging playback time each frame.
    /// </summary>
    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        // Disable Play on Awake for both Video and Audio.
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        // Set video to play, then prepare to prevent buffering.
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        // Wait until the video is prepared.
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        // Assign the texture from the video to the material texture.
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        // Enable the new-frame event (costly — only enable when each frame is needed).
        videoPlayer.sendFrameReadyEvents = true;

        // Subscribe to the new-frame event.
        videoPlayer.frameReady += OnNewFrame;

        // Play video.
        videoPlayer.Play();

        // Play sound.
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        RenderTexture renderTexture = source.texture as RenderTexture;
        // BUG FIX: guard the 'as' cast — a null here would have thrown NRE below.
        if (renderTexture == null)
        {
            return;
        }

        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }

        // BUG FIX: restore the previously active RenderTexture instead of
        // forcing it to null, so other in-flight rendering is not disturbed.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = previous;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    void OnDestroy()
    {
        // BUG FIX: unsubscribe so the player does not call back into a
        // destroyed component.
        if (videoPlayer != null)
        {
            videoPlayer.frameReady -= OnNewFrame;
        }
    }

    /// <summary>
    /// Returns the per-channel mean of all pixels in <paramref name="tex"/>.
    /// </summary>
    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;
        if (total == 0)
        {
            return new Color32(0, 0, 0, 255); // empty texture: opaque black
        }

        float r = 0;
        float g = 0;
        float b = 0;

        for (int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }
        // BUG FIX: alpha was 0 (fully transparent); return an opaque color.
        return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 255);
    }
}

关于c# - 从 VideoPlayer 获取当前帧纹理,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/42747285/

27 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com