As we know, in ARCore we can place a 3D object when the user taps on a detected horizontal plane. Instead of a 3D object, I need to show a video when the user taps a plane surface. It should appear the same way the 3D object does, with the video rendered in a preview in place of the 3D object.
The ARCore sample currently uses a RelativeLayout with a SurfaceView, so to display the video I used the SurfaceView and attached a MediaPlayer to it.
public void onsurfacecreatedvideo() {
    mProgram = createProgram(mVertexShader, mFragmentShader);
    if (mProgram == 0) {
        return;
    }
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aTextureCoord");
    }
    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
        throw new RuntimeException("Could not get uniform location for uMVPMatrix");
    }
    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
        throw new RuntimeException("Could not get uniform location for uSTMatrix");
    }

    // Create the external OES texture that will receive the video frames.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    mTextureID = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);

    /*
     * Create the SurfaceTexture that will feed this textureID,
     * and pass it to the MediaPlayer.
     */
    mSurface = new SurfaceTexture(mTextureID);
    mSurface.setOnFrameAvailableListener(this);
    Surface surface = new Surface(mSurface);
    mMediaPlayer.setSurface(surface);
    mMediaPlayer.setScreenOnWhilePlaying(true);
    surface.release();

    mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            Log.i(TAG, "ONPREPArED abhilash");
            setVideoSize();
            mp.start();
        }
    });
    try {
        mMediaPlayer.prepare();
    } catch (IOException t) {
        Log.e(TAG, "media player prepare failed");
    }
    synchronized (this) {
        updateSurface = false;
    }
    mMediaPlayer.start();
}
void ondrawvideo() {
    synchronized (this) {
        if (updateSurface) {
            // Latch the latest video frame into the OES texture.
            mSurface.updateTexImage();
            mSurface.getTransformMatrix(mSTMatrix);
            updateSurface = false;
        }
    }

    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    GLES20.glFinish();
}
// Excerpt from the end of onDrawFrame() in the sample activity:

        // Visualize planes.
        mPlaneRenderer.drawPlanes(mSession.getAllPlanes(), frame.getPose(), projmtx);

        // Visualize anchors created by touch.
        float scaleFactor = 1.0f;
        for (PlaneAttachment planeAttachment : mTouches) {
            ondrawvideo();
            if (!planeAttachment.isTracking()) {
                continue;
            }
            // Get the current combined pose of an Anchor and Plane in world space. The Anchor
            // and Plane poses are updated during calls to session.update() as ARCore refines
            // its estimate of the world.
            planeAttachment.getPose().toMatrix(mAnchorMatrix, 0);

            // Update and draw the model and its shadow.
            mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObject.draw(viewmtx, projmtx, lightIntensity);
            mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
        }
    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
Currently my output looks like this. When I tap on the plane surfaces, it shows the following:
As you can see in the image below, this is what I need to achieve. I have simply marked in this particular bugdroid image where the video should play; the video should not take up the full screen, it should be displayed only at roughly the size of the bugdroid image:
Best Answer
I did this by creating a new class called MovieClipRenderer, modeled on the ObjectRenderer class in the HelloAR sample. It creates a quad geometry and renders the texture from the media player onto that quad. The quad is anchored to a plane, so it does not move as the user looks around.
For testing, I used this movie: https://www.videvo.net/video/chicken-on-green-screen/3435/ and added it to src/main/assets.
Then I added a member variable for the renderer to HelloArActivity:
private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer();
In onSurfaceCreated(), I initialized the renderer along with the other renderers:
mMovieClipRenderer.createOnGlThread();
To try it out, I made the first hit on a plane create the movie anchor, by changing the hit-test code slightly:
if (mMovieAnchor == null) {
    mMovieAnchor = hit.createAnchor();
} else {
    mAnchors.add(hit.createAnchor());
}
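For context, this if/else sits inside the hit-test loop that the sample already runs when a queued tap is processed in onDrawFrame(). A minimal sketch of that surrounding loop, assuming an ARCore 1.x-style HelloArActivity (field names such as mQueuedSingleTaps and mAnchors follow the sample and may differ in your copy):
// Sketch only: how the movie-anchor change fits into the sample's tap handling.
// Assumes the ARCore 1.x HelloAR structure; adjust names to your version.
MotionEvent tap = mQueuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
    for (HitResult hit : frame.hitTest(tap)) {
        Trackable trackable = hit.getTrackable();
        // Only accept hits that land inside a detected plane's polygon.
        if (trackable instanceof Plane
                && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) {
            if (mMovieAnchor == null) {
                // The first tap on a plane anchors the movie quad.
                mMovieAnchor = hit.createAnchor();
            } else {
                // Subsequent taps keep creating the usual object anchors.
                mAnchors.add(hit.createAnchor());
            }
            break;
        }
    }
}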
Then at the bottom of onDrawFrame(), I check for the anchor and start the playback:
if (mMovieAnchor != null) {
    // Draw chickens!
    if (!mMovieClipRenderer.isStarted()) {
        mMovieClipRenderer.play("chicken.mp4", this);
    }
    mMovieAnchor.getPose().toMatrix(mAnchorMatrix, 0);
    mMovieClipRenderer.update(mAnchorMatrix, 0.25f);
    mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx);
}
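The viewmtx and projmtx arguments here are the same view and projection matrices the sample already computes earlier in onDrawFrame(). As a reminder of where they come from, a hedged sketch assuming the ARCore 1.x Camera API and the sample's usual 0.1/100 near/far planes:
// Sketch: obtaining the matrices used by mMovieClipRenderer.draw() above.
Camera camera = frame.getCamera();

float[] projmtx = new float[16];
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

float[] viewmtx = new float[16];
camera.getViewMatrix(viewmtx, 0);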
The renderer class is long, but it is standard GLES code: it creates the OES texture and initializes the video player, builds the vertices for the quad, and loads the fragment shader that draws the OES texture.
/**
* Renders a movie clip with a green screen aware shader.
* <p>
* Copyright 2018 Google LLC
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class MovieClipRenderer implements
SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = MovieClipRenderer.class.getSimpleName();
// Quad geometry
private static final int COORDS_PER_VERTEX = 3;
private static final int TEXCOORDS_PER_VERTEX = 2;
private static final int FLOAT_SIZE = 4;
private static final float[] QUAD_COORDS = new float[]{
-1.0f, -1.0f, 0.0f,
-1.0f, +1.0f, 0.0f,
+1.0f, -1.0f, 0.0f,
+1.0f, +1.0f, 0.0f,
};
private static final float[] QUAD_TEXCOORDS = new float[]{
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f,
};
// Shader for a flat quad.
private static final String VERTEX_SHADER =
"uniform mat4 u_ModelViewProjection;\n\n" +
"attribute vec4 a_Position;\n" +
"attribute vec2 a_TexCoord;\n\n" +
"varying vec2 v_TexCoord;\n\n" +
"void main() {\n" +
" gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
" v_TexCoord = a_TexCoord;\n" +
"}";
// The fragment shader samples the video texture, blending to
// transparent for the green screen
// color. The color was determined by sampling a screenshot
// of the video in an image editor.
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"\n" +
"precision mediump float;\n" +
"varying vec2 v_TexCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"\n" +
"void main() {\n" +
" //TODO make this a uniform variable - " +
" but this is the color of the background. 17ad2b\n" +
" vec3 keying_color = vec3(23.0/255.0, 173.0/255.0, 43.0/255.0);\n" +
" float thresh = 0.4; // 0 - 1.732\n" +
" float slope = 0.2;\n" +
" vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
" float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
" float edge0 = thresh * (1.0 - slope);\n" +
" float alpha = smoothstep(edge0, thresh, d);\n" +
" gl_FragColor = vec4(input_color, alpha);\n" +
"}";
// Geometry data in GLES friendly data structure.
private FloatBuffer mQuadVertices;
private FloatBuffer mQuadTexCoord;
// Shader program id and parameters.
private int mQuadProgram;
private int mQuadPositionParam;
private int mQuadTexCoordParam;
private int mModelViewProjectionUniform;
private int mTextureId = -1;
// Matrix for the location and perspective of the quad.
private float[] mModelMatrix = new float[16];
// Media player, texture and other bookkeeping.
private MediaPlayer player;
private SurfaceTexture videoTexture;
private boolean frameAvailable = false;
private boolean started = false;
private boolean done;
private boolean prepared;
private static Handler handler;
// Lock used for waiting if the player was not yet created.
private final Object lock = new Object();
/**
* Update the model matrix based on the location and scale to draw the quad.
*/
public void update(float[] modelMatrix, float scaleFactor) {
float[] scaleMatrix = new float[16];
Matrix.setIdentityM(scaleMatrix, 0);
scaleMatrix[0] = scaleFactor;
scaleMatrix[5] = scaleFactor;
scaleMatrix[10] = scaleFactor;
Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
}
/**
* Initialize the GLES objects.
* This is called from the GL render thread to make sure
* it has access to the EGLContext.
*/
public void createOnGlThread() {
// 1 texture to hold the video frame.
int textures[] = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureId = textures[0];
int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
GLES20.glBindTexture(mTextureTarget, mTextureId);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_NEAREST);
videoTexture = new SurfaceTexture(mTextureId);
videoTexture.setOnFrameAvailableListener(this);
// Make a quad to hold the movie
ByteBuffer bbVertices = ByteBuffer.allocateDirect(
QUAD_COORDS.length * FLOAT_SIZE);
bbVertices.order(ByteOrder.nativeOrder());
mQuadVertices = bbVertices.asFloatBuffer();
mQuadVertices.put(QUAD_COORDS);
mQuadVertices.position(0);
int numVertices = 4;
ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
bbTexCoords.order(ByteOrder.nativeOrder());
mQuadTexCoord = bbTexCoords.asFloatBuffer();
mQuadTexCoord.put(QUAD_TEXCOORDS);
mQuadTexCoord.position(0);
int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
int fragmentShader = loadGLShader(TAG,
GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
mQuadProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mQuadProgram, vertexShader);
GLES20.glAttachShader(mQuadProgram, fragmentShader);
GLES20.glLinkProgram(mQuadProgram);
GLES20.glUseProgram(mQuadProgram);
ShaderUtil.checkGLError(TAG, "Program creation");
mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
mModelViewProjectionUniform = GLES20.glGetUniformLocation(
mQuadProgram, "u_ModelViewProjection");
ShaderUtil.checkGLError(TAG, "Program parameters");
Matrix.setIdentityM(mModelMatrix, 0);
initializeMediaPlayer();
}
public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
if (done || !prepared) {
return;
}
synchronized (this) {
if (frameAvailable) {
videoTexture.updateTexImage();
frameAvailable = false;
}
}
float[] modelMatrix = new float[16];
pose.toMatrix(modelMatrix, 0);
float[] modelView = new float[16];
float[] modelViewProjection = new float[16];
Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);
ShaderUtil.checkGLError(TAG, "Before draw");
GLES20.glEnable(GL10.GL_BLEND);
GLES20.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
GLES20.glUseProgram(mQuadProgram);
// Set the vertex positions.
GLES20.glVertexAttribPointer(
mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, 0, mQuadVertices);
// Set the texture coordinates.
GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
GLES20.GL_FLOAT, false, 0, mQuadTexCoord);
// Enable vertex arrays
GLES20.glEnableVertexAttribArray(mQuadPositionParam);
GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
modelViewProjection, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Disable vertex arrays
GLES20.glDisableVertexAttribArray(mQuadPositionParam);
GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);
ShaderUtil.checkGLError(TAG, "Draw");
}
private void initializeMediaPlayer() {
if (handler == null)
handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
synchronized (lock) {
player = new MediaPlayer();
lock.notify();
}
}
});
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (this) {
frameAvailable = true;
}
}
public boolean play(final String filename, Context context) {
// Wait for the player to be created.
if (player == null) {
synchronized (lock) {
while (player == null) {
try {
lock.wait();
} catch (InterruptedException e) {
return false;
}
}
}
}
player.reset();
done = false;
player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
prepared = true;
mp.start();
}
});
player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
done = true;
Log.e("VideoPlayer",
String.format("Error occurred: %d, %d\n", what, extra));
return false;
}
});
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
done = true;
}
});
player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
@Override
public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
return false;
}
});
try {
AssetManager assets = context.getAssets();
AssetFileDescriptor descriptor = assets.openFd(filename);
player.setDataSource(descriptor.getFileDescriptor(),
descriptor.getStartOffset(),
descriptor.getLength());
player.setSurface(new Surface(videoTexture));
player.prepareAsync();
synchronized (this) {
started = true;
}
} catch (IOException e) {
Log.e(TAG, "Exception preparing movie", e);
return false;
}
return true;
}
public synchronized boolean isStarted() {
return started;
}
static int loadGLShader(String tag, int type, String code) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}
return shader;
}
}
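A short note on the keying in the fragment shader: keying_color is the normalized RGB of the clip's green-screen background (hex 17ad2b, sampled from a screenshot), and thresh/slope control how sharply pixels near that color fade to transparent. If you use a different clip, only the background color needs to change; the helper below is a hypothetical illustration (not part of the original answer) of converting a sampled hex value into the three floats used by the shader:
// Hypothetical helper: convert a sampled hex color such as "17ad2b"
// into the normalized RGB floats used for keying_color in the shader.
static float[] hexToKeyingColor(String hex) {
    int rgb = Integer.parseInt(hex, 16);
    return new float[] {
            ((rgb >> 16) & 0xFF) / 255.0f,  // 0x17 -> 23/255
            ((rgb >> 8) & 0xFF) / 255.0f,   // 0xad -> 173/255
            (rgb & 0xFF) / 255.0f           // 0x2b -> 43/255
    };
}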
Regarding "android - Need to play video in ARCore", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/47657403/