I would like some advice on a simple multithreaded system I am designing.
The idea: the application captures frames and displays them in a first ImageView. The captured frames are also processed (by MyHandDetectionThread) and the result is then displayed in a second ImageView.
My solution:
public class VideoManager {
    private volatile BufferLinkedList<InputFrame> mInputFrames;
    private volatile BufferLinkedList<ProcessedFrame> mProcessedFrames;
    private static VideoManager mVideoManagerInstance = new VideoManager();
    private Timer captureTimer;
    private MyVideoCaptureThread myVideoCaptureThread;
    private MyFrameDisplayThread myFrameDisplayThread;
    private MyHandDetectionThread myHandDetectionThread;
    private MyProcessedFrameDisplayThread myProcessedFrameDisplayThread;
    // Referenced further down but not declared in the original snippet;
    // the MyGestureRecogitionThread class itself only appears in the updated version below.
    private MyGestureRecogitionThread myGestureRecogitionThread;
    private boolean isActive;
    private enum ThreadMessages {
        PROCESS_INPUT_FRAME,
        NEW_INPUT_FRAME,
        NEW_PROCESSED_FRAME_ARRIVED,
        GET_NEW_FRAME
    }
    public static VideoManager getInstance() {
        if (mVideoManagerInstance == null) {
            mVideoManagerInstance = new VideoManager();
        }
        return mVideoManagerInstance;
    }

    // not visible constructor - for singleton purposes
    private VideoManager() {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
    }
    public void startDetectionAndRecognition(ImageView camIV, ImageView handIV) {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        captureTimer = new Timer();
        myVideoCaptureThread = new MyVideoCaptureThread();
        myFrameDisplayThread = new MyFrameDisplayThread(camIV);
        myHandDetectionThread = new MyHandDetectionThread();
        myProcessedFrameDisplayThread = new MyProcessedFrameDisplayThread(handIV);
        captureTimer.schedule(new TimerTask() {
            public void run() {
                if (myVideoCaptureThread != null && myVideoCaptureThread.threadMessages != null)
                    myVideoCaptureThread.threadMessages.offer(ThreadMessages.GET_NEW_FRAME);
            }
        }, 0, 1000 / Config.fps);
        myFrameDisplayThread.start();
        myVideoCaptureThread.start();
        myHandDetectionThread.start();
        myProcessedFrameDisplayThread.start();
    }
    public void stop() {
        captureTimer.cancel();
        myVideoCaptureThread.interrupt();
        myHandDetectionThread.interrupt();
        myFrameDisplayThread.interrupt();
        myGestureRecogitionThread.interrupt();
        mInputFrames.removeAll(mInputFrames);
        mProcessedFrames.removeAll(mProcessedFrames);
        isActive = false;
    }

    public boolean isActive() {
        return isActive;
    }
    ////////////////////////
    // Thread classes
    ////////////////////////
    private class MyVideoCaptureThread extends Thread {
        LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128);

        @Override
        public void run() {
            WebCamVideoCapture vc = new WebCamVideoCapture();
            while (!isInterrupted()) {
                if (threadMessages != null && threadMessages.poll() == ThreadMessages.GET_NEW_FRAME) {
                    Mat mat = vc.getNextMatFrame();
                    if (mat != null && mInputFrames != null) {
                        mInputFrames.offerFirst(new InputFrame(mat));
                        if (myFrameDisplayThread != null && myFrameDisplayThread.threadMessages != null)
                            myFrameDisplayThread.threadMessages.offer(ThreadMessages.NEW_INPUT_FRAME);
                        if (myHandDetectionThread != null && myHandDetectionThread.threadMessages != null)
                            myHandDetectionThread.threadMessages.offer(ThreadMessages.PROCESS_INPUT_FRAME);
                    }
                }
            }
            vc.close();
        }
    }
    private class MyFrameDisplayThread extends Thread {
        LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128);
        ImageView mCamImageView;
        long lastUpdatedCamImageViewMillis;
        long lastUpdatedHandImageViewMillis;

        public MyFrameDisplayThread(ImageView mImageView) {
            this.mCamImageView = mImageView;
        }

        private synchronized void updateImageViews() {
            if (threadMessages.poll() == ThreadMessages.NEW_INPUT_FRAME && mInputFrames != null && !mInputFrames.isEmpty() && mInputFrames.peek() != null && mInputFrames.peek().getFrame() != null) {
                if (Config.IS_DEBUG) System.out.println("Updating frame image view");
                mCamImageView.setImage(Utils.cvMatToImage(mInputFrames.peekFirst().getFrame()));
            }
        }

        @Override
        public void run() {
            while (!isInterrupted()) {
                updateImageViews();
            }
        }
    }
    private class MyHandDetectionThread extends Thread {
        LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128); // TODO if multiple threads, define it out of class
        HandDetector hd = new HandDetector();

        @Override
        public void run() {
            while (!isInterrupted()) {
                if (threadMessages.poll() == ThreadMessages.PROCESS_INPUT_FRAME && mInputFrames != null && mInputFrames.size() > 0 && mInputFrames.peek() != null) {
                    if (Config.IS_DEBUG) System.out.println("Detecting hand...");
                    mProcessedFrames.offerFirst(new ProcessedFrame(hd.detectHand(mInputFrames.peek()), null, null, null));
                    if (myGestureRecogitionThread != null && myGestureRecogitionThread.threadMessages != null)
                        myGestureRecogitionThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                    if (myFrameDisplayThread != null && myFrameDisplayThread.threadMessages != null)
                        myFrameDisplayThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                }
            }
        }
    }
    private class MyProcessedFrameDisplayThread extends Thread {
        LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128);
        ImageView mHandImageView;

        public MyProcessedFrameDisplayThread(ImageView mHandImageView) {
            this.mHandImageView = mHandImageView;
        }

        private synchronized void updateImageViews() {
            if (threadMessages.poll() == ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED && mProcessedFrames != null && !mProcessedFrames.isEmpty() && mProcessedFrames.peek() != null && mProcessedFrames.peek().getmHandMask() != null) {
                if (Config.IS_DEBUG) System.out.println("Updating hand image view");
                mHandImageView.setImage(Utils.cvMatToImage(mProcessedFrames.peekFirst().getmHandMask()));
            }
        }

        @Override
        public void run() {
            while (!isInterrupted())
                updateImageViews();
        }
    }
}
public class BufferLinkedList<E> extends LinkedList<E> {
    private int counter = 0;
    private int sizeLimit = 48;

    public BufferLinkedList(int sizeLimit) {
        this.sizeLimit = sizeLimit;
    }

    @Override
    public synchronized boolean offerFirst(E e) {
        while (size() > sizeLimit) {
            removeLast();
        }
        return super.offerFirst(e);
    }

    @Override
    public synchronized E peekFirst() {
        return super.peekFirst();
    }

    @Override
    public synchronized E peekLast() {
        return super.peekLast();
    }

    @Override
    public synchronized E pollFirst() {
        return super.pollFirst();
    }

    @Override
    public synchronized E pollLast() {
        return super.pollLast();
    }
}
My problem: the frames are not displayed smoothly. There are irregular pauses of 1-5 seconds between calls to the methods that update the ImageViews, even though the MyHandDetectionThread task runs very fast, and the message queues of the display threads grow quickly. Perhaps this is caused by some lock on the lists that store the frames?
My questions: is my solution correct? Is there a design pattern that describes this situation? Do you have any suggestions for improvement?
Edit: I added wait and notify to the thread loops and the result is satisfying. CPU usage is now around 30%, compared with around 80% before, and everything runs more stably and smoothly. However, I am not familiar with the wait and notify methods, so if you spot anything silly in my code, please let me know. (A short sketch of the standard wait/notify idiom is included after the code below for reference.)
public class VideoManager {
    private volatile BufferLinkedList<InputFrame> mInputFrames;
    private volatile BufferLinkedList<ProcessedFrame> mProcessedFrames;
    private static VideoManager mVideoManagerInstance = new VideoManager();
    private Timer captureTimer;
    private MyVideoCaptureThread myVideoCaptureThread;
    private MyFrameDisplayThread myFrameDisplayThread;
    private MyHandDetectionThread myHandDetectionThread;
    private MyGestureRecogitionThread myGestureRecogitionThread;
    private MySkinDisplayThread mySkinDisplayThread;
    private final static int THREAD_MESSAGES_LIMIT = 10000;
    private final static int TIMER_INTERVAL = 1000 / Config.fps;
    private final static int WAITING_TIMEOUT = 2000;

    private enum ThreadMessages {
        PROCESS_INPUT_FRAME,
        NEW_INPUT_FRAME,
        NEW_PROCESSED_FRAME_ARRIVED,
        GET_NEW_FRAME
    }
    public static VideoManager getInstance() {
        if (mVideoManagerInstance == null) {
            mVideoManagerInstance = new VideoManager();
        }
        return mVideoManagerInstance;
    }

    // not visible constructor - for singleton purposes
    private VideoManager() {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
    }
    public void startDetectionAndRecognition(ImageView camIV, ImageView handIV) {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        captureTimer = new Timer();
        myFrameDisplayThread = new MyFrameDisplayThread(camIV);
        myVideoCaptureThread = new MyVideoCaptureThread();
        myHandDetectionThread = new MyHandDetectionThread();
        myGestureRecogitionThread = new MyGestureRecogitionThread();
        mySkinDisplayThread = new MySkinDisplayThread(handIV);
        myVideoCaptureThread.start();
        captureTimer.schedule(new TimerTask() {
            public void run() {
                if (myVideoCaptureThread != null && myVideoCaptureThread.threadMessages != null) {
                    myVideoCaptureThread.threadMessages.offer(ThreadMessages.GET_NEW_FRAME);
                    System.out.println("Timer get frame request sent");
                    myVideoCaptureThread.wakeUp();
                }
            }
        }, 0, TIMER_INTERVAL);
        myFrameDisplayThread.start();
        mySkinDisplayThread.start();
        myHandDetectionThread.start();
        myGestureRecogitionThread.start();
    }
    public void stop() {
        captureTimer.cancel();
        myVideoCaptureThread.interrupt();
        myHandDetectionThread.interrupt();
        mySkinDisplayThread.interrupt();
        myFrameDisplayThread.interrupt();
        myGestureRecogitionThread.interrupt();
        mInputFrames.removeAll(mInputFrames);
        mProcessedFrames.removeAll(mProcessedFrames);
    }

    ////////////////////////
    // Lock class
    ////////////////////////
    private static final class Lock {}

    ////////////////////////
    // Thread classes
    ////////////////////////
    private class MyVideoCaptureThread extends Thread {
        volatile LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        WebCamVideoCapture vc = new WebCamVideoCapture();
        Lock lock = new Lock();

        @Override
        public void run() {
            synchronized (lock) {
                while (!isInterrupted()) {
                    if (threadMessages.poll() != ThreadMessages.GET_NEW_FRAME) {
                        try {
                            lock.wait(WAITING_TIMEOUT);
                            System.out.println("VideoCaptureThread waiting");
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    Mat mat = vc.getNextMatFrame();
                    System.out.println("getting next frame from webcam");
                    if (mat != null && mInputFrames != null) {
                        mInputFrames.offerFirst(new InputFrame(mat));
                        if (myHandDetectionThread != null && myHandDetectionThread.threadMessages != null) {
                            myHandDetectionThread.wakeUp();
                            myHandDetectionThread.threadMessages.offer(ThreadMessages.PROCESS_INPUT_FRAME);
                        }
                        if (myFrameDisplayThread != null && myFrameDisplayThread.threadMessages != null) {
                            myFrameDisplayThread.wakeUp();
                            myFrameDisplayThread.threadMessages.offer(ThreadMessages.NEW_INPUT_FRAME);
                        }
                    }
                }
            }
        }

        public void wakeUp() {
            synchronized (lock) {
                lock.notifyAll();
                System.out.println("Waking up VideoCapture");
            }
        }

        @Override
        public void interrupt() {
            vc.close();
            super.interrupt();
        }
    }
    private class MyFrameDisplayThread extends Thread {
        volatile LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        Lock lock = new Lock();
        ImageView mCamImageView;

        public MyFrameDisplayThread(ImageView mImageView) {
            this.mCamImageView = mImageView;
        }

        private void updateImageViews() {
            if (shouldUpdateCamImageView() && mInputFrames != null && !mInputFrames.isEmpty() && mInputFrames.peek() != null && mInputFrames.peek().getFrame() != null) {
                System.out.println("Updating frame image view");
                mCamImageView.setImage(Utils.cvMatToImage(mInputFrames.peekFirst().getFrame()));
                threadMessages.poll();
            }
        }

        @Override
        public void run() {
            synchronized (lock) {
                while (!isInterrupted()) {
                    if (threadMessages.peek() != ThreadMessages.NEW_INPUT_FRAME) {
                        try {
                            lock.wait(WAITING_TIMEOUT);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    updateImageViews();
                }
            }
        }

        public void wakeUp() {
            synchronized (lock) {
                lock.notifyAll();
                System.out.println("Waking up FrameDisplay");
            }
        }

        private boolean shouldUpdateCamImageView() {
            if (!Config.CAPTURE_PREVIEW_MODE) return false;
            return true;
        }
    }
    private class MySkinDisplayThread extends Thread {
        volatile LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        ImageView mHandImageView;
        Object lock = new Lock();

        public MySkinDisplayThread(ImageView mHandImageView) {
            this.mHandImageView = mHandImageView;
        }

        private synchronized void updateHandImageView() {
            if (shouldUpdateHandImageView() && mProcessedFrames != null && !mProcessedFrames.isEmpty() && mProcessedFrames.peek() != null && mProcessedFrames.peek().getmHandMask() != null) {
                System.out.println("Updating skin image view");
                mHandImageView.setImage(Utils.cvMatToImage(mProcessedFrames.peekFirst().getmHandMask()));
                threadMessages.poll();
            }
        }

        @Override
        public void run() {
            synchronized (lock) {
                while (!isInterrupted()) {
                    if (threadMessages.peek() != ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED) {
                        try {
                            lock.wait(WAITING_TIMEOUT);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    updateHandImageView();
                }
            }
        }

        private boolean shouldUpdateHandImageView() {
            if (!Config.SKIN_MASK_PREVIEW_MODE) return false;
            return true;
            // long now = System.currentTimeMillis();
            // boolean should = now - lastUpdatedHandImageViewMillis > TIMER_INTERVAL;
            // lastUpdatedHandImageViewMillis = now;
            // return should;
        }

        public void wakeUp() {
            synchronized (lock) {
                lock.notifyAll();
                System.out.println("Waking up SkinDisplay");
            }
        }
    }
    private class MyHandDetectionThread extends Thread {
        volatile LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT); // TODO if multiple threads, define it out of class
        HandDetector hd = new HandDetector();
        Object lock = new Lock();

        @Override
        public void run() {
            synchronized (lock) {
                while (!isInterrupted()) {
                    if (threadMessages.poll() != ThreadMessages.PROCESS_INPUT_FRAME) {
                        try {
                            lock.wait(WAITING_TIMEOUT);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    if (mInputFrames != null /*&& mInputFrames.size() > 0 && mInputFrames.peek() != null && !mInputFrames.peek().getIsProcessed()*/) {
                        System.out.println("Detecting hand...");
                        // Mat handMask = hd.detectHand(mInputFrames.peek());
                        // int[][] fingerCoordinates = new int[5][2];
                        // int[] convDefects = new int[5];
                        // int[] handCenterCoordinates = new int[2];
                        mProcessedFrames.offerFirst(new ProcessedFrame(hd.detectHand(mInputFrames.peek()), null, null, null));
                        if (myGestureRecogitionThread != null && myGestureRecogitionThread.threadMessages != null) {
                            myGestureRecogitionThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                            myGestureRecogitionThread.wakeUp();
                        }
                        if (mySkinDisplayThread != null && mySkinDisplayThread.threadMessages != null) {
                            mySkinDisplayThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                            mySkinDisplayThread.wakeUp();
                        }
                    }
                }
            }
        }

        public void wakeUp() {
            synchronized (lock) {
                lock.notifyAll();
                System.out.println("Waking up hand Detection");
            }
        }
    }
    private class MyGestureRecogitionThread extends Thread {
        volatile LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        GestureRecognizer r = new GestureRecognizer();
        Lock lock = new Lock();

        @Override
        public void run() {
            synchronized (lock) {
                while (!isInterrupted()) {
                    if (threadMessages.poll() != ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED) {
                        try {
                            lock.wait(WAITING_TIMEOUT);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    } else {
                        r.lookForGestures(mProcessedFrames);
                    }
                }
            }
        }

        public void wakeUp() {
            synchronized (lock) {
                lock.notifyAll();
                System.out.println("Waking up GestureRecognition");
            }
        }
    }
}
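As a reference for the wait/notify usage mentioned in the edit above, here is a minimal sketch of the standard guarded-block idiom (the GuardedSlot class and its field names are made up for illustration and are not part of the code above): the consumer re-checks its condition in a loop around wait(), which also protects against spurious wake-ups, and the producer changes the condition under the same lock before calling notifyAll().

// Minimal guarded-block sketch with illustrative names.
class GuardedSlot<T> {
    private final Object lock = new Object();
    private T value;

    // Consumer side: sleep while there is nothing to take, re-checking the
    // condition after every wake-up.
    T take() throws InterruptedException {
        synchronized (lock) {
            while (value == null) {
                lock.wait();
            }
            T result = value;
            value = null;
            return result;
        }
    }

    // Producer side: change the condition first, then notify under the same lock.
    void put(T newValue) {
        synchronized (lock) {
            value = newValue;
            lock.notifyAll();
        }
    }
}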
Best answer
Both threads appear to use polling in their run() methods; that is, they loop continuously, checking a boolean condition. This can be bad for CPU usage, because a single thread can hog the CPU without giving the other threads any cycles; it can end up occupying the CPU even though it is doing nothing useful, simply because some boolean condition is not satisfied.
You should use an asynchronous approach to communicate with the threads: instead of a polling mechanism, you should put the threads to sleep when they have no processing to do and wake them up when they are needed. This allows the threads to yield the CPU, meaning they are willing to give up their active context so that other threads can execute.
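As a concrete illustration of that advice, here is a minimal sketch (the FrameConsumer name and the Runnable payload are made up for illustration) that uses the blocking take() of the same LinkedBlockingQueue class the code above already uses: the consumer sleeps inside take() while its queue is empty and only runs when a message actually arrives, so there is no busy-wait loop burning CPU cycles.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Illustrative consumer thread: take() blocks until a message is available.
class FrameConsumer extends Thread {
    private final BlockingQueue<Runnable> messages = new LinkedBlockingQueue<>(128);

    // Producer side: enqueue a unit of work; a blocked consumer wakes up automatically.
    void submit(Runnable work) {
        messages.offer(work);
    }

    @Override
    public void run() {
        try {
            while (!isInterrupted()) {
                Runnable work = messages.take(); // sleeps while the queue is empty
                work.run();                      // e.g. process or display a frame
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();  // interrupted while waiting: exit cleanly
        }
    }
}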
The original question, about sharing an object between multiple threads in Java and the design pattern for it, can be found on Stack Overflow: https://stackoverflow.com/questions/30482165/