- c - 在位数组中找到第一个零
- linux - Unix 显示有关匹配两种模式之一的文件的信息
- 正则表达式替换多个文件
- linux - 隐藏来自 xtrace 的命令
我最近安装了 OpenCV-2.4.6,并写了一段鸟瞰图变换的代码(还不是最终版本)。这段代码在早期版本的 OpenCV(2.4.3)中可以正常编译和运行。
现在我在运行时遇到错误:
**加载共享库时出错:libopencv_core.so.2.4:无法打开共享对象文件:没有这样的文件或目录**(没有任何编译错误,问题只在运行时出现)。
代码是:
// This code will take undistorted images as input and give the bird's eye view using them
// First we need to calculate the homography matrix
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define heightBirdEyeView 800
#define widthBirdEyeView 800 //earlier was 300 300
using namespace cv;
using namespace std;
//global/
//camera parameters input
//resolution values
/*
float resolution_x=50, resolution_y=50;
//camera height and tilt
float height_camera = 1.25;
float tilt_camera=12;
//focal length in x and y
float focal_length_x = 354.05700;
float focal_length_y = 353.65297;*/
//intensity finding function
// find the intensity for the transformed point
/*float findintensity(float corrected_x,float corrected_y)
{
int intensity;
if((corrected_x>=1&&corrected_x<=widthBirdEyeView)&&(corrected_y>=1&&corrected_y<=heightBirdEyeView))
intensity=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))+(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))+(corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))+(corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
else
intensity=0;
return intensity;
}*/
int main(int argc, char** argv)
{
//loading the undistorted image
Mat undistor_img=imread(argv[1], 1);
namedWindow("undistorted image");
int intensity_change=0;
Mat undistor_img_hsv;
cvtColor( undistor_img,undistor_img_hsv, CV_RGB2HSV);
imshow("undistorted image", undistor_img);
imshow("hsv image",undistor_img_hsv);
cout<<"size="<<undistor_img_hsv.rows<<" "<<undistor_img_hsv.cols<<endl; // for obs7.jpg the columns are 220 and the rows are 165
//cout<<"undistorted image="<<undistor_img_hsv<<endl;
// erode the image
Mat eroded;
erode(undistor_img_hsv,eroded,Mat());
imshow("eroded",eroded);
//dilate the image
Mat dilated;
dilate(eroded,dilated,Mat());
imshow("dilated",dilated);
Mat output;
resize(undistor_img_hsv,output,cv::Size(heightBirdEyeView,widthBirdEyeView));// this will be having the orthogonal transform
int i,j;
for(i=0;i<=heightBirdEyeView;i++)
{
for(j=0;j<widthBirdEyeView;j++)
output.at<uchar>(i,j)=0;
}
imshow("output",output);
//should have size as that of height and width of the bird eye view
//camera parameters input
//resolution values
float resolution_x=50, resolution_y=50;
//camera height and tilt
float height_camera = 1.25;
float tilt_camera=12;
//focal length in x and y
float focal_length_x = 354.05700;
float focal_length_y = 353.65297;
//generate transformation matrix
float H1[3][3]={resolution_x,0,widthBirdEyeView/2+1,
0,-1*resolution_y,heightBirdEyeView,
0,0,1};
Mat transformation_matrix(3,3,CV_32FC1,H1);
cout<<"transformation matrix="<<endl<<transformation_matrix<<endl<<endl;
//generate top view matrix
float H2[3][3]={height_camera/focal_length_x,0,0,
0,0,height_camera,
0,cos(tilt_camera)/focal_length_y,sin(tilt_camera)};
Mat topview_matrix(3,3,CV_32FC1,H2);
cout<<"topview matrix="<<endl<<topview_matrix<<endl<<endl;
//generate scale matrix
float H3[3][3]={1,0,undistor_img.rows,
0,1,undistor_img.rows,
0,0,1};
Mat scale_matrix(3,3,CV_32FC1,H3);
cout<<"scale matrix="<<endl<<scale_matrix<<endl<<endl;
//generate homography matrix
Mat homography_matrix=transformation_matrix*topview_matrix/scale_matrix;
cout<<"homography matrix="<<endl<<homography_matrix<<endl<<endl;
Mat transpose_homography_matrix =homography_matrix;
Mat temp_matrix =homography_matrix;
//cout<<"temp_matrix=" <<endl<<temp_matrix<<endl<<endl;
//transpose of homography matrix
for (int i=0;i<3;i++)
{
for (int j=0;j<3;j++)
transpose_homography_matrix.at<float>(i,j)=temp_matrix.at<float>(j,i);
}
//cout<<"transpose homography matrix="<<endl<<transpose_homography_matrix<<endl<<endl;
float bev_zero[heightBirdEyeView][widthBirdEyeView]; //bev_zero will be the new image matrix
for(int i=0;i<heightBirdEyeView;i++)
{
for(int j=0;j<widthBirdEyeView;j++)
bev_zero[i][j]=0;
}
Mat new_point_matrix; // this 3x1 matrix will be used to give new point for all old points in old image
//(undistor_img_hsv.rows,undistor_img_hsv.cols,CV_32FC1,bev_zero);
//cout<<endl<<new_point_matrix<<endl<<endl;
//new_point_matrix=
//Mat new_point_matrix;
float corrected_x,corrected_y;
Mat old_point_matrix;
//conversion from point to its new point
for(int k=0;k<undistor_img_hsv.cols;k++)
{
for(int l=0;l<undistor_img_hsv.rows;l++)
{
float point[3][1]={k,
l,
1};
Mat old_point_matrix(3,1,CV_32FC1,point);
//get the new points for new view
//corrected_x=bev_zero[0][0]/bev_zero[2][0];
//corrected_y=bev_zero[1][0]/bev_zero[2][0];
new_point_matrix=transpose_homography_matrix*old_point_matrix;
cout<<"old point="<<old_point_matrix<<",new point="<<new_point_matrix<<endl; // every old point in the old image has got a new corresponding point
//cout<<"new x="<<corrected_x<<", new y="<<corrected_y<<endl<<endl;
//cout<<bev_zero[0][0]<<bev_zero[1][0]<<endl<<endl;
//cout<<"new x="<<new_point_matrix.at<float>(0,0)/new_point_matrix.at<float>(2,0)<<endl;
//cout<<", new y="<<new_point_matrix.at<float>(1,0)/new_point_matrix.at<float>(2,0)<<endl;x
//new_point_matrix=new_point_matrix/new_point_matrix.at<float>(2,0);
//find corrected values
corrected_x=abs(new_point_matrix.at<float>(0,0)/new_point_matrix.at<float>(2,0));
corrected_y=abs(new_point_matrix.at<float>(1,0)/new_point_matrix.at<float>(2,0));
//cout<<"point= ("<<corrected_x<<","<<corrected_y<<")"<<endl;
//cout<<"old point= ("<<k<<","<<l<<")"<<endl;
cout<<"corrected_x="<<corrected_x<<endl<<"corrected_y="<<corrected_y<<endl;
float intensity; // to store intensity values
// based on the corrected values, find out the intensity points
//cout<<findintensity(corrected_x,corrected_y)<<endl;
if((corrected_x>=1&&corrected_x<=widthBirdEyeView)&&(corrected_y>=1&&corrected_y<=heightBirdEyeView))
{
intensity_change++;
intensity=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))+(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))+(corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))+(corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
}
else
intensity=0;
cout<<"intensity="<<intensity<<endl;
//cout<<new_point_matrix<<endl;
cout<<floor(new_point_matrix.at<float>(0,0))<<endl;
cout<<int(new_point_matrix.at<float>(1,0))<<endl;
// now I just need to give this intensity value to the corresponding point for the old point
//output.at<uchar>(floor(new_point_matrix.at<float>(0,0)),floor(new_point_matrix.at<float>(1,0)))=intensity;
//cout<<"value="<<new_point_matrix.at<uchar>(0,150);
//cout<<"value21="<<undistor_img.at<uchar>(0,150);
//cout<<"pixel intensity at this point="<<new_point_matrix<<endl;
cout<<endl;
}
}
//cout<<"intensity changed "<<intensity_change<<" times."<<endl;
//imshow("output",output);
//cout<<"old point matrix="<<old_point_matrix<<endl;
//cout<<"new point matrix="<<new_point_matrix<<endl;
//cout<<"pixel intensity at this point="<<new_point_matrix<<endl; //ERROR HERE
//cout<<"changed new point="<<new_point_matrix<<endl;
/*
Mat p_new(3,1,CV_32FC1); // this will give the coordinates in the bird's eye view
float corrected_x,corrected_y;
//float Floor_corrected_y,Floor_corrected_x,Ceil_corrected_x,Ceil_corrected_y,Ratio_corrected_x,Ratio_corrected_y;
int a=0,b=0;
// counters for if and else blocks
//now we need matrix with coordinates of the image plane, to be projected
for(int p=0; p<heightBirdEyeView;p++)
{
uchar* data= undistor_img.ptr<uchar>(p);
uchar* hdata= birdeyeview_img.ptr<uchar>(p);
for(int q=0;q<widthBirdEyeView;q++)
{
//birdeyeview_img.at<cv::Vec3b>(q,p)[0]=transpose_homography_matrix*undistor_img.at<cv::Vec3b>(q,p)[0];
//birdeyeview_img.at<cv::Vec3b>(q,p)[1]=transpose_homography_matrix*undistor_img.at<cv::Vec3b>(q,p)[1];
//birdeyeview_img.at<cv::Vec3b>(q,p)[2]=transpose_homography_matrix*undistor_img.at<cv::Vec3b>(q,p)[2];
//birdeyeview_img.at<uchar>(q,p)=transpose_homography_matrix*undistor_img.at<uchar>(q,p);
//birdeyeview_img.at<uchar>(q,p)=transpose_homography_matrix*int(undistor_img.at<int>(q,p));
//cout<<transpose_homography_matrix*undistor_img.at<int>(q,p)<<endl;
int M[]={q,p,1};
Mat p_old(3,1,CV_32FC1,M); //holding the positions in undistorted image
//cout<<transpose_homography_matrix*p_old<<endl;
p_new=transpose_homography_matrix*p_old;
//cout<<endl<<p_new;
//cout<<endl<<p_new.at<float>(0,0);
//cout<<endl<<p_new[1];
//cout<<endl<<p_new[2];
//cout<<endl<<cvmGet(p_new,0,0);
corrected_x=p_new.at<float>(0,0)/p_new.at<float>(2,0);
corrected_y=p_new.at<float>(1,0)/p_new.at<float>(2,0);
//cout<<"the pixel intensity to be assigned="<<(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))
// +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))
// +( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))
// +( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)))<<endl;
//cout<<"values to be greater than 1"<<corrected_x<<","<<corrected_y<<endl;
//cout<<"those should be less than"<<undistor_img.rows<<"and"<<undistor_img.cols<<endl;
//cout<<corrected_x<<" "<<corrected_y<<endl;
if (((abs(corrected_y)>=1)&&(corrected_y<=undistor_img.rows))&&((abs(corrected_x)>=1)&&(corrected_x<=undistor_img.cols)))
{// Floor_corrected_y = floor(corrected_y);
//Ceil_corrected_y = ceil(corrected_y);
//Floor_corrected_x = floor(corrected_x);
//Ceil_corrected_x = ceil(corrected_x);
// Ratio_corrected_y = corrected_y-floor(corrected_y);
// Ratio_corrected_x = corrected_x-floor(corrected_x);
//birdeyeview_img.at<uchar>(p,q)=(1-(corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))
// +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))
// +( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))
// +( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
//cout<<"if read"<<endl;
//hdata[q]=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*data[q] +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*data[q] +( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*data[q]+( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*data[q]; //works to some extent
hdata[q]=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)) +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x))+( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)) +( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x));
a++;}
//birdeyeview_img.at<uchar>(q,p)=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))
// +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))
// +(corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))
// +(corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
//}
else{
b++;
hdata[q]=data[q];
//birdeyeview_img.at<uchar>(p,q)=255;
//hdata[q]=undistor_img.at<uchar>(round(corrected_y),round(corrected_x));
//hdata[q]=(int)undistor_img.at<uchar>(q,p);//gives sm output
//hdata[q]= undistor_img.ptr<uchar>(p,q);
//hdata[q]= undistor_img.at<uchar>(p,q);//works to sm extent
//cout<<endl<<"pixel value"<<(int) undistor_img.at<uchar>(p,q);
//birdeyeview_img.at<uchar>(q,p)=undistor_img.at<uchar>(round(corrected_y),round(corrected_x));
}
//cout<<(int)birdeyeview_img.at<uchar>(p,q)<<endl;
//birdeyeview_img.at<uchar>(p,q)=0;}
//cout<<"else read"<<endl;}
//undistor_img.at<uchar>(p,q)=(int)undistor_img.at<uchar>(round(corrected_y),round(corrected_x));}
//birdeyeview_img.at<uchar>(p,q)=124;
//cout<<endl<<(int)undistor_img.at<uchar>(p,q);
//cout<<endl<<(int)birdeyeview_img.at<uchar>(p,q);
//cout<<"working here1"<<endl;
}
}
//flip(birdeyeview_img,birdeyeview_img,1);
// perspectiveTransform(undistor_img,birdeyeview_img ,homography_matrix);
//cout<<"bird"<<birdeyeview_img<<endl;
//cout<<"working here2"<<endl;
//cout<<birdeyeview_img<<endl;
cout<<"input channels="<<undistor_img.channels()<<endl;
//cout<<"grayscaled image channels="<<gray_undistor_img.channels()<<endl;
cout<<"output channels="<<birdeyeview_img.channels()<<endl;
cout<<"if was read"<<a <<"times"<<endl;
cout<<"else was read"<<b <<"times"<<endl;
imshow("bird's eye view image",birdeyeview_img);
cout<<"input size="<<undistor_img.rows<<"X"<<undistor_img.cols<<endl;
cout<<"result size="<<birdeyeview_img.rows<<"X"<<birdeyeview_img.cols<<endl;
//cout<<"working here3"<<endl;
*/
cvWaitKey();
}``
有人可以帮忙做什么吗?
最佳答案
只需新建(或打开)文件 /etc/ld.so.conf.d/opencv.conf,在其中写入 OpenCV 共享库所在的目录
(原答案写的是 /usr/local/opencv/;若按默认前缀安装,通常应为 /usr/local/lib,请以实际安装路径为准),
保存后执行:sudo ldconfig
以刷新动态链接器的库缓存,之后程序即可找到 libopencv_core.so.2.4。
关于c++ - OpenCV : libopencv_core. so.2.4:无法打开共享对象文件:没有这样的文件或目录,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/19164343/
我的一位教授给了我们一些考试练习题,其中一个问题类似于下面(伪代码): a.setColor(blue); b.setColor(red); a = b; b.setColor(purple); b
我似乎经常使用这个测试 if( object && object !== "null" && object !== "undefined" ){ doSomething(); } 在对象上,我
C# Object/object 是值类型还是引用类型? 我检查过它们可以保留引用,但是这个引用不能用于更改对象。 using System; class MyClass { public s
我在通过 AJAX 发送 json 时遇到问题。 var data = [{"name": "Will", "surname": "Smith", "age": "40"},{"name": "Wil
当我尝试访问我的 View 中的对象 {{result}} 时(我从 Express js 服务器发送该对象),它只显示 [object][object]有谁知道如何获取 JSON 格式的值吗? 这是
我有不同类型的数据(可能是字符串、整数......)。这是一个简单的例子: public static void main(String[] args) { before("one"); }
嗨,我是 json 和 javascript 的新手。 我在这个网站找到了使用json数据作为表格的方法。 我很好奇为什么当我尝试使用 json 数据作为表时,我得到 [Object,Object]
已关闭。此问题需要 debugging details 。目前不接受答案。 编辑问题以包含 desired behavior, a specific problem or error, and the
我听别人说 null == object 比 object == null check 例如: void m1(Object obj ) { if(null == obj) // Is thi
Match 对象 提供了对正则表达式匹配的只读属性的访问。 说明 Match 对象只能通过 RegExp 对象的 Execute 方法来创建,该方法实际上返回了 Match 对象的集合。所有的
Class 对象 使用 Class 语句创建的对象。提供了对类的各种事件的访问。 说明 不允许显式地将一个变量声明为 Class 类型。在 VBScript 的上下文中,“类对象”一词指的是用
Folder 对象 提供对文件夹所有属性的访问。 说明 以下代码举例说明如何获得 Folder 对象并查看它的属性: Function ShowDateCreated(f
File 对象 提供对文件的所有属性的访问。 说明 以下代码举例说明如何获得一个 File 对象并查看它的属性: Function ShowDateCreated(fil
Drive 对象 提供对磁盘驱动器或网络共享的属性的访问。 说明 以下代码举例说明如何使用 Drive 对象访问驱动器的属性: Function ShowFreeSpac
FileSystemObject 对象 提供对计算机文件系统的访问。 说明 以下代码举例说明如何使用 FileSystemObject 对象返回一个 TextStream 对象,此对象可以被读
我是 javascript OOP 的新手,我认为这是一个相对基本的问题,但我无法通过搜索网络找到任何帮助。我是否遗漏了什么,或者我只是以错误的方式解决了这个问题? 这是我的示例代码: functio
我可以很容易地创造出很多不同的对象。例如像这样: var myObject = { myFunction: function () { return ""; } };
function Person(fname, lname) { this.fname = fname, this.lname = lname, this.getName = function()
任何人都可以向我解释为什么下面的代码给出 (object, Object) 吗? (console.log(dope) 给出了它应该的内容,但在 JSON.stringify 和 JSON.parse
我正在尝试完成散点图 exercise来自免费代码营。然而,我现在只自己学习了 d3 几个小时,在遵循 lynda.com 的教程后,我一直在尝试确定如何在工具提示中显示特定数据。 This code
我是一名优秀的程序员,十分优秀!