"Fossies" - the Fresh Open Source Software Archive 
Member "pfstools-2.2.0/src/camera/pfsalign.cpp" (12 Aug 2021, 26550 Bytes) of package /linux/privat/pfstools-2.2.0.tgz:
As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) C and C++ source code syntax highlighting (style:
standard) with prefixed line numbers and
code folding option.
Alternatively you can here
view or
download the uninterpreted source code file.
/**
 * @file pfsalign.cpp
 * @brief Image stack alignment using feature matching
 *
 * The code relies on an OpenCV feature detector / descriptor (AKAZE by
 * default, optionally SURF). The ideas for pruning false matches are mostly
 * based on the book:
 *
 * Laganière, R. (2011). OpenCV 2 Computer Vision Application Programming Cookbook.
 * Packt Publishing.
 *
 *
 * This file is a part of the pfscalibration package.
 * ----------------------------------------------------------------------
 * Copyright (C) 2003-2013 Rafal Mantiuk and Grzegorz Krawczyk
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 * ----------------------------------------------------------------------
 *
 * @author Rafal Mantiuk, <mantiuk@mpi-sb.mpg.de>
 *
 * $Id: pfsalign.cpp,v 1.11 2013/11/14 21:28:28 rafm Exp $
 */

#include <iostream>
#include <sstream>
#include <string>
#include <algorithm>
#include <getopt.h>
#include <string.h>

// Because the non-free OpenCV modules are not included in most distributions,
// the code now relies on the less restrictive AKAZE feature detector.
// Uncomment the line below to use SURF instead.
//#define USE_SURF

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <libexif/exif-data.h>

#ifdef USE_SURF
#include <opencv2/nonfree/features2d.hpp>
#endif

#include <pfs.h>

#define PROG_NAME "pfsalign"
#define VERBOSE_STR if( verbose ) std::cerr << PROG_NAME ": "

bool verbose = false;

class QuietException
{
};

using namespace cv;
using namespace std;

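// pfsalign reads a stack of pfs frames from stdin, aligns each frame to the
// reference frame and writes the warped frames back to stdout. A typical
// pfstools pipeline would look roughly like this (file names and the output
// pattern are only an example):
//
//   pfsin exposures/*.jpg | pfsalign -v -c min | pfsout aligned_%04d.png
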
void printHelp()
{
    fprintf( stderr, PROG_NAME " [-r index|--reference index] [-c (min|max)|--crop (min|max)] [-s (1-10)|--sensitivity (1-10)] [-d|--display-matches] [-f|--fail-no-match] [-v] [-h]\n"
             "See the man page for more information.\n" );
}

// Simple tic/toc timer used for verbose progress and timing messages.
class Toc
{
    double tickCount;
    bool newLine;
    const char *name;
public:
    void tic( const char *name = NULL, bool newLine = false )
    {
        if( name == NULL )
            name = "processing";
        this->name = name;
        this->newLine = newLine;
        VERBOSE_STR << "Starting " << name << "...";
        if( newLine )
            VERBOSE_STR << std::endl;
        tickCount = static_cast<double>( getTickCount() );
    }

    void toc( )
    {
        double duration = (static_cast<double>( getTickCount() ) - tickCount) / getTickFrequency();
        if( newLine ) {
            VERBOSE_STR << "Completed " << name << ". It took " << duration << " seconds." << std::endl;
        } else
            VERBOSE_STR << "completed. It took " << duration << " seconds." << std::endl;
    }
};


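// Nearest-neighbour distance ratio (NNDR) test: for each keypoint the two
// best matches are compared and the match is discarded (the match vector is
// cleared) when the best distance is not sufficiently smaller than the
// second-best one, i.e. when distance[0]/distance[1] >= ratio.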
void pruneNNDR( std::vector<std::vector<DMatch> > &matches, float ratio )
{
    for( vector<vector<DMatch> >::iterator it = matches.begin(); it != matches.end(); it++ ) {
        bool prune = false;
        if( it->size() < 2 )
            prune = true;
        else {
            float NNDR = (*it)[0].distance / (*it)[1].distance;
            if( NNDR >= ratio )
                prune = true;
        }
        if( prune )
            it->clear();
    }

}

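// Symmetry (cross-check) test: keep only those matches for which the best
// match from the reference to the test image agrees with the best match in
// the opposite direction. The surviving matches are returned in matches_sym.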
void symmetryTest( const vector<vector<DMatch> > &matches_12, const vector<vector<DMatch> > &matches_21, vector<DMatch> &matches_sym )
{
    for( vector< vector<DMatch> >::const_iterator it1 = matches_12.begin(); it1 != matches_12.end(); it1++ ) {
        if( it1->size() == 0 )
            continue;

        for( vector< vector<DMatch> >::const_iterator it2 = matches_21.begin(); it2 != matches_21.end(); it2++ ) {
            if( it2->size() == 0 )
                continue;

            if( (*it1)[0].queryIdx == (*it2)[0].trainIdx && (*it2)[0].queryIdx == (*it1)[0].trainIdx ) {
                matches_sym.push_back( DMatch( (*it1)[0].queryIdx, (*it1)[0].trainIdx, (*it1)[0].distance ));
                break;
            }
        }

    }
}




/**
  Match feature points in a pair of images and find the homography that maps
  the test image (exp_img) onto the reference image (ref_img). Returns false
  if a reliable transformation cannot be found.
 */
bool alignImagePair( const Mat &ref_img, const Mat &exp_img, Mat &homography, int sensitivity, bool display_matches )
{
    Toc toc;
    Toc toc_global;

    toc_global.tic( "alignment of image pair", true );

    homography = Mat::eye( 3, 3, CV_64F );
    //    cv::imshow( "Result", ref_img );
    //    cv::imshow( "Result2", exp_img );
    //    cv::waitKey(0);

    //    Ptr<FeatureDetector> detector;
    //    detector = new GoodFeaturesToTrackDetector();
    //    detector = new SurfFeatureDetector();

    std::vector<KeyPoint> keypoints1, keypoints2;
    Mat descr_ref, descr_exp;

#ifdef USE_SURF
    Ptr<FeatureDetector> detector(new DynamicAdaptedFeatureDetector( new SurfAdjuster( (11-sensitivity) * 100.f, 2, 1000 ),
                                                                     100, 1000, sensitivity/2+2 ));
    toc.tic( "feature detection" );
    detector->detect( ref_img, keypoints1 );
    detector->detect( exp_img, keypoints2 );
    toc.toc( );
#else
    Ptr<AKAZE> akaze = AKAZE::create();
    toc.tic( "feature detection and extraction" );
    akaze->setThreshold( 0.005f );
    akaze->detectAndCompute( ref_img, noArray(), keypoints1, descr_ref);
    akaze->detectAndCompute( exp_img, noArray(), keypoints2, descr_exp);
    toc.toc( );
    VERBOSE_STR << "Detected " << keypoints1.size() << " keypoints in the reference image." << endl;
    VERBOSE_STR << "Detected " << keypoints2.size() << " keypoints in the test image." << endl;
#endif

    if( keypoints1.size() < 10 || keypoints2.size() < 10 ) {
        cerr << PROG_NAME ": Could not detect a sufficient number of keypoints (found "
             << keypoints1.size() << " and " << keypoints2.size() << " keypoints)" << endl;

        if( display_matches ) {
            Mat vis;
            vector<char> inliners_c;
            std::vector<cv::DMatch> matches_sym;
            drawMatches( ref_img, keypoints1, exp_img, keypoints2, matches_sym, vis, Scalar(0,0,255), Scalar(0,255,255), inliners_c, DrawMatchesFlags::DRAW_RICH_KEYPOINTS );
            cv::namedWindow( "Image pair matches" );
            cv::imshow( "Image pair matches", vis );
            cv::waitKey(0);
        }

        return false;
    }

    //    SiftDescriptorExtractor surfDesc;
#ifdef USE_SURF
    SurfDescriptorExtractor surfDesc;

    toc.tic( "descriptor extraction" );
    surfDesc.compute( ref_img, keypoints1, descr_ref );
    surfDesc.compute( exp_img, keypoints2, descr_exp );
    toc.toc( );
#endif

#ifdef USE_SURF
    FlannBasedMatcher matcher;
    //    BruteForceMatcher< cv::L2<float> > matcher;
#else
    BFMatcher matcher(NORM_HAMMING);
#endif
    toc.tic( "matching" );
    std::vector< std::vector<cv::DMatch> > matches_rt;
    std::vector< std::vector<cv::DMatch> > matches_tr;
    matcher.knnMatch( descr_ref, descr_exp, matches_rt, 2 );
    matcher.knnMatch( descr_exp, descr_ref, matches_tr, 2 );

    pruneNNDR( matches_rt, 1 );
    pruneNNDR( matches_tr, 1 );

    std::vector<cv::DMatch> matches_sym;
    symmetryTest( matches_rt, matches_tr, matches_sym );
    toc.toc( );

    std::vector<cv::DMatch>::iterator it;
    vector<Point3f> p1, p2;
    Mat_<float> pp1(matches_sym.size(),2);
    Mat_<float> pp2(matches_sym.size(),2);
    int kk = 0;
    for( it = matches_sym.begin(); it != matches_sym.end(); it++, kk++ ) {

        const Point2f from = keypoints1[it->queryIdx].pt;
        p1.push_back( Point3f( from.x, from.y, 1 ) );
        pp1(kk,0) = from.x;
        pp1(kk,1) = from.y;
        //        pp1(kk,2) = 1;
        const Point2f to = keypoints2[it->trainIdx].pt;
        p2.push_back( Point3f( to.x, to.y, 1 ) );
        pp2(kk,0) = to.x;
        pp2(kk,1) = to.y;
        //pp2(kk,2) = 1;
        //        std::cout << "Matches: " << from << " -> " << to << std::endl;
    }

    if( matches_sym.size() < 9 ) {
        cerr << PROG_NAME ": Not enough matches found between a pair of images (found " << matches_sym.size() << ")" << endl;
        return false;
    }

    //    Mat affine = estimateRigidTransform( e1, e2, false );

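    // Estimate the homography that maps the matched points of the test image
    // (pp2) onto the reference image (pp1) using RANSAC with a 1-pixel
    // reprojection error threshold; "inliners" marks the matches that are
    // consistent with the estimated model.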
    vector<uchar> inliners(matches_sym.size(), 0);
    //    affine = findHomography( pp1, pp2, inliners, CV_RANSAC, 1. );
    homography = findHomography( pp2, pp1, RANSAC, 1., inliners );

    //    solve( pp1, pp2, affine, DECOMP_SVD );
    //    Mat affine = (Mat_<float>(2,3) << 1, 0, 0, 0, 1, 10);
    int inliner_count = count( inliners.begin(), inliners.end(), 1 );

    VERBOSE_STR << "Found " << matches_sym.size() << " matching features, reduced to " << inliner_count << " inliers." << endl;

    if( display_matches ) {
        Mat vis;
        vector<char> inliners_c(matches_sym.size(), 0);
        for( size_t i = 0; i < inliners.size(); i++ )
            inliners_c[i] = (char)inliners[i];

        drawMatches( ref_img, keypoints1, exp_img, keypoints2, matches_sym, vis, Scalar(0,0,255), Scalar(0,255,255), inliners_c, DrawMatchesFlags::DRAW_RICH_KEYPOINTS|DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

        cv::namedWindow( "Image pair matches" );
        cv::imshow( "Image pair matches", vis );

        cv::waitKey(0);
    }

    if( inliner_count < 9 ) {
        homography = Mat::eye( 3, 3, CV_64F );
        cerr << PROG_NAME ": Not enough inliers to find a reliable transformation." << endl;
        return false;
    }
    toc_global.toc();


    return true;

}


/**
  Convert floating point image to 8-bit image.
 */
Mat convert_to_8bit( Mat &in, bool apply_gamma )
{
    Mat out;
    if( !apply_gamma ) {
        in.convertTo( out, CV_8UC3, 255 );
    } else {
        out.create( in.rows, in.cols, CV_8UC3 );
        for(int i = 0; i < in.rows; i++)
        {
            Vec3f *i_p = in.ptr<Vec3f>(i);
            Vec3b *o_p = out.ptr<Vec3b>(i);
            for(int j = 0; j < in.cols; j++) {
                for( int cc=0; cc < 3; cc++ ) {
                    o_p[j][cc] = saturate_cast<uchar>(powf( i_p[j][cc], 1.f/2.2f ) * 255.f);
                }
            }
        }

    }

    return out;
}


// Find maximum and positive minimum for an image
void find_min_max( Mat &in, float &minV, float &maxV )
{
    minV = 1e10f;
    maxV = -1e10f;
    for(int i = 0; i < in.rows; i++)
    {
        Vec3f *i_p = in.ptr<Vec3f>(i);
        for(int j = 0; j < in.cols; j++) {
            for( int cc=0; cc < 3; cc++ ) {
                float v = i_p[j][cc];
                if( v > 0 ) {
                    if( v < minV )
                        minV = v;
                    if( v > maxV )
                        maxV = v;
                }
            }
        }
    }

}

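// Map pixel values to 8 bits on a logarithmic scale: the range [min, max] is
// spread over 0-255 (for gamma = 1), "mult" compensates for the exposure
// difference between the two images of a pair and "gamma" rescales the log
// contrast; non-positive pixel values are mapped to 0.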
Mat convert_and_log_scale( Mat &in, float min, float max, float mult, float gamma )
{
    Mat out;
    out.create( in.rows, in.cols, CV_8UC3 );

    const float l_min = logf( min );
    const float l_max = logf( max );
    const float l_mult = logf( mult );

    for(int i = 0; i < in.rows; i++)
    {
        Vec3f *i_p = in.ptr<Vec3f>(i);
        Vec3b *o_p = out.ptr<Vec3b>(i);
        for(int j = 0; j < in.cols; j++) {
            for( int cc=0; cc < 3; cc++ ) {
                if( i_p[j][cc] > 0.f )
                    o_p[j][cc] = saturate_cast<uchar>( ((gamma*(logf(i_p[j][cc]) - l_max) + l_mult) / (l_max-l_min) + 1.f) * 255.f);
                else
                    o_p[j][cc] = 0;
            }
        }
    }

    return out;
}

/**
  Convert a pair of floating point images to 8-bit.
 */
void convert_to_8bit( Mat &in1, Mat &in2, Mat &out1, Mat &out2, float exp_mult1, float exp_mult2, bool is_ldr )
{
    if( is_ldr && exp_mult1 == 1.f && exp_mult2 == 1.f ) {
        in1.convertTo( out1, CV_8UC3, 255*exp_mult1);
        in2.convertTo( out2, CV_8UC3, 255*exp_mult2);
    } else {
        float minV1, minV2, maxV1, maxV2;
        find_min_max( in1, minV1, maxV1 );
        find_min_max( in2, minV2, maxV2 );

        minV1 *= exp_mult1;
        maxV1 *= exp_mult1;
        minV2 *= exp_mult2;
        maxV2 *= exp_mult2;

        float gamma = is_ldr ? 2.2f : 1.f;

        minV1 = powf( minV1, gamma );
        minV2 = powf( minV2, gamma );

        float minV, maxV;
        minV = std::min( minV1, minV2 );
        maxV = std::max( maxV1, maxV2 );


        if( (maxV/minV) > 10000.f ) {
            // To avoid compressing contrast too much
            minV = maxV / 10000.f;
        }
        if( (maxV/minV) < 100.f ) {
            // To avoid stretching contrast too much
            minV = maxV / 100.f;
        }

        VERBOSE_STR << "Using log scaling (min val: " << minV1 << " max value: " << maxV1 << ")" << endl;


        out1 = convert_and_log_scale( in1, minV, maxV1, exp_mult1, gamma );
        out2 = convert_and_log_scale( in2, minV, maxV1, exp_mult2, gamma );
    }
}

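// Intersection of the line through p1 and p2 with the line through p3 and p4,
// obtained by solving the parametric form p = p1 + s*(p2-p1) for s.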
Point2f intersect_lines( Point2f p1, Point2f p2, Point2f p3, Point2f p4 )
{
    float s = ((p4.x-p3.x)*(p3.y-p1.y) - (p3.x-p1.x)*(p4.y-p3.y)) / ( (p4.x-p3.x)*(p2.y-p1.y) - (p2.x-p1.x)*(p4.y-p3.y) );

    Point2f p;
    p.x = p1.x + s * (p2.x-p1.x);
    p.y = p1.y + s * (p2.y-p1.y);

    return p;
}

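// Find an axis-aligned cropping rectangle for a warped image: starting from
// the centre of the warped quadrilateral ("rotated" holds its 4 corners),
// lines are cast towards the upper corners of the original frame and
// intersected with the first two edges of the quad; the closest intersection
// is mirrored about the centre to obtain a rectangle that lies within the
// warped image (assuming an approximately symmetric warp).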
Rect auto_crop( const Size2f &size, const vector<Point2f> &rotated )
{

    assert( rotated.size() == 4 ); // 4 corners of the polygon

    Point2f centre;
    for( int kk=0; kk < 4; kk++ )
        centre += rotated[kk];

    centre = centre * 0.25;

    //cout << "Float: " << (rot_mat.type() == CV_32F) << endl;

    float best_d = 1e10;
    Point2d best_p;
    for( int jj = 0; jj < 2; jj++ ) {
        for( int ii=0; ii < 2; ii++ ) {
            Point2f to;
            if( jj == 0 )
                to = centre - Point2f( size.width/2, size.height/2 );
            else
                to = centre - Point2f( -size.width/2, size.height/2 );

            Point2f p1 = intersect_lines( centre, to, rotated[ii], rotated[ii+1] );
            float d = powf(p1.x-centre.x,2)+powf(p1.y-centre.y,2);
            if( d < best_d ) {
                best_d = d;
                best_p = p1;
            }
        }
    }
    Point2f ul( centre.x - fabs(best_p.x-centre.x), centre.y - fabs(best_p.y-centre.y) );
    Point2f br( centre.x + fabs(best_p.x-centre.x), centre.y + fabs(best_p.y-centre.y) );

    Rect crop( ul, br );

    return crop;
}

struct FrameInfo {
    Mat image;
    std::string file_name;
    pfs::Frame *frame;
    Size size;

    FrameInfo( Mat &image, const char *file_name, pfs::Frame *frame ) :
        image( image ), file_name( file_name ), frame( frame )
    {
        size = Size( image.cols, image.rows );
    }
};

void alignFrames(int argc, char *argv[])
{
    pfs::DOMIO pfsio;

    int reference_index = 0;        // Index of the reference image
    bool display_matches = false;
    bool crop_min = false;
    bool fail_on_no_match = false;
    int sensitivity = 5;

    static struct option cmdLineOptions[] = {
        { "help", no_argument, NULL, 'h' },
        { "verbose", no_argument, NULL, 'v' },
        { "reference", required_argument, NULL, 'r' },
        { "crop", required_argument, NULL, 'c' },
        { "sensitivity", required_argument, NULL, 's' },
        { "fail-no-match", no_argument, NULL, 'f' },
        { "display-matches", no_argument, NULL, 'd' },
        { NULL, 0, NULL, 0 }
    };

    int optionIndex = 0;
    while( 1 ) {
        int c = getopt_long (argc, argv, "r:c:s:fhvd", cmdLineOptions, &optionIndex);
        if( c == -1 ) break;
        switch( c ) {
        case 'h':
            printHelp();
            throw QuietException();
        case 'v':
            verbose = true;
            break;
        case 'r':
            reference_index = strtol( optarg, NULL, 10 ) - 1;
            break;
        case 'd':
            display_matches = true;
            break;
        case 'c':
            if( !strcasecmp( optarg, "min" ) ) {
                crop_min = true;
            } else if( !strcasecmp( optarg, "max" ) ) {
                crop_min = false;
            } else
                throw pfs::Exception( "Unrecognized cropping mode" );
            break;
        case 'f':
            fail_on_no_match = true;
            break;
        case 's':
            sensitivity = strtol( optarg, NULL, 10 );
            if( sensitivity <= 0 || sensitivity > 10 ) {
                throw pfs::Exception( "Sensitivity parameter must be within the 1-10 range.");
            }
            break;
        case '?':
            throw QuietException();
        case ':':
            throw QuietException();
        }
    }

    vector<FrameInfo> frames;

    if( crop_min ) {
        VERBOSE_STR << "Cropping to the size that contains only overlapping pixels (min)" << endl;
    } else {
        VERBOSE_STR << "Cropping to the size that contains all pixels (max)" << endl;
    }


    // Load all images
    //    bool first_frame = true;
    int frame_count = 0;
    for( ; true; frame_count++ ) {

        pfs::Frame *frame = pfsio.readFrame( stdin );
        if( frame == NULL ) break; // No more frames

        // Use a std::string so that a generated name stays valid after the
        // stringstream goes out of scope (c_str() of a temporary would dangle).
        std::string f_name_buf;
        const char *f_name = frame->getTags()->getString("FILE_NAME");
        if( f_name == NULL ) {
            std::stringstream f_name_str;
            f_name_str << "frame_" << frame_count;
            f_name_buf = f_name_str.str();
            f_name = f_name_buf.c_str();
        }
        VERBOSE_STR << "Loading " << f_name << endl;

        pfs::Channel *X, *Y, *Z;
        frame->getXYZChannels( X, Y, Z );

        if( X != NULL ) {           // Color, XYZ
            pfs::Channel* &R = X;
            pfs::Channel* &G = Y;
            pfs::Channel* &B = Z;
            pfs::transformColorSpace( pfs::CS_XYZ, X, Y, Z, pfs::CS_RGB, R, G, B );

            Mat img;
            img.create( frame->getHeight(), frame->getWidth(), CV_32FC3 );

            int in_index = 0;
            for(int i = 0; i < img.rows; i++)
            {
                Vec3f *o_p = img.ptr<Vec3f>(i);
                for(int j = 0; j < img.cols; j++) {
                    o_p[j][0] = (*B)(in_index);
                    o_p[j][1] = (*G)(in_index);
                    o_p[j][2] = (*R)(in_index);
                    in_index++;
                }
            }

            frames.push_back( FrameInfo( img, f_name, frame ) );

        } else
            throw pfs::Exception( "Only color images are supported" );


        // Remove all channels from the frame to save memory
        pfs::ChannelIteratorPtr cit( frame->getChannelIterator() );
        while( cit->hasNext() ) {
            pfs::Channel *ch = cit->getNext();
            frame->removeChannel( ch );
        }

    }

    if( reference_index < 0 || (size_t)reference_index >= frames.size() )
        throw pfs::Exception( "Reference image index out of range" );


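    // Each image is aligned against its immediate predecessor in the stack,
    // so homoM[i] holds the homography that maps frame i+1 onto frame i. The
    // transformations to the reference frame are chained later on.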
    vector<Mat> homoM;
    vector<Size> imageSizes;
    for( int ff=1; ff < (int)frames.size(); ff++ ) {
        VERBOSE_STR << "Processing " << frames[ff-1].file_name << " and " << frames[ff].file_name << endl;

        cv::Mat ref_img = frames[ff-1].image;
        cv::Mat exp_img = frames[ff].image;

        imageSizes.push_back( exp_img.size() );

        bool is_ldr = false;
        const char *lum_type = frames[ff].frame->getTags()->getString("LUMINANCE");
        if( lum_type ) {
            if( !strcmp( lum_type, "DISPLAY" ) )
                is_ldr = true;
        }

        // Extract exposure values
        float exp1 = 1.f, exp2 = 1.f;
        const char* exp_str = frames[ff-1].frame->getTags()->getString("BV");
        if( exp_str != NULL )
            exp1 = powf(2.0f,strtof( exp_str, NULL ));
        exp_str = frames[ff].frame->getTags()->getString("BV");
        if( exp_str != NULL )
            exp2 = powf(2.0f,strtof( exp_str, NULL ));

        if( exp1 / exp2 != 1.f ) {
            VERBOSE_STR << "Exposure pair: " << exp1 << ", " << exp2 << " (" << exp2/exp1 << " ratio)" << endl;
        }

        const float exp_mult1 = exp1 / std::min( exp1, exp2 );
        const float exp_mult2 = exp2 / std::min( exp1, exp2 );

        Mat ref_img_8b, exp_img_8b;

        //        Mat ref_img_8b = convert_to_8bit( ref_img, apply_gamma );
        //        Mat exp_img_8b = convert_to_8bit( exp_img, apply_gamma );
        //        convert_to_8bit( ref_img, exp_img, ref_img_8b, exp_img_8b, 1, 1, is_ldr );
        convert_to_8bit( ref_img, exp_img, ref_img_8b, exp_img_8b, exp_mult1, exp_mult2, is_ldr );

        Mat homography;
        bool success = alignImagePair( ref_img_8b, exp_img_8b, homography, sensitivity, display_matches );
        if( !success && fail_on_no_match )
            throw pfs::Exception( "Stopping because a match could not be found between an image pair" );

        VERBOSE_STR << "Homography (for the image pair):" << endl << homography << endl;

        homoM.push_back( homography );
    }


    // Chain all transformations and find the cropping box
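    // For every frame the pairwise homographies between it and the reference
    // frame are multiplied together; frames located before the reference in
    // the stack use the inverse of the chained transformation. The transformed
    // image corners are then used to grow (crop max) or shrink (crop min) the
    // output rectangle pano_rect.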
    vector<Mat> homoMC;
    Rect_<double> pano_rect( 0, 0, frames[0].image.cols, frames[0].image.rows );
    for( int kk = 0; kk < (int)frames.size(); kk++ )
    {
        Mat_<double> trans = Mat::eye(3,3,CV_64F);
        for( int ll = min( kk, reference_index )+1; ll <= max( kk, reference_index); ll++ )
        {
            if( ll > 0 )
                trans = trans*homoM[ll-1];
        }
        if( kk < reference_index )
            trans = trans.inv();

        homoMC.push_back( trans );

        double data[4][3] = { { 0., 0., 1. }, { (double)frames[kk].size.width, 0., 1. }, { (double)frames[kk].size.width, (double)frames[kk].size.height, 1. }, { 0., (double)frames[kk].size.height, 1. } };
        Mat corners( 4, 3, CV_64F, data );

        Mat corners_trans = trans * corners.t();

        //        VERBOSE_STR << "Image: " << fileNames[kk] << endl;
        //        VERBOSE_STR << " Corners: " << trans * corners.t() << endl;

        Mat p_nh; //( 4, 3, CV_32F );
        Mat_<float> corners_f;
        corners_trans.convertTo( corners_f, CV_32F );
        convertPointsFromHomogeneous( corners_f.t(), p_nh );


        if( crop_min ) {
            vector<Point2f> dest_rect(4);
            for( int ii=0; ii < 4; ii++ ) {
                dest_rect[ii].x = p_nh.at<float>(ii,0);
                dest_rect[ii].y = p_nh.at<float>(ii,1);
            }
            Rect_<float> img_rect = auto_crop( frames[kk].size, dest_rect );
            Rect_<double> img_rect_d = img_rect;
            pano_rect = pano_rect & img_rect_d;
        } else {
            Rect_<double> img_rect = boundingRect( p_nh );
            pano_rect = pano_rect | img_rect;
        }


        //        VERBOSE_STR << "Bounding rect: " << pano_rect.x << ", " << pano_rect.y << " - " << pano_rect.width << "x" << pano_rect.height << endl;
    }


    // Align
    Size dest_size = Size( ceil( pano_rect.width ), ceil( pano_rect.height) );
    //    Mat pano_img( ceil( pano_rect.height), ceil( pano_rect.width ), CV_8UC3 );
    //    pano_img.setTo( Vec3b(255, 255, 255 ));
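    // Warp every frame into the reference coordinate system: the chained
    // homography is composed with a translation that moves the origin of the
    // cropping rectangle to (0,0), and the result is written out as a new pfs
    // frame of the cropped size.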
    for( size_t ff=0; ff < frames.size(); ff++ ) {
        VERBOSE_STR << "Warping: "<< frames[ff].file_name << endl;

        Mat exp_img = frames[ff].image;

        Mat_<double> translate = Mat::eye(3,3,CV_64F);
        translate(0,2) = -pano_rect.x;
        translate(1,2) = -pano_rect.y;
        Mat trans = translate*homoMC[ff];
        VERBOSE_STR << "Homography (to reference): " << trans << endl;

        Mat res_img;
        warpPerspective( exp_img, res_img, trans, dest_size, INTER_LINEAR );

        pfs::Frame *alignedFrame = NULL;
        alignedFrame = pfsio.createFrame( res_img.cols, res_img.rows );

        pfs::Channel *X = alignedFrame->createChannel( "X" );
        pfs::Channel *Y = alignedFrame->createChannel( "Y" );
        pfs::Channel *Z = alignedFrame->createChannel( "Z" );

        pfs::Channel* &R = X;
        pfs::Channel* &G = Y;
        pfs::Channel* &B = Z;

        int out_index = 0;
        for(int i = 0; i < res_img.rows; i++)
        {
            Vec3f *i_p = res_img.ptr<Vec3f>(i);
            for(int j = 0; j < res_img.cols; j++) {
                (*B)(out_index) = i_p[j][0];
                (*G)(out_index) = i_p[j][1];
                (*R)(out_index) = i_p[j][2];
                out_index++;
            }
        }
        pfs::transformColorSpace( pfs::CS_RGB, R, G, B, pfs::CS_XYZ, X, Y, Z );
        pfs::copyTags( frames[ff].frame, alignedFrame );

        pfsio.writeFrame( alignedFrame, stdout );

        pfsio.freeFrame( alignedFrame );
        pfsio.freeFrame( frames[ff].frame );


        // TODO: Add alpha channel
        /*        Mat in_mask( exp_img.size(), CV_8U ), out_mask;
        in_mask.setTo( Scalar( 255 ) );
        warpPerspective( in_mask, out_mask, trans, pano_img.size(), INTER_LINEAR );
        res_img.copyTo( pano_img, out_mask );

        double data[4][3] = { { 0, 0, 1 }, { exp_img.cols, 0, 1 }, { exp_img.cols, exp_img.rows, 1 }, { 0, exp_img.rows, 1 } };
        Mat corners( 4, 3, CV_64F, data );
        Mat_<double> corners_trans = trans * corners.t();
        Mat p_nh; //( 4, 3, CV_32F );
        Mat_<float> corners_f;
        corners_trans.convertTo( corners_f, CV_32F );
        convertPointsFromHomogeneous( corners_f.t(), p_nh );*/
        //        Scalar borderColor = CV_RGB( 0, 0, 0 );
        //        line( pano_img, Point2f( p_nh(0,0), p_nh(0,1) ), Point2f( p_nh(1,0), p_nh(1,1) ), borderColor, 3 );
        //        line( pano_img, Point2f( p_nh(1,0), p_nh(1,1) ), Point2f( p_nh(2,0), p_nh(2,1) ), borderColor, 3 );
        //        line( pano_img, Point2f( p_nh(2,0), p_nh(2,1) ), Point2f( p_nh(3,0), p_nh(3,1) ), borderColor, 3 );
        //        line( pano_img, Point2f( p_nh(3,0), p_nh(3,1) ), Point2f( p_nh(0,0), p_nh(0,1) ), borderColor, 3 );
    }

}


int main( int argc, char* argv[] )
{
    try {
        alignFrames( argc, argv );
    }
    catch( pfs::Exception &ex ) {
        fprintf( stderr, PROG_NAME " error: %s\n", ex.getMessage() );
        return EXIT_FAILURE;
    }
    catch( QuietException &ex ) {
        return EXIT_FAILURE;
    }
    return EXIT_SUCCESS;
}