
[Implementing OpenCV in C++] (10) Project 2 - Document Scanner

์ง•์ง•์•ŒํŒŒ์นด 2023. 6. 14. 01:26

<This post was written while studying along with Murtaza's Workshop on YouTube :-)>

=> LEARN OPENCV C++ in 4 HOURS | Including 3x Projects | Computer Vision

 

🌀 Project 2 - Document Scanner

๐Ÿ’ง ๋ฌธ์„œ ์ด๋ฏธ์ง€์˜ dilate ์ ์šฉ์œผ๋กœ ์Šค์บ” ํ•˜๊ธฐ

dilate : ํฐ์ƒ‰ ํ”ฝ์…€์„ ์ค„์ด๋Š” ์—ญํ• 

#include <opencv2/opencv.hpp>		// umbrella header: every feature OpenCV provides
#include <opencv2/videoio.hpp>		// video capture and video file reading/writing
#include <opencv2/imgcodecs.hpp>	// image file reading and writing (imread, imwrite)
#include <opencv2/highgui.hpp>		// windows, UI widgets (sliders, buttons) and mouse handling
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

// #10. Document Scanner
Mat imgOriginal, imgGray, imgCanny, imgThre, imgBlur, imgErode, imgDil;
Mat preProcessing(Mat img) {
	// cvtColor(src, dst, flag)
	// : converts the color space of src according to the flag (here BGR -> grayscale)
	cvtColor(img, imgGray, COLOR_BGR2GRAY);
	// GaussianBlur(src, dst, kernel_size, sigma_x, sigma_y, borderType)
	// : weights the center of the kernel most heavily and blurs the surrounding pixels
	GaussianBlur(imgGray, imgBlur, Size(3, 3), 3, 0);
	// Canny(src, dst, threshold1, threshold2)
	// : edge detection
	Canny(imgBlur, imgCanny, 25, 75);
	Mat kernel = getStructuringElement(MORPH_RECT, Size(3, 3));
	// Morphology
	// Erosion : erode(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : shrinks the white (foreground) pixels
	//erode(imgCanny, imgErode, kernel);
	// Dilation : dilate(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : expands the white (foreground) pixels
	dilate(imgCanny, imgDil, kernel);
	return imgDil;
}

int main() {
	string path = "Resources/paper.jpg";
	imgOriginal = imread(path);
	resize(imgOriginal, imgOriginal, Size(), 0.5, 0.5);

	// Preprocessing
	imgThre = preProcessing(imgOriginal);
	imshow("image", imgOriginal);
	imshow("image Dial", imgDil);

	waitKey(0);
}

 

๐Ÿ’ง ๋ฌธ์„œ์˜ ํ…Œ๋‘๋ฆฌ์™€ ๊ผญ์ง“์  ๊ฒ€์ถœํ•ด์„œ ํ™”๋ฉด์— ๊ทธ๋ฆฌ๊ธฐ

#include <opencv2/opencv.hpp>		// umbrella header: every feature OpenCV provides
#include <opencv2/videoio.hpp>		// video capture and video file reading/writing
#include <opencv2/imgcodecs.hpp>	// image file reading and writing (imread, imwrite)
#include <opencv2/highgui.hpp>		// windows, UI widgets (sliders, buttons) and mouse handling
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

// #10. Document Scanner
Mat imgOriginal, imgGray, imgCanny, imgThre, imgBlur, imgErode, imgDil;
vector<Point> initialPoints;

Mat preProcessing(Mat img) {
	// cvtColor(src, dst, flag)
	// : converts the color space of src according to the flag (here BGR -> grayscale)
	cvtColor(img, imgGray, COLOR_BGR2GRAY);
	// GaussianBlur(src, dst, kernel_size, sigma_x, sigma_y, borderType)
	// : weights the center of the kernel most heavily and blurs the surrounding pixels
	GaussianBlur(imgGray, imgBlur, Size(3, 3), 3, 0);
	// Canny(src, dst, threshold1, threshold2)
	// : edge detection
	Canny(imgBlur, imgCanny, 25, 75);
	Mat kernel = getStructuringElement(MORPH_RECT, Size(3, 3));
	// Morphology
	// Erosion : erode(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : shrinks the white (foreground) pixels
	//erode(imgCanny, imgErode, kernel);
	// Dilation : dilate(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : expands the white (foreground) pixels
	dilate(imgCanny, imgDil, kernel);
	return imgDil;
}

vector<Point> getContours(Mat imgDil) {
	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;

	// findContours(image, contours, hierarchy, mode, method)
	// : contour detection extracts the outline coordinates of every object in the image
	findContours(imgDil, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
	vector<vector<Point>> conPoly(contours.size());
	vector<Rect> boundRect(contours.size());
	vector<Point> biggest;
	int maxArea = 0;

	for (int i = 0; i < contours.size(); i++) {
		int area = contourArea(contours[i]);
		cout << area << endl;

		string objectType;

		if (area > 1000) {
			float peri = arcLength(contours[i], true);
			// approxPolyDP(curve, approxCurve, epsilon, closed)
			// : approximates the contour with fewer vertices, giving an approximate polygon
			approxPolyDP(contours[i], conPoly[i], 0.02 * peri, true);

			if (area > maxArea && conPoly[i].size() == 4) {
				drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 5);
				biggest = { conPoly[i][0], conPoly[i][1], conPoly[i][2], conPoly[i][3] };
				maxArea = area;
			}
			// drawContours(image, contours, contourIdx, color, thickness, lineType)
			// : draws the detected contours on the image so they can be checked visually
			//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 2);
			//rectangle(imgOriginal, boundRect[i].tl(), boundRect[i].br(), Scalar(0, 255, 0), 5);
		}
	}
	return biggest;
}

void drawPoints(vector<Point> points, Scalar color) {
	for (int i = 0; i < points.size(); i++) {
		circle(imgOriginal, points[i], 30, color, FILLED);
		putText(imgOriginal, to_string(i), points[i], FONT_HERSHEY_PLAIN, 2, color, 2);
	}
}

int main() {
	string path = "Resources/paper.jpg";
	imgOriginal = imread(path);
	resize(imgOriginal, imgOriginal, Size(), 0.5, 0.5);

	// Preprocessing
	imgThre = preProcessing(imgOriginal);
	// Get contours - Biggest
	initialPoints = getContours(imgThre);
	drawPoints(initialPoints, Scalar(0, 0, 255));
	// Warp
	imshow("image", imgOriginal);
	imshow("image Dial", imgDil);

	waitKey(0);
}

 

๐Ÿ’ง ์Šค์บ”๋œ ๋ฌธ์„œ์˜ ๊ผญ์ง“์ ์— ํ…์ŠคํŠธ ์ถ”๊ฐ€ํ•˜๊ธฐ

#include <opencv2/opencv.hpp>		// umbrella header: every feature OpenCV provides
#include <opencv2/videoio.hpp>		// video capture and video file reading/writing
#include <opencv2/imgcodecs.hpp>	// image file reading and writing (imread, imwrite)
#include <opencv2/highgui.hpp>		// windows, UI widgets (sliders, buttons) and mouse handling
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

// #10. Document Scanner
Mat imgOriginal, imgGray, imgCanny, imgThre, imgBlur, imgErode, imgDil;
vector<Point> initialPoints;

Mat preProcessing(Mat img) {
	// cvtColor(src, dst, flag)
	// : converts the color space of src according to the flag (here BGR -> grayscale)
	cvtColor(img, imgGray, COLOR_BGR2GRAY);
	// GaussianBlur(src, dst, kernel_size, sigma_x, sigma_y, borderType)
	// : weights the center of the kernel most heavily and blurs the surrounding pixels
	GaussianBlur(imgGray, imgBlur, Size(3, 3), 3, 0);
	// Canny(src, dst, threshold1, threshold2)
	// : edge detection
	Canny(imgBlur, imgCanny, 25, 75);
	Mat kernel = getStructuringElement(MORPH_RECT, Size(3, 3));
	// Morphology
	// Erosion : erode(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : shrinks the white (foreground) pixels
	//erode(imgCanny, imgErode, kernel);
	// Dilation : dilate(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : expands the white (foreground) pixels
	dilate(imgCanny, imgDil, kernel);
	return imgDil;
}

vector<Point> getContours(Mat imgDil) {
	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;

	// findContours(image, contours, hierarchy, mode, method)
	// : contour detection extracts the outline coordinates of every object in the image
	findContours(imgDil, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
	vector<vector<Point>> conPoly(contours.size());
	vector<Rect> boundRect(contours.size());
	vector<Point> biggest;
	int maxArea = 0;

	for (int i = 0; i < contours.size(); i++) {
		int area = contourArea(contours[i]);
		cout << area << endl;

		string objectType;

		if (area > 1000) {
			float peri = arcLength(contours[i], true);
			// approxPolyDP(curve, approxCurve, epsilon, closed)
			// : approximates the contour with fewer vertices, giving an approximate polygon
			approxPolyDP(contours[i], conPoly[i], 0.02 * peri, true);

			if (area > maxArea && conPoly[i].size() == 4) {
				drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 5);
				biggest = { conPoly[i][0], conPoly[i][1], conPoly[i][2], conPoly[i][3] };
				maxArea = area;
			}
			// drawContours(image, contours, contourIdx, color, thickness, lineType)
			// : draws the detected contours on the image so they can be checked visually
			//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 2);
			//rectangle(imgOriginal, boundRect[i].tl(), boundRect[i].br(), Scalar(0, 255, 0), 5);
		}
	}
	return biggest;
}

void drawPoints(vector<Point> points, Scalar color) {
	for (int i = 0; i < points.size(); i++) {
		circle(imgOriginal, points[i], 10, color, FILLED);
		putText(imgOriginal, to_string(i), points[i], FONT_HERSHEY_PLAIN, 4, color, 4);
	}
}

int main() {
	string path = "Resources/paper.jpg";
	imgOriginal = imread(path);
	resize(imgOriginal, imgOriginal, Size(), 0.5, 0.5);

	// Preprocessing
	imgThre = preProcessing(imgOriginal);
	// Get contours - Biggest
	initialPoints = getContours(imgThre);
	drawPoints(initialPoints, Scalar(0, 0, 255));
	// Warp
	imshow("image", imgOriginal);
	imshow("image Dial", imgDil);

	waitKey(0);
}

 

💧 Reordering the corner points of the largest contour found by preprocessing and labeling them

#include <opencv2/opencv.hpp>		// umbrella header: every feature OpenCV provides
#include <opencv2/videoio.hpp>		// video capture and video file reading/writing
#include <opencv2/imgcodecs.hpp>	// image file reading and writing (imread, imwrite)
#include <opencv2/highgui.hpp>		// windows, UI widgets (sliders, buttons) and mouse handling
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

// #10. Document Scanner
Mat imgOriginal, imgGray, imgCanny, imgThre, imgBlur, imgErode, imgDil;
vector<Point> initialPoints, docPoints;

Mat preProcessing(Mat img) {
	// cvtColor(src, dst, flag)
	// : converts the color space of src according to the flag (here BGR -> grayscale)
	cvtColor(img, imgGray, COLOR_BGR2GRAY);
	// GaussianBlur(src, dst, kernel_size, sigma_x, sigma_y, borderType)
	// : weights the center of the kernel most heavily and blurs the surrounding pixels
	GaussianBlur(imgGray, imgBlur, Size(3, 3), 3, 0);
	// Canny(src, dst, threshold1, threshold2)
	// : edge detection
	Canny(imgBlur, imgCanny, 25, 75);
	Mat kernel = getStructuringElement(MORPH_RECT, Size(3, 3));
	// Morphology
	// Erosion : erode(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : shrinks the white (foreground) pixels
	//erode(imgCanny, imgErode, kernel);
	// Dilation : dilate(src, dst, kernel, anchor, iterations, borderType, borderValue)
	// : expands the white (foreground) pixels
	dilate(imgCanny, imgDil, kernel);
	return imgDil;
}

vector<Point> getContours(Mat imgDil) {
	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;

	// findContours(image, contours, hierarchy, mode, method)
	// : contour detection extracts the outline coordinates of every object in the image
	findContours(imgDil, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
	vector<vector<Point>> conPoly(contours.size());
	vector<Rect> boundRect(contours.size());
	vector<Point> biggest;
	int maxArea = 0;

	for (int i = 0; i < contours.size(); i++) {
		int area = contourArea(contours[i]);
		cout << area << endl;

		string objectType;

		if (area > 1000) {
			float peri = arcLength(contours[i], true);
			// approxPolyDP(curve, approxCurve, epsilon, closed)
			// : approximates the contour with fewer vertices, giving an approximate polygon
			approxPolyDP(contours[i], conPoly[i], 0.02 * peri, true);

			if (area > maxArea && conPoly[i].size() == 4) {
				drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 5);
				biggest = { conPoly[i][0], conPoly[i][1], conPoly[i][2], conPoly[i][3] };
				maxArea = area;
			}
			// drawContours(image, contours, contourIdx, color, thickness, lineType)
			// : draws the detected contours on the image so they can be checked visually
			//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 2);
			//rectangle(imgOriginal, boundRect[i].tl(), boundRect[i].br(), Scalar(0, 255, 0), 5);
		}
	}
	return biggest;
}

void drawPoints(vector<Point> points, Scalar color) {
	for (int i = 0; i < points.size(); i++) {
		circle(imgOriginal, points[i], 10, color, FILLED);
		putText(imgOriginal, to_string(i), points[i], FONT_HERSHEY_PLAIN, 4, color, 4);
	}
}

vector<Point> reorder(vector<Point> points) {
	vector<Point> newPoints;
	vector<int> sumPoints, subPoints;

	for (int i = 0; i < 4; i++) {
		sumPoints.push_back(points[i].x + points[i].y);
		subPoints.push_back(points[i].x - points[i].y);
	}

	newPoints.push_back(points[min_element(sumPoints.begin(), sumPoints.end()) - sumPoints.begin()]);	// 0
	newPoints.push_back(points[max_element(subPoints.begin(), subPoints.end()) - subPoints.begin()]);	// 1
	newPoints.push_back(points[min_element(subPoints.begin(), subPoints.end()) - subPoints.begin()]);	// 2
	newPoints.push_back(points[max_element(sumPoints.begin(), sumPoints.end()) - sumPoints.begin()]);	// 3
	
	return newPoints;
}


int main() {
	string path = "Resources/paper.jpg";
	imgOriginal = imread(path);
	resize(imgOriginal, imgOriginal, Size(), 0.5, 0.5);

	// Preprocessing
	imgThre = preProcessing(imgOriginal);
	// Get contours - Biggest
	initialPoints = getContours(imgThre);
	drawPoints(initialPoints, Scalar(0, 0, 255));
	docPoints = reorder(initialPoints);
	drawPoints(docPoints, Scalar(0, 255, 0));

	// Warp
	imshow("image", imgOriginal);
	imshow("image Dial", imgDil);

	waitKey(0);
}
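
The reorder() above puts the four corners into a fixed order (top-left, top-right, bottom-left, bottom-right) before warping: the top-left corner has the smallest x + y, the bottom-right the largest x + y, the top-right the largest x - y, and the bottom-left the smallest x - y. A minimal standalone sketch of the same idea, using made-up corner coordinates rather than the ones detected from paper.jpg:

#include <opencv2/opencv.hpp>
#include <algorithm>
#include <iostream>
#include <vector>
using namespace cv;
using namespace std;

int main() {
	// Four corners in arbitrary order (hypothetical values)
	vector<Point> pts = { {400, 60}, {30, 50}, {40, 500}, {420, 520} };

	vector<int> sums, subs;
	for (const Point& p : pts) {
		sums.push_back(p.x + p.y);   // smallest at top-left, largest at bottom-right
		subs.push_back(p.x - p.y);   // largest at top-right, smallest at bottom-left
	}

	vector<Point> ordered = {
		pts[min_element(sums.begin(), sums.end()) - sums.begin()],   // 0: top-left
		pts[max_element(subs.begin(), subs.end()) - subs.begin()],   // 1: top-right
		pts[min_element(subs.begin(), subs.end()) - subs.begin()],   // 2: bottom-left
		pts[max_element(sums.begin(), sums.end()) - sums.begin()]    // 3: bottom-right
	};

	for (const Point& p : ordered) cout << p << " ";
	cout << endl;  // prints [30, 50] [400, 60] [40, 500] [420, 520]
}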

 

💧 Flattening the document with a perspective transform

์ง์„ ์˜ ์„ฑ์งˆ๋งŒ ์œ ์ง€๊ฐ€ ๋˜๊ณ , ์„ ์˜ ํ‰ํ–‰์„ฑ์€ ์œ ์ง€๊ฐ€ ๋˜์ง€ ์•Š๋Š” ๋ณ€ํ™˜

์›๊ทผ๋ณ€ํ™˜์„ ๊ฑฐ์น˜๋ฉด ํ‰ํ–‰์„ฑ์€ ์œ ์ง€ ๋˜์ง€ ๋ชปํ•˜๊ณ  ํ•˜๋‚˜์˜ ์ ์—์„œ ๋งŒ๋‚˜๋Š” ๊ฒƒ 

4๊ฐœ์˜ Point์˜ Input๊ฐ’๊ณผ์ด๋™ํ•  output Point ๊ฐ€ ํ•„์š”

 

getPerspectiveTransform(src, dst, solveMethod)

 

๋ณ€ํ™˜ ํ–‰๋ ฌ์„ ๊ตฌํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” cv2.getPerspectiveTransform() ํ•จ์ˆ˜๊ฐ€ ํ•„

cv2.warpPerspective() ํ•จ์ˆ˜์— ๋ณ€ํ™˜ํ–‰๋ ฌ๊ฐ’์„ ์ ์šฉํ•˜์—ฌ ์ตœ์ข… ๊ฒฐ๊ณผ ์ด๋ฏธ์ง€๋ฅผ ์–ป์„ ์ˆ˜ ์žˆ์Œ


src: 4๊ฐœ์˜ ์›๋ณธ ์ขŒํ‘œ์ 
dst: 4๊ฐœ์˜ ๊ฒฐ๊ณผ ์ขŒํ‘œ์ 
retval: 3x3 ํˆฌ์‹œ ๋ณ€ํ™˜ ํ–‰๋ ฌ
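
For reference, a minimal sketch of just these two calls in isolation (the four source corners below are hypothetical; in the project they come from getContours() and reorder()):

#include <opencv2/opencv.hpp>
using namespace cv;

int main() {
	Mat img = imread("Resources/paper.jpg");
	float w = 420, h = 596;   // target size used later in this post

	// Source corners in the order top-left, top-right, bottom-left, bottom-right (made-up values)
	Point2f src[4] = { {30, 50}, {400, 60}, {40, 500}, {420, 520} };
	Point2f dst[4] = { {0.0f, 0.0f}, {w, 0.0f}, {0.0f, h}, {w, h} };

	Mat matrix = getPerspectiveTransform(src, dst);   // 3x3 perspective transform matrix
	Mat warped;
	warpPerspective(img, warped, matrix, Size(w, h)); // apply the matrix and resample

	imshow("warped", warped);
	waitKey(0);
}

The full program for this step is below.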

#include <opencv2/opencv.hpp>		// umbrella header: every feature OpenCV provides
#include <opencv2/videoio.hpp>		// video capture and video file reading/writing
#include <opencv2/imgcodecs.hpp>	// image file reading and writing (imread, imwrite)
#include <opencv2/highgui.hpp>		// windows, UI widgets (sliders, buttons) and mouse handling
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

// #10. Document Scanner
Mat imgOriginal, imgGray, imgBlur, imgCanny, imgThre, imgDil, imgErode, imgWarp, imgCrop;
vector<Point> initialPoints, docPoints;

float w = 420, h = 596;

Mat preProcessing(Mat img)
{
	cvtColor(img, imgGray, COLOR_BGR2GRAY);
	GaussianBlur(imgGray, imgBlur, Size(3, 3), 3, 0);
	Canny(imgBlur, imgCanny, 25, 75);
	Mat kernel = getStructuringElement(MORPH_RECT, Size(3, 3));
	dilate(imgCanny, imgDil, kernel);
	//erode(imgDil, imgErode, kernel);
	return imgDil;
}

vector<Point> getContours(Mat image) {

	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;

	findContours(image, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
	//drawContours(img, contours, -1, Scalar(255, 0, 255), 2);
	vector<vector<Point>> conPoly(contours.size());
	vector<Rect> boundRect(contours.size());

	vector<Point> biggest;
	int maxArea = 0;

	for (int i = 0; i < contours.size(); i++) {
		int area = contourArea(contours[i]);
		//cout << area << endl;

		string objectType;

		if (area > 1000) {
			float peri = arcLength(contours[i], true);
			// approxPolyDP(curve, approxCurve, epsilon, closed)
			// : approximates the contour with fewer vertices, giving an approximate polygon
			approxPolyDP(contours[i], conPoly[i], 0.02 * peri, true);

			if (area > maxArea && conPoly[i].size() == 4) {
				//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 5);
				biggest = { conPoly[i][0], conPoly[i][1], conPoly[i][2], conPoly[i][3] };
				maxArea = area;
			}
			// drawContours(image, contours, contourIdx, color, thickness, lineType)
			// : draws the detected contours on the image so they can be checked visually
			//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 2);
			//rectangle(imgOriginal, boundRect[i].tl(), boundRect[i].br(), Scalar(0, 255, 0), 5);
		}
	}
	return biggest;
}

void drawPoints(vector<Point> points, Scalar color)
{
	for (int i = 0; i < points.size(); i++)
	{
		circle(imgOriginal, points[i], 10, color, FILLED);
		putText(imgOriginal, to_string(i), points[i], FONT_HERSHEY_PLAIN, 4, color, 4);
	}
}

vector<Point> reorder(vector<Point> points)
{
	vector<Point> newPoints;
	vector<int>  sumPoints, subPoints;

	for (int i = 0; i < 4; i++)
	{
		sumPoints.push_back(points[i].x + points[i].y);
		subPoints.push_back(points[i].x - points[i].y);
	}

	newPoints.push_back(points[min_element(sumPoints.begin(), sumPoints.end()) - sumPoints.begin()]); // 0
	newPoints.push_back(points[max_element(subPoints.begin(), subPoints.end()) - subPoints.begin()]); //1
	newPoints.push_back(points[min_element(subPoints.begin(), subPoints.end()) - subPoints.begin()]); //2
	newPoints.push_back(points[max_element(sumPoints.begin(), sumPoints.end()) - sumPoints.begin()]); //3

	return newPoints;
}

Mat getWarp(Mat img, vector<Point> points, float w, float h)
{
	Point2f src[4] = { points[0],points[1],points[2],points[3] };
	Point2f dst[4] = { {0.0f,0.0f},{w,0.0f},{0.0f,h},{w,h} };

	// ์  4๊ฐœ์˜ ์ด๋™ ์ „, ์ด๋™ ํ›„ ์ขŒํ‘œ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด ํˆฌ์‹œ ๋ณ€ํ™˜ ํ–‰๋ ฌ์„ ๋ฐ˜ํ™˜ํ•˜๋Š” ํ•จ์ˆ˜
	Mat matrix = getPerspectiveTransform(src, dst);
	warpPerspective(img, imgWarp, matrix, Point(w, h));

	return imgWarp;
}


int main() {
	string path = "Resources/paper.jpg";
	imgOriginal = imread(path);
	resize(imgOriginal, imgOriginal, Size(), 0.5, 0.5);

	// Preprocessing
	imgThre = preProcessing(imgOriginal);
	// Get contours - Biggest
	initialPoints = getContours(imgThre);
	
	docPoints = reorder(initialPoints);
	//drawPoints(docPoints, Scalar(0, 255, 0));

	// Warp
	imgWarp = getWarp(imgOriginal, docPoints, w, h);

	imshow("image", imgOriginal);
	imshow("image Dial", imgThre);
	imshow("image Dial", imgWarp);

	waitKey(0);
}

 

💧 Extracting the final scan by cropping the warped document

#include <opencv2/opencv.hpp>		// umbrella header: every feature OpenCV provides
#include <opencv2/videoio.hpp>		// video capture and video file reading/writing
#include <opencv2/imgcodecs.hpp>	// image file reading and writing (imread, imwrite)
#include <opencv2/highgui.hpp>		// windows, UI widgets (sliders, buttons) and mouse handling
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

// #10. Document Scanner
Mat imgOriginal, imgGray, imgBlur, imgCanny, imgThre, imgDil, imgErode, imgWarp, imgCrop;
vector<Point> initialPoints, docPoints;

float w = 420, h = 596;

Mat preProcessing(Mat img)
{
	cvtColor(img, imgGray, COLOR_BGR2GRAY);
	GaussianBlur(imgGray, imgBlur, Size(3, 3), 3, 0);
	Canny(imgBlur, imgCanny, 25, 75);
	Mat kernel = getStructuringElement(MORPH_RECT, Size(3, 3));
	dilate(imgCanny, imgDil, kernel);
	//erode(imgDil, imgErode, kernel);
	return imgDil;
}

vector<Point> getContours(Mat image) {

	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;

	findContours(image, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
	//drawContours(img, contours, -1, Scalar(255, 0, 255), 2);
	vector<vector<Point>> conPoly(contours.size());
	vector<Rect> boundRect(contours.size());

	vector<Point> biggest;
	int maxArea = 0;

	for (int i = 0; i < contours.size(); i++) {
		int area = contourArea(contours[i]);
		//cout << area << endl;

		string objectType;

		if (area > 1000) {
			float peri = arcLength(contours[i], true);
			// approxPolyDP(curve, approxCurve, epsilon, closed)
			// : approximates the contour with fewer vertices, giving an approximate polygon
			approxPolyDP(contours[i], conPoly[i], 0.02 * peri, true);

			if (area > maxArea && conPoly[i].size() == 4) {
				//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 5);
				biggest = { conPoly[i][0], conPoly[i][1], conPoly[i][2], conPoly[i][3] };
				maxArea = area;
			}
			// drawContours(image, contours, contourIdx, color, thickness, lineType)
			// : draws the detected contours on the image so they can be checked visually
			//drawContours(imgOriginal, conPoly, i, Scalar(255, 0, 255), 2);
			//rectangle(imgOriginal, boundRect[i].tl(), boundRect[i].br(), Scalar(0, 255, 0), 5);
		}
	}
	return biggest;
}

void drawPoints(vector<Point> points, Scalar color)
{
	for (int i = 0; i < points.size(); i++)
	{
		circle(imgOriginal, points[i], 10, color, FILLED);
		putText(imgOriginal, to_string(i), points[i], FONT_HERSHEY_PLAIN, 4, color, 4);
	}
}

vector<Point> reorder(vector<Point> points)
{
	vector<Point> newPoints;
	vector<int>  sumPoints, subPoints;

	for (int i = 0; i < 4; i++)
	{
		sumPoints.push_back(points[i].x + points[i].y);
		subPoints.push_back(points[i].x - points[i].y);
	}

	newPoints.push_back(points[min_element(sumPoints.begin(), sumPoints.end()) - sumPoints.begin()]); // 0
	newPoints.push_back(points[max_element(subPoints.begin(), subPoints.end()) - subPoints.begin()]); //1
	newPoints.push_back(points[min_element(subPoints.begin(), subPoints.end()) - subPoints.begin()]); //2
	newPoints.push_back(points[max_element(sumPoints.begin(), sumPoints.end()) - sumPoints.begin()]); //3

	return newPoints;
}

Mat getWarp(Mat img, vector<Point> points, float w, float h)
{
	Point2f src[4] = { points[0],points[1],points[2],points[3] };
	Point2f dst[4] = { {0.0f,0.0f},{w,0.0f},{0.0f,h},{w,h} };

	// ์  4๊ฐœ์˜ ์ด๋™ ์ „, ์ด๋™ ํ›„ ์ขŒํ‘œ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด ํˆฌ์‹œ ๋ณ€ํ™˜ ํ–‰๋ ฌ์„ ๋ฐ˜ํ™˜ํ•˜๋Š” ํ•จ์ˆ˜
	Mat matrix = getPerspectiveTransform(src, dst);
	warpPerspective(img, imgWarp, matrix, Point(w, h));

	return imgWarp;
}


int main() {
	string path = "Resources/paper.jpg";
	imgOriginal = imread(path);
	resize(imgOriginal, imgOriginal, Size(), 0.5, 0.5);

	// Preprocessing - Step 1
	imgThre = preProcessing(imgOriginal);

	// Get Contours - Biggest  - Step 2
	initialPoints = getContours(imgThre);
	
	docPoints = reorder(initialPoints);
	//drawPoints(docPoints, Scalar(0, 255, 0));

	// Warp - Step 3 
	imgWarp = getWarp(imgOriginal, docPoints, w, h);

	//Crop - Step 4
	int cropVal = 5;
	Rect roi(cropVal, cropVal, w - (2 * cropVal), h - (2 * cropVal));
	imgCrop = imgWarp(roi);

	imshow("image", imgOriginal);
	//imshow("image Dial", imgThre);
	//imshow("image Dial", imgWarp);
	imshow("Image Crop", imgCrop);

	waitKey(0);
}

 
