#include "myOpenCV3.h"
#include <bitset>
#include <string>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <vector>
// One sub-iteration of the Guo-Hall thinning algorithm.
// img: binary CV_8UC1 image; any non-zero pixel is treated as foreground
//      (the caller normalizes to 0/1). Deleted pixels are zeroed in place.
// iteration: 0 for the first sub-iteration of a pass, 1 for the second.
void thinningIteration(cv::Mat& img, int iteration) {
	cv::Mat marker = cv::Mat::ones(img.size(), CV_8UC1); // 1 = keep, 0 = delete
	for (int y = 1; y < img.rows - 1; ++y) {
		for (int x = 1; x < img.cols - 1; ++x) {
			// 8-neighborhood, clockwise starting from the pixel above (p2..p9),
			// read as booleans so the logic below is precedence-safe and
			// independent of whether the image stores 0/1 or 0/255.
			const bool p2 = img.at<uchar>(y - 1, x) != 0;
			const bool p3 = img.at<uchar>(y - 1, x + 1) != 0;
			const bool p4 = img.at<uchar>(y, x + 1) != 0;
			const bool p5 = img.at<uchar>(y + 1, x + 1) != 0;
			const bool p6 = img.at<uchar>(y + 1, x) != 0;
			const bool p7 = img.at<uchar>(y + 1, x - 1) != 0;
			const bool p8 = img.at<uchar>(y, x - 1) != 0;
			const bool p9 = img.at<uchar>(y - 1, x - 1) != 0;
			// C: connectivity measure of the neighborhood (Guo-Hall).
			int C = ((!p2 && (p3 || p4)) ? 1 : 0) + ((!p4 && (p5 || p6)) ? 1 : 0) +
				((!p6 && (p7 || p8)) ? 1 : 0) + ((!p8 && (p9 || p2)) ? 1 : 0);
			// N1/N2: neighbor counts over the two pairings; the algorithm
			// uses their minimum.
			int N1 = ((p9 || p2) ? 1 : 0) + ((p3 || p4) ? 1 : 0) +
				((p5 || p6) ? 1 : 0) + ((p7 || p8) ? 1 : 0);
			int N2 = ((p2 || p3) ? 1 : 0) + ((p4 || p5) ? 1 : 0) +
				((p6 || p7) ? 1 : 0) + ((p8 || p9) ? 1 : 0);
			// BUG FIX: the original used `N1 < 3 ? N1 : N2`, which deviates
			// from the published Guo-Hall condition N = min(N1, N2).
			int N = std::min(N1, N2);
			const bool m = iteration == 0 ? ((p6 || p7 || !p9) && p8)
			                              : ((p2 || p3 || !p5) && p4);
			// Deletion condition (previously written with a fragile mix of
			// bitwise `&` and comparisons).
			if (C == 1 && N >= 2 && N <= 3 && !m)
				marker.at<uchar>(y, x) = 0;
		}
	}
	img &= marker; // erase all pixels flagged for deletion
}
// Iteratively applies Guo-Hall thinning until the skeleton stops changing.
// src: binary CV_8UC1 image with foreground = 255.
// dst: receives the 0/255 skeleton (same size/type as src).
void thinning(const cv::Mat& src, cv::Mat& dst) {
	dst = src.clone();
	dst /= 255; // work on 0/1 values as thinningIteration expects
	// (A cv::normalize into an unused local was removed here: dead code.)
	cv::Mat prev = cv::Mat::zeros(dst.size(), CV_8UC1);
	cv::Mat diff;
	do {
		thinningIteration(dst, 0);
		thinningIteration(dst, 1);
		cv::absdiff(dst, prev, diff);
		dst.copyTo(prev);
	} while (cv::countNonZero(diff) > 0); // stop once a full pass changes nothing
	dst *= 255; // scale back to 0/255
}
// Finds branch points of a thinned (skeleton) image: foreground pixels that
// have three or more foreground pixels in their 8-neighborhood.
// Returns the branch points as (x, y) = (column, row) coordinates.
std::vector<cv::Point> findBranchPoints(cv::Mat& img)
{
	std::vector<cv::Point> branchPoints;
	for (int y = 1; y < img.rows - 1; ++y)
	{
		for (int x = 1; x < img.cols - 1; ++x)
		{
			// Skip background pixels — only foreground pixels can be branch
			// points. BUG FIX: the original test was inverted (`== 255`
			// continue), so it examined only the background, contradicting
			// both its own comment and extractBifurcationPoints.
			if (img.at<uchar>(y, x) == 0) continue;
			// Count foreground pixels among the 8 neighbors.
			int count = 0;
			if (img.at<uchar>(y - 1, x - 1) > 0) ++count;
			if (img.at<uchar>(y - 1, x) > 0) ++count;
			if (img.at<uchar>(y - 1, x + 1) > 0) ++count;
			if (img.at<uchar>(y, x - 1) > 0) ++count;
			if (img.at<uchar>(y, x + 1) > 0) ++count;
			if (img.at<uchar>(y + 1, x - 1) > 0) ++count;
			if (img.at<uchar>(y + 1, x) > 0) ++count;
			if (img.at<uchar>(y + 1, x + 1) > 0) ++count;
			// Three or more foreground neighbors marks a bifurcation.
			if (count >= 3)
				branchPoints.push_back(cv::Point(x, y));
		}
	}
	return branchPoints;
}
// Scans a thinned image (foreground = 255) and collects every skeleton pixel
// that has at least three foreground pixels in its 8-neighborhood.
// Returned points use OpenCV convention: (x, y) = (column, row).
std::vector<cv::Point> extractBifurcationPoints(cv::Mat& thinnedImage) {
	// Clockwise 8-neighborhood offsets (row delta, column delta).
	static const int dRow[8] = { -1, -1, 0, 1, 1, 1, 0, -1 };
	static const int dCol[8] = { 0, 1, 1, 1, 0, -1, -1, -1 };
	std::vector<cv::Point> bifurcationPoints;
	for (int row = 1; row + 1 < thinnedImage.rows; ++row) {
		for (int col = 1; col + 1 < thinnedImage.cols; ++col) {
			// Only foreground (skeleton) pixels can be bifurcations.
			if (thinnedImage.at<uchar>(row, col) != 255)
				continue;
			int onNeighbors = 0;
			for (int k = 0; k < 8; ++k) {
				if (thinnedImage.at<uchar>(row + dRow[k], col + dCol[k]) == 255)
					++onNeighbors;
			}
			if (onNeighbors >= 3)
				bifurcationPoints.emplace_back(col, row);
		}
	}
	return bifurcationPoints;
}
// Removes connected components whose contour area is <= areaThreshold by
// filling them with black, and dumps every contour's area to "area.txt"
// for debugging. Returns the cleaned copy; src itself is left untouched.
cv::Mat removeSmallAreaImage(const cv::Mat& src, const double areaThreshold)
{
	cv::Mat dst = src.clone();
	std::vector<std::vector<cv::Point>> contours;
	// Outer contours only — holes are irrelevant for the area filter.
	cv::findContours(src, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
	std::ofstream outFile("area.txt");
	// Erase contours whose area is at or below the threshold.
	// (Loop index changed to size_t to fix the signed/unsigned comparison.)
	for (std::size_t i = 0; i < contours.size(); i++) {
		double area = cv::contourArea(contours[i]);
		outFile << i << ": = " << area << std::endl;
		if (area <= areaThreshold) {
			// drawContours takes an int contour index.
			cv::drawContours(dst, contours, static_cast<int>(i), cv::Scalar(0), cv::FILLED);
		}
	}
	outFile.close();
	return dst;
}
// Returns the photographic negative of src (each pixel becomes 255 - value).
cv::Mat inverseImage(const cv::Mat& src)
{
	cv::Mat negative = 255 - src;
	return negative;
}
// Returns the per-pixel maximum of img1 and img2 (same size/type assumed).
cv::Mat maximumImage(const cv::Mat& img1, const cv::Mat& img2)
{
	cv::Mat maxImage;
	// cv::max allocates/overwrites the output itself, so the original
	// clone() of img1 was wasted work.
	cv::max(img1, img2, maxImage);
	return maxImage;
}
// Binarizes the merged Gabor response, cleans it up (border padding,
// dilation, small-component removal), thins it to a skeleton, detects
// bifurcation points, and writes every intermediate image into dirName.
// ori: grayscale image used as the background of the annotated output.
// src: merged/normalized Gabor response (8-bit grayscale).
// (Unused locals removedMargeBinaryImage2, kernel1 and invImage were removed.)
void BinaryProcessing(const cv::Mat& ori, const cv::Mat& src, std::string dirName)
{
	// Fixed-threshold binarization.
	cv::Mat margeBinaryImage, removedMargeBinaryImage;
	cv::threshold(src, margeBinaryImage, 55, 255, cv::THRESH_BINARY);
	// Replace a 30-pixel border with black padding to suppress edge artifacts.
	int borderPixel = 30;
	cv::Rect roi(borderPixel, borderPixel, margeBinaryImage.cols - borderPixel * 2, margeBinaryImage.rows - borderPixel * 2);
	cv::Mat ROIImage = margeBinaryImage(roi).clone();
	cv::Mat paddingImage;
	cv::copyMakeBorder(ROIImage, paddingImage, borderPixel, borderPixel, borderPixel, borderPixel, cv::BORDER_CONSTANT, cv::Scalar(0));
	// Dilation (5x5 kernel, 2 iterations) to bridge small gaps.
	cv::Mat morphologyImage;
	dilate(paddingImage, morphologyImage, cv::Mat::ones(5, 5, CV_8U), cv::Point(-1, -1), 2);
	// Drop connected components at or below the area threshold.
	double areaThreshold = 1000.0;
	removedMargeBinaryImage = removeSmallAreaImage(morphologyImage, areaThreshold);
	// Skeletonize, then re-dilate the skeleton for visualization.
	cv::Mat thinnedImage;
	thinning(removedMargeBinaryImage, thinnedImage);
	cv::Mat outputImage;
	cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
	cv::dilate(thinnedImage, outputImage, kernel, cv::Point(-1, -1), 2);
	// Persist all intermediates (file names kept as-is for compatibility).
	cv::imwrite(dirName + "/MargeTH60BinaryImage.png", margeBinaryImage);
	cv::imwrite(dirName + "/RemovedSmallAreaMargeBinaryImage_" + std::to_string((int)areaThreshold) + ".png", removedMargeBinaryImage);
	cv::imwrite(dirName + "/ROIImage.png", ROIImage);
	cv::imwrite(dirName + "/paddingImage.png", paddingImage);
	cv::imwrite(dirName + "/Morpho.png", morphologyImage);
	cv::imwrite(dirName + "/Thinnig.png", thinnedImage);
	cv::imwrite(dirName + "/DilatedThinning.png", outputImage);
	// Draw detected bifurcation points on top of the original image.
	cv::Mat circleImage;
	ori.convertTo(circleImage, CV_8U);
	cv::Mat circleImageRGB;
	cv::cvtColor(circleImage, circleImageRGB, cv::COLOR_GRAY2BGR); // grayscale -> BGR
	std::vector<cv::Point> branchPoints = extractBifurcationPoints(thinnedImage);
	for (const auto& point : branchPoints) {
		cv::circle(circleImageRGB, point, 10, cv::Scalar(0, 0, 255), 2); // red circle, radius 10
	}
	cv::imwrite(dirName + "/Circle.png", circleImageRGB);
}
// Linearly rescales the input image so its full value range maps to
// [0, limit], converting to 8-bit.
// src16U: input image (min/max taken over all pixels).
// dst8U:  output CV_8U image.
// limit:  upper bound of the output range (e.g. 255.0).
// dirName: unused; kept for interface compatibility with callers.
void WindowProcessing(const cv::Mat& src16U, cv::Mat& dst8U, double limit, std::string dirName)
{
	double amin, amax;
	cv::minMaxLoc(src16U, &amin, &amax);
	// BUG FIX: guard against a flat image (amax == amin), which previously
	// caused a division by zero; a constant image maps to all zeros.
	double range = amax - amin;
	double scale = range > 0.0 ? limit / range : 0.0;
	src16U.convertTo(dst8U, CV_8U, scale, -amin * scale);
}
// Applies a bank of Gabor filters at `division` evenly spaced orientations
// over 180 degrees, merges the responses with a per-pixel maximum,
// normalizes the merged image to 8-bit, saves all intermediates into
// dirName, and passes the result on to BinaryProcessing.
// ori: image forwarded to BinaryProcessing for annotation.
// src: preprocessed (blurred/inverted) 8-bit input image.
void GaborProcessing(const cv::Mat& ori, cv::Mat& src, std::string dirName)
{
	int division = 16;   // number of filter orientations over 180 degrees
	int ksize = 51;      // Gabor kernel size in pixels
	double sigma = 18;   // std-dev of the Gaussian envelope
	double theta;        // orientation in radians, set per iteration
	double lambda = 42;  // wavelength of the sinusoidal factor
	double gamma = 1;    // spatial aspect ratio
	double psi = 0;      // phase offset
	double angle = 0;    // orientation in degrees
	cv::Mat src64F;
	src.convertTo(src64F, CV_64F);
	cv::Mat margeImage = cv::Mat::zeros(src64F.size(), CV_64F);
	cv::Mat margeNormImage;
	// BUG FIX: the step was previously the *integer* expression 180/division
	// (= 11 for division = 16), so every orientation after the first drifted
	// from its intended value; 180.0/division keeps the spacing exact.
	for (int i = 0; i < division; i++, angle += (180.0 / division))
	{
		theta = angle * CV_PI / 180.0;
		cv::Mat kern = cv::getGaborKernel(cv::Size(ksize, ksize), sigma, theta, lambda, gamma, psi, CV_64F);
		kern /= cv::sum(kern)[0]; // normalize the kernel to unit sum
		cv::Mat gaborImage;
		cv::filter2D(src64F, gaborImage, CV_64F, kern);
		double gaborMinVal, gaborMaxVal;
		cv::minMaxLoc(gaborImage, &gaborMinVal, &gaborMaxVal);
		// Merge across orientations by per-pixel maximum.
		margeImage = maximumImage(gaborImage, margeImage);
		// Rescale the kernel to [0, 255] for visualization.
		double kernMinVal, kernMaxVal;
		cv::minMaxLoc(kern, &kernMinVal, &kernMaxVal);
		kern = 255 * (kern - kernMinVal) / (kernMaxVal - kernMinVal);
		// (Removed dead code: a convertScaleAbs that read an uninitialized
		// Mat, an unused CV_8U kernel copy, and unused max-tracking locals.)
		std::string fileStr = std::to_string(i) + "_over_" + std::to_string(division) + "pi";
		std::cout << fileStr << ":min = " << gaborMinVal << " :max = " << gaborMaxVal << std::endl;
		cv::imwrite(dirName + "/Gabor_" + fileStr + ".png", gaborImage);
		cv::imwrite(dirName + "/Kernel_" + fileStr + ".png", kern);
	}
	cv::normalize(margeImage, margeNormImage, 0, 255, cv::NORM_MINMAX, CV_8U);
	cv::imwrite(dirName + "/Marge.png", margeImage);
	cv::imwrite(dirName + "/MargeNorm.png", margeNormImage);
	BinaryProcessing(ori, margeNormImage, dirName);
}
// Pipeline driver: reads the input TIFF, window-levels it to 8-bit, applies
// CLAHE + Gaussian smoothing + inversion, runs the Gabor/skeleton pipeline,
// and saves all stage outputs to a fresh result directory.
int main() {
	std::string inputFileName = "trimmed.tif";
	// Output directory setup.
	std::string baseDir = "result_output/";
	std::string dirBaseName = "th50_border30_kernel5_ite2_area1000";
	std::string dirName = baseDir + dirBaseName;
	// Create the base output directory if it does not exist yet.
	if (!std::filesystem::exists(baseDir)) {
		std::filesystem::create_directory(baseDir);
	}
	// If the target directory already exists, append a counter until free.
	int dirCount = 2;
	while (std::filesystem::exists(dirName)) {
		dirName = baseDir + dirBaseName + std::to_string(dirCount);
		++dirCount;
	}
	std::filesystem::create_directory(dirName);
	cv::Mat image = cv::imread(inputFileName, cv::IMREAD_ANYDEPTH);
	// BUG FIX: abort early if the input cannot be read — the pipeline below
	// would otherwise operate on an empty Mat and crash.
	if (image.empty()) {
		std::cerr << "Failed to read input image: " << inputFileName << std::endl;
		return 1;
	}
	// Window-level the raw image into 8 bits.
	cv::Mat windowedImage;
	WindowProcessing(image, windowedImage, 255.0, dirName);
	// Histogram equalization with CLAHE.
	cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(10.0, cv::Size(16, 16));
	cv::Mat claheImage;
	clahe->apply(windowedImage, claheImage);
	// Fixed-threshold binarization of the CLAHE image (saved for reference).
	cv::Mat claheOtsuBinaryImage;
	cv::threshold(claheImage, claheOtsuBinaryImage, 128, 255, cv::THRESH_BINARY);
	// Noise reduction.
	cv::Mat gaussianBlurImage;
	cv::GaussianBlur(claheImage, gaussianBlurImage, cv::Size(19, 19), 9, 0);
	cv::Mat inversedClaheImage = inverseImage(gaussianBlurImage);
	// Gabor filter bank and all downstream processing.
	GaborProcessing(windowedImage, inversedClaheImage, dirName);
	cv::imwrite(dirName + "/Original.tif", image);
	cv::imwrite(dirName + "/WindowedImage.png", windowedImage);
	cv::imwrite(dirName + "/CLAHEGaussian.png", gaussianBlurImage);
	cv::imwrite(dirName + "/CLAHE.png", claheImage);
	cv::imwrite(dirName + "/CLAHEGaussianInversion.png", inversedClaheImage);
	cv::imwrite(dirName + "/CLAHEOtsu.png", claheOtsuBinaryImage);
	return 0;
}