diff --git a/Features/DeepLearning/Tools/Converter_ckpt_To_PB b/Features/DeepLearning/Tools/Converter_ckpt_To_PB index 2af50db..f53926e 100644 --- a/Features/DeepLearning/Tools/Converter_ckpt_To_PB +++ b/Features/DeepLearning/Tools/Converter_ckpt_To_PB @@ -1,17 +1,16 @@ import tensorflow as tf from tensorflow.python.framework import dtypes -from PIL import Image import numpy as np from tensorflow.python.tools import freeze_graph from tensorflow.python.tools import optimize_for_inference_lib def freeze_from_checkpoint(): # freeze graph - path = tf.train.latest_checkpoint(r"D:\kei2\Study\Tongue\TongueColorAnalysis\tang_model\\") - input_graph_path = "D:/resultAREinProcess10_gpu_checkpoint/graph_node.pbtxt" # the pbtxt path + path = tf.train.latest_checkpoint(r"D:\Result_RE_Revenge101_checkpoint\\") + input_graph_path = r"D:\Result_RE_Revenge101_checkpoint\graph_node.pbtxt" # the pbtxt path output_nodes = "generator1/decoder_1/Tanh" - restore_op = "save/restore_all" - filename_tensor = "save/Const:0" - output_name = "D:/resultAREinProcess10_gpu_checkpoint/AREinProcess2_step8100.pb" # where you want to export your freezed model + restore_op = "save/restore_all" + filename_tensor = "save/Const:0" + output_name = r"D:\Result_RE_Revenge101_checkpoint\pruning101_step11999.pb" # where you want to export your freezed model freeze_graph.freeze_graph(input_graph_path, "", False, path, output_nodes, restore_op, filename_tensor, output_name, True, "") @@ -67,8 +66,8 @@ if __name__ == '__main__': freeze_from_checkpoint() - # optimize_frozen_file() - # + optimize_frozen_file() + # graph = load_graph() # x = graph.get_tensor_by_name("import/input_image:0") # diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs index 2ea2bac..17cf46d 100644 ---
a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs +++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs @@ -45,13 +45,13 @@ private void button_ConnectTIAS_Click(object sender, EventArgs e) { // GetImage() - var path_oriImg = @"data_lab\h\Shot0001.bmp"; - var path_calibCsv = @"data_lab\h\Calib.csv"; + var path_oriImg = @"data_lab\1\Shot0001.bmp"; + var path_calibCsv = @"data_lab\1\Calib.csv"; var path_colorMatrixXYZ = "xyz.txt"; using (Mat mat_oriImg = Cv2.ImRead(path_oriImg, ImreadModes.Color)) { // Process_DeepTIAS() - var path_mask = @"data_lab\h\Shot0001_mask.bmp"; + var path_mask = @"data_lab\1\Shot0001_mask.bmp"; Mat mat_finalMask = Cv2.ImRead(path_mask, ImreadModes.Grayscale); Cv2.Threshold(mat_finalMask, mat_finalMask, 128, 255, ThresholdTypes.Binary); @@ -133,15 +133,15 @@ var p_right = mat_nonZeroCoordinates.At(list_X.IndexOf(list_X.Max())); // 舌尖領域を示すy座標を取得(割合を今回は決め打ち) - var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8)); - var p_apex_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Min()); - var p_apex_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Max()); + //var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8)); + //var p_apex_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Min()); + //var p_apex_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Max()); // 表示してみる //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1); //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1); - Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1); - Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1); + //Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1); + //Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1); //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1); //Cv2.Circle(mat_dst, p_apex_right, 20, new 
Scalar(255, 255, 0), -1); @@ -230,15 +230,31 @@ var p_bottomToCoG_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_bottomToCoG).Min()); var p_bottomToCoG_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_bottomToCoG).Max()); var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG); - // 直線フィッティング - var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.0); - var lefty = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1); - var righty = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1); + + var lefty = new OpenCvSharp.Point(); + var righty = new OpenCvSharp.Point(); + if (p_topToCoG_center.X == p_bottomToCoG_center.X) + { + lefty.X = p_topToCoG_center.X; + lefty.Y = 0; + righty.X = p_topToCoG_center.X; + righty.Y = mat_finalMask.Rows - 1; + } + else + { + // 直線フィッティング + var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.01); + lefty.X = 0; + lefty.Y = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1); + righty.X = mat_finalMask.Cols - 1; + righty.Y = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1); + + } // 直線上の輪郭点 var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0); var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0); var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0); - Cv2.Line(mat_centerline, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), 3); + Cv2.Line(mat_centerline, righty, lefty, 3); Cv2.DrawContours(mat_contour, contours, 0, 3); Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and); Cv2.FindNonZero(mat_and, mat_and); @@ -256,21 +272,29 @@ } var p_bottom_3 = new OpenCvSharp.Point(bottom_x_3, bottom_y_3); + // 舌尖領域を示すy座標を取得(割合を今回は決め打ち) + var y_top_avg_3 = (p_left_2.Y + p_right_2.Y) / 2.0; + var y_apex_3 = (int)(y_top_avg_3 +
((p_bottom_3.Y - y_top_avg_3) * 0.57)); + var p_apex_left_3 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_3).Min()); + var p_apex_right_3 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_3).Max()); + //Cv2.Circle(mat_dst, p_topToCoG_left, 20, new Scalar(0, 100, 255), -1); //Cv2.Circle(mat_dst, p_topToCoG_right, 20, new Scalar(0, 100, 255), -1); - //Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1); + Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1); //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1); //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1); - //Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1); - //Cv2.Line(mat_dst, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), new Scalar(0, 100, 255)); //近似直線 - //Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1); + Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1); + Cv2.Line(mat_dst, lefty, righty, new Scalar(0, 100, 255)); //近似直線 + //Cv2.Circle(mat_dst, p_apex_left_3, 20, new Scalar(0, 0, 255), -1); + //Cv2.Circle(mat_dst, p_apex_right_3, 20, new Scalar(0, 0, 255), -1); + Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1); // DEBUG Cv2.NamedWindow("dst", WindowMode.KeepRatio ^ WindowMode.AutoSize); Cv2.ImShow("dst", mat_dst.Resize(new OpenCvSharp.Size((int)mat_dst.Width * 0.5, (int)mat_dst.Height * 0.5))); // 出力 - li_dst = new List { p_left_2, p_apex_left, p_bottom, p_apex_right, p_right_2 }; + li_dst = new List { p_left_2, p_apex_left_3, p_bottom_3, p_apex_right_3, p_right_2 }; //li_dst = new List { p_left_2, p_apex_left_2, p_bottom_2, p_apex_right_2, p_right_2 }; // 破棄 diff --git a/Main/Tongue extraction/ColorExtractor.cs b/Main/Tongue extraction/ColorExtractor.cs index 589729c..e1f3646 100644 --- a/Main/Tongue extraction/ColorExtractor.cs +++ b/Main/Tongue extraction/ColorExtractor.cs @@ -215,14 +215,30 
@@ var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG); // 直線フィッティング - var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.0); - var lefty = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1); - var righty = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1); + var lefty = new OpenCvSharp.Point(); + var righty = new OpenCvSharp.Point(); + if (p_topToCoG_center.X == p_bottomToCoG_center.X) + { + // 傾き0の場合の例外処理 + lefty.X = p_topToCoG_center.X; + lefty.Y = 0; + righty.X = p_topToCoG_center.X; + righty.Y = mat_finalMask.Rows - 1; + } + else + { + var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.01); + lefty.X = 0; + lefty.Y = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1); + righty.X = mat_finalMask.Cols - 1; + righty.Y = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1); + + } // 直線上の輪郭点 var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0); var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0); var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0); - Cv2.Line(mat_centerline, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), 3); + Cv2.Line(mat_centerline, lefty, righty, 3); Cv2.DrawContours(mat_contour, sortedContour, 0, 3); Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and); Cv2.FindNonZero(mat_and, mat_and); @@ -252,7 +268,6 @@ //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1); //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1); //Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1); - //Cv2.Line(mat_dst, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), new Scalar(0, 100, 255)); //近似直線 //Cv2.Circle(mat_dst, p_bottom_3, 20, new 
Scalar(0, 0, 255), -1); // 破棄 diff --git a/Main/Tongue extraction/Form1.cs b/Main/Tongue extraction/Form1.cs index c4455cc..8e9cfed 100644 --- a/Main/Tongue extraction/Form1.cs +++ b/Main/Tongue extraction/Form1.cs @@ -288,7 +288,7 @@ private static string DownloadDefaultModel(string dir) { - var modelFile = Path.Combine(dir, "AREinProcess2_step8100.pb"); + var modelFile = Path.Combine(dir, "pruning101_step11999.pb"); return modelFile; } @@ -859,7 +859,7 @@ using (MemoryStream ms = new MemoryStream()) { // ディレクトリglobの取得 - var glob_dir = Directory.GetDirectories(@"D:\kei2\Study\Tongue\DentalDiagnosisDataAnalysis\Data\NewDataset\Images\Proc"); + var glob_dir = Directory.GetDirectories(@"data\errors"); for (int a = 0; a < glob_dir.Length; a++) {