diff --git a/.gitignore b/.gitignore
index 617cdc1..7373aa2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -113,3 +113,5 @@
Tongue extraction_cropresizemethod/Tongue extraction/obj/x64/Release/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
Tongue extraction_cropresizemethod/Tongue extraction/obj/x64/Release/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
DeepTongue_feature_LabColor/feature_labColor/packages/
+Features/DeepTongue_feature_LabColor/feature_labColor/packages/
+Main/packages/
diff --git a/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln b/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln
deleted file mode 100644
index ad5bbe3..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln
+++ /dev/null
@@ -1,31 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Version 16
-VisualStudioVersion = 16.0.30011.22
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "deepTIAS_feature_labColor", "newcamera_deeplearning\deepTIAS_feature_labColor.csproj", "{9B9D9F04-F367-4B3A-A842-51D01B42539B}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Any CPU = Debug|Any CPU
- Debug|x64 = Debug|x64
- Release|Any CPU = Release|Any CPU
- Release|x64 = Release|x64
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|x64.ActiveCfg = Debug|x64
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|x64.Build.0 = Debug|x64
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|Any CPU.Build.0 = Release|Any CPU
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|x64.ActiveCfg = Release|x64
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|x64.Build.0 = Release|x64
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
- GlobalSection(ExtensibilityGlobals) = postSolution
- SolutionGuid = {0887EA03-2F07-4BB6-B5FD-1656497D53DB}
- EndGlobalSection
-EndGlobal
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs
deleted file mode 100644
index ccc69e5..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs
+++ /dev/null
@@ -1,553 +0,0 @@
-namespace OperateCamera
-{
- partial class Form_TIASAutomaticShootingSystem
- {
- /// <summary>
- /// Required designer variable.
- /// </summary>
- private System.ComponentModel.IContainer components = null;
-
- /// <summary>
- /// Clean up any resources being used.
- /// </summary>
- /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
- protected override void Dispose(bool disposing)
- {
- if (disposing && (components != null))
- {
- components.Dispose();
- }
- base.Dispose(disposing);
- }
-
- #region Windows Form Designer generated code
-
- /// <summary>
- /// Required method for Designer support - do not modify
- /// the contents of this method with the code editor.
- /// </summary>
- private void InitializeComponent()
- {
- this.components = new System.ComponentModel.Container();
- this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
- this.textBox5 = new System.Windows.Forms.TextBox();
- this.textBox1 = new System.Windows.Forms.TextBox();
- this.panel7 = new System.Windows.Forms.Panel();
- this.pictureBox4 = new System.Windows.Forms.PictureBox();
- this.panel6 = new System.Windows.Forms.Panel();
- this.pictureBox3 = new System.Windows.Forms.PictureBox();
- this.panel5 = new System.Windows.Forms.Panel();
- this.pictureBox2 = new System.Windows.Forms.PictureBox();
- this.label10 = new System.Windows.Forms.Label();
- this.label9 = new System.Windows.Forms.Label();
- this.panel3 = new System.Windows.Forms.Panel();
- this.ShootingGuideBox = new System.Windows.Forms.TextBox();
- this.label1 = new System.Windows.Forms.Label();
- this.panel1 = new System.Windows.Forms.Panel();
- this.button_ConnectTIAS = new System.Windows.Forms.Button();
- this.label7 = new System.Windows.Forms.Label();
- this.label8 = new System.Windows.Forms.Label();
- this.textBox2 = new System.Windows.Forms.TextBox();
- this.textBox3 = new System.Windows.Forms.TextBox();
- this.textBox4 = new System.Windows.Forms.TextBox();
- this.textBox6 = new System.Windows.Forms.TextBox();
- this.textBox7 = new System.Windows.Forms.TextBox();
- this.textBox8 = new System.Windows.Forms.TextBox();
- this.panel2 = new System.Windows.Forms.Panel();
- this.GuideBox = new System.Windows.Forms.TextBox();
- this.label11 = new System.Windows.Forms.Label();
- this.label12 = new System.Windows.Forms.Label();
- this.panel4 = new System.Windows.Forms.Panel();
- this.pictureBox1 = new System.Windows.Forms.PictureBox();
- this.serialPort1 = new System.IO.Ports.SerialPort(this.components);
- this.button1 = new System.Windows.Forms.Button();
- this.tableLayoutPanel1.SuspendLayout();
- this.panel7.SuspendLayout();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox4)).BeginInit();
- this.panel6.SuspendLayout();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox3)).BeginInit();
- this.panel5.SuspendLayout();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).BeginInit();
- this.panel3.SuspendLayout();
- this.panel1.SuspendLayout();
- this.panel2.SuspendLayout();
- this.panel4.SuspendLayout();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
- this.SuspendLayout();
- //
- // tableLayoutPanel1
- //
- this.tableLayoutPanel1.ColumnCount = 4;
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.Controls.Add(this.textBox5, 1, 9);
- this.tableLayoutPanel1.Controls.Add(this.textBox1, 0, 9);
- this.tableLayoutPanel1.Controls.Add(this.panel7, 3, 8);
- this.tableLayoutPanel1.Controls.Add(this.panel6, 2, 8);
- this.tableLayoutPanel1.Controls.Add(this.panel5, 3, 2);
- this.tableLayoutPanel1.Controls.Add(this.label10, 3, 1);
- this.tableLayoutPanel1.Controls.Add(this.label9, 2, 1);
- this.tableLayoutPanel1.Controls.Add(this.panel3, 0, 14);
- this.tableLayoutPanel1.Controls.Add(this.label1, 1, 0);
- this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 4);
- this.tableLayoutPanel1.Controls.Add(this.label7, 0, 7);
- this.tableLayoutPanel1.Controls.Add(this.label8, 1, 7);
- this.tableLayoutPanel1.Controls.Add(this.textBox2, 0, 10);
- this.tableLayoutPanel1.Controls.Add(this.textBox3, 0, 11);
- this.tableLayoutPanel1.Controls.Add(this.textBox4, 0, 12);
- this.tableLayoutPanel1.Controls.Add(this.textBox6, 1, 10);
- this.tableLayoutPanel1.Controls.Add(this.textBox7, 1, 11);
- this.tableLayoutPanel1.Controls.Add(this.textBox8, 1, 12);
- this.tableLayoutPanel1.Controls.Add(this.panel2, 0, 13);
- this.tableLayoutPanel1.Controls.Add(this.label11, 2, 7);
- this.tableLayoutPanel1.Controls.Add(this.label12, 3, 7);
- this.tableLayoutPanel1.Controls.Add(this.panel4, 2, 2);
- this.tableLayoutPanel1.Controls.Add(this.button1, 1, 2);
- this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
- this.tableLayoutPanel1.Name = "tableLayoutPanel1";
- this.tableLayoutPanel1.RowCount = 15;
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
- this.tableLayoutPanel1.Size = new System.Drawing.Size(834, 561);
- this.tableLayoutPanel1.TabIndex = 0;
- this.tableLayoutPanel1.Paint += new System.Windows.Forms.PaintEventHandler(this.tableLayoutPanel1_Paint);
- //
- // textBox5
- //
- this.textBox5.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox5.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox5.Location = new System.Drawing.Point(211, 336);
- this.textBox5.Multiline = true;
- this.textBox5.Name = "textBox5";
- this.textBox5.Size = new System.Drawing.Size(202, 31);
- this.textBox5.TabIndex = 33;
- this.textBox5.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox5.TextChanged += new System.EventHandler(this.textBox5_TextChanged);
- //
- // textBox1
- //
- this.textBox1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox1.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox1.Location = new System.Drawing.Point(3, 336);
- this.textBox1.Multiline = true;
- this.textBox1.Name = "textBox1";
- this.textBox1.Size = new System.Drawing.Size(202, 31);
- this.textBox1.TabIndex = 32;
- this.textBox1.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox1.TextChanged += new System.EventHandler(this.textBox1_TextChanged);
- //
- // panel7
- //
- this.panel7.Controls.Add(this.pictureBox4);
- this.panel7.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel7.Location = new System.Drawing.Point(627, 299);
- this.panel7.Name = "panel7";
- this.tableLayoutPanel1.SetRowSpan(this.panel7, 5);
- this.panel7.Size = new System.Drawing.Size(204, 179);
- this.panel7.TabIndex = 31;
- this.panel7.Paint += new System.Windows.Forms.PaintEventHandler(this.panel7_Paint);
- //
- // pictureBox4
- //
- this.pictureBox4.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
- this.pictureBox4.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox4.Location = new System.Drawing.Point(0, 0);
- this.pictureBox4.Name = "pictureBox4";
- this.pictureBox4.Size = new System.Drawing.Size(204, 179);
- this.pictureBox4.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox4.TabIndex = 2;
- this.pictureBox4.TabStop = false;
- this.pictureBox4.Click += new System.EventHandler(this.pictureBox4_Click);
- //
- // panel6
- //
- this.panel6.Controls.Add(this.pictureBox3);
- this.panel6.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel6.Location = new System.Drawing.Point(419, 299);
- this.panel6.Name = "panel6";
- this.tableLayoutPanel1.SetRowSpan(this.panel6, 5);
- this.panel6.Size = new System.Drawing.Size(202, 179);
- this.panel6.TabIndex = 30;
- this.panel6.Paint += new System.Windows.Forms.PaintEventHandler(this.panel6_Paint);
- //
- // pictureBox3
- //
- this.pictureBox3.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
- this.pictureBox3.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox3.Location = new System.Drawing.Point(0, 0);
- this.pictureBox3.Name = "pictureBox3";
- this.pictureBox3.Size = new System.Drawing.Size(202, 179);
- this.pictureBox3.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox3.TabIndex = 1;
- this.pictureBox3.TabStop = false;
- this.pictureBox3.Click += new System.EventHandler(this.pictureBox3_Click);
- //
- // panel5
- //
- this.panel5.Controls.Add(this.pictureBox2);
- this.panel5.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel5.Location = new System.Drawing.Point(627, 77);
- this.panel5.Name = "panel5";
- this.tableLayoutPanel1.SetRowSpan(this.panel5, 5);
- this.panel5.Size = new System.Drawing.Size(204, 179);
- this.panel5.TabIndex = 29;
- this.panel5.Paint += new System.Windows.Forms.PaintEventHandler(this.panel5_Paint);
- //
- // pictureBox2
- //
- this.pictureBox2.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
- this.pictureBox2.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox2.Location = new System.Drawing.Point(0, 0);
- this.pictureBox2.Name = "pictureBox2";
- this.pictureBox2.Size = new System.Drawing.Size(204, 179);
- this.pictureBox2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox2.TabIndex = 0;
- this.pictureBox2.TabStop = false;
- this.pictureBox2.Click += new System.EventHandler(this.pictureBox2_Click);
- //
- // label10
- //
- this.label10.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label10.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.label10.Location = new System.Drawing.Point(627, 37);
- this.label10.Name = "label10";
- this.label10.Size = new System.Drawing.Size(204, 37);
- this.label10.TabIndex = 25;
- this.label10.Text = "Detection";
- this.label10.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label10.Click += new System.EventHandler(this.label10_Click);
- //
- // label9
- //
- this.label9.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label9.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.label9.Location = new System.Drawing.Point(419, 37);
- this.label9.Name = "label9";
- this.label9.Size = new System.Drawing.Size(202, 37);
- this.label9.TabIndex = 24;
- this.label9.Text = "Real Time";
- this.label9.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label9.Click += new System.EventHandler(this.label9_Click);
- //
- // panel3
- //
- this.tableLayoutPanel1.SetColumnSpan(this.panel3, 4);
- this.panel3.Controls.Add(this.ShootingGuideBox);
- this.panel3.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel3.Location = new System.Drawing.Point(3, 521);
- this.panel3.Name = "panel3";
- this.panel3.Size = new System.Drawing.Size(828, 37);
- this.panel3.TabIndex = 23;
- this.panel3.Paint += new System.Windows.Forms.PaintEventHandler(this.panel3_Paint);
- //
- // ShootingGuideBox
- //
- this.ShootingGuideBox.Dock = System.Windows.Forms.DockStyle.Fill;
- this.ShootingGuideBox.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.ShootingGuideBox.ForeColor = System.Drawing.Color.Red;
- this.ShootingGuideBox.Location = new System.Drawing.Point(0, 0);
- this.ShootingGuideBox.Multiline = true;
- this.ShootingGuideBox.Name = "ShootingGuideBox";
- this.ShootingGuideBox.Size = new System.Drawing.Size(828, 37);
- this.ShootingGuideBox.TabIndex = 24;
- this.ShootingGuideBox.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.ShootingGuideBox.TextChanged += new System.EventHandler(this.ShootingGuideBox_TextChanged);
- //
- // label1
- //
- this.tableLayoutPanel1.SetColumnSpan(this.label1, 2);
- this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label1.Font = new System.Drawing.Font("Arial", 25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Pixel);
- this.label1.Location = new System.Drawing.Point(211, 0);
- this.label1.Name = "label1";
- this.label1.Size = new System.Drawing.Size(410, 37);
- this.label1.TabIndex = 0;
- this.label1.Text = "Tongue Image Analyzing System";
- this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label1.Click += new System.EventHandler(this.label1_Click);
- //
- // panel1
- //
- this.tableLayoutPanel1.SetColumnSpan(this.panel1, 2);
- this.panel1.Controls.Add(this.button_ConnectTIAS);
- this.panel1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel1.Location = new System.Drawing.Point(3, 151);
- this.panel1.Name = "panel1";
- this.tableLayoutPanel1.SetRowSpan(this.panel1, 2);
- this.panel1.Size = new System.Drawing.Size(410, 68);
- this.panel1.TabIndex = 7;
- this.panel1.Paint += new System.Windows.Forms.PaintEventHandler(this.panel1_Paint);
- //
- // button_ConnectTIAS
- //
- this.button_ConnectTIAS.Dock = System.Windows.Forms.DockStyle.Fill;
- this.button_ConnectTIAS.Font = new System.Drawing.Font("Arial", 20F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.button_ConnectTIAS.Location = new System.Drawing.Point(0, 0);
- this.button_ConnectTIAS.Name = "button_ConnectTIAS";
- this.button_ConnectTIAS.Size = new System.Drawing.Size(410, 68);
- this.button_ConnectTIAS.TabIndex = 0;
- this.button_ConnectTIAS.Text = "START";
- this.button_ConnectTIAS.UseVisualStyleBackColor = true;
- this.button_ConnectTIAS.Click += new System.EventHandler(this.button_ConnectTIAS_Click);
- //
- // label7
- //
- this.label7.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label7.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.label7.Location = new System.Drawing.Point(3, 259);
- this.label7.Name = "label7";
- this.label7.Size = new System.Drawing.Size(202, 37);
- this.label7.TabIndex = 14;
- this.label7.Text = "RGB";
- this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label7.Click += new System.EventHandler(this.label7_Click);
- //
- // label8
- //
- this.label8.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label8.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.label8.Location = new System.Drawing.Point(211, 259);
- this.label8.Name = "label8";
- this.label8.Size = new System.Drawing.Size(202, 37);
- this.label8.TabIndex = 15;
- this.label8.Text = "L*a*b*";
- this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label8.Click += new System.EventHandler(this.label8_Click);
- //
- // textBox2
- //
- this.textBox2.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox2.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox2.Location = new System.Drawing.Point(3, 373);
- this.textBox2.Multiline = true;
- this.textBox2.Name = "textBox2";
- this.textBox2.Size = new System.Drawing.Size(202, 31);
- this.textBox2.TabIndex = 16;
- this.textBox2.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox2.TextChanged += new System.EventHandler(this.textBox2_TextChanged);
- //
- // textBox3
- //
- this.textBox3.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox3.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox3.Location = new System.Drawing.Point(3, 410);
- this.textBox3.Multiline = true;
- this.textBox3.Name = "textBox3";
- this.textBox3.Size = new System.Drawing.Size(202, 31);
- this.textBox3.TabIndex = 17;
- this.textBox3.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox3.TextChanged += new System.EventHandler(this.textBox3_TextChanged);
- //
- // textBox4
- //
- this.textBox4.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox4.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox4.Location = new System.Drawing.Point(3, 447);
- this.textBox4.Multiline = true;
- this.textBox4.Name = "textBox4";
- this.textBox4.Size = new System.Drawing.Size(202, 31);
- this.textBox4.TabIndex = 18;
- this.textBox4.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox4.TextChanged += new System.EventHandler(this.textBox4_TextChanged);
- //
- // textBox6
- //
- this.textBox6.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox6.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox6.Location = new System.Drawing.Point(211, 373);
- this.textBox6.Multiline = true;
- this.textBox6.Name = "textBox6";
- this.textBox6.Size = new System.Drawing.Size(202, 31);
- this.textBox6.TabIndex = 19;
- this.textBox6.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox6.TextChanged += new System.EventHandler(this.textBox6_TextChanged);
- //
- // textBox7
- //
- this.textBox7.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox7.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox7.Location = new System.Drawing.Point(211, 410);
- this.textBox7.Multiline = true;
- this.textBox7.Name = "textBox7";
- this.textBox7.Size = new System.Drawing.Size(202, 31);
- this.textBox7.TabIndex = 20;
- this.textBox7.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox7.TextChanged += new System.EventHandler(this.textBox7_TextChanged);
- //
- // textBox8
- //
- this.textBox8.Dock = System.Windows.Forms.DockStyle.Fill;
- this.textBox8.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.textBox8.Location = new System.Drawing.Point(211, 447);
- this.textBox8.Multiline = true;
- this.textBox8.Name = "textBox8";
- this.textBox8.Size = new System.Drawing.Size(202, 31);
- this.textBox8.TabIndex = 21;
- this.textBox8.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.textBox8.TextChanged += new System.EventHandler(this.textBox8_TextChanged);
- //
- // panel2
- //
- this.tableLayoutPanel1.SetColumnSpan(this.panel2, 4);
- this.panel2.Controls.Add(this.GuideBox);
- this.panel2.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel2.Location = new System.Drawing.Point(3, 484);
- this.panel2.Name = "panel2";
- this.panel2.Size = new System.Drawing.Size(828, 31);
- this.panel2.TabIndex = 22;
- this.panel2.Paint += new System.Windows.Forms.PaintEventHandler(this.panel2_Paint);
- //
- // GuideBox
- //
- this.GuideBox.Dock = System.Windows.Forms.DockStyle.Fill;
- this.GuideBox.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.GuideBox.ForeColor = System.Drawing.Color.Red;
- this.GuideBox.Location = new System.Drawing.Point(0, 0);
- this.GuideBox.Multiline = true;
- this.GuideBox.Name = "GuideBox";
- this.GuideBox.Size = new System.Drawing.Size(828, 31);
- this.GuideBox.TabIndex = 0;
- this.GuideBox.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
- this.GuideBox.TextChanged += new System.EventHandler(this.GuideBox_TextChanged);
- //
- // label11
- //
- this.label11.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label11.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.label11.Location = new System.Drawing.Point(419, 259);
- this.label11.Name = "label11";
- this.label11.Size = new System.Drawing.Size(202, 37);
- this.label11.TabIndex = 26;
- this.label11.Text = "Segmentation";
- this.label11.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label11.Click += new System.EventHandler(this.label11_Click);
- //
- // label12
- //
- this.label12.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label12.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
- this.label12.Location = new System.Drawing.Point(627, 259);
- this.label12.Name = "label12";
- this.label12.Size = new System.Drawing.Size(204, 37);
- this.label12.TabIndex = 27;
- this.label12.Text = "Selection Region";
- this.label12.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- this.label12.Click += new System.EventHandler(this.label12_Click);
- //
- // panel4
- //
- this.panel4.Controls.Add(this.pictureBox1);
- this.panel4.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel4.Location = new System.Drawing.Point(419, 77);
- this.panel4.Name = "panel4";
- this.tableLayoutPanel1.SetRowSpan(this.panel4, 5);
- this.panel4.Size = new System.Drawing.Size(202, 179);
- this.panel4.TabIndex = 28;
- this.panel4.Paint += new System.Windows.Forms.PaintEventHandler(this.panel4_Paint);
- //
- // pictureBox1
- //
- this.pictureBox1.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
- this.pictureBox1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox1.Location = new System.Drawing.Point(0, 0);
- this.pictureBox1.Name = "pictureBox1";
- this.pictureBox1.Size = new System.Drawing.Size(202, 179);
- this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox1.TabIndex = 0;
- this.pictureBox1.TabStop = false;
- this.pictureBox1.Click += new System.EventHandler(this.pictureBox1_Click);
- //
- // button1
- //
- this.button1.Location = new System.Drawing.Point(211, 77);
- this.button1.Name = "button1";
- this.button1.Size = new System.Drawing.Size(75, 23);
- this.button1.TabIndex = 34;
- this.button1.Text = "button1";
- this.button1.UseVisualStyleBackColor = true;
- this.button1.Click += new System.EventHandler(this.button1_Click);
- //
- // Form_TIASAutomaticShootingSystem
- //
- this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.None;
- this.ClientSize = new System.Drawing.Size(834, 561);
- this.Controls.Add(this.tableLayoutPanel1);
- this.Name = "Form_TIASAutomaticShootingSystem";
- this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
- this.Text = "TIAS BackGround";
- this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form_TIASAutomaticShootingSystem_FormClosing);
- this.Load += new System.EventHandler(this.Form_TIASAutomaticShootingSystem_Load);
- this.tableLayoutPanel1.ResumeLayout(false);
- this.tableLayoutPanel1.PerformLayout();
- this.panel7.ResumeLayout(false);
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox4)).EndInit();
- this.panel6.ResumeLayout(false);
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox3)).EndInit();
- this.panel5.ResumeLayout(false);
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).EndInit();
- this.panel3.ResumeLayout(false);
- this.panel3.PerformLayout();
- this.panel1.ResumeLayout(false);
- this.panel2.ResumeLayout(false);
- this.panel2.PerformLayout();
- this.panel4.ResumeLayout(false);
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
- this.ResumeLayout(false);
-
- }
-
- #endregion
-
- private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
- private System.Windows.Forms.Panel panel3;
- public System.Windows.Forms.TextBox ShootingGuideBox;
- private System.Windows.Forms.Label label1;
- private System.Windows.Forms.Panel panel1;
- private System.Windows.Forms.Button button_ConnectTIAS;
- private System.Windows.Forms.Label label7;
- private System.Windows.Forms.Label label8;
- private System.Windows.Forms.TextBox textBox2;
- private System.Windows.Forms.TextBox textBox3;
- private System.Windows.Forms.TextBox textBox4;
- private System.Windows.Forms.TextBox textBox6;
- private System.Windows.Forms.TextBox textBox7;
- private System.Windows.Forms.TextBox textBox8;
- private System.Windows.Forms.Panel panel2;
- public System.Windows.Forms.TextBox GuideBox;
- private System.Windows.Forms.Panel panel7;
- public System.Windows.Forms.PictureBox pictureBox4;
- private System.Windows.Forms.Panel panel6;
- public System.Windows.Forms.PictureBox pictureBox3;
- private System.Windows.Forms.Panel panel5;
- public System.Windows.Forms.PictureBox pictureBox2;
- private System.Windows.Forms.Label label10;
- private System.Windows.Forms.Label label9;
- private System.Windows.Forms.Label label11;
- private System.Windows.Forms.Label label12;
- private System.Windows.Forms.Panel panel4;
- private System.IO.Ports.SerialPort serialPort1;
- private System.Windows.Forms.PictureBox pictureBox1;
- private System.Windows.Forms.TextBox textBox5;
- private System.Windows.Forms.TextBox textBox1;
- private System.Windows.Forms.Button button1;
- }
-}
\ No newline at end of file
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs
deleted file mode 100644
index 2ea2bac..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs
+++ /dev/null
@@ -1,959 +0,0 @@
-using OpenCvSharp;
-using System;
-using System.Collections.Generic;
-using System.Drawing;
-using System.IO;
-using System.Linq;
-using System.Windows.Forms;
-
-namespace OperateCamera
-{
- public partial class Form_TIASAutomaticShootingSystem : Form
- {
- // Config
- const int RADIUS_COLORAREA = 10;
-
- public static Bitmap bitmap;
-
- float[] a = new float[17];
- float[] b = new float[17];
- float[] c = new float[17];
- float d;
- float e;
- float f;
- int k;
- public static bool m_getColor = false;
- public static Mat m_CalibFrame; // Image used for calibration
- public static OpenCvSharp.Point[] getRGBpoint = new OpenCvSharp.Point[24];// For RGB acquisition
- double[] m_BforLab = new double[24];
- double[] m_GforLab = new double[24];
- double[] m_RforLab = new double[24];
- public static bool m_bCalib;
- public static OpenCvSharp.Point pt = new OpenCvSharp.Point();// Holds the point used for calibration
- public static Mat m_PointedFrame; // Mat that was pointed at
- public static int click = 0; // Number of clicks
-
- OpenCvSharp.Point P1 = new OpenCvSharp.Point();
- OpenCvSharp.Point P2 = new OpenCvSharp.Point();
-
- //Serial communication
- public Form_TIASAutomaticShootingSystem()
- {
- InitializeComponent();
- }
-
- private void button_ConnectTIAS_Click(object sender, EventArgs e)
- {
- // GetImage()
- var path_oriImg = @"data_lab\h\Shot0001.bmp";
- var path_calibCsv = @"data_lab\h\Calib.csv";
- var path_colorMatrixXYZ = "xyz.txt";
- using (Mat mat_oriImg = Cv2.ImRead(path_oriImg, ImreadModes.Color))
- {
- // Process_DeepTIAS()
- var path_mask = @"data_lab\h\Shot0001_mask.bmp";
- Mat mat_finalMask = Cv2.ImRead(path_mask, ImreadModes.Grayscale);
- Cv2.Threshold(mat_finalMask, mat_finalMask, 128, 255, ThresholdTypes.Binary);
-
- // Create the masked tongue-region image
- Mat mat_maskedImg = new Mat();
- mat_oriImg.CopyTo(mat_maskedImg, mat_finalMask);
-
- // Five-point click method (Ishikawa, 2010)
- List<OpenCvSharp.Point> list_5points = Get5points(mat_finalMask);
- Show5point(mat_oriImg.Clone(), list_5points);
-
- // Obtain the 8 regions
- List<OpenCvSharp.Point> list_8area = Get8area(list_5points);
- Show8area(mat_oriImg.Clone(), list_8area);
-
- // DEBUG
- //List<OpenCvSharp.Point> list_5points_002 = new List<OpenCvSharp.Point>() {
- // new OpenCvSharp.Point(230, 628),
- // new OpenCvSharp.Point(704, 572),
- // new OpenCvSharp.Point(642, 782),
- // new OpenCvSharp.Point(360, 808),
- // new OpenCvSharp.Point(500, 920)
- //};
- //List<OpenCvSharp.Point> list_8area_002 = Get8area(list_5points_002);
- //Show8area(mat_oriImg.Clone(), list_8area_002);
-
-
- /*
- // Color extraction
- List<Scalar> list_8Bgr = Get8colors(mat_maskedImg, list_8area);
-
- // Color conversion (RGB -> XYZ -> Lab)
- List<Scalar> list_8Lab = Calc8Lab(list_8Bgr, path_calibCsv, path_colorMatrixXYZ);
-
- // Display the colors
- Show8colors(list_8Bgr, list_8Lab);
-
- // Save
- // Write8colors(list_8Bgr, list_8Lab);
- */
-
-
- // Dispose
- mat_finalMask.Dispose();
- mat_maskedImg.Dispose();
- GC.Collect();
- }
- System.Threading.Thread.Sleep(100);
- }
-
- private List<OpenCvSharp.Point> Get5points(Mat mat_finalMask)
- {
- List<OpenCvSharp.Point> li_dst;
-
- // For display
- var mat_dst = mat_finalMask.Clone();
- Cv2.CvtColor(mat_dst, mat_dst, ColorConversionCodes.GRAY2BGR);
-
- // Pixel coordinates of the tongue region in the mask
- var mat_nonZeroCoordinates = new Mat();
- Cv2.FindNonZero(mat_finalMask, mat_nonZeroCoordinates);
-
- // Put every point on the tongue region into lists
- var list_X = new List<int>();
- var list_Y = new List<int>();
- for (int i = 0; i < mat_nonZeroCoordinates.Total(); i++)
- {
- var x = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).X;
- var y = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).Y;
- list_X.Add(x);
- list_Y.Add(y);
- }
-
- /// method1
- // Find the extremes (raster scan from the top-left)
- var p_top = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Min()));
- var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
- var p_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Min()));
- var p_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Max()));
-
- // Get the y coordinate of the tongue-apex region (the ratio is hard-coded for now)
- var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8));
- var p_apex_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Min());
- var p_apex_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Max());
-
- // Show them for checking
- //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1);
- Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1);
- Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_apex_right, 20, new Scalar(255, 255, 0), -1);
-
- /// method2
- // Compute the center of gravity (CoG)
- var moments = Cv2.Moments(mat_finalMask, true);
- var moment_x = moments.M10 / moments.M00;
- var moment_y = moments.M01 / moments.M00;
-
- // Contour coordinates
- OpenCvSharp.Point[][] contours;
- HierarchyIndex[] hierarchy;
- Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
- var maxArea = contours.Select(n => Cv2.ContourArea(n)).Max();
- var maxContour = contours.Where(n => Cv2.ContourArea(n) == maxArea).ToList()[0];
-
- // Distance between the CoG and the contour
- double maxDistance_lefttop = 0.0;
- double maxDistance_righttop = 0.0;
- double maxDistance_bottom = 0.0;
- var p_left_2 = new OpenCvSharp.Point();
- var p_right_2 = new OpenCvSharp.Point();
- var p_bottom_2 = new OpenCvSharp.Point();
- // Adopt the point with the largest distance
- for (int i = 0; i < maxContour.Count(); i++)
- {
- // Above the CoG
- if (maxContour[i].Y < moment_y)
- {
- // Above and to the left of the CoG
- if (maxContour[i].X < moment_x)
- {
- var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
- if (distance > maxDistance_lefttop)
- {
- maxDistance_lefttop = distance;
- p_left_2 = maxContour[i];
- }
-
- }
- // Above and to the right of the CoG
- if (maxContour[i].X >= moment_x)
- {
- var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
- if (distance > maxDistance_righttop)
- {
- maxDistance_righttop = distance;
- p_right_2 = maxContour[i];
- }
- }
- }
- else
- {
- var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
- if (distance > maxDistance_bottom)
- {
- maxDistance_bottom = distance;
- p_bottom_2 = maxContour[i];
- }
- }
- }
-
- // Get the y coordinate of the tongue-apex region (the ratio is hard-coded for now)
- var y_top_avg_ = (p_left_2.Y + p_right_2.Y) / 2.0;
- var y_apex_2 = (int)(y_top_avg_ + ((p_bottom_2.Y - y_top_avg_) * 0.53));
- var p_apex_left_2 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_2).Min());
- var p_apex_right_2 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_2).Max());
-
- // Display
- Cv2.Circle(mat_dst, p_left_2, 20, new Scalar(255, 0, 0), -1);
- Cv2.Circle(mat_dst, p_right_2, 20, new Scalar(255, 0, 0), -1);
- //Cv2.Circle(mat_dst, p_bottom_2, 20, new Scalar(255, 0, 0), -1);
- //Cv2.Circle(mat_dst, p_apex_left_2, 20, new Scalar(255, 0, 0), -1);
- //Cv2.Circle(mat_dst, p_apex_right_2, 20, new Scalar(255, 0, 0), -1);
- //Cv2.Circle(mat_dst, new OpenCvSharp.Point(moment_x, moment_y), 10, new Scalar(200, 60, 200), -1); // CoG
- //Cv2.DrawContours(mat_dst, maxContours, 0, new Scalar(0, 255, 255), 4); // contour
-
- /// method3
- // Compute the midpoint between the CoG and the top
- var y_topToCoG = (int)(moment_y + p_top.Y) / 2;
- var p_topToCoG_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_topToCoG).Min());
- var p_topToCoG_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_topToCoG).Max());
- var p_topToCoG_center = new OpenCvSharp.Point((int)((p_topToCoG_left.X + p_topToCoG_right.X) / 2), y_topToCoG);
- // Compute the midpoint between the CoG and the bottom
- var y_bottomToCoG = (int)(moment_y + p_bottom.Y) / 2;
- var p_bottomToCoG_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_bottomToCoG).Min());
- var p_bottomToCoG_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_bottomToCoG).Max());
- var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG);
- // Line fitting
- var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.0);
- var lefty = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1);
- var righty = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1);
- // Contour points on the line
- var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
- var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
- var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
- Cv2.Line(mat_centerline, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), 3);
- Cv2.DrawContours(mat_contour, contours, 0, 3);
- Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and);
- Cv2.FindNonZero(mat_and, mat_and);
- var bottom_y_3 = 0;
- var bottom_x_3 = 0;
- for (int i = 0; i < mat_and.Total(); i++)
- {
- var x = mat_and.At<OpenCvSharp.Point>(i).X;
- var y = mat_and.At<OpenCvSharp.Point>(i).Y;
- if (bottom_y_3 < y)
- {
- bottom_y_3 = y;
- bottom_x_3 = x;
- }
- }
- var p_bottom_3 = new OpenCvSharp.Point(bottom_x_3, bottom_y_3);
-
- //Cv2.Circle(mat_dst, p_topToCoG_left, 20, new Scalar(0, 100, 255), -1);
- //Cv2.Circle(mat_dst, p_topToCoG_right, 20, new Scalar(0, 100, 255), -1);
- //Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1);
- //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1);
- //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1);
- //Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1);
- //Cv2.Line(mat_dst, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), new Scalar(0, 100, 255)); // fitted line
- //Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1);
-
- // DEBUG
- Cv2.NamedWindow("dst", WindowMode.KeepRatio ^ WindowMode.AutoSize);
- Cv2.ImShow("dst", mat_dst.Resize(new OpenCvSharp.Size((int)mat_dst.Width * 0.5, (int)mat_dst.Height * 0.5)));
-
- // Output
- li_dst = new List<OpenCvSharp.Point> { p_left_2, p_apex_left, p_bottom, p_apex_right, p_right_2 };
- //li_dst = new List<OpenCvSharp.Point> { p_left_2, p_apex_left_2, p_bottom_2, p_apex_right_2, p_right_2 };
-
- // Dispose
- mat_dst.Dispose();
- mat_nonZeroCoordinates.Dispose();
- GC.Collect();
- return li_dst;
- }
-
- private List<OpenCvSharp.Point> Get8area(List<OpenCvSharp.Point> list_5points)
- {
- var li_dst = new List<OpenCvSharp.Point>();
- // ROI mask image 1
- // 0____________ 4
- // | |
- // | |
- // | |
- // 1|● ●|3
- // tongue /
- // tongue_______/
- // 2
-
- var points1 = new int[2, 3]{{0, 2, 1}, {4, 2, 3}};
- var area1 = new OpenCvSharp.Point();
- for (int i = 0; i < 2; i ++)
- {
- var cx1 = (list_5points[points1[i, 0]].X + list_5points[points1[i, 1]].X) / 2;
- var cy1 = (list_5points[points1[i, 0]].Y + list_5points[points1[i, 1]].Y) / 2;
- area1.X = (list_5points[points1[i, 2]].X + cx1) / 2;
- area1.Y = (list_5points[points1[i, 2]].Y + cy1) / 2;
- li_dst.Add(area1);
- }
-
- // ROI mask image 2
- // 0____________ 4
- // | ● ● |
- // | |
- // | |
- // 1| |3
- // tongue /
- // tongue_______/
- // 2
-
- var points2 = new int[2, 4]{{0, 3, 0, 4}, {4, 1, 4, 0}};
- var area2 = new OpenCvSharp.Point();
- for (int i = 0; i< 2; i++)
- {
- float cx1 = list_5points[points2[i, 0]].X + (list_5points[points2[i, 1]].X - list_5points[points2[i, 0]].X) / 4;
- float cy1 = list_5points[points2[i, 0]].Y + (list_5points[points2[i, 1]].Y - list_5points[points2[i, 0]].Y) / 4;
- float cx2 = (list_5points[points2[i, 2]].X + list_5points[points2[i, 3]].X) / 2;
- float cy2 = (list_5points[points2[i, 2]].Y + list_5points[points2[i, 3]].Y) / 2;
- area2.X = (int)(cx1 + cx2) / 2;
- area2.Y = (int)(cy1 + cy2) / 2;
- li_dst.Add(area2);
- }
-
- // ROI mask image 3
- // 0____________ 4
- // | |
- // | |
- // | |
- // 1| ● ● |3
- // tongue /
- // tongue_______/
- // 2
- var points3 = new int[2, 3]{{0, 3, 2}, {4, 1, 2}};
- var area3 = new OpenCvSharp.Point();
- for (int i = 0; i< 2; i ++)
- {
- float cx1 = list_5points[points3[i, 0]].X + (list_5points[points3[i, 1]].X - list_5points[points3[i, 0]].X) / 4;
- float cy1 = list_5points[points3[i, 0]].Y + (list_5points[points3[i, 1]].Y - list_5points[points3[i, 0]].Y) / 4;
- area3.X = (int)(list_5points[points3[i, 2]].X + cx1) / 2;
- area3.Y = (int)(list_5points[points3[i, 2]].Y + cy1) / 2;
- li_dst.Add(area3);
- }
-
- // ROI mask image 4
- // 0____________ 4
- // | |
- // | |
- // | |
- // 1| |3
- // tongue /
- // tongue_●_●_/
- // 2
- var points4 = new int[2, 2]{{0, 2}, {4, 2}};
- var area4 = new OpenCvSharp.Point();
- for (int i = 0; i< 2; i ++)
- {
- area4.X = list_5points[points4[i, 0]].X + (list_5points[points4[i, 1]].X - list_5points[points4[i, 0]].X) * 7 / 8;
- area4.Y = list_5points[points4[i, 0]].Y + (list_5points[points4[i, 1]].Y - list_5points[points4[i, 0]].Y) * 7 / 8;
- li_dst.Add(area4);
- }
-
- // TODO: if any of the 8 areas does not lie on the tongue region, correct it
- //bool isOnTongueArea = DiscriminateOnTongueArea(li_dst);
- //if (isOnTongueArea)
- //{
-
- //}
-
- return li_dst;
- }
-
- private List<Scalar> Get8colors(Mat mat_maskedImg, List<OpenCvSharp.Point> list_8area)
- {
- List<Scalar> li_dst = new List<Scalar>();
- for (int i = 0; i < list_8area.Count(); i++)
- {
- using (Mat mat_colorRoi = Mat.Zeros(mat_maskedImg.Size(), MatType.CV_8UC1))
- {
- // Create a mask image marking the color-extraction region
- Cv2.Circle(mat_colorRoi, list_8area[i], RADIUS_COLORAREA, 255, -1);
-
- // Extract the color within the region
- var color = Cv2.Mean(mat_maskedImg, mat_colorRoi);
- li_dst.Add(color);
- }
- }
- return li_dst;
- }
-
- private void Show5point(Mat oriImg, List<OpenCvSharp.Point> list_5point)
- {
- Cv2.Circle(oriImg, list_5point[0], 10, new Scalar(255, 0, 0), -1);
- Cv2.Circle(oriImg, list_5point[1], 10, new Scalar(255, 0, 0), -1);
- Cv2.Circle(oriImg, list_5point[2], 10, new Scalar(255, 0, 0), -1);
- Cv2.Circle(oriImg, list_5point[3], 10, new Scalar(255, 0, 0), -1);
- Cv2.Circle(oriImg, list_5point[4], 10, new Scalar(255, 0, 0), -1);
- Cv2.NamedWindow("dst_point", WindowMode.AutoSize);
- Cv2.ImShow("dst_point", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
- }
-
- private void Show8area(Mat oriImg, List<OpenCvSharp.Point> list_8area)
- {
- Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
- Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
- Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(255, 255, 255), -1);
- Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(255, 255, 255), -1);
- Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
- Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
- Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 0), -1);
- Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 0), -1);
- Cv2.NamedWindow("dst_", WindowMode.AutoSize);
- Cv2.ImShow("dst_", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
- }
-
- private List<Scalar> Calc8Lab(List<Scalar> list_8colors, string path_calibCsv, string path_colorMatrixXYZ)
- {
- GetColorMatrixRGB(path_calibCsv);
- CalcTransMat(path_colorMatrixXYZ);
-
-
-
-
- Read_TranslationMatrix();
- var li_dst = new List<Scalar>();
- for (int i = 0; i < list_8colors.Count(); i++)
- {
- var LabValue = CalcLab(list_8colors[i]);
- li_dst.Add(LabValue);
- }
- return li_dst;
- }
-
- private void GetColorMatrixRGB(string path_calibCsv)
- {
- System.Text.Encoding encoding = GetType(path_calibCsv);
- System.IO.FileStream fs3 = new System.IO.FileStream(path_calibCsv, System.IO.FileMode.Open, System.IO.FileAccess.Read);
- System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding);
- string strLine = "";
- string[] aryLine;
- sr3.ReadLine(); // skip the header
- int i = 0;
- while ((strLine = sr3.ReadLine()) != null)
- {
- aryLine = strLine.Split(',');
- var no = Convert.ToSingle(aryLine[0]);
- float r = Convert.ToSingle(aryLine[1]);
- float g = Convert.ToSingle(aryLine[2]);
- float b = Convert.ToSingle(aryLine[3]);
- m_BforLab[i] = b;
- m_GforLab[i] = g;
- m_RforLab[i] = r;
- i++;
- }
- sr3.Close();
- fs3.Close();
- }
-
- private void CalcTransMat(string path_colorMatrixXYZ)
- {
- Mat RGBmat = new Mat(24, 17, MatType.CV_64F, new Scalar(1.0f));
- Mat XYZmat = new Mat(24, 4, MatType.CV_64F, new Scalar(1.0f));
-
- // Read the XYZ values of the ColorChart
- string line;
- string[] split = new string[3];
- double valueX = 0, valueY = 0, valueZ = 0;
- System.Text.Encoding encoding3 = GetType(path_colorMatrixXYZ);
- System.IO.FileStream fs3 = new System.IO.FileStream(path_colorMatrixXYZ, System.IO.FileMode.Open, System.IO.FileAccess.Read);
- System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding3);
- for (int i = 0; i < 24; i++)
- {
- line = sr3.ReadLine();
- split = line.Split(' ');
- valueX = Convert.ToDouble(split[0]);
- valueY = Convert.ToDouble(split[1]);
- valueZ = Convert.ToDouble(split[2]);
- XYZmat.Set(i, 0, valueX);
- XYZmat.Set(i, 1, valueY);
- XYZmat.Set(i, 2, valueZ);
- }
- sr3.Close();
- fs3.Close();
-
- // Compute the transformation matrix
- for (int j = 0; j < 24; j++)
- {
- // Term order:
- // R, G, B, RG, RB, GB, R^2, G^2, B^2
- // R^2B, R^2G, G^2R, G^2B, B^2R, B^2G, RGB
- RGBmat.Set(j, 2, m_BforLab[j]);
- RGBmat.Set(j, 1, m_GforLab[j]);
- RGBmat.Set(j, 0, m_RforLab[j]);
- // second-order terms
- RGBmat.Set(j, 3, m_RforLab[j] * m_GforLab[j]);
- RGBmat.Set(j, 4, m_RforLab[j] * m_BforLab[j]);
- RGBmat.Set(j, 5, m_GforLab[j] * m_BforLab[j]);
- RGBmat.Set(j, 6, m_RforLab[j] * m_RforLab[j]);
- RGBmat.Set(j, 7, m_GforLab[j] * m_GforLab[j]);
- RGBmat.Set(j, 8, m_BforLab[j] * m_BforLab[j]);
- // third-order terms
- RGBmat.Set(j, 9, m_RforLab[j] * m_RforLab[j] * m_BforLab[j]);
- RGBmat.Set(j, 10, m_RforLab[j] * m_RforLab[j] * m_GforLab[j]);
- RGBmat.Set(j, 11, m_GforLab[j] * m_GforLab[j] * m_RforLab[j]);
- RGBmat.Set(j, 12, m_GforLab[j] * m_GforLab[j] * m_BforLab[j]);
- RGBmat.Set(j, 13, m_BforLab[j] * m_BforLab[j] * m_RforLab[j]);
- RGBmat.Set(j, 14, m_BforLab[j] * m_BforLab[j] * m_GforLab[j]);
- RGBmat.Set(j, 15, m_RforLab[j] * m_BforLab[j] * m_GforLab[j]);
- }
- // Generate the transformation matrix
- Mat translation = new Mat();
- var canSolve = Cv2.Solve(RGBmat, XYZmat, translation, DecompTypes.SVD);
-
- // Save
- string CSVfilename = "translateMatrix.csv";
- FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
- CSV_file.Seek(0, SeekOrigin.Begin);
- CSV_file.SetLength(0);
- CSV_file.Close();
- StreamWriter CSV_data = new StreamWriter(CSVfilename);
- string s2 = "";
- for (int i = 0; i <= 16; i++)
- {
- for (int j = 0; j <= 2; j++)
- {
- double s1 = translation.At<double>(i, j);
- s2 += s1.ToString() + ",";
- }
- CSV_data.WriteLine(s2);
- s2 = "";
- }
- CSV_data.Close();
- }
-
- private void Read_TranslationMatrix()
- {
- // Reload the transformation matrix
- System.Text.Encoding encoding = GetType("translateMatrix.csv");
- System.IO.FileStream fs1 = new System.IO.FileStream("translateMatrix.csv", System.IO.FileMode.Open, System.IO.FileAccess.Read);
- System.IO.StreamReader sr1 = new System.IO.StreamReader(fs1, encoding);
- for (int i = 0; i < 17; i++)
- {
- a[i] = 0;
- b[i] = 0;
- c[i] = 0;
- }
- k = 0;
- string strLine = "";
- string[] aryLine = null;
- while ((strLine = sr1.ReadLine()) != null)
- {
- aryLine = strLine.Split(',');
- a[k] = Convert.ToSingle(aryLine[0]);
- b[k] = Convert.ToSingle(aryLine[1]);
- c[k] = Convert.ToSingle(aryLine[2]);
- k++;
- }
- sr1.Close();
- fs1.Close();
- }
-
- private OpenCvSharp.Scalar CalcLab(OpenCvSharp.Scalar BgrValue)
- {
- // Compute CIELab
- // Convert to XYZ
- double X, Y, Z;
- double m_B = BgrValue.Val0;
- double m_G = BgrValue.Val1;
- double m_R = BgrValue.Val2;
- X = m_R * a[0] + m_G * a[1] + m_B * a[2]
- + a[3] * m_R * m_G + a[4] * m_R * m_B + a[5] * m_G * m_B
- + a[6] * m_R * m_R + a[7] * m_G * m_G + a[8] * m_B * m_B
- + a[9] * m_R * m_R * m_B + a[10] * m_R * m_R * m_G
- + a[11] * m_G * m_G * m_R + a[12] * m_G * m_G * m_B
- + a[13] * m_B * m_B * m_R + a[14] * m_B * m_B * m_G
- + a[15] * m_R * m_G * m_B
- + a[16];
- Y = m_R * b[0] + m_G * b[1] + m_B * b[2]
- + b[3] * m_R * m_G + b[4] * m_R * m_B + b[5] * m_G * m_B
- + b[6] * m_R * m_R + b[7] * m_G * m_G + b[8] * m_B * m_B
- + b[9] * m_R * m_R * m_B + b[10] * m_R * m_R * m_G
- + b[11] * m_G * m_G * m_R + b[12] * m_G * m_G * m_B
- + b[13] * m_B * m_B * m_R + b[14] * m_B * m_B * m_G
- + b[15] * m_R * m_G * m_B
- + b[16];
- Z = m_R * c[0] + m_G * c[1] + m_B * c[2]
- + c[3] * m_R * m_G + c[4] * m_R * m_B + c[5] * m_G * m_B
- + c[6] * m_R * m_R + c[7] * m_G * m_G + c[8] * m_B * m_B
- + c[9] * m_R * m_R * m_B + c[10] * m_R * m_R * m_G
- + c[11] * m_G * m_G * m_R + c[12] * m_G * m_G * m_B
- + c[13] * m_B * m_B * m_R + c[14] * m_B * m_B * m_G
- + c[15] * m_R * m_G * m_B
- + c[16];
- if (X < 0) X = 0;
- if (Y < 0) Y = 0;
- if (Z < 0) Z = 0;
-
- // Convert to Lab (fixed formulas)
- // TIAS light source (measured 2020-10-23)
- // double Xn = 99.5829;
- // double Yn = 100.0;
- // double Zn = 57.1402;
-
- // Values used by Tang and Takeda (probably measurements of the old TIAS light source)
- //double Xn = 102.07;
- //double Yn = 100.0;
- //double Zn = 79.41;
-
- // Values used by Ishikawa and Prof. Nakaguchi (artificial sunlight illumination?)
- double Xn = 92.219;
- double Yn = 100.0;
- double Zn = 95.965;
- double cL = 116.0 * Math.Pow((Y / Yn), 1.0 / 3.0) - 16.0;
- double ca = 500.0 * (Math.Pow((X / Xn), 1.0 / 3.0) - Math.Pow((Y / Yn), 1.0 / 3.0));
- double cb = 200.0 * (Math.Pow((Y / Yn), 1.0 / 3.0) - Math.Pow((Z / Zn), 1.0 / 3.0));
-
- return new OpenCvSharp.Scalar(cL, ca, cb);
- }
-
- private void Show8colors(List<Scalar> list_8Bgr, List<Scalar> list_8Lab)
- {
- Invoke((MethodInvoker)delegate
- {
- textBox1.Text = list_8Bgr[0].ToString() + " " + list_8Bgr[1].ToString();
- textBox2.Text = list_8Bgr[2].ToString() + " " + list_8Bgr[3].ToString();
- textBox3.Text = list_8Bgr[4].ToString() + " " + list_8Bgr[5].ToString();
- textBox4.Text = list_8Bgr[6].ToString() + " " + list_8Bgr[7].ToString();
- textBox5.Text = list_8Lab[0].ToString() + " " + list_8Lab[1].ToString();
- textBox6.Text = list_8Lab[2].ToString() + " " + list_8Lab[3].ToString();
- textBox7.Text = list_8Lab[4].ToString() + " " + list_8Lab[5].ToString();
- textBox8.Text = list_8Lab[6].ToString() + " " + list_8Lab[7].ToString();
- });
- }
-
- private void Write8colors(List<Scalar> list_8Bgr, List<Scalar> list_8Lab)
- {
- // Save
- string CSVfilename = "CalculatedLab.csv";
- FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
- CSV_file.Seek(0, SeekOrigin.Begin);
- CSV_file.SetLength(0);
- CSV_file.Close();
-
- StreamWriter CSV_data = new StreamWriter(CSVfilename);
- CSV_data.WriteLine("Area,R,G,B,L,a,B");
- for (int i = 0; i < list_8Bgr.Count(); i++)
- {
- string str = (i + 1).ToString() + ",";
- str +=
- list_8Bgr[i].Val2.ToString("0.0000") + "," +
- list_8Bgr[i].Val1.ToString("0.0000") + "," +
- list_8Bgr[i].Val0.ToString("0.0000") + "," +
- list_8Lab[i].Val0.ToString("0.0000") + "," +
- list_8Lab[i].Val1.ToString("0.0000") + "," +
- list_8Lab[i].Val2.ToString("0.0000");
- CSV_data.WriteLine(str);
- }
- CSV_data.Close();
- }
-
- private List<int> IndexOfAll(List<int> li, int target)
- {
- int num = li.IndexOf(target);
- var li_num = new List<int>();
- if (num > 0)
- {
- li_num.Add(num);
- // Repeat until IndexOf finds no more matches
- while (num > 0)
- {
- // Search from the position right after the last match
- num = li.IndexOf(target, num + 1);
- if (num > 0)
- {
- li_num.Add(num);
- }
- }
- }
- else
- {
- Console.WriteLine("{0}は見つかりませんでした", target);
- }
- return li_num;
- }
-
-
- public static System.Text.Encoding GetType(string FILE_NAME)
- {
- System.IO.FileStream fs = new System.IO.FileStream(FILE_NAME, System.IO.FileMode.Open, System.IO.FileAccess.Read);
- System.Text.Encoding r = GetType(fs);
- fs.Close();
- return r;
- }
-
- public static System.Text.Encoding GetType(System.IO.FileStream fs)
- {
- byte[] Unicode = new byte[] { 0xFF, 0xFE, 0x41 };
- byte[] UnicodeBIG = new byte[] { 0xFE, 0xFF, 0x00 };
- byte[] UTF8 = new byte[] { 0xEF, 0xBB, 0xBF };
- System.Text.Encoding reVal = System.Text.Encoding.Default;
-
- System.IO.BinaryReader r = new System.IO.BinaryReader(fs, System.Text.Encoding.Default);
- int i;
- int.TryParse(fs.Length.ToString(), out i);
- byte[] ss = r.ReadBytes(i);
- if (IsUTF8Bytes(ss) || (ss[0] == 0xEF && ss[1] == 0xBB && ss[2] == 0xBF))
- {
- reVal = System.Text.Encoding.UTF8;
- }
- else if (ss[0] == 0xFE && ss[1] == 0xFF && ss[2] == 0x00)
- {
- reVal = System.Text.Encoding.BigEndianUnicode;
- }
- else if (ss[0] == 0xFF && ss[1] == 0xFE && ss[2] == 0x41)
- {
- reVal = System.Text.Encoding.Unicode;
- }
- r.Close();
- return reVal;
- }
-
- private static bool IsUTF8Bytes(byte[] data)
- {
- int charByteCounter = 1;
- byte curByte;
- for (int i = 0; i < data.Length; i++)
- {
- curByte = data[i];
- if (charByteCounter == 1)
- {
- if (curByte >= 0x80)
- {
- while (((curByte <<= 1) & 0x80) != 0)
- {
- charByteCounter++;
- }
- if (charByteCounter == 1 || charByteCounter > 6)
- {
- return false;
- }
- }
- }
- else
- {
- if ((curByte & 0xC0) != 0x80)
- {
- return false;
- }
- charByteCounter--;
- }
- }
- if (charByteCounter > 1)
- {
- throw new Exception("Error");
- }
- return true;
- }
-
- private void Form_TIASAutomaticShootingSystem_FormClosing(object sender, FormClosingEventArgs e)
- {
- System.Threading.Thread.Sleep(100);
- }
-
- private void Form_TIASAutomaticShootingSystem_Load(object sender, EventArgs e)
- {
- }
-
- private void textBox5_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void textBox1_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void panel7_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void pictureBox4_Click(object sender, EventArgs e)
- {
-
- }
-
- private void panel6_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void pictureBox3_Click(object sender, EventArgs e)
- {
-
- }
-
- private void panel5_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void pictureBox2_Click(object sender, EventArgs e)
- {
-
- }
-
- private void label10_Click(object sender, EventArgs e)
- {
-
- }
-
- private void label9_Click(object sender, EventArgs e)
- {
-
- }
-
- private void panel3_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void ShootingGuideBox_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void label1_Click(object sender, EventArgs e)
- {
-
- }
-
- private void panel1_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void tableLayoutPanel1_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void label7_Click(object sender, EventArgs e)
- {
-
- }
-
- private void label8_Click(object sender, EventArgs e)
- {
-
- }
-
- private void textBox2_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void textBox3_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void textBox4_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void textBox6_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void textBox7_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void textBox8_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void panel2_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void GuideBox_TextChanged(object sender, EventArgs e)
- {
-
- }
-
- private void label11_Click(object sender, EventArgs e)
- {
-
- }
-
- private void label12_Click(object sender, EventArgs e)
- {
-
- }
-
- private void panel4_Paint(object sender, PaintEventArgs e)
- {
-
- }
-
- private void pictureBox1_Click(object sender, EventArgs e)
- {
-
- }
-
- private void button1_Click(object sender, EventArgs e)
- {
- var path_csv = @"D:\kei2\Study\Tongue\TongueColorAnalysis\automaze5click\Nakaguchi_clicked.csv";
- var path_out = @"D:\kei2\Study\Tongue\TongueColorAnalysis\automaze5click\Nakaguchi_area.csv";
- var sr = new StreamReader(path_csv);
- var sw = new StreamWriter(path_out);
-
- // skip header
- var head = sr.ReadLine();
-
- //
- for (int i = 0; i < 101; i++)
- {
- var line = sr.ReadLine();
- var words = line.Split(',');
- sw.Write(words[0] + "," + words[1] + ",");
- var list_point = new List<OpenCvSharp.Point>();
-
- for (int j = 2; j < words.Length; j = j + 2)
- {
- var p = new OpenCvSharp.Point(int.Parse(words[j]), int.Parse(words[j + 1]));
- list_point.Add(p);
- }
- var list_Psorted = list_point.OrderBy(n => n.X).ToList();
- var list_area = Get8area(list_Psorted);
- foreach (var n in list_Psorted)
- {
- sw.Write(n.X + "," + n.Y + ",");
- }
- foreach (var n in list_area)
- {
- sw.Write(n.X + "," + n.Y + ",");
- }
- sw.WriteLine();
- }
-
- // dispose
- sw.Close();
- sr.Close();
- }
- }
-}
\ No newline at end of file
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx
deleted file mode 100644
index 5f4899d..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx
+++ /dev/null
@@ -1,126 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- 17, 17
-
-
- 56
-
-
\ No newline at end of file
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs
deleted file mode 100644
index 4df843f..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading;
-using System.IO.Ports;
-using System.Windows.Forms;
-using System.Linq;
-
-namespace OperateCamera
-{
- static class Program
- {
- /// <summary>
- /// Main System Running In Here!
- /// </summary>
- [STAThread]
- static void Main()
- {
- Application.EnableVisualStyles();
- Application.SetCompatibleTextRenderingDefault(false);
- Application.Run(new Form_TIASAutomaticShootingSystem());
- }
- }
-}
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs
deleted file mode 100644
index 13ca395..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("newcamera_deeplearning")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("newcamera_deeplearning")]
-[assembly: AssemblyCopyright("Copyright © 2018")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible
-// to COM components. If you need to access a type in this assembly from COM,
-// set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("9b9d9f04-f367-4b3a-a842-51d01b42539b")]
-
-// Version information for an assembly consists of the following four values:
-//
-//      Major Version
-//      Minor Version
-//      Build Number
-//      Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs
deleted file mode 100644
index 22a72a7..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace deepTIAS_feature_labColor.Properties {
- using System;
-
-
- /// <summary>
- /// A strongly-typed resource class, for looking up localized strings, etc.
- /// </summary>
- // This class was auto-generated by the StronglyTypedResourceBuilder
- // class via a tool like ResGen or Visual Studio.
- // To add or remove a member, edit your .ResX file then rerun ResGen
- // with the /str option, or rebuild your VS project.
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
- [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- internal class Resources {
-
- private static global::System.Resources.ResourceManager resourceMan;
-
- private static global::System.Globalization.CultureInfo resourceCulture;
-
- [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
- internal Resources() {
- }
-
- /// <summary>
- /// Returns the cached ResourceManager instance used by this class.
- /// </summary>
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Resources.ResourceManager ResourceManager {
- get {
- if (object.ReferenceEquals(resourceMan, null)) {
- global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("deepTIAS_feature_labColor.Properties.Resources", typeof(Resources).Assembly);
- resourceMan = temp;
- }
- return resourceMan;
- }
- }
-
- /// <summary>
- /// Overrides the current thread's CurrentUICulture property for all
- /// resource lookups using this strongly typed resource class.
- /// </summary>
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Globalization.CultureInfo Culture {
- get {
- return resourceCulture;
- }
- set {
- resourceCulture = value;
- }
- }
- }
-}
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx
deleted file mode 100644
index af7dbeb..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx
+++ /dev/null
@@ -1,117 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
\ No newline at end of file
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs
deleted file mode 100644
index 11eed20..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace deepTIAS_feature_labColor.Properties {
-
-
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
- internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
-
- private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
-
- public static Settings Default {
- get {
- return defaultInstance;
- }
- }
- }
-}
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings
deleted file mode 100644
index 3964565..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest
deleted file mode 100644
index 061c9ca..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj
deleted file mode 100644
index 8b969e5..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj
+++ /dev/null
@@ -1,185 +0,0 @@
-
-
-
-
-
- Debug
- AnyCPU
- {9B9D9F04-F367-4B3A-A842-51D01B42539B}
- WinExe
- Properties
- deepTIAS_feature_labColor
- deepTIAS_feature_labColor
- v4.7.1
- 512
- true
-
-
-
- publish\
- true
- Disk
- false
- Foreground
- 7
- Days
- false
- false
- true
- 0
- 1.0.0.%2a
- false
- false
- true
-
-
- AnyCPU
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE
- prompt
- 4
-
-
- AnyCPU
- pdbonly
- true
- bin\Release\
- TRACE
- prompt
- 4
-
-
- true
- bin\x64\Debug\
- DEBUG;TRACE
- full
- x64
- prompt
- MinimumRecommendedRules.ruleset
- true
- true
-
-
- bin\x64\Release\
- TRACE
- true
- pdbonly
- x64
- prompt
- MinimumRecommendedRules.ruleset
- true
- true
-
-
-
- Form
-
-
- Form_TIASAutomaticShootingSystem.cs
-
-
-
-
- Form_TIASAutomaticShootingSystem.cs
-
-
- ResXFileCodeGenerator
- Resources.Designer.cs
- Designer
-
-
- True
- Resources.resx
- True
-
-
-
- SettingsSingleFileGenerator
- Settings.Designer.cs
-
-
- True
- Settings.settings
- True
-
-
-
-
-
-
-
- False
- Microsoft .NET Framework 4.7.1 %28x86 and x64%29
- true
-
-
- False
- .NET Framework 3.5 SP1
- false
-
-
-
-
- Always
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.dll
-
-
- ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.Blob.dll
-
-
- ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.Extensions.dll
-
-
- ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.UserInterface.dll
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
deleted file mode 100644
index e69de29..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
+++ /dev/null
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
deleted file mode 100644
index e69de29..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
+++ /dev/null
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
deleted file mode 100644
index e69de29..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
+++ /dev/null
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
deleted file mode 100644
index e69de29..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
+++ /dev/null
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
deleted file mode 100644
index e69de29..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
+++ /dev/null
diff --git a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs b/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
deleted file mode 100644
index e69de29..0000000
--- a/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
+++ /dev/null
diff --git a/Features/.idea/Features.iml b/Features/.idea/Features.iml
new file mode 100644
index 0000000..5f9eaa6
--- /dev/null
+++ b/Features/.idea/Features.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Features/DeepLearning/.idea/DeepLearning.iml b/Features/DeepLearning/.idea/DeepLearning.iml
new file mode 100644
index 0000000..5bf3430
--- /dev/null
+++ b/Features/DeepLearning/.idea/DeepLearning.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git "a/Features/DeepLearning/Reference/Tang\047s/1.py" "b/Features/DeepLearning/Reference/Tang\047s/1.py"
new file mode 100644
index 0000000..9381c02
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/1.py"
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+# Training script for pix2pix
+
+# python train_facade.py -g 0 -i ./facade/base --out result_facade --snapshot_interval 10000
+
+from __future__ import print_function
+import argparse
+import os
+
+import chainer
+from chainer import training
+from chainer.training import extensions
+from chainer import serializers
+
+from net import Discriminator
+from net import Encoder
+from net import Decoder
+from updater import PicUpdater
+
+from img_dataset import ImgDataset
+from pic_visualizer import out_image
+
+# dataset paths
+# locations of the images used for training
+# directory containing the raw tongue images
+DATASET_SRC = "D:/test13/Sample16"
+
+# directory containing the binarized tongue images
+DATASET_DST = "D:/test13/Label16"
+
+# directory where the learned parameters are saved
+SAVE_DIR = './pix2pix_param2'
+def main():
+    """Configuration."""
+    # input/output image size
+    w_img = 256
+    h_img = 256
+    # Either pass the options on the command line (e.g. --batchsize 4)
+    # or edit the default values below directly.
+    # Settings you may want to change (an example invocation is shown below):
+    # 1 batch size
+    # 2 number of epochs
+    # 3 GPU ID (a negative value selects the CPU; not recommended, far too slow)
+    # 4 directory of the raw tongue images (overrides DATASET_SRC above)
+    # 5 directory of the binarized tongue images (overrides DATASET_DST above)
+    # 6 directory where the learned parameters are saved
+    # 7 interval at which parameters are saved
+
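+    # A hypothetical example invocation, spelling out the defaults defined above:
+    #   python 1.py --batchsize 4 --epoch 4000 -g 0 -s D:/test13/Sample16 -d D:/test13/Label16 -o ./pix2pix_param2 --snapshot_interval 5000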
+ parser = argparse.ArgumentParser(description='chainer implementation of pix2pix')
+ #1
+ parser.add_argument('--batchsize', '-b', type=int, default=4,
+ help='Number of images in each mini-batch')
+ #2
+ parser.add_argument('--epoch', '-e', type=int, default=4000,
+ help='Number of sweeps over the dataset to train')
+ #3
+ parser.add_argument('--gpu', '-g', type=int, default=0,
+ help='GPU ID (negative value indicates CPU)')
+ #4
+ parser.add_argument('--data_src', '-s', default=DATASET_SRC,
+ help='Directory of image files.')
+ #5
+ parser.add_argument('--data_dst', '-d', default=DATASET_DST,
+ help='Directory of ground truth image files.')
+ #6
+ parser.add_argument('--out', '-o', default=SAVE_DIR,
+ help='Directory to output the result')
+ parser.add_argument('--resume', '-r', default='',
+ help='Resume the training from snapshot')
+ parser.add_argument('--seed', type=int, default=0,
+ help='Random seed')
+ #7
+ parser.add_argument('--snapshot_interval', type=int, default=5000,
+ help='Interval of snapshot')
+ parser.add_argument('--display_interval', type=int, default=5000,
+ help='Interval of displaying log to console')
+ args = parser.parse_args()
+
+ print('GPU: {}'.format(args.gpu))
+ print('# Minibatch-size: {}'.format(args.batchsize))
+ print('# epoch: {}'.format(args.epoch))
+ print('')
+
+ # Set up a neural network to train
+ enc = Encoder(in_ch=3)
+ dec = Decoder(out_ch=3)
+ dis = Discriminator(in_ch=3, out_ch=3)
+
+ if args.gpu >= 0:
+ chainer.cuda.get_device(args.gpu).use() # Make a specified GPU current
+ enc.to_gpu() # Copy the model to the GPU
+ dec.to_gpu()
+ dis.to_gpu()
+
+ # Setup an optimizer
+ def make_optimizer(model, alpha=0.0002, beta1=0.5):
+ optimizer = chainer.optimizers.Adam(alpha=alpha, beta1=beta1)
+ optimizer.setup(model)
+ optimizer.add_hook(chainer.optimizer.WeightDecay(0.00001), 'hook_dec')
+ return optimizer
+ opt_enc = make_optimizer(enc)
+ opt_dec = make_optimizer(dec)
+ opt_dis = make_optimizer(dis)
+
+ train_d = ImgDataset(args.data_src, args.data_dst, data_range=(0,0.9))
+ test_d = ImgDataset(args.data_src, args.data_dst, data_range=(0.9,1))
+ #train_iter = chainer.iterators.MultiprocessIterator(train_d, args.batchsize, n_processes=4)
+ #test_iter = chainer.iterators.MultiprocessIterator(test_d, args.batchsize, n_processes=4)
+ train_iter = chainer.iterators.SerialIterator(train_d, args.batchsize)
+ test_iter = chainer.iterators.SerialIterator(test_d, args.batchsize)
+
+ # Set up a trainer
+ updater = PicUpdater(
+ models=(enc, dec, dis),
+ iterator={
+ 'main': train_iter,
+ 'test': test_iter},
+ optimizer={
+ 'enc': opt_enc, 'dec': opt_dec,
+ 'dis': opt_dis},
+ device=args.gpu)
+ trainer = training.Trainer(updater, (args.epoch, 'epoch'), out=args.out)
+
+ snapshot_interval = (args.snapshot_interval, 'iteration')
+ display_interval = (args.display_interval, 'iteration')
+ trainer.extend(extensions.snapshot(
+ filename='snapshot_iter_{.updater.iteration}.npz'),
+ trigger=snapshot_interval)
+    # save the parameters every snapshot_interval iterations
+ trainer.extend(extensions.snapshot_object(
+ enc, 'enc_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
+ trainer.extend(extensions.snapshot_object(
+ dec, 'dec_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
+ trainer.extend(extensions.snapshot_object(
+ dis, 'dis_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
+ trainer.extend(extensions.LogReport(trigger=display_interval))
+ trainer.extend(extensions.PrintReport([
+ 'epoch', 'iteration', 'enc/loss', 'dec/loss', 'dis/loss',
+ ]), trigger=display_interval)
+ trainer.extend(extensions.ProgressBar(update_interval=10))
+ trainer.extend(
+ out_image(
+ updater, enc, dec,
+ 1, 1, args.seed, args.out, args.gpu,
+ w_img, h_img),
+ trigger=snapshot_interval)
+    # save the final trained models
+ trainer.extend(extensions.snapshot_object(
+ enc, 'enc_epoch_{.updater.epoch}.npz'), trigger=(args.epoch,'epoch'))
+ trainer.extend(extensions.snapshot_object(
+ dec, 'dec_epoch_{.updater.epoch}.npz'), trigger=(args.epoch,'epoch'))
+ trainer.extend(extensions.snapshot_object(
+ dis, 'dis_epoch_{.updater.epoch}.npz'), trigger=(args.epoch,'epoch'))
+
+ if args.resume:
+ # Resume from a snapshot
+ chainer.serializers.load_npz(args.resume, trainer)
+
+ # Run the training
+ trainer.run()
+
+if __name__ == '__main__':
+ main()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/20181017.py" "b/Features/DeepLearning/Reference/Tang\047s/20181017.py"
new file mode 100644
index 0000000..bcc5ce9
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/20181017.py"
@@ -0,0 +1,1195 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+import tensorflow.contrib.slim as slim
+import numpy as np
+import csv
+import os
+import glob
+import random
+import collections
+import math
+import time
+
+train_input_sample = "D:/test13/Sample16_new_revenge1/" # training data set dir
+train_input_label = "D:/test13/Label16_new_revenge1/"
+
+train_input_sample2 = "D:/test13/VS16_new_revenge1/" # validation data set dir
+train_input_label2 = "D:/test13/VL16_new_revenge1/"
+
+test_input_dir = "D:/result_contract/Otherkind/Tongue/20191119/testinput/"
+test_input_label = "D:/result_contract/Otherkind/Tongue/20191119/testlabel"
+test_output_dir = "D:/result_contract/Otherkind/Tongue/20191119/testoutput/"
+
+train_output = "D:/Result_RE_Revenge109/training" # training images output dir
+validation_output = "D:/Result_RE_Revenge109/validation/" # validation images output dir
+
+checkpoint = "D:/Result_RE_Revenge109_checkpoint/" # model saving dir
+
+# create output directories
+if not os.path.exists(train_output):
+ os.makedirs(train_output)
+if not os.path.exists(validation_output):
+ os.makedirs(validation_output)
+if not os.path.exists(checkpoint):
+ os.makedirs(checkpoint)
+if not os.path.exists(test_output_dir):
+ os.makedirs(test_output_dir)
+
+log = open(r'D:/Result_RE_Revenge109_checkpoint/log.txt', 'a') # log saving dir
+loss_csv = open(r'D:/Result_RE_Revenge109_checkpoint/loss_csv.csv', 'a', newline='') # loss value saving dir
+
+seed = None
+max_steps = 20000 # number of training steps (0 to disable)
+max_epochs = None # number of training epochs
+
+progress_freq = 50 # display progress every progress_freq steps
+trace_freq = 0 # trace execution every trace_freq steps
+display_freq = 50 # write current training images every display_freq steps
+validation_freq = 50
+
+save_freq = None # save model every save_freq steps, 0 to disable
+
+separable_conv = False # use separable convolutions in the generator
+aspect_ratio = 1.0 # aspect ratio of output images (width/height)
+batch_size = 2 # help="number of images in batch")
+which_direction = "AtoB" # choices=["AtoB", "BtoA"]
+ngf = 64 # help="number of generator filters in first conv layer")
+ndf = 64 # help="number of discriminator filters in first conv layer")
+scale_size = 256 # help="scale images to this size before cropping to 256x256")
+validation_size = 256
+
+flip = False # flip images horizontally
+
+brightness = False
+contrast = False
+# hue = False
+# saturation = False
+gamma = False
+
+lr = 0.0001 # initial learning rate for adam
+beta1 = 0.5 # momentum term of adam
+l1_weight = 100.0 # weight on L1 term for generator gradient
+gan_weight = 1.0 # weight on GAN term for generator gradient
+
+EPS = 1e-12 # Very small number, preventing gradient loss to 0
+CROP_SIZE = 256 # Crop size of the image
+
+# Named tuples for storing loaded data collections to create good models
+Examples = collections.namedtuple("Examples", "paths, inputs, targets, count, steps_per_epoch")
+# Model = collections.namedtuple("Model", "outputs, predict_real, predict_fake, discrim_loss,"
+# "discrim_grads_and_vars, gen_loss_GAN, gen_loss_L1,"
+# "gen_grads_and_vars, train, validate")
+Model = collections.namedtuple("Model", "outputs, gen_loss_L1,"
+ "gen_grads_and_vars, train, validate")
+
+
+# Image preprocessing [0, 1] => [-1, 1]
+# def preprocess(image):
+# with tf.name_scope("preprocess"):
+# return image * 2 - 1
+
+
+# Image post processing [-1, 1] => [0, 1]
+# def deprocess(image):
+# with tf.name_scope("deprocess"):
+# return (image + 1) / 2
+
+
+# Discriminator convolution; batch_input is [ batch , 256 , 256 , 6 ]
+# def discrim_conv(batch_input, out_channels, stride):
+# # [ batch , 256 , 256 , 6 ] ===>[ batch , 258 , 258 , 6 ]
+# padded_input = tf.pad(batch_input, [[0, 0], [1, 1], [1, 1], [0, 0]], mode="CONSTANT")
+# '''
+#     [0,0]: do not pad the first (batch) dimension
+#     [1,1]: pad the second dimension (image width) with one zero column on each side
+#     [1,1]: pad the third dimension (image height) with one zero row on each side
+#     [0,0]: do not pad the fourth (channel) dimension
+# '''
+# return tf.layers.conv2d(padded_input, out_channels, kernel_size=4, strides=(stride, stride), padding="valid",
+# kernel_initializer=tf.random_normal_initializer(0, 0.02))
+
+
+# Generator convolution: 4x4 kernel, stride 2, so the output is half the input resolution
+def gen_conv(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ return tf.layers.separable_conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ return tf.layers.conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ kernel_initializer=initializer)
+
+
+def gen_conv2(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ return tf.layers.separable_conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put = tf.layers.conv2d(batch_input, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put2 = tf.layers.conv2d(out_put, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put3 = tf.layers.max_pooling2d(inputs=out_put2, pool_size=[2, 2], strides=2, padding="same")
+ return out_put3
+
+
+def gen_conv3(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ return tf.layers.separable_conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put = tf.layers.conv2d(batch_input, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put2 = tf.layers.conv2d(out_put, out_channels, kernel_size=3, strides=(1, 1), padding="same")
+ return out_put2
+
+
+# Generator deconvolution (transposed convolution)
+def gen_deconv(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ _b, h, w, _c = batch_input.shape
+ resized_input = tf.image.resize_images(batch_input, [h * 2, w * 2],
+ method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
+ return tf.layers.separable_conv2d(resized_input, out_channels, kernel_size=4, strides=(1, 1), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ return tf.layers.conv2d_transpose(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ kernel_initializer=initializer)
+
+
+def gen_deconv2(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ _b, h, w, _c = batch_input.shape
+ resized_input = tf.image.resize_images(batch_input, [h * 2, w * 2],
+ method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
+ return tf.layers.separable_conv2d(resized_input, out_channels, kernel_size=4, strides=(1, 1), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put = tf.layers.conv2d_transpose(batch_input, out_channels, kernel_size=3, strides=(2, 2), padding="same",
+ kernel_initializer=initializer)
+ out_put2 = tf.layers.conv2d(out_put, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put3 = tf.layers.conv2d(out_put2, out_channels, kernel_size=3, strides=(1, 1), padding="same")
+ return out_put3
+
+
+def gen_deconv3(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ _b, h, w, _c = batch_input.shape
+ resized_input = tf.image.resize_images(batch_input, [h * 2, w * 2],
+ method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
+ return tf.layers.separable_conv2d(resized_input, out_channels, kernel_size=4, strides=(1, 1), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put4 = tf.layers.conv2d(batch_input, out_channels, kernel_size=1, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ return out_put4
+
+
+# Leaky ReLU activation
+def lrelu(x, a):
+ with tf.name_scope("lrelu"):
+ # adding these together creates the leak part and linear part
+ # then cancels them out by subtracting/adding an absolute value term
+ # leak: a*x/2 - a*abs(x)/2
+ # linear: x/2 + abs(x)/2
+
+ # this block looks like it has 2 inputs on the graph unless we do this
+ x = tf.identity(x)
+ return (0.5 * (1 + a)) * x + (0.5 * (1 - a)) * tf.abs(x)
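+        # Numeric check of the identity above: for x >= 0 the two terms sum to x, for x < 0 to a*x.
+        # e.g. a = 0.2, x = -2.0: 0.5*(1.2)*(-2.0) + 0.5*(0.8)*2.0 = -1.2 + 0.8 = -0.4 = 0.2*(-2.0)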
+
+
+# Batch normalization
+def batchnorm(inputs):
+ return tf.layers.batch_normalization(inputs, axis=3, epsilon=1e-5, momentum=0.1, training=True,
+ gamma_initializer=tf.random_normal_initializer(1.0, 0.02))
+
+
+# Check the image dimensions
+def check_image(image):
+ assertion = tf.assert_equal(tf.shape(image)[-1], 3, message="image must have 3 color channels")
+ with tf.control_dependencies([assertion]):
+ image = tf.identity(image)
+
+ if image.get_shape().ndims not in (3, 4):
+ raise ValueError("image must be either 3 or 4 dimensions")
+
+ # make the last dimension 3 so that you can unstack the colors
+ shape = list(image.get_shape())
+ shape[-1] = 3
+ image.set_shape(shape)
+ return image
+
+
+# Strip the extension and return the bare file name
+def get_name(path):
+    # os.path.basename() returns the last component of path; if path ends with / or \ it returns an empty string.
+    # os.path.splitext() splits the file name and its extension and returns a (fname, fextension) tuple.
+ name, _ = os.path.splitext(os.path.basename(path))
+ return name
+
+
+# Load the dataset: read files --> decode --> normalize --> split into inputs and targets --> map pixels to [-1,1] --> reshape
+def load_examples(input_sample, input_label, shuffle, trans):
+ if input_sample is None or not os.path.exists(input_sample):
+ raise Exception("input_dir does not exist")
+
+ if input_label is None or not os.path.exists(input_label):
+ raise Exception("input_dir2 does not exist")
+
+    # glob returns all files under the given path that match the pattern, as a list.
+ input_path_sample = glob.glob(os.path.join(input_sample, "*.jpg"))
+ input_path_label = glob.glob(os.path.join(input_label, "*.jpg"))
+
+    # image decoder
+ decode = tf.image.decode_jpeg
+
+ if len(input_path_sample) == 0:
+ raise Exception("input_sample contains no image files")
+
+ if len(input_path_label) == 0:
+ raise Exception("input_label contains no image files")
+
+    # If the file names are numeric, sort numerically; otherwise sort alphabetically
+ if all(get_name(path).isdigit() for path in input_path_sample):
+ input_path_sample = sorted(input_path_sample, key=lambda path: int(get_name(path)))
+ else:
+ input_path_sample = sorted(input_path_sample)
+
+ if all(get_name(path).isdigit() for path in input_path_label):
+ input_path_label = sorted(input_path_label, key=lambda path: int(get_name(path)))
+ else:
+ input_path_label = sorted(input_path_label)
+
+ # sess = tf.Session()
+
+ with tf.name_scope("load_images"):
+        # Pack all of the required files into an internal TF queue; file names are then read from this queue.
+        # In training mode shuffle is True.
+ if shuffle == 1:
+ path_queue = tf.train.slice_input_producer([input_path_sample, input_path_label], shuffle=True)
+ if shuffle == 0:
+ path_queue = tf.train.slice_input_producer([input_path_sample, input_path_label], shuffle=False)
+
+        # Read would output a file name (key) and that file's contents (value); one file per call, over multiple calls.
+ # reader = tf.WholeFileReader()
+
+ paths = input_path_sample
+
+ samples = tf.read_file(path_queue[0])
+ labels = tf.read_file(path_queue[1])
+
+        # Decode the files and normalize the images
+ raw_input_sample = decode(samples)
+ raw_input_label = decode(labels)
+        raw_input_sample = tf.image.convert_image_dtype(raw_input_sample, dtype=tf.float32)  # normalize to [0, 1]
+ raw_input_label = tf.image.convert_image_dtype(raw_input_label, dtype=tf.float32)
+
+        # Assert that the two values are equal; raise an exception if they are not
+ assertion_sample = tf.assert_equal(tf.shape(raw_input_sample)[2], 3, message="image does not have 3 channels")
+ assertion_label = tf.assert_equal(tf.shape(raw_input_label)[2], 3, message="image2 does not have 3 channels")
+ '''
+        control_dependencies only takes effect when its body contains an op: the ops passed in run first, then the
+        ops inside the block. If the body does not create an op, no node is added to the graph and the manager has
+        no effect. tf.identity is an op that returns an identical new tensor, adding a node to the graph, so
+        control_dependencies then takes effect.
+ '''
+ with tf.control_dependencies([assertion_sample]):
+ raw_input_sample = tf.identity(raw_input_sample)
+
+ raw_input_sample.set_shape([None, None, 3])
+
+ with tf.control_dependencies([assertion_label]):
+ raw_input_label = tf.identity(raw_input_label)
+
+ raw_input_label.set_shape([None, None, 3])
+
+        # pixel values from [0,1] ---> [-1, 1]
+ # width = tf.shape(raw_input)[1] # [height, width, channels]
+ # a_images = preprocess(raw_input_sample)
+ # b_images = preprocess(raw_input_label)
+ a_images = raw_input_sample
+ b_images = raw_input_label
+
+        # which_direction is set to "AtoB" above
+ if which_direction == "AtoB":
+ inputs, targets = [a_images, b_images]
+ elif which_direction == "BtoA":
+ inputs, targets = [b_images, a_images]
+ else:
+ raise Exception("invalid direction")
+
+ # synchronize seed for image operations so that we do the same operations to both
+ # input and output images
+ # seed = random.randint(0, 2 ** 31 - 1)
+
+    # Image preprocessing: flip and reshape
+ with tf.name_scope("input_images"):
+ if trans == 0:
+ input_images = notransform(inputs)
+ if trans == 1:
+ inputs = transform(inputs)
+ input_images = transform2(inputs)
+ # input_images = random_erasing(inputs)
+ # input_images.set_shape([256, 256, 3])
+
+ with tf.name_scope("target_images"):
+ if trans == 0:
+ target_images = notransform(targets)
+ if trans == 1:
+ target_images = transform(targets)
+
+    # Build batches of the input and target images
+ paths_batch, inputs_batch, targets_batch = tf.train.batch([paths, input_images, target_images],
+ batch_size=batch_size)
+ steps_per_epoch = int(math.ceil(len(input_path_sample) / batch_size))
+
+ return Examples(
+        paths=paths_batch,  # batch of input file names
+        inputs=inputs_batch,  # batch of input images
+        targets=targets_batch,  # batch of target images
+        count=len(input_path_sample),  # size of the dataset
+        steps_per_epoch=steps_per_epoch,  # number of batches per epoch
+ )
+
+
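+# Random-Erasing-style augmentation: with probability `probability` a patch of uniform noise, whose size is
+# bounded via sl/sh/r1, is randomly placed and added onto the image; otherwise the image is returned unchanged.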
+def random_erasing(img, probability=0.5, sl=0.005, sh=0.05, r1=0.5):
+ height = tf.shape(img)[0]
+ width = tf.shape(img)[1]
+ channel = tf.shape(img)[2]
+ area = tf.cast(width*height, tf.float32)
+
+ erase_area_low_bound = tf.cast(tf.round(tf.sqrt(sl * area * r1)), tf.int32)
+ erase_area_up_bound = tf.cast(tf.round(tf.sqrt((sh * area) / r1)), tf.int32)
+ h_upper_bound = tf.minimum(erase_area_up_bound, height)
+ w_upper_bound = tf.minimum(erase_area_up_bound, width)
+
+ h = tf.random_uniform([], erase_area_low_bound, h_upper_bound, tf.int32)
+ w = tf.random_uniform([], erase_area_low_bound, w_upper_bound, tf.int32)
+
+ erase_area = tf.cast(tf.random_uniform([h, w, channel], -255, 255, tf.int32), tf.float32)
+ erase_area_img = tf.image.resize_image_with_crop_or_pad(erase_area, 256, 256)
+ erase_area_img = tf.image.resize_images(erase_area_img, [512, 512], method=tf.image.ResizeMethod.BILINEAR)
+ # offset1 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 384 + 1, seed=seed)), dtype=tf.int32)
+ # offset2 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ # offset1 = tf.cast(tf.floor(tf.random_uniform([2], 512 - 320, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ # offset2 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ offset1 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ offset2 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ erase_area_img = tf.image.crop_to_bounding_box(erase_area_img, offset1[0], offset2[1], 256, 256)
+ img_v = img + erase_area_img
+ return tf.cond(tf.random_uniform([], 0, 1) > probability, lambda: img, lambda: img_v)
+
+ # x1 = tf.random_uniform([], 0, height+1 - h, tf.int32)
+ # y1 = tf.random_uniform([], 0, width+1 - w, tf.int32)
+ #
+ # erase_area = tf.cast(tf.random.uniform([h, w, channel], 0, 255, tf.int32), tf.uint8)
+ #
+ # erasing_img = img[x1:x1 + h, y1:y1 + w, :].assign(erase_area)
+ #
+ # return tf.cond(tf.random.uniform([], 0, 1) > probability, lambda: img, lambda: erasing_img)
+
+
+# Image preprocessing: random flip, resize and crop
+def transform(image):
+ r = image
+ if flip:
+ r = tf.image.random_flip_left_right(r, seed=seed)
+ # r = tf.image.random_flip_up_down(r, seed=seed)
+
+ # area produces a nice downscaling, but does nearest neighbor for upscaling
+ # assume we're going to be doing downscaling here
+ r = tf.image.resize_images(r, [scale_size, scale_size], method=tf.image.ResizeMethod.AREA)
+
+ offset = tf.cast(tf.floor(tf.random_uniform([2], 0, scale_size - CROP_SIZE + 1, seed=seed)), dtype=tf.int32)
+ if scale_size > CROP_SIZE:
+ r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], CROP_SIZE, CROP_SIZE)
+ elif scale_size < CROP_SIZE:
+ raise Exception("scale size cannot be less than crop size")
+ return r
+
+
+# Image preprocessing
+def transform2(image):
+ r = image
+ r1 = image
+ r2 = image
+ r3 = image
+ r4 = image
+ r5 = image
+ r6 = image
+ a = random.uniform(1, 2)
+ # b = random.uniform(0.5, 1.5)
+ b = 1
+
+ if brightness:
+ r1 = tf.image.random_brightness(r1, max_delta=0.2)
+ if contrast:
+ r1 = tf.image.random_contrast(r1, lower=0.5, upper=1.5)
+ if gamma:
+ r1 = tf.image.adjust_gamma(r1, gain=a, gamma=b)
+ # if saturation:
+ # r1 = tf.image.random_saturation(r1, lower=1, upper=2)
+ # if hue:
+ # r1 = tf.image.random_hue(r1, max_delta=0.5)
+
+ if brightness:
+ r2 = tf.image.random_brightness(r2, max_delta=0.2)
+ if gamma:
+ r2 = tf.image.adjust_gamma(r2, gain=a, gamma=b)
+ if contrast:
+ r2 = tf.image.random_contrast(r2, lower=0.5, upper=1.5)
+
+ if contrast:
+ r3 = tf.image.random_contrast(r3, lower=0.5, upper=1.5)
+ if brightness:
+ r3 = tf.image.random_brightness(r3, max_delta=0.2)
+ if gamma:
+ r3 = tf.image.adjust_gamma(r3, gain=a, gamma=b)
+
+ if contrast:
+ r4 = tf.image.random_contrast(r4, lower=0.5, upper=1.5)
+ if gamma:
+ r4 = tf.image.adjust_gamma(r4, gain=a, gamma=b)
+ if brightness:
+ r4 = tf.image.random_brightness(r4, max_delta=0.2)
+
+ if gamma:
+ r5 = tf.image.adjust_gamma(r5, gain=a, gamma=b)
+ if brightness:
+ r5 = tf.image.random_brightness(r5, max_delta=0.2)
+ if contrast:
+ r5 = tf.image.random_contrast(r5, lower=0.5, upper=1.5)
+
+ if gamma:
+ r6 = tf.image.adjust_gamma(r6, gain=a, gamma=b)
+ if contrast:
+ r6 = tf.image.random_contrast(r6, lower=0.5, upper=1.5)
+ if brightness:
+ r6 = tf.image.random_brightness(r6, max_delta=0.2)
+
+ r12 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r1, lambda: r2)
+ r34 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r3, lambda: r4)
+ r56 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r5, lambda: r6)
+ r1234 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r12, lambda: r34)
+ r5678 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r56, lambda: r)
+ r12345678 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r1234, lambda: r5678)
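+    # The nested tf.cond calls pick one variant at random: each of r1..r6 (a different ordering of the
+    # brightness/contrast/gamma adjustments) ends up chosen with probability 1/8, and the untouched
+    # image r with probability 1/4.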
+ return r12345678
+
+
+def notransform(image):
+ r = image
+
+ # area produces a nice downscaling, but does nearest neighbor for upscaling
+ # assume we're going to be doing downscaling here
+ r = tf.image.resize_images(r, [validation_size, validation_size], method=tf.image.ResizeMethod.AREA)
+
+ offset = tf.cast(tf.floor(tf.random_uniform([2], 0, 1, seed=seed)), dtype=tf.int32)
+ r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], validation_size, validation_size)
+
+ return r
+
+
+# Build the generator, an encoder-decoder variant; input and output are both 256*256*3 with pixel values in [-1,1]
+def create_generator(generator_inputs, generator_outputs_channels):
+ layers = []
+
+ # encoder_1: [batch, 256, 256, in_channels] => [batch, 128, 128, ngf]
+ with tf.variable_scope("encoder_1"):
+ output = gen_conv(generator_inputs, ngf) # ngf is the number of convolution kernels of
+ # the first convolutional layer. Default is 64.
+ layers.append(output)
+
+ layer_specs = [
+ ngf * 2, # encoder_2: [batch, 128, 128, ngf] => [batch, 64, 64, ngf * 2]
+ ngf * 4, # encoder_3: [batch, 64, 64, ngf * 2] => [batch, 32, 32, ngf * 4]
+ ngf * 8, # encoder_4: [batch, 32, 32, ngf * 4] => [batch, 16, 16, ngf * 8]
+ ngf * 8, # encoder_5: [batch, 16, 16, ngf * 8] => [batch, 8, 8, ngf * 8]
+ ngf * 8, # encoder_6: [batch, 8, 8, ngf * 8] => [batch, 4, 4, ngf * 8]
+ ngf * 8, # encoder_7: [batch, 4, 4, ngf * 8] => [batch, 2, 2, ngf * 8]
+ ngf * 8, # encoder_8: [batch, 2, 2, ngf * 8] => [batch, 1, 1, ngf * 8]
+ ]
+
+    # convolutional encoder
+ for out_channels in layer_specs:
+ with tf.variable_scope("encoder_%d" % (len(layers) + 1)):
+            # apply the activation function to the last layer
+ rectified = lrelu(layers[-1], 0.2)
+ # [batch, in_height, in_width, in_channels] => [batch, in_height/2, in_width/2, out_channels]
+ convolved = gen_conv(rectified, out_channels)
+ output = batchnorm(convolved)
+ layers.append(output)
+
+ layer_specs = [
+ (ngf * 8, 0.5), # decoder_8: [batch, 1, 1, ngf * 8] => [batch, 2, 2, ngf * 8 * 2]
+ (ngf * 8, 0.5), # decoder_7: [batch, 2, 2, ngf * 8 * 2] => [batch, 4, 4, ngf * 8 * 2]
+ (ngf * 8, 0.5), # decoder_6: [batch, 4, 4, ngf * 8 * 2] => [batch, 8, 8, ngf * 8 * 2]
+ (ngf * 8, 0.0), # decoder_5: [batch, 8, 8, ngf * 8 * 2] => [batch, 16, 16, ngf * 8 * 2]
+ (ngf * 8, 0.0), # decoder_4: [batch, 16, 16, ngf * 8 * 2] => [batch, 32, 32, ngf * 4 * 2]
+ (ngf * 4, 0.0), # decoder_3: [batch, 32, 32, ngf * 4 * 2] => [batch, 64, 64, ngf * 2 * 2]
+ (ngf * 2, 0.0), # decoder_2: [batch, 64, 64, ngf * 2 * 2] => [batch, 128, 128, ngf * 2]
+ ]
+
+    # convolutional decoder
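+    # Each decoder level except the first concatenates the previous decoder output with the matching
+    # encoder output along the channel axis (a U-Net style skip connection) before deconvolving.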
+ num_encoder_layers = len(layers) # 8
+ for decoder_layer, (out_channels, dropout) in enumerate(layer_specs):
+ skip_layer = num_encoder_layers - decoder_layer - 1
+ with tf.variable_scope("decoder_%d" % (skip_layer + 1)):
+ if decoder_layer == 0:
+ # first decoder layer doesn't have skip connections
+ # since it is directly connected to the skip_layer
+ input = layers[-1]
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv(rectified, out_channels)
+ # elif decoder_layer == 6:
+ # input = layers[-1]
+ # elif decoder_layer == 5:
+ # input = layers[-1]
+ # elif decoder_layer == 4:
+ # input = layers[-1]
+ # elif decoder_layer == 3:
+ # input = layers[-1]
+ # elif decoder_layer == 2:
+ # input = layers[-1]
+ # elif decoder_layer == 4:
+ # input = layers[-1]
+ # # input = tf.concat([layers[-1], layers[3]], axis=3)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv2(rectified, out_channels)
+
+ else:
+ input = tf.concat([layers[-1], layers[skip_layer]], axis=3)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv(rectified, out_channels)
+
+ rectified = tf.nn.relu(input)
+ # rectified = tf.nn.leaky_relu(input)
+ # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ output = gen_deconv(rectified, out_channels)
+ output = batchnorm(output)
+
+ if dropout > 0.0:
+ output = tf.nn.dropout(output, keep_prob=1 - dropout)
+
+ layers.append(output)
+
+ # decoder_1: [batch, 128, 128, ngf * 2] => [batch, 256, 256, generator_outputs_channels]
+ with tf.variable_scope("decoder_1"):
+ # input = tf.concat([layers[-1], layers[0]], axis=3)
+ # input = tf.concat([layers[-1], layers[1]], axis=3)
+ input = layers[-1]
+ rectified = tf.nn.relu(input)
+ # rectified = tf.nn.leaky_relu(input)
+ output = gen_deconv(rectified, generator_outputs_channels)
+ output = tf.tanh(output)
+ layers.append(output)
+
+ return layers[-1]
+
+
+# Build the discriminator; its inputs are the generated and the real image, two [batch,256,256,3] tensors with values in [-1,1]; its output is [batch,30,30,1] with probability values
+# def create_discriminator(discrim_inputs, discrim_targets):
+# n_layers = 3
+# layers = []
+#
+# # 2x [batch, height, width, in_channels] => [batch, height, width, in_channels * 2]
+# input = tf.concat([discrim_inputs, discrim_targets], axis=3)
+#
+# # layer_1: [batch, 256, 256, in_channels * 2] => [batch, 128, 128, ndf]
+# with tf.variable_scope("layer_1"):
+# convolved = discrim_conv(input, ndf, stride=2)
+# rectified = lrelu(convolved, 0.2)
+# layers.append(rectified)
+#
+# # layer_2: [batch, 128, 128, ndf] => [batch, 64, 64, ndf * 2]
+# # layer_3: [batch, 64, 64, ndf * 2] => [batch, 32, 32, ndf * 4]
+# # layer_4: [batch, 32, 32, ndf * 4] => [batch, 31, 31, ndf * 8]
+# for i in range(n_layers):
+# with tf.variable_scope("layer_%d" % (len(layers) + 1)):
+# out_channels = ndf * min(2 ** (i + 1), 8)
+# stride = 1 if i == n_layers - 1 else 2 # last layer here has stride 1
+# convolved = discrim_conv(layers[-1], out_channels, stride=stride)
+# normalized = batchnorm(convolved)
+# rectified = lrelu(normalized, 0.2)
+# layers.append(rectified)
+#
+# # layer_5: [batch, 31, 31, ndf * 8] => [batch, 30, 30, 1]
+# with tf.variable_scope("layer_%d" % (len(layers) + 1)):
+# convolved = discrim_conv(rectified, out_channels=1, stride=1)
+# output = tf.sigmoid(convolved)
+# layers.append(output)
+#
+# return layers[-1]
+
+
+def create_generator2(generator_inputs, generator_outputs_channels):
+ layers = []
+
+ # encoder_1: [batch, 256, 256, in_channels] => [batch, 128, 128, ngf]
+ with tf.variable_scope("encoder_1"):
+ output = gen_conv2(generator_inputs, ngf) # ngf is the number of convolution kernels of
+ # the first convolutional layer. Default is 64.
+ layers.append(output)
+
+ layer_specs = [
+ ngf * 2, # encoder_2: [batch, 128, 128, ngf] => [batch, 64, 64, ngf * 2]
+ ngf * 4, # encoder_3: [batch, 64, 64, ngf * 2] => [batch, 32, 32, ngf * 4]
+ ngf * 8, # encoder_4: [batch, 32, 32, ngf * 4] => [batch, 16, 16, ngf * 8]
+ # ngf * 8, # encoder_5: [batch, 16, 16, ngf * 8] => [batch, 8, 8, ngf * 8]
+ # ngf * 8, # encoder_6: [batch, 8, 8, ngf * 8] => [batch, 4, 4, ngf * 8]
+ # ngf * 8, # encoder_7: [batch, 4, 4, ngf * 8] => [batch, 2, 2, ngf * 8]
+ # ngf * 8, # encoder_8: [batch, 2, 2, ngf * 8] => [batch, 1, 1, ngf * 8]
+ ]
+
+    # convolutional encoder
+ for out_channels in layer_specs:
+ with tf.variable_scope("encoder_%d" % (len(layers) + 1)):
+            # apply the activation function to the last layer
+ rectified = lrelu(layers[-1], 0.2)
+ # [batch, in_height, in_width, in_channels] => [batch, in_height/2, in_width/2, out_channels]
+ convolved = gen_conv2(rectified, out_channels)
+ output = batchnorm(convolved)
+ layers.append(output)
+
+ layer_specs = [
+ # (ngf * 8, 0.5), # decoder_8: [batch, 1, 1, ngf * 8] => [batch, 2, 2, ngf * 8 * 2]
+ # (ngf * 8, 0.5), # decoder_7: [batch, 2, 2, ngf * 8 * 2] => [batch, 4, 4, ngf * 8 * 2]
+ # (ngf * 8, 0.5), # decoder_6: [batch, 4, 4, ngf * 8 * 2] => [batch, 8, 8, ngf * 8 * 2]
+ # (ngf * 8, 0.0), # decoder_5: [batch, 8, 8, ngf * 8 * 2] => [batch, 16, 16, ngf * 8 * 2]
+ (ngf * 8, 0.0), # decoder_4: [batch, 16, 16, ngf * 8 * 2] => [batch, 32, 32, ngf * 4 * 2]
+ (ngf * 4, 0.0), # decoder_3: [batch, 32, 32, ngf * 4 * 2] => [batch, 64, 64, ngf * 2 * 2]
+ (ngf * 2, 0.0), # decoder_2: [batch, 64, 64, ngf * 2 * 2] => [batch, 128, 128, ngf * 2]
+ ]
+
+    # convolutional decoder
+ num_encoder_layers = len(layers) # 8
+ for decoder_layer, (out_channels, dropout) in enumerate(layer_specs):
+ skip_layer = num_encoder_layers - decoder_layer - 1
+ with tf.variable_scope("decoder_%d" % (skip_layer + 1)):
+ if decoder_layer == 0:
+ # first decoder layer doesn't have skip connections
+ # since it is directly connected to the skip_layer
+ input = layers[-1]
+ input = gen_conv3(input, 1024)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv(rectified, out_channels)
+ # elif decoder_layer == 6:
+ # input = layers[-1]
+ # elif decoder_layer == 5:
+ # input = layers[-1]
+ # elif decoder_layer == 4:
+ # input = layers[-1]
+ # elif decoder_layer == 3:
+ # input = layers[-1]
+ elif decoder_layer == 2:
+ input = layers[-1]
+ elif decoder_layer == 1:
+ input = layers[-1]
+ # elif decoder_layer == 4:
+ # input = layers[-1]
+ # # input = tf.concat([layers[-1], layers[3]], axis=3)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv2(rectified, out_channels)
+
+ else:
+ input = tf.concat([layers[-1], layers[skip_layer]], axis=3)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv(rectified, out_channels)
+
+ rectified = tf.nn.relu(input)
+ # rectified = tf.nn.leaky_relu(input)
+ # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ output = gen_deconv2(rectified, out_channels)
+ output = batchnorm(output)
+
+ if dropout > 0.0:
+ output = tf.nn.dropout(output, keep_prob=1 - dropout)
+
+ layers.append(output)
+
+ # decoder_1: [batch, 128, 128, ngf * 2] => [batch, 256, 256, generator_outputs_channels]
+ with tf.variable_scope("decoder_1"):
+ # input = tf.concat([layers[-1], layers[0]], axis=3)
+ # input = tf.concat([layers[-1], layers[1]], axis=3)
+ input = layers[-1]
+ rectified = tf.nn.relu(input)
+ # rectified = tf.nn.leaky_relu(input)
+ output = gen_deconv2(rectified, 64)
+ output = gen_deconv3(output, generator_outputs_channels)
+ # output = tf.tanh(output)
+ # output = tf.nn.softmax(output)
+ # output = tf.nn.relu(output)
+ output = tf.sigmoid(output)
+ layers.append(output)
+
+ return layers[-1]
+
+
+# Build the pix2pix model; inputs and targets have shape [batch_size, height, width, channels]
+def create_model(inputs, targets):
+ with tf.variable_scope("generator1"):
+ out_channels = int(targets.get_shape()[-1])
+ outputs = create_generator2(inputs, out_channels)
+ # outputs = UNet(inputs)
+
+ # create two copies of discriminator, one for real pairs and one for fake pairs
+ # they share the same underlying variables
+ # with tf.name_scope("real_discriminator"):
+ # with tf.variable_scope("discriminator"):
+ # # 2x [batch, height, width, channels] => [batch, 30, 30, 1]
+    #         predict_real = create_discriminator(inputs, targets)  # conditioning image and real image
+
+ # with tf.name_scope("fake_discriminator"):
+ # with tf.variable_scope("discriminator", reuse=True):
+ # # 2x [batch, height, width, channels] => [batch, 30, 30, 1]
+    #         predict_fake = create_discriminator(inputs, outputs)  # conditioning image and generated image
+
+ # The discriminator loses, the discriminator wants V(G, D) as large as possible
+ # with tf.name_scope("discriminator_loss"):
+ # # minimizing -tf.log will try to get inputs to 1
+ # # predict_real => 1
+ # # predict_fake => 0
+ # discrim_loss = tf.reduce_mean(-(tf.log(predict_real + EPS) + tf.log(1 - predict_fake + EPS)))
+
+ # Generator loss, the generator wants V(G, D) to be as small as possible
+ with tf.name_scope("generator_loss"):
+ # predict_fake => 1
+ # abs(targets - outputs) => 0
+ # gen_loss_GAN = tf.reduce_mean(-tf.log(predict_fake + EPS))
+ # gen_loss_L1 = tf.reduce_mean(tf.abs(targets - outputs))
+ # gen_loss_L1 = tf.reduce_mean(tf.square(targets - outputs))
+ gen_loss_L1 = -tf.reduce_mean((targets + EPS) * tf.log(outputs + EPS) + (1 - targets + EPS) * tf.log(1 - outputs + EPS))
+ # gen_loss = gen_loss_GAN * gan_weight + gen_loss_L1 * l1_weight
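+        # Note: despite its name, gen_loss_L1 above is a per-pixel binary cross-entropy,
+        # -mean(t * log(o) + (1 - t) * log(1 - o)) with EPS added for numerical stability, not an L1 distance.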
+
+    # discriminator training
+    # with tf.name_scope("discriminator_train"):
+    #     # parameters the discriminator needs to optimize
+    #     discrim_tvars = [var for var in tf.trainable_variables() if var.name.startswith("discriminator")]
+    #     # define the optimizer
+    #     discrim_optim = tf.train.AdamOptimizer(lr, beta1)
+    #     # gradients of the loss with respect to those parameters
+    #     discrim_grads_and_vars = discrim_optim.compute_gradients(discrim_loss, var_list=discrim_tvars)
+    #     # apply the gradients to update the parameter state; returns an op
+    #     discrim_train = discrim_optim.apply_gradients(discrim_grads_and_vars)
+
+    # generator training
+    with tf.name_scope("generator_train"):
+        # with tf.control_dependencies([discrim_train]):
+        # list of parameters the generator needs to optimize
+        gen_tvars = [var for var in tf.trainable_variables() if var.name.startswith("generator1")]
+        # define the optimizer
+        gen_optim = tf.train.AdamOptimizer(lr, beta1)
+        # gradients of the loss with respect to those parameters
+        gen_grads_and_vars = gen_optim.compute_gradients(gen_loss_L1, var_list=gen_tvars)
+        # gen_grads_and_vars = gen_optim.compute_gradients(gen_loss, var_list=gen_tvars)
+        # apply the gradients to update the parameter state; returns an op
+ gen_train = gen_optim.apply_gradients(gen_grads_and_vars)
+
+    '''
+    When a network is trained with stochastic gradient descent, tf.train.ExponentialMovingAverage is used to
+    improve the robustness of the model on test data. It takes a decay rate that controls how quickly the model
+    updates. For every variable to be trained, ExponentialMovingAverage maintains a shadow variable whose initial
+    value is the variable's initial value and which is updated as
+    shadow_variable = decay * shadow_variable + (1 - decay) * variable
+    '''
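+    # One step of that update with decay = 0.99, shadow_variable = 1.0, variable = 0.5:
+    # 0.99 * 1.0 + 0.01 * 0.5 = 0.995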
+ ema = tf.train.ExponentialMovingAverage(decay=0.99)
+ # update_losses = ema.apply([discrim_loss, gen_loss_GAN, gen_loss_L1])
+ update_losses = ema.apply([gen_loss_L1])
+
+ global_step = tf.train.get_or_create_global_step()
+ incr_global_step = tf.assign(global_step, global_step + 1)
+
+ return Model(
+        # predict_real=predict_real,  # probability map for the conditioning (input) image vs. the real image, shape [batch,30,30,1]
+        # predict_fake=predict_fake,  # probability map for the conditioning (input) image vs. the generated image, shape [batch,30,30,1]
+        # discrim_loss=ema.average(discrim_loss),  # discriminator loss
+        # discrim_grads_and_vars=discrim_grads_and_vars,  # discriminator parameters to optimize and their gradients
+        # gen_loss_GAN=ema.average(gen_loss_GAN),  # generator GAN loss
+        gen_loss_L1=ema.average(gen_loss_L1),  # generator reconstruction loss
+        gen_grads_and_vars=gen_grads_and_vars,  # generator parameters to optimize and their gradients
+        outputs=outputs,  # images produced by the generator
+        train=tf.group(update_losses, incr_global_step, gen_train),  # group of the ops that need to be run
+ validate=gen_loss_L1
+ )
+
+
+# Save images
+def save_images(output_dir, fetches, step=None):
+ image_dir = os.path.join(output_dir)
+ if not os.path.exists(image_dir):
+ os.makedirs(image_dir)
+
+ filesets = []
+ for i, in_path in enumerate(fetches["paths"]):
+ # name, _ = os.path.splitext(os.path.basename(in_path.decode("utf8")))
+ name, _ = os.path.splitext(os.path.basename(str(in_path)))
+ fileset = {"name": name, "step": step}
+ for kind in ["inputs", "outputs", "targets"]:
+ filename = name + "-" + kind + ".jpg"
+ if step is not None:
+ filename = "%08d-%s" % (step, filename)
+ fileset[kind] = filename
+ out_path = os.path.join(image_dir, filename)
+ contents = fetches[kind][i]
+ with open(out_path, "wb") as f:
+ f.write(contents)
+ filesets.append(fileset)
+ return filesets
+
+
+# Save images (test outputs only)
+def save_images_test(output_dir, fetches, step=None):
+ image_dir = os.path.join(output_dir)
+ if not os.path.exists(image_dir):
+ os.makedirs(image_dir)
+
+ filesets = []
+ for i, in_path in enumerate(fetches["paths"]):
+ # name, _ = os.path.splitext(os.path.basename(in_path.decode("utf8")))
+ name, _ = os.path.splitext(os.path.basename(str(in_path)))
+ fileset = {"name": name, "step": step}
+ for kind in ["outputs"]:
+ filename = name + "-" + kind + ".jpg"
+ if step is not None:
+ filename = "%08d-%s" % (step, filename)
+ fileset[kind] = filename
+ out_path = os.path.join(image_dir, filename)
+ contents = fetches[kind][i]
+ with open(out_path, "wb") as f:
+ f.write(contents)
+ filesets.append(fileset)
+ return filesets
+
+
+# Resize the image and convert [0, 1] ---> [0, 255]
+def convert(image):
+ if aspect_ratio != 1.0:
+ # upscale to correct aspect ratio
+ size = [CROP_SIZE, int(round(CROP_SIZE * aspect_ratio))]
+ image = tf.image.resize_images(image, size=size, method=tf.image.ResizeMethod.BICUBIC)
+
+    # Convert the data type to 8-bit unsigned integer
+ return tf.image.convert_image_dtype(image, dtype=tf.uint8, saturate=True)
+
+
+# Main training function
+def train():
+ csv_write = csv.writer(loss_csv, dialect='excel')
+
+    # Set the random seed
+ global seed
+ if seed is None:
+ seed = random.randint(0, 2 ** 31 - 1)
+
+ tf.set_random_seed(seed)
+ np.random.seed(seed)
+ random.seed(seed)
+
+    # # Create output directories
+ # if not os.path.exists(train_output):
+ # os.makedirs(train_output)
+ # if not os.path.exists(validation_output):
+ # os.makedirs(validation_output)
+ # if not os.path.exists(checkpoint):
+ # os.makedirs(checkpoint)
+
+    # Load the dataset; returns input and target data scaled to [-1, 1]
+ examples = load_examples(train_input_sample, train_input_label, 1, 0)
+ print("load successful ! examples count = %d" % examples.count)
+ print("load successful ! examples count = %d" % examples.count, file=log)
+ examples2 = load_examples(train_input_sample2, train_input_label2, 1, 0)
+ print("load successful ! examples2 count = %d" % examples2.count)
+ print("load successful ! examples2 count = %d" % examples2.count, file=log)
+
+    # Create the model; inputs and targets have shape [batch_size, height, width, channels]
+    # Return value:
+
+ inputs_pd_8 = tf.placeholder(tf.uint8, [None, 256, 256, 3], name='input_image')
+
+ target_pd_8 = tf.placeholder(tf.uint8, [None, 256, 256, 3], name='target')
+
+ inputs_pd = tf.image.convert_image_dtype(inputs_pd_8, dtype=tf.float32)
+
+ target_pd = tf.image.convert_image_dtype(target_pd_8, dtype=tf.float32)
+
+ model = create_model(inputs_pd, target_pd)
+ print("create model successful!")
+ print("create model successful!", file=log)
+
+    # Image processing [-1, 1] => [0, 1]
+ # inputs = deprocess(inputs_pd)
+ # targets = deprocess(target_pd)
+ # outputs = deprocess(model.outputs)
+ # inputs2 = deprocess(inputs_pd)
+ # targets2 = deprocess(target_pd)
+ # outputs2 = deprocess(model.outputs)
+ inputs = inputs_pd
+ targets = target_pd
+ outputs = model.outputs
+ inputs2 = inputs_pd
+ targets2 = target_pd
+ outputs2 = model.outputs
+
+    # Convert pixel values from [0, 1] to RGB values in [0, 255]
+ with tf.name_scope("convert_inputs"):
+ converted_inputs = convert(inputs)
+ with tf.name_scope("convert_targets"):
+ converted_targets = convert(targets)
+ with tf.name_scope("convert_outputs"):
+ converted_outputs = convert(outputs)
+ with tf.name_scope("convert_inputs"):
+ converted_inputs2 = convert(inputs2)
+ with tf.name_scope("convert_targets"):
+ converted_targets2 = convert(targets2)
+ with tf.name_scope("convert_outputs"):
+ converted_outputs2 = convert(outputs2)
+
+    # Encode the images so they can be saved
+ with tf.name_scope("encode_images"):
+ display_fetches = {
+ "paths": examples.paths,
+            # tf.map_fn takes a function and a collection and applies the function to each element
+ "inputs": tf.map_fn(tf.image.encode_jpeg, converted_inputs, dtype=tf.string, name="input_jpegs"),
+ "targets": tf.map_fn(tf.image.encode_jpeg, converted_targets, dtype=tf.string, name="target_jpegs"),
+ "outputs": tf.map_fn(tf.image.encode_jpeg, converted_outputs, dtype=tf.string, name="output_jpegs"),
+ }
+ with tf.name_scope("encode_images"):
+ display_fetches2 = {
+ "paths": examples2.paths,
+            # tf.map_fn takes a function and a collection and applies the function to each element
+ "inputs": tf.map_fn(tf.image.encode_jpeg, converted_inputs2, dtype=tf.string,
+ name="input_jpegs"),
+ "targets": tf.map_fn(tf.image.encode_jpeg, converted_targets2, dtype=tf.string,
+ name="target_jpegs"),
+ "outputs": tf.map_fn(tf.image.encode_jpeg, converted_outputs2, dtype=tf.string,
+ name="output_jpegs"),
+ }
+
+ with tf.name_scope("parameter_count"):
+ parameter_count = tf.reduce_sum([tf.reduce_prod(tf.shape(v)) for v in tf.trainable_variables()])
+
+    # Keep only the most recent checkpoints (max_to_keep=10)
+ saver = tf.train.Saver(max_to_keep=10)
+
+ new_loss_v = 10
+ min_loss_v = 10
+ train_loss = 10
+
+ train_fetches = {}
+ val_fetches = {}
+ init = tf.global_variables_initializer()
+ result = {} # for train
+ result2 = {} # for validation
+
+ with tf.Session() as sess:
+ sess.run(init)
+ print("parameter_count =", sess.run(parameter_count))
+ print("parameter_count =", sess.run(parameter_count), file=log)
+ # if max_epochs is not None:
+ # max_steps = examples.steps_per_epoch * max_epochs # 400X200=8000
+
+        # Because the data is read from files, start_queue_runners() must be called.
+        # It starts the input-pipeline threads that fill the queue with examples so that dequeue ops can get data.
+ coord = tf.train.Coordinator()
+ threads = tf.train.start_queue_runners(coord=coord)
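+        # (Note: the usual counterpart of this pattern, coord.request_stop() followed by
+        #  coord.join(threads) after the training loop, is not called here; the program simply
+        #  relies on the session's `with` block ending.)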
+
+        # Run the training loop
+        print("begin training......")
+        print("begin training......", file=log)
+ print("max_steps:", max_steps)
+ print("max_steps:", max_steps, file=log)
+ start = time.time()
+ for step in range(max_steps):
+ def should(freq):
+ return freq > 0 and ((step + 1) % freq == 0 or step == max_steps - 1)
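+            # e.g. with freq == 100, should(freq) is True at steps 99, 199, 299, ... and at the final step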
+
+ print("step:", step)
+
+            # Run a normal training step
+            # Dictionary of all the ops that need to be run
+            train_fetches = {
+                "train": model.train  # parameters are only updated when this op is included
+            }
+
+            # progress_freq is 100: every 100 steps, compute the losses to report progress
+ if should(progress_freq):
+ # train_fetches["discrim_loss"] = model.discrim_loss
+ # train_fetches["gen_loss_GAN"] = model.gen_loss_GAN
+ train_fetches["gen_loss_L1"] = model.gen_loss_L1
+
+            # display_freq is 100: every 100 steps, save the input, target, and output images
+ if should(display_freq):
+ train_fetches["display"] = display_fetches
+
+            # Fetch the real data:
+ train_inputs_real, train_targets_real = sess.run([examples.inputs, examples.targets])
+ train_input_dict = {inputs_pd: train_inputs_real, target_pd: train_targets_real}
+ results = sess.run(train_fetches, feed_dict=train_input_dict)
+
+ if should(display_freq):
+ print("saving display training images")
+ save_images(train_output, results["display"], step=step)
+
+            # progress_freq is 100: every 100 steps, print the losses to show progress
+ if should(progress_freq):
+ # global_step will have the correct step count if we resume from a checkpoint
+ train_epoch = math.ceil(step / examples.steps_per_epoch)
+ train_step = (step - 1) % examples.steps_per_epoch + 1
+ rate = (step + 1) * batch_size / (time.time() - start)
+ remaining = (max_steps - step) * batch_size / rate
+ print("progress epoch %d step %d image/sec %0.1f remaining %dm" % (train_epoch, train_step, rate, remaining / 60))
+ print("progress epoch %d step %d image/sec %0.1f remaining %dm" % (train_epoch, train_step, rate, remaining / 60), file=log)
+ # print("discrim_loss", results["discrim_loss"])
+ # print("discrim_loss", results["discrim_loss"], file=log)
+ # print("gen_loss_GAN", results["gen_loss_GAN"])
+ # print("gen_loss_GAN", results["gen_loss_GAN"], file=log)
+ # print("gen_loss_L1", results["gen_loss_L1"])
+ # print("gen_loss_L1", results["gen_loss_L1"], file=log)
+ print("gen_loss_cross_entropy", results["gen_loss_L1"])
+ print("gen_loss_cross_entropy", results["gen_loss_L1"], file=log)
+ new_loss = results["gen_loss_L1"]
+ # dis_loss = results["discrim_loss"]
+ # GAN_loss = results["gen_loss_GAN"]
+
+            if should(validation_freq):  # run validation every 100 steps
+ val_fetches["gen_loss_L1"] = model.validate
+ val_fetches["display"] = display_fetches2
+
+ i = 0
+ sum_loss_v = 0
+ print("start display validation images and calculate validation loss")
+ while i <= examples2.count:
+ val_inputs_real, val_targets_real = sess.run([examples2.inputs, examples2.targets])
+ val_input_dict = {inputs_pd: val_inputs_real, target_pd: val_targets_real}
+ results2 = sess.run(val_fetches, feed_dict=val_input_dict)
+ sum_loss_v = sum_loss_v + results2["gen_loss_L1"]
+ i = i + 1
+
+ save_images(validation_output, results2["display"], step=step)
+ avg_loss_v = sum_loss_v / i
+ # print("gen_loss_L1_validation", avg_loss_v)
+ # print("gen_loss_L1_validation", avg_loss_v, file=log)
+ print("gen_loss_cross_entropy_validation", avg_loss_v)
+ print("gen_loss_cross_entropy_validation", avg_loss_v, file=log)
+ new_loss_v = avg_loss_v
+
+ # stu = [step + 1, new_loss_v, new_loss, dis_loss, GAN_loss]
+ stu = [step + 1, new_loss_v, new_loss]
+ csv_write.writerow(stu)
+
+            # Save only the model with the lowest loss so far
+ # if should(save_freq):
+ if new_loss_v < min_loss_v and train_loss >= new_loss:
+ min_loss_v = new_loss_v
+ train_loss = new_loss
+ print("saving model")
+ print("saving model", file=log)
+ saver.save(sess, os.path.join(checkpoint, "model"), global_step=step)
+ tf.train.write_graph(sess.graph.as_graph_def(), checkpoint, 'graph_node.pbtxt', as_text=True)
+
+ log.close()
+ loss_csv.close()
+
+
+def test():
+    # Set the random seed
+ global seed
+ if seed is None:
+ seed = random.randint(0, 2 ** 31 - 1)
+
+ tf.set_random_seed(seed)
+ np.random.seed(seed)
+ random.seed(seed)
+
+    # Create the output directory
+ if not os.path.exists(test_output_dir):
+ os.makedirs(test_output_dir)
+ if checkpoint is None:
+ raise Exception("checkpoint required for test mode")
+
+ # disable these features in test mode
+ scale_size = CROP_SIZE
+ flip = False
+
+    # Load the dataset; returns input and target data
+ examples = load_examples(test_input_dir, test_input_label, 0, 0)
+ print("load successful ! examples count = %d" % examples.count)
+
+    # Create the model; inputs and targets have shape [batch_size, height, width, channels]
+ model = create_model(examples.inputs, examples.targets)
+ print("create model successful!")
+
+    # Image processing [-1, 1] => [0, 1]
+ # inputs = deprocess(examples.inputs)
+ # targets = deprocess(examples.targets)
+ # outputs = deprocess(model.outputs)
+ inputs = examples.inputs
+ targets = examples.targets
+ outputs = model.outputs
+
+    # Convert pixel values from [0, 1] to RGB values in [0, 255]
+ with tf.name_scope("convert_inputs"):
+ converted_inputs = convert(inputs)
+ with tf.name_scope("convert_targets"):
+ converted_targets = convert(targets)
+ with tf.name_scope("convert_outputs"):
+ converted_outputs = convert(outputs)
+
+    # Encode the images so they can be saved
+ with tf.name_scope("encode_images"):
+ display_fetches = {
+ "paths": examples.paths,
+            # tf.map_fn takes a function and a collection and applies the function to each element
+ # "inputs": tf.map_fn(tf.image.encode_png, converted_inputs, dtype=tf.string, name="input_pngs"),
+ # "targets": tf.map_fn(tf.image.encode_png, converted_targets, dtype=tf.string, name="target_pngs"),
+ "outputs": tf.map_fn(tf.image.encode_png, converted_outputs, dtype=tf.string, name="output_pngs"),
+ }
+
+ sess = tf.InteractiveSession()
+ saver = tf.train.Saver(max_to_keep=10)
+
+ start = time.time()
+
+ ckpt = tf.train.get_checkpoint_state(checkpoint)
+ saver.restore(sess, ckpt.model_checkpoint_path)
+
+ coord = tf.train.Coordinator()
+ threads = tf.train.start_queue_runners(coord=coord)
+
+ for step in range(examples.count):
+ results = sess.run(display_fetches)
+ save_images_test(test_output_dir, results, step=step)
+
+ print("Total Time:", (time.time() - start))
+ print("Per Image Time:", (time.time() - start) / examples.count)
+
+
+if __name__ == '__main__':
+ train()
+ # test()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/Iou_edge.py" "b/Features/DeepLearning/Reference/Tang\047s/Iou_edge.py"
new file mode 100644
index 0000000..97a6e7a
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/Iou_edge.py"
@@ -0,0 +1,116 @@
+from sklearn.metrics import confusion_matrix
+import numpy as np
+from PIL import Image
+import os
+
+
+iou = 0
+iou2 = 0
+iouall = 0
+iouall2 = 0
+ioumean = 0
+ioumean2 = 0
+ioumax = 0
+ioumin = 1
+filesnum = 0
+sen = 0
+senall = 0
+senmean = 0
+acc = 0
+accall = 0
+accmean = 0
+spe = 0
+speall = 0
+spemean = 0
+
+# Result txt file saving PATH
+log = open(r'D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours4/result.txt', 'w')
+
+
+def compute_iou(y_pred, y_true):
+    # y_true and y_pred are flattened vectors
+ y_pred = y_pred.flatten()
+ y_true = y_true.flatten()
+ tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
+ # compute mean iou
+ print("tn, fp, fn, tp:", (tn, fp, fn, tp), file=log)
+ # tp/(tp + fp + fn)
+ iou_tp = tp / (tp + fp + fn)
+ iou_tp2 = tn / (tn + fp + fn)
+ sen_tp = tp / (tp + fn)
+ acc_tp = (tp + tn) / (tn + fp + fn + tp)
+ spe_tp = tn / (tn + fp)
+ global iou
+ iou = iou_tp
+ global iou2
+ iou2 = iou_tp2
+ global sen
+ sen = sen_tp
+ global acc
+ acc = acc_tp
+ global spe
+ spe = spe_tp
+ print("IoU:", iou_tp, file=log)
+ print("IoU2:", iou_tp2, file=log)
+ print("SEN:", sen_tp, file=log)
+ print("ACC:", acc_tp, file=log)
+ print("SPE:", spe_tp, file=log)
+
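+# A worked example of the metrics above (illustrative numbers only): with tn=900, fp=30, fn=20, tp=50,
+#   IoU  = 50 / (50 + 30 + 20)   = 0.50    (foreground IoU)
+#   IoU2 = 900 / (900 + 30 + 20) ~ 0.947   (background IoU)
+#   SEN  = 50 / (50 + 20)        ~ 0.714
+#   ACC  = (50 + 900) / 1000     = 0.95
+#   SPE  = 900 / (900 + 30)      ~ 0.968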
+
+#"C:/Users/user/Desktop/test16/gt_img/"
+#"C:/Users/user/Desktop/test16/testdataset/Fair/gt/"
+
+for filenames in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours4/"): # label gray scale images PATH
+ print(filenames, file=log)
+ filename = filenames.replace('', '')
+ print(filename)
+ img1 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours4/" + filenames) # label gray scale images PATH
+ img11 = img1.convert('L')
+ threshold = 128
+ table1 = []
+ for i in range(256):
+ if i < threshold:
+ table1.append(0)
+ else:
+ table1.append(1)
+ img11 = img11.point(table1, '1')
+ img_true = np.array(img11.convert("1").getdata())
+ img11.save('D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours4_convert/' + filenames) # label binary images output PATH
+
+ img2 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours4/" + filename) # Deep Learning model generated images PATH
+ img22 = img2.convert('L')
+ table2 = []
+ for i in range(256):
+ if i < threshold:
+ table2.append(0)
+ else:
+ table2.append(1)
+ img22 = img22.point(table2, '1')
+ img_pred = np.array(img22.convert("1").getdata())
+ img22.save('D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours4_convert/' + filename) # generated images binary converted output PATH
+
+ compute_iou(img_pred, img_true)
+ if iou >= ioumax:
+ ioumax = iou
+ if iou <= ioumin:
+ ioumin = iou
+ iouall = iouall + iou
+ iouall2 = iouall2 + iou2
+ senall = senall + sen
+ accall = accall + acc
+ speall = speall + spe
+ filesnum = filesnum + 1
+ ioumean = iouall / filesnum
+ ioumean2 = iouall2 / filesnum
+ senmean = senall / filesnum
+ accmean = accall / filesnum
+ spemean = speall / filesnum
+ print("IoUmean:", ioumean, file=log)
+ print("IoUmean2:", ioumean2, file=log)
+ print("IoUmax:", ioumax, file=log)
+ print("IoUmin:", ioumin, file=log)
+ print("SENmean:", senmean, file=log)
+ print("ACCmean:", accmean, file=log)
+ print("SPEmean:", spemean, file=log)
+
+log.close()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/TXTtoCSV.py" "b/Features/DeepLearning/Reference/Tang\047s/TXTtoCSV.py"
new file mode 100644
index 0000000..fa2d449
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/TXTtoCSV.py"
@@ -0,0 +1,31 @@
+import re
+import csv
+
+num = 23
+
+cs1 = open("D:/Result_RE_Revenge" + str(num) + "_checkpoint/logDis.csv", 'w', newline='')
+cs2 = open("D:/Result_RE_Revenge" + str(num) + "_checkpoint/logGAN.csv", 'w', newline='')
+csvw1 = csv.writer(cs1)
+csvw2 = csv.writer(cs2)
+w1 = {}
+w2 = {}
+
+f = open("D:/Result_RE_Revenge" + str(num) + "_checkpoint/log.txt")
+for line in f:
+ if 'discrim_loss ' not in line:
+ continue
+ m1 = re.findall(r'\S+$', line)
+ csvw1.writerow(m1)
+
+f.close()
+cs1.close()
+
+f = open("D:/Result_RE_Revenge" + str(num) + "_checkpoint/log.txt")
+for line in f:
+ if 'gen_loss_GAN ' not in line:
+ continue
+ m2 = re.findall(r'\S+$', line)
+ csvw2.writerow(m2)
+
+f.close()
+cs2.close()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/change_CannyThreshold.py" "b/Features/DeepLearning/Reference/Tang\047s/change_CannyThreshold.py"
new file mode 100644
index 0000000..fdab252
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/change_CannyThreshold.py"
@@ -0,0 +1,29 @@
+import cv2
+import numpy as np
+
+
+def CannyThreshold(lowThreshold):
+ detected_edges = cv2.GaussianBlur(gray, (3, 3), 0)
+ detected_edges = cv2.Canny(detected_edges,
+ lowThreshold,
+ lowThreshold * ratio,
+ apertureSize=kernel_size)
+ dst = cv2.bitwise_and(img, img, mask=detected_edges) # just add some colours to edges from original image.
+ cv2.imshow('canny demo', dst)
+
+
+lowThreshold = 0
+max_lowThreshold = 100
+ratio = 3
+kernel_size = 3
+
+img = cv2.imread('D:/result_contract/Otherkind/Tongue/20191119/50/1 (1).jpg')
+gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+
+cv2.namedWindow('canny demo')
+
+cv2.createTrackbar('Min threshold', 'canny demo', lowThreshold, max_lowThreshold, CannyThreshold)
+
+CannyThreshold(0) # initialization
+if cv2.waitKey(0) == 27:
+ cv2.destroyAllWindows()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/demo.py" "b/Features/DeepLearning/Reference/Tang\047s/demo.py"
new file mode 100644
index 0000000..098ae8f
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/demo.py"
@@ -0,0 +1,152 @@
+# Program that generates images from the training results
+#!/usr/bin/env python
+
+# python train_facade.py -g 0 -i ./facade/base --out result_facade --snapshot_interval 10000
+
+from __future__ import print_function
+import argparse
+import os
+import os.path
+import glob
+import math
+from PIL import Image
+import cv2
+
+import chainer
+from chainer.serializers.npz import NpzDeserializer
+import numpy as np
+
+from net import Encoder
+from net import Decoder
+import scipy as sp
+
+import chainer
+import chainer.cuda
+from chainer import Variable
+
+import time
+
+"""Settings"""
+# input image size
+w_in = 256
+h_in = 256
+# size of the generated images
+w_out = 256
+h_out = 256
+# range of saved parameter snapshots to load
+iter_first = 5000
+iter_last = 380000
+iter_inter = 5000
+# directory where the parameters are saved
+param_dir = "./pix2pix_param2/"
+# directory of the input images
+path_man = "./dataset/"
+# GPU id
+gpu_num = 0
+man_fld = [os.path.basename(x) for x in glob.glob(path_man+'*')]
+
+def out_image(img, man, files, enc, dec, rows, cols, seed, iter_num, gpu):
+ def save_image(x, name, mode=None):
+ _, C, H, W = x.shape
+ x = x.reshape((rows, cols, C, H, W))
+ x = x.transpose(0, 3, 1, 4, 2)
+ if C==1:
+ x = x.reshape((rows*H, cols*W))
+ else:
+ x = x.reshape((rows*H, cols*W, C))
+
+ img_fld, _ = os.path.splitext(files)
+ preview_dir = "./generate_image/"+man+"/"+img_fld+"/"
+ if not os.path.exists(preview_dir):
+ os.makedirs(preview_dir)
+ preview_path =preview_dir+"iter_"+iter_num+".jpg"
+ Image.fromarray(x, mode=mode).convert('RGB').save(preview_path)
+
+ np.random.seed(seed)
+ n_images = rows * cols
+ xp = enc.xp
+
+ in_ch = 3
+ out_ch = 3
+ batchsize = 1
+
+ in_all = np.zeros((n_images, in_ch, h_in, w_in)).astype("f")
+ gen_all = np.zeros((n_images, out_ch, h_out, w_out)).astype("f")
+
+ for it in range(n_images):
+
+ x_in = xp.zeros((batchsize, in_ch, h_in, w_in)).astype("f")
+ t_out = xp.zeros((batchsize, out_ch, h_out, w_out)).astype("f")
+
+ x_in[0,:] = xp.asarray(img)
+ x_in = Variable(x_in)
+
+ z = enc(x_in)
+ x_out = dec(z)
+
+ if gpu >= 0:
+ in_all[it,:] = x_in.data.get()[0,:]
+ gen_all[it,:] = x_out.data.get()[0,:]
+ else:
+ in_all[it,:] = x_in.data[0,:]
+ gen_all[it,:] = x_out.data[0,:]
+
+ x = np.asarray(np.clip(gen_all * 128 + 128, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "gen")
+
+ #x = np.asarray(np.clip(in_all * 128+128, 0.0, 255.0), dtype=np.uint8)
+ #save_image(x, "in")
+
+all_time = time.time()
+
+for man in man_fld:
+ path_img = path_man+man+"/"
+ img_file = [os.path.basename(x) for x in glob.glob(path_img+'*')]
+
+ for files in img_file:
+
+ start = time.time()
+
+ for num in range(iter_first,iter_last+1,iter_inter):
+ iter_num = str(num)
+ # trained model
+ ENC_W = param_dir+"enc_iter_"+iter_num+".npz"
+ DEC_W = param_dir+"dec_iter_"+iter_num+".npz"
+
+ parser = argparse.ArgumentParser(description='chainer implementation of pix2pix')
+ parser.add_argument('--gpu', '-g', type=int, default=gpu_num,
+ help='GPU ID (negative value indicates CPU)')
+ parser.add_argument('--img', '-i', help='Input image')
+ parser.add_argument('--out', '-o', default='result_dehighlight',
+ help='Directory to output the result')
+ args = parser.parse_args()
+
+
+ # Set up a neural network to train
+ enc = Encoder(in_ch=3)
+ dec = Decoder(out_ch=3)
+
+ chainer.serializers.load_npz(ENC_W, enc)
+ chainer.serializers.load_npz(DEC_W, dec)
+
+ if args.gpu >= 0:
+ chainer.cuda.get_device(args.gpu).use() # Make a specified GPU current
+ enc.to_gpu() # Copy the model to the GPU
+ dec.to_gpu()
+
+            # load the input image
+ img_src = Image.open(path_img+files)
+ w,h = img_src.size
+ #r = 286/min(w,h)
+ # resize images so that min(w, h) == 286
+ #img_src = img_src.resize((int(r*w), int(r*h)), Image.BILINEAR)
+ img_src = img_src.resize((w_in, h_in), Image.BILINEAR)
+
+ img_src = np.asarray(img_src).astype("f").transpose(2,0,1)/128.0-1.0
+
+ out_image(img_src,man,files, enc, dec, 1, 1, 0, iter_num, args.gpu)
+ elapsed_time = time.time() - start
+ print("complete: "+files)
+ print("time: " +str(elapsed_time))
+finish_time = time.time()-all_time
+print("total: "+str(finish_time))
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_Canny.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_Canny.py"
new file mode 100644
index 0000000..6e3a9ad
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_Canny.py"
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+import cv2
+import os
+
+
+def Edge_Extract(root, root2, root3):
+    img_root = os.path.join(root)  # change to the directory of the saved images
+ edge_root = os.path.join(root2) #
+ binary_root = os.path.join(root3)
+
+ if not os.path.exists(edge_root):
+ os.mkdir(edge_root)
+
+ if not os.path.exists(binary_root):
+ os.mkdir(binary_root)
+
+ file_names = os.listdir(img_root)
+ img_name = []
+
+ for name in file_names:
+ if not name.endswith('.jpg'):
+            raise AssertionError("This file %s is not JPG" % name)
+ img_name.append(os.path.join(img_root, name[:-4]+'.jpg'))
+
+ index = 0
+ for image in img_name:
+ img = cv2.imread(image, 0)
+ ret, img = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
+ cv2.imwrite(binary_root + '/' + file_names[index], img)
+ cv2.imwrite(edge_root + '/' + file_names[index], cv2.Canny(img, 100, 300))
+ index += 1
+ return 0
+
+
+if __name__ == '__main__':
+    root = 'D:/result_contract/Otherkind/Tongue/20191119/86_extraction_SRG/'  # change to your own file paths
+ root2 = 'D:/result_contract/Otherkind/Tongue/20191119/86_SRG_edge/'
+ root3 = 'D:/result_contract/Otherkind/Tongue/20191119/86_SRG_binary/'
+ Edge_Extract(root, root2, root3)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_find_contours.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_find_contours.py"
new file mode 100644
index 0000000..f482dd5
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_find_contours.py"
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+import matplotlib.pyplot as plt
+from skimage import measure, data, color
+import cv2
+import os
+
+
+def Edge_Extract(root, root2, root3):
+ img_root = os.path.join(root)
+ edge_root = os.path.join(root2)
+ binary_root = os.path.join(root3)
+
+ if not os.path.exists(edge_root):
+ os.mkdir(edge_root)
+
+ if not os.path.exists(binary_root):
+ os.mkdir(binary_root)
+
+ file_names = os.listdir(img_root)
+ img_name = []
+
+ for name in file_names:
+ if not name.endswith('.jpg'):
+            raise AssertionError("This file %s is not JPG" % name)
+ img_name.append(os.path.join(img_root, name[:-4]+'.jpg'))
+
+ index = 0
+
+ for image in img_name:
+ img = cv2.imread(image, 0)
+ ret, img = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
+ cv2.imwrite(binary_root + '/' + file_names[index], img)
+ print(index)
+ contours = measure.find_contours(img, 0.5)
+ plt.axis('off')
+ fig = plt.gcf()
+ fig.set_size_inches(2.56 / 3, 2.56 / 3)
+ plt.gca().xaxis.set_major_locator(plt.NullLocator())
+ plt.gca().yaxis.set_major_locator(plt.NullLocator())
+ plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
+ plt.margins(0, 0)
+ ax1 = fig.subplots(1, 1)
+ ax1.spines['top'].set_visible(False)
+ ax1.spines['right'].set_visible(False)
+ ax1.spines['bottom'].set_visible(False)
+ ax1.spines['left'].set_visible(False)
+ rows, cols = img.shape
+ ax1.axis([0, rows, cols, 0])
+ ax1.plot(contours[0][:, 1], contours[0][:, 0], linewidth=10, color='red')
+ ax1.plot(contours[1][:, 1], contours[1][:, 0], linewidth=10, color='red')
+ # for n, contour in enumerate(contours):
+ # ax1.plot(contour[:, 1], contour[:, 0], linewidth=5, color='blue')
+ fig.savefig(edge_root + '/' + file_names[index], dpi=300, pad_inches=0)
+ # plt.savefig(edge_root + '/' + file_names[index], dpi=300, pad_inches=0)
+ index += 1
+ return 0
+
+
+if __name__ == '__main__':
+ root = 'D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_notall/'
+ root2 = 'D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours_notall/'
+ root3 = 'D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_binary_notall/'
+ Edge_Extract(root, root2, root3)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforCanny.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforCanny.py"
new file mode 100644
index 0000000..7e1a48e
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforCanny.py"
@@ -0,0 +1,56 @@
+from PIL import Image
+import os
+
+
+for filename in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel_edge/"):
+ print(filename)
+ img_mix = Image.new("RGB", (256, 256), (255, 255, 255))
+ img1 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel_edge/" + filename)
+ img1_L = img1.convert('L')
+ for i in range(0, 256):
+ for j in range(0, 256):
+ data1 = img1_L.getpixel((i, j))
+ if data1 < 128:
+ img1_L.putpixel((i, j), 0)
+ else:
+ img1_L.putpixel((i, j), 255)
+ img2 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge/" + filename)
+ img2_L = img2.convert('L')
+ for i in range(0, 256):
+ for j in range(0, 256):
+ data2 = img2_L.getpixel((i, j))
+ if data2 < 128:
+ img2_L.putpixel((i, j), 0)
+ else:
+ img2_L.putpixel((i, j), 255)
+ img3 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/86_SRG_edge/" + filename)
+ img3_L = img3.convert('L')
+ for i in range(0, 256):
+ for j in range(0, 256):
+ data3 = img3_L.getpixel((i, j))
+ if data3 < 128:
+ img3_L.putpixel((i, j), 0)
+ else:
+ img3_L.putpixel((i, j), 255)
+
+ data = img_mix.load()
+ for i in range(0, 256):
+ for j in range(0, 256):
+ if img1_L.getpixel((i, j)) >= 128:
+ data[i, j] = (255, 0, 0)
+ if img2_L.getpixel((i, j)) >= 128:
+ data[i, j] = (0, 255, 0)
+ if img3_L.getpixel((i, j)) >= 128:
+ data[i, j] = (0, 0, 255)
+ img_mix.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed2/' + filename)
+
+ # img_mix = Image.merge(img_mix.mode, (img1_L, img2_L, img3_L))
+ # img_mix.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed/' + filename)
+
+ # img_mix2 = Image.open('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed/' + filename)
+ # data = img_mix2.load()
+ # for i in range(0, 256):
+ # for j in range(0, 256):
+ # if (data[i, j][0] <= 100) and (data[i, j][1] <= 100) and (data[i, j][2] <= 100):
+ # data[i, j] = (250, 250, 250)
+ # img_mix2.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed2/' + filename)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforFindContours.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforFindContours.py"
new file mode 100644
index 0000000..1ddba87
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforFindContours.py"
@@ -0,0 +1,57 @@
+from PIL import Image
+import os
+
+
+for filename in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours6/"):
+ print(filename)
+ # img_mix = Image.new("RGB", (256, 256), (255, 255, 255))
+ img1 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours6/" + filename)
+ img1_L = img1.convert('RGB')
+ # for i in range(0, 256):
+ # for j in range(0, 256):
+ # data1 = img1_L.getpixel((i, j))
+ # if data1 < 128:
+ # img1_L.putpixel((i, j), 0)
+ # else:
+ # img1_L.putpixel((i, j), 255)
+ img2 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours6/" + filename)
+ img2_L = img2.convert('RGB')
+ # for i in range(0, 256):
+ # for j in range(0, 256):
+ # data2 = img2_L.getpixel((i, j))
+ # if data2 < 128:
+ # img2_L.putpixel((i, j), 0)
+ # else:
+ # img2_L.putpixel((i, j), 255)
+ img_mix = Image.blend(img1_L, img2_L, 0.5)
+ img3 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/86_SRG_edge_find_contours6/" + filename)
+ img3_L = img3.convert('RGB')
+ img_mix2 = Image.blend(img_mix, img3_L, 0.4)
+ # for i in range(0, 256):
+ # for j in range(0, 256):
+ # data3 = img3_L.getpixel((i, j))
+ # if data3 < 128:
+ # img3_L.putpixel((i, j), 0)
+ # else:
+ # img3_L.putpixel((i, j), 255)
+
+ # data = img_mix.load()
+ # for i in range(0, 256):
+ # for j in range(0, 256):
+ # if img1_L.getpixel((i, j)) >= 128:
+ # data[i, j] = (255, 0, 0)
+ # if img2_L.getpixel((i, j)) >= 128:
+ # data[i, j] = (0, 255, 0)
+ # if img3_L.getpixel((i, j)) >= 128:
+ # data[i, j] = (0, 0, 255)
+ # img_mix.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed3/' + filename)
+
+ img_mix2.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed3_86_6/' + filename)
+
+ # img_mix2 = Image.open('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed/' + filename)
+ # data = img_mix2.load()
+ # for i in range(0, 256):
+ # for j in range(0, 256):
+ # if (data[i, j][0] <= 100) and (data[i, j][1] <= 100) and (data[i, j][2] <= 100):
+ # data[i, j] = (250, 250, 250)
+ # img_mix2.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed2/' + filename)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/generate.py" "b/Features/DeepLearning/Reference/Tang\047s/generate.py"
new file mode 100644
index 0000000..298e961
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/generate.py"
@@ -0,0 +1,59 @@
+# Program that binarizes grayscale images and then ANDs them together
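+# (Roughly: every generated image is binarized at a threshold of 200, and a pixel stays white in the
+#  final result only if it is white in every iteration's output, i.e. a logical AND across iterations.)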
+import numpy as np
+from matplotlib.image import imread, imsave
+import glob, os
+from PIL import Image
+import cv2
+
+"""*** Settings ***"""
+# first iteration
+iter_first = 5000
+# last iteration
+iter_last = 380000
+# iteration interval
+iter_inter = 5000
+# directory where the generated images were saved
+path_gen = "./generate_image/"
+# directory where the AND images are saved
+result_dir = "./result/"
+
+path_gt = "./gt/"
+
+
+# Binarization
+gen_fld = [os.path.basename(x) for x in glob.glob(path_gen+'*')]
+
+for man in gen_fld:
+ path_img = path_gen+man+"/"
+ img_fld = [os.path.basename(x) for x in glob.glob(path_img+'*')]
+ for ifolder in img_fld:
+ path_img2 = path_img+ifolder+"/"
+ img_fld2 = [os.path.basename(x) for x in glob.glob(path_img2+'*')]
+        # binarization
+ for ifile in img_fld2:
+ img = cv2.imread(path_img2+ifile,0)
+ ret, th2 = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
+ cv2.imwrite(path_img2+ifile,th2)
+
+# Generate the result image by taking the logical AND
+for man in gen_fld:
+ path_img = path_gen+man+"/"
+ img_fld = [os.path.basename(x) for x in glob.glob(path_img+'*')]
+ for ifolder in img_fld:
+ path_img2 = path_img+ifolder+"/"
+ for num in range(iter_first,iter_last+1,iter_inter):
+ img = imread(path_img2+"iter_"+str(num)+".jpg")
+ if(num == iter_first):
+ mask_img = img
+ else:
+ make = cv2.bitwise_and(img,img, mask = mask_img)
+ mask_img = make
+
+ #
+ res_dir = result_dir+man+"/"
+ if not os.path.exists(res_dir):
+ os.makedirs(res_dir)
+ cv2.imwrite(res_dir+str(ifolder)+".jpg",mask_img)
+
+print("complete")
+
diff --git "a/Features/DeepLearning/Reference/Tang\047s/image_enhancement.py" "b/Features/DeepLearning/Reference/Tang\047s/image_enhancement.py"
new file mode 100644
index 0000000..a468d7b
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/image_enhancement.py"
@@ -0,0 +1,105 @@
+import tensorflow as tf
+import os
+import random
+import numpy as np
+
+source_file = "D:/test13/SampleSkin1/"  # source directory
+source_file2 = "D:/test13/LabelSkin1/"  # source directory
+target_file = "D:/test13/SampleSkin2/"  # output directory
+target_file2 = "D:/test13/LabelSkin2/"  # output directory
+num = 2000  # number of images to process
+flip = False
+flip2 = False
+seed = None
+scale_size = 256 # help="scale images to this size before cropping to 256x256")
+brightness = False
+contrast = False
+hue = False
+saturation = False
+gamma = False
+aspect_ratio = 1.0 # aspect ratio of output images (width/height)
+CROP_SIZE = 256
+
+if not os.path.exists(target_file):  # create target_file if it does not exist
+    os.makedirs(target_file)
+
+file_list = os.listdir(source_file)  # list the source files
+
+if not os.path.exists(target_file2):  # create target_file2 if it does not exist
+    os.makedirs(target_file2)
+
+file_list2 = os.listdir(source_file2)  # list the source files
+
+
+# Image preprocessing: flipping and resizing
+def transform(image):
+ r = image
+
+ # area produces a nice downscaling, but does nearest neighbor for upscaling
+ # assume we're going to be doing downscaling here
+ r = tf.image.resize_images(r, [scale_size, scale_size], method=tf.image.ResizeMethod.AREA)
+
+ offset = tf.cast(tf.floor(tf.random_uniform([2], 0, scale_size - CROP_SIZE + 1, seed=seed)), dtype=tf.int32)
+ if scale_size > CROP_SIZE:
+ r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], CROP_SIZE, CROP_SIZE)
+ elif scale_size < CROP_SIZE:
+ raise Exception("scale size cannot be less than crop size")
+ return r
+
+
+def transform2(image):
+ r = image
+ if flip2:
+ r = tf.image.flip_left_right(r)
+ # r = tf.image.flip_up_down(r)
+ # r = tf.image.random_flip_left_right(r, seed=seed)
+ # r = tf.image.random_flip_up_down(r, seed=seed)
+ return r
+
+
+def transform3(image, color_ordering=0):
+ r = image
+ a = random.uniform(1, 2)
+ # b = random.uniform(1, 5)
+ b = 1
+ if color_ordering == 0:
+ if brightness:
+ r = tf.image.random_brightness(r, max_delta=0.2)
+ if contrast:
+ r = tf.image.random_contrast(r, lower=0.5, upper=1.5)
+ if saturation:
+ r = tf.image.random_saturation(r, lower=1, upper=2)
+ if gamma:
+ r = tf.image.adjust_gamma(r, gain=a, gamma=b)
+ # if hue:
+ # r = tf.image.random_hue(r, max_delta=0.5)
+
+ return r
+
+
+with tf.Session() as sess:
+ for i in range(num):
+        image_raw_data = tf.gfile.FastGFile(source_file + file_list[i], "rb").read()  # read the image
+        image_raw_data2 = tf.gfile.FastGFile(source_file2 + file_list2[i], "rb").read()  # read the image
+ print("Processing: ", str(i))
+ image_data = tf.image.decode_jpeg(image_raw_data)
+ # image_data = tf.image.convert_image_dtype(image_data, dtype=tf.float32)
+ image_data2 = tf.image.decode_jpeg(image_raw_data2)
+ # image_data2 = tf.image.convert_image_dtype(image_data2, dtype=tf.float32)
+
+ adjust = tf.image.flip_left_right(image_data)
+ adjust2 = tf.image.flip_left_right(image_data2)
+
+ image_data = tf.image.convert_image_dtype(adjust, dtype=tf.uint8)
+ image_data2 = tf.image.convert_image_dtype(adjust2, dtype=tf.uint8)
+
+ encode_data = tf.image.encode_jpeg(image_data)
+ encode_data2 = tf.image.encode_jpeg(image_data2)
+
+ with tf.gfile.GFile(target_file + file_list[i] + ".jpeg", "wb") as f1:
+ f1.write(encode_data.eval())
+
+ with tf.gfile.GFile(target_file2 + file_list2[i] + ".jpeg", "wb") as f2:
+ f2.write(encode_data2.eval())
+
+print("Finished!!!")
diff --git "a/Features/DeepLearning/Reference/Tang\047s/img_dataset.py" "b/Features/DeepLearning/Reference/Tang\047s/img_dataset.py"
new file mode 100644
index 0000000..046b583
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/img_dataset.py"
@@ -0,0 +1,79 @@
+# Note (for a 256x256 training size):
+# images are first resized so that both sides are at least 286 pixels,
+# then cropped to 256x256
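+# (Illustrative example: a 400x300 input gives r = 286/300, so it is resized to 381x286 and a
+#  random 256x256 window is then cropped from it in get_example.)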
+
+import os
+
+import numpy
+from PIL import Image
+import six
+
+import numpy as np
+
+from io import BytesIO
+import os
+import pickle
+import json
+import numpy as np
+import glob
+
+import skimage.io as io
+
+from chainer.dataset import dataset_mixin
+"""*** Settings ***"""
+
+# resized image size (set to at least the training image size)
+min_size = 286
+
+# set to the training image size
+w_crop_width = 256
+h_crop_width = 256
+# download `BASE` dataset from http://cmp.felk.cvut.cz/~tylecr1/facade/
+class ImgDataset(dataset_mixin.DatasetMixin):
+ def __init__(self, dataSrcDir, dataDstDir, data_range=(0,0.9)):
+ print("load dataset start")
+ print(" from: %s, %s"%(dataSrcDir, dataDstDir))
+ print(" range: [{}, {})".format(data_range[0], data_range[1]))
+ self.dataSrcDir = dataSrcDir
+ self.dataDstDir = dataDstDir
+ self.dataset = []
+ self.picfiles = list(map(os.path.basename, glob.glob(os.path.join(dataDstDir, "*.jpg"))))
+ data_range_start = int(data_range[0] * len(self.picfiles))
+ data_range_end = int(data_range[1] * len(self.picfiles))
+ for fn in self.picfiles[data_range_start:data_range_end]:
+ img_src = Image.open(os.path.join(self.dataSrcDir, fn))
+ img_dst = Image.open(os.path.join(self.dataDstDir, fn))
+ w,h = img_src.size
+            # make this value at least crop_width
+ r = min_size/min(w,h)
+ #r = 286/min(w,h)
+ # resize images so that min(w, h) == 286
+ img_src = img_src.resize((int(r*w), int(r*h)), Image.BILINEAR)
+ img_dst = img_dst.resize((int(r*w), int(r*h)), Image.BILINEAR)
+
+ #img_src = np.asarray(img_src).astype("f")
+ #img_src = img_src.transpose(2,0,1)/128.0-1.0
+
+ img_src = np.asarray(img_src).astype("f").transpose(2,0,1)/128.0-1.0
+ img_dst = np.asarray(img_dst).astype("f").transpose(2,0,1)/128.0-1.0
+
+ #img_dst = np.asarray(img_dst).astype("f")
+ #img_dst_shape = img_dst.shape
+ #img_dst = img_dst.reshape(img_dst_shape[0], img_dst_shape[1], 1)
+ #img_dst = img_dst.transpose(2,0,1)/128.0-1.0
+
+ self.dataset.append((img_src, img_dst))
+ print("load dataset done")
+
+ def __len__(self):
+ return len(self.dataset)
+
+ # return (label, img)
+ def get_example(self, i):
+ _,h,w = self.dataset[i][0].shape
+ x_l = np.random.randint(0,w-w_crop_width)
+ x_r = x_l+w_crop_width
+ y_l = np.random.randint(0,h-h_crop_width)
+ y_r = y_l+h_crop_width
+ #same image for input and output image pair
+ return self.dataset[i][0][:,y_l:y_r,x_l:x_r],self.dataset[i][1][:,y_l:y_r,x_l:x_r]
diff --git "a/Features/DeepLearning/Reference/Tang\047s/inference_pb_generate.py" "b/Features/DeepLearning/Reference/Tang\047s/inference_pb_generate.py"
new file mode 100644
index 0000000..6d4569a
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/inference_pb_generate.py"
@@ -0,0 +1,90 @@
+import tensorflow as tf
+from tensorflow.python.framework import dtypes
+from PIL import Image
+import numpy as np
+from tensorflow.python.tools import freeze_graph
+from tensorflow.python.tools import optimize_for_inference_lib
+
+# # load one image
+# test_image_dir = "D:/test13/pb_test/20180626030939.jpg" # test image path for testing whether the model optimization works
+# img = Image.open(test_image_dir)
+# img_ndarray = np.array(img, dtype='uint8')
+#
+# print(img_ndarray.shape)
+# img = img_ndarray.reshape((1, 256, 256, 3))
+# print(img)
+
+
+def freeze_from_checkpoint(): # freeze graph
+    path = tf.train.latest_checkpoint("D:/resultAREinProcess10_gpu_checkpoint/")  # path of the latest checkpoint (only the most recently saved variables are used)
+ input_graph_path = "D:/resultAREinProcess10_gpu_checkpoint/graph_node.pbtxt" # the pbtxt path
+ output_nodes = "generator1/decoder_1/Tanh"
+ restore_op = "save/restore_all"
+ filename_tensor = "save/Const:0"
+    output_name = "D:/resultAREinProcess10_gpu_checkpoint/AREinProcess2_step8100.pb"  # where you want to export the frozen model
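+    # The positional arguments passed below correspond, in order, to the TF 1.x freeze_graph tool:
+    # (input_graph, input_saver, input_binary, input_checkpoint, output_node_names,
+    #  restore_op_name, filename_tensor_name, output_graph, clear_devices, initializer_nodes)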
+ freeze_graph.freeze_graph(input_graph_path, "", False, path, output_nodes, restore_op, filename_tensor, output_name, True, "")
+
+
+def optimize_frozen_file():
+ """
+ - Removing training-only operations like checkpoint saving.
+ - Stripping out parts of the graph that are never reached.
+ - Removing debug operations like CheckNumerics.
+ - Folding batch normalization ops into the pre-calculated weights.
+    - Fusing common operations into unified versions.
+
+    "Note: do not use a placeholder as a training switch, otherwise folding the batch normalization ops will raise an error"
+    :return: None; the optimized graph is written to disk
+ """
+ inputGraph = tf.GraphDef()
+ frozen_graph_filename = "D:/result201910072_gpu_checkpoint/frozen_model.pb" # the freezed model path
+ with tf.gfile.Open(frozen_graph_filename, "rb") as f:
+
+ data2read = f.read()
+ inputGraph.ParseFromString(data2read)
+
+ outputGraph = optimize_for_inference_lib.optimize_for_inference(
+ inputGraph,
+ ["input_image"], # an array of the input node(s)
+ ["generator1/decoder_1/Tanh"], # an array of output nodes
+ dtypes.float32.as_datatype_enum)
+
+    # Save the optimized graph
+
+ f = tf.gfile.FastGFile('D:/result201910111_gpu_checkpoint/OptimizedGraph.pb', "w")
+
+ f.write(outputGraph.SerializeToString())
+
+
+def load_graph():
+ frozen_filename = "D:/result201910111_gpu_checkpoint/OptimizedGraph.pb"
+ with tf.gfile.GFile(frozen_filename, "rb") as f:
+ graph_def = tf.GraphDef()
+ graph_def.ParseFromString(f.read())
+
+ with tf.Graph().as_default() as graph:
+ # tf.import_graph_def(graph_def, name="prefix")
+ tf.import_graph_def(graph_def)
+ return graph
+
+
+def childs(t, d=0):
+ print('-' * d, t.name)
+ for child in t.op.inputs:
+ childs(child, d + 1)
+
+
+if __name__ == '__main__':
+
+ freeze_from_checkpoint()
+ # optimize_frozen_file()
+ #
+ # graph = load_graph()
+ # x = graph.get_tensor_by_name("import/input_image:0")
+ #
+ # pred = graph.get_tensor_by_name("import/generator1/decoder_1/Tanh:0")
+
+ # with tf.Session(graph=graph) as sess:
+ # input_data = img
+ # y = sess.run(pred, feed_dict={x: input_data})
+ # print(y)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/mIoU.py" "b/Features/DeepLearning/Reference/Tang\047s/mIoU.py"
new file mode 100644
index 0000000..341b6ba
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/mIoU.py"
@@ -0,0 +1,106 @@
+from sklearn.metrics import confusion_matrix
+import numpy as np
+from PIL import Image
+import os
+
+
+iou = 0
+iouall = 0
+ioumean = 0
+ioumax = 0
+ioumin = 1
+filesnum = 0
+sen = 0
+senall = 0
+senmean = 0
+acc = 0
+accall = 0
+accmean = 0
+spe = 0
+speall = 0
+spemean = 0
+
+# Result txt file saving PATH
+log = open(r'D:/result_contract/Otherkind/Tongue/20191119/109/result.txt', 'w')
+
+
+def compute_iou(y_pred, y_true):
+    # y_true and y_pred are flattened vectors
+ y_pred = y_pred.flatten()
+ y_true = y_true.flatten()
+ tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
+ # compute mean iou
+ print("tn, fp, fn, tp:", (tn, fp, fn, tp), file=log)
+ # tp/(tp + fp + fn)
+ iou_tp = tp / (tp + fp + fn)
+ sen_tp = tp / (tp + fn)
+ acc_tp = (tp + tn) / (tn + fp + fn + tp)
+ spe_tp = tn / (tn + fp)
+ global iou
+ iou = iou_tp
+ global sen
+ sen = sen_tp
+ global acc
+ acc = acc_tp
+ global spe
+ spe = spe_tp
+ print("IoU:", iou_tp, file=log)
+ print("SEN:", sen_tp, file=log)
+ print("ACC:", acc_tp, file=log)
+ print("SPE:", spe_tp, file=log)
+
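+# Illustrative example: with tn=950, fp=10, fn=20, tp=20 the metrics above give
+#   IoU = 20/50 = 0.40, SEN = 20/40 = 0.50, ACC = 970/1000 = 0.97, SPE = 950/960 ~ 0.99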
+
+#"C:/Users/user/Desktop/test16/gt_img/"
+#"C:/Users/user/Desktop/test16/testdataset/Fair/gt/"
+
+for filenames in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel/"): # label gray scale images PATH
+ print(filenames, file=log)
+ filename = filenames.replace('', '')
+ print(filename)
+ img1 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel/" + filenames) # label gray scale images PATH
+ img11 = img1.convert('L')
+ threshold = 128
+ table1 = []
+ for i in range(256):
+ if i < threshold:
+ table1.append(0)
+ else:
+ table1.append(1)
+ img11 = img11.point(table1, '1')
+ img_true = np.array(img11.convert("1").getdata())
+ img11.save('D:/result_contract/Otherkind/Tongue/20191119/ioulabel_convert/' + filenames) # label binary images output PATH
+
+ img2 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/109/" + filename) # Deep Learning model generated images PATH
+ img22 = img2.convert('L')
+ table2 = []
+ for i in range(256):
+ if i < threshold:
+ table2.append(0)
+ else:
+ table2.append(1)
+ img22 = img22.point(table2, '1')
+ img_pred = np.array(img22.convert("1").getdata())
+ img22.save('D:/result_contract/Otherkind/Tongue/20191119/109_convert/' + filename) # generated images binary converted output PATH
+
+ compute_iou(img_pred, img_true)
+ if iou >= ioumax:
+ ioumax = iou
+ if iou <= ioumin:
+ ioumin = iou
+ iouall = iouall + iou
+ senall = senall + sen
+ accall = accall + acc
+ speall = speall + spe
+ filesnum = filesnum + 1
+ ioumean = iouall / filesnum
+ senmean = senall / filesnum
+ accmean = accall / filesnum
+ spemean = speall / filesnum
+ print("IoUmean:", ioumean, file=log)
+ print("IoUmax:", ioumax, file=log)
+ print("IoUmin:", ioumin, file=log)
+ print("SENmean:", senmean, file=log)
+ print("ACCmean:", accmean, file=log)
+ print("SPEmean:", spemean, file=log)
+
+log.close()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/make_image_TFRecord.py" "b/Features/DeepLearning/Reference/Tang\047s/make_image_TFRecord.py"
new file mode 100644
index 0000000..02d093b
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/make_image_TFRecord.py"
@@ -0,0 +1,71 @@
+import os
+import tensorflow as tf
+from PIL import Image
+import numpy as np
+import pandas as pd
+
+# location of the original images
+orig_picture = os.getcwd() + '\\image\\test'
+# location of the generated images
+gen_picture = os.getcwd() + '\\image'
+# classes to recognize
+classes = {'0', '1'}
+# total number of samples
+num_samples = 40
+
+
+# Build the TFRecords data
+def create_record():
+ writer = tf.python_io.TFRecordWriter("test.tfrecords")
+ for index, name in enumerate(classes):
+ class_path = orig_picture + "/" + name + "/"
+ for img_name in os.listdir(class_path):
+ img_path = class_path + img_name
+ img = Image.open(img_path)
+            img = img.resize((32, 32))  # target image size for conversion
+            ### grayscale conversion (optional) ###############################################
+            # img=img.convert("L")
+            ##############################################################################################
+            img_raw = img.tobytes()  # convert the image to raw bytes
+ example = tf.train.Example(
+ features=tf.train.Features(feature={
+ "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[index])),
+ 'img_raw': tf.train.Feature(bytes_list=tf.train.BytesList(value=[img_raw]))
+ }))
+ writer.write(example.SerializeToString())
+ writer.close()
+
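+# A minimal usage sketch (assumed flow; not part of the original script):
+#   create_record()                                        # writes test.tfrecords in the working directory
+#   img, label = read_and_decode("test.tfrecords", is_batch=True)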
+
+# =======================================================================================
+def read_and_decode(filename, is_batch):
+    # create a file queue; no limit on the number of reads
+    filename_queue = tf.train.string_input_producer([filename])
+    # create a reader from the file queue
+    reader = tf.TFRecordReader()
+    # the reader reads one serialized example from the file queue
+    _, serialized_example = reader.read(filename_queue)
+    # get features from the serialized example
+    # parse the serialized example
+ features = tf.parse_single_example(
+ serialized_example,
+ features={
+ 'label': tf.FixedLenFeature([], tf.int64),
+ 'img_raw': tf.FixedLenFeature([], tf.string)
+ })
+ label = features['label']
+ img = features['img_raw']
+ img = tf.decode_raw(img, tf.uint8)
+ img = tf.reshape(img, [32, 32, 3])
+ # img = tf.cast(img, tf.float32) * (1. / 255) - 0.5
+ label = tf.cast(label, tf.int32)
+
+ if is_batch:
+ batch_size = 3
+ min_after_dequeue = 10
+ capacity = min_after_dequeue + 3 * batch_size
+ img, label = tf.train.shuffle_batch([img, label],
+ batch_size=batch_size,
+ num_threads=3,
+ capacity=capacity,
+ min_after_dequeue=min_after_dequeue)
+ return img, label
\ No newline at end of file
diff --git "a/Features/DeepLearning/Reference/Tang\047s/net.py" "b/Features/DeepLearning/Reference/Tang\047s/net.py"
new file mode 100644
index 0000000..81fb85c
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/net.py"
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import numpy
+
+import chainer
+from chainer import cuda
+import chainer.functions as F
+import chainer.links as L
+
+# U-net https://arxiv.org/pdf/1611.07004v1.pdf
+
+# convolution-batchnormalization-(dropout)-relu
+class CBR(chainer.Chain):
+ def __init__(self, ch0, ch1, bn=True, sample='down', activation=F.relu, dropout=False):
+ self.bn = bn
+ self.activation = activation
+ self.dropout = dropout
+ layers = {}
+ w = chainer.initializers.Normal(0.02)
+ if sample=='down':
+ layers['c'] = L.Convolution2D(ch0, ch1, 4, 2, 1, initialW=w)
+ else:
+ layers['c'] = L.Deconvolution2D(ch0, ch1, 4, 2, 1, initialW=w)
+ if bn:
+ layers['batchnorm'] = L.BatchNormalization(ch1)
+ super(CBR, self).__init__(**layers)
+
+ def __call__(self, x):
+ h = self.c(x)
+ #print(h.shape)
+ if self.bn:
+ h = self.batchnorm(h)
+ if self.dropout:
+ h = F.dropout(h)
+ if not self.activation is None:
+ h = self.activation(h)
+ return h
+
+class Encoder(chainer.Chain):
+ def __init__(self, in_ch):
+ layers = {}
+ w = chainer.initializers.Normal(0.02)
+ layers['c0'] = L.Convolution2D(in_ch, 64, 3, 1, 1, initialW=w)
+ layers['c1'] = CBR(64, 128, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c2'] = CBR(128, 256, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c3'] = CBR(256, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c4'] = CBR(512, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c5'] = CBR(512, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c6'] = CBR(512, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c7'] = CBR(512, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ super(Encoder, self).__init__(**layers)
+
+ def __call__(self, x):
+ #print("enc")
+ hs = [F.leaky_relu(self.c0(x))]
+ for i in range(1,8):
+ hs.append(self['c%d'%i](hs[i-1]))
+ return hs
+
+class Decoder(chainer.Chain):
+ def __init__(self, out_ch):
+ layers = {}
+ w = chainer.initializers.Normal(0.02)
+ layers['c0'] = CBR(512, 512, bn=True, sample='up', activation=F.relu, dropout=True)
+ layers['c1'] = CBR(1024, 512, bn=True, sample='up', activation=F.relu, dropout=True)
+ layers['c2'] = CBR(1024, 512, bn=True, sample='up', activation=F.relu, dropout=True)
+ layers['c3'] = CBR(1024, 512, bn=True, sample='up', activation=F.relu, dropout=False)
+ layers['c4'] = CBR(1024, 256, bn=True, sample='up', activation=F.relu, dropout=False)
+ layers['c5'] = CBR(512, 128, bn=True, sample='up', activation=F.relu, dropout=False)
+ layers['c6'] = CBR(256, 64, bn=True, sample='up', activation=F.relu, dropout=False)
+ layers['c7'] = L.Convolution2D(128, out_ch, 3, 1, 1, initialW=w)
+ super(Decoder, self).__init__(**layers)
+
+ def __call__(self, hs):
+ #print("dec")
+ h = self.c0(hs[-1])
+ for i in range(1,8):
+ #print(h.shape)
+ #print(hs[-i-1].shape)
+ h = F.concat([h, hs[-i-1]])
+ if i<7:
+ h = self['c%d'%i](h)
+ else:
+ h = self.c7(h)
+ return h
+
+
+class Discriminator(chainer.Chain):
+ def __init__(self, in_ch, out_ch):
+ layers = {}
+ w = chainer.initializers.Normal(0.02)
+ layers['c0_0'] = CBR(in_ch, 32, bn=False, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c0_1'] = CBR(out_ch, 32, bn=False, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c1'] = CBR(64, 128, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c2'] = CBR(128, 256, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c3'] = CBR(256, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False)
+ layers['c4'] = L.Convolution2D(512, 1, 3, 1, 1, initialW=w)
+ super(Discriminator, self).__init__(**layers)
+
+ def __call__(self, x_0, x_1):
+ h = F.concat([self.c0_0(x_0), self.c0_1(x_1)])
+ h = self.c1(h)
+ h = self.c2(h)
+ h = self.c3(h)
+ h = self.c4(h)
+ #h = F.average_pooling_2d(h, h.data.shape[2], 1, 0)
+ return h
diff --git "a/Features/DeepLearning/Reference/Tang\047s/pic_visualizer.py" "b/Features/DeepLearning/Reference/Tang\047s/pic_visualizer.py"
new file mode 100644
index 0000000..b2fe959
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/pic_visualizer.py"
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+
+import os
+
+import numpy as np
+from PIL import Image
+
+import chainer
+import chainer.cuda
+from chainer import Variable
+
+def out_image(updater, enc, dec, rows, cols, seed, dst, gpu, w_img, h_img):
+ @chainer.training.make_extension()
+ def make_image(trainer):
+ np.random.seed(seed)
+ n_images = rows * cols
+ xp = enc.xp
+
+ in_ch = 3
+ out_ch = 3
+
+ in_all = np.zeros((n_images, in_ch, h_img, w_img)).astype("f")
+ gt_all = np.zeros((n_images, out_ch, h_img, w_img)).astype("f")
+ gen_all = np.zeros((n_images, out_ch, h_img, w_img)).astype("f")
+
+ for it in range(n_images):
+ batch = updater.get_iterator('test').next()
+ batchsize = len(batch)
+
+ x_in = xp.zeros((batchsize, in_ch, h_img, w_img)).astype("f")
+ t_out = xp.zeros((batchsize, out_ch, h_img, w_img)).astype("f")
+
+ for i in range(batchsize):
+ x_in[i,:] = xp.asarray(batch[i][0])
+ t_out[i,:] = xp.asarray(batch[i][1])
+ x_in = Variable(x_in)
+
+ z = enc(x_in)
+ x_out = dec(z)
+
+ if gpu >= 0:
+ in_all[it,:] = x_in.data.get()[0,:]
+ gt_all[it,:] = t_out.get()[0,:]
+ gen_all[it,:] = x_out.data.get()[0,:]
+ else:
+ in_all[it,:] = x_in.data[0,:]
+ gt_all[it,:] = t_out[0,:]
+ gen_all[it,:] = x_out.data[0,:]
+
+
+ def save_image(x, name, mode=None):
+ _, C, H, W = x.shape
+ x = x.reshape((rows, cols, C, H, W))
+ x = x.transpose(0, 3, 1, 4, 2)
+ if C==1:
+ x = x.reshape((rows*H, cols*W))
+ else:
+ x = x.reshape((rows*H, cols*W, C))
+
+ preview_dir = '{}/preview'.format(dst)
+ preview_path = preview_dir +\
+ '/image_{:0>8}_{}.png'.format(trainer.updater.iteration, name)
+ if not os.path.exists(preview_dir):
+ os.makedirs(preview_dir)
+ Image.fromarray(x, mode=mode).convert('RGB').save(preview_path)
+ """
+ x = np.asarray(np.clip(gen_all, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "gen")
+
+ x = np.asarray(np.clip(in_all, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "in")
+
+ x = np.asarray(np.clip(gt_all, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "gt")
+
+ """
+ x = np.asarray(np.clip(gen_all * 128 + 128, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "gen")
+
+ x = np.asarray(np.clip(in_all * 128+128, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "in")
+
+ x = np.asarray(np.clip(gt_all * 128+128, 0.0, 255.0), dtype=np.uint8)
+ save_image(x, "gt")
+
+ return make_image
diff --git "a/Features/DeepLearning/Reference/Tang\047s/png2jpg.py" "b/Features/DeepLearning/Reference/Tang\047s/png2jpg.py"
new file mode 100644
index 0000000..78ba50e
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/png2jpg.py"
@@ -0,0 +1,14 @@
+import os
+import cv2
+
+
+def png2jpg():
+ path = 'K:/label5/'
+ for file in os.listdir(path):
+        # read as grayscale so COLOR_GRAY2BGR gets the single-channel input it expects
+        img = cv2.imread(path + file, cv2.IMREAD_GRAYSCALE)
+        img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
+ cv2.imwrite(
+ 'K:/label5/' + file.split('.')[0]+'.jpg', img)
+
+
+png2jpg()
\ No newline at end of file
diff --git "a/Features/DeepLearning/Reference/Tang\047s/random_erasing.py" "b/Features/DeepLearning/Reference/Tang\047s/random_erasing.py"
new file mode 100644
index 0000000..6289ef1
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/random_erasing.py"
@@ -0,0 +1,56 @@
+import numpy as np
+import os
+import cv2
+
+source_file = "D:/random_erasing_testdata4/"  # source directory
+target_file = "D:/random_erasing_testdata4_output18/"  # output directory
+num = 424  # number of images to process
+
+
+def random_erasing(img, p=1, sl=0.01, sh=0.2, r1=0.3, r2=0.5):
+ target_img = img.copy()
+
+ if p < np.random.rand():
+        # do not apply random erasing
+ return target_img
+
+ H, W, _ = target_img.shape
+ S = H * W
+
+ while True:
+        Se = np.random.uniform(sl, sh) * S  # area of the rectangle to overlay on the image
+        re = np.random.uniform(r1, r2)  # aspect ratio of the rectangle
+
+        He = int(np.sqrt(Se * re))  # height of the rectangle
+        We = int(np.sqrt(Se / re))  # width of the rectangle
+
+ # choose = np.random.randint(0, 2)
+ # print(choose)
+ # if choose == 0:
+        #     xe = np.random.randint(0, W/3)  # x coordinate of the rectangle
+        # if choose == 1:
+        #     xe = np.random.randint(W*2/3, W)  # x coordinate of the rectangle
+
+        xe = np.random.randint(0, W)  # x coordinate of the rectangle
+        ye = np.random.randint(0, H/5)  # y coordinate of the rectangle (restricted to the top fifth of the image)
+
+ if xe + We <= W and ye + He <= H:
+            # break if the rectangle does not extend beyond the image
+ break
+
+    mask = np.random.randint(0, 255, (He, We, 3))  # generate the rectangle; the values inside it are random
+    target_img[ye:ye + He, xe:xe + We, :] = mask  # overlay the rectangle on the image
+
+ return target_img
+
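+# (Illustrative numbers: for a 256x256 image S = 65536, so sl=0.01 and sh=0.2 give an erased patch of
+#  roughly 655 to 13107 pixels; with Se = 6554 and re = 0.5 the patch is about 57x114 pixels.)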
+
+if not os.path.exists(target_file):  # create target_file if it does not exist
+ os.makedirs(target_file)
+
+
+for i in range(num):
+ print("Processing: ", str(i))
+ file_list = os.listdir(source_file)
+ img = cv2.imread(source_file + file_list[i])
+ new_img = random_erasing(img)
+ cv2.imwrite(target_file + file_list[i], new_img)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/updater.py" "b/Features/DeepLearning/Reference/Tang\047s/updater.py"
new file mode 100644
index 0000000..4da3575
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/updater.py"
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import chainer
+import chainer.functions as F
+from chainer import Variable
+
+import numpy as np
+from PIL import Image
+
+from chainer import cuda
+from chainer import function
+from chainer.utils import type_check
+import numpy
+
+"""Settings"""
+# input/output image size
+w_img = 256
+h_img = 256
+
+class PicUpdater(chainer.training.StandardUpdater):
+
+ def __init__(self, *args, **kwargs):
+ self.enc, self.dec, self.dis = kwargs.pop('models')
+ super(PicUpdater, self).__init__(*args, **kwargs)
+
+
+ def loss_enc(self, enc, x_out, t_out, y_out, lam1=100, lam2=1):
+ batchsize,_,w,h = y_out.data.shape
+ loss_rec = lam1*(F.mean_absolute_error(x_out, t_out))
+ loss_adv = lam2*F.sum(F.softplus(-y_out)) / batchsize / w / h
+ loss = loss_rec + loss_adv
+ chainer.report({'loss': loss}, enc)
+ return loss
+
+ def loss_dec(self, dec, x_out, t_out, y_out, lam1=100, lam2=1):
+ batchsize,_,w,h = y_out.data.shape
+ loss_rec = lam1*(F.mean_absolute_error(x_out, t_out))
+ loss_adv = lam2*F.sum(F.softplus(-y_out)) / batchsize / w / h
+ loss = loss_rec + loss_adv
+ chainer.report({'loss': loss}, dec)
+ return loss
+
+
+ def loss_dis(self, dis, y_in, y_out):
+ batchsize,_,w,h = y_in.data.shape
+
+ L1 = F.sum(F.softplus(-y_in)) / batchsize / w / h
+ L2 = F.sum(F.softplus(y_out)) / batchsize / w / h
+ loss = L1 + L2
+ chainer.report({'loss': loss}, dis)
+ return loss
+
+ def update_core(self):
+ enc_optimizer = self.get_optimizer('enc')
+ dec_optimizer = self.get_optimizer('dec')
+ dis_optimizer = self.get_optimizer('dis')
+
+ enc, dec, dis = self.enc, self.dec, self.dis
+ xp = enc.xp
+
+ batch = self.get_iterator('main').next()
+ batchsize = len(batch)
+ in_ch = batch[0][0].shape[0]
+ out_ch = batch[0][1].shape[0]
+ #w_in = 256
+ #w_out = 256
+
+ x_in = xp.zeros((batchsize, in_ch, h_img, w_img)).astype("f")
+ t_out = xp.zeros((batchsize, out_ch, h_img, w_img)).astype("f")
+
+ for i in range(batchsize):
+ x_in[i,:] = xp.asarray(batch[i][0])
+ t_out[i,:] = xp.asarray(batch[i][1])
+ x_in = Variable(x_in)
+
+ z = enc(x_in)
+ x_out = dec(z)
+
+ y_fake = dis(x_in, x_out)
+ y_real = dis(x_in, t_out)
+
+
+ enc_optimizer.update(self.loss_enc, enc, x_out, t_out, y_fake)
+ for z_ in z:
+ z_.unchain_backward()
+ dec_optimizer.update(self.loss_dec, dec, x_out, t_out, y_fake)
+ x_in.unchain_backward()
+ x_out.unchain_backward()
+ dis_optimizer.update(self.loss_dis, dis, y_real, y_fake)
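updater.py defines a pix2pix-style updater: the encoder/decoder pair is trained with an L1 reconstruction loss plus an adversarial softplus loss, and the discriminator with the usual real/fake softplus losses, using optimizers registered as 'enc', 'dec' and 'dis'. A minimal sketch of how such an updater is typically wired into a Chainer Trainer; the Encoder/Decoder/Discriminator classes, the dataset and the hyper-parameters below are placeholders, not code from this repository:

    import chainer
    from chainer import training
    from chainer.training import extensions
    from updater import PicUpdater

    enc, dec, dis = Encoder(), Decoder(), Discriminator()  # model classes defined elsewhere

    def make_adam(model):
        opt = chainer.optimizers.Adam(alpha=0.0002, beta1=0.5)
        opt.setup(model)
        return opt

    # dataset items are (input_image, target_image) pairs, channel-first, 256x256
    train_iter = chainer.iterators.SerialIterator(train_dataset, batch_size=1)

    updater = PicUpdater(
        models=(enc, dec, dis),
        iterator=train_iter,
        optimizer={'enc': make_adam(enc), 'dec': make_adam(dec), 'dis': make_adam(dis)},
        device=0)  # GPU id; use -1 for CPU

    trainer = training.Trainer(updater, (200, 'epoch'), out='result')
    trainer.extend(extensions.LogReport())
    trainer.run()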
diff --git a/Features/DeepLearning/Tools/Converter_ckpt_To_PB b/Features/DeepLearning/Tools/Converter_ckpt_To_PB
new file mode 100644
index 0000000..2af50db
--- /dev/null
+++ b/Features/DeepLearning/Tools/Converter_ckpt_To_PB
@@ -0,0 +1,80 @@
+import tensorflow as tf
+from tensorflow.python.framework import dtypes
+from PIL import Image
+import numpy as np
+from tensorflow.python.tools import freeze_graph
+from tensorflow.python.tools import optimize_for_inference_lib
+
+def freeze_from_checkpoint(): # freeze graph
+ path = tf.train.latest_checkpoint(r"D:\kei2\Study\Tongue\TongueColorAnalysis\tang_model\\")
+ input_graph_path = "D:/resultAREinProcess10_gpu_checkpoint/graph_node.pbtxt" # the pbtxt path
+ output_nodes = "generator1/decoder_1/Tanh"
+ restore_op = "save/restore_all"
+ filename_tensor = "save/Const:0"
+    output_name = "D:/resultAREinProcess10_gpu_checkpoint/AREinProcess2_step8100.pb"  # where the frozen model is exported
+ freeze_graph.freeze_graph(input_graph_path, "", False, path, output_nodes, restore_op, filename_tensor, output_name, True, "")
+
+
+def optimize_frozen_file():
+ """
+ - Removing training-only operations like checkpoint saving.
+ - Stripping out parts of the graph that are never reached.
+ - Removing debug operations like CheckNumerics.
+ - Folding batch normalization ops into the pre-calculated weights.
+ - Fusing common operations into unified versions.
+
+ "Note: important: Don't use placeholder as training switch, otherwise the folding batch normalization will occur error"
+ :return: a optimized function
+ """
+ inputGraph = tf.GraphDef()
+    frozen_graph_filename = "D:/result201910072_gpu_checkpoint/frozen_model.pb"  # path to the frozen model
+ with tf.gfile.Open(frozen_graph_filename, "rb") as f:
+
+ data2read = f.read()
+ inputGraph.ParseFromString(data2read)
+
+ outputGraph = optimize_for_inference_lib.optimize_for_inference(
+ inputGraph,
+ ["input_image"], # an array of the input node(s)
+ ["generator1/decoder_1/Tanh"], # an array of output nodes
+ dtypes.float32.as_datatype_enum)
+
+    # Save the optimized graph
+    f = tf.gfile.FastGFile('D:/result201910111_gpu_checkpoint/OptimizedGraph.pb', "wb")
+    f.write(outputGraph.SerializeToString())
+    f.close()
+
+
+def load_graph():
+ frozen_filename = "D:/result201910111_gpu_checkpoint/OptimizedGraph.pb"
+ with tf.gfile.GFile(frozen_filename, "rb") as f:
+ graph_def = tf.GraphDef()
+ graph_def.ParseFromString(f.read())
+
+ with tf.Graph().as_default() as graph:
+ # tf.import_graph_def(graph_def, name="prefix")
+ tf.import_graph_def(graph_def)
+ return graph
+
+
+def childs(t, d=0):
+ print('-' * d, t.name)
+ for child in t.op.inputs:
+ childs(child, d + 1)
+
+
+if __name__ == '__main__':
+
+ freeze_from_checkpoint()
+ # optimize_frozen_file()
+ #
+ # graph = load_graph()
+ # x = graph.get_tensor_by_name("import/input_image:0")
+ #
+ # pred = graph.get_tensor_by_name("import/generator1/decoder_1/Tanh:0")
+
+ # with tf.Session(graph=graph) as sess:
+ # input_data = img
+ # y = sess.run(pred, feed_dict={x: input_data})
+ # print(y)
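The commented-out block in __main__ sketches the intended order of use: freeze the checkpoint, optimize the frozen graph, then load it and run the generator. A minimal TF 1.x inference sketch along those lines; the input file name, the 256x256 size and the [-1, 1] scaling are assumptions based on the usual pix2pix convention, not values confirmed by this file:

    import numpy as np
    import tensorflow as tf
    from PIL import Image

    graph = load_graph()  # load_graph() defined in Converter_ckpt_To_PB above
    x = graph.get_tensor_by_name("import/input_image:0")
    pred = graph.get_tensor_by_name("import/generator1/decoder_1/Tanh:0")

    img = Image.open("input.bmp").resize((256, 256))        # placeholder input path
    arr = np.asarray(img, dtype=np.float32) / 127.5 - 1.0   # scale to [-1, 1]
    arr = arr[np.newaxis, ...]                               # add a batch dimension

    with tf.Session(graph=graph) as sess:
        out = sess.run(pred, feed_dict={x: arr})
    print(out.shape)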
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln b/Features/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln
new file mode 100644
index 0000000..ad5bbe3
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln
@@ -0,0 +1,31 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.30011.22
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "deepTIAS_feature_labColor", "newcamera_deeplearning\deepTIAS_feature_labColor.csproj", "{9B9D9F04-F367-4B3A-A842-51D01B42539B}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|x64.ActiveCfg = Debug|x64
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|x64.Build.0 = Debug|x64
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|x64.ActiveCfg = Release|x64
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|x64.Build.0 = Release|x64
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {0887EA03-2F07-4BB6-B5FD-1656497D53DB}
+ EndGlobalSection
+EndGlobal
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs
new file mode 100644
index 0000000..ccc69e5
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs
@@ -0,0 +1,553 @@
+namespace OperateCamera
+{
+ partial class Form_TIASAutomaticShootingSystem
+ {
+        /// <summary>
+        /// Required designer variable.
+        /// </summary>
+ private System.ComponentModel.IContainer components = null;
+
+        /// <summary>
+        /// Clean up any resources being used.
+        /// </summary>
+        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+        /// <summary>
+        /// Required method for Designer support - do not modify
+        /// the contents of this method with the code editor.
+        /// </summary>
+ private void InitializeComponent()
+ {
+ this.components = new System.ComponentModel.Container();
+ this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
+ this.textBox5 = new System.Windows.Forms.TextBox();
+ this.textBox1 = new System.Windows.Forms.TextBox();
+ this.panel7 = new System.Windows.Forms.Panel();
+ this.pictureBox4 = new System.Windows.Forms.PictureBox();
+ this.panel6 = new System.Windows.Forms.Panel();
+ this.pictureBox3 = new System.Windows.Forms.PictureBox();
+ this.panel5 = new System.Windows.Forms.Panel();
+ this.pictureBox2 = new System.Windows.Forms.PictureBox();
+ this.label10 = new System.Windows.Forms.Label();
+ this.label9 = new System.Windows.Forms.Label();
+ this.panel3 = new System.Windows.Forms.Panel();
+ this.ShootingGuideBox = new System.Windows.Forms.TextBox();
+ this.label1 = new System.Windows.Forms.Label();
+ this.panel1 = new System.Windows.Forms.Panel();
+ this.button_ConnectTIAS = new System.Windows.Forms.Button();
+ this.label7 = new System.Windows.Forms.Label();
+ this.label8 = new System.Windows.Forms.Label();
+ this.textBox2 = new System.Windows.Forms.TextBox();
+ this.textBox3 = new System.Windows.Forms.TextBox();
+ this.textBox4 = new System.Windows.Forms.TextBox();
+ this.textBox6 = new System.Windows.Forms.TextBox();
+ this.textBox7 = new System.Windows.Forms.TextBox();
+ this.textBox8 = new System.Windows.Forms.TextBox();
+ this.panel2 = new System.Windows.Forms.Panel();
+ this.GuideBox = new System.Windows.Forms.TextBox();
+ this.label11 = new System.Windows.Forms.Label();
+ this.label12 = new System.Windows.Forms.Label();
+ this.panel4 = new System.Windows.Forms.Panel();
+ this.pictureBox1 = new System.Windows.Forms.PictureBox();
+ this.serialPort1 = new System.IO.Ports.SerialPort(this.components);
+ this.button1 = new System.Windows.Forms.Button();
+ this.tableLayoutPanel1.SuspendLayout();
+ this.panel7.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox4)).BeginInit();
+ this.panel6.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox3)).BeginInit();
+ this.panel5.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).BeginInit();
+ this.panel3.SuspendLayout();
+ this.panel1.SuspendLayout();
+ this.panel2.SuspendLayout();
+ this.panel4.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
+ this.SuspendLayout();
+ //
+ // tableLayoutPanel1
+ //
+ this.tableLayoutPanel1.ColumnCount = 4;
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.Controls.Add(this.textBox5, 1, 9);
+ this.tableLayoutPanel1.Controls.Add(this.textBox1, 0, 9);
+ this.tableLayoutPanel1.Controls.Add(this.panel7, 3, 8);
+ this.tableLayoutPanel1.Controls.Add(this.panel6, 2, 8);
+ this.tableLayoutPanel1.Controls.Add(this.panel5, 3, 2);
+ this.tableLayoutPanel1.Controls.Add(this.label10, 3, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label9, 2, 1);
+ this.tableLayoutPanel1.Controls.Add(this.panel3, 0, 14);
+ this.tableLayoutPanel1.Controls.Add(this.label1, 1, 0);
+ this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 4);
+ this.tableLayoutPanel1.Controls.Add(this.label7, 0, 7);
+ this.tableLayoutPanel1.Controls.Add(this.label8, 1, 7);
+ this.tableLayoutPanel1.Controls.Add(this.textBox2, 0, 10);
+ this.tableLayoutPanel1.Controls.Add(this.textBox3, 0, 11);
+ this.tableLayoutPanel1.Controls.Add(this.textBox4, 0, 12);
+ this.tableLayoutPanel1.Controls.Add(this.textBox6, 1, 10);
+ this.tableLayoutPanel1.Controls.Add(this.textBox7, 1, 11);
+ this.tableLayoutPanel1.Controls.Add(this.textBox8, 1, 12);
+ this.tableLayoutPanel1.Controls.Add(this.panel2, 0, 13);
+ this.tableLayoutPanel1.Controls.Add(this.label11, 2, 7);
+ this.tableLayoutPanel1.Controls.Add(this.label12, 3, 7);
+ this.tableLayoutPanel1.Controls.Add(this.panel4, 2, 2);
+ this.tableLayoutPanel1.Controls.Add(this.button1, 1, 2);
+ this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
+ this.tableLayoutPanel1.Name = "tableLayoutPanel1";
+ this.tableLayoutPanel1.RowCount = 15;
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.Size = new System.Drawing.Size(834, 561);
+ this.tableLayoutPanel1.TabIndex = 0;
+ this.tableLayoutPanel1.Paint += new System.Windows.Forms.PaintEventHandler(this.tableLayoutPanel1_Paint);
+ //
+ // textBox5
+ //
+ this.textBox5.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox5.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox5.Location = new System.Drawing.Point(211, 336);
+ this.textBox5.Multiline = true;
+ this.textBox5.Name = "textBox5";
+ this.textBox5.Size = new System.Drawing.Size(202, 31);
+ this.textBox5.TabIndex = 33;
+ this.textBox5.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox5.TextChanged += new System.EventHandler(this.textBox5_TextChanged);
+ //
+ // textBox1
+ //
+ this.textBox1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox1.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox1.Location = new System.Drawing.Point(3, 336);
+ this.textBox1.Multiline = true;
+ this.textBox1.Name = "textBox1";
+ this.textBox1.Size = new System.Drawing.Size(202, 31);
+ this.textBox1.TabIndex = 32;
+ this.textBox1.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox1.TextChanged += new System.EventHandler(this.textBox1_TextChanged);
+ //
+ // panel7
+ //
+ this.panel7.Controls.Add(this.pictureBox4);
+ this.panel7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel7.Location = new System.Drawing.Point(627, 299);
+ this.panel7.Name = "panel7";
+ this.tableLayoutPanel1.SetRowSpan(this.panel7, 5);
+ this.panel7.Size = new System.Drawing.Size(204, 179);
+ this.panel7.TabIndex = 31;
+ this.panel7.Paint += new System.Windows.Forms.PaintEventHandler(this.panel7_Paint);
+ //
+ // pictureBox4
+ //
+ this.pictureBox4.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox4.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox4.Name = "pictureBox4";
+ this.pictureBox4.Size = new System.Drawing.Size(204, 179);
+ this.pictureBox4.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox4.TabIndex = 2;
+ this.pictureBox4.TabStop = false;
+ this.pictureBox4.Click += new System.EventHandler(this.pictureBox4_Click);
+ //
+ // panel6
+ //
+ this.panel6.Controls.Add(this.pictureBox3);
+ this.panel6.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel6.Location = new System.Drawing.Point(419, 299);
+ this.panel6.Name = "panel6";
+ this.tableLayoutPanel1.SetRowSpan(this.panel6, 5);
+ this.panel6.Size = new System.Drawing.Size(202, 179);
+ this.panel6.TabIndex = 30;
+ this.panel6.Paint += new System.Windows.Forms.PaintEventHandler(this.panel6_Paint);
+ //
+ // pictureBox3
+ //
+ this.pictureBox3.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox3.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox3.Name = "pictureBox3";
+ this.pictureBox3.Size = new System.Drawing.Size(202, 179);
+ this.pictureBox3.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox3.TabIndex = 1;
+ this.pictureBox3.TabStop = false;
+ this.pictureBox3.Click += new System.EventHandler(this.pictureBox3_Click);
+ //
+ // panel5
+ //
+ this.panel5.Controls.Add(this.pictureBox2);
+ this.panel5.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel5.Location = new System.Drawing.Point(627, 77);
+ this.panel5.Name = "panel5";
+ this.tableLayoutPanel1.SetRowSpan(this.panel5, 5);
+ this.panel5.Size = new System.Drawing.Size(204, 179);
+ this.panel5.TabIndex = 29;
+ this.panel5.Paint += new System.Windows.Forms.PaintEventHandler(this.panel5_Paint);
+ //
+ // pictureBox2
+ //
+ this.pictureBox2.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox2.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox2.Name = "pictureBox2";
+ this.pictureBox2.Size = new System.Drawing.Size(204, 179);
+ this.pictureBox2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox2.TabIndex = 0;
+ this.pictureBox2.TabStop = false;
+ this.pictureBox2.Click += new System.EventHandler(this.pictureBox2_Click);
+ //
+ // label10
+ //
+ this.label10.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label10.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label10.Location = new System.Drawing.Point(627, 37);
+ this.label10.Name = "label10";
+ this.label10.Size = new System.Drawing.Size(204, 37);
+ this.label10.TabIndex = 25;
+ this.label10.Text = "Detection";
+ this.label10.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label10.Click += new System.EventHandler(this.label10_Click);
+ //
+ // label9
+ //
+ this.label9.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label9.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label9.Location = new System.Drawing.Point(419, 37);
+ this.label9.Name = "label9";
+ this.label9.Size = new System.Drawing.Size(202, 37);
+ this.label9.TabIndex = 24;
+ this.label9.Text = "Real Time";
+ this.label9.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label9.Click += new System.EventHandler(this.label9_Click);
+ //
+ // panel3
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.panel3, 4);
+ this.panel3.Controls.Add(this.ShootingGuideBox);
+ this.panel3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel3.Location = new System.Drawing.Point(3, 521);
+ this.panel3.Name = "panel3";
+ this.panel3.Size = new System.Drawing.Size(828, 37);
+ this.panel3.TabIndex = 23;
+ this.panel3.Paint += new System.Windows.Forms.PaintEventHandler(this.panel3_Paint);
+ //
+ // ShootingGuideBox
+ //
+ this.ShootingGuideBox.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.ShootingGuideBox.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.ShootingGuideBox.ForeColor = System.Drawing.Color.Red;
+ this.ShootingGuideBox.Location = new System.Drawing.Point(0, 0);
+ this.ShootingGuideBox.Multiline = true;
+ this.ShootingGuideBox.Name = "ShootingGuideBox";
+ this.ShootingGuideBox.Size = new System.Drawing.Size(828, 37);
+ this.ShootingGuideBox.TabIndex = 24;
+ this.ShootingGuideBox.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.ShootingGuideBox.TextChanged += new System.EventHandler(this.ShootingGuideBox_TextChanged);
+ //
+ // label1
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.label1, 2);
+ this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label1.Font = new System.Drawing.Font("Arial", 25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Pixel);
+ this.label1.Location = new System.Drawing.Point(211, 0);
+ this.label1.Name = "label1";
+ this.label1.Size = new System.Drawing.Size(410, 37);
+ this.label1.TabIndex = 0;
+ this.label1.Text = "Tongue Image Analyzing System";
+ this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label1.Click += new System.EventHandler(this.label1_Click);
+ //
+ // panel1
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.panel1, 2);
+ this.panel1.Controls.Add(this.button_ConnectTIAS);
+ this.panel1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel1.Location = new System.Drawing.Point(3, 151);
+ this.panel1.Name = "panel1";
+ this.tableLayoutPanel1.SetRowSpan(this.panel1, 2);
+ this.panel1.Size = new System.Drawing.Size(410, 68);
+ this.panel1.TabIndex = 7;
+ this.panel1.Paint += new System.Windows.Forms.PaintEventHandler(this.panel1_Paint);
+ //
+ // button_ConnectTIAS
+ //
+ this.button_ConnectTIAS.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.button_ConnectTIAS.Font = new System.Drawing.Font("Arial", 20F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.button_ConnectTIAS.Location = new System.Drawing.Point(0, 0);
+ this.button_ConnectTIAS.Name = "button_ConnectTIAS";
+ this.button_ConnectTIAS.Size = new System.Drawing.Size(410, 68);
+ this.button_ConnectTIAS.TabIndex = 0;
+ this.button_ConnectTIAS.Text = "START";
+ this.button_ConnectTIAS.UseVisualStyleBackColor = true;
+ this.button_ConnectTIAS.Click += new System.EventHandler(this.button_ConnectTIAS_Click);
+ //
+ // label7
+ //
+ this.label7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label7.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label7.Location = new System.Drawing.Point(3, 259);
+ this.label7.Name = "label7";
+ this.label7.Size = new System.Drawing.Size(202, 37);
+ this.label7.TabIndex = 14;
+ this.label7.Text = "RGB";
+ this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label7.Click += new System.EventHandler(this.label7_Click);
+ //
+ // label8
+ //
+ this.label8.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label8.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label8.Location = new System.Drawing.Point(211, 259);
+ this.label8.Name = "label8";
+ this.label8.Size = new System.Drawing.Size(202, 37);
+ this.label8.TabIndex = 15;
+ this.label8.Text = "L*a*b*";
+ this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label8.Click += new System.EventHandler(this.label8_Click);
+ //
+ // textBox2
+ //
+ this.textBox2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox2.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox2.Location = new System.Drawing.Point(3, 373);
+ this.textBox2.Multiline = true;
+ this.textBox2.Name = "textBox2";
+ this.textBox2.Size = new System.Drawing.Size(202, 31);
+ this.textBox2.TabIndex = 16;
+ this.textBox2.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox2.TextChanged += new System.EventHandler(this.textBox2_TextChanged);
+ //
+ // textBox3
+ //
+ this.textBox3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox3.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox3.Location = new System.Drawing.Point(3, 410);
+ this.textBox3.Multiline = true;
+ this.textBox3.Name = "textBox3";
+ this.textBox3.Size = new System.Drawing.Size(202, 31);
+ this.textBox3.TabIndex = 17;
+ this.textBox3.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox3.TextChanged += new System.EventHandler(this.textBox3_TextChanged);
+ //
+ // textBox4
+ //
+ this.textBox4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox4.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox4.Location = new System.Drawing.Point(3, 447);
+ this.textBox4.Multiline = true;
+ this.textBox4.Name = "textBox4";
+ this.textBox4.Size = new System.Drawing.Size(202, 31);
+ this.textBox4.TabIndex = 18;
+ this.textBox4.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox4.TextChanged += new System.EventHandler(this.textBox4_TextChanged);
+ //
+ // textBox6
+ //
+ this.textBox6.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox6.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox6.Location = new System.Drawing.Point(211, 373);
+ this.textBox6.Multiline = true;
+ this.textBox6.Name = "textBox6";
+ this.textBox6.Size = new System.Drawing.Size(202, 31);
+ this.textBox6.TabIndex = 19;
+ this.textBox6.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox6.TextChanged += new System.EventHandler(this.textBox6_TextChanged);
+ //
+ // textBox7
+ //
+ this.textBox7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox7.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox7.Location = new System.Drawing.Point(211, 410);
+ this.textBox7.Multiline = true;
+ this.textBox7.Name = "textBox7";
+ this.textBox7.Size = new System.Drawing.Size(202, 31);
+ this.textBox7.TabIndex = 20;
+ this.textBox7.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox7.TextChanged += new System.EventHandler(this.textBox7_TextChanged);
+ //
+ // textBox8
+ //
+ this.textBox8.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox8.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox8.Location = new System.Drawing.Point(211, 447);
+ this.textBox8.Multiline = true;
+ this.textBox8.Name = "textBox8";
+ this.textBox8.Size = new System.Drawing.Size(202, 31);
+ this.textBox8.TabIndex = 21;
+ this.textBox8.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox8.TextChanged += new System.EventHandler(this.textBox8_TextChanged);
+ //
+ // panel2
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.panel2, 4);
+ this.panel2.Controls.Add(this.GuideBox);
+ this.panel2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel2.Location = new System.Drawing.Point(3, 484);
+ this.panel2.Name = "panel2";
+ this.panel2.Size = new System.Drawing.Size(828, 31);
+ this.panel2.TabIndex = 22;
+ this.panel2.Paint += new System.Windows.Forms.PaintEventHandler(this.panel2_Paint);
+ //
+ // GuideBox
+ //
+ this.GuideBox.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.GuideBox.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.GuideBox.ForeColor = System.Drawing.Color.Red;
+ this.GuideBox.Location = new System.Drawing.Point(0, 0);
+ this.GuideBox.Multiline = true;
+ this.GuideBox.Name = "GuideBox";
+ this.GuideBox.Size = new System.Drawing.Size(828, 31);
+ this.GuideBox.TabIndex = 0;
+ this.GuideBox.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.GuideBox.TextChanged += new System.EventHandler(this.GuideBox_TextChanged);
+ //
+ // label11
+ //
+ this.label11.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label11.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label11.Location = new System.Drawing.Point(419, 259);
+ this.label11.Name = "label11";
+ this.label11.Size = new System.Drawing.Size(202, 37);
+ this.label11.TabIndex = 26;
+ this.label11.Text = "Segmentation";
+ this.label11.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label11.Click += new System.EventHandler(this.label11_Click);
+ //
+ // label12
+ //
+ this.label12.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label12.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label12.Location = new System.Drawing.Point(627, 259);
+ this.label12.Name = "label12";
+ this.label12.Size = new System.Drawing.Size(204, 37);
+ this.label12.TabIndex = 27;
+ this.label12.Text = "Selection Region";
+ this.label12.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label12.Click += new System.EventHandler(this.label12_Click);
+ //
+ // panel4
+ //
+ this.panel4.Controls.Add(this.pictureBox1);
+ this.panel4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel4.Location = new System.Drawing.Point(419, 77);
+ this.panel4.Name = "panel4";
+ this.tableLayoutPanel1.SetRowSpan(this.panel4, 5);
+ this.panel4.Size = new System.Drawing.Size(202, 179);
+ this.panel4.TabIndex = 28;
+ this.panel4.Paint += new System.Windows.Forms.PaintEventHandler(this.panel4_Paint);
+ //
+ // pictureBox1
+ //
+ this.pictureBox1.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox1.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox1.Name = "pictureBox1";
+ this.pictureBox1.Size = new System.Drawing.Size(202, 179);
+ this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox1.TabIndex = 0;
+ this.pictureBox1.TabStop = false;
+ this.pictureBox1.Click += new System.EventHandler(this.pictureBox1_Click);
+ //
+ // button1
+ //
+ this.button1.Location = new System.Drawing.Point(211, 77);
+ this.button1.Name = "button1";
+ this.button1.Size = new System.Drawing.Size(75, 23);
+ this.button1.TabIndex = 34;
+ this.button1.Text = "button1";
+ this.button1.UseVisualStyleBackColor = true;
+ this.button1.Click += new System.EventHandler(this.button1_Click);
+ //
+ // Form_TIASAutomaticShootingSystem
+ //
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.None;
+ this.ClientSize = new System.Drawing.Size(834, 561);
+ this.Controls.Add(this.tableLayoutPanel1);
+ this.Name = "Form_TIASAutomaticShootingSystem";
+ this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
+ this.Text = "TIAS BackGround";
+ this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form_TIASAutomaticShootingSystem_FormClosing);
+ this.Load += new System.EventHandler(this.Form_TIASAutomaticShootingSystem_Load);
+ this.tableLayoutPanel1.ResumeLayout(false);
+ this.tableLayoutPanel1.PerformLayout();
+ this.panel7.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox4)).EndInit();
+ this.panel6.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox3)).EndInit();
+ this.panel5.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).EndInit();
+ this.panel3.ResumeLayout(false);
+ this.panel3.PerformLayout();
+ this.panel1.ResumeLayout(false);
+ this.panel2.ResumeLayout(false);
+ this.panel2.PerformLayout();
+ this.panel4.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
+ this.ResumeLayout(false);
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
+ private System.Windows.Forms.Panel panel3;
+ public System.Windows.Forms.TextBox ShootingGuideBox;
+ private System.Windows.Forms.Label label1;
+ private System.Windows.Forms.Panel panel1;
+ private System.Windows.Forms.Button button_ConnectTIAS;
+ private System.Windows.Forms.Label label7;
+ private System.Windows.Forms.Label label8;
+ private System.Windows.Forms.TextBox textBox2;
+ private System.Windows.Forms.TextBox textBox3;
+ private System.Windows.Forms.TextBox textBox4;
+ private System.Windows.Forms.TextBox textBox6;
+ private System.Windows.Forms.TextBox textBox7;
+ private System.Windows.Forms.TextBox textBox8;
+ private System.Windows.Forms.Panel panel2;
+ public System.Windows.Forms.TextBox GuideBox;
+ private System.Windows.Forms.Panel panel7;
+ public System.Windows.Forms.PictureBox pictureBox4;
+ private System.Windows.Forms.Panel panel6;
+ public System.Windows.Forms.PictureBox pictureBox3;
+ private System.Windows.Forms.Panel panel5;
+ public System.Windows.Forms.PictureBox pictureBox2;
+ private System.Windows.Forms.Label label10;
+ private System.Windows.Forms.Label label9;
+ private System.Windows.Forms.Label label11;
+ private System.Windows.Forms.Label label12;
+ private System.Windows.Forms.Panel panel4;
+ private System.IO.Ports.SerialPort serialPort1;
+ private System.Windows.Forms.PictureBox pictureBox1;
+ private System.Windows.Forms.TextBox textBox5;
+ private System.Windows.Forms.TextBox textBox1;
+ private System.Windows.Forms.Button button1;
+ }
+}
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs
new file mode 100644
index 0000000..2ea2bac
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs
@@ -0,0 +1,959 @@
+using OpenCvSharp;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Windows.Forms;
+
+namespace OperateCamera
+{
+ public partial class Form_TIASAutomaticShootingSystem : Form
+ {
+ // Config
+ const int RADIUS_COLORAREA = 10;
+
+ public static Bitmap bitmap;
+
+ float[] a = new float[17];
+ float[] b = new float[17];
+ float[] c = new float[17];
+ float d;
+ float e;
+ float f;
+ int k;
+ public static bool m_getColor = false;
+        public static Mat m_CalibFrame; // image used for calibration
+        public static OpenCvSharp.Point[] getRGBpoint = new OpenCvSharp.Point[24]; // points for RGB sampling
+        double[] m_BforLab = new double[24];
+        double[] m_GforLab = new double[24];
+        double[] m_RforLab = new double[24];
+        public static bool m_bCalib;
+        public static OpenCvSharp.Point pt = new OpenCvSharp.Point(); // clicked point used for calibration
+        public static Mat m_PointedFrame; // frame with the clicked point
+        public static int click = 0; // click count
+
+ OpenCvSharp.Point P1 = new OpenCvSharp.Point();
+ OpenCvSharp.Point P2 = new OpenCvSharp.Point();
+
+ //Serial communication
+ public Form_TIASAutomaticShootingSystem()
+ {
+ InitializeComponent();
+ }
+
+ private void button_ConnectTIAS_Click(object sender, EventArgs e)
+ {
+ // GetImage()
+ var path_oriImg = @"data_lab\h\Shot0001.bmp";
+ var path_calibCsv = @"data_lab\h\Calib.csv";
+ var path_colorMatrixXYZ = "xyz.txt";
+ using (Mat mat_oriImg = Cv2.ImRead(path_oriImg, ImreadModes.Color))
+ {
+ // Process_DeepTIAS()
+ var path_mask = @"data_lab\h\Shot0001_mask.bmp";
+ Mat mat_finalMask = Cv2.ImRead(path_mask, ImreadModes.Grayscale);
+ Cv2.Threshold(mat_finalMask, mat_finalMask, 128, 255, ThresholdTypes.Binary);
+
+                // create the masked tongue-region image
+                Mat mat_maskedImg = new Mat();
+                mat_oriImg.CopyTo(mat_maskedImg, mat_finalMask);
+
+                // five-point click method (Ishikawa 2010)
+                List<OpenCvSharp.Point> list_5points = Get5points(mat_finalMask);
+                Show5point(mat_oriImg.Clone(), list_5points);
+
+                // obtain the eight sampling regions
+                List<OpenCvSharp.Point> list_8area = Get8area(list_5points);
+                Show8area(mat_oriImg.Clone(), list_8area);
+
+                // DEBUG
+                //List<OpenCvSharp.Point> list_5points_002 = new List<OpenCvSharp.Point>() {
+                //    new OpenCvSharp.Point(230, 628),
+                //    new OpenCvSharp.Point(704, 572),
+                //    new OpenCvSharp.Point(642, 782),
+                //    new OpenCvSharp.Point(360, 808),
+                //    new OpenCvSharp.Point(500, 920)
+                //};
+                //List<OpenCvSharp.Point> list_8area_002 = Get8area(list_5points_002);
+                //Show8area(mat_oriImg.Clone(), list_8area_002);
+
+
+                /*
+                // color extraction
+                List<Scalar> list_8Bgr = Get8colors(mat_maskedImg, list_8area);
+
+                // color conversion (RGB -> XYZ -> Lab)
+                List<Scalar> list_8Lab = Calc8Lab(list_8Bgr, path_calibCsv, path_colorMatrixXYZ);
+
+                // display the colors
+                Show8colors(list_8Bgr, list_8Lab);
+
+                // save
+                // Write8colors(list_8Bgr, list_8Lab);
+                */
+
+                // dispose
+                mat_finalMask.Dispose();
+                mat_maskedImg.Dispose();
+ GC.Collect();
+ }
+ System.Threading.Thread.Sleep(100);
+ }
+
+        private List<OpenCvSharp.Point> Get5points(Mat mat_finalMask)
+        {
+            List<OpenCvSharp.Point> li_dst;
+
+            // image for display
+            var mat_dst = mat_finalMask.Clone();
+            Cv2.CvtColor(mat_dst, mat_dst, ColorConversionCodes.GRAY2BGR);
+
+            // pixel coordinates of the tongue region in the mask
+            var mat_nonZeroCoordinates = new Mat();
+            Cv2.FindNonZero(mat_finalMask, mat_nonZeroCoordinates);
+
+            // collect every point on the tongue region into lists
+            var list_X = new List<int>();
+            var list_Y = new List<int>();
+            for (int i = 0; i < mat_nonZeroCoordinates.Total(); i++)
+            {
+                var x = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).X;
+                var y = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).Y;
+                list_X.Add(x);
+                list_Y.Add(y);
+            }
+
+            /// method1
+            // search for the extreme points (raster scan from the top-left)
+            var p_top = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Min()));
+            var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
+            var p_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Min()));
+            var p_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Max()));
+
+            // y coordinate marking the tongue-apex region (ratio hard-coded here)
+            var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8));
+            var p_apex_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Min());
+            var p_apex_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Max());
+
+            // visualize the detected points
+ //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right, 20, new Scalar(255, 255, 0), -1);
+
+ /// method2
+            // center-of-gravity (CoG) computation
+ var moments = Cv2.Moments(mat_finalMask, true);
+ var moment_x = moments.M10 / moments.M00;
+ var moment_y = moments.M01 / moments.M00;
+
+            // contour coordinates
+ OpenCvSharp.Point[][] contours;
+ HierarchyIndex[] hierarchy;
+ Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
+ var maxArea = contours.Select(n => Cv2.ContourArea(n)).Max();
+ var maxContour = contours.Where(n => Cv2.ContourArea(n) == maxArea).ToList()[0];
+
+            // distance from the CoG to the contour
+ double maxDistance_lefttop = 0.0;
+ double maxDistance_righttop = 0.0;
+ double maxDistance_bottom = 0.0;
+ var p_left_2 = new OpenCvSharp.Point();
+ var p_right_2 = new OpenCvSharp.Point();
+ var p_bottom_2 = new OpenCvSharp.Point();
+            // adopt the points farthest from the CoG
+ for (int i = 0; i < maxContour.Count(); i++)
+ {
+                // above the CoG
+ if (maxContour[i].Y < moment_y)
+ {
+                    // above and to the left of the CoG
+ if (maxContour[i].X < moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_lefttop)
+ {
+ maxDistance_lefttop = distance;
+ p_left_2 = maxContour[i];
+ }
+
+ }
+                    // above and to the right of the CoG
+ if (maxContour[i].X >= moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_righttop)
+ {
+ maxDistance_righttop = distance;
+ p_right_2 = maxContour[i];
+ }
+ }
+ }
+ else
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_bottom)
+ {
+ maxDistance_bottom = distance;
+ p_bottom_2 = maxContour[i];
+ }
+ }
+ }
+
+            // y coordinate marking the tongue-apex region (ratio hard-coded here)
+            var y_top_avg_ = (p_left_2.Y + p_right_2.Y) / 2.0;
+            var y_apex_2 = (int)(y_top_avg_ + ((p_bottom_2.Y - y_top_avg_) * 0.53));
+            var p_apex_left_2 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_2).Min());
+            var p_apex_right_2 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_2).Max());
+
+            // display
+            Cv2.Circle(mat_dst, p_left_2, 20, new Scalar(255, 0, 0), -1);
+            Cv2.Circle(mat_dst, p_right_2, 20, new Scalar(255, 0, 0), -1);
+            //Cv2.Circle(mat_dst, p_bottom_2, 20, new Scalar(255, 0, 0), -1);
+            //Cv2.Circle(mat_dst, p_apex_left_2, 20, new Scalar(255, 0, 0), -1);
+            //Cv2.Circle(mat_dst, p_apex_right_2, 20, new Scalar(255, 0, 0), -1);
+            //Cv2.Circle(mat_dst, new OpenCvSharp.Point(moment_x, moment_y), 10, new Scalar(200, 60, 200), -1); // CoG
+            //Cv2.DrawContours(mat_dst, maxContours, 0, new Scalar(0, 255, 255), 4); // contour
+
+            /// method3
+            // midpoint between the CoG and the top point
+            var y_topToCoG = (int)(moment_y + p_top.Y) / 2;
+            var p_topToCoG_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_topToCoG).Min());
+            var p_topToCoG_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_topToCoG).Max());
+            var p_topToCoG_center = new OpenCvSharp.Point((int)((p_topToCoG_left.X + p_topToCoG_right.X) / 2), y_topToCoG);
+            // midpoint between the CoG and the bottom point
+            var y_bottomToCoG = (int)(moment_y + p_bottom.Y) / 2;
+            var p_bottomToCoG_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_bottomToCoG).Min());
+            var p_bottomToCoG_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_bottomToCoG).Max());
+            var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG);
+            // line fitting
+            var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.0);
+            var lefty = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1);
+            var righty = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1);
+            // contour points lying on the fitted line
+ var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ Cv2.Line(mat_centerline, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), 3);
+ Cv2.DrawContours(mat_contour, contours, 0, 3);
+ Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and);
+ Cv2.FindNonZero(mat_and, mat_and);
+ var bottom_y_3 = 0;
+ var bottom_x_3 = 0;
+ for (int i = 0; i < mat_and.Total(); i++)
+ {
+                var x = mat_and.At<OpenCvSharp.Point>(i).X;
+                var y = mat_and.At<OpenCvSharp.Point>(i).Y;
+ if (bottom_y_3 < y)
+ {
+ bottom_y_3 = y;
+ bottom_x_3 = x;
+ }
+ }
+ var p_bottom_3 = new OpenCvSharp.Point(bottom_x_3, bottom_y_3);
+
+ //Cv2.Circle(mat_dst, p_topToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1);
+            //Cv2.Line(mat_dst, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), new Scalar(0, 100, 255)); // fitted line
+ //Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1);
+
+ // DEBUG
+ Cv2.NamedWindow("dst", WindowMode.KeepRatio ^ WindowMode.AutoSize);
+ Cv2.ImShow("dst", mat_dst.Resize(new OpenCvSharp.Size((int)mat_dst.Width * 0.5, (int)mat_dst.Height * 0.5)));
+
+            // output
+            li_dst = new List<OpenCvSharp.Point> { p_left_2, p_apex_left, p_bottom, p_apex_right, p_right_2 };
+            //li_dst = new List<OpenCvSharp.Point> { p_left_2, p_apex_left_2, p_bottom_2, p_apex_right_2, p_right_2 };
+
+            // dispose
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+ return li_dst;
+ }
+
+        private List<OpenCvSharp.Point> Get8area(List<OpenCvSharp.Point> list_5points)
+        {
+            var li_dst = new List<OpenCvSharp.Point>();
+            // ROI mask image 1
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1|● ●|3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points1 = new int[2, 3]{{0, 2, 1}, {4, 2, 3}};
+ var area1 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i ++)
+ {
+ var cx1 = (list_5points[points1[i, 0]].X + list_5points[points1[i, 1]].X) / 2;
+ var cy1 = (list_5points[points1[i, 0]].Y + list_5points[points1[i, 1]].Y) / 2;
+ area1.X = (list_5points[points1[i, 2]].X + cx1) / 2;
+ area1.Y = (list_5points[points1[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area1);
+ }
+
+            // ROI mask image 2
+ // 0____________ 4
+ // | ● ● |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points2 = new int[2, 4]{{0, 3, 0, 4}, {4, 1, 4, 0}};
+ var area2 = new OpenCvSharp.Point();
+ for (int i = 0; i< 2; i++)
+ {
+ float cx1 = list_5points[points2[i, 0]].X + (list_5points[points2[i, 1]].X - list_5points[points2[i, 0]].X) / 4;
+ float cy1 = list_5points[points2[i, 0]].Y + (list_5points[points2[i, 1]].Y - list_5points[points2[i, 0]].Y) / 4;
+ float cx2 = (list_5points[points2[i, 2]].X + list_5points[points2[i, 3]].X) / 2;
+ float cy2 = (list_5points[points2[i, 2]].Y + list_5points[points2[i, 3]].Y) / 2;
+ area2.X = (int)(cx1 + cx2) / 2;
+ area2.Y = (int)(cy1 + cy2) / 2;
+ li_dst.Add(area2);
+ }
+
+            // ROI mask image 3
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| ● ● |3
+ // 舌 /
+ // 舌_______/
+ // 2
+ var points3 = new int[2, 3]{{0, 3, 2}, {4, 1, 2}};
+ var area3 = new OpenCvSharp.Point();
+ for (int i = 0; i< 2; i ++)
+ {
+ float cx1 = list_5points[points3[i, 0]].X + (list_5points[points3[i, 1]].X - list_5points[points3[i, 0]].X) / 4;
+ float cy1 = list_5points[points3[i, 0]].Y + (list_5points[points3[i, 1]].Y - list_5points[points3[i, 0]].Y) / 4;
+ area3.X = (int)(list_5points[points3[i, 2]].X + cx1) / 2;
+ area3.Y = (int)(list_5points[points3[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area3);
+ }
+
+            // ROI mask image 4
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_●_●_/
+ // 2
+ var points4 = new int[2, 2]{{0, 2}, {4, 2}};
+ var area4 = new OpenCvSharp.Point();
+ for (int i = 0; i< 2; i ++)
+ {
+ area4.X = list_5points[points4[i, 0]].X + (list_5points[points4[i, 1]].X - list_5points[points4[i, 0]].X) * 7 / 8;
+ area4.Y = list_5points[points4[i, 0]].Y + (list_5points[points4[i, 1]].Y - list_5points[points4[i, 0]].Y) * 7 / 8;
+ li_dst.Add(area4);
+ }
+
+            // TODO: if any of the 8 areas does not lie on the tongue region, correct it
+ //bool isOnTongueArea = DiscriminateOnTongueArea(li_dst);
+ //if (isOnTongueArea)
+ //{
+
+ //}
+
+ return li_dst;
+ }
+
+        private List<Scalar> Get8colors(Mat mat_maskedImg, List<OpenCvSharp.Point> list_8area)
+        {
+            List<Scalar> li_dst = new List<Scalar>();
+ for (int i = 0; i < list_8area.Count(); i++)
+ {
+ using (Mat mat_colorRoi = Mat.Zeros(mat_maskedImg.Size(), MatType.CV_8UC1))
+ {
+                    // create a mask image marking the color-sampling region
+ Cv2.Circle(mat_colorRoi, list_8area[i], RADIUS_COLORAREA, 255, -1);
+
+                    // extract the mean color within the region
+ var color = Cv2.Mean(mat_maskedImg, mat_colorRoi);
+ li_dst.Add(color);
+ }
+ }
+ return li_dst;
+ }
+
+        private void Show5point(Mat oriImg, List<OpenCvSharp.Point> list_5point)
+ {
+ Cv2.Circle(oriImg, list_5point[0], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[1], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[2], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[3], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[4], 10, new Scalar(255, 0, 0), -1);
+ Cv2.NamedWindow("dst_point", WindowMode.AutoSize);
+ Cv2.ImShow("dst_point", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
+ }
+
+        private void Show8area(Mat oriImg, List<OpenCvSharp.Point> list_8area)
+ {
+ Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 0), -1);
+ Cv2.NamedWindow("dst_", WindowMode.AutoSize);
+ Cv2.ImShow("dst_", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
+ }
+
+        private List<Scalar> Calc8Lab(List<Scalar> list_8colors, string path_calibCsv, string path_colorMatrixXYZ)
+        {
+            GetColorMatrixRGB(path_calibCsv);
+            CalcTransMat(path_colorMatrixXYZ);
+
+            Read_TranslationMatrix();
+            var li_dst = new List<Scalar>();
+ for (int i = 0; i < list_8colors.Count(); i++)
+ {
+ var LabValue = CalcLab(list_8colors[i]);
+ li_dst.Add(LabValue);
+ }
+ return li_dst;
+ }
+
+ private void GetColorMatrixRGB(string path_calibCsv)
+ {
+ System.Text.Encoding encoding = GetType(path_calibCsv);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_calibCsv, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding);
+ string strLine = "";
+ string[] aryLine;
+            sr3.ReadLine(); // skip the header line
+ int i = 0;
+ while ((strLine = sr3.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ var no = Convert.ToSingle(aryLine[0]);
+ float r = Convert.ToSingle(aryLine[1]);
+ float g = Convert.ToSingle(aryLine[2]);
+ float b = Convert.ToSingle(aryLine[3]);
+ m_BforLab[i] = b;
+ m_GforLab[i] = g;
+ m_RforLab[i] = r;
+ i++;
+ }
+ sr3.Close();
+ fs3.Close();
+ }
+
+ private void CalcTransMat(string path_colorMatrixXYZ)
+ {
+ Mat RGBmat = new Mat(24, 17, MatType.CV_64F, new Scalar(1.0f));
+ Mat XYZmat = new Mat(24, 4, MatType.CV_64F, new Scalar(1.0f));
+
+            // read the ColorChart XYZ values
+ string line;
+ string[] split = new string[3];
+ double valueX = 0, valueY = 0, valueZ = 0;
+ System.Text.Encoding encoding3 = GetType(path_colorMatrixXYZ);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_colorMatrixXYZ, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding3);
+ for (int i = 0; i < 24; i++)
+ {
+ line = sr3.ReadLine();
+ split = line.Split(' ');
+ valueX = Convert.ToDouble(split[0]);
+ valueY = Convert.ToDouble(split[1]);
+ valueZ = Convert.ToDouble(split[2]);
+ XYZmat.Set(i, 0, valueX);
+ XYZmat.Set(i, 1, valueY);
+ XYZmat.Set(i, 2, valueZ);
+ }
+ sr3.Close();
+ fs3.Close();
+
+            // compute the transformation matrix
+            for (int j = 0; j < 24; j++)
+            {
+                // term order:
+                // R,G,B,RG,RB,GB,R^2,G^2,B^2
+                // R^2B,R^2G,G^2R,G^2B,B^2R,B^2G,RGB
+ RGBmat.Set(j, 2, m_BforLab[j]);
+ RGBmat.Set(j, 1, m_GforLab[j]);
+ RGBmat.Set(j, 0, m_RforLab[j]);
+                // second-order terms
+ RGBmat.Set(j, 3, m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 4, m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 5, m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 6, m_RforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 7, m_GforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 8, m_BforLab[j] * m_BforLab[j]);
+                // third-order terms
+ RGBmat.Set(j, 9, m_RforLab[j] * m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 10, m_RforLab[j] * m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 11, m_GforLab[j] * m_GforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 12, m_GforLab[j] * m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 13, m_BforLab[j] * m_BforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 14, m_BforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 15, m_RforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ }
+            // solve for the transformation matrix
+ Mat translation = new Mat();
+ var canSolve = Cv2.Solve(RGBmat, XYZmat, translation, DecompTypes.SVD);
+
+            // save the matrix to CSV
+ string CSVfilename = "translateMatrix.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ string s2 = "";
+ for (int i = 0; i <= 16; i++)
+ {
+ for (int j = 0; j <= 2; j++)
+ {
+                    double s1 = translation.At<double>(i, j);
+ s2 += s1.ToString() + ",";
+ }
+ CSV_data.WriteLine(s2);
+ s2 = "";
+ }
+ CSV_data.Close();
+ }
+
+ private void Read_TranslationMatrix()
+ {
+            // re-read the transformation matrix
+ System.Text.Encoding encoding = GetType("translateMatrix.csv");
+ System.IO.FileStream fs1 = new System.IO.FileStream("translateMatrix.csv", System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr1 = new System.IO.StreamReader(fs1, encoding);
+ for (int i = 0; i < 17; i++)
+ {
+ a[i] = 0;
+ b[i] = 0;
+ c[i] = 0;
+ }
+ k = 0;
+ string strLine = "";
+ string[] aryLine = null;
+ while ((strLine = sr1.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ a[k] = Convert.ToSingle(aryLine[0]);
+ b[k] = Convert.ToSingle(aryLine[1]);
+ c[k] = Convert.ToSingle(aryLine[2]);
+ k++;
+ }
+ sr1.Close();
+ fs1.Close();
+ }
+
+ private OpenCvSharp.Scalar CalcLab(OpenCvSharp.Scalar BgrValue)
+ {
+            // compute CIELab
+            // convert to XYZ
+ double X, Y, Z;
+ double m_B = BgrValue.Val0;
+ double m_G = BgrValue.Val1;
+ double m_R = BgrValue.Val2;
+ X = m_R * a[0] + m_G * a[1] + m_B * a[2]
+ + a[3] * m_R * m_G + a[4] * m_R * m_B + a[5] * m_G * m_B
+ + a[6] * m_R * m_R + a[7] * m_G * m_G + a[8] * m_B * m_B
+ + a[9] * m_R * m_R * m_B + a[10] * m_R * m_R * m_G
+ + a[11] * m_G * m_G * m_R + a[12] * m_G * m_G * m_B
+ + a[13] * m_B * m_B * m_R + a[14] * m_B * m_B * m_G
+ + a[15] * m_R * m_G * m_B
+ + a[16];
+ Y = m_R * b[0] + m_G * b[1] + m_B * b[2]
+ + b[3] * m_R * m_G + b[4] * m_R * m_B + b[5] * m_G * m_B
+ + b[6] * m_R * m_R + b[7] * m_G * m_G + b[8] * m_B * m_B
+ + b[9] * m_R * m_R * m_B + b[10] * m_R * m_R * m_G
+ + b[11] * m_G * m_G * m_R + b[12] * m_G * m_G * m_B
+ + b[13] * m_B * m_B * m_R + b[14] * m_B * m_B * m_G
+ + b[15] * m_R * m_G * m_B
+ + b[16];
+ Z = m_R * c[0] + m_G * c[1] + m_B * c[2]
+ + c[3] * m_R * m_G + c[4] * m_R * m_B + c[5] * m_G * m_B
+ + c[6] * m_R * m_R + c[7] * m_G * m_G + c[8] * m_B * m_B
+ + c[9] * m_R * m_R * m_B + c[10] * m_R * m_R * m_G
+ + c[11] * m_G * m_G * m_R + c[12] * m_G * m_G * m_B
+ + c[13] * m_B * m_B * m_R + c[14] * m_B * m_B * m_G
+ + c[15] * m_R * m_G * m_B
+ + c[16];
+ if (X < 0) X = 0;
+ if (Y < 0) Y = 0;
+ if (Z < 0) Z = 0;
+
+            // convert to Lab (fixed formula)
+            // TIAS light source (measured 2020-10-23)
+            // double Xn = 99.5829;
+            // double Yn = 100.0;
+            // double Zn = 57.1402;
+
+            // values used by Tang and Takeda (probably measured with the old TIAS light source)
+            //double Xn = 102.07;
+            //double Yn = 100.0;
+            //double Zn = 79.41;
+
+            // values used by Ishikawa and Prof. Nakaguchi (artificial sunlight illuminator?)
+            double Xn = 92.219;
+            double Yn = 100.0;
+            double Zn = 95.965;
+ double cL = 116.0 * Math.Pow((Y / Yn), 1.0 / 3.0) - 16.0;
+ double ca = 500.0 * (Math.Pow((X / Xn), 1.0 / 3.0) - Math.Pow((Y / Yn), 1.0 / 3.0));
+ double cb = 200.0 * (Math.Pow((Y / Yn), 1.0 / 3.0) - Math.Pow((Z / Zn), 1.0 / 3.0));
+
+ return new OpenCvSharp.Scalar(cL, ca, cb);
+ }
+
+ private void Show8colors(List list_8Bgr, List list_8Lab)
+ {
+ Invoke((MethodInvoker)delegate
+ {
+ textBox1.Text = list_8Bgr[0].ToString() + " " + list_8Bgr[1].ToString();
+ textBox2.Text = list_8Bgr[2].ToString() + " " + list_8Bgr[3].ToString();
+ textBox3.Text = list_8Bgr[4].ToString() + " " + list_8Bgr[5].ToString();
+ textBox4.Text = list_8Bgr[6].ToString() + " " + list_8Bgr[7].ToString();
+ textBox5.Text = list_8Lab[0].ToString() + " " + list_8Lab[1].ToString();
+ textBox6.Text = list_8Lab[2].ToString() + " " + list_8Lab[3].ToString();
+ textBox7.Text = list_8Lab[4].ToString() + " " + list_8Lab[5].ToString();
+ textBox8.Text = list_8Lab[6].ToString() + " " + list_8Lab[7].ToString();
+ });
+ }
+
+ private void Write8colors(List list_8Bgr, List list_8Lab)
+ {
+            // save
+ string CSVfilename = "CalculatedLab.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ CSV_data.WriteLine("Area,R,G,B,L,a,B");
+ for (int i = 0; i < list_8Bgr.Count(); i++)
+ {
+ string str = (i + 1).ToString() + ",";
+ str +=
+ list_8Bgr[i].Val2.ToString("0.0000") + "," +
+ list_8Bgr[i].Val1.ToString("0.0000") + "," +
+ list_8Bgr[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val1.ToString("0.0000") + "," +
+ list_8Lab[i].Val2.ToString("0.0000");
+ CSV_data.WriteLine(str);
+ }
+ CSV_data.Close();
+ }
+
+        private List<int> IndexOfAll(List<int> li, int target)
+        {
+            int num = li.IndexOf(target);
+            var li_num = new List<int>();
+            if (num >= 0)
+            {
+                li_num.Add(num);
+                // repeat until IndexOf no longer finds the target
+                while (num >= 0)
+                {
+                    // search from the position after the previous hit
+                    num = li.IndexOf(target, num + 1);
+                    if (num >= 0)
+                    {
+                        li_num.Add(num);
+                    }
+                }
+            }
+            else
+            {
+                Console.WriteLine("{0} was not found", target);
+            }
+            return li_num;
+ }
+
+
+ public static System.Text.Encoding GetType(string FILE_NAME)
+ {
+ System.IO.FileStream fs = new System.IO.FileStream(FILE_NAME, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.Text.Encoding r = GetType(fs);
+ fs.Close();
+ return r;
+ }
+
+ public static System.Text.Encoding GetType(System.IO.FileStream fs)
+ {
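+ // Heuristic encoding detection: reads the whole stream and checks for a valid UTF-8 sequence
+ // or a UTF-16 / UTF-8 byte-order mark at the start; falls back to the system default encoding.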
+ byte[] Unicode = new byte[] { 0xFF, 0xFE, 0x41 };
+ byte[] UnicodeBIG = new byte[] { 0xFE, 0xFF, 0x00 };
+ byte[] UTF8 = new byte[] { 0xEF, 0xBB, 0xBF };
+ System.Text.Encoding reVal = System.Text.Encoding.Default;
+
+ System.IO.BinaryReader r = new System.IO.BinaryReader(fs, System.Text.Encoding.Default);
+ int i;
+ int.TryParse(fs.Length.ToString(), out i);
+ byte[] ss = r.ReadBytes(i);
+ if (IsUTF8Bytes(ss) || (ss[0] == 0xEF && ss[1] == 0xBB && ss[2] == 0xBF))
+ {
+ reVal = System.Text.Encoding.UTF8;
+ }
+ else if (ss[0] == 0xFE && ss[1] == 0xFF && ss[2] == 0x00)
+ {
+ reVal = System.Text.Encoding.BigEndianUnicode;
+ }
+ else if (ss[0] == 0xFF && ss[1] == 0xFE && ss[2] == 0x41)
+ {
+ reVal = System.Text.Encoding.Unicode;
+ }
+ r.Close();
+ return reVal;
+ }
+
+ private static bool IsUTF8Bytes(byte[] data)
+ {
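+ // Validates UTF-8 structure: a lead byte's high bits give the expected sequence length,
+ // and each continuation byte must match the 10xxxxxx pattern.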
+ int charByteCounter = 1;
+ byte curByte;
+ for (int i = 0; i < data.Length; i++)
+ {
+ curByte = data[i];
+ if (charByteCounter == 1)
+ {
+ if (curByte >= 0x80)
+ {
+ while (((curByte <<= 1) & 0x80) != 0)
+ {
+ charByteCounter++;
+ }
+ if (charByteCounter == 1 || charByteCounter > 6)
+ {
+ return false;
+ }
+ }
+ }
+ else
+ {
+ if ((curByte & 0xC0) != 0x80)
+ {
+ return false;
+ }
+ charByteCounter--;
+ }
+ }
+ if (charByteCounter > 1)
+ {
+ throw new Exception("Error");
+ }
+ return true;
+ }
+
+ private void Form_TIASAutomaticShootingSystem_FormClosing(object sender, FormClosingEventArgs e)
+ {
+ System.Threading.Thread.Sleep(100);
+ }
+
+ private void Form_TIASAutomaticShootingSystem_Load(object sender, EventArgs e)
+ {
+ }
+
+ private void textBox5_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox1_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel7_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox4_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel6_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox3_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel5_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox2_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label10_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label9_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel3_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void ShootingGuideBox_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label1_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel1_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void tableLayoutPanel1_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void label7_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label8_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox2_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox3_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox4_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox6_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox7_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox8_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel2_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void GuideBox_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label11_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label12_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel4_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox1_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void button1_Click(object sender, EventArgs e)
+ {
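+ // Reads manually clicked tongue boundary points from the CSV, sorts each row's points by x,
+ // derives the eight sampling areas with Get8area, and writes both sets to the output CSV.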
+ var path_csv = @"D:\kei2\Study\Tongue\TongueColorAnalysis\automaze5click\Nakaguchi_clicked.csv";
+ var path_out = @"D:\kei2\Study\Tongue\TongueColorAnalysis\automaze5click\Nakaguchi_area.csv";
+ var sr = new StreamReader(path_csv);
+ var sw = new StreamWriter(path_out);
+
+ // skip header
+ var head = sr.ReadLine();
+
+ // each row: two ID columns followed by clicked (x, y) coordinate pairs
+ for (int i = 0; i < 101; i++)
+ {
+ var line = sr.ReadLine();
+ var words = line.Split(',');
+ sw.Write(words[0] + "," + words[1] + ",");
+ var list_point = new List<OpenCvSharp.Point>();
+
+ for (int j = 2; j < words.Length; j = j + 2)
+ {
+ var p = new OpenCvSharp.Point(int.Parse(words[j]), int.Parse(words[j + 1]));
+ list_point.Add(p);
+ }
+ var list_Psorted = list_point.OrderBy(n => n.X).ToList();
+ var list_area = Get8area(list_Psorted);
+ foreach (var n in list_Psorted)
+ {
+ sw.Write(n.X + "," + n.Y + ",");
+ }
+ foreach (var n in list_area)
+ {
+ sw.Write(n.X + "," + n.Y + ",");
+ }
+ sw.WriteLine();
+ }
+
+ // dispose
+ sw.Close();
+ sr.Close();
+ }
+ }
+}
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx
new file mode 100644
index 0000000..5f4899d
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx
@@ -0,0 +1,126 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ 17, 17
+
+
+ 56
+
+
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs
new file mode 100644
index 0000000..4df843f
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+using System.IO.Ports;
+using System.Windows.Forms;
+using System.Linq;
+
+namespace OperateCamera
+{
+ static class Program
+ {
+ /// <summary>
+ /// The main entry point; the whole system runs from here.
+ /// </summary>
+ [STAThread]
+ static void Main()
+ {
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new Form_TIASAutomaticShootingSystem());
+ }
+ }
+}
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..13ca395
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+ // General information about an assembly is controlled through the following
+ // set of attributes. Change these attribute values to modify the information
+ // associated with an assembly.
+[assembly: AssemblyTitle("newcamera_deeplearning")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("newcamera_deeplearning")]
+[assembly: AssemblyCopyright("Copyright © 2018")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+ // Setting ComVisible to false makes the types in this assembly not visible
+ // to COM components. If you need to access a type in this assembly from
+ // COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+ // The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("9b9d9f04-f367-4b3a-a842-51d01b42539b")]
+
+ // Version information for an assembly consists of the following four values:
+ //
+ //      Major Version
+ //      Minor Version
+ //      Build Number
+ //      Revision
+ //
+ // You can specify all the values or you can default the Build and Revision Numbers
+ // by using the '*' as shown below:
+ // [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs
new file mode 100644
index 0000000..22a72a7
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace deepTIAS_feature_labColor.Properties {
+ using System;
+
+
+ ///
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ ///
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ ///
+ /// Returns the cached ResourceManager instance used by this class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("deepTIAS_feature_labColor.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ ///
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx
new file mode 100644
index 0000000..af7dbeb
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..11eed20
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs
@@ -0,0 +1,26 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace deepTIAS_feature_labColor.Properties {
+
+
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default {
+ get {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings
new file mode 100644
index 0000000..3964565
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest
new file mode 100644
index 0000000..061c9ca
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj
new file mode 100644
index 0000000..8b969e5
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj
@@ -0,0 +1,185 @@
+
+
+
+
+
+ Debug
+ AnyCPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}
+ WinExe
+ Properties
+ deepTIAS_feature_labColor
+ deepTIAS_feature_labColor
+ v4.7.1
+ 512
+ true
+
+
+
+ publish\
+ true
+ Disk
+ false
+ Foreground
+ 7
+ Days
+ false
+ false
+ true
+ 0
+ 1.0.0.%2a
+ false
+ false
+ true
+
+
+ AnyCPU
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ AnyCPU
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+ true
+ bin\x64\Debug\
+ DEBUG;TRACE
+ full
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+ true
+
+
+ bin\x64\Release\
+ TRACE
+ true
+ pdbonly
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+ true
+
+
+
+ Form
+
+
+ Form_TIASAutomaticShootingSystem.cs
+
+
+
+
+ Form_TIASAutomaticShootingSystem.cs
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+ True
+ Resources.resx
+ True
+
+
+
+ SettingsSingleFileGenerator
+ Settings.Designer.cs
+
+
+ True
+ Settings.settings
+ True
+
+
+
+
+
+
+
+ False
+ Microsoft .NET Framework 4.7.1 %28x86 and x64%29
+ true
+
+
+ False
+ .NET Framework 3.5 SP1
+ false
+
+
+
+
+ Always
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.dll
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.Blob.dll
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.Extensions.dll
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.UserInterface.dll
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
diff --git a/Main/ColorSubdivision/ColorSubdivision.csproj b/Main/ColorSubdivision/ColorSubdivision.csproj
new file mode 100644
index 0000000..9f1ebea
--- /dev/null
+++ b/Main/ColorSubdivision/ColorSubdivision.csproj
@@ -0,0 +1,151 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {AD42A573-7AC3-4714-9D53-DB9921815CBB}
+ WinExe
+ ColorSubdivision
+ ColorSubdivision
+ v4.7.1
+ 512
+ true
+ true
+
+ publish\
+ true
+ Disk
+ false
+ Foreground
+ 7
+ Days
+ false
+ false
+ true
+ 0
+ 1.0.0.%2a
+ false
+ false
+ true
+
+
+ x64
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ x64
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+ true
+ bin\x64\Debug\
+ DEBUG;TRACE
+ full
+ x64
+ 7.3
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+ bin\x64\Release\
+ TRACE
+ true
+ pdbonly
+ x64
+ 7.3
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.dll
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Blob.dll
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Extensions.dll
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.UserInterface.dll
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Form
+
+
+ Form1.cs
+
+
+
+
+ Form1.cs
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+ True
+ Resources.resx
+ True
+
+
+ SettingsSingleFileGenerator
+ Settings.Designer.cs
+
+
+ True
+ Settings.settings
+ True
+
+
+
+
+
+
+
+ Always
+
+
+
+
+ False
+ Microsoft .NET Framework 4.7.1 %28x86 and x64%29
+ true
+
+
+ False
+ .NET Framework 3.5 SP1
+ false
+
+
+
+
\ No newline at end of file
diff --git a/Main/ColorSubdivision/Form1.Designer.cs b/Main/ColorSubdivision/Form1.Designer.cs
new file mode 100644
index 0000000..1b0e6fd
--- /dev/null
+++ b/Main/ColorSubdivision/Form1.Designer.cs
@@ -0,0 +1,84 @@
+namespace ColorSubdivision
+{
+ partial class Form1
+ {
+ ///
+ /// Required designer variable.
+ ///
+ private System.ComponentModel.IContainer components = null;
+
+ ///
+ /// Clean up any resources being used.
+ ///
+ /// true if managed resources should be disposed; otherwise, false.
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ ///
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ ///
+ private void InitializeComponent()
+ {
+ this.button1 = new System.Windows.Forms.Button();
+ this.button2 = new System.Windows.Forms.Button();
+ this.RichTextBox1 = new System.Windows.Forms.RichTextBox();
+ this.SuspendLayout();
+ //
+ // button1
+ //
+ this.button1.Location = new System.Drawing.Point(115, 47);
+ this.button1.Name = "button1";
+ this.button1.Size = new System.Drawing.Size(75, 23);
+ this.button1.TabIndex = 0;
+ this.button1.Text = "Button1";
+ this.button1.UseVisualStyleBackColor = true;
+ this.button1.Click += new System.EventHandler(this.Button1_Click);
+ //
+ // button2
+ //
+ this.button2.Location = new System.Drawing.Point(115, 105);
+ this.button2.Name = "button2";
+ this.button2.Size = new System.Drawing.Size(75, 23);
+ this.button2.TabIndex = 1;
+ this.button2.Text = "Button2";
+ this.button2.UseVisualStyleBackColor = true;
+ this.button2.Click += new System.EventHandler(this.Button2_Click);
+ //
+ // RichTextBox1
+ //
+ this.RichTextBox1.Location = new System.Drawing.Point(370, 138);
+ this.RichTextBox1.Name = "RichTextBox1";
+ this.RichTextBox1.Size = new System.Drawing.Size(100, 96);
+ this.RichTextBox1.TabIndex = 2;
+ this.RichTextBox1.Text = "";
+ //
+ // Form1
+ //
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.ClientSize = new System.Drawing.Size(800, 450);
+ this.Controls.Add(this.RichTextBox1);
+ this.Controls.Add(this.button2);
+ this.Controls.Add(this.button1);
+ this.Name = "Form1";
+ this.ResumeLayout(false);
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.Button button1;
+ private System.Windows.Forms.Button button2;
+ private System.Windows.Forms.RichTextBox RichTextBox1;
+ }
+}
+
diff --git a/Main/ColorSubdivision/Form1.cs b/Main/ColorSubdivision/Form1.cs
new file mode 100644
index 0000000..9d64c49
--- /dev/null
+++ b/Main/ColorSubdivision/Form1.cs
@@ -0,0 +1,56 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Data;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+using OpenCvSharp;
+
+namespace ColorSubdivision
+{
+ public partial class Form1 : Form
+ {
+ public Form1()
+ {
+ InitializeComponent();
+ }
+
+ private void Button1_Click(object sender, EventArgs e)
+ {
+ var path = @"D:\kei2\Solutions\DeepTongue\LocalRepository\Tongue extraction_cropresizemethod\Tongue extraction\bin\x64\Debug\mask_final\20180315093610.jpg";
+ using (Mat mat_input = Cv2.ImRead(path, ImreadModes.Grayscale))
+ {
+ Mat mat_dst = Mat.Zeros(mat_input.Size(), mat_input.Type()); // zero-initialized output (new Mat(...) would leave the buffer uninitialized)
+ bool isEdge = false;
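+ // Scan rows top-down; isEdge stops the marking after the first row that contains
+ // mask pixels brighter than 200, so only the topmost edge row is written to mat_dst.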
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ if(!isEdge)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ if (mat_input.At<byte>(i, j) > 200)
+ {
+ mat_dst.Set<byte>(i, j, 100);
+ isEdge = true;
+ }
+ }
+ }
+ }
+ Cv2.ImShow("input", mat_input);
+ Cv2.ImShow("dst", mat_dst);
+ mat_dst.Dispose();
+ }
+ GC.Collect();
+
+ }
+
+ private void Button2_Click(object sender, EventArgs e)
+ {
+
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Form1.resx b/Main/ColorSubdivision/Form1.resx
new file mode 100644
index 0000000..1af7de1
--- /dev/null
+++ b/Main/ColorSubdivision/Form1.resx
@@ -0,0 +1,120 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Main/ColorSubdivision/Program.cs b/Main/ColorSubdivision/Program.cs
new file mode 100644
index 0000000..b7233a7
--- /dev/null
+++ b/Main/ColorSubdivision/Program.cs
@@ -0,0 +1,22 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+
+namespace ColorSubdivision
+{
+ static class Program
+ {
+ /// <summary>
+ /// The main entry point for the application.
+ /// </summary>
+ [STAThread]
+ static void Main()
+ {
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new Form1());
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Properties/AssemblyInfo.cs b/Main/ColorSubdivision/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..87b468f
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("ColorSubdivision")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("ColorSubdivision")]
+[assembly: AssemblyCopyright("Copyright © 2020")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("ad42a573-7ac3-4714-9d53-db9921815cbb")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Main/ColorSubdivision/Properties/Resources.Designer.cs b/Main/ColorSubdivision/Properties/Resources.Designer.cs
new file mode 100644
index 0000000..6ca7260
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace ColorSubdivision.Properties {
+ using System;
+
+
+ ///
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ ///
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ ///
+ /// Returns the cached ResourceManager instance used by this class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("ColorSubdivision.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ ///
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Properties/Resources.resx b/Main/ColorSubdivision/Properties/Resources.resx
new file mode 100644
index 0000000..af7dbeb
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Resources.resx
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Main/ColorSubdivision/Properties/Settings.Designer.cs b/Main/ColorSubdivision/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..41f48bd
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Settings.Designer.cs
@@ -0,0 +1,26 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace ColorSubdivision.Properties {
+
+
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default {
+ get {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Properties/Settings.settings b/Main/ColorSubdivision/Properties/Settings.settings
new file mode 100644
index 0000000..3964565
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Settings.settings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/Main/Tongue extraction.sln b/Main/Tongue extraction.sln
new file mode 100644
index 0000000..d3135fb
--- /dev/null
+++ b/Main/Tongue extraction.sln
@@ -0,0 +1,37 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.30011.22
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DeepTIAS1.9", "Tongue extraction\DeepTIAS1.9.csproj", "{AFD610B1-8D23-423A-AA0F-B09BA769BDD7}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.ActiveCfg = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.Build.0 = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.ActiveCfg = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.Build.0 = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.ActiveCfg = Release|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.Build.0 = Release|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.ActiveCfg = Release|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {33F6C697-859B-4D55-9D28-998267FD09AE}
+ EndGlobalSection
+EndGlobal
diff --git a/Main/Tongue extraction/ColorExtractor.cs b/Main/Tongue extraction/ColorExtractor.cs
new file mode 100644
index 0000000..589729c
--- /dev/null
+++ b/Main/Tongue extraction/ColorExtractor.cs
@@ -0,0 +1,754 @@
+using OpenCvSharp;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Windows.Forms;
+
+namespace Tongue_extraction
+{
+ public partial class ColorExtractor
+ {
+ // Config
+ const int RADIUS_COLORAREA = 10;
+ public static Bitmap bitmap;
+ float[] a = new float[17];
+ float[] b = new float[17];
+ float[] c = new float[17];
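+ // a, b, c hold the 17 fitted polynomial coefficients mapping RGB to X, Y and Z respectively
+ // (filled by Read_TranslationMatrix from translateMatrix.csv)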
+ float d;
+ float e;
+ float f;
+ int k;
+ public static bool m_getColor = false;
+ public static Mat m_CalibFrame; // calibration image
+ public static OpenCvSharp.Point[] getRGBpoint = new OpenCvSharp.Point[24]; // points for RGB sampling
+ double[] m_BforLab = new double[24];
+ double[] m_GforLab = new double[24];
+ double[] m_RforLab = new double[24];
+ public static bool m_bCalib;
+ public static OpenCvSharp.Point pt = new OpenCvSharp.Point(); // holds the calibration point
+
+ public enum FivePointMethod {Method1, Method2, Method3};
+
+ public List<OpenCvSharp.Point> Get5points(Mat mat_finalMask, FivePointMethod method)
+ {
+ // for display
+ var mat_dst = mat_finalMask.Clone();
+ Cv2.CvtColor(mat_dst, mat_dst, ColorConversionCodes.GRAY2BGR);
+
+ // pixel coordinates of the tongue region in the mask
+ var mat_nonZeroCoordinates = new Mat();
+ Cv2.FindNonZero(mat_finalMask, mat_nonZeroCoordinates);
+
+ // collect every point on the tongue region into lists
+ var list_X = new List<int>();
+ var list_Y = new List<int>();
+ for (int i = 0; i < mat_nonZeroCoordinates.Total(); i++)
+ {
+ var x = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).X;
+ var y = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).Y;
+ list_X.Add(x);
+ list_Y.Add(y);
+ }
+
+ if(method == FivePointMethod.Method1)
+ {
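+ // Method1: take the extreme top/bottom/left/right pixels of the mask; the two tongue-tip
+ // side points are sampled where y lies 80% of the way down from the top to the bottom pixel.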
+ /// method1
+ // find the extreme points (scanning from the top-left)
+ var p_top = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Min()));
+ var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
+ var p_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Min()));
+ var p_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Max()));
+
+ // y coordinate marking the tongue-tip region (ratio hard-coded for now)
+ var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8));
+ var p_apex_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Min());
+ var p_apex_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Max());
+
+ // visualize (debug)
+ //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right, 20, new Scalar(255, 255, 0), -1);
+
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+
+ var li_dst = new List<OpenCvSharp.Point> { p_left, p_apex_left, p_bottom, p_apex_right, p_right };
+ return li_dst;
+ }
+ else if (method == FivePointMethod.Method2)
+ {
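+ // Method2: the left/right points are the contour points farthest from the centroid in the
+ // upper-left and upper-right regions; the tongue-tip side points are sampled 57% of the way
+ // from the averaged top height down to the bottom pixel.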
+ /// method2
+ // compute the centroid (CoG)
+ var moments = Cv2.Moments(mat_finalMask, true);
+ var moment_x = moments.M10 / moments.M00;
+ var moment_y = moments.M01 / moments.M00;
+
+ // contour coordinates
+ OpenCvSharp.Point[][] contours;
+ HierarchyIndex[] hierarchy;
+ Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
+ var sortedContour = contours.OrderByDescending(n => Cv2.ContourArea(n)).ToList();
+ var maxContour = sortedContour[0];
+
+ // distance from the centroid to the contour
+ double maxDistance_lefttop = 0.0;
+ double maxDistance_righttop = 0.0;
+ var p_left_2 = new OpenCvSharp.Point();
+ var p_right_2 = new OpenCvSharp.Point();
+ for (int i = 0; i < maxContour.Length; i++)
+ {
+ // above the centroid
+ if (maxContour[i].Y < moment_y)
+ {
+ // above the centroid, left side
+ if (maxContour[i].X < moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_lefttop)
+ {
+ maxDistance_lefttop = distance;
+ p_left_2 = maxContour[i];
+ }
+
+ }
+ // above the centroid, right side
+ if (maxContour[i].X >= moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_righttop)
+ {
+ maxDistance_righttop = distance;
+ p_right_2 = maxContour[i];
+ }
+ }
+ }
+ }
+ // y coordinate marking the tongue-tip region (ratio hard-coded for now)
+ var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
+ var y_top_avg_ = (p_left_2.Y + p_right_2.Y) / 2.0;
+ var y_apex_2 = (int)(y_top_avg_ + ((p_bottom.Y - y_top_avg_) * 0.57));
+ var p_apex_left_2 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_2).Min());
+ var p_apex_right_2 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_2).Max());
+
+ // visualize (debug)
+ //Cv2.Circle(mat_dst, p_left_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_right_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, new OpenCvSharp.Point(moment_x, moment_y), 10, new Scalar(200, 60, 200), -1); // centroid
+ //Cv2.DrawContours(mat_dst, contours, 0, new Scalar(0, 255, 255), 4); // contour
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+
+ var li_dst = new List<OpenCvSharp.Point> { p_left_2, p_apex_left_2, p_bottom, p_apex_right_2, p_right_2 };
+ return li_dst;
+ }
+ else if (method == FivePointMethod.Method3)
+ {
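+ // Method3: left/right points as in Method2, but the bottom point is taken where a line fitted
+ // through the centroid-to-top and centroid-to-bottom midpoints intersects the contour
+ // (the lowest such intersection).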
+ /// method3
+ // compute the centroid (CoG)
+ var moments = Cv2.Moments(mat_finalMask, true);
+ var moment_x = moments.M10 / moments.M00;
+ var moment_y = moments.M01 / moments.M00;
+
+ // contour coordinates
+ OpenCvSharp.Point[][] contours;
+ HierarchyIndex[] hierarchy;
+ Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
+ var sortedContour = contours.OrderByDescending(n => Cv2.ContourArea(n)).ToList();
+ var maxContour = sortedContour[0];
+
+ // distance from the centroid to the contour
+ double maxDistance_lefttop = 0.0;
+ double maxDistance_righttop = 0.0;
+ var p_left_3 = new OpenCvSharp.Point();
+ var p_right_3 = new OpenCvSharp.Point();
+ // keep the contour point farthest from the centroid
+ for (int i = 0; i < maxContour.Length; i++)
+ {
+ // above the centroid
+ if (maxContour[i].Y < moment_y)
+ {
+ // above the centroid, left side
+ if (maxContour[i].X < moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_lefttop)
+ {
+ maxDistance_lefttop = distance;
+ p_left_3 = maxContour[i];
+ }
+
+ }
+ // above the centroid, right side
+ if (maxContour[i].X >= moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_righttop)
+ {
+ maxDistance_righttop = distance;
+ p_right_3 = maxContour[i];
+ }
+ }
+ }
+ }
+
+ // midpoint between the centroid and the top point
+ var p_top = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Min()));
+ var y_topToCoG = (int)(moment_y + p_top.Y) / 2;
+ var p_topToCoG_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_topToCoG).Min());
+ var p_topToCoG_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_topToCoG).Max());
+ var p_topToCoG_center = new OpenCvSharp.Point((int)((p_topToCoG_left.X + p_topToCoG_right.X) / 2), y_topToCoG);
+ // midpoint between the centroid and the bottom point
+ var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
+ var y_bottomToCoG = (int)(moment_y + p_bottom.Y) / 2;
+ var p_bottomToCoG_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_bottomToCoG).Min());
+ var p_bottomToCoG_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_bottomToCoG).Max());
+ var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG);
+
+ // fit a line through the two midpoints
+ var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.0);
+ var lefty = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1);
+ var righty = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1);
+ // contour points lying on the fitted line
+ var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ Cv2.Line(mat_centerline, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), 3);
+ Cv2.DrawContours(mat_contour, sortedContour, 0, 3);
+ Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and);
+ Cv2.FindNonZero(mat_and, mat_and);
+ var bottom_y_3 = 0;
+ var bottom_x_3 = 0;
+ for (int i = 0; i < mat_and.Total(); i++)
+ {
+ var x = mat_and.At<OpenCvSharp.Point>(i).X;
+ var y = mat_and.At<OpenCvSharp.Point>(i).Y;
+ if (bottom_y_3 < y)
+ {
+ bottom_y_3 = y;
+ bottom_x_3 = x;
+ }
+ }
+ var p_bottom_3 = new OpenCvSharp.Point(bottom_x_3, bottom_y_3);
+
+ // y coordinate marking the tongue-tip region (ratio hard-coded for now)
+ var y_top_avg_ = (p_left_3.Y + p_right_3.Y) / 2.0;
+ var y_apex_3 = (int)(y_top_avg_ + ((p_bottom_3.Y - y_top_avg_) * 0.57));
+ var p_apex_left_3 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_3).Min());
+ var p_apex_right_3 = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex_3).Max());
+
+ //Cv2.Circle(mat_dst, p_topToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Line(mat_dst, new OpenCvSharp.Point(0, lefty), new OpenCvSharp.Point(mat_finalMask.Cols - 1, righty), new Scalar(0, 100, 255)); // fitted line
+ //Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1);
+
+ // dispose
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+
+ var li_dst = new List<OpenCvSharp.Point> { p_left_3, p_apex_left_3, p_bottom_3, p_apex_right_3, p_right_3 };
+ return li_dst;
+ }
+ else
+ {
+ return new List<OpenCvSharp.Point>();
+ }
+ // DEBUG
+ //Cv2.NamedWindow("dst", WindowMode.KeepRatio ^ WindowMode.AutoSize);
+ //Cv2.ImShow("dst", mat_dst.Resize(new OpenCvSharp.Size((int)mat_dst.Width * 0.5, (int)mat_dst.Height * 0.5)));
+
+ }
+
+ public List<OpenCvSharp.Point> Get8area(List<OpenCvSharp.Point> list_5points)
+ {
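+ // Derives the eight color-sampling points from the five boundary points by fixed-ratio
+ // interpolation; the diagrams below mark each pair of points (●) on the tongue outline.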
+ var li_dst = new List<OpenCvSharp.Point>();
+ // ROI mask 1
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1|● ●|3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points1 = new int[2, 3] { { 0, 2, 1 }, { 4, 2, 3 } };
+ var area1 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ var cx1 = (list_5points[points1[i, 0]].X + list_5points[points1[i, 1]].X) / 2;
+ var cy1 = (list_5points[points1[i, 0]].Y + list_5points[points1[i, 1]].Y) / 2;
+ area1.X = (list_5points[points1[i, 2]].X + cx1) / 2;
+ area1.Y = (list_5points[points1[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area1);
+ }
+
+ // ROI mask 2
+ // 0____________ 4
+ // | ● ● |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points2 = new int[2, 4] { { 0, 3, 0, 4 }, { 4, 1, 4, 0 } };
+ var area2 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ float cx1 = list_5points[points2[i, 0]].X + (list_5points[points2[i, 1]].X - list_5points[points2[i, 0]].X) / 4;
+ float cy1 = list_5points[points2[i, 0]].Y + (list_5points[points2[i, 1]].Y - list_5points[points2[i, 0]].Y) / 4;
+ float cx2 = (list_5points[points2[i, 2]].X + list_5points[points2[i, 3]].X) / 2;
+ float cy2 = (list_5points[points2[i, 2]].Y + list_5points[points2[i, 3]].Y) / 2;
+ area2.X = (int)(cx1 + cx2) / 2;
+ area2.Y = (int)(cy1 + cy2) / 2;
+ li_dst.Add(area2);
+ }
+
+ // ROI mask 3
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| ● ● |3
+ // 舌 /
+ // 舌_______/
+ // 2
+ var points3 = new int[2, 3] { { 0, 3, 2 }, { 4, 1, 2 } };
+ var area3 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ float cx1 = list_5points[points3[i, 0]].X + (list_5points[points3[i, 1]].X - list_5points[points3[i, 0]].X) / 4;
+ float cy1 = list_5points[points3[i, 0]].Y + (list_5points[points3[i, 1]].Y - list_5points[points3[i, 0]].Y) / 4;
+ area3.X = (int)(list_5points[points3[i, 2]].X + cx1) / 2;
+ area3.Y = (int)(list_5points[points3[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area3);
+ }
+
+ // ROI mask 4
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_●_●_/
+ // 2
+ var points4 = new int[2, 2] { { 0, 2 }, { 4, 2 } };
+ var area4 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ area4.X = list_5points[points4[i, 0]].X + (list_5points[points4[i, 1]].X - list_5points[points4[i, 0]].X) * 7 / 8;
+ area4.Y = list_5points[points4[i, 0]].Y + (list_5points[points4[i, 1]].Y - list_5points[points4[i, 0]].Y) * 7 / 8;
+ li_dst.Add(area4);
+ }
+
+ // TODO: if any of the 8 areas falls outside the tongue region, correct it
+ //bool isOnTongueArea = DiscriminateOnTongueArea(li_dst);
+ //if (isOnTongueArea)
+ //{
+
+ //}
+
+ return li_dst;
+ }
+
+ public List<OpenCvSharp.Scalar> Get8colors(Mat mat_maskedImg, List<OpenCvSharp.Point> list_8area)
+ {
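+ // For each sampling point, average the BGR values inside a circle of radius RADIUS_COLORAREA.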
+ List<OpenCvSharp.Scalar> li_dst = new List<OpenCvSharp.Scalar>();
+ for (int i = 0; i < list_8area.Count(); i++)
+ {
+ using (Mat mat_colorRoi = Mat.Zeros(mat_maskedImg.Size(), MatType.CV_8UC1))
+ {
+ // build a mask marking the color-sampling region
+ Cv2.Circle(mat_colorRoi, list_8area[i], RADIUS_COLORAREA, 255, -1);
+
+ // average the color over the region
+ var color = Cv2.Mean(mat_maskedImg, mat_colorRoi);
+ li_dst.Add(color);
+ }
+ }
+ return li_dst;
+ }
+
+ public void Show8area(Mat oriImg, List<OpenCvSharp.Point> list_8area)
+ {
+
+ Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 0), -1);
+ Cv2.NamedWindow("dst_", WindowMode.KeepRatio ^ WindowMode.Normal);
+ Cv2.ImShow("dst_", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
+ }
+
+ public Mat ShowResult(Mat oriImg, List<OpenCvSharp.Point> list_5point, List<OpenCvSharp.Point> list_8area)
+ {
+ Cv2.Circle(oriImg, list_5point[0], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[1], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[2], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[3], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[4], 3, new Scalar(255, 0, 0), -1);
+
+ Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(0, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(0, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 255), -1);
+ Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 255), -1);
+
+ Cv2.Line(oriImg, list_5point[0], list_5point[1], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[1], list_5point[2], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[2], list_5point[3], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[3], list_5point[4], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[4], list_5point[0], new Scalar(255, 100, 100));
+
+ return oriImg;
+ }
+
+ public List<OpenCvSharp.Scalar> Calc8Lab(List<OpenCvSharp.Scalar> list_8colors, string path_calibCsv, string path_colorMatrixXYZ)
+ {
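+ // Pipeline: read the calibration chart RGB values, fit the RGB-to-XYZ polynomial, reload the
+ // fitted matrix from CSV, then convert each sampled BGR value to CIELAB.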
+ GetColorMatrixRGB(path_calibCsv);
+ CalcTransMat(path_colorMatrixXYZ);
+ Read_TranslationMatrix();
+ var li_dst = new List<OpenCvSharp.Scalar>();
+ for (int i = 0; i < list_8colors.Count(); i++)
+ {
+ var LabValue = CalcLab(list_8colors[i]);
+ li_dst.Add(LabValue);
+ }
+ return li_dst;
+ }
+
+ public void GetColorMatrixRGB(string path_calibCsv)
+ {
+ System.Text.Encoding encoding = GetType(path_calibCsv);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_calibCsv, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding);
+ string strLine = "";
+ string[] aryLine;
+ sr3.ReadLine(); // skip the header
+ int i = 0;
+ while ((strLine = sr3.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ var no = Convert.ToSingle(aryLine[0]);
+ float r = Convert.ToSingle(aryLine[1]);
+ float g = Convert.ToSingle(aryLine[2]);
+ float b = Convert.ToSingle(aryLine[3]);
+ m_BforLab[i] = b;
+ m_GforLab[i] = g;
+ m_RforLab[i] = r;
+ i++;
+ }
+ sr3.Close();
+ fs3.Close();
+ }
+
+ public void CalcTransMat(string path_colorMatrixXYZ)
+ {
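+ // Fits a 17-term polynomial (linear, quadratic and cubic cross terms plus a constant) that maps
+ // camera RGB to CIE XYZ over the 24 color-chart patches, solved as a least-squares problem via SVD.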
+ Mat RGBmat = new Mat(24, 17, MatType.CV_64F, new Scalar(1.0f));
+ Mat XYZmat = new Mat(24, 4, MatType.CV_64F, new Scalar(1.0f));
+
+ // read the color chart XYZ reference values
+ string line;
+ string[] split = new string[3];
+ double valueX = 0, valueY = 0, valueZ = 0;
+ System.Text.Encoding encoding3 = GetType(path_colorMatrixXYZ);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_colorMatrixXYZ, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding3);
+ for (int i = 0; i < 24; i++)
+ {
+ line = sr3.ReadLine();
+ split = line.Split(' ');
+ valueX = Convert.ToDouble(split[0]);
+ valueY = Convert.ToDouble(split[1]);
+ valueZ = Convert.ToDouble(split[2]);
+ XYZmat.Set<double>(i, 0, valueX);
+ XYZmat.Set<double>(i, 1, valueY);
+ XYZmat.Set<double>(i, 2, valueZ);
+ }
+ sr3.Close();
+ fs3.Close();
+
+ // build the design matrix for the transformation
+ for (int j = 0; j < 24; j++)
+ {
+ // column order:
+ // R, G, B, RG, RB, GB, R^2, G^2, B^2,
+ // R^2B, R^2G, G^2R, G^2B, B^2R, B^2G, RGB (column 16 stays 1.0 for the constant term)
+ RGBmat.Set<double>(j, 2, m_BforLab[j]);
+ RGBmat.Set<double>(j, 1, m_GforLab[j]);
+ RGBmat.Set<double>(j, 0, m_RforLab[j]);
+ // second-order terms
+ RGBmat.Set<double>(j, 3, m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set<double>(j, 4, m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set<double>(j, 5, m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set<double>(j, 6, m_RforLab[j] * m_RforLab[j]);
+ RGBmat.Set<double>(j, 7, m_GforLab[j] * m_GforLab[j]);
+ RGBmat.Set<double>(j, 8, m_BforLab[j] * m_BforLab[j]);
+ // third-order terms
+ RGBmat.Set<double>(j, 9, m_RforLab[j] * m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set<double>(j, 10, m_RforLab[j] * m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set<double>(j, 11, m_GforLab[j] * m_GforLab[j] * m_RforLab[j]);
+ RGBmat.Set<double>(j, 12, m_GforLab[j] * m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set<double>(j, 13, m_BforLab[j] * m_BforLab[j] * m_RforLab[j]);
+ RGBmat.Set<double>(j, 14, m_BforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ RGBmat.Set<double>(j, 15, m_RforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ }
+ // solve for the transformation matrix
+ Mat translation = new Mat();
+ var canSolve = Cv2.Solve(RGBmat, XYZmat, translation, DecompTypes.SVD);
+
+ // save the matrix to CSV
+ string CSVfilename = "translateMatrix.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ string s2 = "";
+ for (int i = 0; i <= 16; i++)
+ {
+ for (int j = 0; j <= 2; j++)
+ {
+ double s1 = translation.At<double>(i, j);
+ s2 += s1.ToString() + ",";
+ }
+ CSV_data.WriteLine(s2);
+ s2 = "";
+ }
+ CSV_data.Close();
+ }
+
+ public void Read_TranslationMatrix()
+ {
+ // re-read the transformation matrix from the saved CSV
+ System.Text.Encoding encoding = GetType("translateMatrix.csv");
+ System.IO.FileStream fs1 = new System.IO.FileStream("translateMatrix.csv", System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr1 = new System.IO.StreamReader(fs1, encoding);
+ for (int i = 0; i < 17; i++)
+ {
+ a[i] = 0;
+ b[i] = 0;
+ c[i] = 0;
+ }
+ k = 0;
+ string strLine = "";
+ string[] aryLine = null;
+ while ((strLine = sr1.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ a[k] = Convert.ToSingle(aryLine[0]);
+ b[k] = Convert.ToSingle(aryLine[1]);
+ c[k] = Convert.ToSingle(aryLine[2]);
+ k++;
+ }
+ sr1.Close();
+ fs1.Close();
+ }
+
+ private OpenCvSharp.Scalar CalcLab(OpenCvSharp.Scalar BgrValue)
+ {
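+ // Applies the fitted polynomial to convert a BGR sample to XYZ, clamps negative values to zero,
+ // then converts XYZ to CIELAB with the cube-root formula using the white point chosen below.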
+ // compute CIELAB
+ // first convert to XYZ
+ double X, Y, Z;
+ double m_B = BgrValue.Val0;
+ double m_G = BgrValue.Val1;
+ double m_R = BgrValue.Val2;
+ X = m_R * a[0] + m_G * a[1] + m_B * a[2]
+ + a[3] * m_R * m_G + a[4] * m_R * m_B + a[5] * m_G * m_B
+ + a[6] * m_R * m_R + a[7] * m_G * m_G + a[8] * m_B * m_B
+ + a[9] * m_R * m_R * m_B + a[10] * m_R * m_R * m_G
+ + a[11] * m_G * m_G * m_R + a[12] * m_G * m_G * m_B
+ + a[13] * m_B * m_B * m_R + a[14] * m_B * m_B * m_G
+ + a[15] * m_R * m_G * m_B
+ + a[16];
+ Y = m_R * b[0] + m_G * b[1] + m_B * b[2]
+ + b[3] * m_R * m_G + b[4] * m_R * m_B + b[5] * m_G * m_B
+ + b[6] * m_R * m_R + b[7] * m_G * m_G + b[8] * m_B * m_B
+ + b[9] * m_R * m_R * m_B + b[10] * m_R * m_R * m_G
+ + b[11] * m_G * m_G * m_R + b[12] * m_G * m_G * m_B
+ + b[13] * m_B * m_B * m_R + b[14] * m_B * m_B * m_G
+ + b[15] * m_R * m_G * m_B
+ + b[16];
+ Z = m_R * c[0] + m_G * c[1] + m_B * c[2]
+ + c[3] * m_R * m_G + c[4] * m_R * m_B + c[5] * m_G * m_B
+ + c[6] * m_R * m_R + c[7] * m_G * m_G + c[8] * m_B * m_B
+ + c[9] * m_R * m_R * m_B + c[10] * m_R * m_R * m_G
+ + c[11] * m_G * m_G * m_R + c[12] * m_G * m_G * m_B
+ + c[13] * m_B * m_B * m_R + c[14] * m_B * m_B * m_G
+ + c[15] * m_R * m_G * m_B
+ + c[16];
+ if (X < 0) X = 0;
+ if (Y < 0) Y = 0;
+ if (Z < 0) Z = 0;
+
+ // Convert to Lab (fixed formula)
+ // TIAS light source (measured 2020-10-23)
+ // double Xn = 99.5829;
+ // double Yn = 100.0;
+ // double Zn = 57.1402;
+
+ // Values used by Tang and Takeda (probably measurements of the old TIAS light source)
+ //double Xn = 102.07;
+ //double Yn = 100.0;
+ //double Zn = 79.41;
+
+ // Values used by Ishikawa and Prof. Nakaguchi (artificial sunlight illumination?)
+ double Xn = 92.219;
+ double Yn = 100.0;
+ double Zn = 95.965;
+ double cL = 116.0 * Math.Pow((Y / Yn), 1.0 / 3.0) - 16.0;
+ double ca = 500.0 * (Math.Pow((X / Xn), 1.0 / 3.0) - Math.Pow((Y / Yn), 1.0 / 3.0));
+ double cb = 200.0 * (Math.Pow((Y / Yn), 1.0 / 3.0) - Math.Pow((Z / Zn), 1.0 / 3.0));
+
+ return new OpenCvSharp.Scalar(cL, ca, cb);
+ }
+
+ public void Write8colors(List list_8Bgr, List list_8Lab)
+ {
+ // save the results to CSV
+ string CSVfilename = "CalculatedLab.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ CSV_data.WriteLine("Area,R,G,B,L,a,B");
+ for (int i = 0; i < list_8Bgr.Count(); i++)
+ {
+ string str = (i + 1).ToString() + ",";
+ str +=
+ list_8Bgr[i].Val2.ToString("0.0000") + "," +
+ list_8Bgr[i].Val1.ToString("0.0000") + "," +
+ list_8Bgr[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val1.ToString("0.0000") + "," +
+ list_8Lab[i].Val2.ToString("0.0000");
+ CSV_data.WriteLine(str);
+ }
+ CSV_data.Close();
+ }
+
+ private List<int> IndexOfAll(List<int> li, int target)
+ {
+ int num = li.IndexOf(target);
+ var li_num = new List<int>();
+ if (num >= 0)
+ {
+ li_num.Add(num);
+ // Repeat until IndexOf no longer finds the target
+ while (num >= 0)
+ {
+ // Search from the position just after the previous match
+ num = li.IndexOf(target, num + 1);
+ if (num >= 0)
+ {
+ li_num.Add(num);
+ }
+ }
+ }
+ else
+ {
+ Console.WriteLine("{0} was not found", target);
+ }
+ return li_num;
+ }
+
+
+ public static System.Text.Encoding GetType(string FILE_NAME)
+ {
+ System.IO.FileStream fs = new System.IO.FileStream(FILE_NAME, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.Text.Encoding r = GetType(fs);
+ fs.Close();
+ return r;
+ }
+
+ public static System.Text.Encoding GetType(System.IO.FileStream fs)
+ {
+ byte[] Unicode = new byte[] { 0xFF, 0xFE, 0x41 };
+ byte[] UnicodeBIG = new byte[] { 0xFE, 0xFF, 0x00 };
+ byte[] UTF8 = new byte[] { 0xEF, 0xBB, 0xBF };
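+ // BOM signatures: UTF-8 (EF BB BF), UTF-16 BE (FE FF), UTF-16 LE (FF FE);
+ // files without a recognizable BOM fall back to the system default (ANSI) encoding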
+ System.Text.Encoding reVal = System.Text.Encoding.Default;
+
+ System.IO.BinaryReader r = new System.IO.BinaryReader(fs, System.Text.Encoding.Default);
+ int i;
+ int.TryParse(fs.Length.ToString(), out i);
+ byte[] ss = r.ReadBytes(i);
+ if (IsUTF8Bytes(ss) || (ss[0] == 0xEF && ss[1] == 0xBB && ss[2] == 0xBF))
+ {
+ reVal = System.Text.Encoding.UTF8;
+ }
+ else if (ss[0] == 0xFE && ss[1] == 0xFF && ss[2] == 0x00)
+ {
+ reVal = System.Text.Encoding.BigEndianUnicode;
+ }
+ else if (ss[0] == 0xFF && ss[1] == 0xFE && ss[2] == 0x41)
+ {
+ reVal = System.Text.Encoding.Unicode;
+ }
+ r.Close();
+ return reVal;
+ }
+
+ private static bool IsUTF8Bytes(byte[] data)
+ {
+ int charByteCounter = 1;
+ byte curByte;
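+ // charByteCounter tracks how many bytes remain in the current UTF-8 sequence: the lead byte's
+ // high-order 1-bits give the sequence length, and every continuation byte must match 10xxxxxx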
+ for (int i = 0; i < data.Length; i++)
+ {
+ curByte = data[i];
+ if (charByteCounter == 1)
+ {
+ if (curByte >= 0x80)
+ {
+ while (((curByte <<= 1) & 0x80) != 0)
+ {
+ charByteCounter++;
+ }
+ if (charByteCounter == 1 || charByteCounter > 6)
+ {
+ return false;
+ }
+ }
+ }
+ else
+ {
+ if ((curByte & 0xC0) != 0x80)
+ {
+ return false;
+ }
+ charByteCounter--;
+ }
+ }
+ if (charByteCounter > 1)
+ {
+ throw new Exception("Error");
+ }
+ return true;
+ }
+ }
+}
diff --git a/Main/Tongue extraction/DeepTIAS1.9.csproj b/Main/Tongue extraction/DeepTIAS1.9.csproj
new file mode 100644
index 0000000..882082c
--- /dev/null
+++ b/Main/Tongue extraction/DeepTIAS1.9.csproj
@@ -0,0 +1,191 @@
+
+
+
+
+
+ Debug
+ AnyCPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}
+ WinExe
+ Properties
+ Tongue_extraction
+ DeepTIAS1.9
+ v4.7.1
+ 512
+ true
+
+
+
+ publish\
+ true
+ Disk
+ false
+ Foreground
+ 7
+ Days
+ false
+ false
+ true
+ 0
+ 1.0.0.%2a
+ false
+ false
+ true
+
+
+ AnyCPU
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ AnyCPU
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+ true
+ bin\x64\Debug\
+ DEBUG;TRACE
+ full
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+ bin\x64\Release\
+ TRACE
+ true
+ pdbonly
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+ DeepTongue_Icon.ico
+
+
+
+
+ Form
+
+
+ Form1.cs
+
+
+
+
+
+ Form1.cs
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+ True
+ Resources.resx
+ True
+
+
+
+ SettingsSingleFileGenerator
+
+
+
+
+
+
+
+
+ Always
+
+
+
+
+ False
+ Microsoft .NET Framework 4.7.1 %28x86 and x64%29
+ true
+
+
+ False
+ .NET Framework 3.5 SP1
+ false
+
+
+
+
+ False
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.dll
+
+
+ False
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.Blob.dll
+
+
+ False
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.Extensions.dll
+
+
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.UserInterface.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.Composition.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.Composition.Registration.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.DataAnnotations.dll
+
+
+
+
+
+
+
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.Reflection.Context.dll
+
+
+ ..\..\..\packages\System.ValueTuple.4.4.0\lib\net461\System.ValueTuple.dll
+
+
+
+
+ False
+ ..\..\..\packages\TensorFlowSharp.1.11.0\lib\net471\TensorFlowSharp.dll
+
+
+
+
+
+ This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Main/Tongue extraction/DeepTongue_Icon.ico b/Main/Tongue extraction/DeepTongue_Icon.ico
new file mode 100644
index 0000000..85fef71
--- /dev/null
+++ b/Main/Tongue extraction/DeepTongue_Icon.ico
Binary files differ
diff --git a/Main/Tongue extraction/Form1.Designer.cs b/Main/Tongue extraction/Form1.Designer.cs
new file mode 100644
index 0000000..6a7aa09
--- /dev/null
+++ b/Main/Tongue extraction/Form1.Designer.cs
@@ -0,0 +1,485 @@
+namespace Tongue_extraction
+{
+ partial class Form1
+ {
+ /// <summary>
+ /// Required designer variable.
+ /// </summary>
+ private System.ComponentModel.IContainer components = null;
+
+ /// <summary>
+ /// Clean up any resources being used.
+ /// </summary>
+ /// <param name="disposing">true to release managed resources; otherwise, false.</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ /// <summary>
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ /// </summary>
+ private void InitializeComponent()
+ {
+ System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(Form1));
+ this.button_start = new System.Windows.Forms.Button();
+ this.pictureBox_extraction = new System.Windows.Forms.PictureBox();
+ this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
+ this.panel3 = new System.Windows.Forms.Panel();
+ this.label9 = new System.Windows.Forms.Label();
+ this.comboBox_mode = new System.Windows.Forms.ComboBox();
+ this.label_mode = new System.Windows.Forms.Label();
+ this.panel1 = new System.Windows.Forms.Panel();
+ this.pictureBox_detection = new System.Windows.Forms.PictureBox();
+ this.pictureBox_input = new System.Windows.Forms.PictureBox();
+ this.label1 = new System.Windows.Forms.Label();
+ this.label2 = new System.Windows.Forms.Label();
+ this.label3 = new System.Windows.Forms.Label();
+ this.label4 = new System.Windows.Forms.Label();
+ this.label5 = new System.Windows.Forms.Label();
+ this.label6 = new System.Windows.Forms.Label();
+ this.label7 = new System.Windows.Forms.Label();
+ this.label8 = new System.Windows.Forms.Label();
+ this.pictureBox_cropResized = new System.Windows.Forms.PictureBox();
+ this.pictureBox_output = new System.Windows.Forms.PictureBox();
+ this.pictureBox_outputSRG = new System.Windows.Forms.PictureBox();
+ this.pictureBox_maskSRG = new System.Windows.Forms.PictureBox();
+ this.pictureBox_last = new System.Windows.Forms.PictureBox();
+ this.label_processingFileName = new System.Windows.Forms.Label();
+ this.label_totalProgress = new System.Windows.Forms.Label();
+ this.panel2 = new System.Windows.Forms.Panel();
+ this.button_pause = new System.Windows.Forms.Button();
+ this.backgroundWorker1 = new System.ComponentModel.BackgroundWorker();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_extraction)).BeginInit();
+ this.tableLayoutPanel1.SuspendLayout();
+ this.panel3.SuspendLayout();
+ this.panel1.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_detection)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_input)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_cropResized)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_output)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_outputSRG)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_maskSRG)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_last)).BeginInit();
+ this.panel2.SuspendLayout();
+ this.SuspendLayout();
+ //
+ // button_start
+ //
+ this.button_start.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.button_start.Location = new System.Drawing.Point(0, 0);
+ this.button_start.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.button_start.Name = "button_start";
+ this.button_start.Size = new System.Drawing.Size(246, 49);
+ this.button_start.TabIndex = 0;
+ this.button_start.Text = "Start";
+ this.button_start.UseVisualStyleBackColor = true;
+ this.button_start.Click += new System.EventHandler(this.Button_start_Click);
+ //
+ // pictureBox_extraction
+ //
+ this.pictureBox_extraction.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_extraction.Location = new System.Drawing.Point(511, 241);
+ this.pictureBox_extraction.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_extraction.Name = "pictureBox_extraction";
+ this.pictureBox_extraction.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_extraction.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_extraction.TabIndex = 4;
+ this.pictureBox_extraction.TabStop = false;
+ //
+ // tableLayoutPanel1
+ //
+ this.tableLayoutPanel1.CellBorderStyle = System.Windows.Forms.TableLayoutPanelCellBorderStyle.Outset;
+ this.tableLayoutPanel1.ColumnCount = 4;
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.Controls.Add(this.panel3, 2, 4);
+ this.tableLayoutPanel1.Controls.Add(this.label_mode, 1, 5);
+ this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 4);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_detection, 1, 0);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_input, 0, 0);
+ this.tableLayoutPanel1.Controls.Add(this.label1, 0, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label2, 1, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label3, 2, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label4, 3, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label5, 0, 3);
+ this.tableLayoutPanel1.Controls.Add(this.label6, 1, 3);
+ this.tableLayoutPanel1.Controls.Add(this.label7, 2, 3);
+ this.tableLayoutPanel1.Controls.Add(this.label8, 3, 3);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_cropResized, 2, 0);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_extraction, 2, 2);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_output, 3, 0);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_outputSRG, 0, 2);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_maskSRG, 1, 2);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_last, 3, 2);
+ this.tableLayoutPanel1.Controls.Add(this.label_processingFileName, 0, 5);
+ this.tableLayoutPanel1.Controls.Add(this.label_totalProgress, 2, 5);
+ this.tableLayoutPanel1.Controls.Add(this.panel2, 1, 4);
+ this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
+ this.tableLayoutPanel1.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.tableLayoutPanel1.Name = "tableLayoutPanel1";
+ this.tableLayoutPanel1.RowCount = 6;
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 44.44445F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 44.44444F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 11.11111F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.Size = new System.Drawing.Size(1018, 554);
+ this.tableLayoutPanel1.TabIndex = 5;
+ //
+ // panel3
+ //
+ this.panel3.Controls.Add(this.label9);
+ this.panel3.Controls.Add(this.comboBox_mode);
+ this.panel3.Dock = System.Windows.Forms.DockStyle.Left;
+ this.panel3.Location = new System.Drawing.Point(511, 478);
+ this.panel3.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.panel3.Name = "panel3";
+ this.panel3.Size = new System.Drawing.Size(246, 49);
+ this.panel3.TabIndex = 23;
+ //
+ // label9
+ //
+ this.label9.AutoSize = true;
+ this.label9.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label9.Font = new System.Drawing.Font("Arial Narrow", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
+ this.label9.Location = new System.Drawing.Point(0, 0);
+ this.label9.Name = "label9";
+ this.label9.Size = new System.Drawing.Size(47, 24);
+ this.label9.TabIndex = 1;
+ this.label9.Text = "Input";
+ //
+ // comboBox_mode
+ //
+ this.comboBox_mode.Dock = System.Windows.Forms.DockStyle.Bottom;
+ this.comboBox_mode.FormattingEnabled = true;
+ this.comboBox_mode.Items.AddRange(new object[] {
+ "Image",
+ "Image&Calib.csv",
+ "Image&Calib.csv&Mask"});
+ this.comboBox_mode.Location = new System.Drawing.Point(0, 29);
+ this.comboBox_mode.Name = "comboBox_mode";
+ this.comboBox_mode.Size = new System.Drawing.Size(246, 20);
+ this.comboBox_mode.TabIndex = 0;
+ this.comboBox_mode.SelectedIndexChanged += new System.EventHandler(this.comboBox1_SelectedIndexChanged);
+ //
+ // label_mode
+ //
+ this.label_mode.AutoSize = true;
+ this.label_mode.BackColor = System.Drawing.Color.White;
+ this.label_mode.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label_mode.Location = new System.Drawing.Point(513, 531);
+ this.label_mode.Name = "label_mode";
+ this.label_mode.Size = new System.Drawing.Size(246, 21);
+ this.label_mode.TabIndex = 22;
+ this.label_mode.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
+ //
+ // panel1
+ //
+ this.panel1.Controls.Add(this.button_start);
+ this.panel1.Dock = System.Windows.Forms.DockStyle.Left;
+ this.panel1.Location = new System.Drawing.Point(3, 478);
+ this.panel1.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.panel1.Name = "panel1";
+ this.panel1.Size = new System.Drawing.Size(246, 49);
+ this.panel1.TabIndex = 6;
+ //
+ // pictureBox_detection
+ //
+ this.pictureBox_detection.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_detection.Location = new System.Drawing.Point(257, 4);
+ this.pictureBox_detection.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_detection.Name = "pictureBox_detection";
+ this.pictureBox_detection.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_detection.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_detection.TabIndex = 2;
+ this.pictureBox_detection.TabStop = false;
+ //
+ // pictureBox_input
+ //
+ this.pictureBox_input.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_input.Location = new System.Drawing.Point(3, 4);
+ this.pictureBox_input.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_input.Name = "pictureBox_input";
+ this.pictureBox_input.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_input.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_input.TabIndex = 1;
+ this.pictureBox_input.TabStop = false;
+ //
+ // label1
+ //
+ this.label1.AutoSize = true;
+ this.label1.BackColor = System.Drawing.Color.White;
+ this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label1.Location = new System.Drawing.Point(5, 217);
+ this.label1.Name = "label1";
+ this.label1.Size = new System.Drawing.Size(246, 20);
+ this.label1.TabIndex = 7;
+ this.label1.Text = "Input";
+ this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label2
+ //
+ this.label2.AutoSize = true;
+ this.label2.BackColor = System.Drawing.Color.White;
+ this.label2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label2.Location = new System.Drawing.Point(259, 217);
+ this.label2.Name = "label2";
+ this.label2.Size = new System.Drawing.Size(246, 20);
+ this.label2.TabIndex = 8;
+ this.label2.Text = "Detection";
+ this.label2.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label3
+ //
+ this.label3.AutoSize = true;
+ this.label3.BackColor = System.Drawing.Color.White;
+ this.label3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label3.Location = new System.Drawing.Point(513, 217);
+ this.label3.Name = "label3";
+ this.label3.Size = new System.Drawing.Size(246, 20);
+ this.label3.TabIndex = 9;
+ this.label3.Text = "Crop+Resize";
+ this.label3.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label4
+ //
+ this.label4.AutoSize = true;
+ this.label4.BackColor = System.Drawing.Color.White;
+ this.label4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label4.Location = new System.Drawing.Point(767, 217);
+ this.label4.Name = "label4";
+ this.label4.Size = new System.Drawing.Size(246, 20);
+ this.label4.TabIndex = 10;
+ this.label4.Text = "Output";
+ this.label4.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label5
+ //
+ this.label5.AutoSize = true;
+ this.label5.BackColor = System.Drawing.Color.White;
+ this.label5.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label5.Location = new System.Drawing.Point(5, 454);
+ this.label5.Name = "label5";
+ this.label5.Size = new System.Drawing.Size(246, 20);
+ this.label5.TabIndex = 11;
+ this.label5.Text = "Output+SRG";
+ this.label5.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label6
+ //
+ this.label6.AutoSize = true;
+ this.label6.BackColor = System.Drawing.Color.White;
+ this.label6.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label6.Location = new System.Drawing.Point(259, 454);
+ this.label6.Name = "label6";
+ this.label6.Size = new System.Drawing.Size(246, 20);
+ this.label6.TabIndex = 12;
+ this.label6.Text = "Mask+SRG";
+ this.label6.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label7
+ //
+ this.label7.AutoSize = true;
+ this.label7.BackColor = System.Drawing.Color.White;
+ this.label7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label7.Location = new System.Drawing.Point(513, 454);
+ this.label7.Name = "label7";
+ this.label7.Size = new System.Drawing.Size(246, 20);
+ this.label7.TabIndex = 13;
+ this.label7.Text = "Extraction";
+ this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label8
+ //
+ this.label8.AutoSize = true;
+ this.label8.BackColor = System.Drawing.Color.White;
+ this.label8.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label8.Location = new System.Drawing.Point(767, 454);
+ this.label8.Name = "label8";
+ this.label8.Size = new System.Drawing.Size(246, 20);
+ this.label8.TabIndex = 14;
+ this.label8.Text = "Color Extraction";
+ this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // pictureBox_cropResized
+ //
+ this.pictureBox_cropResized.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_cropResized.Location = new System.Drawing.Point(511, 4);
+ this.pictureBox_cropResized.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_cropResized.Name = "pictureBox_cropResized";
+ this.pictureBox_cropResized.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_cropResized.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_cropResized.TabIndex = 3;
+ this.pictureBox_cropResized.TabStop = false;
+ //
+ // pictureBox_output
+ //
+ this.pictureBox_output.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_output.Location = new System.Drawing.Point(767, 5);
+ this.pictureBox_output.Name = "pictureBox_output";
+ this.pictureBox_output.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_output.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_output.TabIndex = 15;
+ this.pictureBox_output.TabStop = false;
+ //
+ // pictureBox_outputSRG
+ //
+ this.pictureBox_outputSRG.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_outputSRG.Location = new System.Drawing.Point(5, 242);
+ this.pictureBox_outputSRG.Name = "pictureBox_outputSRG";
+ this.pictureBox_outputSRG.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_outputSRG.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_outputSRG.TabIndex = 16;
+ this.pictureBox_outputSRG.TabStop = false;
+ //
+ // pictureBox_maskSRG
+ //
+ this.pictureBox_maskSRG.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_maskSRG.Location = new System.Drawing.Point(259, 242);
+ this.pictureBox_maskSRG.Name = "pictureBox_maskSRG";
+ this.pictureBox_maskSRG.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_maskSRG.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_maskSRG.TabIndex = 17;
+ this.pictureBox_maskSRG.TabStop = false;
+ //
+ // pictureBox_last
+ //
+ this.pictureBox_last.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_last.Location = new System.Drawing.Point(767, 242);
+ this.pictureBox_last.Name = "pictureBox_last";
+ this.pictureBox_last.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_last.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_last.TabIndex = 18;
+ this.pictureBox_last.TabStop = false;
+ //
+ // label_processingFileName
+ //
+ this.label_processingFileName.AutoSize = true;
+ this.label_processingFileName.BackColor = System.Drawing.Color.White;
+ this.tableLayoutPanel1.SetColumnSpan(this.label_processingFileName, 2);
+ this.label_processingFileName.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label_processingFileName.Location = new System.Drawing.Point(5, 531);
+ this.label_processingFileName.Name = "label_processingFileName";
+ this.label_processingFileName.Size = new System.Drawing.Size(500, 21);
+ this.label_processingFileName.TabIndex = 19;
+ this.label_processingFileName.Text = "Processing File: None";
+ this.label_processingFileName.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
+ //
+ // label_totalProgress
+ //
+ this.label_totalProgress.AutoSize = true;
+ this.label_totalProgress.BackColor = System.Drawing.Color.White;
+ this.label_totalProgress.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label_totalProgress.Location = new System.Drawing.Point(767, 531);
+ this.label_totalProgress.Name = "label_totalProgress";
+ this.label_totalProgress.Size = new System.Drawing.Size(246, 21);
+ this.label_totalProgress.TabIndex = 20;
+ this.label_totalProgress.Text = "Total Progress: 0/0";
+ this.label_totalProgress.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
+ //
+ // panel2
+ //
+ this.panel2.Controls.Add(this.button_pause);
+ this.panel2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel2.Location = new System.Drawing.Point(259, 479);
+ this.panel2.Name = "panel2";
+ this.panel2.Size = new System.Drawing.Size(246, 47);
+ this.panel2.TabIndex = 21;
+ //
+ // button_pause
+ //
+ this.button_pause.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.button_pause.Enabled = false;
+ this.button_pause.Location = new System.Drawing.Point(0, 0);
+ this.button_pause.Name = "button_pause";
+ this.button_pause.Size = new System.Drawing.Size(246, 47);
+ this.button_pause.TabIndex = 0;
+ this.button_pause.Text = "Pause";
+ this.button_pause.UseVisualStyleBackColor = true;
+ this.button_pause.Click += new System.EventHandler(this.Button_pause_Click);
+ //
+ // backgroundWorker1
+ //
+ this.backgroundWorker1.WorkerSupportsCancellation = true;
+ //
+ // Form1
+ //
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.ClientSize = new System.Drawing.Size(1018, 554);
+ this.Controls.Add(this.tableLayoutPanel1);
+ this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
+ this.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.Name = "Form1";
+ this.Text = "DeepTIAS1.9";
+ this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form1_FormClosing);
+ this.Load += new System.EventHandler(this.Form1_Load);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_extraction)).EndInit();
+ this.tableLayoutPanel1.ResumeLayout(false);
+ this.tableLayoutPanel1.PerformLayout();
+ this.panel3.ResumeLayout(false);
+ this.panel3.PerformLayout();
+ this.panel1.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_detection)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_input)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_cropResized)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_output)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_outputSRG)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_maskSRG)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_last)).EndInit();
+ this.panel2.ResumeLayout(false);
+ this.ResumeLayout(false);
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.Button button_start;
+ private System.Windows.Forms.PictureBox pictureBox_extraction;
+ private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
+ private System.Windows.Forms.Panel panel1;
+ private System.ComponentModel.BackgroundWorker backgroundWorker1;
+ private System.Windows.Forms.Label label1;
+ private System.Windows.Forms.Label label2;
+ private System.Windows.Forms.Label label3;
+ private System.Windows.Forms.Label label4;
+ private System.Windows.Forms.Label label5;
+ private System.Windows.Forms.Label label6;
+ private System.Windows.Forms.Label label7;
+ private System.Windows.Forms.Label label8;
+ private System.Windows.Forms.PictureBox pictureBox_output;
+ private System.Windows.Forms.PictureBox pictureBox_outputSRG;
+ private System.Windows.Forms.PictureBox pictureBox_maskSRG;
+ private System.Windows.Forms.PictureBox pictureBox_last;
+ private System.Windows.Forms.Label label_processingFileName;
+ private System.Windows.Forms.Panel panel2;
+ private System.Windows.Forms.Button button_pause;
+ private System.Windows.Forms.Label label_totalProgress;
+ private System.Windows.Forms.Label label_mode;
+ private System.Windows.Forms.PictureBox pictureBox_detection;
+ private System.Windows.Forms.PictureBox pictureBox_input;
+ private System.Windows.Forms.PictureBox pictureBox_cropResized;
+ private System.Windows.Forms.Panel panel3;
+ private System.Windows.Forms.ComboBox comboBox_mode;
+ private System.Windows.Forms.Label label9;
+ }
+}
+
diff --git a/Main/Tongue extraction/Form1.cs b/Main/Tongue extraction/Form1.cs
new file mode 100644
index 0000000..c4455cc
--- /dev/null
+++ b/Main/Tongue extraction/Form1.cs
@@ -0,0 +1,1284 @@
+using System;
+using System.Windows.Forms;
+using System.IO;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.ComponentModel;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Diagnostics;
+using System.Linq;
+using System.Collections.Generic;
+using TensorFlow;
+using OpenCvSharp;
+
+namespace Tongue_extraction
+{
+ public partial class Form1 : Form
+ {
+ //Use the RemoveSmallRegionDLL
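+ // Native post-processing helper: reads the binary mask at the first path, removes connected regions
+ // smaller than AreaLimit pixels and writes the result to the second path (the exact meaning of
+ // CheckMode / NeighborMode is defined inside the DLL).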
+ [DllImport(@"RemoveSmallRegionDLL.dll", EntryPoint = "RemoveSmallRegion", SetLastError = true, CharSet = CharSet.Ansi, ExactSpelling = false, CallingConvention = CallingConvention.StdCall)]
+ extern static void RemoveSmallRegion(string name, string name2, int AreaLimit, int CheckMode, int NeighborMode);
+
+ Mat mat_drawBox = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_roi = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_input = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_roi256 = new Mat(256, 256, MatType.CV_8UC3, 1);
+ Mat mat_roisize = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_output = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_outputNoBox = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_outputChanged = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_mask = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_extraction = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_cropped;
+ Mat mat_outputSRGNoBox = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_outputSRG = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_maskSRG = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+
+ public static Bitmap bitmap_bitch;
+ string[] path;
+
+ OpenCvSharp.Rect rectangle;
+
+ byte[] byte_inputDetection;
+ byte[] byte_inputSegmentation;
+ OpenCvSharp.Point P1 = new OpenCvSharp.Point();
+ OpenCvSharp.Point P2 = new OpenCvSharp.Point();
+
+ byte[] mask = new byte[200000];
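+ // Holds the 256*256 (= 65,536 byte) segmentation mask for the current image; the buffer is oversized.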
+
+ string basepath;
+ string imageFile;
+ string time;
+ string modelFile;
+ int ii = 0;
+ int count;
+ int areaCount;
+
+ Rect roi = new Rect();
+ int mmp = 0;
+ int pmm = 0;
+
+ int check_detection = 0;
+ float max_score = 0;
+
+ private ManualResetEvent manualReset = new ManualResetEvent(true);
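+ // Signaled while processing may continue; the worker blocks on WaitOne() at the top of each loop
+ // iteration, so the Pause handler can suspend it by resetting this event.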
+
+ string fileName_info = Directory.GetCurrentDirectory() + "//info//" + DateTime.Now.ToLocalTime().ToString("yyyyMMddhhmmss") + ".csv";
+ StreamWriter sw;
+
+ public Form1()
+ {
+ InitializeComponent();
+
+ }
+
+ private void Form1_Load(object sender, EventArgs e)
+ {
+ comboBox_mode.SelectedIndex = 1;
+ }
+
+ private void Button_start_Click(object sender, EventArgs e)
+ {
+ path = Directory.GetFiles("data");
+ button_start.Enabled = false;
+ button_pause.Enabled = true;
+ pictureBox_input.Image = null;
+ pictureBox_input.Refresh();
+ pictureBox_detection.Image = null;
+ pictureBox_detection.Refresh();
+ pictureBox_cropResized.Image = null;
+ pictureBox_cropResized.Refresh();
+ pictureBox_output.Image = null;
+ pictureBox_output.Refresh();
+ pictureBox_outputSRG.Image = null;
+ pictureBox_outputSRG.Refresh();
+ pictureBox_maskSRG.Image = null;
+ pictureBox_maskSRG.Refresh();
+ pictureBox_extraction.Image = null;
+ pictureBox_extraction.Refresh();
+ pictureBox_last.Image = null;
+ pictureBox_last.Refresh();
+ comboBox_mode.Enabled = false;
+
+ if (comboBox_mode.SelectedIndex == 0)
+ {
+ this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork_Image);
+ backgroundWorker1.RunWorkerAsync();
+ }
+ else if (comboBox_mode.SelectedIndex == 1)
+ {
+ this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork_ImageandCalib);
+ backgroundWorker1.RunWorkerAsync();
+ }
+ else if (comboBox_mode.SelectedIndex == 2)
+ {
+ //this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork_MaskImage);
+ //backgroundWorker1.RunWorkerAsync();
+ MessageBox.Show("This mode is not implemented.");
+ }
+ else
+ {
+ MessageBox.Show("Please select the mode on the combobox");
+ }
+ }
+
+ public static class ImageUtil
+ {
+ // Convert the image in filename to a Tensor suitable as input to the Inception model.
+ public static TFTensor CreateTensorFromImageFile(byte[] contents, TFDataType destinationDataType = TFDataType.UInt8)
+ {
+ // DecodeJpeg uses a scalar String-valued tensor as input.
+ var tensor = TFTensor.CreateString(contents);
+
+ TFGraph graph;
+ TFOutput input, output;
+
+ // Construct a graph to normalize the image
+ ConstructGraphToNormalizeImage(out graph, out input, out output, destinationDataType);
+
+ // Execute that graph to normalize this one image
+ using (var session = new TFSession(graph))
+ {
+ var normalized = session.Run(
+ inputs: new[] { input },
+ inputValues: new[] { tensor },
+ outputs: new[] { output });
+
+ if (session != null)
+ {
+ session.Dispose();
+ }
+ if (tensor != null)
+ {
+ tensor.Dispose();
+ }
+ if (graph != null)
+ {
+ graph.Dispose();
+ }
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+
+ return normalized[0];
+ }
+ }
+
+ // The inception model takes as input the image described by a Tensor in a very
+ // specific normalized format (a particular image size, shape of the input tensor,
+ // normalized pixel values etc.).
+ //
+ // This function constructs a graph of TensorFlow operations which takes as
+ // input a JPEG-encoded string and returns a tensor suitable as input to the
+ // inception model.
+
+ public static void ConstructGraphToNormalizeImage(out TFGraph graph, out TFOutput input, out TFOutput output, TFDataType destinationDataType = TFDataType.UInt8)
+ {
+ const int W = 256;
+ const int H = 256;
+ const float Mean = 0;
+ const float Scale = 1;
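+ // Mean = 0 and Scale = 1 leave the decoded uint8 pixel values unchanged, as expected by the detection model.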
+ graph = new TFGraph();
+ input = graph.Placeholder(TFDataType.String);
+ output = graph.Cast(graph.Div(
+ x: graph.Sub(
+ x: graph.ResizeBilinear(
+ images: graph.ExpandDims(
+ input: graph.Cast(
+ graph.DecodeJpeg(contents: input, channels: 3), DstT: destinationDataType),
+ dim: graph.Const(0, "make_batch")),
+ size: graph.Const(new int[] { W, H }, "size")),
+ y: graph.Const(Mean, "mean")),
+ y: graph.Const(Scale, "scale")), destinationDataType);
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+ }
+ }
+
+ public static class ImageUtil2
+ {
+ // Convert the image in filename to a Tensor suitable as input to the Inception model.
+ public static TFTensor CreateTensorFromImageFile(byte[] contents, TFDataType destinationDataType = TFDataType.Float)
+ {
+ // DecodeJpeg uses a scalar String-valued tensor as input.
+ var tensor = TFTensor.CreateString(contents);
+
+ TFGraph graph;
+ TFOutput input, output;
+
+ // Construct a graph to normalize the image
+ ConstructGraphToNormalizeImage(out graph, out input, out output, destinationDataType);
+
+ // Execute that graph to normalize this one image
+ using (var session = new TFSession(graph))
+ {
+ var normalized = session.Run(
+ inputs: new[] { input },
+ inputValues: new[] { tensor },
+ outputs: new[] { output });
+
+ if (session != null)
+ {
+ session.Dispose();
+ }
+ if (tensor != null)
+ {
+ tensor.Dispose();
+ }
+ if (graph != null)
+ {
+ graph.Dispose();
+ }
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+
+ return normalized[0];
+ }
+ }
+
+ // The inception model takes as input the image described by a Tensor in a very
+ // specific normalized format (a particular image size, shape of the input tensor,
+ // normalized pixel values etc.).
+ //
+ // This function constructs a graph of TensorFlow operations which takes as
+ // input a JPEG-encoded string and returns a tensor suitable as input to the
+ // inception model.
+
+ public static void ConstructGraphToNormalizeImage(out TFGraph graph, out TFOutput input, out TFOutput output, TFDataType destinationDataType = TFDataType.Float)
+ {
+ const int W = 256;
+ const int H = 256;
+ //const int W = 512;
+ //const int H = 512;
+
+ const float Mean = 128;
+ const float Scale = 128;
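+ // Mean = 128 and Scale = 128 map the 0-255 pixel range to roughly [-1, 1], the input range
+ // expected by the pix2pix-style segmentation models.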
+ graph = new TFGraph();
+ input = graph.Placeholder(TFDataType.String);
+ output = graph.Cast(graph.Div(
+ x: graph.Sub(
+ x: graph.ResizeBilinear(
+ images: graph.ExpandDims(
+ input: graph.Cast(
+ graph.DecodeJpeg(contents: input, channels: 3), DstT: TFDataType.Float),
+ dim: graph.Const(0, "make_batch")),
+ size: graph.Const(new int[] { W, H }, "size")),
+ y: graph.Const(Mean, "mean")),
+ y: graph.Const(Scale, "scale")), destinationDataType);
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+ }
+ }
+
+ public static byte[] Bitmap2Byte(Bitmap bitmap)
+ {
+ using (MemoryStream stream = new MemoryStream())
+ {
+ bitmap.Save(stream, ImageFormat.Jpeg);
+ byte[] data = new byte[stream.Length];
+ stream.Seek(0, SeekOrigin.Begin);
+ stream.Read(data, 0, Convert.ToInt32(stream.Length));
+ return data;
+ }
+ }
+
+ private static string DownloadDefaultModel(string dir)
+ {
+ var modelFile = Path.Combine(dir, "AREinProcess2_step8100.pb");
+ return modelFile;
+ }
+
+ private static string DownloadDefaultModel_noBoxPix2Pix(string dir)
+ {
+ var modelFile = Path.Combine(dir, "424_256_64_5999_scale300_enhancment_L1loss0.02001.pb");
+ return modelFile;
+ }
+
+ public static Bitmap ToGrayBitmap(byte[] rawValues, int width, int height)
+ {
+ //// Apply for a target bitmap variable and lock its memory area
+ Bitmap bmp = new Bitmap(width, height, PixelFormat.Format8bppIndexed);
+ BitmapData bmpData = bmp.LockBits(new Rectangle(0, 0, width, height),
+ ImageLockMode.WriteOnly, PixelFormat.Format8bppIndexed);
+
+ //// Get image parameters
+ int stride = bmpData.Stride; // Width of the scan line
+ int offset = stride - width; // Show gap between width and scan line width
+ IntPtr iptr = bmpData.Scan0; // Get the memory start position of bmpData
+ int scanBytes = stride * height;// Use stride width to indicate that this is the size of the memory area
+
+ //// The following is to convert the original display size byte array to the byte array actually stored in memory.
+ int posScan = 0, posReal = 0;// Set two position pointers respectively, pointing to the source array and the target array
+ byte[] pixelValues = new byte[scanBytes]; //Allocate memory for the target array
+
+ for (int x = 0; x < height; x++)
+ {
+ //// The following loop section is a simulated line scan
+ for (int y = 0; y < width; y++)
+ {
+ pixelValues[posScan++] = rawValues[posReal++];
+ }
+ posScan += offset; //At the end of the line scan, move the target position pointer over that "gap"
+ }
+
+ //// Use Marshal's Copy method to copy the just obtained memory byte array into BitmapData.
+ System.Runtime.InteropServices.Marshal.Copy(pixelValues, 0, iptr, scanBytes);
+ bmp.UnlockBits(bmpData); // Unlock the memory area
+
+ //// The following code is to modify the index table of the generated bitmap, from pseudo color to grayscale
+ ColorPalette tempPalette;
+ using (Bitmap tempBmp = new Bitmap(1, 1, PixelFormat.Format8bppIndexed))
+ {
+ tempPalette = tempBmp.Palette;
+ }
+ for (int i = 0; i < 256; i++)
+ {
+ tempPalette.Entries[i] = Color.FromArgb(i, i, i);
+ }
+
+ bmp.Palette = tempPalette;
+
+ //// The algorithm ends here and returns the result.
+ return bmp;
+ }
+
+ private void BackgroundWorker1_DoWork_Image(object sender, DoWorkEventArgs e)
+ {
+
+ // For writing out info such as bounding boxes
+ sw = new StreamWriter(fileName_info, false, System.Text.Encoding.GetEncoding("shift_jis"));
+ sw.WriteLine(
+ "image" + "," +
+ "top left X" + "," + "top left Y" + "," + "bottom right X" + "," + "bottom right Y" + "," +
+ "Width" + "," + "Height" + "," + "Area" + "," + "Gloss Count");
+
+ using (MemoryStream ms = new MemoryStream())
+ {
+ for (int a = 0; a < path.Length; a++)
+ {
+ manualReset.WaitOne();
+ ii = 0;
+ basepath = Directory.GetCurrentDirectory();
+ imageFile = System.Text.RegularExpressions.Regex.Replace(path[a], "data", "");
+ Invoke((MethodInvoker)delegate
+ {
+ label_processingFileName.Text = "Processing File: " + imageFile;
+ count = a + 1;
+ label_totalProgress.Text = "Total Progress: " + count + "/" + path.Length;
+ });
+
+ mat_input = Cv2.ImRead(basepath + "\\data" + imageFile, ImreadModes.Color);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_input.Image = bitmap_bitch;
+ pictureBox_input.Refresh();
+ });
+
+ label8.BackColor = Color.White;
+ label1.BackColor = Color.Red;
+ mat_drawBox = mat_input.Clone();
+ mat_cropped = new Mat(mat_input.Size(), MatType.CV_8UC3, 1);
+ byte_inputDetection = Bitmap2Byte(bitmap_bitch);
+
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(Directory.GetCurrentDirectory() + "/Detection_Normal.pb");
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil.CreateTensorFromImageFile(byte_inputDetection, TFDataType.UInt8);
+
+ var runner = session.GetRunner();
+ runner
+
+ .AddInput(graph["image_tensor"][0], tensor)
+ .Fetch("detection_boxes", "detection_scores", "detection_classes", "num_detections");
+
+ var output = runner.Run();
+ var boxes = (float[,,])output[0].GetValue();
+ var scores = (float[,])output[1].GetValue();
+ var classes = (float[,])output[2].GetValue();
+ var detections = (float[])output[3].GetValue();
+
+ check_detection = 0;
+ max_score = 0;
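+ // Keep only detections scoring above 0.5 and remember the bounding box of the highest-scoring one;
+ // a green rectangle is drawn each time a better candidate is found.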
+
+ for (int i = 0; i < scores.Length; i++)
+ {
+ if ((scores[0, i] > 0.5) && (scores[0, i] > max_score))
+ {
+ max_score = scores[0, i];
+ float y_min = boxes[0, i, 0] * (float)bitmap_bitch.Height;
+ float x_min = boxes[0, i, 1] * (float)bitmap_bitch.Width;
+ float y_max = boxes[0, i, 2] * (float)bitmap_bitch.Height;
+ float x_max = boxes[0, i, 3] * (float)bitmap_bitch.Width;
+ P1.X = (int)x_min;
+ P1.Y = (int)y_min;
+ P2.X = (int)x_max;
+ P2.Y = (int)y_max;
+ Cv2.Rectangle(mat_drawBox, P1, P2, new Scalar(0, 255, 0), 5);
+ rectangle.X = (int)x_min;
+ rectangle.Y = (int)y_min;
+ rectangle.Width = (int)(x_max - x_min);
+ rectangle.Height = (int)(y_max - y_min);
+
+ check_detection = 1;
+ }
+ }
+ }
+ }
+
+ // If no tongue was detected, the model trained on detected (crop-resized) images should not be used,
+ // so fall back to the previous model (trained without detection)
+ if (check_detection == 0)
+ {
+ MessageBox.Show("Error: Sorry can not detect any tongue in this image.\nPress [OK] to skip preprocessing.",
+ "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ time = DateTime.Now.ToLocalTime().ToString();
+ File.AppendAllText("DetectionFailedLog.txt ", time + " " + imageFile + "\n");
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_detection.Image = null;
+ pictureBox_detection.Refresh();
+ pictureBox_cropResized.Image = null;
+ pictureBox_cropResized.Refresh();
+ pictureBox_output.Image = null;
+ pictureBox_output.Refresh();
+ pictureBox_outputSRG.Image = null;
+ pictureBox_outputSRG.Refresh();
+ pictureBox_maskSRG.Image = null;
+ pictureBox_maskSRG.Refresh();
+ pictureBox_extraction.Image = null;
+ pictureBox_extraction.Refresh();
+ pictureBox_last.Image = null;
+ pictureBox_last.Refresh();
+ });
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+
+ Thread.Sleep(1000);
+ modelFile = DownloadDefaultModel_noBoxPix2Pix(basepath);
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(modelFile);
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+
+ var runner = session.GetRunner();
+ runner
+ .AddInput(graph["generator/input_image"][0], tensor)
+ .Fetch(graph["generator/prediction"][0]);
+ var output = runner.Run();
+ float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ for (int p = 0; p < 256; p++)
+ {
+ for (int q = 0; q < 256; q++)
+ {
+ float check = resultfloat[0, p, q, 0];
+ if (check < 0)
+ {
+ mask[ii] = 0;
+ }
+ else
+ {
+ mask[ii] = 255;
+ }
+ ii++;
+ }
+ }
+ }
+ }
+ Thread.Sleep(1000);
+ bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ mat_outputNoBox = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputNoBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_output.Image = bitmap_bitch;
+ pictureBox_output.Refresh();
+ label1.BackColor = Color.White;
+ label4.BackColor = Color.Red;
+ });
+
+ mat_outputNoBox.SaveImage(basepath + "\\output256" + imageFile);
+ try
+ {
+ RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_outputSRGNoBox = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRGNoBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_outputSRG.Image = bitmap_bitch;
+ pictureBox_outputSRG.Refresh();
+ label4.BackColor = Color.White;
+ label5.BackColor = Color.Red;
+ });
+ Cv2.Resize(mat_outputSRGNoBox, mat_mask, mat_input.Size());
+ mat_mask.SaveImage(basepath + "\\mask" + imageFile);
+ try
+ {
+ RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ label5.BackColor = Color.White;
+ label6.BackColor = Color.Red;
+ });
+ mat_extraction = mat_input.Clone();
+ areaCount = 0;
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At<Vec3b>(i, j);
+ if (mat_maskSRG.At<byte>(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At<Vec3b>(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At<Vec3b>(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At<Vec3b>(i, j).Item2);
+ mat_extraction.Set(i, j, pix);
+ areaCount++;
+ }
+ }
+ }
+ mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_extraction.Image = bitmap_bitch;
+ pictureBox_extraction.Refresh();
+ });
+ label6.BackColor = Color.White;
+
+
+
+ label7.BackColor = Color.Red;
+ label7.BackColor = Color.White;
+
+
+ label8.BackColor = Color.Red;
+ //Save bounding box info to the csv
+ sw.WriteLine(
+ imageFile.Substring(1) + ","
+ + "None" + "," + "None" + ","
+ + "None" + "," + "None" + ","
+ + "None" + "," + "None" + "," + areaCount.ToString()
+ );
+
+ continue;
+ }
+
+ // Processing when a tongue was detected successfully
+ else
+ {
+ // Save the image with the detected bounding box drawn
+ mat_drawBox.SaveImage(basepath + "\\detection" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_drawBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_detection.Image = bitmap_bitch;
+ pictureBox_detection.Refresh();
+ });
+ label1.BackColor = Color.White;
+
+ // Crop to the detected region, resize to 256*256 and display
+ label2.BackColor = Color.Red;
+ for (int i = P1.Y; i < P2.Y; i++)
+ {
+ for (int j = P1.X; j < P2.X; j++)
+ {
+ // Everything outside the tongue (bounding-box) region stays black
+ Vec3b pix = mat_input.At<Vec3b>(i, j);
+ mat_cropped.Set(i, j, pix);
+ }
+ }
+ mat_cropped.SaveImage(basepath + "\\cropped" + imageFile);
+ // Cut out the detected region
+ OpenCvSharp.Size size_roi = new OpenCvSharp.Size();
+ size_roi.Height = rectangle.Height;
+ size_roi.Width = rectangle.Width;
+ roi = new Rect(P1, size_roi);
+ mat_roisize = mat_input.Clone(roi);
+ // Resize to 256*256 for segmentation
+ Cv2.Resize(mat_roisize, mat_roi, mat_roi256.Size());
+ mat_roi.SaveImage(basepath + "\\cropresized" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_roi);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_cropResized.Image = bitmap_bitch;
+ pictureBox_cropResized.Refresh();
+ });
+ label2.BackColor = Color.White;
+
+ // Run segmentation
+ label3.BackColor = Color.Red;
+ byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+ Thread.Sleep(1000);
+ modelFile = DownloadDefaultModel(basepath);
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(modelFile);
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+ var runner = session.GetRunner();
+ runner
+ .AddInput(graph["generator/input_image"][0], tensor)
+ .Fetch(graph["generator/prediction"][0]);
+
+ //.AddInput(graph["input_image"][0], tensor)
+ //.Fetch(graph["generator1/decoder_1/Tanh"][0]);
+
+ var output = runner.Run();
+ float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ for (int p = 0; p < 256; p++)
+ {
+ for (int q = 0; q < 256; q++)
+ {
+ float check = resultfloat[0, p, q, 0];
+ if (check < 0)
+ {
+ mask[ii] = 0;
+ }
+ else
+ {
+ mask[ii] = 255;
+ }
+ ii++;
+ }
+ }
+ }
+ }
+ GC.Collect();
+ Thread.Sleep(1000);
+ bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ mat_output = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_output);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_output.Image = bitmap_bitch;
+ pictureBox_output.Refresh();
+ });
+ label3.BackColor = Color.White;
+
+ // Post-process the tongue segmentation result
+ label4.BackColor = Color.Red;
+ // Save the tongue segmentation result
+ mat_output.SaveImage(basepath + "\\output256" + imageFile);
+ // Remove noise with post-processing (region growing)
+ try
+ {
+ RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_outputSRG = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_outputSRG.Image = bitmap_bitch;
+ pictureBox_outputSRG.Refresh();
+ });
+ label4.BackColor = Color.White;
+ label5.BackColor = Color.Red;
+
+ // Resize the segmentation result to the bounding-box size
+ // (the result apparently stops being strictly binary at this point)
+ Cv2.Resize(mat_outputSRG, mat_outputChanged, mat_roisize.Size());
+ mat_outputChanged.SaveImage(basepath + "\\output_resized" + imageFile);
+ // Cut the tongue region out of the detected area
+ mat_mask = new Mat(mat_input.Size(), MatType.CV_8UC1, 1);
+ for (int i = P1.Y; i < P2.Y; i++)
+ {
+ for (int j = P1.X; j < P2.X; j++)
+ {
+ byte pix = mat_outputChanged.At<byte>(mmp, pmm);
+ mat_mask.Set<byte>(i, j, pix);
+ pmm++;
+ }
+ mmp++;
+ pmm = 0;
+ }
+ mmp = 0;
+ Cv2.Resize(mat_mask, mat_mask, mat_input.Size());
+ mat_mask.SaveImage(basepath + "\\mask" + imageFile);
+ // Noise removal
+ try
+ {
+ RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+
+ mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+ // Final binary mask result
+ mat_maskSRG.SaveImage(basepath + "\\mask_final" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ });
+ label5.BackColor = Color.White;
+
+ // Combine the original image with the mask to extract the tongue region
+ label6.BackColor = Color.Red;
+ mat_extraction = mat_input.Clone();
+ areaCount = 0;
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At<Vec3b>(i, j);
+ if (mat_maskSRG.At<byte>(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At<Vec3b>(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At<Vec3b>(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At<Vec3b>(i, j).Item2);
+ mat_extraction.Set(i, j, pix);
+ areaCount++;
+ }
+ }
+ }
+ mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_extraction.Image = bitmap_bitch;
+ pictureBox_extraction.Refresh();
+ });
+
+ label6.BackColor = Color.White;
+
+ // Gloss extraction
+ // The extraction result is not used here (handling the 255,255,255 background is cumbersome)
+ label7.BackColor = Color.Red;
+ label7.BackColor = Color.White;
+
+ // Record the image that has finished processing
+ label8.BackColor = Color.Red;
+ time = DateTime.Now.ToLocalTime().ToString();
+ File.AppendAllText("Log.txt ", time + " " + imageFile + " Done!\n");
+
+
+ //Save bounding box info to the csv
+ sw.WriteLine(
+ imageFile.Substring(1) + ","
+ + P1.X.ToString() + "," + P1.Y.ToString() + ","
+ + P2.X.ToString() + "," + P2.Y.ToString() + ","
+ + Math.Abs(P1.X - P2.X).ToString() + "," + Math.Abs(P1.Y - P2.Y).ToString() + ","
+ + areaCount.ToString() + ","
+ );
+ }
+ GC.Collect();
+ }
+ MessageBox.Show("Finished!");
+
+ Invoke((MethodInvoker)delegate
+ {
+ button_start.Enabled = true;
+ button_pause.Enabled = false;
+ label_processingFileName.Text = "Processing File: None";
+ });
+ }
+ }
+
+ private void BackgroundWorker1_DoWork_ImageandCalib(object sender, DoWorkEventArgs e)
+ {
+
+ // For writing out info such as bounding boxes
+ sw = new StreamWriter(fileName_info, false, System.Text.Encoding.GetEncoding("shift_jis"));
+ // Prepare the output csv
+ sw.Write(
+ "image" + "," +
+ "top left X" + "," + "top left Y" + "," + "bottom right X" + "," + "bottom right Y" + ","
+ );
+ for (int i = 0; i < 1; i++)
+ for (int j = 0; j < 5; j++)
+ sw.Write("x" + j.ToString() + "," + "y" + j.ToString() + ",");
+ for (int i = 0; i < 1; i++)
+ for (int j = 0; j < 8; j++)
+ sw.Write("x" + j.ToString() + "," + "y" + j.ToString() + ",");
+ sw.WriteLine();
+ sw.Close();
+
+ using (MemoryStream ms = new MemoryStream())
+ {
+ // Get the list of data directories (glob)
+ var glob_dir = Directory.GetDirectories(@"D:\kei2\Study\Tongue\DentalDiagnosisDataAnalysis\Data\NewDataset\Images\Proc");
+
+ for (int a = 0; a < glob_dir.Length; a++)
+ {
+ manualReset.WaitOne();
+
+ // Paths to the image and the calibration file
+ var glob_file = Directory.GetFiles(glob_dir[a]);
+ var path_calib = glob_file.Where(n => n.Contains("csv")).ToList()[0];
+ var path_image = glob_file.Where(n => n.Contains("bmp") || n.Contains("png")).ToList()[0];
+ var path_base = Directory.GetCurrentDirectory();
+ var name_image = Path.GetFileName(path_image);
+ var name_dir = Path.GetFileName(glob_dir[a]);
+
+ // Show status
+ Invoke((MethodInvoker)delegate
+ {
+ label_processingFileName.Text = "Processing File: " + "\\" + name_dir;
+ count = a + 1;
+ label_totalProgress.Text = "Total Progress: " + count + "/" + glob_dir.Length;
+ });
+
+ // Load and display the input image
+ mat_input = Cv2.ImRead(path_image, ImreadModes.Color);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_input.Image = bitmap_bitch;
+ pictureBox_input.Refresh();
+ });
+
+ // Detection
+ label8.BackColor = Color.White;
+ label1.BackColor = Color.Red;
+ mat_drawBox = mat_input.Clone();
+ mat_cropped = new Mat(mat_input.Size(), MatType.CV_8UC3, 1);
+ byte_inputDetection = Bitmap2Byte(bitmap_bitch);
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(path_base + "/Detection_Normal.pb");
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil.CreateTensorFromImageFile(byte_inputDetection, TFDataType.UInt8);
+
+ var runner = session.GetRunner();
+ runner
+ .AddInput(graph["image_tensor"][0], tensor)
+ .Fetch("detection_boxes", "detection_scores", "detection_classes", "num_detections");
+
+ var output = runner.Run();
+ var boxes = (float[,,])output[0].GetValue();
+ var scores = (float[,])output[1].GetValue();
+ var classes = (float[,])output[2].GetValue();
+ var detections = (float[])output[3].GetValue();
+ check_detection = 0;
+ max_score = 0;
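+ // detection_boxes are normalized [ymin, xmin, ymax, xmax]; keep only the highest-scoring detection above 0.5 and convert it to pixel coordinates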
+ for (int i = 0; i < scores.Length; i++)
+ {
+ if ((scores[0, i] > 0.5) && (scores[0, i] > max_score))
+ {
+ max_score = scores[0, i];
+ float y_min = boxes[0, i, 0] * (float)bitmap_bitch.Height;
+ float x_min = boxes[0, i, 1] * (float)bitmap_bitch.Width;
+ float y_max = boxes[0, i, 2] * (float)bitmap_bitch.Height;
+ float x_max = boxes[0, i, 3] * (float)bitmap_bitch.Width;
+ P1.X = (int)x_min;
+ P1.Y = (int)y_min;
+ P2.X = (int)x_max;
+ P2.Y = (int)y_max;
+ Cv2.Rectangle(mat_drawBox, P1, P2, new Scalar(0, 255, 0), 5);
+ rectangle.X = (int)x_min;
+ rectangle.Y = (int)y_min;
+ rectangle.Width = (int)(x_max - x_min);
+ rectangle.Height = (int)(y_max - y_min);
+
+ check_detection = 1;
+ }
+ }
+ }
+ }
+ // Display and save the detection result
+ mat_drawBox.SaveImage(path_base + "\\detection" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_drawBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_detection.Image = bitmap_bitch;
+ pictureBox_detection.Refresh();
+ });
+ label1.BackColor = Color.White;
+
+ // Crop to the detected tongue region
+ label2.BackColor = Color.Red;
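+ // Copy only the pixels inside the detected bounding box into mat_cropped; the rest keeps the initial fill value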
+ for (int i = P1.Y; i < P2.Y; i++)
+ {
+ for (int j = P1.X; j < P2.X; j++)
+ {
+ Vec3b pix = mat_input.At(i, j);
+ mat_cropped.Set(i, j, pix);
+ }
+ }
+ mat_cropped.SaveImage(path_base + "\\cropped" + "\\" + name_dir + ".bmp");
+
+ // Resize the detected tongue region
+ OpenCvSharp.Size size_roi = new OpenCvSharp.Size();
+ size_roi.Height = rectangle.Height;
+ size_roi.Width = rectangle.Width;
+ roi = new Rect(P1, size_roi);
+ mat_roisize = mat_input.Clone(roi);
+ Cv2.Resize(mat_roisize, mat_roi, mat_roi256.Size());
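+ // mat_roi now holds the tongue ROI resized to the segmentation input size (mat_roi256.Size())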
+ mat_roi.SaveImage(path_base + "\\cropresized" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_roi);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_cropResized.Image = bitmap_bitch;
+ pictureBox_cropResized.Refresh();
+ });
+ label2.BackColor = Color.White;
+
+ // Segmentation
+ label3.BackColor = Color.Red;
+ byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+ Thread.Sleep(1000);
+ modelFile = DownloadDefaultModel(path_base);
+ ii = 0;
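+ // Run the segmentation generator graph on the resized ROI; ii indexes the flattened 256x256 mask buffer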
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(modelFile);
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+ var runner = session.GetRunner();
+ runner
+ //.AddInput(graph["generator/input_image"][0], tensor)
+ //.Fetch(graph["generator/prediction"][0]);
+
+ .AddInput(graph["input_image"][0], tensor)
+ .Fetch(graph["generator1/decoder_1/Tanh"][0]);
+
+ var output = runner.Run();
+ float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ for (int p = 0; p < 256; p++)
+ {
+ for (int q = 0; q < 256; q++)
+ {
+ float check = resultfloat[0, p, q, 0];
+ if (check < 0)
+ {
+ mask[ii] = 0;
+ }
+ else
+ {
+ mask[ii] = 255;
+ }
+ ii++;
+ }
+ }
+ }
+ }
+ GC.Collect();
+ Thread.Sleep(1000);
+
+ // Display and save the segmentation result
+ bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ mat_output = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_output);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_output.Image = bitmap_bitch;
+ pictureBox_output.Refresh();
+ });
+ label3.BackColor = Color.White;
+ label4.BackColor = Color.Red;
+ mat_output.SaveImage(path_base + "\\output256" + "\\" + name_dir + ".bmp");
+
+ // Post-processing (region growing) removes noise; applied twice
+ try
+ {
+ RemoveSmallRegion(path_base + "\\output256" + "\\" + name_dir + ".bmp", path_base + "\\output_changed1" + "\\" + name_dir + ".bmp", 500, 1, 1);
+ RemoveSmallRegion(path_base + "\\output_changed1" + "\\" + name_dir + ".bmp", path_base + "\\output_changed2" + "\\" + name_dir + ".bmp", 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_outputSRG = new Mat(path_base + "\\output_changed2" + "\\" + name_dir + ".bmp", ImreadModes.GrayScale);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_outputSRG.Image = bitmap_bitch;
+ pictureBox_outputSRG.Refresh();
+ });
+ label4.BackColor = Color.White;
+ label5.BackColor = Color.Red;
+
+ // Resize back to the bounding-box size
+ Cv2.Resize(mat_outputSRG, mat_outputChanged, mat_roisize.Size());
+ mat_outputChanged.SaveImage(path_base + "\\output_resized" + "\\" + name_dir + ".bmp");
+
+ // Create a mask at the same size as the input (occasionally unstable: the resize can change the size slightly and cause a range overrun)
+ // Just to be safe
+ Thread.Sleep(100);
+ GC.Collect();
+
+ mat_mask = new Mat(mat_input.Size(), MatType.CV_8UC1, 0);
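+ // Paste the bounding-box-sized mask into a full-size black mask at the detected offset, guarding against small size mismatches introduced by the resize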
+ var y_mask = 0;
+ for (int y = P1.Y; y < P2.Y; y++)
+ {
+ if (y_mask >= mat_outputChanged.Height)
+ break;
+
+ var x_mask = 0;
+ for (int x = P1.X; x < P2.X; x++)
+ {
+ if (x_mask >= mat_outputChanged.Width)
+ break;
+
+ int pix = mat_outputChanged.At(y_mask, x_mask);
+ mat_mask.Set(y, x, pix);
+ x_mask++;
+ }
+ y_mask++;
+ }
+ mmp = 0;
+ Cv2.Resize(mat_mask, mat_mask, mat_input.Size());
+ mat_mask.SaveImage(path_base + "\\mask" + "\\" + name_dir + ".bmp");
+
+ // Noise removal
+ try
+ {
+ RemoveSmallRegion(path_base + "\\mask" + "\\" + name_dir + ".bmp", path_base + "\\mask_changed1" + "\\" + name_dir + ".bmp", 500, 1, 1);
+ RemoveSmallRegion(path_base + "\\mask_changed1" + "\\" + name_dir + ".bmp", path_base + "\\mask_changed2" + "\\" + name_dir + ".bmp", 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_maskSRG = new Mat(path_base + "\\mask_changed2" + "\\" + name_dir + ".bmp", ImreadModes.GrayScale);
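+ // Re-binarize at 128 so the mask is strictly 0/255 after small-region removal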
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+
+ // Final binary mask
+ mat_maskSRG.SaveImage(path_base + "\\mask_final" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ });
+ label5.BackColor = Color.White;
+
+ // Combine the original image with the mask and extract the tongue region
+ // todo: move to mask processing and area calculation with OpenCV etc.
+ label6.BackColor = Color.Red;
+ mat_extraction = mat_input.Clone();
+ areaCount = 0;
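+ // Whiten the pixels outside the mask and count the tongue-area pixels in areaCount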
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At(i, j);
+ if (mat_maskSRG.At(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At(i, j).Item2);
+ mat_extraction.Set(i, j, pix);
+ areaCount++;
+ }
+ }
+ }
+ mat_extraction.SaveImage(path_base + "\\extraction" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_extraction.Image = bitmap_bitch;
+ pictureBox_extraction.Refresh();
+ });
+ label6.BackColor = Color.White;
+
+ /// Color extraction using the 5-point click method
+ var path_colorMatrixXYZ = "xyz.txt";
+
+ // Color extraction helper class
+ ColorExtractor ce = new ColorExtractor();
+
+ // Create the masked tongue-region image
+ Mat mat_finalMask = mat_maskSRG.Clone();
+ Mat mat_maskedImg = new Mat();
+ mat_input.CopyTo(mat_maskedImg, mat_finalMask);
+
+ // 5-point click method (Ishikawa 2010)
+ List<OpenCvSharp.Point> list_5points_3 = ce.Get5points(mat_finalMask, ColorExtractor.FivePointMethod.Method3);
+
+ // Get the 8 sampling areas
+ List<OpenCvSharp.Point> list_8area_3 = ce.Get8area(list_5points_3);
+
+ // Draw the areas on the image
+ var mat_areaDicision = ce.ShowResult(mat_input.Clone(), list_5points_3, list_8area_3);
+
+ // Extract the colors
+ List<Scalar> list_8Bgr = ce.Get8colors(mat_maskedImg, list_8area_3);
+
+ // Color conversion (RGB -> XYZ -> Lab)
+ List<Scalar> list_8Lab = ce.Calc8Lab(list_8Bgr, path_calib, path_colorMatrixXYZ);
+
+ // Save the color values to CSV
+ string CSVfilename = path_base + "\\color" + "\\" + name_dir + ".csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ CSV_data.WriteLine("Area,R,G,B,L,a,b");
+ for (int i = 0; i < list_8Bgr.Count(); i++)
+ {
+ string str = (i + 1).ToString() + ",";
+ str +=
+ list_8Bgr[i].Val2.ToString("0.0000") + "," +
+ list_8Bgr[i].Val1.ToString("0.0000") + "," +
+ list_8Bgr[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val1.ToString("0.0000") + "," +
+ list_8Lab[i].Val2.ToString("0.0000");
+ CSV_data.WriteLine(str);
+ }
+ CSV_data.Close();
+
+ // Dispose
+ mat_finalMask.Dispose();
+ mat_maskedImg.Dispose();
+ GC.Collect();
+ System.Threading.Thread.Sleep(100);
+
+ mat_areaDicision.SaveImage(path_base + "\\autoAreaDecision" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_areaDicision);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_last.Image = bitmap_bitch;
+ pictureBox_last.Refresh();
+ });
+ label7.BackColor = Color.White;
+
+ // Processing log
+ label8.BackColor = Color.Red;
+ time = DateTime.Now.ToLocalTime().ToString();
+ File.AppendAllText("Log.txt", time + " " + "\\" + name_dir + ".bmp" + " Done!\n");
+
+ // Write the info row
+ sw = new StreamWriter(fileName_info, true, System.Text.Encoding.GetEncoding("shift_jis"));
+ sw.Write(
+ name_dir.ToString() + ","
+ // bounding box
+ + P1.X.ToString() + "," + P1.Y.ToString() + ","
+ + P2.X.ToString() + "," + P2.Y.ToString() + ","
+ // area
+ //+ areaCount.ToString() + ","
+ );
+ foreach (var n in list_5points_3)
+ sw.Write(n.X + "," + n.Y + ",");
+ foreach (var n in list_8area_3)
+ sw.Write(n.X + "," + n.Y + ",");
+ sw.Write("\n");
+
+ sw.Close();
+ // Dispose
+
+ }
+ GC.Collect();
+ MessageBox.Show("Finished!");
+
+ Invoke((MethodInvoker)delegate
+ {
+ button_start.Enabled = true;
+ button_pause.Enabled = false;
+ label_processingFileName.Text = "Processing File: None";
+ });
+ }
+ }
+
+ private void Button_pause_Click(object sender, EventArgs e)
+ {
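+ // Toggle the manualReset event that the worker loops block on (manualReset.WaitOne()) to pause or resume processing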
+ if (button_pause.Text == "Pause")
+ {
+ manualReset.Reset();
+ button_pause.Text = "Continue";
+ }
+ else
+ {
+ manualReset.Set();
+ button_pause.Text = "Pause";
+ }
+ }
+
+ private void Form1_FormClosing(object sender, FormClosingEventArgs e)
+ {
+ Console.WriteLine("file closing");
+ if (sw != null)
+ {
+ sw.Close();
+ }
+ Console.WriteLine("file closed");
+ }
+
+ private void comboBox1_SelectedIndexChanged(object sender, EventArgs e)
+ {
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/Main/Tongue extraction/Form1.resx b/Main/Tongue extraction/Form1.resx
new file mode 100644
index 0000000..e1424ee
--- /dev/null
+++ b/Main/Tongue extraction/Form1.resx
@@ -0,0 +1,1080 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ 17, 17
+
+
+ 58
+
+
+
+
+ AAABAAEAa4AAAAEAIAAo3gAAFgAAACgAAABrAAAAAAEAAAEAIAAAAAAAANYAAGM4AABjOAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYA5eTmA+Xk5gLl5OYB5ePlAOjs6gDl5OUA4dznAOfn
+ 5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXj5gDl5OYA5eTmAOXj5gHl5OYA5enrAObe4QDl5OYC5eTmBOXk
+ 5gTl5OYD5eTmAebk5wDk5OcA5+XkAOXj6ADl5ecA5+HnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5QDl4+UA5eTlCeXk5kPl5OYl5uXmBujl
+ 5gDn5eYA3OTmAOXl5QDl5eUA5ebjAOXk5gPl5OYE5eTmA+Xk5gLl5OYB5uPmAO3o7gDj5OQA6+LsAObl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmA+Xk5gDl5OZN5eTm/+Xk
+ 5vjl5Obe5eTmuuXk5pDl5OZj5eTmOeXk5hbx5ewB/+T/AOfk5wDl6OoA5OPlAOTk5QDl5OYB5eTmBOXk
+ 5gTl5OYD5eTmAuXk5gHk4eYA5uHkAObk5QDk4ugA5eXlAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOTj5QDl5OYD5eTmAOXk
+ 5qfl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm8eXk5s/l5Oap5eTmfeXk5lDl5OYq5eTmCubn
+ 5gDm5uYA5d7mAOXm5QDk6OQA5OjkAOXl5gPl5OYE5eTmBOXk5gLl5OYB5eTmAObo5gDt8+IA5ubmAOXl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm4+YA5eTmAeXk
+ 5gDl5OYW5eTm7OXk5v/l5Ob95eTm/eXk5vzl5Ob75eTm++Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5vzl5Obl5eTmwOXk5pbl5OZp5eTmP+Xk5hvm5+YC5+jmAObl5gDr5OYA5OTnAOTk5wDm5OcB5eTmBOXk
+ 5gTl5OYD5eTmAuXk5gHl5OcA4uXkAOTl5QDq5eoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYD5eTmAOXk5lfl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm++Xk
+ 5vvl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm9eXk5tXl5Oau5eTmg+Xk5lbl5OYu5eTnDuPk
+ 5QDj5OUA5+XoAOXj6ADg89UA4e7XAOXk5gLl5OYE5eTmBOXk5gLl5OYB5eTmAObl5gDl5OUA5uTmAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADj4uMA5uTmAOXk5gPl5OYA5eTmpOXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/eXk5vzl5Ob75eTm++Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v7l5Obp5eTmx+Xk5p7l5OZw5eTmROXk5h/m5eYE5eXmAObk5gDl5OYA5eTmAOXk5gDl5OYB5eTmA+Xk
+ 5gTl5OYD5eTmAuXk5gHl5OUA6tneAOTm5wDk4uQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOTk5QDl5OYB5eTmAOXk5hDl5Obl5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm/OXk
+ 5vvl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm+OXk5tvl5Oa25eTmiuXk5l3l5OYz5uPmEsvl
+ zQDg5eIA5+PnAObk5gDm5OYA5uTmAOXk5wHp4+UA6uPlALb09QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eXnAOXk5gPl5OYA5eTmSeXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5vzl5Ob75eTm++Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Obt5eTmzOXk5qPl5OZ25eTmS+Xk5iXl5ecH5eXnAOPj5QDf398A39/fAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOji6ADk4+UA5eTmBOXk5gDl5OaT5eTm/+Xk5vvl5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm/OXk
+ 5vvl5Ob85eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm+eXk5ufl5OaY5eXnAOXl5wHl4+UAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5OTlAOXk5wHl5ecA5eXoB+Xk5tjl5Ob/5eTm/eXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v3l5Ob75eTm++Xk5v3l5Ob75eTm/+Xk5n3l5OYA5eTmBOXm
+ 6AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmA+Xk5gDl5OY95eTm/+Xk
+ 5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obr5eTmGOXk
+ 5gDl5OYB4ODgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA7entAOXk5gDl5OYE5eTmAOXk
+ 5onl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk
+ 5pbl5OYA5eTmA+Xk5QDm5ugAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmAePk
+ 5gDj5OYD5eTm0+Xk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTmNeXk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYD5eTmAOXk5j3l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob85eTm/+Xk5r3l5OYA5eTlAeXk5wDl5OUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADl5OcA6OTlAOXk5gTl5OYA5eTmkeXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmW+Xk5gDl5OYD5eTmAObm6gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eXmAOXj5gDl5OYB5uXmAObk5hLl5Obl5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5uPl5OYP5eTmAOXk5gHk5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAObk5gDk5OYA5OPmAOXk5gbl5OYA5eTmcuXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmleXk5gDl5OYD5eXmAOfm5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXl5QDl5OYA5uTmAObk5QDl5OYF5eTmAOXk5jfl5Oby5eTm/+Xk5v7l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5OZD5eTmAOXk5gPl5OcAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOTj5wDl5OUA5OPmAOTj5gDl5OYC5eTmBeXk5gDl5OY35eTm6+Xk5v/l5Ob+5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm3+Tj5grk4+YA5eTmAeXl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA6ungAOfm4wDi4ekA5OTnAOTk5wDl5OYA5uTmAOXj
+ 5QDl4+UA5eXnAOXk5gHl5OYB5eTmAeXk5gLl5OYE5eTmA+Xk5gHl5OYA5eTmVuXk5vTl5Ob/5eTm/eXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oaf5eTmAOXk
+ 5gPl5OYA5eXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5ecA3eHjAN3g4wDl5OYC5eTmBOXk5gPm5OUC5+TlAeTk
+ 5QHi5OYB5unrAOPs7gDT2dsA0NvdANzl5wAAAAAA5eTmAOXk5gHk5OUA5eTmHeXk5qHl5Ob/5eTm/eXk
+ 5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk
+ 5l7l5OYA5eTmBOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAObl5gDm5eYA5eTmAuPh5QDj4uUA5OPmAAAA
+ AAAAAAAAAAAAAAAAAACl5OYA/+DmAP/R2wDg4OUB5uTmBeXk5hHl5OYp5eTmWOXk5qbl5Ob05eTm/+Xk
+ 5vzl5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
+ 5v/l5Ob+5eTmK+Xk5gDl5OYC5OTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDl5OYA4eLlAOTk5gLj5OUA5eTmLOXk
+ 5oHl5Oat5eTmvuXk5sTl5ObG5eTmyOXk5svl5ObM5eTmz+Xk5tTl5Obd5eTm7OXk5vzl5Ob/5eTm/+Xk
+ 5v7l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob95eTm/+Xk5uTl5OYM5eTmAOXk5gEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePmAObl5gDl5OYC5eTmAOXk
+ 5nXl5Ob/5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5vzl5Ob75eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmyuTk5QDk5OUB5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OUA5eTmA+Xk
+ 5gDl5OZM5eTm/+Xk5v3l5Ob75eTm++Xk5vzl5Ob85eTm/OXk5vzl5Ob85eTm/eXk5v3l5Ob95eTm/eXk
+ 5v7l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oa15eTmAOXk5gLl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYC5eTmAOXk5rjl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5q3l5OYA5eTmA+bk5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAebk5gDm5OYX5eTm7uXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmsOXk5gDl5OYD5eTmAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXl5gDl5OYD5eTmAOXk5kXl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oa/5eTmAOXk
+ 5gLl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gTl5OYA5eTmcuXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5tbm5eUA5eTmAOXk5gEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5uUA5eTmBOXk5gDl5Oac5eTm/+Xk5vvl5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v7l5Ob/5eTm7+Xk5hfl5OYA5eTmAebm5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gDl5OYC5eTmAOXk5rvl5Ob/5eTm/OXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTmPOXk5gDl5OYD5eXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOjm7ADj4uIA5eTm1OXk
+ 5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OZ15eTmAOXk5gTk4+QAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYB5uPmAObj
+ 5gvl5Obj5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5rXl5OYA5eTmAuXl5gDo6OgAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gHl5OYA5eTmFuXk5u/l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm7uXk5hbl5OYA5eTmAeXk
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAuXk5gDl5OYi5eTm++Xk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmXOXk
+ 5gDl5OYD5eTmAObm5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OYC5eTmAOXk5i/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
+ 5v/l5Oaz5eTmAOXk5gLl5OYA5+PnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gPl5OYA5eTmPuXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob+5eTm/+Xk5vfl5OYn5eTmAOXk5gLl4+YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmA+Xk5gDl5OZM5eTm/+Xk5v3l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5oPl5OYA5eTmBOjj5gDl5OUAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOvr6wDl5OYE5eTmAOXk5lzl5Ob/5eTm++Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm3+Xk5g3l5OYA5eTmAeXk5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePlAOXk5gTl5OYA5eTmcuXk
+ 5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmYOXk5gDl5OYD5eTlAOXl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADf3+wA5eTmBOXk
+ 5gDl5OaK5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5ObJ5NG+AOTm
+ 7ADl5OUA5OPnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObk
+ 5gDl5OYE5eTmAOXk5qPl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5v/l5OZI5eTmAOXk5gPl5eYA5ubmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAOXk5gHl5OYA5eTmw+Xk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob85eTm/+Xk5rLl5OYA5eTmAubk5gDl4+YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OYB5eTnAOTk5wrl5Obi5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm/eXk5jPl5OYA5eTmAuXk5wDm4uYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5OTmAOXk5gLl5OYA5eTmK+Xk5v7l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmmuXk5gDl5OYD5OPmAOXl5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmBOXk5gDl5OZe5eTm/+Xk5vzl5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obx5ePmHuXk5gDl5OYC5eTlAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6OjoAOXl5QDl5OYD5eTmAOXk5qLl5Ob/5eTm++Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/k4+b/4+Lm/+Pj5v/j4+b/4+Pm/+Pj5v/j4+b/4+Pm/+Pi5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lm/+Pj5v/j4+b/4+Pm/+Pj5v/j4+b/4+Pm/+Pi
+ 5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OZ65eTmAOXk
+ 5gTn5OUA4+LjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANvb2wDl5OYA5eTmAeXk5gDl5OYQ5eTm5eXk
+ 5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPm/+nn5v/w7OX/7erm/+7q5v/u6ub/7urm/+7q5v/u6ub/7uvm/+bl
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+bk5v/u6+b/7urm/+7q5v/u6ub/7urm/+7q
+ 5v/t6ub/8Ozl/+nn5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5trl5OUK5eTlAOXk5QHl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj4+gA5eTmAOXk5gDl5OYE5eTmAOXk
+ 5ljl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5v/p5+b/0NTn/2WF7f9df+3/X4Ht/1+A7f9fgO3/X4Ht/1t+
+ 7f9ujOz/3t/m/+bl5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/m5eb/4ODm/3CN7P9bfu3/X4Ht/1+A
+ 7f9fgO3/X4Ht/15/7f9khO3/ztPn/+ro5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob85eTm/+Xk5lrl5OYA5eTmA+Xk5gDk4eQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj4wDl5eYA5uXoAN7e1wDl5OcB5eTmBOTk
+ 5QLj5OUA5eTmwuXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ht5f+ot+n/Ekfx/x5R8P8cT/H/HE/x/xxP
+ 8f8dT/H/HE/x/xxP8f/Gzej/7Onm/+Li5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm/+vp5v/Jz+f/HlDw/xtO
+ 8f8dT/H/HE/x/xxP8f8cT/H/H1Hw/xBG8f+ktOn/8u3l/+Li5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmvubk5gDm5OYB5eTmAObk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePmAOXh5gDl5uUA5ePlAOXk5gLl5OYE5eTmA+Xk
+ 5gHn5eIA5ubiAuXk5pXl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6u56f8gUvD/K1rw/ypZ
+ 8P8pWfD/KVnw/ylZ8P8sW/D/HlHw/2yL7P/07+X/5+Xm/+Hh5v/i4ub/4uLm/+Hh5v/m5eb/9fDl/3GO
+ 7P8eUfD/LFvw/ylZ8P8pWfD/KVnw/ylZ8P8sW/D/HlDw/6i26f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTmOuXk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADq6esA5eTmAOXk5gDm5OcA5eTmA+Xk5gTm5ugB5eXnAOXl
+ 5wDn5+gA5uXnB+Xk5k/l5ObN5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q
+ 8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8oWPD/HlHw/3eT7P/h4eb/8u7l//Ht5f/x7eX/8u7l/+Lh
+ 5v97lez/H1Hw/yhY8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ypZ8P8bTvH/p7bp//Ht5f/i4ub/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oac5eTmAOXk5gPm4+YA5OLlAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk5gDl5OYA5eTmAuXk5QLl6OcA5d3kAOXg
+ 5QPl5OYl5eTmXeXk5pzl5Obf5eTm/+Xk5v7l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds
+ 5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8oWPD/HE/x/zxn7/95k+z/mKvq/5mr
+ 6v96lOz/PWjv/xxP8f8oWPD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/Klnw/xxO8f+mten/7uvm/+Dg
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5u/m5OYb5eTmAOXk
+ 5gHm5OcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5gDl5OYA5eTmAOXk5gLl5eYB7OHuAOXk
+ 5jrl5OaT5eTm0+Xk5vrl5Ob/5eTm/+Xk5v/l5Ob85eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/IlPw/xpN
+ 8f8aTvH/Gk7x/xpN8f8iU/D/Klnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/G07x/626
+ 6f/+9+X/7erm/+Tk5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk
+ 5nTl5OYA5eTmBOXk5wDm5eYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTnAOXk5wDl5OYC5eTmAOXk
+ 5ifl5Oa95eTm/+Xk5v7l5Ob/5eTm/+Xk5vzl5Ob75eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8oWPD/Klnw/ypZ8P8qWfD/Klnw/yhY8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/yhY
+ 8P8kVPD/T3Tu/3iT7P+ruen/6efm/+nn5v/j4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v3l5Ob/5eTm0eff6APo3+gA5uPmAeXj5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTmAuXk
+ 5gDl5OY05eTm7eXk5v/l5Ob85eTm++Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/yhY8P8gUvD/Gk3x/xxO8f9Ye+3/2dvn/+nn5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTmR+Xk5gDl5OYD5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5wHl5ecA5eXoBuXk5tHl5Ob/5eTm+uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ylY8P8qWfD/K1rw/xZK8f9ZfO3/6efm/+Tj5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oaj5eTmAOXk5gPl4+YA5eTlAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmA+Xk5gDl5OZC5eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/Klnw/yBR8P+5w+j/7uvm/+Pj
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5u7l5OYa5eTmAOXk5gHm5OYAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OYE5eTmAOXk5lzl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/HU/x/6Oz
+ 6f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5mfl5OYA5eTmBOXk
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gPl5OYA5eTmR+Xk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q5
+ 6f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/KFjw/yhY
+ 8P8kVfD/xczo/+zp5v/j4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmveXk
+ 5wDl5OcB5eTlAOXj5gAAAAAAAAAAAAAAAAAAAAAA5eTmAeXk5gDl5OYa5eTm8OXk5v/l5Ob+5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5OPl/+Pi4//i4eP/4+Lk/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li
+ 5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/KVjw/ypZ
+ 8P8mVvD/FEnx/3iT7P/v6+b/4+Pm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
+ 5v/l5Ob55eTmJ+Xk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmAuXk5gDl5Oat5eTm/+Xk
+ 5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Pi5P/m5ef/7+/y//Hx9P/s7O//5OLk/+Tj5f/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/yhY
+ 8P8eUfD/G07x/yxb8P+Cm+v/6Obm/+bl5v/k5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm++Xk5v/l5OZw5eTmAOXk5gTm5OcAAAAAAAAAAAAAAAAAAAAAAOXl5wDl5OYD5eTmAOXk
+ 5kfl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/j4uT/6+rt/+Hg4v+tqKb/nJWS/8C9vP/s7O//5uXn/+Tj5f/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8oWPD/IVLw/2yK7P+ntun/z9Tn//Ht5f/l5Ob/5OPm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5r7l5eUA5eXmAeXj5gDj4egAAAAAAAAAAAAAAAAA5eTmAOXk
+ 5gDk5OYB5OTnAOXk5rrl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+vq7f/Oy8v/VkpC/zEiGP8yIxj/NSYc/4F4c//o5+n/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ypZ8P8bTvH/sL3p//735f/n5eb/4uLm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm9eXk5h/l5OYA5eTmAubl5QAAAAAAAAAAAAAA
+ AADk5OcA5ePmAOXk5gLl5OYA5eTmN+Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5ef/4uHj/1dLQ/8wIRb/QDIo/z8xJ/8+MCb/KxwQ/5KK
+ h//w8PL/4+Hj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/Klnw/xxO8f+ltOn/7uvm/+Hh5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmWuXk5gDl5OYE5OXnAAAA
+ AAAAAAAAAAAAAAAAAADl4+UA5+nnAOXk5gPl5OYA5eTmkOXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/k4+X/4uHj/+Lh4//i4eP/4N/g/+3t8P+knZv/MCEW/0AyKf88LiT/PC4k/z0v
+ Jv86LCL/Rjkw/9fV1v/l5Ob/4uDi/+Lh4//i4eP/4uHj/+Xk5v/j4ub/8Ozl/6u56f8eUPD/KVjw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/G07x/6e26f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oae5eTmAOXk
+ 5gPk5OYAAAAAAAAAAAAAAAAAAAAAAOno6QDm5OYA5eTlAeTk5ADl5OUN5eTm1uXk5v/l5Ob95eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+rq7P/x8fT/8PDz//Hw8//u7vH/+/z//3lvaf8xIhj/PzEn/zwu
+ JP88LiT/PC4k/z4wJv8zJRr/u7e2//v8///u7vH/8fDz//Hx8//w8PP/5+bo/+Li5v/w7Ob/qrjp/yBS
+ 8P8sW/D/Klnw/ypZ8P8qWfD/Klnw/ypZ8P8qWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ypZ8P8qWfD/Klnw/ypZ8P8qWfD/Klnw/y1b8P8eUPD/p7bp//Ht5f/i4ub/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5tXk4+YD5eTmAOXk5gHm5uYAAAAAAAAAAAAAAAAAAAAAAOXk5QDl5OYA5eTmA+Xk5gDl5OZA5eTm/eXk
+ 5v7l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/p6Ov/ycXF/6CZlv+knpv/o52a/6Kcmf+noZ//VkpC/zco
+ Hv89LyX/PC4k/zwuJP88LiT/PTAm/zUnHP98c27/qqSi/6KbmP+jnZv/opyZ/6ein//d3N3/5OTo//Ds
+ 5f+suun/Ekjx/xxP8f8aTvH/Gk3x/xpN8f8aTfH/Gk3x/xtO8f8mV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/G07x/xpN8f8aTfH/Gk3x/xpN8f8aTvH/HE/x/xBG8f+pt+n/8e3l/+Li
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v7l5Ob/5eTm++Xk5ifl5OYA5eTmAuXk5gAAAAAAAAAAAAAAAAAAAAAA5+fnAOXk5gDl5OYA5eTmBOXk
+ 5gDl5OZ85eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+7t8P9rYFn/JxcM/zMkGv8xIhf/MSIX/zAh
+ F/85KyH/PS8l/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zUnHP8wIRb/MSIX/zEiF/8zJBn/KBkN/6ym
+ pP/v7/L/5ePj/9zd5v+Tp+n/iqDp/4yh6f+Loen/i6Hp/4uh6f+Noun/hJvq/y5c8P8mVvD/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/Jlbw/yxb8P+Dm+r/jaPp/4uh6f+Loen/i6Hp/4yh6f+KoOn/kqbp/9rc
+ 5v/n5ub/5OTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmV+Xk5gDl5OYD5eTnAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk
+ 5gDl5OYA5ePmAuHi5ADl5Oay5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4uT/7Ozu/2pgWf81Jx3/QDMp/z4x
+ J/8+MSf/PzEn/z0vJf88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjAm/z8xJ/8+MSf/PjEn/0E0
+ Kv8zJBr/qKKg//Dw8//i4eP/5+bm//Pv6//z7+3/8+/s//Pv7P/z7+z/8+/s//Pv7f/08Ov/SXHu/x9R
+ 8P8rWvD/J1fw/ydX8P8nV/D/J1fw/ypZ8P8gUvD/RW3u//Lu6//08O3/8+/s//Pv7P/z7+z/8+/s//Pv
+ 7f/z7+v/5+bm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OaG5eTmAOXk5gTl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAOTj5QDl5OYC5OPlAOTk5hbl5Obc5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/s7O//a2BZ/zMk
+ Gf8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PjEn/zEiF/+po6H/8PDz/+Ph4//l5Of/3tzM/93bxf/d28f/3dvH/93bx//d28f/2tnG/+nk
+ zP+ntun/Gk7x/yNU8P8qWvD/Klnw/ypZ8P8qWvD/JFTw/xlN8f+js+n/6eXM/9rZxv/d28f/3dvH/93b
+ x//d28f/3dvF/97czP/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5rTl5OYA5eTmA+bl5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYD5eTmAOXk5jnl5Ob25eTm/+Xk5v7l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+zs
+ 7/9qYFn/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP8+MSf/MSIX/6ijoP/w8PH/5OPs/97buv/GvSz/x74v/8e+L//Hvi7/x74u/8e+
+ L//GvSv/x788/+zo2P+gsez/Ml/v/xtO8f8aTvH/Gk7x/xtO8f8xXu//nK7s/+zo2v/Hvzz/xr0r/8e+
+ L//Hvi7/x74u/8e+L//Hvi//xr0r/97buf/m5u//5eTk/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm1OPg5QHk4uUA5eTmAQAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5ecA5eTmAOTk5gDl5OYE5eTmAOXk5mHl5Ob/5eTm/OXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/j4uT/7Ozv/2pgWf8zJBn/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/qKOg//Dw8P/k5O7/29ir/8W8Jf/HvzL/x74w/8e+
+ MP/HvjD/x74w/8e+Mv/GvCf/2teu//Lv9P/X2eH/n7Dp/3+Z6/9/mOv/nq/p/9XY4f/y7/T/2tev/8a8
+ J//HvjL/x74w/8e+MP/HvjD/x74w/8e/M//FvCT/29ep/+fm8v/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obt5eTmEuXk
+ 5gDl5OYBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5eUA5eTmAP///wDl5OYE5eTmAOXk
+ 5ovl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Pi5P/s6+7/al9Y/zMkGv8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PzEn/zIjGP+po6H/8PDw/+Tk7v/c2K7/xrwn/8e+
+ Mv/HvjD/x74w/8e+MP/HvjD/x74y/8a9K//LwkX/397U/+vq9f/y7uj/8u7l//Lu5f/y7uj/6+r1/+Df
+ 1f/Lw0b/xr0r/8e+Mv/HvjD/x74w/8e+MP/HvjD/x78z/8W8Jv/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk
+ 5vzl5OYp5eTmAOXk5gIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5uPlAOXk
+ 5gHl4+YC3NfmAOXk5rTl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/k4+X/4eDi/+/v8f9sYVr/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z0vJf9AMyn/Lh8U/6agnv/w8PH/5OPt/9vY
+ rP/FvCf/x74y/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8a9Kf/KwUD/2dWh/+Df2P/i4uj/4uLo/+Df
+ 2P/Z1aL/ysJA/8a9Kf/HvjL/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob95eTm/+Xk5jrl5OYA5eTmAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADl5OYA5eTmAOXk5gLj4+YA5OPmE+Xk5tfl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+bl5//u7fD/4+Lk/2RaUv80JRv/PjAm/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP89LyX/OSsh/zUmHP8/MSj/tK+s/+7t
+ 7P/l5fP/3Nmy/8W8Jv/HvjL/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8a9LP/FvCX/ycA8/8zF
+ UP/MxVD/ycA8/8W8Jf/GvSz/x74y/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm
+ 8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmSuXk5gDl5OYDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAPT09QDl5OYA5eTmAOXk5gPm5OYA5eTmMuXk5vPl5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5uj/5eTm/4qCff9KPTT/PTAm/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zgqIP9ENi3/pqCe/9rY
+ 2P/m5e3/5uXs/+Ti3v/Y1Jj/xrwo/8e+Mv/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74x/8e/
+ Mv/HvS3/xrwp/8a8Kf/HvS3/x74y/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8
+ Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5OZW5eTmAOXk5gMAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDl5OYA5eTmAOXk5gTl5OYA5eTmXuXk5v/l5Ob85eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+zr7v95cGr/KxsQ/zkrIf88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJf87LSP/PC4k/725
+ uf/19ff/6Ojx/97cvv/PyGH/ycA5/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+Mf/HvjL/x74y/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5lvl5OYA5eTmBAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5ADl5OYA4+flAOXk5gTl5OYA5eTmluXk
+ 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/u7vH/trGw/zIjGP8/MSj/PS8l/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PzEn/zEi
+ F/+DenX/8/P0/+Lh6v/b16v/xr0q/8W8Jf/Hvi7/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmW+Xk5gDl5OYEAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5wD/+eIA5eTmAebk
+ 5QDm5OYN5eTm1uXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Hj//Dv8v97cm3/MSIY/z8x
+ J/88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP89LyX/Nige/725t//t7fL/4d/Y/8nAPP/GvSv/yL80/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5OZW5eTmAOXk
+ 5gMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYA5eTmA+Xk5gDl5OZL5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4+X/6urs/2JX
+ UP80Jhv/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PS8l/zssIv9BMyr/0c/O/+vr9v/b167/xrwo/8e/M//HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5knl5OYA5eTmAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5+bnAOXk5gDl5OYA5uTkAeTj7QHl5ObQ5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj
+ 5f/q6uz/Y1hR/zQlG/8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LyX/Oywi/0EzKv/Rzs3/6+v1/9vYsf/GvCj/x78z/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v3l5Ob/5eTmOeXk5gDl5OYDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eXlAOXk5gDl5OYC5eTmAOXk5rTl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/4+Hj//Dw8v99dG//MSIX/z8xJ/88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP89LyX/Nicd/7u3tf/t7fH/4uDd/8rCRP/GvCf/yL81/8e+
+ Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj
+ 4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob+5eTm/+Xk5vzl5OYn5eTmAOXk5gIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4+TjAOXk5gTl5OYA5eTmmuXk5v/l5Ob75eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/j4uT/7u3w/7m1tP8zJBn/PzEn/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/f3Zx//Pz9P/i4en/3dq6/8i/
+ Nf/FuyP/xr0r/8e+Lv/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vY
+ rP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm7eXk5hHl5OYA5eTmAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmBOXk5gDl5OZ05eTm/+Xk
+ 5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4uT/7ezv/392cf8rGxD/OCof/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP86LCL/uLOy//X2
+ 9//p6fL/4d/P/9PNdv/Lw0j/yMA3/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5ObRk73YAOHi5QDl5OYBAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5ADl5OYD5eTmAOXk
+ 5k7l5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5ef/5+bp/5GKhv9QQzv/PzEn/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zkr
+ If9AMyn/npiV/9XS0f/k4+n/5+bw/+bl7P/a1qP/xrwn/8e+Mv/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5q3l5OYA5eTmA+Tj5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gLl5OYA5eTmJeXk5vvl5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/l5Ob/7u7w/+jn
+ 6f9mW1T/NCUa/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PS8l/zosIv80JRv/Oiwi/7Ktqv/u7ez/5eTw/9zZsP/FvCb/x74y/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmf+Xk5gDl5OYE5uXnAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5uTmAefj5wDo4+gF5eTm2uXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xj
+ 5f/h4OL/7u7w/2thWv8zJBn/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PS8l/0EzKf8vIBX/p6Ge//Dw8f/k4+3/29is/8W8J//HvjL/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5OZK5eTmAOXk
+ 5gPj4ucAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OcA5eTmA+Xk5gDl5Oan5eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Pi5P/s6+7/al9Z/zMkGv8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjEn/zIjGP+po6H/8PDw/+Tk7v/c2K7/xrwo/8e/
+ M//HvjH/x74x/8e+Mf/HvjH/x74x/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74x/8e+Mf/HvjH/x74x/8e+Mf/HvjH/yL8z/8W8Jv/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm8eXk
+ 5hnl5OYA5eTmAeXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXj5gDl5OYE5eTmAOXk5mbl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+zs7/9qYFn/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP8+MSf/MSIX/6ijoP/w8PD/5OTv/9vX
+ qv/FuyL/x74v/8e9Lf/HvS3/x70t/8e9Lf/HvS3/x70u/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvS7/x70t/8e9Lf/HvS3/x70t/8e9Lf/HvjD/xLsh/9vXqf/n5vL/5ePj/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
+ 5v/l5ObD5eTmAOXk5wHl5OYA////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5+XlAOXk5gLl5OYA5eTmJuXk5vjl5Ob/5eTm/uXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/j4uT/7Ozv/2pgWf8zJBn/PjAm/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/qKOg//Dw
+ 8f/k4+v/39zA/8nAO//JwDz/ycE8/8nAPP/JwDz/ycA7/8nBPP/JwDr/x74x/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74x/8nAOv/JwTz/ycA7/8nAPP/JwDz/ycE8/8nAPP/JwDr/39y//+bm
+ 7v/l5OT/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob75eTm/+Xk5nvl5OYA5eTmBObk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzP8A5uTmAOTj5gHk4+YA5eTmwuXk
+ 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/s7O//a2BZ/zMkGv8+MCb/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjEn/zEi
+ GP+po6H/8PDz/+Lh4//l5ej/4+Lb/+Lh1v/i4df/4uHW/+Lh1v/i4db/4+HZ/+Hfzv/JwT7/x70u/8e+
+ Mf/HvjD/x74w/8e+MP/HvjD/x74x/8e+Lv/JwTz/4d/N/+Ph2f/i4db/4uHW/+Lh1v/i4df/4uHW/+Pi
+ 2//l5Oj/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTmM+Xk5gDl5OYC5uXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmBOXk
+ 5gDl5OZy5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5OLk/+zs7v9qX1j/NScc/0Ay
+ Kf8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4w
+ Jv9BMyn/MiQZ/6ehn//w8PP/4uHj/+Xk5f/l5en/5uXq/+bl6v/m5er/5uXq/+bl6v/l5Oj/5+fz/9HL
+ bv/EuyD/yL82/8e+Mf/HvjD/x74w/8e+Mf/Ivzb/xLsg/9HLbP/n5/P/5eTo/+bl6v/m5er/5uXq/+bl
+ 6v/m5er/5eXp/+Xk5f/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk5s3m3t4A4P7/AOXk5gDk4+UAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObl
+ 5gDl5OYC5eTnAOXk5yHl5Obz5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/j4uT/7u7w/21j
+ XP8oGQ3/NSYb/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIk
+ Gf8yJBn/MiQZ/zQlG/8qGxD/raim//Dv8v/j4eP/5eTm/+Xk5f/l5OX/5eTl/+Xk5f/l5OX/5eTl/+Xk
+ 5P/m5en/4uHW/8vESv/EuyD/xr0s/8e+L//Hvi//xr0s/8S7IP/Lw0j/4uDV/+bl6v/l5OT/5eTl/+Xk
+ 5f/l5OX/5eTl/+Xk5f/l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmeOXk5gDl5OYE5eTmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5OTlAObk5wDl5OYD5eTmAOXk5qjl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj
+ 5f/p6Or/zcrL/6iioP+rpqT/q6Wj/6ulo/+rpaP/q6Wj/6ulo/+rpaP/q6Wj/6ulo/+rpaP/q6Wj/6ul
+ o/+rpaP/q6Wj/6ulo/+rpqP/qqSi/6+qqP/f3t//5uXo/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5P/m5ez/4+Lc/9TPgP/KwkX/yL80/8i/NP/KwkX/1M9//+Pi2//m5ez/5eTk/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5vXl5OYj5eTmAOXk5gLl5OcAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5+XlAOXk5gPl5OYA5eTmQuXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Tj5f/q6ev/8PDz//Dv8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw
+ 8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw8v/w8PL/7+/y/+bl6P/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5P/l5en/5+fy/+Tj4f/h387/4d/N/+Tj4f/n5/L/5eXp/+Xk
+ 5P/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmpeXk5gDl5OYD5eTmAObl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTmAObk5gHm5OYA5eTmwOXk5v/l5Ob85eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/i4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph
+ 4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4uP/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5f/l4+P/5eTn/+bl7P/m5ez/5eTn/+Xj
+ 4//l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5OY+5eTmAOXk
+ 5gPl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmA+Xk5gDl5OZM5eTm/+Xk
+ 5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5OX/5eTl/+Xk
+ 5f/l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmtOXk
+ 5gDl5OYC5eTmAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gDn5eYA4+PlAeLj
+ 5QDl5Oa55eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
+ 5v/l5OY/5eTmAOXk5gPl5OcA4uLiAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ubmAOXk
+ 5gDl5OYC5eTmAOXk5jnl5Ob95eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5vzl5Ob/5eTmquXk5gDl5OYC5eTnAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmANzi8QDl5OYD5eTmAOXk5pLl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob+5eTm/+Xk5vXl5OYq5eTmAOXk5gLl5OYA5uTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADr6usA5eTmAOXk5gHl5eYA5eXmEeXk5tvl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmfuXk5gDl5OYD5+TlAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTnAOXk5gPl5OYA5eTmSOXk5v/l5Ob95eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5svl4+YG6ODkAOXk5gHl5OYA5ubmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTlAOXk5gPl5OYA5eTmh+Xk
+ 5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Ob35eTmNOXk5gDl5OYC5eXmAOXk5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj5QDm5OYA5eTmAebl
+ 5gHi4OcC5eTmueXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5m7l5OYA5eTmBObj5gDl5OYA6OjoAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYA5eTmAuXk5gDl5OYW5eTm2eXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oaf5eTlAOXk5gPl5ecA5eTmAObm
+ 5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXk5gDl5OYA5eTmA+Xk5gDl5OYp5eTm6uXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmxOTl5giU7fsA5eTmAePk
+ 5wDl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5OTmAOXk5gDl5OYA5eTmA+Xk5gDl5OY45eTm8OXk5v/l5Ob85eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5tbl5eYW5eXmAOXk
+ 5gLl5OYA5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5uXmAOXk5gDl5OYA5eTmBOXk5gDl5OY75eTm8OXk
+ 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Obf5eTmI+bk
+ 5gDl5OYD5eTmAOXk5gDl5ecAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYA5eTmBOXk
+ 5gDl5OY05eTm5+Xk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTm3uXk
+ 5iTl5OYA5eTmA+Xk5gDl5OYA4+PoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXl
+ 5wDl5ecA5eTmBOXk5gDl5OYi5eTm0+Xk5v/l5Ob75eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk
+ 5tLl5OYf5eTmAOXk5gPl5OYA5eTmAOfl5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5OPlAOXl5gDl5eYA5eTmA+fk5gDm5OYO5eTmr+Xk5v/l5Ob85eTm/eXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm++Xk
+ 5v/l5Oa65uTmEObj5QDl5OYD5eTmAOXk5gDm5ucAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5uPmAOXk5gDl5OYA5eTmAuXk5wLi4+cA5eTmeOXk5vzl5Ob/5eTm++Xk
+ 5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
+ 5v3l5Ob/5eTmjePm6QLm5OUB5eTmAuXk5gDl5OYA4eHjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk5gDl5OYA5eTmAeXk5gTl5OYA5eTmN+Xk
+ 5tTl5Ob/5eTm/OXk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5vvl5Ob/5eTm6+Xk5lTl5OYA5eTmA+Xk5gLl5OYA5eTmAObj5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eXlAOXk5gDl5OYA5eTmAeXk
+ 5gTm4+cA5ePmCOXk5oTl5Ob45eTm/+Xk5vzl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5vzl5Ob+5eTm/+Xk5rHm5OYb5eTmAOXk5gTl5OYB5eTmAOXk5gDf398AAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5wDn5OQA5uTlAOXk5gPl5OYD5eTmAOXk5inl5Oa05eTm/+Xk5v/l5Ob95eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5vzl5Ob+5eTm/+Xk5uDl5OZU4+LlAOTj5gHl5OYD5eTmAOXk5gDl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOLi6QDl5OYA5eTmAOXk5gHl5OYD5OLmAeTi5wDl5OZC5eTmw+Xk5v/l5Ob/5eTm/uXk
+ 5vvl5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/OXk
+ 5vzl5Ob+5eTm/+Xk5uzl5OZ55uTmC+bk5gDl5OYD5eTmAuXk5gDl4+YA5uXmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5ecA5OPlAOTj5QDl5OYC5eTmA/Hs4gD//9kA5eTmQuXk
+ 5rfl5Ob75eTm/+Xk5v/l5Ob95eTm++Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob85eTm++Xk
+ 5v/l5Ob+5eTm/+Xk5uPl5OZ65ePlE+Xk5gDl5OcC5eTmA+bk5gDm5OYA5eTmAP///wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj4+MA5eTmAOXl5gDl5eUA5eTmA+Xk
+ 5gLk4+EA5OPjAOXk5ivl5OaL5eTm3+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm++Xk5vzl5Ob95eTm/uXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v7l5Ob95eTm/OXk5vvl5Ob95eTm/+Xk
+ 5v/l5Ob/5eTm+eXk5r7l5OZa5eTmC+Xj5gDl5OYB5eTmA+Xk5gHm5OYA5uTmAOTk5AAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6ujqAOXk
+ 5gDm5eYA5uXmAOXk5gPl5OYC5ebnAOXl5gDl5eYI5eTmReXk5pLl5ObW5eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v3l5Ob85eTm/OXk5vzl5Ob85eTm/eXk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Obw5eTmu+Xk5nLl4+Yj5ersAObn6gDk5OYB5eTmBOXk5gHl5OYA5eTmAObk5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXk5wDl5OYA5eTnAOXk5wDl5OYC5eTmBOXl5wDl4eYA5eDnAOTh6QTl5OYq5eTmXuXk
+ 5pDl5Oa+5eTm3uXk5vPl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Obu5eTm0uXk
+ 5q7l5OZ+5eTmRubk5hXk5OYA5OTmAOTk5gDl5eYC5eTmBOXk5gHj4+UA5OPmAOfl5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm4+YA5eTmAOXl5wDk5ugA5eTmAeXk5gPl5OYE5uPmAObj
+ 5gDk5OYA5+TmAOnm5gDl5eYF5eTmGuXk5i/l5OY+5eTmT+Xk5lXl5OZY5eTmVOXk5kjl5OY65eTmJ+Xk
+ 5hLo4eYC6ODnAObj5gDl5OcA5eTnAObk5gLl5OYE5eTmAuXi5QDk4+MA5OTlAOfj5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDk5OUA5uHoAODy
+ 3ADl4+cB5eTmAuXk5gTl5OYE5OTmAubi5QDm4uYA4ejmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADl5eQA5eXjAOXl4wDl5OYD5eTmBOXk5gPl5OYB5OTmAOfl6wDm5ecA5ePlAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOTk5ADo5usA5OLkAP///wDm4+YA5eTmAeXk5gHl5OYC5eTmA+Xk5gPl5OYD5eTmA+Xk
+ 5gPl5OYD5eTmA+Xk5gLl5OYB5ePmAObk5wDi598A6OPoAOTk5gDm5uYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA/////////////////+AAAP///////oD////////gAAD///////6YB///////4AAA///////8
+ hEAf/////+AAAP///////IAVAX/////gAAD///////yAAIgH////4AAA///////5AAACoC///+AAAP//
+ ////+QAAABCAv//gAAD///////kAAAAAVAX/4AAA///////yAAAAAAIl/+AAAP//////8gAAAAAACv/g
+ AAD///////IAAAAAAAT/4AAA///////0AAAAAAAE/+AAAP//////5AAAAAAABf/gAAD//////+QAAAAA
+ AAn/4AAA///////IAAAAAAAJ/+AAAP//////yAAAAAAAEf/gAAD//////4gAAAAAABP/4AAA//////+Q
+ AAAAAAAT/+AAAP/////+UAAAAAAAJ//gAAD//////SAAAAAAACf/4AAA//////JAAAAAAAAn/+AAAP//
+ /wAAgAAAAAAAT//gAAD///IAcgAAAAAAAE//4AAA///ovIAAAAAAAABP/+AAAP//6gAAAAAAAAAAX//g
+ AAD//8QAAAAAAAAAAJ//4AAA///IAAAAAAAAAACf/+AAAP//yAAAAAAAAAAAn//gAAD//9AAAAAAAAAA
+ AJ//4AAA///QAAAAAAAAAACf/+AAAP//0AAAAAAAAAAAX//gAAD//5AAAAAAAAAAAF//4AAA//+QAAAA
+ AAAAAABP/+AAAP//sAAAAAAAAAAAT//gAAD//6AAAAAAAAAAAE//4AAA//+gAAAAAAAAAAAn/+AAAP//
+ oAAAAAAAAAAAJ//gAAD//6AAAAAAAAAAACP/4AAA//+gAAAAAAAAAAAT/+AAAP//oAAAAAAAAAAAE//g
+ AAD//6AAAAAAAAAAAAn/4AAA//8gAAAAAAAAAAAJ/+AAAP//oAAAAAAAAAAAAP/gAAD//yAAAAAAAAAA
+ AAT/4AAA//8gAAAAAAAAAAAEf+AAAP//QAAAAAAAAAAAAn/gAAD//kAAAAAAAAAAAAJ/4AAA//5AAAAA
+ AAAAAAABP+AAAP/+QAAAAAAAAAAAAT/gAAD//IAAAAAAAAAAAACf4AAA//qAAAAAAAAAAAAAn+AAAP/A
+ gAAAAAAAAAAAAI/gAAD9AgAAAAAAAAAAAABP4AAA9CgAAAAAAAAAAAAAT+AAAOmAAAAAAAAAAAAAACfg
+ AADQAAAAAAAAAAAAAAAn4AAAqAAAAAAAAAAAAAAAE+AAAJAAAAAAAAAAAAAAABPgAACgAAAAAAAAAAAA
+ AAAT4AAAoAAAAAAAAAAAAAAACeAAAKAAAAAAAAAAAAAAAAngAACgAAAAAAAAAAAAAAAJ4AAAoAAAAAAA
+ AAAAAAAABOAAAJAAAAAAAAAAAAAAAATgAACQAAAAAAAAAAAAAAAE4AAAiAAAAAAAAAAAAAAAAuAAAMgA
+ AAAAAAAAAAAAAAJgAADEAAAAAAAAAAAAAAACYAAA5AAAAAAAAAAAAAAAAWAAAOoAAAAAAAAAAAAAAAFg
+ AAD1AAAAAAAAAAAAAAABIAAA+IAAAAAAAAAAAAAAASAAAPqAAAAAAAAAAAAAAAEgAAD9QAAAAAAAAAAA
+ AAAAoAAA/qAAAAAAAAAAAAAAAKAAAP9QAAAAAAAAAAAAAACgAAD/SAAAAAAAAAAAAAAAoAAA/6gAAAAA
+ AAAAAAAAAKAAAP/UAAAAAAAAAAAAAACgAAD/6gAAAAAAAAAAAAAAoAAA//EAAAAAAAAAAAAAAKAAAP/1
+ AAAAAAAAAAAAAACgAAD/+oAAAAAAAAAAAAAAoAAA//wAAAAAAAAAAAAAAKAAAP/+QAAAAAAAAAAAAACg
+ AAD//kAAAAAAAAAAAAAAoAAA//9AAAAAAAAAAAAAAKAAAP//QAAAAAAAAAAAAAEgAAD//0AAAAAAAAAA
+ AAABIAAA//9AAAAAAAAAAAAAAWAAAP//IAAAAAAAAAAAAAFgAAD//yAAAAAAAAAAAAACYAAA//8gAAAA
+ AAAAAAAAAmAAAP//kAAAAAAAAAAAAAJgAAD//5AAAAAAAAAAAAAG4AAA//+QAAAAAAAAAAAABOAAAP//
+ yAAAAAAAAAAAAATgAAD//8gAAAAAAAAAAAAJ4AAA///EAAAAAAAAAAAACeAAAP//5AAAAAAAAAAAABHg
+ AAD//+IAAAAAAAAAAAAT4AAA///yAAAAAAAAAAAAI+AAAP//8QAAAAAAAAAAACfgAAD///kAAAAAAAAA
+ AABX4AAA///6gAAAAAAAAAAAT+AAAP///UAAAAAAAAAAAK/gAAD///4AAAAAAAAAAAFf4AAA///+oAAA
+ AAAAAAACP+AAAP///1AAAAAAAAAAAr/gAAD///+oAAAAAAAAAAV/4AAA////1AAAAAAAAAAK/+AAAP//
+ /+oAAAAAAAAAFf/gAAD////lAAAAAAAAACv/4AAA////+oAAAAAAAABX/+AAAP////0gAAAAAAAAL//g
+ AAD////+kAAAAAAAAl//4AAA/////0gAAAAAAAS//+AAAP////+SAAAAAAARf//gAAD/////6IAAAAAA
+ Jf//4AAA//////JAAAAAAIv//+AAAP/////9CAAAAAIv///gAAD//////0IAAAAQn///4AAA///////Q
+ QAAAov///+AAAP//////+hIACQv////gAAD///////8Bf6Bf////4AAA////////9AAD/////+AAAP//
+ ///////////////gAAA=
+
+
+
\ No newline at end of file
diff --git a/Main/Tongue extraction/Program.cs b/Main/Tongue extraction/Program.cs
new file mode 100644
index 0000000..c2f1ac0
--- /dev/null
+++ b/Main/Tongue extraction/Program.cs
@@ -0,0 +1,66 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+using System.Text;
+
+namespace Tongue_extraction
+{
+ static class Program
+ {
+ [STAThread]
+ static void Main()
+ {
+ try
+ {
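+ // Global error handling: route UI-thread exceptions to Application.ThreadException and all other unhandled exceptions to AppDomain.UnhandledException, so failures are reported instead of crashing silently.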
+ Application.SetUnhandledExceptionMode(UnhandledExceptionMode.CatchException);
+ Application.ThreadException += new System.Threading.ThreadExceptionEventHandler(Application_ThreadException);
+ AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException);
+
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new Form1());
+ }
+ catch (Exception ex)
+ {
+ string str = GetExceptionMsg(ex, string.Empty);
+ MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ }
+ }
+
+
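+ // Called for unhandled exceptions raised on the Windows Forms UI thread.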
+ static void Application_ThreadException(object sender, System.Threading.ThreadExceptionEventArgs e)
+ {
+ string str = GetExceptionMsg(e.Exception, e.ToString());
+ MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ //LogManager.WriteLog(str);
+ }
+
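+ // Called for unhandled exceptions from non-UI threads anywhere in the AppDomain.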
+ static void CurrentDomain_UnhandledException(object sender, UnhandledExceptionEventArgs e)
+ {
+ string str = GetExceptionMsg(e.ExceptionObject as Exception, e.ToString());
+ MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ //LogManager.WriteLog(str);
+ }
+
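+ // Formats the exception type, message, and stack trace into a readable report for the message box (and, if enabled, the log).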
+ static string GetExceptionMsg(Exception ex, string backStr)
+ {
+ StringBuilder sb = new StringBuilder();
+ sb.AppendLine("****************************Exception Text****************************");
+ sb.AppendLine("【Time】:" + DateTime.Now.ToString());
+ if (ex != null)
+ {
+ sb.AppendLine("【Exception Type】:" + ex.GetType().Name);
+ sb.AppendLine("【Exception Information】:" + ex.Message);
+ sb.AppendLine("【Stack Call】:" + ex.StackTrace);
+ }
+ else
+ {
+ sb.AppendLine("【Unhandled Exception】:" + backStr);
+ }
+ sb.AppendLine("***************************************************************");
+ return sb.ToString();
+ }
+ }
+}
diff --git a/Main/Tongue extraction/Properties/AssemblyInfo.cs b/Main/Tongue extraction/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..604a626
--- /dev/null
+++ b/Main/Tongue extraction/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Tongue extraction")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Tongue extraction")]
+[assembly: AssemblyCopyright("Copyright © 2018")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("d382f9e7-a41d-4d82-a59b-cf4095134d6b")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Main/Tongue extraction/Properties/Resources.Designer.cs b/Main/Tongue extraction/Properties/Resources.Designer.cs
new file mode 100644
index 0000000..84d1885
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace Tongue_extraction.Properties {
+ using System;
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Tongue_extraction.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/Main/Tongue extraction/Properties/Resources.resx b/Main/Tongue extraction/Properties/Resources.resx
new file mode 100644
index 0000000..af7dbeb
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Resources.resx
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Main/Tongue extraction/Properties/Settings.Designer.cs b/Main/Tongue extraction/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..15b8fb1
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Settings.Designer.cs
@@ -0,0 +1,26 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace Tongue_extraction.Properties {
+
+
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default {
+ get {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/Main/Tongue extraction/Properties/Settings.settings b/Main/Tongue extraction/Properties/Settings.settings
new file mode 100644
index 0000000..3964565
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Settings.settings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/ColorSubdivision.csproj b/Tongue extraction_cropresizemethod/ColorSubdivision/ColorSubdivision.csproj
deleted file mode 100644
index 9f1ebea..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/ColorSubdivision.csproj
+++ /dev/null
@@ -1,151 +0,0 @@
-
-
-
-
- Debug
- AnyCPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}
- WinExe
- ColorSubdivision
- ColorSubdivision
- v4.7.1
- 512
- true
- true
-
- publish\
- true
- Disk
- false
- Foreground
- 7
- Days
- false
- false
- true
- 0
- 1.0.0.%2a
- false
- false
- true
-
-
- x64
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE
- prompt
- 4
-
-
- x64
- pdbonly
- true
- bin\Release\
- TRACE
- prompt
- 4
-
-
- true
- bin\x64\Debug\
- DEBUG;TRACE
- full
- x64
- 7.3
- prompt
- MinimumRecommendedRules.ruleset
- true
-
-
- bin\x64\Release\
- TRACE
- true
- pdbonly
- x64
- 7.3
- prompt
- MinimumRecommendedRules.ruleset
- true
-
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.dll
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Blob.dll
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Extensions.dll
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.UserInterface.dll
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Form
-
-
- Form1.cs
-
-
-
-
- Form1.cs
-
-
- ResXFileCodeGenerator
- Resources.Designer.cs
- Designer
-
-
- True
- Resources.resx
- True
-
-
- SettingsSingleFileGenerator
- Settings.Designer.cs
-
-
- True
- Settings.settings
- True
-
-
-
-
-
-
-
- Always
-
-
-
-
- False
- Microsoft .NET Framework 4.7.1 %28x86 and x64%29
- true
-
-
- False
- .NET Framework 3.5 SP1
- false
-
-
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.Designer.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.Designer.cs
deleted file mode 100644
index 1b0e6fd..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.Designer.cs
+++ /dev/null
@@ -1,84 +0,0 @@
-namespace ColorSubdivision
-{
- partial class Form1
- {
- /// <summary>
- /// Required designer variable.
- /// </summary>
- private System.ComponentModel.IContainer components = null;
-
- /// <summary>
- /// Clean up any resources being used.
- /// </summary>
- /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
- protected override void Dispose(bool disposing)
- {
- if (disposing && (components != null))
- {
- components.Dispose();
- }
- base.Dispose(disposing);
- }
-
- #region Windows Form Designer generated code
-
- /// <summary>
- /// Required method for Designer support - do not modify
- /// the contents of this method with the code editor.
- /// </summary>
- private void InitializeComponent()
- {
- this.button1 = new System.Windows.Forms.Button();
- this.button2 = new System.Windows.Forms.Button();
- this.RichTextBox1 = new System.Windows.Forms.RichTextBox();
- this.SuspendLayout();
- //
- // button1
- //
- this.button1.Location = new System.Drawing.Point(115, 47);
- this.button1.Name = "button1";
- this.button1.Size = new System.Drawing.Size(75, 23);
- this.button1.TabIndex = 0;
- this.button1.Text = "Button1";
- this.button1.UseVisualStyleBackColor = true;
- this.button1.Click += new System.EventHandler(this.Button1_Click);
- //
- // button2
- //
- this.button2.Location = new System.Drawing.Point(115, 105);
- this.button2.Name = "button2";
- this.button2.Size = new System.Drawing.Size(75, 23);
- this.button2.TabIndex = 1;
- this.button2.Text = "Button2";
- this.button2.UseVisualStyleBackColor = true;
- this.button2.Click += new System.EventHandler(this.Button2_Click);
- //
- // RichTextBox1
- //
- this.RichTextBox1.Location = new System.Drawing.Point(370, 138);
- this.RichTextBox1.Name = "RichTextBox1";
- this.RichTextBox1.Size = new System.Drawing.Size(100, 96);
- this.RichTextBox1.TabIndex = 2;
- this.RichTextBox1.Text = "";
- //
- // Form1
- //
- this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
- this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
- this.ClientSize = new System.Drawing.Size(800, 450);
- this.Controls.Add(this.RichTextBox1);
- this.Controls.Add(this.button2);
- this.Controls.Add(this.button1);
- this.Name = "Form1";
- this.ResumeLayout(false);
-
- }
-
- #endregion
-
- private System.Windows.Forms.Button button1;
- private System.Windows.Forms.Button button2;
- private System.Windows.Forms.RichTextBox RichTextBox1;
- }
-}
-
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.cs
deleted file mode 100644
index 9d64c49..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.cs
+++ /dev/null
@@ -1,56 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Data;
-using System.Drawing;
-using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using System.Windows.Forms;
-using OpenCvSharp;
-
-namespace ColorSubdivision
-{
- public partial class Form1 : Form
- {
- public Form1()
- {
- InitializeComponent();
- }
-
- private void Button1_Click(object sender, EventArgs e)
- {
- var path = @"D:\kei2\Solutions\DeepTongue\LocalRepository\Tongue extraction_cropresizemethod\Tongue extraction\bin\x64\Debug\mask_final\20180315093610.jpg";
- using (Mat mat_input = Cv2.ImRead(path, ImreadModes.Grayscale))
- {
- var mat_dst = new Mat(mat_input.Size(), mat_input.Type());
- bool isEdge = false;
- for (int i = 0; i < mat_input.Height; i++)
- {
- if(!isEdge)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- if (mat_input.At<byte>(i, j) > 200)
- {
- mat_dst.Set<byte>(i, j, 100);
- isEdge = true;
- }
- }
- }
- }
- Cv2.ImShow("input", mat_input);
- Cv2.ImShow("dst", mat_dst);
- mat_dst.Dispose();
- }
- GC.Collect();
-
- }
-
- private void Button2_Click(object sender, EventArgs e)
- {
-
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.resx b/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.resx
deleted file mode 100644
index 1af7de1..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.resx
+++ /dev/null
@@ -1,120 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Program.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Program.cs
deleted file mode 100644
index b7233a7..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Program.cs
+++ /dev/null
@@ -1,22 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading.Tasks;
-using System.Windows.Forms;
-
-namespace ColorSubdivision
-{
- static class Program
- {
- /// <summary>
- /// The main entry point for the application.
- /// </summary>
- [STAThread]
- static void Main()
- {
- Application.EnableVisualStyles();
- Application.SetCompatibleTextRenderingDefault(false);
- Application.Run(new Form1());
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/AssemblyInfo.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/AssemblyInfo.cs
deleted file mode 100644
index 87b468f..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("ColorSubdivision")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("ColorSubdivision")]
-[assembly: AssemblyCopyright("Copyright © 2020")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible
-// to COM components. If you need to access a type in this assembly from
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("ad42a573-7ac3-4714-9d53-db9921815cbb")]
-
-// Version information for an assembly consists of the following four values:
-//
-// Major Version
-// Minor Version
-// Build Number
-// Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.Designer.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.Designer.cs
deleted file mode 100644
index 6ca7260..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.Designer.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace ColorSubdivision.Properties {
- using System;
-
-
- /// <summary>
- /// A strongly-typed resource class, for looking up localized strings, etc.
- /// </summary>
- // This class was auto-generated by the StronglyTypedResourceBuilder
- // class via a tool like ResGen or Visual Studio.
- // To add or remove a member, edit your .ResX file then rerun ResGen
- // with the /str option, or rebuild your VS project.
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
- [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- internal class Resources {
-
- private static global::System.Resources.ResourceManager resourceMan;
-
- private static global::System.Globalization.CultureInfo resourceCulture;
-
- [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
- internal Resources() {
- }
-
- /// <summary>
- /// Returns the cached ResourceManager instance used by this class.
- /// </summary>
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Resources.ResourceManager ResourceManager {
- get {
- if (object.ReferenceEquals(resourceMan, null)) {
- global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("ColorSubdivision.Properties.Resources", typeof(Resources).Assembly);
- resourceMan = temp;
- }
- return resourceMan;
- }
- }
-
- /// <summary>
- /// Overrides the current thread's CurrentUICulture property for all
- /// resource lookups using this strongly typed resource class.
- /// </summary>
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Globalization.CultureInfo Culture {
- get {
- return resourceCulture;
- }
- set {
- resourceCulture = value;
- }
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.resx b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.resx
deleted file mode 100644
index af7dbeb..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.resx
+++ /dev/null
@@ -1,117 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.Designer.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.Designer.cs
deleted file mode 100644
index 41f48bd..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.Designer.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace ColorSubdivision.Properties {
-
-
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
- internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
-
- private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
-
- public static Settings Default {
- get {
- return defaultInstance;
- }
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.settings b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.settings
deleted file mode 100644
index 3964565..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.settings
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction.sln b/Tongue extraction_cropresizemethod/Tongue extraction.sln
deleted file mode 100644
index d3135fb..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction.sln
+++ /dev/null
@@ -1,37 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Version 16
-VisualStudioVersion = 16.0.30011.22
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DeepTIAS1.9", "Tongue extraction\DeepTIAS1.9.csproj", "{AFD610B1-8D23-423A-AA0F-B09BA769BDD7}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Any CPU = Debug|Any CPU
- Debug|x64 = Debug|x64
- Debug|x86 = Debug|x86
- Release|Any CPU = Release|Any CPU
- Release|x64 = Release|x64
- Release|x86 = Release|x86
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.ActiveCfg = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.Build.0 = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.ActiveCfg = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.Build.0 = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.Build.0 = Release|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.ActiveCfg = Release|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.Build.0 = Release|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.ActiveCfg = Release|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.Build.0 = Release|Any CPU
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
- GlobalSection(ExtensibilityGlobals) = postSolution
- SolutionGuid = {33F6C697-859B-4D55-9D28-998267FD09AE}
- EndGlobalSection
-EndGlobal
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/App.config b/Tongue extraction_cropresizemethod/Tongue extraction/App.config
deleted file mode 100644
index 8fc0551..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/App.config
+++ /dev/null
@@ -1,6 +0,0 @@
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/ColorExtractor.cs b/Tongue extraction_cropresizemethod/Tongue extraction/ColorExtractor.cs
deleted file mode 100644
index 9fe8d4d..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/ColorExtractor.cs
+++ /dev/null
@@ -1,758 +0,0 @@
-using OpenCvSharp;
-using System;
-using System.Collections.Generic;
-using System.Drawing;
-using System.IO;
-using System.Linq;
-using System.Windows.Forms;
-
-namespace Tongue_extraction
-{
- public partial class ColorExtractor
- {
- // Config
- const int RADIUS_COLORAREA = 10;
- public static Bitmap bitmap;
- float[] a = new float[17];
- float[] b = new float[17];
- float[] c = new float[17];
- float d;
- float e;
- float f;
- int k;
- public static bool m_getColor = false;
- public static Mat m_CalibFrame; // Image used for calibration
- public static OpenCvSharp.Point[] getRGBpoint = new OpenCvSharp.Point[24];// Points used for RGB sampling
- double[] m_BforLab = new double[24];
- double[] m_GforLab = new double[24];
- double[] m_RforLab = new double[24];
- public static bool m_bCalib;
- public static OpenCvSharp.Point pt = new OpenCvSharp.Point();// Point holder used during calibration
-
- public enum FivePointMethod {Method1, Method2, Method3};
-
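- // Returns five representative points on the tongue mask
- // (left, apex-left, bottom, apex-right, right); the point-selection
- // strategy depends on the FivePointMethod passed in.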
- public List<OpenCvSharp.Point> Get5points(Mat mat_finalMask, FivePointMethod method)
- {
- // For display
- var mat_dst = mat_finalMask.Clone();
- Cv2.CvtColor(mat_dst, mat_dst, ColorConversionCodes.GRAY2BGR);
-
- // Pixel coordinates of the tongue region in the mask
- var mat_nonZeroCoordinates = new Mat();
- Cv2.FindNonZero(mat_finalMask, mat_nonZeroCoordinates);
-
- // Collect every tongue-region pixel coordinate into lists
- var list_X = new List<int>();
- var list_Y = new List<int>();
- for (int i = 0; i < mat_nonZeroCoordinates.Total(); i++)
- {
- var x = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).X;
- var y = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(i).Y;
- list_X.Add(x);
- list_Y.Add(y);
- }
-
- if(method == FivePointMethod.Method1)
- {
- /// method1
- // Find the extreme points (scanning in raster order from the top-left)
- var p_top = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Min()));
- var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
- var p_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Min()));
- var p_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_X.IndexOf(list_X.Max()));
-
- // Get the y coordinate marking the tongue-tip region (ratio is hard-coded here)
- var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8));
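- // IndexOfAll (a helper assumed to be defined elsewhere in this class) should return
- // every index in list_Y whose value equals y_apex; since FindNonZero enumerates pixels
- // in raster order, the min/max of those indices give the leftmost/rightmost mask pixels on that row.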
- var p_apex_left = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Min());
- var p_apex_right = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(IndexOfAll(list_Y, y_apex).Max());
-
- // Debug visualization (left commented out)
- //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1);
- //Cv2.Circle(mat_dst, p_apex_right, 20, new Scalar(255, 255, 0), -1);
-
- mat_dst.Dispose();
- mat_nonZeroCoordinates.Dispose();
- GC.Collect();
-
- var li_dst = new List<OpenCvSharp.Point> { p_left, p_apex_left, p_bottom, p_apex_right, p_right };
- return li_dst;
- }
- else if (method == FivePointMethod.Method2)
- {
- /// method2
- // Compute the centroid (center of gravity) of the mask
- var moments = Cv2.Moments(mat_finalMask, true);
- var moment_x = moments.M10 / moments.M00;
- var moment_y = moments.M01 / moments.M00;
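- // Centroid of the binary mask: (M10/M00, M01/M00).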
-
- // Contour coordinates
- OpenCvSharp.Point[][] contours;
- HierarchyIndex[] hierarchy;
- Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
- var sortedContour = contours.OrderByDescending(n => Cv2.ContourArea(n)).ToList();
- var maxContour = sortedContour[0];
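- // Keep only the largest contour by area (assumed to be the tongue outline).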
-
- // Distance from the centroid to each contour point
- double maxDistance_lefttop = 0.0;
- double maxDistance_righttop = 0.0;
- var p_left_2 = new OpenCvSharp.Point();
- var p_right_2 = new OpenCvSharp.Point();
- for (int i = 0; i < maxContour.Length; i++)
- {
- // Above the centroid
- if (maxContour[i].Y < moment_y)
- {
- // Above and to the left of the centroid
- if (maxContour[i].X < moment_x)
- {
- var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
- if (distance > maxDistance_lefttop)
- {
- maxDistance_lefttop = distance;
- p_left_2 = maxContour[i];
- }
-
- }
- // Above and to the right of the centroid
- if (maxContour[i].X >= moment_x)
- {
- var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
- if (distance > maxDistance_righttop)
- {
- maxDistance_righttop = distance;
- p_right_2 = maxContour[i];
- }
- }
- }
- }
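- // p_left_2 and p_right_2 are now the contour points farthest from the centroid
- // in the upper-left and upper-right quadrants, respectively.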
- // Get the y coordinate marking the tongue-tip region (ratio is hard-coded here)
- var p_bottom = mat_nonZeroCoordinates.At<OpenCvSharp.Point>(list_Y.IndexOf(list_Y.Max()));
- var y_top_avg_ = (p_left_2.Y + p_right_2.Y) / 2.0;
- var y_apex_2 = (int)(y_top_avg_ + ((p_bottom.Y - y_top_avg_) * 0.57));
- var p_apex_left_2 = mat_nonZeroCoordinates.At