diff --git a/.gitignore b/.gitignore
index 7057677..7373aa2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -112,3 +112,6 @@
Tongue extraction_cropresizemethod/Tongue extraction/obj/x64/Release/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
Tongue extraction_cropresizemethod/Tongue extraction/obj/x64/Release/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
Tongue extraction_cropresizemethod/Tongue extraction/obj/x64/Release/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
+DeepTongue_feature_LabColor/feature_labColor/packages/
+Features/DeepTongue_feature_LabColor/feature_labColor/packages/
+Main/packages/
diff --git a/Features/.idea/Features.iml b/Features/.idea/Features.iml
new file mode 100644
index 0000000..5f9eaa6
--- /dev/null
+++ b/Features/.idea/Features.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Features/DeepLearning/.idea/DeepLearning.iml b/Features/DeepLearning/.idea/DeepLearning.iml
new file mode 100644
index 0000000..5bf3430
--- /dev/null
+++ b/Features/DeepLearning/.idea/DeepLearning.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git "a/Features/DeepLearning/Reference/Tang\047s/1.py" "b/Features/DeepLearning/Reference/Tang\047s/1.py"
new file mode 100644
index 0000000..9381c02
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/1.py"
@@ -0,0 +1,163 @@
+#pix2pixの学習を行なうプログラム
+#!/usr/bin/env python
+
+# python train_facade.py -g 0 -i ./facade/base --out result_facade --snapshot_interval 10000
+
+from __future__ import print_function
+import argparse
+import os
+
+import chainer
+from chainer import training
+from chainer.training import extensions
+from chainer import serializers
+
+from net import Discriminator
+from net import Encoder
+from net import Decoder
+from updater import PicUpdater
+
+from img_dataset import ImgDataset
+from pic_visualizer import out_image
+
+# dataset paths
+#学習に使う画像のある場所
+#実際の舌画像がある場所
+DATASET_SRC = "D:/test13/Sample16"
+
+#二値化した舌画像がある場所
+DATASET_DST = "D:/test13/Label16"
+
+#パラメータの保存場所
+SAVE_DIR = './pix2pix_param2'
+def main():
+ """#各種設定"""
+ #入出力画像サイズ
+ w_img = 256
+ h_img = 256
+ #コマンドライン上で--batchsize ○○のように打つか
+    #defaultの値を直接書き換える
+ #適宜書き換える場所
+ #1 バッチサイズ
+ #2 エポック数
+    #3 GPUの番号(負の番号でCPUを利用※時間がかかりすぎるため非推奨)
+ #4 実際の舌画像がある場所(上記のDATASET_SRCを書き換え)
+ #5 二値化した舌画像がある場所(上記のDATASET_DSTを書き換え)
+ #6 学習したパラメータの保存場所
+ #7 パラメータの保存間隔
+
+ parser = argparse.ArgumentParser(description='chainer implementation of pix2pix')
+ #1
+ parser.add_argument('--batchsize', '-b', type=int, default=4,
+ help='Number of images in each mini-batch')
+ #2
+ parser.add_argument('--epoch', '-e', type=int, default=4000,
+ help='Number of sweeps over the dataset to train')
+ #3
+ parser.add_argument('--gpu', '-g', type=int, default=0,
+ help='GPU ID (negative value indicates CPU)')
+ #4
+ parser.add_argument('--data_src', '-s', default=DATASET_SRC,
+ help='Directory of image files.')
+ #5
+ parser.add_argument('--data_dst', '-d', default=DATASET_DST,
+ help='Directory of ground truth image files.')
+ #6
+ parser.add_argument('--out', '-o', default=SAVE_DIR,
+ help='Directory to output the result')
+ parser.add_argument('--resume', '-r', default='',
+ help='Resume the training from snapshot')
+ parser.add_argument('--seed', type=int, default=0,
+ help='Random seed')
+ #7
+ parser.add_argument('--snapshot_interval', type=int, default=5000,
+ help='Interval of snapshot')
+ parser.add_argument('--display_interval', type=int, default=5000,
+ help='Interval of displaying log to console')
+ args = parser.parse_args()
+
+ print('GPU: {}'.format(args.gpu))
+ print('# Minibatch-size: {}'.format(args.batchsize))
+ print('# epoch: {}'.format(args.epoch))
+ print('')
+
+ # Set up a neural network to train
+ enc = Encoder(in_ch=3)
+ dec = Decoder(out_ch=3)
+ dis = Discriminator(in_ch=3, out_ch=3)
+
+ if args.gpu >= 0:
+ chainer.cuda.get_device(args.gpu).use() # Make a specified GPU current
+ enc.to_gpu() # Copy the model to the GPU
+ dec.to_gpu()
+ dis.to_gpu()
+
+ # Setup an optimizer
+ def make_optimizer(model, alpha=0.0002, beta1=0.5):
+ optimizer = chainer.optimizers.Adam(alpha=alpha, beta1=beta1)
+ optimizer.setup(model)
+ optimizer.add_hook(chainer.optimizer.WeightDecay(0.00001), 'hook_dec')
+ return optimizer
+ opt_enc = make_optimizer(enc)
+ opt_dec = make_optimizer(dec)
+ opt_dis = make_optimizer(dis)
+
+ train_d = ImgDataset(args.data_src, args.data_dst, data_range=(0,0.9))
+ test_d = ImgDataset(args.data_src, args.data_dst, data_range=(0.9,1))
+ #train_iter = chainer.iterators.MultiprocessIterator(train_d, args.batchsize, n_processes=4)
+ #test_iter = chainer.iterators.MultiprocessIterator(test_d, args.batchsize, n_processes=4)
+ train_iter = chainer.iterators.SerialIterator(train_d, args.batchsize)
+ test_iter = chainer.iterators.SerialIterator(test_d, args.batchsize)
+
+ # Set up a trainer
+ updater = PicUpdater(
+ models=(enc, dec, dis),
+ iterator={
+ 'main': train_iter,
+ 'test': test_iter},
+ optimizer={
+ 'enc': opt_enc, 'dec': opt_dec,
+ 'dis': opt_dis},
+ device=args.gpu)
+ trainer = training.Trainer(updater, (args.epoch, 'epoch'), out=args.out)
+
+ snapshot_interval = (args.snapshot_interval, 'iteration')
+ display_interval = (args.display_interval, 'iteration')
+ trainer.extend(extensions.snapshot(
+ filename='snapshot_iter_{.updater.iteration}.npz'),
+ trigger=snapshot_interval)
+ #イテレータごとにパラメータの保存
+ trainer.extend(extensions.snapshot_object(
+ enc, 'enc_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
+ trainer.extend(extensions.snapshot_object(
+ dec, 'dec_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
+ trainer.extend(extensions.snapshot_object(
+ dis, 'dis_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
+ trainer.extend(extensions.LogReport(trigger=display_interval))
+ trainer.extend(extensions.PrintReport([
+ 'epoch', 'iteration', 'enc/loss', 'dec/loss', 'dis/loss',
+ ]), trigger=display_interval)
+ trainer.extend(extensions.ProgressBar(update_interval=10))
+ trainer.extend(
+ out_image(
+ updater, enc, dec,
+ 1, 1, args.seed, args.out, args.gpu,
+ w_img, h_img),
+ trigger=snapshot_interval)
+ #最終的な学習結果を保存
+ trainer.extend(extensions.snapshot_object(
+ enc, 'enc_epoch_{.updater.epoch}.npz'), trigger=(args.epoch,'epoch'))
+ trainer.extend(extensions.snapshot_object(
+ dec, 'dec_epoch_{.updater.epoch}.npz'), trigger=(args.epoch,'epoch'))
+ trainer.extend(extensions.snapshot_object(
+ dis, 'dis_epoch_{.updater.epoch}.npz'), trigger=(args.epoch,'epoch'))
+
+ if args.resume:
+ # Resume from a snapshot
+ chainer.serializers.load_npz(args.resume, trainer)
+
+ # Run the training
+ trainer.run()
+
+if __name__ == '__main__':
+ main()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/20181017.py" "b/Features/DeepLearning/Reference/Tang\047s/20181017.py"
new file mode 100644
index 0000000..bcc5ce9
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/20181017.py"
@@ -0,0 +1,1195 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+import tensorflow.contrib.slim as slim
+import numpy as np
+import csv
+import os
+import glob
+import random
+import collections
+import math
+import time
+
+train_input_sample = "D:/test13/Sample16_new_revenge1/" # training data set dir
+train_input_label = "D:/test13/Label16_new_revenge1/"
+
+train_input_sample2 = "D:/test13/VS16_new_revenge1/" # validation data set dir
+train_input_label2 = "D:/test13/VL16_new_revenge1/"
+
+test_input_dir = "D:/result_contract/Otherkind/Tongue/20191119/testinput/"
+test_input_label = "D:/result_contract/Otherkind/Tongue/20191119/testlabel"
+test_output_dir = "D:/result_contract/Otherkind/Tongue/20191119/testoutput/"
+
+train_output = "D:/Result_RE_Revenge109/training" # training images output dir
+validation_output = "D:/Result_RE_Revenge109/validation/" # validation images output dir
+
+checkpoint = "D:/Result_RE_Revenge109_checkpoint/" # model saving dir
+
+# 创建目录
+if not os.path.exists(train_output):
+ os.makedirs(train_output)
+if not os.path.exists(validation_output):
+ os.makedirs(validation_output)
+if not os.path.exists(checkpoint):
+ os.makedirs(checkpoint)
+if not os.path.exists(test_output_dir):
+ os.makedirs(test_output_dir)
+
+log = open(r'D:/Result_RE_Revenge109_checkpoint/log.txt', 'a') # log saving dir
+loss_csv = open(r'D:/Result_RE_Revenge109_checkpoint/loss_csv.csv', 'a', newline='') # loss value saving dir
+
+seed = None
+max_steps = 20000 # number of training steps (0 to disable)
+max_epochs = None # number of training epochs
+
+progress_freq = 50 # display progress every progress_freq steps
+trace_freq = 0 # trace execution every trace_freq steps
+display_freq = 50 # write current training images every display_freq steps
+validation_freq = 50
+
+save_freq = None # save model every save_freq steps, 0 to disable
+
+separable_conv = False # use separable convolutions in the generator
+aspect_ratio = 1.0 # aspect ratio of output images (width/height)
+batch_size = 2 # help="number of images in batch")
+which_direction = "AtoB" # choices=["AtoB", "BtoA"])
+ngf = 64 # help="number of generator filters in first conv layer")
+ndf = 64 # help="number of discriminator filters in first conv layer")
+scale_size = 256 # help="scale images to this size before cropping to 256x256")
+validation_size = 256
+
+flip = False # flip images horizontally
+
+brightness = False
+contrast = False
+# hue = False
+# saturation = False
+gamma = False
+
+lr = 0.0001 # initial learning rate for adam
+beta1 = 0.5 # momentum term of adam
+l1_weight = 100.0 # weight on L1 term for generator gradient
+gan_weight = 1.0 # weight on GAN term for generator gradient
+
+EPS = 1e-12 # Very small number, preventing gradient loss to 0
+CROP_SIZE = 256 # Crop size of the image
+
+# Named tuples for storing loaded data collections to create good models
+Examples = collections.namedtuple("Examples", "paths, inputs, targets, count, steps_per_epoch")
+# Model = collections.namedtuple("Model", "outputs, predict_real, predict_fake, discrim_loss,"
+# "discrim_grads_and_vars, gen_loss_GAN, gen_loss_L1,"
+# "gen_grads_and_vars, train, validate")
+Model = collections.namedtuple("Model", "outputs, gen_loss_L1,"
+ "gen_grads_and_vars, train, validate")
+
+
+# Image preprocessing [0, 1] => [-1, 1]
+# def preprocess(image):
+# with tf.name_scope("preprocess"):
+# return image * 2 - 1
+
+
+# Image post processing [-1, 1] => [0, 1]
+# def deprocess(image):
+# with tf.name_scope("deprocess"):
+# return (image + 1) / 2
+
+
+# 判别器的卷积定义,batch_input为 [ batch , 256 , 256 , 6 ]
+# def discrim_conv(batch_input, out_channels, stride):
+# # [ batch , 256 , 256 , 6 ] ===>[ batch , 258 , 258 , 6 ]
+# padded_input = tf.pad(batch_input, [[0, 0], [1, 1], [1, 1], [0, 0]], mode="CONSTANT")
+# '''
+# [0,0]: 第一维batch大小不扩充
+# [1,1]:第二维图像宽度左右各扩充一列,用0填充
+# [1,1]:第三维图像高度上下各扩充一列,用0填充
+# [0,0]:第四维图像通道不做扩充
+# '''
+# return tf.layers.conv2d(padded_input, out_channels, kernel_size=4, strides=(stride, stride), padding="valid",
+# kernel_initializer=tf.random_normal_initializer(0, 0.02))
+
+
+# 生成器的卷积定义,卷积核为4*4,步长为2,输出图像为输入的一半
+def gen_conv(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ return tf.layers.separable_conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ return tf.layers.conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ kernel_initializer=initializer)
+
+
+def gen_conv2(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ return tf.layers.separable_conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put = tf.layers.conv2d(batch_input, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put2 = tf.layers.conv2d(out_put, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put3 = tf.layers.max_pooling2d(inputs=out_put2, pool_size=[2, 2], strides=2, padding="same")
+ return out_put3
+
+
+def gen_conv3(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ return tf.layers.separable_conv2d(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put = tf.layers.conv2d(batch_input, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put2 = tf.layers.conv2d(out_put, out_channels, kernel_size=3, strides=(1, 1), padding="same")
+ return out_put2
+
+
+# 生成器的反卷积定义
+def gen_deconv(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ _b, h, w, _c = batch_input.shape
+ resized_input = tf.image.resize_images(batch_input, [h * 2, w * 2],
+ method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
+ return tf.layers.separable_conv2d(resized_input, out_channels, kernel_size=4, strides=(1, 1), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ return tf.layers.conv2d_transpose(batch_input, out_channels, kernel_size=4, strides=(2, 2), padding="same",
+ kernel_initializer=initializer)
+
+
+def gen_deconv2(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ _b, h, w, _c = batch_input.shape
+ resized_input = tf.image.resize_images(batch_input, [h * 2, w * 2],
+ method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
+ return tf.layers.separable_conv2d(resized_input, out_channels, kernel_size=4, strides=(1, 1), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put = tf.layers.conv2d_transpose(batch_input, out_channels, kernel_size=3, strides=(2, 2), padding="same",
+ kernel_initializer=initializer)
+ out_put2 = tf.layers.conv2d(out_put, out_channels, kernel_size=3, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ out_put3 = tf.layers.conv2d(out_put2, out_channels, kernel_size=3, strides=(1, 1), padding="same")
+ return out_put3
+
+
+def gen_deconv3(batch_input, out_channels):
+ # [batch, in_height, in_width, in_channels] => [batch, out_height, out_width, out_channels]
+ initializer = tf.random_normal_initializer(0, 0.02)
+ if separable_conv:
+ _b, h, w, _c = batch_input.shape
+ resized_input = tf.image.resize_images(batch_input, [h * 2, w * 2],
+ method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
+ return tf.layers.separable_conv2d(resized_input, out_channels, kernel_size=4, strides=(1, 1), padding="same",
+ depthwise_initializer=initializer, pointwise_initializer=initializer)
+ else:
+ out_put4 = tf.layers.conv2d(batch_input, out_channels, kernel_size=1, strides=(1, 1), padding="same",
+ kernel_initializer=initializer)
+ return out_put4
+
+
+# 定义LReLu激活函数
+def lrelu(x, a):
+ with tf.name_scope("lrelu"):
+ # adding these together creates the leak part and linear part
+ # then cancels them out by subtracting/adding an absolute value term
+ # leak: a*x/2 - a*abs(x)/2
+ # linear: x/2 + abs(x)/2
+
+ # this block looks like it has 2 inputs on the graph unless we do this
+ x = tf.identity(x)
+ return (0.5 * (1 + a)) * x + (0.5 * (1 - a)) * tf.abs(x)
+
+
+# 批量归一化图像
+def batchnorm(inputs):
+ return tf.layers.batch_normalization(inputs, axis=3, epsilon=1e-5, momentum=0.1, training=True,
+ gamma_initializer=tf.random_normal_initializer(1.0, 0.02))
+
+
+# 检查图像的维度
+def check_image(image):
+ assertion = tf.assert_equal(tf.shape(image)[-1], 3, message="image must have 3 color channels")
+ with tf.control_dependencies([assertion]):
+ image = tf.identity(image)
+
+ if image.get_shape().ndims not in (3, 4):
+ raise ValueError("image must be either 3 or 4 dimensions")
+
+ # make the last dimension 3 so that you can unstack the colors
+ shape = list(image.get_shape())
+ shape[-1] = 3
+ image.set_shape(shape)
+ return image
+
+
+# 去除文件的后缀,获取文件名
+def get_name(path):
+ # os.path.basename(),返回path最后的文件名。若path以/或\结尾,那么就会返回空值。
+ # os.path.splitext(),分离文件名与扩展名;默认返回(fname,fextension)元组
+ name, _ = os.path.splitext(os.path.basename(path))
+ return name
+
+
+# 加载数据集,从文件读取-->解码-->归一化--->拆分为输入和目标-->像素转为[-1,1]-->转变形状
+def load_examples(input_sample, input_label, shuffle, trans):
+ if input_sample is None or not os.path.exists(input_sample):
+ raise Exception("input_dir does not exist")
+
+ if input_label is None or not os.path.exists(input_label):
+ raise Exception("input_dir2 does not exist")
+
+ # 匹配第一个参数的路径中所有的符合条件的文件,并将其以list的形式返回。
+ input_path_sample = glob.glob(os.path.join(input_sample, "*.jpg"))
+ input_path_label = glob.glob(os.path.join(input_label, "*.jpg"))
+
+ # 图像解码器
+ decode = tf.image.decode_jpeg
+
+ if len(input_path_sample) == 0:
+ raise Exception("input_sample contains no image files")
+
+ if len(input_path_label) == 0:
+ raise Exception("input_label contains no image files")
+
+ # 如果文件名是数字,则用数字进行排序,否则用字母排序
+ if all(get_name(path).isdigit() for path in input_path_sample):
+ input_path_sample = sorted(input_path_sample, key=lambda path: int(get_name(path)))
+ else:
+ input_path_sample = sorted(input_path_sample)
+
+ if all(get_name(path).isdigit() for path in input_path_label):
+ input_path_label = sorted(input_path_label, key=lambda path: int(get_name(path)))
+ else:
+ input_path_label = sorted(input_path_label)
+
+ # sess = tf.Session()
+
+ with tf.name_scope("load_images"):
+ # 把我们需要的全部文件打包为一个tf内部的queue类型,之后tf开文件就从这个queue中取目录了,
+ # 如果是训练模式时,shuffle为True
+ if shuffle == 1:
+ path_queue = tf.train.slice_input_producer([input_path_sample, input_path_label], shuffle=True)
+ if shuffle == 0:
+ path_queue = tf.train.slice_input_producer([input_path_sample, input_path_label], shuffle=False)
+
+ # Read的输出将是一个文件名(key)和该文件的内容(value,每次读取一个文件,分多次读取)。
+ # reader = tf.WholeFileReader()
+
+ paths = input_path_sample
+
+ samples = tf.read_file(path_queue[0])
+ labels = tf.read_file(path_queue[1])
+
+ # 对文件进行解码并且对图片作归一化处理
+ raw_input_sample = decode(samples)
+ raw_input_label = decode(labels)
+ raw_input_sample = tf.image.convert_image_dtype(raw_input_sample, dtype=tf.float32) # 归一化处理
+ raw_input_label = tf.image.convert_image_dtype(raw_input_label, dtype=tf.float32)
+
+        # 判断两个值是否相等,如果不等抛出异常
+ assertion_sample = tf.assert_equal(tf.shape(raw_input_sample)[2], 3, message="image does not have 3 channels")
+ assertion_label = tf.assert_equal(tf.shape(raw_input_label)[2], 3, message="image2 does not have 3 channels")
+ '''
+ 对于control_dependencies这个管理器,只有当里面的操作是一个op时,才会生效,也就是先执行传入的
+ 参数op,再执行里面的op。如果里面的操作不是定义的op,图中就不会形成一个节点,这样该管理器就失效了。
+ tf.identity是返回一个一模一样新的tensor的op,这会增加一个新节点到gragh中,这时control_dependencies就会生效.
+ '''
+ with tf.control_dependencies([assertion_sample]):
+ raw_input_sample = tf.identity(raw_input_sample)
+
+ raw_input_sample.set_shape([None, None, 3])
+
+ with tf.control_dependencies([assertion_label]):
+ raw_input_label = tf.identity(raw_input_label)
+
+ raw_input_label.set_shape([None, None, 3])
+
+ # 图像值由[0,1]--->[-1, 1]
+ # width = tf.shape(raw_input)[1] # [height, width, channels]
+ # a_images = preprocess(raw_input_sample)
+ # b_images = preprocess(raw_input_label)
+ a_images = raw_input_sample
+ b_images = raw_input_label
+
+ # 这里的which_direction为:BtoA
+ if which_direction == "AtoB":
+ inputs, targets = [a_images, b_images]
+ elif which_direction == "BtoA":
+ inputs, targets = [b_images, a_images]
+ else:
+ raise Exception("invalid direction")
+
+ # synchronize seed for image operations so that we do the same operations to both
+ # input and output images
+ # seed = random.randint(0, 2 ** 31 - 1)
+
+ # 图像预处理,翻转、改变形状
+ with tf.name_scope("input_images"):
+ if trans == 0:
+ input_images = notransform(inputs)
+ if trans == 1:
+ inputs = transform(inputs)
+ input_images = transform2(inputs)
+ # input_images = random_erasing(inputs)
+ # input_images.set_shape([256, 256, 3])
+
+ with tf.name_scope("target_images"):
+ if trans == 0:
+ target_images = notransform(targets)
+ if trans == 1:
+ target_images = transform(targets)
+
+ # 获得输入图像、目标图像的batch块
+ paths_batch, inputs_batch, targets_batch = tf.train.batch([paths, input_images, target_images],
+ batch_size=batch_size)
+ steps_per_epoch = int(math.ceil(len(input_path_sample) / batch_size))
+
+ return Examples(
+ paths=paths_batch, # 输入的文件名块
+ inputs=inputs_batch, # 输入的图像块
+ targets=targets_batch, # 目标图像块
+ count=len(input_path_sample), # 数据集的大小
+ steps_per_epoch=steps_per_epoch, # batch的个数
+ )
+
+
+def random_erasing(img, probability=0.5, sl=0.005, sh=0.05, r1=0.5):
+ height = tf.shape(img)[0]
+ width = tf.shape(img)[1]
+ channel = tf.shape(img)[2]
+ area = tf.cast(width*height, tf.float32)
+
+ erase_area_low_bound = tf.cast(tf.round(tf.sqrt(sl * area * r1)), tf.int32)
+ erase_area_up_bound = tf.cast(tf.round(tf.sqrt((sh * area) / r1)), tf.int32)
+ h_upper_bound = tf.minimum(erase_area_up_bound, height)
+ w_upper_bound = tf.minimum(erase_area_up_bound, width)
+
+ h = tf.random_uniform([], erase_area_low_bound, h_upper_bound, tf.int32)
+ w = tf.random_uniform([], erase_area_low_bound, w_upper_bound, tf.int32)
+
+ erase_area = tf.cast(tf.random_uniform([h, w, channel], -255, 255, tf.int32), tf.float32)
+ erase_area_img = tf.image.resize_image_with_crop_or_pad(erase_area, 256, 256)
+ erase_area_img = tf.image.resize_images(erase_area_img, [512, 512], method=tf.image.ResizeMethod.BILINEAR)
+ # offset1 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 384 + 1, seed=seed)), dtype=tf.int32)
+ # offset2 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ # offset1 = tf.cast(tf.floor(tf.random_uniform([2], 512 - 320, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ # offset2 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ offset1 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ offset2 = tf.cast(tf.floor(tf.random_uniform([2], 0, 512 - 256 + 1, seed=seed)), dtype=tf.int32)
+ erase_area_img = tf.image.crop_to_bounding_box(erase_area_img, offset1[0], offset2[1], 256, 256)
+ img_v = img + erase_area_img
+ return tf.cond(tf.random_uniform([], 0, 1) > probability, lambda: img, lambda: img_v)
+
+ # x1 = tf.random_uniform([], 0, height+1 - h, tf.int32)
+ # y1 = tf.random_uniform([], 0, width+1 - w, tf.int32)
+ #
+ # erase_area = tf.cast(tf.random.uniform([h, w, channel], 0, 255, tf.int32), tf.uint8)
+ #
+ # erasing_img = img[x1:x1 + h, y1:y1 + w, :].assign(erase_area)
+ #
+ # return tf.cond(tf.random.uniform([], 0, 1) > probability, lambda: img, lambda: erasing_img)
+
+
+# 图像预处理,翻转、改变形状
+def transform(image):
+ r = image
+ if flip:
+ r = tf.image.random_flip_left_right(r, seed=seed)
+ # r = tf.image.random_flip_up_down(r, seed=seed)
+
+ # area produces a nice downscaling, but does nearest neighbor for upscaling
+ # assume we're going to be doing downscaling here
+ r = tf.image.resize_images(r, [scale_size, scale_size], method=tf.image.ResizeMethod.AREA)
+
+ offset = tf.cast(tf.floor(tf.random_uniform([2], 0, scale_size - CROP_SIZE + 1, seed=seed)), dtype=tf.int32)
+ if scale_size > CROP_SIZE:
+ r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], CROP_SIZE, CROP_SIZE)
+ elif scale_size < CROP_SIZE:
+ raise Exception("scale size cannot be less than crop size")
+ return r
+
+
+# 图像预处理
+def transform2(image):
+ r = image
+ r1 = image
+ r2 = image
+ r3 = image
+ r4 = image
+ r5 = image
+ r6 = image
+ a = random.uniform(1, 2)
+ # b = random.uniform(0.5, 1.5)
+ b = 1
+
+ if brightness:
+ r1 = tf.image.random_brightness(r1, max_delta=0.2)
+ if contrast:
+ r1 = tf.image.random_contrast(r1, lower=0.5, upper=1.5)
+ if gamma:
+ r1 = tf.image.adjust_gamma(r1, gain=a, gamma=b)
+ # if saturation:
+ # r1 = tf.image.random_saturation(r1, lower=1, upper=2)
+ # if hue:
+ # r1 = tf.image.random_hue(r1, max_delta=0.5)
+
+ if brightness:
+ r2 = tf.image.random_brightness(r2, max_delta=0.2)
+ if gamma:
+ r2 = tf.image.adjust_gamma(r2, gain=a, gamma=b)
+ if contrast:
+ r2 = tf.image.random_contrast(r2, lower=0.5, upper=1.5)
+
+ if contrast:
+ r3 = tf.image.random_contrast(r3, lower=0.5, upper=1.5)
+ if brightness:
+ r3 = tf.image.random_brightness(r3, max_delta=0.2)
+ if gamma:
+ r3 = tf.image.adjust_gamma(r3, gain=a, gamma=b)
+
+ if contrast:
+ r4 = tf.image.random_contrast(r4, lower=0.5, upper=1.5)
+ if gamma:
+ r4 = tf.image.adjust_gamma(r4, gain=a, gamma=b)
+ if brightness:
+ r4 = tf.image.random_brightness(r4, max_delta=0.2)
+
+ if gamma:
+ r5 = tf.image.adjust_gamma(r5, gain=a, gamma=b)
+ if brightness:
+ r5 = tf.image.random_brightness(r5, max_delta=0.2)
+ if contrast:
+ r5 = tf.image.random_contrast(r5, lower=0.5, upper=1.5)
+
+ if gamma:
+ r6 = tf.image.adjust_gamma(r6, gain=a, gamma=b)
+ if contrast:
+ r6 = tf.image.random_contrast(r6, lower=0.5, upper=1.5)
+ if brightness:
+ r6 = tf.image.random_brightness(r6, max_delta=0.2)
+
+ r12 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r1, lambda: r2)
+ r34 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r3, lambda: r4)
+ r56 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r5, lambda: r6)
+ r1234 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r12, lambda: r34)
+ r5678 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r56, lambda: r)
+ r12345678 = tf.cond(tf.random_uniform([], 0, 1) > 0.5, lambda: r1234, lambda: r5678)
+ return r12345678
+
+
+def notransform(image):
+ r = image
+
+ # area produces a nice downscaling, but does nearest neighbor for upscaling
+ # assume we're going to be doing downscaling here
+ r = tf.image.resize_images(r, [validation_size, validation_size], method=tf.image.ResizeMethod.AREA)
+
+ offset = tf.cast(tf.floor(tf.random_uniform([2], 0, 1, seed=seed)), dtype=tf.int32)
+ r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], validation_size, validation_size)
+
+ return r
+
+
+# 创建生成器,这是一个编码解码器的变种,输入输出均为:256*256*3, 像素值为[-1,1]
+def create_generator(generator_inputs, generator_outputs_channels):
+ layers = []
+
+ # encoder_1: [batch, 256, 256, in_channels] => [batch, 128, 128, ngf]
+ with tf.variable_scope("encoder_1"):
+ output = gen_conv(generator_inputs, ngf) # ngf is the number of convolution kernels of
+ # the first convolutional layer. Default is 64.
+ layers.append(output)
+
+ layer_specs = [
+ ngf * 2, # encoder_2: [batch, 128, 128, ngf] => [batch, 64, 64, ngf * 2]
+ ngf * 4, # encoder_3: [batch, 64, 64, ngf * 2] => [batch, 32, 32, ngf * 4]
+ ngf * 8, # encoder_4: [batch, 32, 32, ngf * 4] => [batch, 16, 16, ngf * 8]
+ ngf * 8, # encoder_5: [batch, 16, 16, ngf * 8] => [batch, 8, 8, ngf * 8]
+ ngf * 8, # encoder_6: [batch, 8, 8, ngf * 8] => [batch, 4, 4, ngf * 8]
+ ngf * 8, # encoder_7: [batch, 4, 4, ngf * 8] => [batch, 2, 2, ngf * 8]
+ ngf * 8, # encoder_8: [batch, 2, 2, ngf * 8] => [batch, 1, 1, ngf * 8]
+ ]
+
+ # 卷积的编码器
+ for out_channels in layer_specs:
+ with tf.variable_scope("encoder_%d" % (len(layers) + 1)):
+ # 对最后一层使用激活函数
+ rectified = lrelu(layers[-1], 0.2)
+ # [batch, in_height, in_width, in_channels] => [batch, in_height/2, in_width/2, out_channels]
+ convolved = gen_conv(rectified, out_channels)
+ output = batchnorm(convolved)
+ layers.append(output)
+
+ layer_specs = [
+ (ngf * 8, 0.5), # decoder_8: [batch, 1, 1, ngf * 8] => [batch, 2, 2, ngf * 8 * 2]
+ (ngf * 8, 0.5), # decoder_7: [batch, 2, 2, ngf * 8 * 2] => [batch, 4, 4, ngf * 8 * 2]
+ (ngf * 8, 0.5), # decoder_6: [batch, 4, 4, ngf * 8 * 2] => [batch, 8, 8, ngf * 8 * 2]
+ (ngf * 8, 0.0), # decoder_5: [batch, 8, 8, ngf * 8 * 2] => [batch, 16, 16, ngf * 8 * 2]
+ (ngf * 8, 0.0), # decoder_4: [batch, 16, 16, ngf * 8 * 2] => [batch, 32, 32, ngf * 4 * 2]
+ (ngf * 4, 0.0), # decoder_3: [batch, 32, 32, ngf * 4 * 2] => [batch, 64, 64, ngf * 2 * 2]
+ (ngf * 2, 0.0), # decoder_2: [batch, 64, 64, ngf * 2 * 2] => [batch, 128, 128, ngf * 2]
+ ]
+
+ # 卷积的解码器
+ num_encoder_layers = len(layers) # 8
+ for decoder_layer, (out_channels, dropout) in enumerate(layer_specs):
+ skip_layer = num_encoder_layers - decoder_layer - 1
+ with tf.variable_scope("decoder_%d" % (skip_layer + 1)):
+ if decoder_layer == 0:
+ # first decoder layer doesn't have skip connections
+ # since it is directly connected to the skip_layer
+ input = layers[-1]
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv(rectified, out_channels)
+ # elif decoder_layer == 6:
+ # input = layers[-1]
+ # elif decoder_layer == 5:
+ # input = layers[-1]
+ # elif decoder_layer == 4:
+ # input = layers[-1]
+ # elif decoder_layer == 3:
+ # input = layers[-1]
+ # elif decoder_layer == 2:
+ # input = layers[-1]
+ # elif decoder_layer == 4:
+ # input = layers[-1]
+ # # input = tf.concat([layers[-1], layers[3]], axis=3)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv2(rectified, out_channels)
+
+ else:
+ input = tf.concat([layers[-1], layers[skip_layer]], axis=3)
+ # rectified = tf.nn.relu(input)
+ # # rectified = tf.nn.leaky_relu(input)
+ # # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ # output = gen_deconv(rectified, out_channels)
+
+ rectified = tf.nn.relu(input)
+ # rectified = tf.nn.leaky_relu(input)
+ # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
+ output = gen_deconv(rectified, out_channels)
+ output = batchnorm(output)
+
+ if dropout > 0.0:
+ output = tf.nn.dropout(output, keep_prob=1 - dropout)
+
+ layers.append(output)
+
+ # decoder_1: [batch, 128, 128, ngf * 2] => [batch, 256, 256, generator_outputs_channels]
+ with tf.variable_scope("decoder_1"):
+ # input = tf.concat([layers[-1], layers[0]], axis=3)
+ # input = tf.concat([layers[-1], layers[1]], axis=3)
+ input = layers[-1]
+ rectified = tf.nn.relu(input)
+ # rectified = tf.nn.leaky_relu(input)
+ output = gen_deconv(rectified, generator_outputs_channels)
+ output = tf.tanh(output)
+ layers.append(output)
+
+ return layers[-1]
+
+
+# 创建判别器,输入生成的图像和真实的图像:两个[batch,256,256,3],元素值值[-1,1],输出:[batch,30,30,1],元素值为概率
+# def create_discriminator(discrim_inputs, discrim_targets):
+# n_layers = 3
+# layers = []
+#
+# # 2x [batch, height, width, in_channels] => [batch, height, width, in_channels * 2]
+# input = tf.concat([discrim_inputs, discrim_targets], axis=3)
+#
+# # layer_1: [batch, 256, 256, in_channels * 2] => [batch, 128, 128, ndf]
+# with tf.variable_scope("layer_1"):
+# convolved = discrim_conv(input, ndf, stride=2)
+# rectified = lrelu(convolved, 0.2)
+# layers.append(rectified)
+#
+# # layer_2: [batch, 128, 128, ndf] => [batch, 64, 64, ndf * 2]
+# # layer_3: [batch, 64, 64, ndf * 2] => [batch, 32, 32, ndf * 4]
+# # layer_4: [batch, 32, 32, ndf * 4] => [batch, 31, 31, ndf * 8]
+# for i in range(n_layers):
+# with tf.variable_scope("layer_%d" % (len(layers) + 1)):
+# out_channels = ndf * min(2 ** (i + 1), 8)
+# stride = 1 if i == n_layers - 1 else 2 # last layer here has stride 1
+# convolved = discrim_conv(layers[-1], out_channels, stride=stride)
+# normalized = batchnorm(convolved)
+# rectified = lrelu(normalized, 0.2)
+# layers.append(rectified)
+#
+# # layer_5: [batch, 31, 31, ndf * 8] => [batch, 30, 30, 1]
+# with tf.variable_scope("layer_%d" % (len(layers) + 1)):
+# convolved = discrim_conv(rectified, out_channels=1, stride=1)
+# output = tf.sigmoid(convolved)
+# layers.append(output)
+#
+# return layers[-1]
+
+
def create_generator2(generator_inputs, generator_outputs_channels):
    """Build a 4-down/4-up convolutional encoder-decoder (U-Net-like, but with
    the skip connections currently disabled) mapping a
    [batch, 256, 256, in_channels] image to a
    [batch, 256, 256, generator_outputs_channels] map with values in [0, 1].

    Relies on module-level helpers (gen_conv2/gen_conv3, gen_deconv2/gen_deconv3,
    lrelu, batchnorm) and the global `ngf` base filter count (default 64).
    """
    layers = []

    # encoder_1: [batch, 256, 256, in_channels] => [batch, 128, 128, ngf]
    with tf.variable_scope("encoder_1"):
        output = gen_conv2(generator_inputs, ngf)  # ngf is the number of convolution kernels of
        # the first convolutional layer. Default is 64.
        layers.append(output)

    layer_specs = [
        ngf * 2,  # encoder_2: [batch, 128, 128, ngf] => [batch, 64, 64, ngf * 2]
        ngf * 4,  # encoder_3: [batch, 64, 64, ngf * 2] => [batch, 32, 32, ngf * 4]
        ngf * 8,  # encoder_4: [batch, 32, 32, ngf * 4] => [batch, 16, 16, ngf * 8]
    ]

    # Convolutional encoder: leaky-ReLU -> strided conv -> batchnorm per stage.
    for out_channels in layer_specs:
        with tf.variable_scope("encoder_%d" % (len(layers) + 1)):
            rectified = lrelu(layers[-1], 0.2)
            # [batch, in_height, in_width, in_channels] => [batch, in_height/2, in_width/2, out_channels]
            convolved = gen_conv2(rectified, out_channels)
            output = batchnorm(convolved)
            layers.append(output)

    layer_specs = [
        (ngf * 8, 0.0),  # decoder_4: [batch, 16, 16, ngf * 8] => [batch, 32, 32, ngf * 8]
        (ngf * 4, 0.0),  # decoder_3: [batch, 32, 32, ngf * 8] => [batch, 64, 64, ngf * 4]
        (ngf * 2, 0.0),  # decoder_2: [batch, 64, 64, ngf * 4] => [batch, 128, 128, ngf * 2]
    ]

    # Deconvolutional decoder: ReLU -> transposed conv -> batchnorm (+dropout).
    num_encoder_layers = len(layers)
    for decoder_layer, (out_channels, dropout) in enumerate(layer_specs):
        skip_layer = num_encoder_layers - decoder_layer - 1
        with tf.variable_scope("decoder_%d" % (skip_layer + 1)):
            # NOTE(review): with three decoder stages, decoder_layer is always
            # 0, 1 or 2, so the `else` branch below (the U-Net skip-connection
            # concat) is dead code - skips are effectively disabled.
            if decoder_layer == 0:
                # bottleneck: widen features with an extra 1024-filter conv
                input = layers[-1]
                input = gen_conv3(input, 1024)
            elif decoder_layer == 2:
                input = layers[-1]
            elif decoder_layer == 1:
                input = layers[-1]
            else:
                input = tf.concat([layers[-1], layers[skip_layer]], axis=3)

            rectified = tf.nn.relu(input)
            # [batch, in_height, in_width, in_channels] => [batch, in_height*2, in_width*2, out_channels]
            output = gen_deconv2(rectified, out_channels)
            output = batchnorm(output)

            if dropout > 0.0:
                output = tf.nn.dropout(output, keep_prob=1 - dropout)

            layers.append(output)

    # decoder_1: [batch, 128, 128, ngf * 2] => [batch, 256, 256, generator_outputs_channels]
    with tf.variable_scope("decoder_1"):
        input = layers[-1]
        rectified = tf.nn.relu(input)
        output = gen_deconv2(rectified, 64)
        output = gen_deconv3(output, generator_outputs_channels)
        # sigmoid keeps the output in [0, 1] for the cross-entropy loss used
        # in create_model (tanh/softmax/relu were tried and abandoned)
        output = tf.sigmoid(output)
        layers.append(output)

    return layers[-1]
+
+
# Build the model. inputs and targets have shape [batch_size, height, width, channels].
def create_model(inputs, targets):
    """Construct the generator graph, its per-pixel cross-entropy loss, the
    Adam training op and a loss EMA, and bundle them into a `Model`
    (a namedtuple declared elsewhere in this file).

    The GAN discriminator path of the original pix2pix is commented out, so
    this is a plain supervised generator.
    """
    with tf.variable_scope("generator1"):
        out_channels = int(targets.get_shape()[-1])
        outputs = create_generator2(inputs, out_channels)

    with tf.name_scope("generator_loss"):
        # NOTE(review): despite its name, gen_loss_L1 is a per-pixel binary
        # cross-entropy between targets and the sigmoid outputs, not an L1
        # distance. EPS guards the logs against log(0).
        gen_loss_L1 = -tf.reduce_mean((targets + EPS) * tf.log(outputs + EPS) + (1 - targets + EPS) * tf.log(1 - outputs + EPS))

    # Generator training: Adam over all variables in the "generator1" scope.
    with tf.name_scope("generator_train"):
        gen_tvars = [var for var in tf.trainable_variables() if var.name.startswith("generator1")]
        gen_optim = tf.train.AdamOptimizer(lr, beta1)
        gen_grads_and_vars = gen_optim.compute_gradients(gen_loss_L1, var_list=gen_tvars)
        gen_train = gen_optim.apply_gradients(gen_grads_and_vars)

    # Exponential moving average of the loss for smoother progress reporting:
    #   shadow = decay * shadow + (1 - decay) * value
    ema = tf.train.ExponentialMovingAverage(decay=0.99)
    update_losses = ema.apply([gen_loss_L1])

    global_step = tf.train.get_or_create_global_step()
    incr_global_step = tf.assign(global_step, global_step + 1)

    return Model(
        gen_loss_L1=ema.average(gen_loss_L1),  # smoothed training loss
        gen_grads_and_vars=gen_grads_and_vars,  # (gradient, variable) pairs
        outputs=outputs,  # generated images
        train=tf.group(update_losses, incr_global_step, gen_train),  # ops to run each step
        validate=gen_loss_L1  # un-smoothed loss, used for validation
    )
+
+
# Save images to disk.
def save_images(output_dir, fetches, step=None):
    """Write fetched, already-encoded input/output/target image bytes to disk.

    fetches: dict with "paths" (one path per example, bytes or str as returned
    by sess.run) and "inputs"/"outputs"/"targets" lists of encoded JPEG bytes.
    step: optional step number used as an 8-digit filename prefix.

    Returns a list of per-example dicts mapping kind -> written file name.
    """
    image_dir = os.path.join(output_dir)
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)

    filesets = []
    for i, in_path in enumerate(fetches["paths"]):
        # Fix: sess.run returns paths as bytes; str(b'a.png') yields "b'a.png'"
        # and corrupts the derived file name, so decode bytes explicitly
        # (restores the intent of the commented-out .decode("utf8") version).
        if isinstance(in_path, bytes):
            in_path = in_path.decode("utf8")
        name, _ = os.path.splitext(os.path.basename(str(in_path)))
        fileset = {"name": name, "step": step}
        for kind in ["inputs", "outputs", "targets"]:
            filename = name + "-" + kind + ".jpg"
            if step is not None:
                filename = "%08d-%s" % (step, filename)
            fileset[kind] = filename
            out_path = os.path.join(image_dir, filename)
            contents = fetches[kind][i]
            # context manager guarantees the file handle is closed
            with open(out_path, "wb") as f:
                f.write(contents)
        filesets.append(fileset)
    return filesets
+
+
# Save images to disk (test mode: generated outputs only).
def save_images_test(output_dir, fetches, step=None):
    """Write only the generated "outputs" image bytes to `output_dir`.

    Same contract as save_images(), but saves a single kind per example.
    Returns a list of per-example dicts mapping "outputs" -> file name.
    """
    image_dir = os.path.join(output_dir)
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)

    filesets = []
    for i, in_path in enumerate(fetches["paths"]):
        # Fix: sess.run returns paths as bytes; str(b'a.png') yields "b'a.png'"
        # and corrupts the derived file name, so decode bytes explicitly.
        if isinstance(in_path, bytes):
            in_path = in_path.decode("utf8")
        name, _ = os.path.splitext(os.path.basename(str(in_path)))
        fileset = {"name": name, "step": step}
        for kind in ["outputs"]:
            filename = name + "-" + kind + ".jpg"
            if step is not None:
                filename = "%08d-%s" % (step, filename)
            fileset[kind] = filename
            out_path = os.path.join(image_dir, filename)
            contents = fetches[kind][i]
            with open(out_path, "wb") as f:
                f.write(contents)
        filesets.append(fileset)
    return filesets
+
+
# Resize if needed and convert float [0, 1] pixels to uint8 [0, 255].
def convert(image):
    """Return `image` as a saturating uint8 tensor; when the global
    `aspect_ratio` is not 1.0, first upscale height using CROP_SIZE as the
    base edge length."""
    if aspect_ratio != 1.0:
        # upscale to correct aspect ratio
        size = [CROP_SIZE, int(round(CROP_SIZE * aspect_ratio))]
        image = tf.image.resize_images(image, size=size, method=tf.image.ResizeMethod.BICUBIC)

    # convert to 8-bit unsigned int, saturating instead of wrapping on overflow
    return tf.image.convert_image_dtype(image, dtype=tf.uint8, saturate=True)
+
+
# Main training entry point.
def train():
    """Build the graph and run the training loop.

    Every `progress_freq` steps the training loss is reported, every
    `display_freq` steps input/target/output images are saved, and every
    `validation_freq` steps the validation set is evaluated, a CSV row is
    written, and a checkpoint is saved when both the validation and training
    losses improve.

    Relies on module-level configuration defined elsewhere in this file
    (dataset paths, *_freq, batch_size, max_steps, seed, log, loss_csv, ...).
    """
    csv_write = csv.writer(loss_csv, dialect='excel')

    # Seed every RNG so runs are reproducible.
    global seed
    if seed is None:
        seed = random.randint(0, 2 ** 31 - 1)

    tf.set_random_seed(seed)
    np.random.seed(seed)
    random.seed(seed)

    # Load the training (examples) and validation (examples2) pipelines.
    examples = load_examples(train_input_sample, train_input_label, 1, 0)
    print("load successful ! examples count = %d" % examples.count)
    print("load successful ! examples count = %d" % examples.count, file=log)
    examples2 = load_examples(train_input_sample2, train_input_label2, 1, 0)
    print("load successful ! examples2 count = %d" % examples2.count)
    print("load successful ! examples2 count = %d" % examples2.count, file=log)

    # Images are fed through uint8 placeholders, then converted to float32
    # in [0, 1] before entering the model.
    inputs_pd_8 = tf.placeholder(tf.uint8, [None, 256, 256, 3], name='input_image')

    target_pd_8 = tf.placeholder(tf.uint8, [None, 256, 256, 3], name='target')

    inputs_pd = tf.image.convert_image_dtype(inputs_pd_8, dtype=tf.float32)

    target_pd = tf.image.convert_image_dtype(target_pd_8, dtype=tf.float32)

    model = create_model(inputs_pd, target_pd)
    print("create model successful!")
    print("create model successful!", file=log)

    # Training and validation share the same placeholders and model outputs.
    inputs = inputs_pd
    targets = target_pd
    outputs = model.outputs
    inputs2 = inputs_pd
    targets2 = target_pd
    outputs2 = model.outputs

    # Convert [0, 1] floats to RGB uint8 [0, 255] for saving.
    with tf.name_scope("convert_inputs"):
        converted_inputs = convert(inputs)
    with tf.name_scope("convert_targets"):
        converted_targets = convert(targets)
    with tf.name_scope("convert_outputs"):
        converted_outputs = convert(outputs)
    with tf.name_scope("convert_inputs"):
        converted_inputs2 = convert(inputs2)
    with tf.name_scope("convert_targets"):
        converted_targets2 = convert(targets2)
    with tf.name_scope("convert_outputs"):
        converted_outputs2 = convert(outputs2)

    # JPEG-encode the images so save_images can write raw bytes to disk.
    with tf.name_scope("encode_images"):
        display_fetches = {
            "paths": examples.paths,
            # tf.map_fn applies the encode op to every image in the batch
            "inputs": tf.map_fn(tf.image.encode_jpeg, converted_inputs, dtype=tf.string, name="input_jpegs"),
            "targets": tf.map_fn(tf.image.encode_jpeg, converted_targets, dtype=tf.string, name="target_jpegs"),
            "outputs": tf.map_fn(tf.image.encode_jpeg, converted_outputs, dtype=tf.string, name="output_jpegs"),
        }
    with tf.name_scope("encode_images"):
        display_fetches2 = {
            "paths": examples2.paths,
            "inputs": tf.map_fn(tf.image.encode_jpeg, converted_inputs2, dtype=tf.string,
                                name="input_jpegs"),
            "targets": tf.map_fn(tf.image.encode_jpeg, converted_targets2, dtype=tf.string,
                                 name="target_jpegs"),
            "outputs": tf.map_fn(tf.image.encode_jpeg, converted_outputs2, dtype=tf.string,
                                 name="output_jpegs"),
        }

    with tf.name_scope("parameter_count"):
        parameter_count = tf.reduce_sum([tf.reduce_prod(tf.shape(v)) for v in tf.trainable_variables()])

    # keep only the 10 most recent checkpoints
    saver = tf.train.Saver(max_to_keep=10)

    # best-so-far trackers; start high so the first validation always wins
    new_loss_v = 10
    min_loss_v = 10
    train_loss = 10

    train_fetches = {}
    val_fetches = {}
    init = tf.global_variables_initializer()
    result = {}  # for train
    result2 = {}  # for validation

    with tf.Session() as sess:
        sess.run(init)
        print("parameter_count =", sess.run(parameter_count))
        print("parameter_count =", sess.run(parameter_count), file=log)

        # Start the input-pipeline threads that fill the example queues so
        # dequeue ops can produce batches.
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)

        print("begin trainning......")
        print("begin trainning......", file=log)
        print("max_steps:", max_steps)
        print("max_steps:", max_steps, file=log)
        start = time.time()
        for step in range(max_steps):
            def should(freq):
                # true every `freq` steps and on the very last step
                return freq > 0 and ((step + 1) % freq == 0 or step == max_steps - 1)

            print("step:", step)

            # Ops to run this step; "train" is what performs the update.
            train_fetches = {
                "train": model.train
            }

            # every progress_freq steps, also fetch the loss
            if should(progress_freq):
                train_fetches["gen_loss_L1"] = model.gen_loss_L1

            # every display_freq steps, also fetch encoded images for saving
            if should(display_freq):
                train_fetches["display"] = display_fetches

            # Pull a real batch from the queue, then feed it via placeholders.
            train_inputs_real, train_targets_real = sess.run([examples.inputs, examples.targets])
            train_input_dict = {inputs_pd: train_inputs_real, target_pd: train_targets_real}
            results = sess.run(train_fetches, feed_dict=train_input_dict)

            if should(display_freq):
                print("saving display training images")
                save_images(train_output, results["display"], step=step)

            # Progress report: epoch/step, throughput, ETA and current loss.
            if should(progress_freq):
                # global_step will have the correct step count if we resume from a checkpoint
                train_epoch = math.ceil(step / examples.steps_per_epoch)
                train_step = (step - 1) % examples.steps_per_epoch + 1
                rate = (step + 1) * batch_size / (time.time() - start)
                remaining = (max_steps - step) * batch_size / rate
                print("progress epoch %d step %d image/sec %0.1f remaining %dm" % (train_epoch, train_step, rate, remaining / 60))
                print("progress epoch %d step %d image/sec %0.1f remaining %dm" % (train_epoch, train_step, rate, remaining / 60), file=log)
                print("gen_loss_cross_entropy", results["gen_loss_L1"])
                print("gen_loss_cross_entropy", results["gen_loss_L1"], file=log)
                new_loss = results["gen_loss_L1"]

            # Every validation_freq steps: run the whole validation set.
            if should(validation_freq):
                val_fetches["gen_loss_L1"] = model.validate
                val_fetches["display"] = display_fetches2

                i = 0
                sum_loss_v = 0
                print("start display validation images and calculate validation loss")
                while i <= examples2.count:
                    val_inputs_real, val_targets_real = sess.run([examples2.inputs, examples2.targets])
                    val_input_dict = {inputs_pd: val_inputs_real, target_pd: val_targets_real}
                    results2 = sess.run(val_fetches, feed_dict=val_input_dict)
                    sum_loss_v = sum_loss_v + results2["gen_loss_L1"]
                    i = i + 1

                # only the last validation batch's images are saved
                save_images(validation_output, results2["display"], step=step)
                avg_loss_v = sum_loss_v / i
                print("gen_loss_cross_entropy_validation", avg_loss_v)
                print("gen_loss_cross_entropy_validation", avg_loss_v, file=log)
                new_loss_v = avg_loss_v

                # NOTE(review): indentation reconstructed - the CSV row and the
                # checkpointing below are taken to run once per validation pass.
                stu = [step + 1, new_loss_v, new_loss]
                csv_write.writerow(stu)

                # Save the model only when BOTH the validation loss and the
                # training loss improved on their best values so far.
                if new_loss_v < min_loss_v and train_loss >= new_loss:
                    min_loss_v = new_loss_v
                    train_loss = new_loss
                    print("saving model")
                    print("saving model", file=log)
                    saver.save(sess, os.path.join(checkpoint, "model"), global_step=step)
                    tf.train.write_graph(sess.graph.as_graph_def(), checkpoint, 'graph_node.pbtxt', as_text=True)

    log.close()
    loss_csv.close()
+
+
def test():
    """Restore the newest checkpoint and run the generator over the test set,
    saving only the generated output images (PNG) to `test_output_dir`.

    Uses the module-level configuration (test_input_dir, test_input_label,
    checkpoint, seed, ...) defined elsewhere in this file.
    """
    # Seed every RNG so runs are reproducible.
    global seed
    if seed is None:
        seed = random.randint(0, 2 ** 31 - 1)

    tf.set_random_seed(seed)
    np.random.seed(seed)
    random.seed(seed)

    # Create the output directory; a checkpoint is mandatory in test mode.
    if not os.path.exists(test_output_dir):
        os.makedirs(test_output_dir)
    if checkpoint is None:
        raise Exception("checkpoint required for test mode")

    # disable these features in test mode
    scale_size = CROP_SIZE
    flip = False

    # Load the test dataset (inputs and targets).
    examples = load_examples(test_input_dir, test_input_label, 0, 0)
    print("load successful ! examples count = %d" % examples.count)

    # Here the dataset pipeline tensors feed the model directly (unlike
    # train(), which goes through placeholders).
    model = create_model(examples.inputs, examples.targets)
    print("create model successful!")

    inputs = examples.inputs
    targets = examples.targets
    outputs = model.outputs

    # Convert [0, 1] floats to RGB uint8 [0, 255] for saving.
    with tf.name_scope("convert_inputs"):
        converted_inputs = convert(inputs)
    with tf.name_scope("convert_targets"):
        converted_targets = convert(targets)
    with tf.name_scope("convert_outputs"):
        converted_outputs = convert(outputs)

    # PNG-encode only the generated outputs for saving.
    with tf.name_scope("encode_images"):
        display_fetches = {
            "paths": examples.paths,
            "outputs": tf.map_fn(tf.image.encode_png, converted_outputs, dtype=tf.string, name="output_pngs"),
        }

    sess = tf.InteractiveSession()
    saver = tf.train.Saver(max_to_keep=10)

    start = time.time()

    # restore the most recent checkpoint in the checkpoint directory
    ckpt = tf.train.get_checkpoint_state(checkpoint)
    saver.restore(sess, ckpt.model_checkpoint_path)

    # start the input-pipeline threads that fill the example queues
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)

    # One image per step: run the fetches and write the generated PNG.
    for step in range(examples.count):
        results = sess.run(display_fetches)
        save_images_test(test_output_dir, results, step=step)

    print("Total Time:", (time.time() - start))
    print("Per Image Time:", (time.time() - start) / examples.count)
+
+
if __name__ == '__main__':
    # Run training by default; switch to test() to generate images from a
    # previously saved checkpoint.
    train()
    # test()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/Iou_edge.py" "b/Features/DeepLearning/Reference/Tang\047s/Iou_edge.py"
new file mode 100644
index 0000000..97a6e7a
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/Iou_edge.py"
@@ -0,0 +1,116 @@
+from sklearn.metrics import confusion_matrix
+import numpy as np
+from PIL import Image
+import os
+
+
# Running accumulators for the per-image segmentation metrics; compute_iou()
# publishes per-image values through these globals and the loop below
# aggregates them.
iou = 0        # foreground IoU of the current image
iou2 = 0       # background IoU of the current image
iouall = 0     # sum of foreground IoUs
iouall2 = 0    # sum of background IoUs
ioumean = 0    # running mean of foreground IoU
ioumean2 = 0   # running mean of background IoU
ioumax = 0     # best per-image foreground IoU seen so far
ioumin = 1     # worst per-image foreground IoU seen so far
filesnum = 0   # number of images processed
sen = 0        # sensitivity (recall) of the current image
senall = 0
senmean = 0
acc = 0        # pixel accuracy of the current image
accall = 0
accmean = 0
spe = 0        # specificity of the current image
speall = 0
spemean = 0

# Result txt file saving PATH
log = open(r'D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours4/result.txt', 'w')
+
+
def compute_iou(y_pred, y_true):
    """Compute IoU, sensitivity, accuracy and specificity for one binary
    image pair and publish them via the module-level globals
    (iou, iou2, sen, acc, spe).

    y_pred, y_true: array-likes of 0/1 values; flattened before comparison.
    All intermediate values are also printed to the module-level `log` file.
    """
    # ytrue, ypred is a flatten vector
    y_pred = y_pred.flatten()
    y_true = y_true.flatten()
    # Fix: pass labels=[0, 1] so the confusion matrix is always 2x2.
    # Without it, an all-foreground or all-background image makes .ravel()
    # return a single element and the 4-way unpacking raises ValueError.
    tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0, 1]).ravel()
    print("tn, fp, fn, tp:", (tn, fp, fn, tp), file=log)
    iou_tp = tp / (tp + fp + fn)              # foreground IoU: tp/(tp+fp+fn)
    iou_tp2 = tn / (tn + fp + fn)             # background IoU
    sen_tp = tp / (tp + fn)                   # sensitivity (recall)
    acc_tp = (tp + tn) / (tn + fp + fn + tp)  # pixel accuracy
    spe_tp = tn / (tn + fp)                   # specificity
    # publish results through module globals (read by the caller loop)
    global iou
    iou = iou_tp
    global iou2
    iou2 = iou_tp2
    global sen
    sen = sen_tp
    global acc
    acc = acc_tp
    global spe
    spe = spe_tp
    print("IoU:", iou_tp, file=log)
    print("IoU2:", iou_tp2, file=log)
    print("SEN:", sen_tp, file=log)
    print("ACC:", acc_tp, file=log)
    print("SPE:", spe_tp, file=log)
+
+
+#"C:/Users/user/Desktop/test16/gt_img/"
+#"C:/Users/user/Desktop/test16/testdataset/Fair/gt/"
+
# Per-image evaluation loop: binarise the ground-truth label and the generated
# image at grey level 128 and accumulate confusion-matrix metrics.
# NOTE(review): indentation was reconstructed; the summary prints at the
# bottom are taken to run once, after the loop - confirm against the original.
for filenames in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours4/"):  # label gray scale images PATH
    print(filenames, file=log)
    filename = filenames.replace('', '')  # NOTE(review): replace('', '') is a no-op
    print(filename)
    img1 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours4/" + filenames)  # label gray scale images PATH
    img11 = img1.convert('L')
    threshold = 128
    # 256-entry lookup table: grey < 128 -> 0, otherwise -> 1
    table1 = []
    for i in range(256):
        if i < threshold:
            table1.append(0)
        else:
            table1.append(1)
    img11 = img11.point(table1, '1')
    img_true = np.array(img11.convert("1").getdata())
    img11.save('D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours4_convert/' + filenames)  # label binary images output PATH

    # same binarisation for the model-generated image
    img2 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours4/" + filename)  # Deep Learning model generated images PATH
    img22 = img2.convert('L')
    table2 = []
    for i in range(256):
        if i < threshold:
            table2.append(0)
        else:
            table2.append(1)
    img22 = img22.point(table2, '1')
    img_pred = np.array(img22.convert("1").getdata())
    img22.save('D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours4_convert/' + filename)  # generated images binary converted output PATH

    # compute_iou publishes its results through the module globals
    compute_iou(img_pred, img_true)
    if iou >= ioumax:
        ioumax = iou
    if iou <= ioumin:
        ioumin = iou
    iouall = iouall + iou
    iouall2 = iouall2 + iou2
    senall = senall + sen
    accall = accall + acc
    speall = speall + spe
    filesnum = filesnum + 1
    # running means, recomputed after every file
    ioumean = iouall / filesnum
    ioumean2 = iouall2 / filesnum
    senmean = senall / filesnum
    accmean = accall / filesnum
    spemean = speall / filesnum

# final summary written to the result file
print("IoUmean:", ioumean, file=log)
print("IoUmean2:", ioumean2, file=log)
print("IoUmax:", ioumax, file=log)
print("IoUmin:", ioumin, file=log)
print("SENmean:", senmean, file=log)
print("ACCmean:", accmean, file=log)
print("SPEmean:", spemean, file=log)

log.close()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/TXTtoCSV.py" "b/Features/DeepLearning/Reference/Tang\047s/TXTtoCSV.py"
new file mode 100644
index 0000000..fa2d449
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/TXTtoCSV.py"
@@ -0,0 +1,31 @@
+import re
+import csv
+
num = 23  # experiment/checkpoint directory index

# Extract the last whitespace-separated token (the loss value) from every
# "discrim_loss ..." and "gen_loss_GAN ..." line of the training log and
# write them to logDis.csv / logGAN.csv respectively.
#
# Improvements over the original: the log is read in a single pass instead of
# twice, all files are closed via context managers even on error, and the
# unused w1/w2 dicts are removed. Output files and contents are unchanged.
base = "D:/Result_RE_Revenge" + str(num) + "_checkpoint"

with open(base + "/logDis.csv", 'w', newline='') as cs1, \
        open(base + "/logGAN.csv", 'w', newline='') as cs2:
    csvw1 = csv.writer(cs1)
    csvw2 = csv.writer(cs2)
    with open(base + "/log.txt") as f:
        for line in f:
            # r'\S+$' captures the final token before the trailing newline
            if 'discrim_loss ' in line:
                csvw1.writerow(re.findall(r'\S+$', line))
            if 'gen_loss_GAN ' in line:
                csvw2.writerow(re.findall(r'\S+$', line))
diff --git "a/Features/DeepLearning/Reference/Tang\047s/change_CannyThreshold.py" "b/Features/DeepLearning/Reference/Tang\047s/change_CannyThreshold.py"
new file mode 100644
index 0000000..fdab252
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/change_CannyThreshold.py"
@@ -0,0 +1,29 @@
+import cv2
+import numpy as np
+
+
def CannyThreshold(lowThreshold):
    """Trackbar callback: blur the grayscale image, run Canny with
    high threshold = lowThreshold * ratio, and display the original image
    masked to its detected edges."""
    blurred = cv2.GaussianBlur(gray, (3, 3), 0)
    edges = cv2.Canny(blurred,
                      lowThreshold,
                      lowThreshold * ratio,
                      apertureSize=kernel_size)
    # keep only the original-image pixels that lie on a detected edge
    masked = cv2.bitwise_and(img, img, mask=edges)
    cv2.imshow('canny demo', masked)
+
+
# Interactive Canny demo: a trackbar adjusts the low threshold and the
# callback redraws the edge-masked image. ESC closes the window.
lowThreshold = 0
max_lowThreshold = 100
ratio = 3        # high threshold = low threshold * ratio
kernel_size = 3  # Sobel aperture size for Canny

img = cv2.imread('D:/result_contract/Otherkind/Tongue/20191119/50/1 (1).jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

cv2.namedWindow('canny demo')

cv2.createTrackbar('Min threshold', 'canny demo', lowThreshold, max_lowThreshold, CannyThreshold)

CannyThreshold(0)  # initialization
if cv2.waitKey(0) == 27:  # 27 == ESC key
    cv2.destroyAllWindows()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/demo.py" "b/Features/DeepLearning/Reference/Tang\047s/demo.py"
new file mode 100644
index 0000000..098ae8f
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/demo.py"
@@ -0,0 +1,152 @@
+#学習結果から画像を生成するプログラム
+#!/usr/bin/env python
+
+# python train_facade.py -g 0 -i ./facade/base --out result_facade --snapshot_interval 10000
+
+from __future__ import print_function
+import argparse
+import os
+import os.path
+import glob
+import math
+from PIL import Image
+import cv2
+
+import chainer
+from chainer.serializers.npz import NpzDeserializer
+import numpy as np
+
+from net import Encoder
+from net import Decoder
+import scipy as sp
+
+import chainer
+import chainer.cuda
+from chainer import Variable
+
+import time
+
+"""各種設定"""
# input image size fed to the network
w_in = 256
h_in = 256
# size of the generated output image
w_out = 256
h_out = 256
# range of saved parameter snapshots to load (iteration numbers)
iter_first = 5000
iter_last = 380000
iter_inter = 5000
# directory holding the saved encoder/decoder parameters
param_dir = "./pix2pix_param2/"
# directory holding the input images (one sub-folder per subject)
path_man = "./dataset/"
# GPU id (negative means CPU)
gpu_num = 0
man_fld = [os.path.basename(x) for x in glob.glob(path_man+'*')]  # subject folder names
+
def out_image(img, man, files, enc, dec, rows, cols, seed, iter_num, gpu):
    """Run one preprocessed image through the encoder/decoder pair and save
    the generated result as
    ./generate_image/<man>/<image name>/iter_<iter_num>.jpg.

    img: CHW float array scaled to [-1, 1]; rows/cols control the preview
    tiling (called with 1x1 in this script); gpu >= 0 means model arrays
    live on the GPU (cupy) and must be copied back with .get().
    """
    def save_image(x, name, mode=None):
        # tile the batch into a rows x cols image grid and save it as JPEG
        _, C, H, W = x.shape
        x = x.reshape((rows, cols, C, H, W))
        x = x.transpose(0, 3, 1, 4, 2)
        if C==1:
            x = x.reshape((rows*H, cols*W))
        else:
            x = x.reshape((rows*H, cols*W, C))

        img_fld, _ = os.path.splitext(files)
        preview_dir = "./generate_image/"+man+"/"+img_fld+"/"
        if not os.path.exists(preview_dir):
            os.makedirs(preview_dir)
        preview_path = preview_dir+"iter_"+iter_num+".jpg"
        Image.fromarray(x, mode=mode).convert('RGB').save(preview_path)

    np.random.seed(seed)
    n_images = rows * cols
    xp = enc.xp  # numpy or cupy, depending on where the model lives

    in_ch = 3
    out_ch = 3
    batchsize = 1

    in_all = np.zeros((n_images, in_ch, h_in, w_in)).astype("f")
    gen_all = np.zeros((n_images, out_ch, h_out, w_out)).astype("f")

    for it in range(n_images):

        x_in = xp.zeros((batchsize, in_ch, h_in, w_in)).astype("f")
        t_out = xp.zeros((batchsize, out_ch, h_out, w_out)).astype("f")  # NOTE(review): unused

        x_in[0,:] = xp.asarray(img)
        x_in = Variable(x_in)

        # forward pass: encode, then decode
        z = enc(x_in)
        x_out = dec(z)

        if gpu >= 0:
            # .get() copies the array from GPU to host memory
            in_all[it,:] = x_in.data.get()[0,:]
            gen_all[it,:] = x_out.data.get()[0,:]
        else:
            in_all[it,:] = x_in.data[0,:]
            gen_all[it,:] = x_out.data[0,:]

    # [-1, 1] float -> [0, 255] uint8
    x = np.asarray(np.clip(gen_all * 128 + 128, 0.0, 255.0), dtype=np.uint8)
    save_image(x, "gen")
+
all_time = time.time()

# For every subject folder and every image in it, load the encoder/decoder
# snapshot of every saved iteration and generate the corresponding image.
for man in man_fld:
    path_img = path_man+man+"/"
    img_file = [os.path.basename(x) for x in glob.glob(path_img+'*')]

    for files in img_file:

        start = time.time()

        for num in range(iter_first,iter_last+1,iter_inter):
            iter_num = str(num)
            # trained model snapshot paths for this iteration
            ENC_W = param_dir+"enc_iter_"+iter_num+".npz"
            DEC_W = param_dir+"dec_iter_"+iter_num+".npz"

            # NOTE(review): argparse is rebuilt and re-parsed on every
            # iteration; only the --gpu default is actually used here.
            parser = argparse.ArgumentParser(description='chainer implementation of pix2pix')
            parser.add_argument('--gpu', '-g', type=int, default=gpu_num,
                                help='GPU ID (negative value indicates CPU)')
            parser.add_argument('--img', '-i', help='Input image')
            parser.add_argument('--out', '-o', default='result_dehighlight',
                                help='Directory to output the result')
            args = parser.parse_args()

            # Set up the networks and load the snapshot weights.
            enc = Encoder(in_ch=3)
            dec = Decoder(out_ch=3)

            chainer.serializers.load_npz(ENC_W, enc)
            chainer.serializers.load_npz(DEC_W, dec)

            if args.gpu >= 0:
                chainer.cuda.get_device(args.gpu).use()  # Make a specified GPU current
                enc.to_gpu()  # Copy the model to the GPU
                dec.to_gpu()

            # Load the input image and resize it to the network input size.
            img_src = Image.open(path_img+files)
            w,h = img_src.size
            img_src = img_src.resize((w_in, h_in), Image.BILINEAR)

            # HWC uint8 -> CHW float scaled to [-1, 1]
            img_src = np.asarray(img_src).astype("f").transpose(2,0,1)/128.0-1.0

            out_image(img_src,man,files, enc, dec, 1, 1, 0, iter_num, args.gpu)
        elapsed_time = time.time() - start
        print("complete: "+files)
        print("time: " +str(elapsed_time))
finish_time = time.time()-all_time
print("total: "+str(finish_time))
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_Canny.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_Canny.py"
new file mode 100644
index 0000000..6e3a9ad
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_Canny.py"
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+import cv2
+import os
+
+
def Edge_Extract(root, root2, root3):
    """For every .jpg image under `root`: threshold at grey level 200 into a
    binary mask (saved to `root3`) and save its Canny edges (saved to
    `root2`). Output files keep the source file names. Returns 0.

    root: directory of source grayscale/mask images
    root2: output directory for Canny edge images
    root3: output directory for thresholded binary images
    """
    img_root = os.path.join(root)
    edge_root = os.path.join(root2)
    binary_root = os.path.join(root3)

    if not os.path.exists(edge_root):
        os.mkdir(edge_root)

    if not os.path.exists(binary_root):
        os.mkdir(binary_root)

    # Fix: the original `assert "This file %s is not JPG" % (name)` was a
    # no-op (a non-empty string is always truthy), so non-JPG files were not
    # rejected and got mangled output names. Skip them explicitly instead;
    # behavior for all-JPG directories is unchanged.
    file_names = [name for name in os.listdir(img_root) if name.endswith('.jpg')]

    for name in file_names:
        img = cv2.imread(os.path.join(img_root, name), 0)
        # binarise at grey level 200
        ret, img = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
        cv2.imwrite(os.path.join(binary_root, name), img)
        cv2.imwrite(os.path.join(edge_root, name), cv2.Canny(img, 100, 300))
    return 0
+
+
if __name__ == '__main__':
    # adjust these paths for your environment
    root = 'D:/result_contract/Otherkind/Tongue/20191119/86_extraction_SRG/'  # source images
    root2 = 'D:/result_contract/Otherkind/Tongue/20191119/86_SRG_edge/'       # Canny edge output
    root3 = 'D:/result_contract/Otherkind/Tongue/20191119/86_SRG_binary/'     # thresholded output
    Edge_Extract(root, root2, root3)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_find_contours.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_find_contours.py"
new file mode 100644
index 0000000..f482dd5
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_find_contours.py"
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+import matplotlib.pyplot as plt
+from skimage import measure, data, color
+import cv2
+import os
+
+
def Edge_Extract(root, root2, root3):
    """Binarize the JPGs under *root* and render their contours via skimage.

    root  -- directory of input grayscale JPG images
    root2 -- output directory for contour renderings (matplotlib figures)
    root3 -- output directory for the thresholded binary images
    Returns 0.
    """
    img_root = os.path.join(root)
    edge_root = os.path.join(root2)
    binary_root = os.path.join(root3)

    # mkdir (not makedirs): parent directories must already exist.
    if not os.path.exists(edge_root):
        os.mkdir(edge_root)

    if not os.path.exists(binary_root):
        os.mkdir(binary_root)

    file_names = os.listdir(img_root)
    img_name = []

    for name in file_names:
        if not name.endswith('.jpg'):
            # NOTE(review): asserting a non-empty string is always true, so
            # this check never fires; non-JPG names still fall through to
            # the append below.
            assert "This file %s is not JPG" % (name)
        img_name.append(os.path.join(img_root, name[:-4]+'.jpg'))

    index = 0

    for image in img_name:
        # Read as grayscale and threshold at 200 -> strictly binary image.
        img = cv2.imread(image, 0)
        ret, img = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
        cv2.imwrite(binary_root + '/' + file_names[index], img)
        print(index)
        # Sub-pixel iso-contours of the binary image at level 0.5.
        contours = measure.find_contours(img, 0.5)
        # Borderless figure sized 2.56/3 inch; combined with dpi=300 in
        # savefig below this controls the output resolution.
        # NOTE(review): plt.gcf() reuses the same figure across loop
        # iterations while fig.subplots() adds a fresh axes each time —
        # axes accumulate on the shared figure; confirm output is as intended.
        plt.axis('off')
        fig = plt.gcf()
        fig.set_size_inches(2.56 / 3, 2.56 / 3)
        plt.gca().xaxis.set_major_locator(plt.NullLocator())
        plt.gca().yaxis.set_major_locator(plt.NullLocator())
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        ax1 = fig.subplots(1, 1)
        ax1.spines['top'].set_visible(False)
        ax1.spines['right'].set_visible(False)
        ax1.spines['bottom'].set_visible(False)
        ax1.spines['left'].set_visible(False)
        rows, cols = img.shape
        # Inverted y-limits so image coordinates render upright.
        ax1.axis([0, rows, cols, 0])
        # NOTE(review): only the first two contours are drawn; this raises
        # IndexError when find_contours returns fewer than two contours.
        ax1.plot(contours[0][:, 1], contours[0][:, 0], linewidth=10, color='red')
        ax1.plot(contours[1][:, 1], contours[1][:, 0], linewidth=10, color='red')
        # for n, contour in enumerate(contours):
        #     ax1.plot(contour[:, 1], contour[:, 0], linewidth=5, color='blue')
        fig.savefig(edge_root + '/' + file_names[index], dpi=300, pad_inches=0)
        # plt.savefig(edge_root + '/' + file_names[index], dpi=300, pad_inches=0)
        index += 1
    return 0
+
+
if __name__ == '__main__':
    # Input / output locations for contour extraction.
    root = 'D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_notall/'  # source label images
    root2 = 'D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours_notall/'  # contour renderings
    root3 = 'D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_binary_notall/'  # binarized images
    Edge_Extract(root, root2, root3)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforCanny.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforCanny.py"
new file mode 100644
index 0000000..7e1a48e
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforCanny.py"
@@ -0,0 +1,56 @@
+from PIL import Image
+import os
+
+
def _binarize(img):
    """Return an 'L'-mode copy of *img* thresholded at 128 (dark -> 0, light -> 255)."""
    # point() replaces the original per-pixel getpixel/putpixel double loop
    # (duplicated three times below) with a single C-level lookup pass;
    # the <128 -> 0, >=128 -> 255 mapping is identical.
    return img.convert('L').point(lambda v: 0 if v < 128 else 255)


# Overlay three binarized edge maps into one RGB image per file:
# red = label edge, green = 50-iteration SRG edge, blue = 86-iteration SRG edge.
# Later sources overwrite earlier ones where edges overlap.
for filename in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel_edge/"):
    print(filename)
    img_mix = Image.new("RGB", (256, 256), (255, 255, 255))
    img1_L = _binarize(Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel_edge/" + filename))
    img2_L = _binarize(Image.open("D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge/" + filename))
    img3_L = _binarize(Image.open("D:/result_contract/Otherkind/Tongue/20191119/86_SRG_edge/" + filename))

    data = img_mix.load()
    for i in range(0, 256):
        for j in range(0, 256):
            if img1_L.getpixel((i, j)) >= 128:
                data[i, j] = (255, 0, 0)
            if img2_L.getpixel((i, j)) >= 128:
                data[i, j] = (0, 255, 0)
            if img3_L.getpixel((i, j)) >= 128:
                data[i, j] = (0, 0, 255)
    img_mix.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed2/' + filename)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforFindContours.py" "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforFindContours.py"
new file mode 100644
index 0000000..1ddba87
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/edge_overlappingforFindContours.py"
@@ -0,0 +1,57 @@
+from PIL import Image
+import os
+
+
# Blend three contour renderings (label, 50-iteration SRG, 86-iteration SRG)
# into one composite image per file.
for filename in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours6/"):
    print(filename)
    label_rgb = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel2_edge_find_contours6/" + filename).convert('RGB')
    srg50_rgb = Image.open("D:/result_contract/Otherkind/Tongue/20191119/50_SRG_edge_find_contours6/" + filename).convert('RGB')
    # 50/50 mix of the label and the first SRG rendering.
    half_mix = Image.blend(label_rgb, srg50_rgb, 0.5)
    srg86_rgb = Image.open("D:/result_contract/Otherkind/Tongue/20191119/86_SRG_edge_find_contours6/" + filename).convert('RGB')
    # Fold in the second SRG rendering at 40% weight.
    composite = Image.blend(half_mix, srg86_rgb, 0.4)
    composite.save('D:/result_contract/Otherkind/Tongue/20191119/edge_mixed3_86_6/' + filename)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/generate.py" "b/Features/DeepLearning/Reference/Tang\047s/generate.py"
new file mode 100644
index 0000000..298e961
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/generate.py"
@@ -0,0 +1,59 @@
+#グレースケールを二値化→論理積画像を行なうプログラム
+import numpy as np
+from matplotlib.image import imread, imsave
+import glob, os
+from PIL import Image
+import cv2
+
"""*** Settings ***"""
# First iteration index.
iter_first = 5000
# Last iteration index.
iter_last = 380000
# Iteration step.
iter_inter = 5000
# Directory holding the generated images.
path_gen = "./generate_image/"
# Directory where the logical-AND composite images are written.
result_dir = "./result/"

path_gt = "./gt/"
+
+
# --- Binarization pass ---
# Directory layout: path_gen/<man>/<ifolder>/iter_<N>.jpg
gen_fld = [os.path.basename(x) for x in glob.glob(path_gen+'*')]

for man in gen_fld:
    path_img = path_gen+man+"/"
    img_fld = [os.path.basename(x) for x in glob.glob(path_img+'*')]
    for ifolder in img_fld:
        path_img2 = path_img+ifolder+"/"
        img_fld2 = [os.path.basename(x) for x in glob.glob(path_img2+'*')]
        # Threshold every generated image at 200 and overwrite it in place.
        for ifile in img_fld2:
            img = cv2.imread(path_img2+ifile,0)
            ret, th2 = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
            cv2.imwrite(path_img2+ifile,th2)

# --- Logical-AND pass: combine all iteration snapshots into one mask ---
for man in gen_fld:
    path_img = path_gen+man+"/"
    img_fld = [os.path.basename(x) for x in glob.glob(path_img+'*')]
    for ifolder in img_fld:
        path_img2 = path_img+ifolder+"/"
        for num in range(iter_first,iter_last+1,iter_inter):
            # NOTE(review): matplotlib's imread is used here while cv2 is
            # used everywhere else — confirm the decoded array's channel
            # order/dtype is valid as a bitwise_and mask.
            img = imread(path_img2+"iter_"+str(num)+".jpg")
            if(num == iter_first):
                # First snapshot seeds the running mask.
                mask_img = img
            else:
                # AND the current snapshot against the accumulated mask.
                make = cv2.bitwise_and(img,img, mask = mask_img)
                mask_img = make

        # Write the accumulated mask for this folder.
        res_dir = result_dir+man+"/"
        if not os.path.exists(res_dir):
            os.makedirs(res_dir)
        cv2.imwrite(res_dir+str(ifolder)+".jpg",mask_img)

print("complete")
+
diff --git "a/Features/DeepLearning/Reference/Tang\047s/image_enhancement.py" "b/Features/DeepLearning/Reference/Tang\047s/image_enhancement.py"
new file mode 100644
index 0000000..a468d7b
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/image_enhancement.py"
@@ -0,0 +1,105 @@
+import tensorflow as tf
+import os
+import random
+import numpy as np
+
source_file = "D:/test13/SampleSkin1/"  # source directory (sample images)
source_file2 = "D:/test13/LabelSkin1/"  # source directory (label images)
target_file = "D:/test13/SampleSkin2/"  # output directory for augmented samples
target_file2 = "D:/test13/LabelSkin2/"  # output directory for augmented labels
num = 2000  # number of images to process
flip = False
flip2 = False
seed = None
scale_size = 256  # help="scale images to this size before cropping to 256x256")
brightness = False
contrast = False
hue = False
saturation = False
gamma = False
aspect_ratio = 1.0  # aspect ratio of output images (width/height)
CROP_SIZE = 256

if not os.path.exists(target_file):  # create the sample output dir if missing
    os.makedirs(target_file)

file_list = os.listdir(source_file)  # list of sample source files

if not os.path.exists(target_file2):  # create the label output dir if missing
    os.makedirs(target_file2)

file_list2 = os.listdir(source_file2)  # list of label source files
+
+
# Image preprocessing: resize, then random-crop to the training size.
def transform(image):
    """Resize *image* to (scale_size, scale_size), then crop a CROP_SIZE square."""
    r = image

    # area produces a nice downscaling, but does nearest neighbor for upscaling
    # assume we're going to be doing downscaling here
    r = tf.image.resize_images(r, [scale_size, scale_size], method=tf.image.ResizeMethod.AREA)

    # Random top-left corner for the crop (unused when the sizes are equal).
    offset = tf.cast(tf.floor(tf.random_uniform([2], 0, scale_size - CROP_SIZE + 1, seed=seed)), dtype=tf.int32)
    if scale_size > CROP_SIZE:
        r = tf.image.crop_to_bounding_box(r, offset[0], offset[1], CROP_SIZE, CROP_SIZE)
    elif scale_size < CROP_SIZE:
        raise Exception("scale size cannot be less than crop size")
    return r
+
+
def transform2(image):
    """Mirror *image* horizontally when the module-level `flip2` flag is set."""
    return tf.image.flip_left_right(image) if flip2 else image
+
+
def transform3(image, color_ordering=0):
    """Apply optional brightness/contrast/saturation/gamma jitter per module flags.

    color_ordering -- only 0 is implemented; any other value returns the input unchanged.
    """
    r = image
    a = random.uniform(1, 2)  # gamma gain; drawn on every call even when `gamma` is False
    # b = random.uniform(1, 5)
    b = 1  # fixed gamma exponent
    if color_ordering == 0:
        if brightness:
            r = tf.image.random_brightness(r, max_delta=0.2)
        if contrast:
            r = tf.image.random_contrast(r, lower=0.5, upper=1.5)
        if saturation:
            r = tf.image.random_saturation(r, lower=1, upper=2)
        if gamma:
            r = tf.image.adjust_gamma(r, gain=a, gamma=b)
        # if hue:
        #     r = tf.image.random_hue(r, max_delta=0.5)

    return r
+
+
with tf.Session() as sess:
    for i in range(num):
        # NOTE(review): `num` is fixed at 2000 above; this indexes file_list
        # directly and will raise IndexError if the source folders hold
        # fewer files — confirm folder sizes match `num`.
        image_raw_data = tf.gfile.FastGFile(source_file + file_list[i], "rb").read()  # read sample image bytes
        image_raw_data2 = tf.gfile.FastGFile(source_file2 + file_list2[i], "rb").read()  # read label image bytes
        print("Processing: ", str(i))
        image_data = tf.image.decode_jpeg(image_raw_data)
        # image_data = tf.image.convert_image_dtype(image_data, dtype=tf.float32)
        image_data2 = tf.image.decode_jpeg(image_raw_data2)
        # image_data2 = tf.image.convert_image_dtype(image_data2, dtype=tf.float32)

        # Apply the same horizontal flip to sample and label so the pair stays aligned.
        adjust = tf.image.flip_left_right(image_data)
        adjust2 = tf.image.flip_left_right(image_data2)

        image_data = tf.image.convert_image_dtype(adjust, dtype=tf.uint8)
        image_data2 = tf.image.convert_image_dtype(adjust2, dtype=tf.uint8)

        encode_data = tf.image.encode_jpeg(image_data)
        encode_data2 = tf.image.encode_jpeg(image_data2)

        # NOTE(review): ".jpeg" is appended to the full original filename,
        # producing names like "x.jpg.jpeg" — confirm this is intended.
        with tf.gfile.GFile(target_file + file_list[i] + ".jpeg", "wb") as f1:
            f1.write(encode_data.eval())

        with tf.gfile.GFile(target_file2 + file_list2[i] + ".jpeg", "wb") as f2:
            f2.write(encode_data2.eval())
diff --git "a/Features/DeepLearning/Reference/Tang\047s/img_dataset.py" "b/Features/DeepLearning/Reference/Tang\047s/img_dataset.py"
new file mode 100644
index 0000000..046b583
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/img_dataset.py"
@@ -0,0 +1,79 @@
+#メモ(学習サイズが256×256の場合)
+#画像の縦横のサイズが286pixel以上になるように変換したあと、
+#256×256で切り抜きしている
+
+import os
+
+import numpy
+from PIL import Image
+import six
+
+import numpy as np
+
+from io import BytesIO
+import os
+import pickle
+import json
+import numpy as np
+import glob
+
+import skimage.io as io
+
+from chainer.dataset import dataset_mixin
"""*** Settings ***"""

# Resize so the short image side is at least this (set above the training crop size).
min_size = 286

# Training crop size.
w_crop_width = 256
h_crop_width = 256
# download `BASE` dataset from http://cmp.felk.cvut.cz/~tylecr1/facade/
class ImgDataset(dataset_mixin.DatasetMixin):
    """Paired (source, target) image dataset for pix2pix-style training.

    Loads same-named JPGs from dataSrcDir and dataDstDir, resizes each pair so
    the short side equals min_size, normalizes pixels to roughly [-1, 1), and
    serves random (h_crop_width x w_crop_width) crops.
    """
    def __init__(self, dataSrcDir, dataDstDir, data_range=(0,0.9)):
        print("load dataset start")
        print(" from: %s, %s"%(dataSrcDir, dataDstDir))
        print(" range: [{}, {})".format(data_range[0], data_range[1]))
        self.dataSrcDir = dataSrcDir
        self.dataDstDir = dataDstDir
        self.dataset = []
        # File names are taken from the destination dir; the source dir must
        # contain identically named files.
        self.picfiles = list(map(os.path.basename, glob.glob(os.path.join(dataDstDir, "*.jpg"))))
        data_range_start = int(data_range[0] * len(self.picfiles))
        data_range_end = int(data_range[1] * len(self.picfiles))
        for fn in self.picfiles[data_range_start:data_range_end]:
            img_src = Image.open(os.path.join(self.dataSrcDir, fn))
            img_dst = Image.open(os.path.join(self.dataDstDir, fn))
            w,h = img_src.size
            # Scale factor so the short side becomes min_size (>= crop width).
            r = min_size/min(w,h)
            #r = 286/min(w,h)
            # resize images so that min(w, h) == 286
            img_src = img_src.resize((int(r*w), int(r*h)), Image.BILINEAR)
            img_dst = img_dst.resize((int(r*w), int(r*h)), Image.BILINEAR)

            #img_src = np.asarray(img_src).astype("f")
            #img_src = img_src.transpose(2,0,1)/128.0-1.0

            # HWC -> CHW and map [0, 255] to roughly [-1, 1).
            img_src = np.asarray(img_src).astype("f").transpose(2,0,1)/128.0-1.0
            img_dst = np.asarray(img_dst).astype("f").transpose(2,0,1)/128.0-1.0

            #img_dst = np.asarray(img_dst).astype("f")
            #img_dst_shape = img_dst.shape
            #img_dst = img_dst.reshape(img_dst_shape[0], img_dst_shape[1], 1)
            #img_dst = img_dst.transpose(2,0,1)/128.0-1.0

            self.dataset.append((img_src, img_dst))
        print("load dataset done")

    def __len__(self):
        """Number of loaded image pairs."""
        return len(self.dataset)

    # return (label, img)
    def get_example(self, i):
        """Return a random identical crop from the i-th (source, target) pair."""
        _,h,w = self.dataset[i][0].shape
        x_l = np.random.randint(0,w-w_crop_width)
        x_r = x_l+w_crop_width
        y_l = np.random.randint(0,h-h_crop_width)
        y_r = y_l+h_crop_width
        #same image for input and output image pair
        return self.dataset[i][0][:,y_l:y_r,x_l:x_r],self.dataset[i][1][:,y_l:y_r,x_l:x_r]
diff --git "a/Features/DeepLearning/Reference/Tang\047s/inference_pb_generate.py" "b/Features/DeepLearning/Reference/Tang\047s/inference_pb_generate.py"
new file mode 100644
index 0000000..6d4569a
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/inference_pb_generate.py"
@@ -0,0 +1,90 @@
+import tensorflow as tf
+from tensorflow.python.framework import dtypes
+from PIL import Image
+import numpy as np
+from tensorflow.python.tools import freeze_graph
+from tensorflow.python.tools import optimize_for_inference_lib
+
+# # load one image
+# test_image_dir = "D:/test13/pb_test/20180626030939.jpg" # test image path for testing whether the model optimization works
+# img = Image.open(test_image_dir)
+# img_ndarray = np.array(img, dtype='uint8')
+#
+# print(img_ndarray.shape)
+# img = img_ndarray.reshape((1, 256, 256, 3))
+# print(img)
+
+
def freeze_from_checkpoint():  # freeze graph
    """Freeze the latest checkpoint's variables into a standalone .pb graph."""
    path = tf.train.latest_checkpoint("D:/resultAREinProcess10_gpu_checkpoint/")  # latest checkpoint with the saved variables
    input_graph_path = "D:/resultAREinProcess10_gpu_checkpoint/graph_node.pbtxt"  # the pbtxt path
    output_nodes = "generator1/decoder_1/Tanh"  # node kept as the graph output
    restore_op = "save/restore_all"
    filename_tensor = "save/Const:0"
    output_name = "D:/resultAREinProcess10_gpu_checkpoint/AREinProcess2_step8100.pb"  # where you want to export your freezed model
    freeze_graph.freeze_graph(input_graph_path, "", False, path, output_nodes, restore_op, filename_tensor, output_name, True, "")
+
+
def optimize_frozen_file():
    """Optimize a frozen TensorFlow graph for inference.

    - Removing training-only operations like checkpoint saving.
    - Stripping out parts of the graph that are never reached.
    - Removing debug operations like CheckNumerics.
    - Folding batch normalization ops into the pre-calculated weights.
    - Fusing common operations into unified versions.

    Note: important: Don't use placeholder as training switch, otherwise the
    folding batch normalization will occur error.
    :return: a optimized function
    """
    inputGraph = tf.GraphDef()
    frozen_graph_filename = "D:/result201910072_gpu_checkpoint/frozen_model.pb"  # the freezed model path
    with tf.gfile.Open(frozen_graph_filename, "rb") as f:

        data2read = f.read()
        inputGraph.ParseFromString(data2read)

    outputGraph = optimize_for_inference_lib.optimize_for_inference(
        inputGraph,
        ["input_image"],  # an array of the input node(s)
        ["generator1/decoder_1/Tanh"],  # an array of output nodes
        dtypes.float32.as_datatype_enum)

    # Save the optimized graph.
    # BUG FIX: SerializeToString() returns binary protobuf bytes; the
    # original opened the file in text mode ("w"), which fails for bytes
    # under Python 3 — open in binary mode instead.
    f = tf.gfile.FastGFile('D:/result201910111_gpu_checkpoint/OptimizedGraph.pb', "wb")

    f.write(outputGraph.SerializeToString())
+
+
def load_graph():
    """Deserialize the optimized frozen graph and import it into a fresh tf.Graph."""
    frozen_filename = "D:/result201910111_gpu_checkpoint/OptimizedGraph.pb"
    graph_def = tf.GraphDef()
    with tf.gfile.GFile(frozen_filename, "rb") as handle:
        graph_def.ParseFromString(handle.read())

    with tf.Graph().as_default() as graph:
        # tf.import_graph_def(graph_def, name="prefix")
        tf.import_graph_def(graph_def)
    return graph
+
+
def childs(t, d=0):
    """Recursively print tensor *t* and the input tensors of its op, indented by depth *d*."""
    print('-' * d, t.name)
    for child in t.op.inputs:
        childs(child, d + 1)
+
+
if __name__ == '__main__':

    # Step 1: freeze the checkpoint into a .pb graph. The remaining steps
    # (optimize, load, run a test inference) are kept below, disabled.
    freeze_from_checkpoint()
    # optimize_frozen_file()
    #
    # graph = load_graph()
    # x = graph.get_tensor_by_name("import/input_image:0")
    #
    # pred = graph.get_tensor_by_name("import/generator1/decoder_1/Tanh:0")

    # with tf.Session(graph=graph) as sess:
    #     input_data = img
    #     y = sess.run(pred, feed_dict={x: input_data})
    #     print(y)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/mIoU.py" "b/Features/DeepLearning/Reference/Tang\047s/mIoU.py"
new file mode 100644
index 0000000..341b6ba
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/mIoU.py"
@@ -0,0 +1,106 @@
+from sklearn.metrics import confusion_matrix
+import numpy as np
+from PIL import Image
+import os
+
+
# Running metric accumulators, shared with compute_iou() via `global` statements.
iou = 0       # IoU of the most recently processed image
iouall = 0    # sum of per-image IoU values
ioumean = 0   # running mean IoU
ioumax = 0    # best per-image IoU seen so far
ioumin = 1    # worst per-image IoU seen so far
filesnum = 0  # number of images processed
sen = 0       # sensitivity (recall) of the last image
senall = 0
senmean = 0
acc = 0       # accuracy of the last image
accall = 0
accmean = 0
spe = 0       # specificity of the last image
speall = 0
spemean = 0

# Result txt file saving PATH
log = open(r'D:/result_contract/Otherkind/Tongue/20191119/109/result.txt', 'w')
+
+
def compute_iou(y_pred, y_true):
    """Compute IoU, sensitivity, accuracy and specificity for one image pair.

    y_pred, y_true -- flattenable binary (0/1) arrays of equal length.
    Side effects: writes the metrics to the module-level `log` file and
    stores them in the module globals iou/sen/acc/spe read by the caller.
    """
    # ytrue, ypred is a flatten vector
    y_pred = y_pred.flatten()
    y_true = y_true.flatten()
    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
    # compute mean iou
    print("tn, fp, fn, tp:", (tn, fp, fn, tp), file=log)
    # NOTE(review): each ratio divides by a class count that may be zero
    # when an image contains only one class — confirm inputs always have
    # both foreground and background pixels.
    # tp/(tp + fp + fn)
    iou_tp = tp / (tp + fp + fn)
    sen_tp = tp / (tp + fn)
    acc_tp = (tp + tn) / (tn + fp + fn + tp)
    spe_tp = tn / (tn + fp)
    global iou
    iou = iou_tp
    global sen
    sen = sen_tp
    global acc
    acc = acc_tp
    global spe
    spe = spe_tp
    print("IoU:", iou_tp, file=log)
    print("SEN:", sen_tp, file=log)
    print("ACC:", acc_tp, file=log)
    print("SPE:", spe_tp, file=log)
+
+
#"C:/Users/user/Desktop/test16/gt_img/"
#"C:/Users/user/Desktop/test16/testdataset/Fair/gt/"

# Lookup table mapping gray levels to {0, 1}: below the threshold -> 0.
# PERF: built once here — the original rebuilt two identical 256-entry
# tables on every loop iteration.
threshold = 128
bin_table = [0 if i < threshold else 1 for i in range(256)]

for filenames in os.listdir(r"D:/result_contract/Otherkind/Tongue/20191119/ioulabel/"):  # label gray scale images PATH
    print(filenames, file=log)
    # BUG FIX: the original `filenames.replace('', '')` replaced the empty
    # string with the empty string — a no-op; use the name directly.
    filename = filenames
    print(filename)

    # Ground-truth label: grayscale -> 1-bit binary.
    img1 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/ioulabel/" + filenames)  # label gray scale images PATH
    img11 = img1.convert('L')
    img11 = img11.point(bin_table, '1')
    img_true = np.array(img11.convert("1").getdata())
    img11.save('D:/result_contract/Otherkind/Tongue/20191119/ioulabel_convert/' + filenames)  # label binary images output PATH

    # Prediction: grayscale -> 1-bit binary.
    img2 = Image.open("D:/result_contract/Otherkind/Tongue/20191119/109/" + filename)  # Deep Learning model generated images PATH
    img22 = img2.convert('L')
    img22 = img22.point(bin_table, '1')
    img_pred = np.array(img22.convert("1").getdata())
    img22.save('D:/result_contract/Otherkind/Tongue/20191119/109_convert/' + filename)  # generated images binary converted output PATH

    # Updates the module globals iou/sen/acc/spe as a side effect.
    compute_iou(img_pred, img_true)
    if iou >= ioumax:
        ioumax = iou
    if iou <= ioumin:
        ioumin = iou
    iouall = iouall + iou
    senall = senall + sen
    accall = accall + acc
    speall = speall + spe
    filesnum = filesnum + 1
    # Running means, logged after every image (matches original behavior).
    ioumean = iouall / filesnum
    senmean = senall / filesnum
    accmean = accall / filesnum
    spemean = speall / filesnum
    print("IoUmean:", ioumean, file=log)
    print("IoUmax:", ioumax, file=log)
    print("IoUmin:", ioumin, file=log)
    print("SENmean:", senmean, file=log)
    print("ACCmean:", accmean, file=log)
    print("SPEmean:", spemean, file=log)

log.close()
diff --git "a/Features/DeepLearning/Reference/Tang\047s/make_image_TFRecord.py" "b/Features/DeepLearning/Reference/Tang\047s/make_image_TFRecord.py"
new file mode 100644
index 0000000..02d093b
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/make_image_TFRecord.py"
@@ -0,0 +1,71 @@
+import os
+import tensorflow as tf
+from PIL import Image
+import numpy as np
+import pandas as pd
+
# Directory holding the original images, one sub-folder per class.
orig_picture = os.getcwd() + '\\image\\test'
# Directory where generated artifacts are stored.
gen_picture = os.getcwd() + '\\image'
# Class labels to recognize. NOTE(review): this is a set literal, so
# iteration order (and the numeric label assigned per class) is unstable.
classes = {'0', '1'}
# Total number of samples.
num_samples = 40
+
+
# Build the TFRecords dataset.
def create_record():
    """Serialize every image under orig_picture/<class>/ into test.tfrecords.

    Each Example stores the class index as `label` and the raw bytes of the
    image resized to 32x32 as `img_raw`.
    """
    writer = tf.python_io.TFRecordWriter("test.tfrecords")
    for index, name in enumerate(classes):
        class_path = orig_picture + "/" + name + "/"
        for img_name in os.listdir(class_path):
            img_path = class_path + img_name
            img = Image.open(img_path)
            img = img.resize((32, 32))  # target size for the stored images
            ### grayscale conversion (disabled) #############################################
            # img=img.convert("L")
            ##############################################################################################
            img_raw = img.tobytes()  # serialize the image to raw bytes
            example = tf.train.Example(
                features=tf.train.Features(feature={
                    "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[index])),
                    'img_raw': tf.train.Feature(bytes_list=tf.train.BytesList(value=[img_raw]))
                }))
            writer.write(example.SerializeToString())
    writer.close()
+
+
# =======================================================================================
def read_and_decode(filename, is_batch):
    """Read Examples back from a TFRecords file.

    filename -- path to the .tfrecords file
    is_batch -- when True, return shuffled mini-batches instead of single examples
    Returns (img, label) tensors; img is reshaped to 32x32x3 uint8.
    """
    # Create an unbounded file-name queue for the input file.
    filename_queue = tf.train.string_input_producer([filename])
    # create a reader from file queue
    reader = tf.TFRecordReader()
    # The reader pulls one serialized example from the queue.
    _, serialized_example = reader.read(filename_queue)
    # get feature from serialized example
    # Parse the serialized example back into feature tensors.
    features = tf.parse_single_example(
        serialized_example,
        features={
            'label': tf.FixedLenFeature([], tf.int64),
            'img_raw': tf.FixedLenFeature([], tf.string)
        })
    label = features['label']
    img = features['img_raw']
    img = tf.decode_raw(img, tf.uint8)
    img = tf.reshape(img, [32, 32, 3])
    # img = tf.cast(img, tf.float32) * (1. / 255) - 0.5
    label = tf.cast(label, tf.int32)

    if is_batch:
        batch_size = 3
        min_after_dequeue = 10
        capacity = min_after_dequeue + 3 * batch_size
        img, label = tf.train.shuffle_batch([img, label],
                                            batch_size=batch_size,
                                            num_threads=3,
                                            capacity=capacity,
                                            min_after_dequeue=min_after_dequeue)
    return img, label
\ No newline at end of file
diff --git "a/Features/DeepLearning/Reference/Tang\047s/net.py" "b/Features/DeepLearning/Reference/Tang\047s/net.py"
new file mode 100644
index 0000000..81fb85c
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/net.py"
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import numpy
+
+import chainer
+from chainer import cuda
+import chainer.functions as F
+import chainer.links as L
+
+# U-net https://arxiv.org/pdf/1611.07004v1.pdf
+
+# convolution-batchnormalization-(dropout)-relu
class CBR(chainer.Chain):
    """Conv/Deconv -> optional BatchNorm -> optional Dropout -> activation block."""

    def __init__(self, ch0, ch1, bn=True, sample='down', activation=F.relu, dropout=False):
        self.bn = bn
        self.activation = activation
        self.dropout = dropout
        init_w = chainer.initializers.Normal(0.02)
        # 'down' halves the spatial size with a strided conv; anything else
        # doubles it with a strided deconvolution.
        conv_cls = L.Convolution2D if sample == 'down' else L.Deconvolution2D
        links = {'c': conv_cls(ch0, ch1, 4, 2, 1, initialW=init_w)}
        if bn:
            links['batchnorm'] = L.BatchNormalization(ch1)
        super(CBR, self).__init__(**links)

    def __call__(self, x):
        h = self.c(x)
        if self.bn:
            h = self.batchnorm(h)
        if self.dropout:
            h = F.dropout(h)
        if self.activation is not None:
            h = self.activation(h)
        return h
+
class Encoder(chainer.Chain):
    """U-net encoder: a stem conv followed by seven downsampling CBR blocks."""

    def __init__(self, in_ch):
        init_w = chainer.initializers.Normal(0.02)
        links = {'c0': L.Convolution2D(in_ch, 64, 3, 1, 1, initialW=init_w)}
        # (in, out) channels of c1..c7.
        channel_pairs = [(64, 128), (128, 256), (256, 512),
                         (512, 512), (512, 512), (512, 512), (512, 512)]
        for idx, (cin, cout) in enumerate(channel_pairs, start=1):
            links['c%d' % idx] = CBR(cin, cout, bn=True, sample='down',
                                     activation=F.leaky_relu, dropout=False)
        super(Encoder, self).__init__(**links)

    def __call__(self, x):
        # Keep every intermediate activation for the decoder's skip connections.
        hs = [F.leaky_relu(self.c0(x))]
        for i in range(1, 8):
            hs.append(self['c%d' % i](hs[i - 1]))
        return hs
+
class Decoder(chainer.Chain):
    """U-net decoder: upsampling CBR blocks consuming the encoder's skip activations."""
    def __init__(self, out_ch):
        layers = {}
        w = chainer.initializers.Normal(0.02)
        # c1..c6 take doubled input channels because each receives the
        # previous decoder output concatenated with an encoder activation.
        layers['c0'] = CBR(512, 512, bn=True, sample='up', activation=F.relu, dropout=True)
        layers['c1'] = CBR(1024, 512, bn=True, sample='up', activation=F.relu, dropout=True)
        layers['c2'] = CBR(1024, 512, bn=True, sample='up', activation=F.relu, dropout=True)
        layers['c3'] = CBR(1024, 512, bn=True, sample='up', activation=F.relu, dropout=False)
        layers['c4'] = CBR(1024, 256, bn=True, sample='up', activation=F.relu, dropout=False)
        layers['c5'] = CBR(512, 128, bn=True, sample='up', activation=F.relu, dropout=False)
        layers['c6'] = CBR(256, 64, bn=True, sample='up', activation=F.relu, dropout=False)
        layers['c7'] = L.Convolution2D(128, out_ch, 3, 1, 1, initialW=w)
        super(Decoder, self).__init__(**layers)

    def __call__(self, hs):
        """Decode from the encoder's activation list *hs* (deepest is hs[-1])."""
        h = self.c0(hs[-1])
        for i in range(1,8):
            # Concatenate with the mirrored encoder activation (skip connection).
            h = F.concat([h, hs[-i-1]])
            if i<7:
                h = self['c%d'%i](h)
            else:
                # Final plain conv produces the output image channels.
                h = self.c7(h)
        return h
+
+
class Discriminator(chainer.Chain):
    """Patch discriminator over an (input, output) image pair."""

    def __init__(self, in_ch, out_ch):
        init_w = chainer.initializers.Normal(0.02)
        links = {
            'c0_0': CBR(in_ch, 32, bn=False, sample='down', activation=F.leaky_relu, dropout=False),
            'c0_1': CBR(out_ch, 32, bn=False, sample='down', activation=F.leaky_relu, dropout=False),
            'c1': CBR(64, 128, bn=True, sample='down', activation=F.leaky_relu, dropout=False),
            'c2': CBR(128, 256, bn=True, sample='down', activation=F.leaky_relu, dropout=False),
            'c3': CBR(256, 512, bn=True, sample='down', activation=F.leaky_relu, dropout=False),
            'c4': L.Convolution2D(512, 1, 3, 1, 1, initialW=init_w),
        }
        super(Discriminator, self).__init__(**links)

    def __call__(self, x_0, x_1):
        # Run each image through its own stem, merge, then the shared trunk.
        h = F.concat([self.c0_0(x_0), self.c0_1(x_1)])
        for name in ('c1', 'c2', 'c3', 'c4'):
            h = self[name](h)
        #h = F.average_pooling_2d(h, h.data.shape[2], 1, 0)
        return h
diff --git "a/Features/DeepLearning/Reference/Tang\047s/pic_visualizer.py" "b/Features/DeepLearning/Reference/Tang\047s/pic_visualizer.py"
new file mode 100644
index 0000000..b2fe959
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/pic_visualizer.py"
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+
+import os
+
+import numpy as np
+from PIL import Image
+
+import chainer
+import chainer.cuda
+from chainer import Variable
+
def out_image(updater, enc, dec, rows, cols, seed, dst, gpu, w_img, h_img):
    """Build a chainer training extension that saves preview image grids.

    updater     -- trainer updater providing the 'test' iterator
    enc, dec    -- encoder / decoder networks used to generate images
    rows, cols  -- layout of the preview grid (rows*cols samples per image)
    seed        -- numpy seed, re-applied on every call for repeatable previews
    dst         -- output root; images go to <dst>/preview/
    gpu         -- >= 0 means arrays live on GPU and must be copied via .get()
    w_img,h_img -- width/height of each sample image
    """
    @chainer.training.make_extension()
    def make_image(trainer):
        np.random.seed(seed)
        n_images = rows * cols
        xp = enc.xp

        in_ch = 3
        out_ch = 3

        # Host-side buffers for input, ground-truth and generated samples.
        in_all = np.zeros((n_images, in_ch, h_img, w_img)).astype("f")
        gt_all = np.zeros((n_images, out_ch, h_img, w_img)).astype("f")
        gen_all = np.zeros((n_images, out_ch, h_img, w_img)).astype("f")

        for it in range(n_images):
            batch = updater.get_iterator('test').next()
            batchsize = len(batch)

            x_in = xp.zeros((batchsize, in_ch, h_img, w_img)).astype("f")
            t_out = xp.zeros((batchsize, out_ch, h_img, w_img)).astype("f")

            for i in range(batchsize):
                x_in[i,:] = xp.asarray(batch[i][0])
                t_out[i,:] = xp.asarray(batch[i][1])
            x_in = Variable(x_in)

            # Generate: encode then decode.
            z = enc(x_in)
            x_out = dec(z)

            # Only the first sample of each batch is kept for the preview.
            if gpu >= 0:
                # .get() copies from GPU to host memory.
                in_all[it,:] = x_in.data.get()[0,:]
                gt_all[it,:] = t_out.get()[0,:]
                gen_all[it,:] = x_out.data.get()[0,:]
            else:
                in_all[it,:] = x_in.data[0,:]
                gt_all[it,:] = t_out[0,:]
                gen_all[it,:] = x_out.data[0,:]


        def save_image(x, name, mode=None):
            """Tile (n, C, H, W) samples into a rows x cols grid and save as PNG."""
            _, C, H, W = x.shape
            x = x.reshape((rows, cols, C, H, W))
            x = x.transpose(0, 3, 1, 4, 2)
            if C==1:
                x = x.reshape((rows*H, cols*W))
            else:
                x = x.reshape((rows*H, cols*W, C))

            preview_dir = '{}/preview'.format(dst)
            preview_path = preview_dir +\
                '/image_{:0>8}_{}.png'.format(trainer.updater.iteration, name)
            if not os.path.exists(preview_dir):
                os.makedirs(preview_dir)
            Image.fromarray(x, mode=mode).convert('RGB').save(preview_path)
        """
        x = np.asarray(np.clip(gen_all, 0.0, 255.0), dtype=np.uint8)
        save_image(x, "gen")

        x = np.asarray(np.clip(in_all, 0.0, 255.0), dtype=np.uint8)
        save_image(x, "in")

        x = np.asarray(np.clip(gt_all, 0.0, 255.0), dtype=np.uint8)
        save_image(x, "gt")

        """
        # Map the network's [-1, 1) range back to [0, 255] before saving.
        x = np.asarray(np.clip(gen_all * 128 + 128, 0.0, 255.0), dtype=np.uint8)
        save_image(x, "gen")

        x = np.asarray(np.clip(in_all * 128+128, 0.0, 255.0), dtype=np.uint8)
        save_image(x, "in")

        x = np.asarray(np.clip(gt_all * 128+128, 0.0, 255.0), dtype=np.uint8)
        save_image(x, "gt")

    return make_image
diff --git "a/Features/DeepLearning/Reference/Tang\047s/png2jpg.py" "b/Features/DeepLearning/Reference/Tang\047s/png2jpg.py"
new file mode 100644
index 0000000..78ba50e
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/png2jpg.py"
@@ -0,0 +1,14 @@
+import os
+import cv2
+
+
def png2jpg(path='K:/label5/'):
    """Convert every single-channel image in *path* to a 3-channel JPG in place.

    path -- directory to scan; default keeps the original hard-coded location,
            so existing callers are unaffected.

    BUG FIX: cv2.imread defaults to a 3-channel BGR load, so the original
    COLOR_GRAY2BGR conversion was applied to a 3-channel image and raised an
    OpenCV error; read the file as grayscale instead.
    """
    for file in os.listdir(path):
        img = cv2.imread(path + file, cv2.IMREAD_GRAYSCALE)
        img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
        cv2.imwrite(
            path + file.split('.')[0] + '.jpg', img)


png2jpg()
\ No newline at end of file
diff --git "a/Features/DeepLearning/Reference/Tang\047s/random_erasing.py" "b/Features/DeepLearning/Reference/Tang\047s/random_erasing.py"
new file mode 100644
index 0000000..6289ef1
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/random_erasing.py"
@@ -0,0 +1,56 @@
+import numpy as np
+import os
+import cv2
+
source_file = "D:/random_erasing_testdata4/"  # source directory with the original images
target_file = "D:/random_erasing_testdata4_output18/"  # destination directory for the augmented copies
num = 424  # number of images to process — assumed not to exceed the file count in source_file (TODO confirm)
+
+
def random_erasing(img, p=1, sl=0.01, sh=0.2, r1=0.3, r2=0.5):
    """Return a copy of *img* with one random-noise rectangle pasted near the top.

    Random Erasing style augmentation: a rectangle whose area is a random
    fraction in [sl, sh] of the image area and whose aspect ratio is drawn
    from [r1, r2] is filled with uniform random noise. The rectangle's top
    edge is restricted to the top fifth of the image.

    Args:
        img: H x W x C uint8 image (as returned by cv2.imread).
        p:   probability of applying the erasure (default 1 = always).
        sl, sh: lower/upper bound on erased area as a fraction of image area.
        r1, r2: lower/upper bound on the rectangle aspect ratio.

    Returns:
        A new array; the input image is never modified in place.
    """
    target_img = img.copy()

    # Skip the augmentation with probability (1 - p).
    if p < np.random.rand():
        return target_img

    H, W, _ = target_img.shape
    S = H * W

    # Integer upper bound for the y draw, clamped to at least 1:
    # the original passed the float H/5 (rejected/deprecated by NumPy)
    # and raised ValueError for images shorter than 5 pixels.
    y_limit = max(H // 5, 1)

    while True:
        Se = np.random.uniform(sl, sh) * S  # area of the erased rectangle
        re = np.random.uniform(r1, r2)      # aspect ratio of the rectangle

        He = int(np.sqrt(Se * re))          # rectangle height
        We = int(np.sqrt(Se / re))          # rectangle width

        xe = np.random.randint(0, W)        # rectangle x coordinate
        ye = np.random.randint(0, y_limit)  # rectangle y coordinate (top fifth)

        # Retry until the rectangle fits entirely inside the image.
        if xe + We <= W and ye + He <= H:
            break

    # Fill the rectangle with uniform noise over the full uint8 range.
    # randint's high bound is exclusive, so the original 255 silently
    # excluded the value 255; dtype is made explicit to match the image.
    mask = np.random.randint(0, 256, (He, We, 3), dtype=np.uint8)
    target_img[ye:ye + He, xe:xe + We, :] = mask

    return target_img
+
+
# Create the output directory on first use.
if not os.path.exists(target_file):
    os.makedirs(target_file)


# List the source directory once — the original re-listed it on every
# iteration — and never index past the number of files actually present
# (the hard-coded `num` would otherwise raise IndexError).
file_list = os.listdir(source_file)
for i in range(min(num, len(file_list))):
    print("Processing: ", str(i))
    img = cv2.imread(source_file + file_list[i])
    if img is None:
        # Not a readable image (e.g. stray non-image file); skip it.
        continue
    new_img = random_erasing(img)
    cv2.imwrite(target_file + file_list[i], new_img)
diff --git "a/Features/DeepLearning/Reference/Tang\047s/updater.py" "b/Features/DeepLearning/Reference/Tang\047s/updater.py"
new file mode 100644
index 0000000..4da3575
--- /dev/null
+++ "b/Features/DeepLearning/Reference/Tang\047s/updater.py"
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import chainer
+import chainer.functions as F
+from chainer import Variable
+
+import numpy as np
+from PIL import Image
+
+from chainer import cuda
+from chainer import function
+from chainer.utils import type_check
+import numpy
+
"""Configuration"""
# Input/output image size (pixels) fed to the encoder-decoder.
w_img = 256
h_img = 256
+
class PicUpdater(chainer.training.StandardUpdater):
    """pix2pix-style updater training an encoder, decoder and discriminator.

    Each call to ``update_core`` runs one adversarial step: the
    encoder/decoder minimise a weighted L1 reconstruction loss plus an
    adversarial (softplus GAN) term, while the discriminator learns to
    separate real (input, ground-truth) pairs from generated ones.
    """

    def __init__(self, *args, **kwargs):
        # models = (encoder, decoder, discriminator), passed by the trainer setup.
        self.enc, self.dec, self.dis = kwargs.pop('models')
        super(PicUpdater, self).__init__(*args, **kwargs)

    def _generator_loss(self, model, x_out, t_out, y_out, lam1, lam2):
        # Shared generator-side loss (the original duplicated this body in
        # loss_enc and loss_dec): lam1-weighted L1 reconstruction plus the
        # lam2-weighted non-saturating adversarial term, normalised by the
        # discriminator output size. Reported under *model*.
        batchsize, _, w, h = y_out.data.shape
        loss_rec = lam1 * (F.mean_absolute_error(x_out, t_out))
        loss_adv = lam2 * F.sum(F.softplus(-y_out)) / batchsize / w / h
        loss = loss_rec + loss_adv
        chainer.report({'loss': loss}, model)
        return loss

    def loss_enc(self, enc, x_out, t_out, y_out, lam1=100, lam2=1):
        """Encoder loss: L1 reconstruction + adversarial term."""
        return self._generator_loss(enc, x_out, t_out, y_out, lam1, lam2)

    def loss_dec(self, dec, x_out, t_out, y_out, lam1=100, lam2=1):
        """Decoder loss: same form as the encoder loss, reported on the decoder."""
        return self._generator_loss(dec, x_out, t_out, y_out, lam1, lam2)

    def loss_dis(self, dis, y_in, y_out):
        """Discriminator loss: softplus GAN loss over real/fake outputs."""
        batchsize, _, w, h = y_in.data.shape

        L1 = F.sum(F.softplus(-y_in)) / batchsize / w / h   # push real pairs up
        L2 = F.sum(F.softplus(y_out)) / batchsize / w / h   # push fake pairs down
        loss = L1 + L2
        chainer.report({'loss': loss}, dis)
        return loss

    def update_core(self):
        """Run one optimisation step for encoder, decoder and discriminator."""
        enc_optimizer = self.get_optimizer('enc')
        dec_optimizer = self.get_optimizer('dec')
        dis_optimizer = self.get_optimizer('dis')

        enc, dec, dis = self.enc, self.dec, self.dis
        xp = enc.xp  # numpy or cupy, depending on device placement

        batch = self.get_iterator('main').next()
        batchsize = len(batch)
        in_ch = batch[0][0].shape[0]
        out_ch = batch[0][1].shape[0]

        # Assemble the batch as float32 arrays of shape (B, C, h_img, w_img).
        x_in = xp.zeros((batchsize, in_ch, h_img, w_img)).astype("f")
        t_out = xp.zeros((batchsize, out_ch, h_img, w_img)).astype("f")

        for i in range(batchsize):
            x_in[i, :] = xp.asarray(batch[i][0])
            t_out[i, :] = xp.asarray(batch[i][1])
        x_in = Variable(x_in)

        z = enc(x_in)        # latent feature maps
        x_out = dec(z)       # generated image

        y_fake = dis(x_in, x_out)   # discriminator on the generated pair
        y_real = dis(x_in, t_out)   # discriminator on the ground-truth pair

        # Order matters: the encoder update backpropagates through z; z is
        # then unchained so the decoder update stops at the latent code, and
        # x_in/x_out are unchained before the discriminator update.
        enc_optimizer.update(self.loss_enc, enc, x_out, t_out, y_fake)
        for z_ in z:
            z_.unchain_backward()
        dec_optimizer.update(self.loss_dec, dec, x_out, t_out, y_fake)
        x_in.unchain_backward()
        x_out.unchain_backward()
        dis_optimizer.update(self.loss_dis, dis, y_real, y_fake)
diff --git a/Features/DeepLearning/Tools/Converter_ckpt_To_PB b/Features/DeepLearning/Tools/Converter_ckpt_To_PB
new file mode 100644
index 0000000..f53926e
--- /dev/null
+++ b/Features/DeepLearning/Tools/Converter_ckpt_To_PB
@@ -0,0 +1,79 @@
+import tensorflow as tf
+from tensorflow.python.framework import dtypes
+import numpy as np
+from tensorflow.python.tools import freeze_graph
+from tensorflow.python.tools import optimize_for_inference_lib
+
def freeze_from_checkpoint():  # freeze graph
    """Freeze the newest checkpoint into a single .pb GraphDef.

    Combines the graph definition (.pbtxt) with the variable values from
    the latest checkpoint, keeping only the nodes needed to evaluate the
    generator output node.
    """
    path = tf.train.latest_checkpoint(r"D:\Result_RE_Revenge101_checkpoint\\")
    input_graph_path = r"D:\Result_RE_Revenge101_checkpoint\graph_node.pbtxt"  # the pbtxt path
    output_nodes = "generator1/decoder_1/Tanh"
    # TensorFlow op/tensor names always use forward slashes, even on Windows.
    # The original backslash names (save\restore_all, save\Const:0) could
    # never match the saver's ops, so freezing always failed.
    restore_op = "save/restore_all"
    filename_tensor = "save/Const:0"
    output_name = r"D:\Result_RE_Revenge101_checkpoint\pruning101_step11999.pb"  # where you want to export your frozen model
    freeze_graph.freeze_graph(input_graph_path, "", False, path, output_nodes,
                              restore_op, filename_tensor, output_name, True, "")
+
+
def optimize_frozen_file():
    """Optimize a frozen graph for inference and write the result to disk.

    optimize_for_inference performs, among other things:
      - removing training-only operations like checkpoint saving,
      - stripping out parts of the graph that are never reached,
      - removing debug operations like CheckNumerics,
      - folding batch normalization ops into the pre-calculated weights,
      - fusing common operations into unified versions.

    Note: important: don't use a placeholder as the training switch,
    otherwise the batch-normalization folding step fails.
    """
    inputGraph = tf.GraphDef()
    frozen_graph_filename = "D:/result201910072_gpu_checkpoint/frozen_model.pb"  # the frozen model path
    # Context manager guarantees the handle is closed even on parse errors.
    with tf.gfile.Open(frozen_graph_filename, "rb") as f:
        inputGraph.ParseFromString(f.read())

    outputGraph = optimize_for_inference_lib.optimize_for_inference(
        inputGraph,
        ["input_image"],                  # an array of the input node(s)
        ["generator1/decoder_1/Tanh"],    # an array of output nodes
        dtypes.float32.as_datatype_enum)

    # Serialized protobufs are binary data: write with "wb" — the original
    # text mode "w" corrupts the .pb via newline translation on Windows —
    # and close the file deterministically.
    with tf.gfile.FastGFile('D:/result201910111_gpu_checkpoint/OptimizedGraph.pb', "wb") as f:
        f.write(outputGraph.SerializeToString())
+
+
def load_graph():
    """Read the optimized frozen model and import it into a fresh Graph."""
    frozen_filename = "D:/result201910111_gpu_checkpoint/OptimizedGraph.pb"
    graph_def = tf.GraphDef()
    with tf.gfile.GFile(frozen_filename, "rb") as handle:
        graph_def.ParseFromString(handle.read())

    graph = tf.Graph()
    with graph.as_default():
        # tf.import_graph_def(graph_def, name="prefix")
        tf.import_graph_def(graph_def)
    return graph
+
+
def childs(t, d=0):
    """Print tensor *t*'s name, then recurse over the inputs of its producing op.

    Depth *d* controls the leading dash indentation, one dash per level.
    """
    print('-' * d, t.name)
    for parent in t.op.inputs:
        childs(parent, d + 1)
+
+
if __name__ == '__main__':

    # Freeze the newest checkpoint into a .pb, then optimize that frozen
    # graph for inference (paths are hard-coded inside each function).
    freeze_from_checkpoint()
    optimize_frozen_file()

    # Example usage of the optimized graph, kept for reference:
    # graph = load_graph()
    # x = graph.get_tensor_by_name("import/input_image:0")
    #
    # pred = graph.get_tensor_by_name("import/generator1/decoder_1/Tanh:0")

    # with tf.Session(graph=graph) as sess:
    # input_data = img
    # y = sess.run(pred, feed_dict={x: input_data})
    # print(y)
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln b/Features/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln
new file mode 100644
index 0000000..ad5bbe3
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/deepTIAS_feature_labColor.sln
@@ -0,0 +1,31 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.30011.22
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "deepTIAS_feature_labColor", "newcamera_deeplearning\deepTIAS_feature_labColor.csproj", "{9B9D9F04-F367-4B3A-A842-51D01B42539B}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|x64.ActiveCfg = Debug|x64
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Debug|x64.Build.0 = Debug|x64
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|x64.ActiveCfg = Release|x64
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}.Release|x64.Build.0 = Release|x64
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {0887EA03-2F07-4BB6-B5FD-1656497D53DB}
+ EndGlobalSection
+EndGlobal
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs
new file mode 100644
index 0000000..ccc69e5
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.Designer.cs
@@ -0,0 +1,553 @@
+namespace OperateCamera
+{
+ partial class Form_TIASAutomaticShootingSystem
+ {
+ ///
+ /// Required designer variable.
+ ///
+ private System.ComponentModel.IContainer components = null;
+
+ ///
+ /// Clean up any resources being used.
+ ///
+ /// true if managed resources should be disposed; otherwise, false.
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ ///
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ ///
+ private void InitializeComponent()
+ {
+ this.components = new System.ComponentModel.Container();
+ this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
+ this.textBox5 = new System.Windows.Forms.TextBox();
+ this.textBox1 = new System.Windows.Forms.TextBox();
+ this.panel7 = new System.Windows.Forms.Panel();
+ this.pictureBox4 = new System.Windows.Forms.PictureBox();
+ this.panel6 = new System.Windows.Forms.Panel();
+ this.pictureBox3 = new System.Windows.Forms.PictureBox();
+ this.panel5 = new System.Windows.Forms.Panel();
+ this.pictureBox2 = new System.Windows.Forms.PictureBox();
+ this.label10 = new System.Windows.Forms.Label();
+ this.label9 = new System.Windows.Forms.Label();
+ this.panel3 = new System.Windows.Forms.Panel();
+ this.ShootingGuideBox = new System.Windows.Forms.TextBox();
+ this.label1 = new System.Windows.Forms.Label();
+ this.panel1 = new System.Windows.Forms.Panel();
+ this.button_ConnectTIAS = new System.Windows.Forms.Button();
+ this.label7 = new System.Windows.Forms.Label();
+ this.label8 = new System.Windows.Forms.Label();
+ this.textBox2 = new System.Windows.Forms.TextBox();
+ this.textBox3 = new System.Windows.Forms.TextBox();
+ this.textBox4 = new System.Windows.Forms.TextBox();
+ this.textBox6 = new System.Windows.Forms.TextBox();
+ this.textBox7 = new System.Windows.Forms.TextBox();
+ this.textBox8 = new System.Windows.Forms.TextBox();
+ this.panel2 = new System.Windows.Forms.Panel();
+ this.GuideBox = new System.Windows.Forms.TextBox();
+ this.label11 = new System.Windows.Forms.Label();
+ this.label12 = new System.Windows.Forms.Label();
+ this.panel4 = new System.Windows.Forms.Panel();
+ this.pictureBox1 = new System.Windows.Forms.PictureBox();
+ this.serialPort1 = new System.IO.Ports.SerialPort(this.components);
+ this.button1 = new System.Windows.Forms.Button();
+ this.tableLayoutPanel1.SuspendLayout();
+ this.panel7.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox4)).BeginInit();
+ this.panel6.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox3)).BeginInit();
+ this.panel5.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).BeginInit();
+ this.panel3.SuspendLayout();
+ this.panel1.SuspendLayout();
+ this.panel2.SuspendLayout();
+ this.panel4.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
+ this.SuspendLayout();
+ //
+ // tableLayoutPanel1
+ //
+ this.tableLayoutPanel1.ColumnCount = 4;
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.Controls.Add(this.textBox5, 1, 9);
+ this.tableLayoutPanel1.Controls.Add(this.textBox1, 0, 9);
+ this.tableLayoutPanel1.Controls.Add(this.panel7, 3, 8);
+ this.tableLayoutPanel1.Controls.Add(this.panel6, 2, 8);
+ this.tableLayoutPanel1.Controls.Add(this.panel5, 3, 2);
+ this.tableLayoutPanel1.Controls.Add(this.label10, 3, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label9, 2, 1);
+ this.tableLayoutPanel1.Controls.Add(this.panel3, 0, 14);
+ this.tableLayoutPanel1.Controls.Add(this.label1, 1, 0);
+ this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 4);
+ this.tableLayoutPanel1.Controls.Add(this.label7, 0, 7);
+ this.tableLayoutPanel1.Controls.Add(this.label8, 1, 7);
+ this.tableLayoutPanel1.Controls.Add(this.textBox2, 0, 10);
+ this.tableLayoutPanel1.Controls.Add(this.textBox3, 0, 11);
+ this.tableLayoutPanel1.Controls.Add(this.textBox4, 0, 12);
+ this.tableLayoutPanel1.Controls.Add(this.textBox6, 1, 10);
+ this.tableLayoutPanel1.Controls.Add(this.textBox7, 1, 11);
+ this.tableLayoutPanel1.Controls.Add(this.textBox8, 1, 12);
+ this.tableLayoutPanel1.Controls.Add(this.panel2, 0, 13);
+ this.tableLayoutPanel1.Controls.Add(this.label11, 2, 7);
+ this.tableLayoutPanel1.Controls.Add(this.label12, 3, 7);
+ this.tableLayoutPanel1.Controls.Add(this.panel4, 2, 2);
+ this.tableLayoutPanel1.Controls.Add(this.button1, 1, 2);
+ this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
+ this.tableLayoutPanel1.Name = "tableLayoutPanel1";
+ this.tableLayoutPanel1.RowCount = 15;
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.666667F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.Size = new System.Drawing.Size(834, 561);
+ this.tableLayoutPanel1.TabIndex = 0;
+ this.tableLayoutPanel1.Paint += new System.Windows.Forms.PaintEventHandler(this.tableLayoutPanel1_Paint);
+ //
+ // textBox5
+ //
+ this.textBox5.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox5.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox5.Location = new System.Drawing.Point(211, 336);
+ this.textBox5.Multiline = true;
+ this.textBox5.Name = "textBox5";
+ this.textBox5.Size = new System.Drawing.Size(202, 31);
+ this.textBox5.TabIndex = 33;
+ this.textBox5.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox5.TextChanged += new System.EventHandler(this.textBox5_TextChanged);
+ //
+ // textBox1
+ //
+ this.textBox1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox1.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox1.Location = new System.Drawing.Point(3, 336);
+ this.textBox1.Multiline = true;
+ this.textBox1.Name = "textBox1";
+ this.textBox1.Size = new System.Drawing.Size(202, 31);
+ this.textBox1.TabIndex = 32;
+ this.textBox1.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox1.TextChanged += new System.EventHandler(this.textBox1_TextChanged);
+ //
+ // panel7
+ //
+ this.panel7.Controls.Add(this.pictureBox4);
+ this.panel7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel7.Location = new System.Drawing.Point(627, 299);
+ this.panel7.Name = "panel7";
+ this.tableLayoutPanel1.SetRowSpan(this.panel7, 5);
+ this.panel7.Size = new System.Drawing.Size(204, 179);
+ this.panel7.TabIndex = 31;
+ this.panel7.Paint += new System.Windows.Forms.PaintEventHandler(this.panel7_Paint);
+ //
+ // pictureBox4
+ //
+ this.pictureBox4.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox4.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox4.Name = "pictureBox4";
+ this.pictureBox4.Size = new System.Drawing.Size(204, 179);
+ this.pictureBox4.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox4.TabIndex = 2;
+ this.pictureBox4.TabStop = false;
+ this.pictureBox4.Click += new System.EventHandler(this.pictureBox4_Click);
+ //
+ // panel6
+ //
+ this.panel6.Controls.Add(this.pictureBox3);
+ this.panel6.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel6.Location = new System.Drawing.Point(419, 299);
+ this.panel6.Name = "panel6";
+ this.tableLayoutPanel1.SetRowSpan(this.panel6, 5);
+ this.panel6.Size = new System.Drawing.Size(202, 179);
+ this.panel6.TabIndex = 30;
+ this.panel6.Paint += new System.Windows.Forms.PaintEventHandler(this.panel6_Paint);
+ //
+ // pictureBox3
+ //
+ this.pictureBox3.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox3.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox3.Name = "pictureBox3";
+ this.pictureBox3.Size = new System.Drawing.Size(202, 179);
+ this.pictureBox3.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox3.TabIndex = 1;
+ this.pictureBox3.TabStop = false;
+ this.pictureBox3.Click += new System.EventHandler(this.pictureBox3_Click);
+ //
+ // panel5
+ //
+ this.panel5.Controls.Add(this.pictureBox2);
+ this.panel5.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel5.Location = new System.Drawing.Point(627, 77);
+ this.panel5.Name = "panel5";
+ this.tableLayoutPanel1.SetRowSpan(this.panel5, 5);
+ this.panel5.Size = new System.Drawing.Size(204, 179);
+ this.panel5.TabIndex = 29;
+ this.panel5.Paint += new System.Windows.Forms.PaintEventHandler(this.panel5_Paint);
+ //
+ // pictureBox2
+ //
+ this.pictureBox2.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox2.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox2.Name = "pictureBox2";
+ this.pictureBox2.Size = new System.Drawing.Size(204, 179);
+ this.pictureBox2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox2.TabIndex = 0;
+ this.pictureBox2.TabStop = false;
+ this.pictureBox2.Click += new System.EventHandler(this.pictureBox2_Click);
+ //
+ // label10
+ //
+ this.label10.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label10.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label10.Location = new System.Drawing.Point(627, 37);
+ this.label10.Name = "label10";
+ this.label10.Size = new System.Drawing.Size(204, 37);
+ this.label10.TabIndex = 25;
+ this.label10.Text = "Detection";
+ this.label10.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label10.Click += new System.EventHandler(this.label10_Click);
+ //
+ // label9
+ //
+ this.label9.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label9.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label9.Location = new System.Drawing.Point(419, 37);
+ this.label9.Name = "label9";
+ this.label9.Size = new System.Drawing.Size(202, 37);
+ this.label9.TabIndex = 24;
+ this.label9.Text = "Real Time";
+ this.label9.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label9.Click += new System.EventHandler(this.label9_Click);
+ //
+ // panel3
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.panel3, 4);
+ this.panel3.Controls.Add(this.ShootingGuideBox);
+ this.panel3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel3.Location = new System.Drawing.Point(3, 521);
+ this.panel3.Name = "panel3";
+ this.panel3.Size = new System.Drawing.Size(828, 37);
+ this.panel3.TabIndex = 23;
+ this.panel3.Paint += new System.Windows.Forms.PaintEventHandler(this.panel3_Paint);
+ //
+ // ShootingGuideBox
+ //
+ this.ShootingGuideBox.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.ShootingGuideBox.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.ShootingGuideBox.ForeColor = System.Drawing.Color.Red;
+ this.ShootingGuideBox.Location = new System.Drawing.Point(0, 0);
+ this.ShootingGuideBox.Multiline = true;
+ this.ShootingGuideBox.Name = "ShootingGuideBox";
+ this.ShootingGuideBox.Size = new System.Drawing.Size(828, 37);
+ this.ShootingGuideBox.TabIndex = 24;
+ this.ShootingGuideBox.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.ShootingGuideBox.TextChanged += new System.EventHandler(this.ShootingGuideBox_TextChanged);
+ //
+ // label1
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.label1, 2);
+ this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label1.Font = new System.Drawing.Font("Arial", 25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Pixel);
+ this.label1.Location = new System.Drawing.Point(211, 0);
+ this.label1.Name = "label1";
+ this.label1.Size = new System.Drawing.Size(410, 37);
+ this.label1.TabIndex = 0;
+ this.label1.Text = "Tongue Image Analyzing System";
+ this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label1.Click += new System.EventHandler(this.label1_Click);
+ //
+ // panel1
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.panel1, 2);
+ this.panel1.Controls.Add(this.button_ConnectTIAS);
+ this.panel1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel1.Location = new System.Drawing.Point(3, 151);
+ this.panel1.Name = "panel1";
+ this.tableLayoutPanel1.SetRowSpan(this.panel1, 2);
+ this.panel1.Size = new System.Drawing.Size(410, 68);
+ this.panel1.TabIndex = 7;
+ this.panel1.Paint += new System.Windows.Forms.PaintEventHandler(this.panel1_Paint);
+ //
+ // button_ConnectTIAS
+ //
+ this.button_ConnectTIAS.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.button_ConnectTIAS.Font = new System.Drawing.Font("Arial", 20F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.button_ConnectTIAS.Location = new System.Drawing.Point(0, 0);
+ this.button_ConnectTIAS.Name = "button_ConnectTIAS";
+ this.button_ConnectTIAS.Size = new System.Drawing.Size(410, 68);
+ this.button_ConnectTIAS.TabIndex = 0;
+ this.button_ConnectTIAS.Text = "START";
+ this.button_ConnectTIAS.UseVisualStyleBackColor = true;
+ this.button_ConnectTIAS.Click += new System.EventHandler(this.button_ConnectTIAS_Click);
+ //
+ // label7
+ //
+ this.label7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label7.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label7.Location = new System.Drawing.Point(3, 259);
+ this.label7.Name = "label7";
+ this.label7.Size = new System.Drawing.Size(202, 37);
+ this.label7.TabIndex = 14;
+ this.label7.Text = "RGB";
+ this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label7.Click += new System.EventHandler(this.label7_Click);
+ //
+ // label8
+ //
+ this.label8.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label8.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label8.Location = new System.Drawing.Point(211, 259);
+ this.label8.Name = "label8";
+ this.label8.Size = new System.Drawing.Size(202, 37);
+ this.label8.TabIndex = 15;
+ this.label8.Text = "L*a*b*";
+ this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label8.Click += new System.EventHandler(this.label8_Click);
+ //
+ // textBox2
+ //
+ this.textBox2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox2.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox2.Location = new System.Drawing.Point(3, 373);
+ this.textBox2.Multiline = true;
+ this.textBox2.Name = "textBox2";
+ this.textBox2.Size = new System.Drawing.Size(202, 31);
+ this.textBox2.TabIndex = 16;
+ this.textBox2.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox2.TextChanged += new System.EventHandler(this.textBox2_TextChanged);
+ //
+ // textBox3
+ //
+ this.textBox3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox3.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox3.Location = new System.Drawing.Point(3, 410);
+ this.textBox3.Multiline = true;
+ this.textBox3.Name = "textBox3";
+ this.textBox3.Size = new System.Drawing.Size(202, 31);
+ this.textBox3.TabIndex = 17;
+ this.textBox3.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox3.TextChanged += new System.EventHandler(this.textBox3_TextChanged);
+ //
+ // textBox4
+ //
+ this.textBox4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox4.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox4.Location = new System.Drawing.Point(3, 447);
+ this.textBox4.Multiline = true;
+ this.textBox4.Name = "textBox4";
+ this.textBox4.Size = new System.Drawing.Size(202, 31);
+ this.textBox4.TabIndex = 18;
+ this.textBox4.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox4.TextChanged += new System.EventHandler(this.textBox4_TextChanged);
+ //
+ // textBox6
+ //
+ this.textBox6.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox6.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox6.Location = new System.Drawing.Point(211, 373);
+ this.textBox6.Multiline = true;
+ this.textBox6.Name = "textBox6";
+ this.textBox6.Size = new System.Drawing.Size(202, 31);
+ this.textBox6.TabIndex = 19;
+ this.textBox6.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox6.TextChanged += new System.EventHandler(this.textBox6_TextChanged);
+ //
+ // textBox7
+ //
+ this.textBox7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox7.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox7.Location = new System.Drawing.Point(211, 410);
+ this.textBox7.Multiline = true;
+ this.textBox7.Name = "textBox7";
+ this.textBox7.Size = new System.Drawing.Size(202, 31);
+ this.textBox7.TabIndex = 20;
+ this.textBox7.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox7.TextChanged += new System.EventHandler(this.textBox7_TextChanged);
+ //
+ // textBox8
+ //
+ this.textBox8.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.textBox8.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.textBox8.Location = new System.Drawing.Point(211, 447);
+ this.textBox8.Multiline = true;
+ this.textBox8.Name = "textBox8";
+ this.textBox8.Size = new System.Drawing.Size(202, 31);
+ this.textBox8.TabIndex = 21;
+ this.textBox8.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.textBox8.TextChanged += new System.EventHandler(this.textBox8_TextChanged);
+ //
+ // panel2
+ //
+ this.tableLayoutPanel1.SetColumnSpan(this.panel2, 4);
+ this.panel2.Controls.Add(this.GuideBox);
+ this.panel2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel2.Location = new System.Drawing.Point(3, 484);
+ this.panel2.Name = "panel2";
+ this.panel2.Size = new System.Drawing.Size(828, 31);
+ this.panel2.TabIndex = 22;
+ this.panel2.Paint += new System.Windows.Forms.PaintEventHandler(this.panel2_Paint);
+ //
+ // GuideBox
+ //
+ this.GuideBox.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.GuideBox.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.GuideBox.ForeColor = System.Drawing.Color.Red;
+ this.GuideBox.Location = new System.Drawing.Point(0, 0);
+ this.GuideBox.Multiline = true;
+ this.GuideBox.Name = "GuideBox";
+ this.GuideBox.Size = new System.Drawing.Size(828, 31);
+ this.GuideBox.TabIndex = 0;
+ this.GuideBox.TextAlign = System.Windows.Forms.HorizontalAlignment.Center;
+ this.GuideBox.TextChanged += new System.EventHandler(this.GuideBox_TextChanged);
+ //
+ // label11
+ //
+ this.label11.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label11.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label11.Location = new System.Drawing.Point(419, 259);
+ this.label11.Name = "label11";
+ this.label11.Size = new System.Drawing.Size(202, 37);
+ this.label11.TabIndex = 26;
+ this.label11.Text = "Segmentation";
+ this.label11.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label11.Click += new System.EventHandler(this.label11_Click);
+ //
+ // label12
+ //
+ this.label12.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label12.Font = new System.Drawing.Font("Arial", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel);
+ this.label12.Location = new System.Drawing.Point(627, 259);
+ this.label12.Name = "label12";
+ this.label12.Size = new System.Drawing.Size(204, 37);
+ this.label12.TabIndex = 27;
+ this.label12.Text = "Selection Region";
+ this.label12.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ this.label12.Click += new System.EventHandler(this.label12_Click);
+ //
+ // panel4
+ //
+ this.panel4.Controls.Add(this.pictureBox1);
+ this.panel4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel4.Location = new System.Drawing.Point(419, 77);
+ this.panel4.Name = "panel4";
+ this.tableLayoutPanel1.SetRowSpan(this.panel4, 5);
+ this.panel4.Size = new System.Drawing.Size(202, 179);
+ this.panel4.TabIndex = 28;
+ this.panel4.Paint += new System.Windows.Forms.PaintEventHandler(this.panel4_Paint);
+ //
+ // pictureBox1
+ //
+ this.pictureBox1.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
+ this.pictureBox1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox1.Location = new System.Drawing.Point(0, 0);
+ this.pictureBox1.Name = "pictureBox1";
+ this.pictureBox1.Size = new System.Drawing.Size(202, 179);
+ this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox1.TabIndex = 0;
+ this.pictureBox1.TabStop = false;
+ this.pictureBox1.Click += new System.EventHandler(this.pictureBox1_Click);
+ //
+ // button1
+ //
+ this.button1.Location = new System.Drawing.Point(211, 77);
+ this.button1.Name = "button1";
+ this.button1.Size = new System.Drawing.Size(75, 23);
+ this.button1.TabIndex = 34;
+ this.button1.Text = "button1";
+ this.button1.UseVisualStyleBackColor = true;
+ this.button1.Click += new System.EventHandler(this.button1_Click);
+ //
+ // Form_TIASAutomaticShootingSystem
+ //
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.None;
+ this.ClientSize = new System.Drawing.Size(834, 561);
+ this.Controls.Add(this.tableLayoutPanel1);
+ this.Name = "Form_TIASAutomaticShootingSystem";
+ this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
+ this.Text = "TIAS BackGround";
+ this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form_TIASAutomaticShootingSystem_FormClosing);
+ this.Load += new System.EventHandler(this.Form_TIASAutomaticShootingSystem_Load);
+ this.tableLayoutPanel1.ResumeLayout(false);
+ this.tableLayoutPanel1.PerformLayout();
+ this.panel7.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox4)).EndInit();
+ this.panel6.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox3)).EndInit();
+ this.panel5.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).EndInit();
+ this.panel3.ResumeLayout(false);
+ this.panel3.PerformLayout();
+ this.panel1.ResumeLayout(false);
+ this.panel2.ResumeLayout(false);
+ this.panel2.PerformLayout();
+ this.panel4.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
+ this.ResumeLayout(false);
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
+ private System.Windows.Forms.Panel panel3;
+ public System.Windows.Forms.TextBox ShootingGuideBox;
+ private System.Windows.Forms.Label label1;
+ private System.Windows.Forms.Panel panel1;
+ private System.Windows.Forms.Button button_ConnectTIAS;
+ private System.Windows.Forms.Label label7;
+ private System.Windows.Forms.Label label8;
+ private System.Windows.Forms.TextBox textBox2;
+ private System.Windows.Forms.TextBox textBox3;
+ private System.Windows.Forms.TextBox textBox4;
+ private System.Windows.Forms.TextBox textBox6;
+ private System.Windows.Forms.TextBox textBox7;
+ private System.Windows.Forms.TextBox textBox8;
+ private System.Windows.Forms.Panel panel2;
+ public System.Windows.Forms.TextBox GuideBox;
+ private System.Windows.Forms.Panel panel7;
+ public System.Windows.Forms.PictureBox pictureBox4;
+ private System.Windows.Forms.Panel panel6;
+ public System.Windows.Forms.PictureBox pictureBox3;
+ private System.Windows.Forms.Panel panel5;
+ public System.Windows.Forms.PictureBox pictureBox2;
+ private System.Windows.Forms.Label label10;
+ private System.Windows.Forms.Label label9;
+ private System.Windows.Forms.Label label11;
+ private System.Windows.Forms.Label label12;
+ private System.Windows.Forms.Panel panel4;
+ private System.IO.Ports.SerialPort serialPort1;
+ private System.Windows.Forms.PictureBox pictureBox1;
+ private System.Windows.Forms.TextBox textBox5;
+ private System.Windows.Forms.TextBox textBox1;
+ private System.Windows.Forms.Button button1;
+ }
+}
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs
new file mode 100644
index 0000000..17cf46d
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.cs
@@ -0,0 +1,983 @@
+using OpenCvSharp;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Windows.Forms;
+
+namespace OperateCamera
+{
+ public partial class Form_TIASAutomaticShootingSystem : Form
+ {
+ // Config
+ const int RADIUS_COLORAREA = 10;
+
+ public static Bitmap bitmap;
+
+ float[] a = new float[17];
+ float[] b = new float[17];
+ float[] c = new float[17];
+ float d;
+ float e;
+ float f;
+ int k;
+ public static bool m_getColor = false;
+ public static Mat m_CalibFrame; //キャリブレーション用画像
+ public static OpenCvSharp.Point[] getRGBpoint = new OpenCvSharp.Point[24];//RGB取得用
+ double[] m_BforLab = new double[24];
+ double[] m_GforLab = new double[24];
+ double[] m_RforLab = new double[24];
+ public static bool m_bCalib;
+ public static OpenCvSharp.Point pt = new OpenCvSharp.Point();//キャリブレーション用のポイント入れ
+ public static Mat m_PointedFrame; //ポイントされたMat
+ public static int click = 0; //クリック回数
+
+ OpenCvSharp.Point P1 = new OpenCvSharp.Point();
+ OpenCvSharp.Point P2 = new OpenCvSharp.Point();
+
+ // Serial communication
+ // Constructor: standard WinForms initialization of the designer-created controls.
+ public Form_TIASAutomaticShootingSystem()
+ {
+ InitializeComponent();
+ }
+
+ // Debug/analysis entry point: loads a sample tongue image and its binary
+ // segmentation mask from fixed relative paths, derives the five landmark
+ // points (Ishikawa 2010 five-click method) and the eight color-sampling
+ // regions, and shows them in debug windows. The color extraction / Lab
+ // conversion stage exists but is commented out below.
+ // NOTE(review): generic type arguments (e.g. List<OpenCvSharp.Point>) appear
+ // to have been stripped from this patch text; confirm against the original file.
+ private void button_ConnectTIAS_Click(object sender, EventArgs e)
+ {
+ // GetImage()
+ var path_oriImg = @"data_lab\1\Shot0001.bmp";
+ var path_calibCsv = @"data_lab\1\Calib.csv";
+ var path_colorMatrixXYZ = "xyz.txt";
+ using (Mat mat_oriImg = Cv2.ImRead(path_oriImg, ImreadModes.Color))
+ {
+ // Process_DeepTIAS()
+ var path_mask = @"data_lab\1\Shot0001_mask.bmp";
+ Mat mat_finalMask = Cv2.ImRead(path_mask, ImreadModes.Grayscale);
+ Cv2.Threshold(mat_finalMask, mat_finalMask, 128, 255, ThresholdTypes.Binary);
+
+ // Build the masked tongue-region image (original pixels where mask != 0).
+ Mat mat_maskedImg = new Mat();
+ mat_oriImg.CopyTo(mat_maskedImg, mat_finalMask);
+
+ // Five-point click method (Ishikawa 2010).
+ List list_5points = Get5points(mat_finalMask);
+ Show5point(mat_oriImg.Clone(), list_5points);
+
+ // Obtain the eight color-sampling regions.
+ List list_8area = Get8area(list_5points);
+ Show8area(mat_oriImg.Clone(), list_8area);
+
+ // DEBUG
+ //List list_5points_002 = new List() {
+ // new OpenCvSharp.Point(230, 628),
+ // new OpenCvSharp.Point(704, 572),
+ // new OpenCvSharp.Point(642, 782),
+ // new OpenCvSharp.Point(360, 808),
+ // new OpenCvSharp.Point(500, 920)
+ //};
+ //List list_8area_002 = Get8area(list_5points_002);
+ //Show8area(mat_oriImg.Clone(), list_8area_002);
+
+
+ /*
+ // Color extraction
+ List list_8Bgr = Get8colors(mat_maskedImg, list_8area);
+
+ // Color conversion (RGB -> XYZ -> Lab)
+ List list_8Lab = Calc8Lab(list_8Bgr, path_calibCsv, path_colorMatrixXYZ);
+
+ // Show the colors
+ Show8colors(list_8Bgr, list_8Lab);
+
+ // Save
+ // Write8colors(list_8Bgr, list_8Lab);
+ */
+
+
+ // Release native Mat memory promptly.
+ mat_finalMask.Dispose();
+ mat_maskedImg.Dispose();
+ GC.Collect();
+ }
+ System.Threading.Thread.Sleep(100);
+ }
+
+ // Derives five landmark points on the tongue contour from the binary mask:
+ // { left edge, left apex-line point, bottom tip, right apex-line point,
+ // right edge }. Three candidate methods are computed for comparison; the
+ // returned list mixes method 2 (left/right = farthest contour points from
+ // the centroid) with method 3 (bottom = contour point on the fitted center
+ // line). Opens a debug window ("dst") as a side effect.
+ private List Get5points(Mat mat_finalMask)
+ {
+ List li_dst;
+
+ // Color copy of the mask, used only for debug drawing.
+ var mat_dst = mat_finalMask.Clone();
+ Cv2.CvtColor(mat_dst, mat_dst, ColorConversionCodes.GRAY2BGR);
+
+ // Pixel coordinates of the (non-zero) tongue region in the mask.
+ var mat_nonZeroCoordinates = new Mat();
+ Cv2.FindNonZero(mat_finalMask, mat_nonZeroCoordinates);
+
+ // Collect every tongue-region point's X and Y into parallel lists.
+ var list_X = new List();
+ var list_Y = new List();
+ for (int i = 0; i < mat_nonZeroCoordinates.Total(); i++)
+ {
+ var x = mat_nonZeroCoordinates.At(i).X;
+ var y = mat_nonZeroCoordinates.At(i).Y;
+ list_X.Add(x);
+ list_Y.Add(y);
+ }
+
+ /// method1
+ // Find the extreme points (first hit in raster order from top-left).
+ var p_top = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Min()));
+ var p_bottom = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Max()));
+ var p_left = mat_nonZeroCoordinates.At(list_X.IndexOf(list_X.Min()));
+ var p_right = mat_nonZeroCoordinates.At(list_X.IndexOf(list_X.Max()));
+
+ // Y coordinate of the tongue-apex band (ratio hard-coded for now).
+ //var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8));
+ //var p_apex_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Min());
+ //var p_apex_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Max());
+
+ // Debug drawing for method 1 (disabled).
+ //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right, 20, new Scalar(255, 255, 0), -1);
+
+ /// method2
+ // Center of gravity (centroid) of the mask.
+ var moments = Cv2.Moments(mat_finalMask, true);
+ var moment_x = moments.M10 / moments.M00;
+ var moment_y = moments.M01 / moments.M00;
+
+ // Contour coordinates; keep the largest-area outer contour.
+ OpenCvSharp.Point[][] contours;
+ HierarchyIndex[] hierarchy;
+ Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
+ var maxArea = contours.Select(n => Cv2.ContourArea(n)).Max();
+ var maxContour = contours.Where(n => Cv2.ContourArea(n) == maxArea).ToList()[0];
+
+ // Centroid-to-contour distances, split into upper-left / upper-right /
+ // lower sectors; the farthest contour point in each sector is adopted.
+ double maxDistance_lefttop = 0.0;
+ double maxDistance_righttop = 0.0;
+ double maxDistance_bottom = 0.0;
+ var p_left_2 = new OpenCvSharp.Point();
+ var p_right_2 = new OpenCvSharp.Point();
+ var p_bottom_2 = new OpenCvSharp.Point();
+ // Adopt the point with the greatest distance in each sector.
+ for (int i = 0; i < maxContour.Count(); i++)
+ {
+ // Above the centroid
+ if (maxContour[i].Y < moment_y)
+ {
+ // Above and to the left of the centroid
+ if (maxContour[i].X < moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_lefttop)
+ {
+ maxDistance_lefttop = distance;
+ p_left_2 = maxContour[i];
+ }
+
+ }
+ // Above and to the right of the centroid
+ if (maxContour[i].X >= moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_righttop)
+ {
+ maxDistance_righttop = distance;
+ p_right_2 = maxContour[i];
+ }
+ }
+ }
+ else
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_bottom)
+ {
+ maxDistance_bottom = distance;
+ p_bottom_2 = maxContour[i];
+ }
+ }
+ }
+
+ // Y coordinate of the tongue-apex band (ratio hard-coded for now).
+ var y_top_avg_ = (p_left_2.Y + p_right_2.Y) / 2.0;
+ var y_apex_2 = (int)(y_top_avg_ + ((p_bottom_2.Y - y_top_avg_) * 0.53));
+ var p_apex_left_2 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_2).Min());
+ var p_apex_right_2 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_2).Max());
+
+ // Debug drawing for method 2.
+ Cv2.Circle(mat_dst, p_left_2, 20, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(mat_dst, p_right_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, new OpenCvSharp.Point(moment_x, moment_y), 10, new Scalar(200, 60, 200), -1); // centroid
+ //Cv2.DrawContours(mat_dst, maxContours, 0, new Scalar(0, 255, 255), 4); // contour
+
+ /// method3
+ // Midpoint of the mask width at the level halfway between centroid and top.
+ var y_topToCoG = (int)(moment_y + p_top.Y) / 2;
+ var p_topToCoG_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_topToCoG).Min());
+ var p_topToCoG_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_topToCoG).Max());
+ var p_topToCoG_center = new OpenCvSharp.Point((int)((p_topToCoG_left.X + p_topToCoG_right.X) / 2), y_topToCoG);
+ // Midpoint of the mask width at the level halfway between centroid and bottom.
+ var y_bottomToCoG = (int)(moment_y + p_bottom.Y) / 2;
+ var p_bottomToCoG_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_bottomToCoG).Min());
+ var p_bottomToCoG_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_bottomToCoG).Max());
+ var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG);
+
+ // Extend the line through the two midpoints across the whole image;
+ // the vertical case is handled separately to avoid division by zero.
+ var lefty = new OpenCvSharp.Point();
+ var righty = new OpenCvSharp.Point();
+ if (p_topToCoG_center.X == p_bottomToCoG_center.X)
+ {
+ lefty.X = p_topToCoG_center.X;
+ lefty.Y = 0;
+ righty.X = p_topToCoG_center.X;
+ righty.Y = mat_finalMask.Rows - 1;
+ }
+ else
+ {
+ // Straight-line fitting
+ var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.01);
+ lefty.X = 0;
+ lefty.Y = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1);
+ righty.X = mat_finalMask.Cols - 1;
+ righty.Y = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1);
+
+ }
+ // Contour points lying on the center line (rasterize both, then AND).
+ var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ Cv2.Line(mat_centerline, righty, lefty, 3);
+ Cv2.DrawContours(mat_contour, contours, 0, 3);
+ Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and);
+ Cv2.FindNonZero(mat_and, mat_and);
+ // Lowest intersection = tongue bottom tip for method 3.
+ var bottom_y_3 = 0;
+ var bottom_x_3 = 0;
+ for (int i = 0; i < mat_and.Total(); i++)
+ {
+ var x = mat_and.At(i).X;
+ var y = mat_and.At(i).Y;
+ if (bottom_y_3 < y)
+ {
+ bottom_y_3 = y;
+ bottom_x_3 = x;
+ }
+ }
+ var p_bottom_3 = new OpenCvSharp.Point(bottom_x_3, bottom_y_3);
+
+ // Y coordinate of the tongue-apex band (ratio hard-coded for now).
+ // NOTE(review): the next line mixes method2's y_top_avg_ with y_top_avg_3.
+ // They hold the same value, but y_top_avg_3 was probably intended.
+ var y_top_avg_3 = (p_left_2.Y + p_right_2.Y) / 2.0;
+ var y_apex_3 = (int)(y_top_avg_ + ((p_bottom_3.Y - y_top_avg_3) * 0.57));
+ var p_apex_left_3 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_3).Min());
+ var p_apex_right_3 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_3).Max());
+
+ //Cv2.Circle(mat_dst, p_topToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ Cv2.Line(mat_dst, lefty, righty, new Scalar(0, 100, 255)); // fitted line
+ //Cv2.Circle(mat_dst, p_apex_left_3, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Circle(mat_dst, p_apex_right_3, 20, new Scalar(0, 0, 255), -1);
+ Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1);
+
+ // DEBUG
+ // NOTE(review): XOR of WindowMode flags looks unintended; confirm whether
+ // KeepRatio alone was meant here.
+ Cv2.NamedWindow("dst", WindowMode.KeepRatio ^ WindowMode.AutoSize);
+ Cv2.ImShow("dst", mat_dst.Resize(new OpenCvSharp.Size((int)mat_dst.Width * 0.5, (int)mat_dst.Height * 0.5)));
+
+ // Output: left/right from method 2, apex and bottom from method 3.
+ li_dst = new List { p_left_2, p_apex_left_3, p_bottom_3, p_apex_right_3, p_right_2 };
+ //li_dst = new List { p_left_2, p_apex_left_2, p_bottom_2, p_apex_right_2, p_right_2 };
+
+ // Dispose native resources.
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+ return li_dst;
+ }
+
+ // Computes the centers of the eight color-sampling regions from the five
+ // landmark points (indexed 0..4: left, apex-left, bottom, apex-right,
+ // right — see diagrams below). Two regions per diagram, left/right
+ // symmetric, interpolated between landmark points and their midpoints.
+ private List Get8area(List list_5points)
+ {
+ var li_dst = new List();
+ // ROI mask 1: regions near the apex-left / apex-right landmarks.
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1|● ●|3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points1 = new int[2, 3]{{0, 2, 1}, {4, 2, 3}};
+ var area1 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i ++)
+ {
+ // Midpoint of (corner, bottom), then midpoint with the apex point.
+ var cx1 = (list_5points[points1[i, 0]].X + list_5points[points1[i, 1]].X) / 2;
+ var cy1 = (list_5points[points1[i, 0]].Y + list_5points[points1[i, 1]].Y) / 2;
+ area1.X = (list_5points[points1[i, 2]].X + cx1) / 2;
+ area1.Y = (list_5points[points1[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area1);
+ }
+
+ // ROI mask 2: regions near the top edge.
+ // 0____________ 4
+ // | ● ● |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points2 = new int[2, 4]{{0, 3, 0, 4}, {4, 1, 4, 0}};
+ var area2 = new OpenCvSharp.Point();
+ for (int i = 0; i< 2; i++)
+ {
+ // Quarter-point toward the opposite apex, averaged with the
+ // midpoint of the top edge.
+ float cx1 = list_5points[points2[i, 0]].X + (list_5points[points2[i, 1]].X - list_5points[points2[i, 0]].X) / 4;
+ float cy1 = list_5points[points2[i, 0]].Y + (list_5points[points2[i, 1]].Y - list_5points[points2[i, 0]].Y) / 4;
+ float cx2 = (list_5points[points2[i, 2]].X + list_5points[points2[i, 3]].X) / 2;
+ float cy2 = (list_5points[points2[i, 2]].Y + list_5points[points2[i, 3]].Y) / 2;
+ area2.X = (int)(cx1 + cx2) / 2;
+ area2.Y = (int)(cy1 + cy2) / 2;
+ li_dst.Add(area2);
+ }
+
+ // ROI mask 3: regions at mid-height, just inside the side edges.
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| ● ● |3
+ // 舌 /
+ // 舌_______/
+ // 2
+ var points3 = new int[2, 3]{{0, 3, 2}, {4, 1, 2}};
+ var area3 = new OpenCvSharp.Point();
+ for (int i = 0; i< 2; i ++)
+ {
+ // Quarter-point toward the opposite apex, averaged with the bottom.
+ float cx1 = list_5points[points3[i, 0]].X + (list_5points[points3[i, 1]].X - list_5points[points3[i, 0]].X) / 4;
+ float cy1 = list_5points[points3[i, 0]].Y + (list_5points[points3[i, 1]].Y - list_5points[points3[i, 0]].Y) / 4;
+ area3.X = (int)(list_5points[points3[i, 2]].X + cx1) / 2;
+ area3.Y = (int)(list_5points[points3[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area3);
+ }
+
+ // ROI mask 4: regions near the bottom tip (7/8 of the way to point 2).
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_●_●_/
+ // 2
+ var points4 = new int[2, 2]{{0, 2}, {4, 2}};
+ var area4 = new OpenCvSharp.Point();
+ for (int i = 0; i< 2; i ++)
+ {
+ area4.X = list_5points[points4[i, 0]].X + (list_5points[points4[i, 1]].X - list_5points[points4[i, 0]].X) * 7 / 8;
+ area4.Y = list_5points[points4[i, 0]].Y + (list_5points[points4[i, 1]].Y - list_5points[points4[i, 0]].Y) * 7 / 8;
+ li_dst.Add(area4);
+ }
+
+ // TODO: if any of the 8 areas falls outside the tongue region, correct it.
+ //bool isOnTongueArea = DiscriminateOnTongueArea(li_dst);
+ //if (isOnTongueArea)
+ //{
+
+ //}
+
+ return li_dst;
+ }
+
+ // Extracts one mean BGR color per sampling region: for each center point a
+ // filled disc of radius RADIUS_COLORAREA is used as a mask over the
+ // masked tongue image, and Cv2.Mean over that disc is recorded.
+ private List Get8colors(Mat mat_maskedImg, List list_8area)
+ {
+ List li_dst = new List();
+ for (int i = 0; i < list_8area.Count(); i++)
+ {
+ using (Mat mat_colorRoi = Mat.Zeros(mat_maskedImg.Size(), MatType.CV_8UC1))
+ {
+ // Mask image marking the disc to sample.
+ Cv2.Circle(mat_colorRoi, list_8area[i], RADIUS_COLORAREA, 255, -1);
+
+ // Mean color over the disc.
+ var color = Cv2.Mean(mat_maskedImg, mat_colorRoi);
+ li_dst.Add(color);
+ }
+ }
+ return li_dst;
+ }
+
+ // Debug helper: draws the five landmark points as filled blue circles on a
+ // copy of the original image and shows it half-size in window "dst_point".
+ private void Show5point(Mat oriImg, List list_5point)
+ {
+ Cv2.Circle(oriImg, list_5point[0], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[1], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[2], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[3], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[4], 10, new Scalar(255, 0, 0), -1);
+ Cv2.NamedWindow("dst_point", WindowMode.AutoSize);
+ Cv2.ImShow("dst_point", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
+ }
+
+ // Debug helper: draws the eight sampling-region centers, color-coded per
+ // region pair, and shows the image half-size in window "dst_".
+ private void Show8area(Mat oriImg, List list_8area)
+ {
+ Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 0), -1);
+ Cv2.NamedWindow("dst_", WindowMode.AutoSize);
+ Cv2.ImShow("dst_", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
+ }
+
+ // Converts the 8 sampled BGR means to CIELab: reads the calibration-chart
+ // RGB values, solves the RGB->XYZ transform (written to translateMatrix.csv
+ // by CalcTransMat), re-reads the coefficients into a/b/c, then converts
+ // each sample via CalcLab.
+ private List Calc8Lab(List list_8colors, string path_calibCsv, string path_colorMatrixXYZ)
+ {
+ GetColorMatrixRGB(path_calibCsv);
+ CalcTransMat(path_colorMatrixXYZ);
+
+
+
+
+ Read_TranslationMatrix();
+ var li_dst = new List();
+ for (int i = 0; i < list_8colors.Count(); i++)
+ {
+ var LabValue = CalcLab(list_8colors[i]);
+ li_dst.Add(LabValue);
+ }
+ return li_dst;
+ }
+
+ // Loads the calibration chart CSV (columns: no, R, G, B; one header row)
+ // into the m_RforLab / m_GforLab / m_BforLab fields.
+ // NOTE(review): the arrays hold 24 entries but the loop is unbounded — a
+ // CSV with more than 24 data rows would throw IndexOutOfRangeException.
+ // The streams are also not wrapped in using blocks; they leak on exception.
+ private void GetColorMatrixRGB(string path_calibCsv)
+ {
+ System.Text.Encoding encoding = GetType(path_calibCsv);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_calibCsv, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding);
+ string strLine = "";
+ string[] aryLine;
+ sr3.ReadLine(); // skip the header row
+ int i = 0;
+ while ((strLine = sr3.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ var no = Convert.ToSingle(aryLine[0]);
+ float r = Convert.ToSingle(aryLine[1]);
+ float g = Convert.ToSingle(aryLine[2]);
+ float b = Convert.ToSingle(aryLine[3]);
+ m_BforLab[i] = b;
+ m_GforLab[i] = g;
+ m_RforLab[i] = r;
+ i++;
+ }
+ sr3.Close();
+ fs3.Close();
+ }
+
+ // Fits a 3rd-order polynomial RGB->XYZ color transform over the 24 chart
+ // patches. Builds a 24x17 design matrix (16 polynomial terms + constant 1
+ // from the Scalar(1.0) initialization), solves it against the 24x4 XYZ
+ // target via SVD least squares, and saves the 17x3 coefficient matrix to
+ // translateMatrix.csv (read back by Read_TranslationMatrix).
+ private void CalcTransMat(string path_colorMatrixXYZ)
+ {
+ Mat RGBmat = new Mat(24, 17, MatType.CV_64F, new Scalar(1.0f));
+ Mat XYZmat = new Mat(24, 4, MatType.CV_64F, new Scalar(1.0f));
+
+ // Read the color chart's reference XYZ values (space-separated, 24 rows).
+ string line;
+ string[] split = new string[3];
+ double valueX = 0, valueY = 0, valueZ = 0;
+ System.Text.Encoding encoding3 = GetType(path_colorMatrixXYZ);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_colorMatrixXYZ, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding3);
+ for (int i = 0; i < 24; i++)
+ {
+ line = sr3.ReadLine();
+ split = line.Split(' ');
+ valueX = Convert.ToDouble(split[0]);
+ valueY = Convert.ToDouble(split[1]);
+ valueZ = Convert.ToDouble(split[2]);
+ XYZmat.Set(i, 0, valueX);
+ XYZmat.Set(i, 1, valueY);
+ XYZmat.Set(i, 2, valueZ);
+ }
+ sr3.Close();
+ fs3.Close();
+
+ // Fill the polynomial design matrix.
+ for (int j = 0; j < 24; j++)
+ {
+ // Term order:
+ // R,G,B,RG,RB,GB,R^2,G^2,B^2
+ // R^2B,R^2G,G^2R,G^2B,B^2R,B^2G,RGB
+ RGBmat.Set(j, 2, m_BforLab[j]);
+ RGBmat.Set(j, 1, m_GforLab[j]);
+ RGBmat.Set(j, 0, m_RforLab[j]);
+ // Second-order terms
+ RGBmat.Set(j, 3, m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 4, m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 5, m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 6, m_RforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 7, m_GforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 8, m_BforLab[j] * m_BforLab[j]);
+ // Third-order terms
+ RGBmat.Set(j, 9, m_RforLab[j] * m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 10, m_RforLab[j] * m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 11, m_GforLab[j] * m_GforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 12, m_GforLab[j] * m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 13, m_BforLab[j] * m_BforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 14, m_BforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 15, m_RforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ }
+ // Solve for the transformation matrix (least squares via SVD).
+ Mat translation = new Mat();
+ var canSolve = Cv2.Solve(RGBmat, XYZmat, translation, DecompTypes.SVD);
+
+ // Save: truncate the CSV then write 17 rows of 3 coefficients each.
+ string CSVfilename = "translateMatrix.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ string s2 = "";
+ for (int i = 0; i <= 16; i++)
+ {
+ for (int j = 0; j <= 2; j++)
+ {
+ double s1 = translation.At(i, j);
+ s2 += s1.ToString() + ",";
+ }
+ CSV_data.WriteLine(s2);
+ s2 = "";
+ }
+ CSV_data.Close();
+ }
+
+ // Re-reads the RGB->XYZ coefficient matrix written by CalcTransMat from
+ // translateMatrix.csv into the a (X), b (Y) and c (Z) coefficient arrays;
+ // k ends up holding the number of rows read.
+ private void Read_TranslationMatrix()
+ {
+ // Re-read the transformation matrix.
+ System.Text.Encoding encoding = GetType("translateMatrix.csv");
+ System.IO.FileStream fs1 = new System.IO.FileStream("translateMatrix.csv", System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr1 = new System.IO.StreamReader(fs1, encoding);
+ for (int i = 0; i < 17; i++)
+ {
+ a[i] = 0;
+ b[i] = 0;
+ c[i] = 0;
+ }
+ k = 0;
+ string strLine = "";
+ string[] aryLine = null;
+ while ((strLine = sr1.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ a[k] = Convert.ToSingle(aryLine[0]);
+ b[k] = Convert.ToSingle(aryLine[1]);
+ c[k] = Convert.ToSingle(aryLine[2]);
+ k++;
+ }
+ sr1.Close();
+ fs1.Close();
+ }
+
+ // Converts one mean BGR sample to CIELab: applies the fitted 3rd-order
+ // polynomial (coefficient arrays a/b/c loaded by Read_TranslationMatrix)
+ // to get XYZ, clamps negatives to zero, then applies the Lab formula with
+ // the hard-coded white point below. Returns Scalar(L, a, b).
+ private OpenCvSharp.Scalar CalcLab(OpenCvSharp.Scalar BgrValue)
+ {
+ // CIELab computation
+ // Convert to XYZ via the polynomial (constant term is index 16).
+ double X, Y, Z;
+ double m_B = BgrValue.Val0;
+ double m_G = BgrValue.Val1;
+ double m_R = BgrValue.Val2;
+ X = m_R * a[0] + m_G * a[1] + m_B * a[2]
+ + a[3] * m_R * m_G + a[4] * m_R * m_B + a[5] * m_G * m_B
+ + a[6] * m_R * m_R + a[7] * m_G * m_G + a[8] * m_B * m_B
+ + a[9] * m_R * m_R * m_B + a[10] * m_R * m_R * m_G
+ + a[11] * m_G * m_G * m_R + a[12] * m_G * m_G * m_B
+ + a[13] * m_B * m_B * m_R + a[14] * m_B * m_B * m_G
+ + a[15] * m_R * m_G * m_B
+ + a[16];
+ Y = m_R * b[0] + m_G * b[1] + m_B * b[2]
+ + b[3] * m_R * m_G + b[4] * m_R * m_B + b[5] * m_G * m_B
+ + b[6] * m_R * m_R + b[7] * m_G * m_G + b[8] * m_B * m_B
+ + b[9] * m_R * m_R * m_B + b[10] * m_R * m_R * m_G
+ + b[11] * m_G * m_G * m_R + b[12] * m_G * m_G * m_B
+ + b[13] * m_B * m_B * m_R + b[14] * m_B * m_B * m_G
+ + b[15] * m_R * m_G * m_B
+ + b[16];
+ Z = m_R * c[0] + m_G * c[1] + m_B * c[2]
+ + c[3] * m_R * m_G + c[4] * m_R * m_B + c[5] * m_G * m_B
+ + c[6] * m_R * m_R + c[7] * m_G * m_G + c[8] * m_B * m_B
+ + c[9] * m_R * m_R * m_B + c[10] * m_R * m_R * m_G
+ + c[11] * m_G * m_G * m_R + c[12] * m_G * m_G * m_B
+ + c[13] * m_B * m_B * m_R + c[14] * m_B * m_B * m_G
+ + c[15] * m_R * m_G * m_B
+ + c[16];
+ if (X < 0) X = 0;
+ if (Y < 0) Y = 0;
+ if (Z < 0) Z = 0;
+
+ // Convert to Lab (fixed formula).
+ // NOTE(review): this uses the pure cube-root form; standard CIELAB has a
+ // piecewise-linear branch for small X/Xn, Y/Yn, Z/Zn — confirm intended.
+ // TIAS light source (measured 2020-10-23)
+ // double Xn = 99.5829;
+ // double Yn = 100.0;
+ // double Zn = 57.1402;
+
+ // Values used by Tang / Takeda (probably an older TIAS light-source measurement)
+ //double Xn = 102.07;
+ //double Yn = 100.0;
+ //double Zn = 79.41;
+
+ // Values used by Ishikawa / Prof. Nakaguchi (artificial-sun lamp?)
+ double Xn = 92.219;
+ double Yn = 100.0;
+ double Zn = 95.965;
+ double cL = 116.0 * Math.Pow((Y / Yn), 1.0 / 3.0) - 16.0;
+ double ca = 500.0 * (Math.Pow((X / Xn), 1.0 / 3.0) - Math.Pow((Y / Yn), 1.0 / 3.0));
+ double cb = 200.0 * (Math.Pow((Y / Yn), 1.0 / 3.0) - Math.Pow((Z / Zn), 1.0 / 3.0));
+
+ return new OpenCvSharp.Scalar(cL, ca, cb);
+ }
+
+ // Displays the sampled colors in the form's text boxes, two regions per
+ // box: BGR means in textBox1-4, Lab values in textBox5-8. Marshalled onto
+ // the UI thread via Invoke.
+ private void Show8colors(List list_8Bgr, List list_8Lab)
+ {
+ Invoke((MethodInvoker)delegate
+ {
+ textBox1.Text = list_8Bgr[0].ToString() + " " + list_8Bgr[1].ToString();
+ textBox2.Text = list_8Bgr[2].ToString() + " " + list_8Bgr[3].ToString();
+ textBox3.Text = list_8Bgr[4].ToString() + " " + list_8Bgr[5].ToString();
+ textBox4.Text = list_8Bgr[6].ToString() + " " + list_8Bgr[7].ToString();
+ textBox5.Text = list_8Lab[0].ToString() + " " + list_8Lab[1].ToString();
+ textBox6.Text = list_8Lab[2].ToString() + " " + list_8Lab[3].ToString();
+ textBox7.Text = list_8Lab[4].ToString() + " " + list_8Lab[5].ToString();
+ textBox8.Text = list_8Lab[6].ToString() + " " + list_8Lab[7].ToString();
+ });
+ }
+
+ // Writes the 8 regions' RGB and Lab values to CalculatedLab.csv
+ // (truncating any existing file), one row per region.
+ // NOTE(review): the header "Area,R,G,B,L,a,B" repeats "B"; the last column
+ // is presumably Lab b* — confirm before changing the string.
+ private void Write8colors(List list_8Bgr, List list_8Lab)
+ {
+ // Truncate/create the output file.
+ string CSVfilename = "CalculatedLab.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ CSV_data.WriteLine("Area,R,G,B,L,a,B");
+ for (int i = 0; i < list_8Bgr.Count(); i++)
+ {
+ // Scalar stores BGR, so Val2/Val1/Val0 emit R,G,B order.
+ string str = (i + 1).ToString() + ",";
+ str +=
+ list_8Bgr[i].Val2.ToString("0.0000") + "," +
+ list_8Bgr[i].Val1.ToString("0.0000") + "," +
+ list_8Bgr[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val1.ToString("0.0000") + "," +
+ list_8Lab[i].Val2.ToString("0.0000");
+ CSV_data.WriteLine(str);
+ }
+ CSV_data.Close();
+ }
+
+ // Returns the indices of every occurrence of target in li (empty list when
+ // absent; a message is printed in that case, preserving the original
+ // behavior for missing values).
+ // Fix: the original tested List.IndexOf's result with "> 0", so a match at
+ // index 0 was treated as "not found" and dropped — the correct sentinel
+ // for IndexOf is -1, so the test must be ">= 0". The generic argument
+ // (List<int>), stripped from this patch text, is also restored.
+ private List<int> IndexOfAll(List<int> li, int target)
+ {
+ var li_num = new List<int>();
+ // First occurrence; -1 means the value is absent.
+ int num = li.IndexOf(target);
+ while (num >= 0)
+ {
+ li_num.Add(num);
+ // Continue searching from the position after the last hit.
+ num = li.IndexOf(target, num + 1);
+ }
+ if (li_num.Count == 0)
+ {
+ Console.WriteLine("{0}は見つかりませんでした", target);
+ }
+ return li_num;
+ }
+
+
+ // Detects the text encoding of the file at FILE_NAME by opening it and
+ // delegating to the FileStream overload (which reads the whole file).
+ // NOTE(review): the name GetType hides object.GetType; consider renaming
+ // (e.g. DetectEncoding) in a future change.
+ public static System.Text.Encoding GetType(string FILE_NAME)
+ {
+ System.IO.FileStream fs = new System.IO.FileStream(FILE_NAME, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.Text.Encoding r = GetType(fs);
+ fs.Close();
+ return r;
+ }
+
+ // Detects the text encoding of the stream's content: UTF-8 (via BOM or the
+ // IsUTF8Bytes heuristic over the whole content), UTF-16 BE/LE via BOM,
+ // otherwise the system default encoding. Note: reads the entire stream
+ // into memory, as the heuristic needs the full content.
+ // Fixes: (1) the original indexed ss[0..2] unconditionally, throwing
+ // IndexOutOfRangeException for streams shorter than 3 bytes; (2) the
+ // roundabout int.TryParse(fs.Length.ToString(), out i) is replaced by a
+ // direct cast; (3) the unused local BOM arrays are removed.
+ public static System.Text.Encoding GetType(System.IO.FileStream fs)
+ {
+ System.Text.Encoding reVal = System.Text.Encoding.Default;
+
+ System.IO.BinaryReader r = new System.IO.BinaryReader(fs, System.Text.Encoding.Default);
+ byte[] ss = r.ReadBytes((int)fs.Length);
+ // BOM checks only apply when at least 3 bytes exist.
+ bool canCheckBom = ss.Length >= 3;
+ if (IsUTF8Bytes(ss) || (canCheckBom && ss[0] == 0xEF && ss[1] == 0xBB && ss[2] == 0xBF))
+ {
+ reVal = System.Text.Encoding.UTF8;
+ }
+ else if (canCheckBom && ss[0] == 0xFE && ss[1] == 0xFF && ss[2] == 0x00)
+ {
+ reVal = System.Text.Encoding.BigEndianUnicode;
+ }
+ else if (canCheckBom && ss[0] == 0xFF && ss[1] == 0xFE && ss[2] == 0x41)
+ {
+ reVal = System.Text.Encoding.Unicode;
+ }
+ r.Close();
+ return reVal;
+ }
+
+ // Heuristic check that the byte array is well-formed UTF-8 (without BOM):
+ // for each lead byte >= 0x80 it counts the expected sequence length from
+ // the high bits, then verifies each continuation byte matches 10xxxxxx.
+ // Returns true for pure ASCII and empty input; throws if the data ends in
+ // the middle of a multi-byte sequence.
+ private static bool IsUTF8Bytes(byte[] data)
+ {
+ // Remaining bytes expected in the current sequence (1 = at a lead byte).
+ int charByteCounter = 1;
+ byte curByte;
+ for (int i = 0; i < data.Length; i++)
+ {
+ curByte = data[i];
+ if (charByteCounter == 1)
+ {
+ if (curByte >= 0x80)
+ {
+ // Count leading 1-bits to get the sequence length.
+ while (((curByte <<= 1) & 0x80) != 0)
+ {
+ charByteCounter++;
+ }
+ // A lone continuation byte (1) or an over-long lead (>6) is invalid.
+ if (charByteCounter == 1 || charByteCounter > 6)
+ {
+ return false;
+ }
+ }
+ }
+ else
+ {
+ // Continuation bytes must match 10xxxxxx.
+ if ((curByte & 0xC0) != 0x80)
+ {
+ return false;
+ }
+ charByteCounter--;
+ }
+ }
+ // Data ended inside a multi-byte sequence (truncated input).
+ if (charByteCounter > 1)
+ {
+ throw new Exception("Error");
+ }
+ return true;
+ }
+
+ // FormClosing: brief pause before shutdown (no other cleanup here).
+ private void Form_TIASAutomaticShootingSystem_FormClosing(object sender, FormClosingEventArgs e)
+ {
+ System.Threading.Thread.Sleep(100);
+ }
+
+ // --- The designer-wired handlers below are intentionally empty stubs. ---
+ private void Form_TIASAutomaticShootingSystem_Load(object sender, EventArgs e)
+ {
+ }
+
+ private void textBox5_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox1_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel7_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox4_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel6_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox3_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel5_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox2_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label10_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label9_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel3_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void ShootingGuideBox_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label1_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel1_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void tableLayoutPanel1_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void label7_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label8_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox2_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox3_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox4_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox6_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox7_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void textBox8_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel2_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void GuideBox_TextChanged(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label11_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void label12_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ private void panel4_Paint(object sender, PaintEventArgs e)
+ {
+
+ }
+
+ private void pictureBox1_Click(object sender, EventArgs e)
+ {
+
+ }
+
+ // One-off batch tool: reads 101 rows of manually clicked 5-point
+ // coordinates from a hard-coded CSV, sorts each row's points by X, derives
+ // the 8 sampling-region centers via Get8area, and writes both point sets
+ // to a second hard-coded CSV.
+ // NOTE(review): absolute developer-machine paths and the fixed 101-row
+ // count make this developer-only; sr/sw are also not in using blocks, so
+ // they leak if a row fails to parse — confirm acceptable for a debug tool.
+ private void button1_Click(object sender, EventArgs e)
+ {
+ var path_csv = @"D:\kei2\Study\Tongue\TongueColorAnalysis\automaze5click\Nakaguchi_clicked.csv";
+ var path_out = @"D:\kei2\Study\Tongue\TongueColorAnalysis\automaze5click\Nakaguchi_area.csv";
+ var sr = new StreamReader(path_csv);
+ var sw = new StreamWriter(path_out);
+
+ // skip header
+ var head = sr.ReadLine();
+
+ // One row per subject: id, name, then x,y pairs.
+ for (int i = 0; i < 101; i++)
+ {
+ var line = sr.ReadLine();
+ var words = line.Split(',');
+ sw.Write(words[0] + "," + words[1] + ",");
+ var list_point = new List();
+
+ for (int j = 2; j < words.Length; j = j + 2)
+ {
+ var p = new OpenCvSharp.Point(int.Parse(words[j]), int.Parse(words[j + 1]));
+ list_point.Add(p);
+ }
+ var list_Psorted = list_point.OrderBy(n => n.X).ToList();
+ var list_area = Get8area(list_Psorted);
+ foreach (var n in list_Psorted)
+ {
+ sw.Write(n.X + "," + n.Y + ",");
+ }
+ foreach (var n in list_area)
+ {
+ sw.Write(n.X + "," + n.Y + ",");
+ }
+ sw.WriteLine();
+ }
+
+ // dispose
+ sw.Close();
+ sr.Close();
+ }
+ }
+}
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx
new file mode 100644
index 0000000..5f4899d
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Form_TIASAutomaticShootingSystem.resx
@@ -0,0 +1,126 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ 17, 17
+
+
+ 56
+
+
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs
new file mode 100644
index 0000000..4df843f
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Program.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+using System.IO.Ports;
+using System.Windows.Forms;
+using System.Linq;
+
+namespace OperateCamera
+{
+ static class Program
+ {
+ /// <summary>
+ /// Application entry point: starts the TIAS automatic shooting form.
+ /// </summary>
+ [STAThread]
+ static void Main()
+ {
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new Form_TIASAutomaticShootingSystem());
+ }
+ }
+}
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..13ca395
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General information about this assembly is controlled through the
+// following set of attributes. Change these attribute values to modify
+// the information associated with the assembly.
+[assembly: AssemblyTitle("newcamera_deeplearning")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("newcamera_deeplearning")]
+[assembly: AssemblyCopyright("Copyright © 2018")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly invisible
+// to COM components. If a type in this assembly must be accessed from COM,
+// set ComVisible to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is the ID of the typelib if this project is exposed to COM.
+[assembly: Guid("9b9d9f04-f367-4b3a-a842-51d01b42539b")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version
+//      Build Number
+//      Revision
+//
+// You can specify all the values, or default the Build and Revision numbers
+// by using '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs
new file mode 100644
index 0000000..22a72a7
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace deepTIAS_feature_labColor.Properties {
+ using System;
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("deepTIAS_feature_labColor.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx
new file mode 100644
index 0000000..af7dbeb
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Resources.resx
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..11eed20
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.Designer.cs
@@ -0,0 +1,26 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace deepTIAS_feature_labColor.Properties {
+
+
+ /// <summary>
+ /// Tool-generated application-settings singleton; access via Settings.Default.
+ /// </summary>
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default {
+ get {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings
new file mode 100644
index 0000000..3964565
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/Properties/Settings.settings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest
new file mode 100644
index 0000000..061c9ca
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/bin/x64/Debug/newcamera_deeplearning.vshost.exe.manifest
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj
new file mode 100644
index 0000000..8b969e5
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/deepTIAS_feature_labColor.csproj
@@ -0,0 +1,185 @@
+
+
+
+
+
+ Debug
+ AnyCPU
+ {9B9D9F04-F367-4B3A-A842-51D01B42539B}
+ WinExe
+ Properties
+ deepTIAS_feature_labColor
+ deepTIAS_feature_labColor
+ v4.7.1
+ 512
+ true
+
+
+
+ publish\
+ true
+ Disk
+ false
+ Foreground
+ 7
+ Days
+ false
+ false
+ true
+ 0
+ 1.0.0.%2a
+ false
+ false
+ true
+
+
+ AnyCPU
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ AnyCPU
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+ true
+ bin\x64\Debug\
+ DEBUG;TRACE
+ full
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+ true
+
+
+ bin\x64\Release\
+ TRACE
+ true
+ pdbonly
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+ true
+
+
+
+ Form
+
+
+ Form_TIASAutomaticShootingSystem.cs
+
+
+
+
+ Form_TIASAutomaticShootingSystem.cs
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+ True
+ Resources.resx
+ True
+
+
+
+ SettingsSingleFileGenerator
+ Settings.Designer.cs
+
+
+ True
+ Settings.settings
+ True
+
+
+
+
+
+
+
+ False
+ Microsoft .NET Framework 4.7.1 %28x86 および x64%29
+ true
+
+
+ False
+ .NET Framework 3.5 SP1
+ false
+
+
+
+
+ Always
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.dll
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.Blob.dll
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.Extensions.dll
+
+
+ ..\packages\OpenCvSharp3-AnyCPU.4.0.0.20181129\lib\net461\OpenCvSharp.UserInterface.dll
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ このプロジェクトは、このコンピューター上にない NuGet パッケージを参照しています。それらのパッケージをダウンロードするには、[NuGet パッケージの復元] を使用します。詳細については、http://go.microsoft.com/fwlink/?LinkID=322105 を参照してください。見つからないファイルは {0} です。
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs
diff --git a/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Features/DeepTongue_feature_LabColor/feature_labColor/newcamera_deeplearning/obj/x64/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs
diff --git a/Main/ColorSubdivision/ColorSubdivision.csproj b/Main/ColorSubdivision/ColorSubdivision.csproj
new file mode 100644
index 0000000..9f1ebea
--- /dev/null
+++ b/Main/ColorSubdivision/ColorSubdivision.csproj
@@ -0,0 +1,151 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {AD42A573-7AC3-4714-9D53-DB9921815CBB}
+ WinExe
+ ColorSubdivision
+ ColorSubdivision
+ v4.7.1
+ 512
+ true
+ true
+
+ publish\
+ true
+ Disk
+ false
+ Foreground
+ 7
+ Days
+ false
+ false
+ true
+ 0
+ 1.0.0.%2a
+ false
+ false
+ true
+
+
+ x64
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ x64
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+ true
+ bin\x64\Debug\
+ DEBUG;TRACE
+ full
+ x64
+ 7.3
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+ bin\x64\Release\
+ TRACE
+ true
+ pdbonly
+ x64
+ 7.3
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.dll
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Blob.dll
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Extensions.dll
+
+
+ ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.UserInterface.dll
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Form
+
+
+ Form1.cs
+
+
+
+
+ Form1.cs
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+ True
+ Resources.resx
+ True
+
+
+ SettingsSingleFileGenerator
+ Settings.Designer.cs
+
+
+ True
+ Settings.settings
+ True
+
+
+
+
+
+
+
+ Always
+
+
+
+
+ False
+ Microsoft .NET Framework 4.7.1 %28x86 and x64%29
+ true
+
+
+ False
+ .NET Framework 3.5 SP1
+ false
+
+
+
+
\ No newline at end of file
diff --git a/Main/ColorSubdivision/Form1.Designer.cs b/Main/ColorSubdivision/Form1.Designer.cs
new file mode 100644
index 0000000..1b0e6fd
--- /dev/null
+++ b/Main/ColorSubdivision/Form1.Designer.cs
@@ -0,0 +1,84 @@
+namespace ColorSubdivision
+{
+ partial class Form1
+ {
+ /// <summary>
+ /// Required designer variable.
+ /// </summary>
+ private System.ComponentModel.IContainer components = null;
+
+ /// <summary>
+ /// Clean up any resources being used.
+ /// </summary>
+ /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ /// <summary>
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ /// </summary>
+ private void InitializeComponent()
+ {
+ this.button1 = new System.Windows.Forms.Button();
+ this.button2 = new System.Windows.Forms.Button();
+ this.RichTextBox1 = new System.Windows.Forms.RichTextBox();
+ this.SuspendLayout();
+ //
+ // button1
+ //
+ this.button1.Location = new System.Drawing.Point(115, 47);
+ this.button1.Name = "button1";
+ this.button1.Size = new System.Drawing.Size(75, 23);
+ this.button1.TabIndex = 0;
+ this.button1.Text = "Button1";
+ this.button1.UseVisualStyleBackColor = true;
+ this.button1.Click += new System.EventHandler(this.Button1_Click);
+ //
+ // button2
+ //
+ this.button2.Location = new System.Drawing.Point(115, 105);
+ this.button2.Name = "button2";
+ this.button2.Size = new System.Drawing.Size(75, 23);
+ this.button2.TabIndex = 1;
+ this.button2.Text = "Button2";
+ this.button2.UseVisualStyleBackColor = true;
+ this.button2.Click += new System.EventHandler(this.Button2_Click);
+ //
+ // RichTextBox1
+ //
+ this.RichTextBox1.Location = new System.Drawing.Point(370, 138);
+ this.RichTextBox1.Name = "RichTextBox1";
+ this.RichTextBox1.Size = new System.Drawing.Size(100, 96);
+ this.RichTextBox1.TabIndex = 2;
+ this.RichTextBox1.Text = "";
+ //
+ // Form1
+ //
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.ClientSize = new System.Drawing.Size(800, 450);
+ this.Controls.Add(this.RichTextBox1);
+ this.Controls.Add(this.button2);
+ this.Controls.Add(this.button1);
+ this.Name = "Form1";
+ this.ResumeLayout(false);
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.Button button1;
+ private System.Windows.Forms.Button button2;
+ private System.Windows.Forms.RichTextBox RichTextBox1;
+ }
+}
+
diff --git a/Main/ColorSubdivision/Form1.cs b/Main/ColorSubdivision/Form1.cs
new file mode 100644
index 0000000..9d64c49
--- /dev/null
+++ b/Main/ColorSubdivision/Form1.cs
@@ -0,0 +1,56 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Data;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+using OpenCvSharp;
+
+namespace ColorSubdivision
+{
+ /// <summary>
+ /// Test-harness form: Button1 loads a grayscale mask image and marks bright
+ /// pixels in an output Mat; Button2 is an unused Designer-wired stub.
+ /// </summary>
+ public partial class Form1 : Form
+ {
+ public Form1()
+ {
+ InitializeComponent();
+ }
+
+ /// <summary>
+ /// Loads a hard-coded mask image and writes value 100 into the output Mat
+ /// at pixels brighter than 200 — but only for rows processed before the
+ /// first bright pixel is found (see the isEdge flag below).
+ /// </summary>
+ private void Button1_Click(object sender, EventArgs e)
+ {
+ var path = @"D:\kei2\Solutions\DeepTongue\LocalRepository\Tongue extraction_cropresizemethod\Tongue extraction\bin\x64\Debug\mask_final\20180315093610.jpg";
+ using (Mat mat_input = Cv2.ImRead(path, ImreadModes.Grayscale))
+ // using-statement disposes mat_dst even if drawing/ImShow throws
+ // (the original only called Dispose on the success path).
+ using (var mat_dst = new Mat(mat_input.Size(), mat_input.Type()))
+ {
+ // Once any row containing a bright pixel has been processed, all
+ // later rows are skipped. NOTE(review): presumably intended to mark
+ // only the topmost edge row — confirm against the calling workflow.
+ bool isEdge = false;
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ if (isEdge)
+ {
+ continue;
+ }
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ // Generic type arguments restored (angle brackets were
+ // stripped in this dump); the image is 8-bit grayscale here.
+ if (mat_input.At<byte>(i, j) > 200)
+ {
+ mat_dst.Set<byte>(i, j, 100);
+ isEdge = true;
+ }
+ }
+ }
+ Cv2.ImShow("input", mat_input);
+ Cv2.ImShow("dst", mat_dst);
+ // NOTE(review): no Cv2.WaitKey after ImShow — the windows may never
+ // repaint in this WinForms context; confirm intended behavior.
+ }
+ GC.Collect();
+
+ }
+
+ private void Button2_Click(object sender, EventArgs e)
+ {
+ // Intentionally empty: Designer-wired stub kept so the event hookup stays valid.
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Form1.resx b/Main/ColorSubdivision/Form1.resx
new file mode 100644
index 0000000..1af7de1
--- /dev/null
+++ b/Main/ColorSubdivision/Form1.resx
@@ -0,0 +1,120 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Main/ColorSubdivision/Program.cs b/Main/ColorSubdivision/Program.cs
new file mode 100644
index 0000000..b7233a7
--- /dev/null
+++ b/Main/ColorSubdivision/Program.cs
@@ -0,0 +1,22 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+
+namespace ColorSubdivision
+{
+ static class Program
+ {
+ /// <summary>
+ /// The main entry point for the application.
+ /// </summary>
+ [STAThread]
+ static void Main()
+ {
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new Form1());
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Properties/AssemblyInfo.cs b/Main/ColorSubdivision/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..87b468f
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General information about this assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with the assembly.
+[assembly: AssemblyTitle("ColorSubdivision")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("ColorSubdivision")]
+[assembly: AssemblyCopyright("Copyright © 2020")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly invisible
+// to COM components. If a type in this assembly must be accessed from COM,
+// set ComVisible to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is the ID of the typelib if this project is exposed to COM.
+[assembly: Guid("ad42a573-7ac3-4714-9d53-db9921815cbb")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version
+//      Build Number
+//      Revision
+//
+// You can specify all the values, or default the Build and Revision numbers
+// by using '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Main/ColorSubdivision/Properties/Resources.Designer.cs b/Main/ColorSubdivision/Properties/Resources.Designer.cs
new file mode 100644
index 0000000..6ca7260
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace ColorSubdivision.Properties {
+ using System;
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("ColorSubdivision.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Properties/Resources.resx b/Main/ColorSubdivision/Properties/Resources.resx
new file mode 100644
index 0000000..af7dbeb
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Resources.resx
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Main/ColorSubdivision/Properties/Settings.Designer.cs b/Main/ColorSubdivision/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..41f48bd
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Settings.Designer.cs
@@ -0,0 +1,26 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace ColorSubdivision.Properties {
+
+
+ /// <summary>
+ /// Tool-generated application-settings singleton; access via Settings.Default.
+ /// </summary>
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default {
+ get {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/Main/ColorSubdivision/Properties/Settings.settings b/Main/ColorSubdivision/Properties/Settings.settings
new file mode 100644
index 0000000..3964565
--- /dev/null
+++ b/Main/ColorSubdivision/Properties/Settings.settings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/Main/Tongue extraction.sln b/Main/Tongue extraction.sln
new file mode 100644
index 0000000..d3135fb
--- /dev/null
+++ b/Main/Tongue extraction.sln
@@ -0,0 +1,37 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.30011.22
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DeepTIAS1.9", "Tongue extraction\DeepTIAS1.9.csproj", "{AFD610B1-8D23-423A-AA0F-B09BA769BDD7}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.ActiveCfg = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.Build.0 = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.ActiveCfg = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.Build.0 = Debug|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.ActiveCfg = Release|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.Build.0 = Release|x64
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.ActiveCfg = Release|Any CPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {33F6C697-859B-4D55-9D28-998267FD09AE}
+ EndGlobalSection
+EndGlobal
diff --git a/Main/Tongue extraction/ColorExtractor.cs b/Main/Tongue extraction/ColorExtractor.cs
new file mode 100644
index 0000000..e1f3646
--- /dev/null
+++ b/Main/Tongue extraction/ColorExtractor.cs
@@ -0,0 +1,769 @@
+using OpenCvSharp;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Windows.Forms;
+
+namespace Tongue_extraction
+{
+ public partial class ColorExtractor
+ {
        // ---- Configuration / shared state ----
        const int RADIUS_COLORAREA = 10;   // radius (px) of each colour-sampling circle
        public static Bitmap bitmap;
        // Per-channel coefficients of the 17-term RGB->XYZ calibration polynomial
        // (filled by Read_TranslationMatrix; a -> X, b -> Y, c -> Z).
        float[] a = new float[17];
        float[] b = new float[17];
        float[] c = new float[17];
        float d;  // NOTE(review): d, e, f look unused in this file — confirm before removing
        float e;
        float f;
        int k;    // row counter used by Read_TranslationMatrix
        public static bool m_getColor = false;
        public static Mat m_CalibFrame; // calibration frame
        public static OpenCvSharp.Point[] getRGBpoint = new OpenCvSharp.Point[24]; // RGB sampling points (24-patch colour chart)
        double[] m_BforLab = new double[24]; // chart B values read from the calibration CSV
        double[] m_GforLab = new double[24]; // chart G values
        double[] m_RforLab = new double[24]; // chart R values
        public static bool m_bCalib;
        public static OpenCvSharp.Point pt = new OpenCvSharp.Point(); // calibration click point

        // Strategies for picking the five tongue landmark points (see Get5points).
        public enum FivePointMethod {Method1, Method2, Method3};
+
+ public List Get5points(Mat mat_finalMask, FivePointMethod method)
+ {
+ // 表示用
+ var mat_dst = mat_finalMask.Clone();
+ Cv2.CvtColor(mat_dst, mat_dst, ColorConversionCodes.GRAY2BGR);
+
+ // マスクの舌領域画素座標
+ var mat_nonZeroCoordinates = new Mat();
+ Cv2.FindNonZero(mat_finalMask, mat_nonZeroCoordinates);
+
+ // 舌領域上の点をすべてlistに詰める
+ var list_X = new List();
+ var list_Y = new List();
+ for (int i = 0; i < mat_nonZeroCoordinates.Total(); i++)
+ {
+ var x = mat_nonZeroCoordinates.At(i).X;
+ var y = mat_nonZeroCoordinates.At(i).Y;
+ list_X.Add(x);
+ list_Y.Add(y);
+ }
+
+ if(method == FivePointMethod.Method1)
+ {
+ /// method1
+ // 端っこを探索(ラスタ左上から)
+ var p_top = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Min()));
+ var p_bottom = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Max()));
+ var p_left = mat_nonZeroCoordinates.At(list_X.IndexOf(list_X.Min()));
+ var p_right = mat_nonZeroCoordinates.At(list_X.IndexOf(list_X.Max()));
+
+ // 舌尖領域を示すy座標を取得(割合を今回は決め打ち)
+ var y_apex = (int)(p_top.Y + ((p_bottom.Y - p_top.Y) * 0.8));
+ var p_apex_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Min());
+ var p_apex_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex).Max());
+
+ // 表示してみる
+ //Cv2.Circle(mat_dst, p_top, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_right, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left, 20, new Scalar(255, 255, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right, 20, new Scalar(255, 255, 0), -1);
+
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+
+ var li_dst = new List { p_left, p_apex_left, p_bottom, p_apex_right, p_right };
+ return li_dst;
+ }
+ else if (method == FivePointMethod.Method2)
+ {
+ /// method2
+ // 重心(CoG)計算
+ var moments = Cv2.Moments(mat_finalMask, true);
+ var moment_x = moments.M10 / moments.M00;
+ var moment_y = moments.M01 / moments.M00;
+
+ // 輪郭座標
+ OpenCvSharp.Point[][] contours;
+ HierarchyIndex[] hierarchy;
+ Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
+ var sortedContour = contours.OrderByDescending(n => Cv2.ContourArea(n)).ToList();
+ var maxContour = sortedContour[0];
+
+ // 重心-輪郭の距離
+ double maxDistance_lefttop = 0.0;
+ double maxDistance_righttop = 0.0;
+ var p_left_2 = new OpenCvSharp.Point();
+ var p_right_2 = new OpenCvSharp.Point();
+ for (int i = 0; i < maxContour.Length; i++)
+ {
+ // 重心より上側
+ if (maxContour[i].Y < moment_y)
+ {
+ // 重心より上側左側
+ if (maxContour[i].X < moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_lefttop)
+ {
+ maxDistance_lefttop = distance;
+ p_left_2 = maxContour[i];
+ }
+
+ }
+ // 重心より上側右側
+ if (maxContour[i].X >= moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_righttop)
+ {
+ maxDistance_righttop = distance;
+ p_right_2 = maxContour[i];
+ }
+ }
+ }
+ }
+ // 舌尖領域を示すy座標を取得(割合を今回は決め打ち)
+ var p_bottom = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Max()));
+ var y_top_avg_ = (p_left_2.Y + p_right_2.Y) / 2.0;
+ var y_apex_2 = (int)(y_top_avg_ + ((p_bottom.Y - y_top_avg_) * 0.57));
+ var p_apex_left_2 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_2).Min());
+ var p_apex_right_2 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_2).Max());
+
+ // 表示
+ //Cv2.Circle(mat_dst, p_left_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_right_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_bottom_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_left_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, p_apex_right_2, 20, new Scalar(255, 0, 0), -1);
+ //Cv2.Circle(mat_dst, new OpenCvSharp.Point(moment_x, moment_y), 10, new Scalar(200, 60, 200), -1); //重心
+ //Cv2.DrawContours(mat_dst, contours, 0, new Scalar(0, 255, 255), 4); // 輪郭
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+
+ var li_dst = new List { p_left_2, p_apex_left_2, p_bottom, p_apex_right_2, p_right_2 };
+ return li_dst;
+ }
+ else if (method == FivePointMethod.Method3)
+ {
+ /// method3
+ // 重心(CoG)計算
+ var moments = Cv2.Moments(mat_finalMask, true);
+ var moment_x = moments.M10 / moments.M00;
+ var moment_y = moments.M01 / moments.M00;
+
+ // 輪郭座標
+ OpenCvSharp.Point[][] contours;
+ HierarchyIndex[] hierarchy;
+ Cv2.FindContours(mat_finalMask, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
+ var sortedContour = contours.OrderByDescending(n => Cv2.ContourArea(n)).ToList();
+ var maxContour = sortedContour[0];
+
+ // 重心-輪郭の距離
+ double maxDistance_lefttop = 0.0;
+ double maxDistance_righttop = 0.0;
+ var p_left_3 = new OpenCvSharp.Point();
+ var p_right_3 = new OpenCvSharp.Point();
+ // 距離が最も遠いものを採択
+ for (int i = 0; i < maxContour.Length; i++)
+ {
+ // 重心より上側
+ if (maxContour[i].Y < moment_y)
+ {
+ // 重心より上側左側
+ if (maxContour[i].X < moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_lefttop)
+ {
+ maxDistance_lefttop = distance;
+ p_left_3 = maxContour[i];
+ }
+
+ }
+ // 重心より上側右側
+ if (maxContour[i].X >= moment_x)
+ {
+ var distance = maxContour[i].DistanceTo(new OpenCvSharp.Point(moment_x, moment_y));
+ if (distance > maxDistance_righttop)
+ {
+ maxDistance_righttop = distance;
+ p_right_3 = maxContour[i];
+ }
+ }
+ }
+ }
+
+ // 重心とtopの中点を算出する
+ var p_top = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Min()));
+ var y_topToCoG = (int)(moment_y + p_top.Y) / 2;
+ var p_topToCoG_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_topToCoG).Min());
+ var p_topToCoG_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_topToCoG).Max());
+ var p_topToCoG_center = new OpenCvSharp.Point((int)((p_topToCoG_left.X + p_topToCoG_right.X) / 2), y_topToCoG);
+ // 重心とbottomの中点を算出する
+ var p_bottom = mat_nonZeroCoordinates.At(list_Y.IndexOf(list_Y.Max()));
+ var y_bottomToCoG = (int)(moment_y + p_bottom.Y) / 2;
+ var p_bottomToCoG_left = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_bottomToCoG).Min());
+ var p_bottomToCoG_right = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_bottomToCoG).Max());
+ var p_bottomToCoG_center = new OpenCvSharp.Point((int)((p_bottomToCoG_left.X + p_bottomToCoG_right.X) / 2), y_bottomToCoG);
+
+ // 直線フィッティング
+ var lefty = new OpenCvSharp.Point();
+ var righty = new OpenCvSharp.Point();
+ if (p_topToCoG_center.X == p_bottomToCoG_center.X)
+ {
+ // 傾き0の場合の例外処理
+ lefty.X = p_topToCoG_center.X;
+ lefty.Y = 0;
+ righty.X = p_topToCoG_center.X;
+ righty.Y = mat_finalMask.Rows - 1;
+ }
+ else
+ {
+ var line = Cv2.FitLine(new OpenCvSharp.Point[2] { p_topToCoG_center, p_bottomToCoG_center }, DistanceTypes.L2, 0, 0.01, 0.01);
+ lefty.X = 0;
+ lefty.Y = (int)((-line.X1 * line.Vy / line.Vx) + line.Y1);
+ righty.X = mat_finalMask.Cols - 1;
+ righty.Y = (int)(((mat_finalMask.Cols - line.X1) * line.Vy / line.Vx) + line.Y1);
+
+ }
+ // 直線上の輪郭点
+ var mat_centerline = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_contour = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ var mat_and = new Mat(mat_finalMask.Size(), MatType.CV_8UC1, 0);
+ Cv2.Line(mat_centerline, lefty, righty, 3);
+ Cv2.DrawContours(mat_contour, sortedContour, 0, 3);
+ Cv2.BitwiseAnd(mat_centerline, mat_contour, mat_and);
+ Cv2.FindNonZero(mat_and, mat_and);
+ var bottom_y_3 = 0;
+ var bottom_x_3 = 0;
+ for (int i = 0; i < mat_and.Total(); i++)
+ {
+ var x = mat_and.At(i).X;
+ var y = mat_and.At(i).Y;
+ if (bottom_y_3 < y)
+ {
+ bottom_y_3 = y;
+ bottom_x_3 = x;
+ }
+ }
+ var p_bottom_3 = new OpenCvSharp.Point(bottom_x_3, bottom_y_3);
+
+ // 舌尖領域を示すy座標を取得(割合を今回は決め打ち)
+ var y_top_avg_ = (p_left_3.Y + p_right_3.Y) / 2.0;
+ var y_apex_3 = (int)(y_top_avg_ + ((p_bottom_3.Y - y_top_avg_) * 0.57));
+ var p_apex_left_3 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_3).Min());
+ var p_apex_right_3 = mat_nonZeroCoordinates.At(IndexOfAll(list_Y, y_apex_3).Max());
+
+ //Cv2.Circle(mat_dst, p_topToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_topToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_left, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_right, 20, new Scalar(0, 100, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottomToCoG_center, 20, new Scalar(0, 0, 255), -1);
+ //Cv2.Circle(mat_dst, p_bottom_3, 20, new Scalar(0, 0, 255), -1);
+
+ // 破棄
+ mat_dst.Dispose();
+ mat_nonZeroCoordinates.Dispose();
+ GC.Collect();
+
+ var li_dst = new List { p_left_3, p_apex_left_3, p_bottom_3, p_apex_right_3, p_right_3 };
+ return li_dst;
+ }
+ else
+ {
+ return new List();
+ }
+ // DEBUG
+ //Cv2.NamedWindow("dst", WindowMode.KeepRatio ^ WindowMode.AutoSize);
+ //Cv2.ImShow("dst", mat_dst.Resize(new OpenCvSharp.Size((int)mat_dst.Width * 0.5, (int)mat_dst.Height * 0.5)));
+
+ }
+
+ public List Get8area(List list_5points)
+ {
+ var li_dst = new List();
+ // ROIマスク画像1
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1|● ●|3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points1 = new int[2, 3] { { 0, 2, 1 }, { 4, 2, 3 } };
+ var area1 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ var cx1 = (list_5points[points1[i, 0]].X + list_5points[points1[i, 1]].X) / 2;
+ var cy1 = (list_5points[points1[i, 0]].Y + list_5points[points1[i, 1]].Y) / 2;
+ area1.X = (list_5points[points1[i, 2]].X + cx1) / 2;
+ area1.Y = (list_5points[points1[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area1);
+ }
+
+ // ROIマスク画像2
+ // 0____________ 4
+ // | ● ● |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_______/
+ // 2
+
+ var points2 = new int[2, 4] { { 0, 3, 0, 4 }, { 4, 1, 4, 0 } };
+ var area2 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ float cx1 = list_5points[points2[i, 0]].X + (list_5points[points2[i, 1]].X - list_5points[points2[i, 0]].X) / 4;
+ float cy1 = list_5points[points2[i, 0]].Y + (list_5points[points2[i, 1]].Y - list_5points[points2[i, 0]].Y) / 4;
+ float cx2 = (list_5points[points2[i, 2]].X + list_5points[points2[i, 3]].X) / 2;
+ float cy2 = (list_5points[points2[i, 2]].Y + list_5points[points2[i, 3]].Y) / 2;
+ area2.X = (int)(cx1 + cx2) / 2;
+ area2.Y = (int)(cy1 + cy2) / 2;
+ li_dst.Add(area2);
+ }
+
+ // ROIマスク画像3
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| ● ● |3
+ // 舌 /
+ // 舌_______/
+ // 2
+ var points3 = new int[2, 3] { { 0, 3, 2 }, { 4, 1, 2 } };
+ var area3 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ float cx1 = list_5points[points3[i, 0]].X + (list_5points[points3[i, 1]].X - list_5points[points3[i, 0]].X) / 4;
+ float cy1 = list_5points[points3[i, 0]].Y + (list_5points[points3[i, 1]].Y - list_5points[points3[i, 0]].Y) / 4;
+ area3.X = (int)(list_5points[points3[i, 2]].X + cx1) / 2;
+ area3.Y = (int)(list_5points[points3[i, 2]].Y + cy1) / 2;
+ li_dst.Add(area3);
+ }
+
+ // ROIマスク画像3
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| |3
+ // 舌 /
+ // 舌_●_●_/
+ // 2
+ var points4 = new int[2, 2] { { 0, 2 }, { 4, 2 } };
+ var area4 = new OpenCvSharp.Point();
+ for (int i = 0; i < 2; i++)
+ {
+ area4.X = list_5points[points4[i, 0]].X + (list_5points[points4[i, 1]].X - list_5points[points4[i, 0]].X) * 7 / 8;
+ area4.Y = list_5points[points4[i, 0]].Y + (list_5points[points4[i, 1]].Y - list_5points[points4[i, 0]].Y) * 7 / 8;
+ li_dst.Add(area4);
+ }
+
+ // To do : もし8areaが舌領域に載っていなかったら修正
+ //bool isOnTongueArea = DiscriminateOnTongueArea(li_dst);
+ //if (isOnTongueArea)
+ //{
+
+ //}
+
+ return li_dst;
+ }
+
+ public List Get8colors(Mat mat_maskedImg, List list_8area)
+ {
+ List li_dst = new List();
+ for (int i = 0; i < list_8area.Count(); i++)
+ {
+ using (Mat mat_colorRoi = Mat.Zeros(mat_maskedImg.Size(), MatType.CV_8UC1))
+ {
+ // 色抽出領域を示すマスク画像を作成
+ Cv2.Circle(mat_colorRoi, list_8area[i], RADIUS_COLORAREA, 255, -1);
+
+ // 領域で色を抽出
+ var color = Cv2.Mean(mat_maskedImg, mat_colorRoi);
+ li_dst.Add(color);
+ }
+ }
+ return li_dst;
+ }
+
+ public void Show8area(Mat oriImg, List list_8area)
+ {
+
+ Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(255, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 0), -1);
+ Cv2.NamedWindow("dst_", WindowMode.KeepRatio ^ WindowMode.Normal);
+ Cv2.ImShow("dst_", oriImg.Resize(new OpenCvSharp.Size((int)oriImg.Width * 0.5, (int)oriImg.Height * 0.5)));
+ }
+
+ public Mat ShowResult(Mat oriImg, List list_5point, List list_8area)
+ {
+ Cv2.Circle(oriImg, list_5point[0], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[1], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[2], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[3], 3, new Scalar(255, 0, 0), -1);
+ Cv2.Circle(oriImg, list_5point[4], 3, new Scalar(255, 0, 0), -1);
+
+ Cv2.Circle(oriImg, list_8area[0], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[1], 10, new Scalar(0, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[2], 10, new Scalar(0, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[3], 10, new Scalar(0, 255, 255), -1);
+ Cv2.Circle(oriImg, list_8area[4], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[5], 10, new Scalar(255, 255, 0), -1);
+ Cv2.Circle(oriImg, list_8area[6], 10, new Scalar(255, 0, 255), -1);
+ Cv2.Circle(oriImg, list_8area[7], 10, new Scalar(255, 0, 255), -1);
+
+ Cv2.Line(oriImg, list_5point[0], list_5point[1], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[1], list_5point[2], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[2], list_5point[3], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[3], list_5point[4], new Scalar(255, 100, 100));
+ Cv2.Line(oriImg, list_5point[4], list_5point[0], new Scalar(255, 100, 100));
+
+ return oriImg;
+ }
+
+ public List Calc8Lab(List list_8colors, string path_calibCsv, string path_colorMatrixXYZ)
+ {
+ GetColorMatrixRGB(path_calibCsv);
+ CalcTransMat(path_colorMatrixXYZ);
+ Read_TranslationMatrix();
+ var li_dst = new List();
+ for (int i = 0; i < list_8colors.Count(); i++)
+ {
+ var LabValue = CalcLab(list_8colors[i]);
+ li_dst.Add(LabValue);
+ }
+ return li_dst;
+ }
+
+ public void GetColorMatrixRGB(string path_calibCsv)
+ {
+ System.Text.Encoding encoding = GetType(path_calibCsv);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_calibCsv, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding);
+ string strLine = "";
+ string[] aryLine;
+ sr3.ReadLine(); // headerをスルー
+ int i = 0;
+ while ((strLine = sr3.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ var no = Convert.ToSingle(aryLine[0]);
+ float r = Convert.ToSingle(aryLine[1]);
+ float g = Convert.ToSingle(aryLine[2]);
+ float b = Convert.ToSingle(aryLine[3]);
+ m_BforLab[i] = b;
+ m_GforLab[i] = g;
+ m_RforLab[i] = r;
+ i++;
+ }
+ sr3.Close();
+ fs3.Close();
+ }
+
+ public void CalcTransMat(string path_colorMatrixXYZ)
+ {
+ Mat RGBmat = new Mat(24, 17, MatType.CV_64F, new Scalar(1.0f));
+ Mat XYZmat = new Mat(24, 4, MatType.CV_64F, new Scalar(1.0f));
+
+ // ColorChartのXYZ読み込み
+ string line;
+ string[] split = new string[3];
+ double valueX = 0, valueY = 0, valueZ = 0;
+ System.Text.Encoding encoding3 = GetType(path_colorMatrixXYZ);
+ System.IO.FileStream fs3 = new System.IO.FileStream(path_colorMatrixXYZ, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr3 = new System.IO.StreamReader(fs3, encoding3);
+ for (int i = 0; i < 24; i++)
+ {
+ line = sr3.ReadLine();
+ split = line.Split(' ');
+ valueX = Convert.ToDouble(split[0]);
+ valueY = Convert.ToDouble(split[1]);
+ valueZ = Convert.ToDouble(split[2]);
+ XYZmat.Set(i, 0, valueX);
+ XYZmat.Set(i, 1, valueY);
+ XYZmat.Set(i, 2, valueZ);
+ }
+ sr3.Close();
+ fs3.Close();
+
+ // 変換行列の計算
+ for (int j = 0; j < 24; j++)
+ {
+ //順番
+ //R,G,B,RG,RB,GB,R^2,G^2,B^2
+ //R^2B,R^2G,G^2,R,G^2B,B^2R,B^2G,RGB
+ RGBmat.Set(j, 2, m_BforLab[j]);
+ RGBmat.Set(j, 1, m_GforLab[j]);
+ RGBmat.Set(j, 0, m_RforLab[j]);
+ //2V次の項
+ RGBmat.Set(j, 3, m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 4, m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 5, m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 6, m_RforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 7, m_GforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 8, m_BforLab[j] * m_BforLab[j]);
+ //3V次の項
+ RGBmat.Set(j, 9, m_RforLab[j] * m_RforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 10, m_RforLab[j] * m_RforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 11, m_GforLab[j] * m_GforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 12, m_GforLab[j] * m_GforLab[j] * m_BforLab[j]);
+ RGBmat.Set(j, 13, m_BforLab[j] * m_BforLab[j] * m_RforLab[j]);
+ RGBmat.Set(j, 14, m_BforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ RGBmat.Set(j, 15, m_RforLab[j] * m_BforLab[j] * m_GforLab[j]);
+ }
+ // 変換行列の生成
+ Mat translation = new Mat();
+ var canSolve = Cv2.Solve(RGBmat, XYZmat, translation, DecompTypes.SVD);
+
+ // 保存
+ string CSVfilename = "translateMatrix.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ string s2 = "";
+ for (int i = 0; i <= 16; i++)
+ {
+ for (int j = 0; j <= 2; j++)
+ {
+ double s1 = translation.At(i, j);
+ s2 += s1.ToString() + ",";
+ }
+ CSV_data.WriteLine(s2);
+ s2 = "";
+ }
+ CSV_data.Close();
+ }
+
+ public void Read_TranslationMatrix()
+ {
+ // 変換行列を読み込みなおす
+ System.Text.Encoding encoding = GetType("translateMatrix.csv");
+ System.IO.FileStream fs1 = new System.IO.FileStream("translateMatrix.csv", System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.IO.StreamReader sr1 = new System.IO.StreamReader(fs1, encoding);
+ for (int i = 0; i < 17; i++)
+ {
+ a[i] = 0;
+ b[i] = 0;
+ c[i] = 0;
+ }
+ k = 0;
+ string strLine = "";
+ string[] aryLine = null;
+ while ((strLine = sr1.ReadLine()) != null)
+ {
+ aryLine = strLine.Split(',');
+ a[k] = Convert.ToSingle(aryLine[0]);
+ b[k] = Convert.ToSingle(aryLine[1]);
+ c[k] = Convert.ToSingle(aryLine[2]);
+ k++;
+ }
+ sr1.Close();
+ fs1.Close();
+ }
+
+ private OpenCvSharp.Scalar CalcLab(OpenCvSharp.Scalar BgrValue)
+ {
+ // CIELabの計算
+ // XYZに変換
+ double X, Y, Z;
+ double m_B = BgrValue.Val0;
+ double m_G = BgrValue.Val1;
+ double m_R = BgrValue.Val2;
+ X = m_R * a[0] + m_G * a[1] + m_B * a[2]
+ + a[3] * m_R * m_G + a[4] * m_R * m_B + a[5] * m_G * m_B
+ + a[6] * m_R * m_R + a[7] * m_G * m_G + a[8] * m_B * m_B
+ + a[9] * m_R * m_R * m_B + a[10] * m_R * m_R * m_G
+ + a[11] * m_G * m_G * m_R + a[12] * m_G * m_G * m_B
+ + a[13] * m_B * m_B * m_R + a[14] * m_B * m_B * m_G
+ + a[15] * m_R * m_G * m_B
+ + a[16];
+ Y = m_R * b[0] + m_G * b[1] + m_B * b[2]
+ + b[3] * m_R * m_G + b[4] * m_R * m_B + b[5] * m_G * m_B
+ + b[6] * m_R * m_R + b[7] * m_G * m_G + b[8] * m_B * m_B
+ + b[9] * m_R * m_R * m_B + b[10] * m_R * m_R * m_G
+ + b[11] * m_G * m_G * m_R + b[12] * m_G * m_G * m_B
+ + b[13] * m_B * m_B * m_R + b[14] * m_B * m_B * m_G
+ + b[15] * m_R * m_G * m_B
+ + b[16];
+ Z = m_R * c[0] + m_G * c[1] + m_B * c[2]
+ + c[3] * m_R * m_G + c[4] * m_R * m_B + c[5] * m_G * m_B
+ + c[6] * m_R * m_R + c[7] * m_G * m_G + c[8] * m_B * m_B
+ + c[9] * m_R * m_R * m_B + c[10] * m_R * m_R * m_G
+ + c[11] * m_G * m_G * m_R + c[12] * m_G * m_G * m_B
+ + c[13] * m_B * m_B * m_R + c[14] * m_B * m_B * m_G
+ + c[15] * m_R * m_G * m_B
+ + c[16];
+ if (X < 0) X = 0;
+ if (Y < 0) Y = 0;
+ if (Z < 0) Z = 0;
+
+ // Labに変換(固定の計算式)
+ // TIAS 光源 (測定値20201023)
+ // double Xn = 99.5829;
+ // double Yn = 100.0;
+ // double Zn = 57.1402;
+
+ // Tangさん,竹田さんが使用してた値 (おそらく昔のTIAS光源の測定値)
+ //double Xn = 102.07;
+ //double Yn = 100.0;
+ //double Zn = 79.41;
+
+ // 石川さん,中口先生が使用している値 人口太陽照明?
+ double Xn = 92.219;
+ double Yn = 100.0;
+ double Zn = 95.965;
+ double cL = 116.0 * Math.Pow((Y / Yn), 1.0 / 3.0) - 16.0;
+ double ca = 500.0 * (Math.Pow((X / Xn), 1.0 / 3.0) - Math.Pow((Y / Yn), 1.0 / 3.0));
+ double cb = 200.0 * (Math.Pow((Y / Yn), 1.0 / 3.0) - Math.Pow((Z / Zn), 1.0 / 3.0));
+
+ return new OpenCvSharp.Scalar(cL, ca, cb);
+ }
+
+ public void Write8colors(List list_8Bgr, List list_8Lab)
+ {
+ // 保存
+ string CSVfilename = "CalculatedLab.csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ CSV_data.WriteLine("Area,R,G,B,L,a,B");
+ for (int i = 0; i < list_8Bgr.Count(); i++)
+ {
+ string str = (i + 1).ToString() + ",";
+ str +=
+ list_8Bgr[i].Val2.ToString("0.0000") + "," +
+ list_8Bgr[i].Val1.ToString("0.0000") + "," +
+ list_8Bgr[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val1.ToString("0.0000") + "," +
+ list_8Lab[i].Val2.ToString("0.0000");
+ CSV_data.WriteLine(str);
+ }
+ CSV_data.Close();
+ }
+
+ private List IndexOfAll(List li, int target)
+ {
+ int num = li.IndexOf(target);
+ var li_num = new List();
+ if (num > 0)
+ {
+ li_num.Add(num);
+ // IndexOfメソッドで見つからなくなるまで繰り返す
+ while (num > 0)
+ {
+ //見つかった位置の次の位置から検索
+ num = li.IndexOf(target, num + 1);
+ if (num > 0)
+ {
+ li_num.Add(num);
+ }
+ }
+ }
+ else
+ {
+ Console.WriteLine("{0}は見つかりませんでした", target);
+ }
+ return li_num;
+ }
+
+
+ public static System.Text.Encoding GetType(string FILE_NAME)
+ {
+ System.IO.FileStream fs = new System.IO.FileStream(FILE_NAME, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+ System.Text.Encoding r = GetType(fs);
+ fs.Close();
+ return r;
+ }
+
+ public static System.Text.Encoding GetType(System.IO.FileStream fs)
+ {
+ byte[] Unicode = new byte[] { 0xFF, 0xFE, 0x41 };
+ byte[] UnicodeBIG = new byte[] { 0xFE, 0xFF, 0x00 };
+ byte[] UTF8 = new byte[] { 0xEF, 0xBB, 0xBF };
+ System.Text.Encoding reVal = System.Text.Encoding.Default;
+
+ System.IO.BinaryReader r = new System.IO.BinaryReader(fs, System.Text.Encoding.Default);
+ int i;
+ int.TryParse(fs.Length.ToString(), out i);
+ byte[] ss = r.ReadBytes(i);
+ if (IsUTF8Bytes(ss) || (ss[0] == 0xEF && ss[1] == 0xBB && ss[2] == 0xBF))
+ {
+ reVal = System.Text.Encoding.UTF8;
+ }
+ else if (ss[0] == 0xFE && ss[1] == 0xFF && ss[2] == 0x00)
+ {
+ reVal = System.Text.Encoding.BigEndianUnicode;
+ }
+ else if (ss[0] == 0xFF && ss[1] == 0xFE && ss[2] == 0x41)
+ {
+ reVal = System.Text.Encoding.Unicode;
+ }
+ r.Close();
+ return reVal;
+ }
+
+ private static bool IsUTF8Bytes(byte[] data)
+ {
+ int charByteCounter = 1;
+ byte curByte;
+ for (int i = 0; i < data.Length; i++)
+ {
+ curByte = data[i];
+ if (charByteCounter == 1)
+ {
+ if (curByte >= 0x80)
+ {
+ while (((curByte <<= 1) & 0x80) != 0)
+ {
+ charByteCounter++;
+ }
+ if (charByteCounter == 1 || charByteCounter > 6)
+ {
+ return false;
+ }
+ }
+ }
+ else
+ {
+ if ((curByte & 0xC0) != 0x80)
+ {
+ return false;
+ }
+ charByteCounter--;
+ }
+ }
+ if (charByteCounter > 1)
+ {
+ throw new Exception("Error");
+ }
+ return true;
+ }
+ }
+}
diff --git a/Main/Tongue extraction/DeepTIAS1.9.csproj b/Main/Tongue extraction/DeepTIAS1.9.csproj
new file mode 100644
index 0000000..882082c
--- /dev/null
+++ b/Main/Tongue extraction/DeepTIAS1.9.csproj
@@ -0,0 +1,191 @@
+
+
+
+
+
+ Debug
+ AnyCPU
+ {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}
+ WinExe
+ Properties
+ Tongue_extraction
+ DeepTIAS1.9
+ v4.7.1
+ 512
+ true
+
+
+
+ publish\
+ true
+ Disk
+ false
+ Foreground
+ 7
+ Days
+ false
+ false
+ true
+ 0
+ 1.0.0.%2a
+ false
+ false
+ true
+
+
+ AnyCPU
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ AnyCPU
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+ true
+ bin\x64\Debug\
+ DEBUG;TRACE
+ full
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+ bin\x64\Release\
+ TRACE
+ true
+ pdbonly
+ x64
+ prompt
+ MinimumRecommendedRules.ruleset
+ true
+
+
+ DeepTongue_Icon.ico
+
+
+
+
+ Form
+
+
+ Form1.cs
+
+
+
+
+
+ Form1.cs
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+ True
+ Resources.resx
+ True
+
+
+
+ SettingsSingleFileGenerator
+
+
+
+
+
+
+
+
+ Always
+
+
+
+
+ False
+ Microsoft .NET Framework 4.7.1 %28x86 および x64%29
+ true
+
+
+ False
+ .NET Framework 3.5 SP1
+ false
+
+
+
+
+ False
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.dll
+
+
+ False
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.Blob.dll
+
+
+ False
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.Extensions.dll
+
+
+ ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.UserInterface.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.Composition.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.Composition.Registration.dll
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.DataAnnotations.dll
+
+
+
+
+
+
+
+
+
+ C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.Reflection.Context.dll
+
+
+ ..\..\..\packages\System.ValueTuple.4.4.0\lib\net461\System.ValueTuple.dll
+
+
+
+
+ False
+ ..\..\..\packages\TensorFlowSharp.1.11.0\lib\net471\TensorFlowSharp.dll
+
+
+
+
+
+ このプロジェクトは、このコンピューター上にない NuGet パッケージを参照しています。それらのパッケージをダウンロードするには、[NuGet パッケージの復元] を使用します。詳細については、http://go.microsoft.com/fwlink/?LinkID=322105 を参照してください。見つからないファイルは {0} です。
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Main/Tongue extraction/DeepTongue_Icon.ico b/Main/Tongue extraction/DeepTongue_Icon.ico
new file mode 100644
index 0000000..85fef71
--- /dev/null
+++ b/Main/Tongue extraction/DeepTongue_Icon.ico
Binary files differ
diff --git a/Main/Tongue extraction/Form1.Designer.cs b/Main/Tongue extraction/Form1.Designer.cs
new file mode 100644
index 0000000..6a7aa09
--- /dev/null
+++ b/Main/Tongue extraction/Form1.Designer.cs
@@ -0,0 +1,485 @@
+namespace Tongue_extraction
+{
+ partial class Form1
+ {
+ ///
+ /// 必要なデザイナー変数です。
+ ///
+ private System.ComponentModel.IContainer components = null;
+
+ ///
+ /// 使用中のリソースをすべてクリーンアップします。
+ ///
+ /// マネージ リソースを破棄する場合は true を指定し、その他の場合は false を指定します。
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows フォーム デザイナーで生成されたコード
+
+ ///
+ /// デザイナー サポートに必要なメソッドです。このメソッドの内容を
+ /// コード エディターで変更しないでください。
+ ///
+ private void InitializeComponent()
+ {
+ System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(Form1));
+ this.button_start = new System.Windows.Forms.Button();
+ this.pictureBox_extraction = new System.Windows.Forms.PictureBox();
+ this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
+ this.panel3 = new System.Windows.Forms.Panel();
+ this.label9 = new System.Windows.Forms.Label();
+ this.comboBox_mode = new System.Windows.Forms.ComboBox();
+ this.label_mode = new System.Windows.Forms.Label();
+ this.panel1 = new System.Windows.Forms.Panel();
+ this.pictureBox_detection = new System.Windows.Forms.PictureBox();
+ this.pictureBox_input = new System.Windows.Forms.PictureBox();
+ this.label1 = new System.Windows.Forms.Label();
+ this.label2 = new System.Windows.Forms.Label();
+ this.label3 = new System.Windows.Forms.Label();
+ this.label4 = new System.Windows.Forms.Label();
+ this.label5 = new System.Windows.Forms.Label();
+ this.label6 = new System.Windows.Forms.Label();
+ this.label7 = new System.Windows.Forms.Label();
+ this.label8 = new System.Windows.Forms.Label();
+ this.pictureBox_cropResized = new System.Windows.Forms.PictureBox();
+ this.pictureBox_output = new System.Windows.Forms.PictureBox();
+ this.pictureBox_outputSRG = new System.Windows.Forms.PictureBox();
+ this.pictureBox_maskSRG = new System.Windows.Forms.PictureBox();
+ this.pictureBox_last = new System.Windows.Forms.PictureBox();
+ this.label_processingFileName = new System.Windows.Forms.Label();
+ this.label_totalProgress = new System.Windows.Forms.Label();
+ this.panel2 = new System.Windows.Forms.Panel();
+ this.button_pause = new System.Windows.Forms.Button();
+ this.backgroundWorker1 = new System.ComponentModel.BackgroundWorker();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_extraction)).BeginInit();
+ this.tableLayoutPanel1.SuspendLayout();
+ this.panel3.SuspendLayout();
+ this.panel1.SuspendLayout();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_detection)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_input)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_cropResized)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_output)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_outputSRG)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_maskSRG)).BeginInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_last)).BeginInit();
+ this.panel2.SuspendLayout();
+ this.SuspendLayout();
+ //
+ // button_start
+ //
+ this.button_start.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.button_start.Location = new System.Drawing.Point(0, 0);
+ this.button_start.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.button_start.Name = "button_start";
+ this.button_start.Size = new System.Drawing.Size(246, 49);
+ this.button_start.TabIndex = 0;
+ this.button_start.Text = "Start";
+ this.button_start.UseVisualStyleBackColor = true;
+ this.button_start.Click += new System.EventHandler(this.Button_start_Click);
+ //
+ // pictureBox_extraction
+ //
+ this.pictureBox_extraction.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_extraction.Location = new System.Drawing.Point(511, 241);
+ this.pictureBox_extraction.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_extraction.Name = "pictureBox_extraction";
+ this.pictureBox_extraction.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_extraction.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_extraction.TabIndex = 4;
+ this.pictureBox_extraction.TabStop = false;
+ //
+ // tableLayoutPanel1
+ //
+ this.tableLayoutPanel1.CellBorderStyle = System.Windows.Forms.TableLayoutPanelCellBorderStyle.Outset;
+ this.tableLayoutPanel1.ColumnCount = 4;
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
+ this.tableLayoutPanel1.Controls.Add(this.panel3, 2, 4);
+ this.tableLayoutPanel1.Controls.Add(this.label_mode, 1, 5);
+ this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 4);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_detection, 1, 0);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_input, 0, 0);
+ this.tableLayoutPanel1.Controls.Add(this.label1, 0, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label2, 1, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label3, 2, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label4, 3, 1);
+ this.tableLayoutPanel1.Controls.Add(this.label5, 0, 3);
+ this.tableLayoutPanel1.Controls.Add(this.label6, 1, 3);
+ this.tableLayoutPanel1.Controls.Add(this.label7, 2, 3);
+ this.tableLayoutPanel1.Controls.Add(this.label8, 3, 3);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_cropResized, 2, 0);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_extraction, 2, 2);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_output, 3, 0);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_outputSRG, 0, 2);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_maskSRG, 1, 2);
+ this.tableLayoutPanel1.Controls.Add(this.pictureBox_last, 3, 2);
+ this.tableLayoutPanel1.Controls.Add(this.label_processingFileName, 0, 5);
+ this.tableLayoutPanel1.Controls.Add(this.label_totalProgress, 2, 5);
+ this.tableLayoutPanel1.Controls.Add(this.panel2, 1, 4);
+ this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
+ this.tableLayoutPanel1.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.tableLayoutPanel1.Name = "tableLayoutPanel1";
+ this.tableLayoutPanel1.RowCount = 6;
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 44.44445F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 44.44444F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 11.11111F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
+ this.tableLayoutPanel1.Size = new System.Drawing.Size(1018, 554);
+ this.tableLayoutPanel1.TabIndex = 5;
+ //
+ // panel3
+ //
+ this.panel3.Controls.Add(this.label9);
+ this.panel3.Controls.Add(this.comboBox_mode);
+ this.panel3.Dock = System.Windows.Forms.DockStyle.Left;
+ this.panel3.Location = new System.Drawing.Point(511, 478);
+ this.panel3.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.panel3.Name = "panel3";
+ this.panel3.Size = new System.Drawing.Size(246, 49);
+ this.panel3.TabIndex = 23;
+ //
+ // label9
+ //
+ this.label9.AutoSize = true;
+ this.label9.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label9.Font = new System.Drawing.Font("Arial Narrow", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
+ this.label9.Location = new System.Drawing.Point(0, 0);
+ this.label9.Name = "label9";
+ this.label9.Size = new System.Drawing.Size(47, 24);
+ this.label9.TabIndex = 1;
+ this.label9.Text = "Input";
+ //
+ // comboBox_mode
+ //
+ this.comboBox_mode.Dock = System.Windows.Forms.DockStyle.Bottom;
+ this.comboBox_mode.FormattingEnabled = true;
+ this.comboBox_mode.Items.AddRange(new object[] {
+ "Image",
+ "Image&Calib.csv",
+ "Image&Calib.csv&Mask"});
+ this.comboBox_mode.Location = new System.Drawing.Point(0, 29);
+ this.comboBox_mode.Name = "comboBox_mode";
+ this.comboBox_mode.Size = new System.Drawing.Size(246, 20);
+ this.comboBox_mode.TabIndex = 0;
+ this.comboBox_mode.SelectedIndexChanged += new System.EventHandler(this.comboBox1_SelectedIndexChanged);
+ //
+ // label_mode
+ //
+ this.label_mode.AutoSize = true;
+ this.label_mode.BackColor = System.Drawing.Color.White;
+ this.label_mode.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label_mode.Location = new System.Drawing.Point(513, 531);
+ this.label_mode.Name = "label_mode";
+ this.label_mode.Size = new System.Drawing.Size(246, 21);
+ this.label_mode.TabIndex = 22;
+ this.label_mode.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
+ //
+ // panel1
+ //
+ this.panel1.Controls.Add(this.button_start);
+ this.panel1.Dock = System.Windows.Forms.DockStyle.Left;
+ this.panel1.Location = new System.Drawing.Point(3, 478);
+ this.panel1.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.panel1.Name = "panel1";
+ this.panel1.Size = new System.Drawing.Size(246, 49);
+ this.panel1.TabIndex = 6;
+ //
+ // pictureBox_detection
+ //
+ this.pictureBox_detection.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_detection.Location = new System.Drawing.Point(257, 4);
+ this.pictureBox_detection.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_detection.Name = "pictureBox_detection";
+ this.pictureBox_detection.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_detection.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_detection.TabIndex = 2;
+ this.pictureBox_detection.TabStop = false;
+ //
+ // pictureBox_input
+ //
+ this.pictureBox_input.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_input.Location = new System.Drawing.Point(3, 4);
+ this.pictureBox_input.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_input.Name = "pictureBox_input";
+ this.pictureBox_input.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_input.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_input.TabIndex = 1;
+ this.pictureBox_input.TabStop = false;
+ //
+ // label1
+ //
+ this.label1.AutoSize = true;
+ this.label1.BackColor = System.Drawing.Color.White;
+ this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label1.Location = new System.Drawing.Point(5, 217);
+ this.label1.Name = "label1";
+ this.label1.Size = new System.Drawing.Size(246, 20);
+ this.label1.TabIndex = 7;
+ this.label1.Text = "Input";
+ this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label2
+ //
+ this.label2.AutoSize = true;
+ this.label2.BackColor = System.Drawing.Color.White;
+ this.label2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label2.Location = new System.Drawing.Point(259, 217);
+ this.label2.Name = "label2";
+ this.label2.Size = new System.Drawing.Size(246, 20);
+ this.label2.TabIndex = 8;
+ this.label2.Text = "Detection";
+ this.label2.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label3
+ //
+ this.label3.AutoSize = true;
+ this.label3.BackColor = System.Drawing.Color.White;
+ this.label3.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label3.Location = new System.Drawing.Point(513, 217);
+ this.label3.Name = "label3";
+ this.label3.Size = new System.Drawing.Size(246, 20);
+ this.label3.TabIndex = 9;
+ this.label3.Text = "Crop+Resize";
+ this.label3.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label4
+ //
+ this.label4.AutoSize = true;
+ this.label4.BackColor = System.Drawing.Color.White;
+ this.label4.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label4.Location = new System.Drawing.Point(767, 217);
+ this.label4.Name = "label4";
+ this.label4.Size = new System.Drawing.Size(246, 20);
+ this.label4.TabIndex = 10;
+ this.label4.Text = "Output";
+ this.label4.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label5
+ //
+ this.label5.AutoSize = true;
+ this.label5.BackColor = System.Drawing.Color.White;
+ this.label5.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label5.Location = new System.Drawing.Point(5, 454);
+ this.label5.Name = "label5";
+ this.label5.Size = new System.Drawing.Size(246, 20);
+ this.label5.TabIndex = 11;
+ this.label5.Text = "Output+SRG";
+ this.label5.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label6
+ //
+ this.label6.AutoSize = true;
+ this.label6.BackColor = System.Drawing.Color.White;
+ this.label6.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label6.Location = new System.Drawing.Point(259, 454);
+ this.label6.Name = "label6";
+ this.label6.Size = new System.Drawing.Size(246, 20);
+ this.label6.TabIndex = 12;
+ this.label6.Text = "Mask+SRG";
+ this.label6.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label7
+ //
+ this.label7.AutoSize = true;
+ this.label7.BackColor = System.Drawing.Color.White;
+ this.label7.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label7.Location = new System.Drawing.Point(513, 454);
+ this.label7.Name = "label7";
+ this.label7.Size = new System.Drawing.Size(246, 20);
+ this.label7.TabIndex = 13;
+ this.label7.Text = "Extraction";
+ this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // label8
+ //
+ this.label8.AutoSize = true;
+ this.label8.BackColor = System.Drawing.Color.White;
+ this.label8.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label8.Location = new System.Drawing.Point(767, 454);
+ this.label8.Name = "label8";
+ this.label8.Size = new System.Drawing.Size(246, 20);
+ this.label8.TabIndex = 14;
+ this.label8.Text = "Color Extraction";
+ this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
+ //
+ // pictureBox_cropResized
+ //
+ this.pictureBox_cropResized.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_cropResized.Location = new System.Drawing.Point(511, 4);
+ this.pictureBox_cropResized.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.pictureBox_cropResized.Name = "pictureBox_cropResized";
+ this.pictureBox_cropResized.Size = new System.Drawing.Size(250, 209);
+ this.pictureBox_cropResized.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_cropResized.TabIndex = 3;
+ this.pictureBox_cropResized.TabStop = false;
+ //
+ // pictureBox_output
+ //
+ this.pictureBox_output.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_output.Location = new System.Drawing.Point(767, 5);
+ this.pictureBox_output.Name = "pictureBox_output";
+ this.pictureBox_output.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_output.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_output.TabIndex = 15;
+ this.pictureBox_output.TabStop = false;
+ //
+ // pictureBox_outputSRG
+ //
+ this.pictureBox_outputSRG.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_outputSRG.Location = new System.Drawing.Point(5, 242);
+ this.pictureBox_outputSRG.Name = "pictureBox_outputSRG";
+ this.pictureBox_outputSRG.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_outputSRG.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_outputSRG.TabIndex = 16;
+ this.pictureBox_outputSRG.TabStop = false;
+ //
+ // pictureBox_maskSRG
+ //
+ this.pictureBox_maskSRG.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_maskSRG.Location = new System.Drawing.Point(259, 242);
+ this.pictureBox_maskSRG.Name = "pictureBox_maskSRG";
+ this.pictureBox_maskSRG.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_maskSRG.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_maskSRG.TabIndex = 17;
+ this.pictureBox_maskSRG.TabStop = false;
+ //
+ // pictureBox_last
+ //
+ this.pictureBox_last.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.pictureBox_last.Location = new System.Drawing.Point(767, 242);
+ this.pictureBox_last.Name = "pictureBox_last";
+ this.pictureBox_last.Size = new System.Drawing.Size(246, 207);
+ this.pictureBox_last.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
+ this.pictureBox_last.TabIndex = 18;
+ this.pictureBox_last.TabStop = false;
+ //
+ // label_processingFileName
+ //
+ this.label_processingFileName.AutoSize = true;
+ this.label_processingFileName.BackColor = System.Drawing.Color.White;
+ this.tableLayoutPanel1.SetColumnSpan(this.label_processingFileName, 2);
+ this.label_processingFileName.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label_processingFileName.Location = new System.Drawing.Point(5, 531);
+ this.label_processingFileName.Name = "label_processingFileName";
+ this.label_processingFileName.Size = new System.Drawing.Size(500, 21);
+ this.label_processingFileName.TabIndex = 19;
+ this.label_processingFileName.Text = "Processing File: None";
+ this.label_processingFileName.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
+ //
+ // label_totalProgress
+ //
+ this.label_totalProgress.AutoSize = true;
+ this.label_totalProgress.BackColor = System.Drawing.Color.White;
+ this.label_totalProgress.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.label_totalProgress.Location = new System.Drawing.Point(767, 531);
+ this.label_totalProgress.Name = "label_totalProgress";
+ this.label_totalProgress.Size = new System.Drawing.Size(246, 21);
+ this.label_totalProgress.TabIndex = 20;
+ this.label_totalProgress.Text = "Total Progress: 0/0";
+ this.label_totalProgress.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
+ //
+ // panel2
+ //
+ this.panel2.Controls.Add(this.button_pause);
+ this.panel2.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.panel2.Location = new System.Drawing.Point(259, 479);
+ this.panel2.Name = "panel2";
+ this.panel2.Size = new System.Drawing.Size(246, 47);
+ this.panel2.TabIndex = 21;
+ //
+ // button_pause
+ //
+ this.button_pause.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.button_pause.Enabled = false;
+ this.button_pause.Location = new System.Drawing.Point(0, 0);
+ this.button_pause.Name = "button_pause";
+ this.button_pause.Size = new System.Drawing.Size(246, 47);
+ this.button_pause.TabIndex = 0;
+ this.button_pause.Text = "Pause";
+ this.button_pause.UseVisualStyleBackColor = true;
+ this.button_pause.Click += new System.EventHandler(this.Button_pause_Click);
+ //
+ // backgroundWorker1
+ //
+ this.backgroundWorker1.WorkerSupportsCancellation = true;
+ //
+ // Form1
+ //
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.ClientSize = new System.Drawing.Size(1018, 554);
+ this.Controls.Add(this.tableLayoutPanel1);
+ this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
+ this.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
+ this.Name = "Form1";
+ this.Text = "DeepTIAS1.9";
+ this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form1_FormClosing);
+ this.Load += new System.EventHandler(this.Form1_Load);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_extraction)).EndInit();
+ this.tableLayoutPanel1.ResumeLayout(false);
+ this.tableLayoutPanel1.PerformLayout();
+ this.panel3.ResumeLayout(false);
+ this.panel3.PerformLayout();
+ this.panel1.ResumeLayout(false);
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_detection)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_input)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_cropResized)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_output)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_outputSRG)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_maskSRG)).EndInit();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBox_last)).EndInit();
+ this.panel2.ResumeLayout(false);
+ this.ResumeLayout(false);
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.Button button_start;
+ private System.Windows.Forms.PictureBox pictureBox_extraction;
+ private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
+ private System.Windows.Forms.Panel panel1;
+ private System.ComponentModel.BackgroundWorker backgroundWorker1;
+ private System.Windows.Forms.Label label1;
+ private System.Windows.Forms.Label label2;
+ private System.Windows.Forms.Label label3;
+ private System.Windows.Forms.Label label4;
+ private System.Windows.Forms.Label label5;
+ private System.Windows.Forms.Label label6;
+ private System.Windows.Forms.Label label7;
+ private System.Windows.Forms.Label label8;
+ private System.Windows.Forms.PictureBox pictureBox_output;
+ private System.Windows.Forms.PictureBox pictureBox_outputSRG;
+ private System.Windows.Forms.PictureBox pictureBox_maskSRG;
+ private System.Windows.Forms.PictureBox pictureBox_last;
+ private System.Windows.Forms.Label label_processingFileName;
+ private System.Windows.Forms.Panel panel2;
+ private System.Windows.Forms.Button button_pause;
+ private System.Windows.Forms.Label label_totalProgress;
+ private System.Windows.Forms.Label label_mode;
+ private System.Windows.Forms.PictureBox pictureBox_detection;
+ private System.Windows.Forms.PictureBox pictureBox_input;
+ private System.Windows.Forms.PictureBox pictureBox_cropResized;
+ private System.Windows.Forms.Panel panel3;
+ private System.Windows.Forms.ComboBox comboBox_mode;
+ private System.Windows.Forms.Label label9;
+ }
+}
+
diff --git a/Main/Tongue extraction/Form1.cs b/Main/Tongue extraction/Form1.cs
new file mode 100644
index 0000000..8e9cfed
--- /dev/null
+++ b/Main/Tongue extraction/Form1.cs
@@ -0,0 +1,1284 @@
+using System;
+using System.Windows.Forms;
+using System.IO;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.ComponentModel;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Diagnostics;
+using System.Linq;
+using System.Collections.Generic;
+using TensorFlow;
+using OpenCvSharp;
+
+namespace Tongue_extraction
+{
+ public partial class Form1 : Form
+ {
+ //Use the RemoveSmallRegionDLL
+ [DllImport(@"RemoveSmallRegionDLL.dll", EntryPoint = "RemoveSmallRegion", SetLastError = true, CharSet = CharSet.Ansi, ExactSpelling = false, CallingConvention = CallingConvention.StdCall)]
+ extern static void RemoveSmallRegion(string name, string name2, int AreaLimit, int CheckMode, int NeihborMode);
+
+ Mat mat_drawBox = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_roi = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_input = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_roi256 = new Mat(256, 256, MatType.CV_8UC3, 1);
+ Mat mat_roisize = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_output = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_outputNoBox = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_outputChanged = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_mask = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_extraction = new Mat(1024, 1280, MatType.CV_8UC3, 1);
+ Mat mat_cropped;
+ Mat mat_outputSRGNoBox = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_outputSRG = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+ Mat mat_maskSRG = new Mat(1024, 1280, MatType.CV_8UC1, 1);
+
+ public static Bitmap bitmap_bitch;
+ string[] path;
+
+ OpenCvSharp.Rect rectangle;
+
+ byte[] byte_inputDetection;
+ byte[] byte_inputSegmentation;
+ OpenCvSharp.Point P1 = new OpenCvSharp.Point();
+ OpenCvSharp.Point P2 = new OpenCvSharp.Point();
+
+ byte[] mask = new byte[200000];
+
+ string basepath;
+ string imageFile;
+ string time;
+ string modelFile;
+ int ii = 0;
+ int count;
+ int areaCount;
+
+ Rect roi = new Rect();
+ int mmp = 0;
+ int pmm = 0;
+
+ int check_detection = 0;
+ float max_score = 0;
+
+ private ManualResetEvent manualReset = new ManualResetEvent(true);
+
+ string fileName_info = Directory.GetCurrentDirectory() + "//info//" + DateTime.Now.ToLocalTime().ToString("yyyyMMddhhmmss") + ".csv";
+ StreamWriter sw;
+
+ public Form1()
+ {
+ InitializeComponent();
+
+ }
+
+ private void Form1_Load(object sender, EventArgs e)
+ {
+ comboBox_mode.SelectedIndex = 1;
+ }
+
+ private void Button_start_Click(object sender, EventArgs e)
+ {
+ path = Directory.GetFiles("data");
+ button_start.Enabled = false;
+ button_pause.Enabled = true;
+ pictureBox_input.Image = null;
+ pictureBox_input.Refresh();
+ pictureBox_detection.Image = null;
+ pictureBox_detection.Refresh();
+ pictureBox_cropResized.Image = null;
+ pictureBox_cropResized.Refresh();
+ pictureBox_output.Image = null;
+ pictureBox_output.Refresh();
+ pictureBox_outputSRG.Image = null;
+ pictureBox_outputSRG.Refresh();
+ pictureBox_maskSRG.Image = null;
+ pictureBox_maskSRG.Refresh();
+ pictureBox_extraction.Image = null;
+ pictureBox_extraction.Refresh();
+ pictureBox_last.Image = null;
+ pictureBox_last.Refresh();
+ comboBox_mode.Enabled = false;
+
+ if (comboBox_mode.SelectedIndex == 0)
+ {
+ this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork_Image);
+ backgroundWorker1.RunWorkerAsync();
+ }
+ else if (comboBox_mode.SelectedIndex == 1)
+ {
+ this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork_ImageandCalib);
+ backgroundWorker1.RunWorkerAsync();
+ }
+ else if (comboBox_mode.SelectedIndex == 2)
+ {
+ //this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork_MaskImage);
+ //backgroundWorker1.RunWorkerAsync();
+ MessageBox.Show("This mode is not implemented.");
+ }
+ else
+ {
+ MessageBox.Show("Please select the mode on the combobox");
+ }
+ }
+
+ public static class ImageUtil
+ {
+ // Convert the image in filename to a Tensor suitable as input to the Inception model.
+ public static TFTensor CreateTensorFromImageFile(byte[] contents, TFDataType destinationDataType = TFDataType.UInt8)
+ {
+ // DecodeJpeg uses a scalar String-valued tensor as input.
+ var tensor = TFTensor.CreateString(contents);
+
+ TFGraph graph;
+ TFOutput input, output;
+
+ // Construct a graph to normalize the image
+ ConstructGraphToNormalizeImage(out graph, out input, out output, destinationDataType);
+
+ // Execute that graph to normalize this one image
+ using (var session = new TFSession(graph))
+ {
+ var normalized = session.Run(
+ inputs: new[] { input },
+ inputValues: new[] { tensor },
+ outputs: new[] { output });
+
+ if (session != null)
+ {
+ session.Dispose();
+ }
+ if (tensor != null)
+ {
+ tensor.Dispose();
+ }
+ if (graph != null)
+ {
+ graph.Dispose();
+ }
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+
+ return normalized[0];
+ }
+ }
+
+ // The inception model takes as input the image described by a Tensor in a very
+ // specific normalized format (a particular image size, shape of the input tensor,
+ // normalized pixel values etc.).
+ //
+ // This function constructs a graph of TensorFlow operations which takes as
+ // input a JPEG-encoded string and returns a tensor suitable as input to the
+ // inception model.
+
+ public static void ConstructGraphToNormalizeImage(out TFGraph graph, out TFOutput input, out TFOutput output, TFDataType destinationDataType = TFDataType.UInt8)
+ {
+ const int W = 256;
+ const int H = 256;
+ const float Mean = 0;
+ const float Scale = 1;
+ graph = new TFGraph();
+ input = graph.Placeholder(TFDataType.String);
+ output = graph.Cast(graph.Div(
+ x: graph.Sub(
+ x: graph.ResizeBilinear(
+ images: graph.ExpandDims(
+ input: graph.Cast(
+ graph.DecodeJpeg(contents: input, channels: 3), DstT: destinationDataType),
+ dim: graph.Const(0, "make_batch")),
+ size: graph.Const(new int[] { W, H }, "size")),
+ y: graph.Const(Mean, "mean")),
+ y: graph.Const(Scale, "scale")), destinationDataType);
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+ }
+ }
+
+ public static class ImageUtil2
+ {
+ // Convert the image in filename to a Tensor suitable as input to the Inception model.
+ public static TFTensor CreateTensorFromImageFile(byte[] contents, TFDataType destinationDataType = TFDataType.Float)
+ {
+ // DecodeJpeg uses a scalar String-valued tensor as input.
+ var tensor = TFTensor.CreateString(contents);
+
+ TFGraph graph;
+ TFOutput input, output;
+
+ // Construct a graph to normalize the image
+ ConstructGraphToNormalizeImage(out graph, out input, out output, destinationDataType);
+
+ // Execute that graph to normalize this one image
+ using (var session = new TFSession(graph))
+ {
+ var normalized = session.Run(
+ inputs: new[] { input },
+ inputValues: new[] { tensor },
+ outputs: new[] { output });
+
+ if (session != null)
+ {
+ session.Dispose();
+ }
+ if (tensor != null)
+ {
+ tensor.Dispose();
+ }
+ if (graph != null)
+ {
+ graph.Dispose();
+ }
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+
+ return normalized[0];
+ }
+ }
+
+ // The inception model takes as input the image described by a Tensor in a very
+ // specific normalized format (a particular image size, shape of the input tensor,
+ // normalized pixel values etc.).
+ //
+ // This function constructs a graph of TensorFlow operations which takes as
+ // input a JPEG-encoded string and returns a tensor suitable as input to the
+ // inception model.
+
+ public static void ConstructGraphToNormalizeImage(out TFGraph graph, out TFOutput input, out TFOutput output, TFDataType destinationDataType = TFDataType.Float)
+ {
+ const int W = 256;
+ const int H = 256;
+ //const int W = 512;
+ //const int H = 512;
+
+ const float Mean = 128;
+ const float Scale = 128;
+ graph = new TFGraph();
+ input = graph.Placeholder(TFDataType.String);
+ output = graph.Cast(graph.Div(
+ x: graph.Sub(
+ x: graph.ResizeBilinear(
+ images: graph.ExpandDims(
+ input: graph.Cast(
+ graph.DecodeJpeg(contents: input, channels: 3), DstT: TFDataType.Float),
+ dim: graph.Const(0, "make_batch")),
+ size: graph.Const(new int[] { W, H }, "size")),
+ y: graph.Const(Mean, "mean")),
+ y: graph.Const(Scale, "scale")), destinationDataType);
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+ }
+ }
+
+ public static byte[] Bitmap2Byte(Bitmap bitmap)
+ {
+ using (MemoryStream stream = new MemoryStream())
+ {
+ bitmap.Save(stream, ImageFormat.Jpeg);
+ byte[] data = new byte[stream.Length];
+ stream.Seek(0, SeekOrigin.Begin);
+ stream.Read(data, 0, Convert.ToInt32(stream.Length));
+ return data;
+ }
+ }
+
+ private static string DownloadDefaultModel(string dir)
+ {
+ var modelFile = Path.Combine(dir, "pruning101_step11999.pb");
+ return modelFile;
+ }
+
+ // Resolve the on-disk path of the fallback pix2pix model that was trained
+ // without detection (used when no tongue bounding box is found).
+ // NOTE(review): despite the name, nothing is downloaded.
+ private static string DownloadDefaultModel_noBoxPix2Pix(string dir)
+ {
+ return Path.Combine(dir, "424_256_64_5999_scale300_enhancment_L1loss0.02001.pb");
+ }
+
+ // Wrap a raw 8-bit grayscale buffer (row-major, width*height bytes) into an
+ // 8bpp indexed Bitmap, padding each row to the bitmap's stride and replacing
+ // the default palette with a linear grayscale ramp.
+ // NOTE(review): assumes rawValues.Length >= width*height — not validated here.
+ public static Bitmap ToGrayBitmap(byte[] rawValues, int width, int height)
+ {
+ //// Apply for a target bitmap variable and lock its memory area
+ Bitmap bmp = new Bitmap(width, height, PixelFormat.Format8bppIndexed);
+ BitmapData bmpData = bmp.LockBits(new Rectangle(0, 0, width, height),
+ ImageLockMode.WriteOnly, PixelFormat.Format8bppIndexed);
+
+ //// Get image parameters
+ int stride = bmpData.Stride; // Width of the scan line (may exceed `width` due to alignment)
+ int offset = stride - width; // Show gap between width and scan line width
+ IntPtr iptr = bmpData.Scan0; // Get the memory start position of bmpData
+ int scanBytes = stride * height;// Use stride width to indicate that this is the size of the memory area
+
+ //// The following is to convert the original display size byte array to the byte array actually stored in memory.
+ int posScan = 0, posReal = 0;// Set two position pointers respectively, pointing to the source array and the target array
+ byte[] pixelValues = new byte[scanBytes]; //Allocate memory for the target array
+
+ for (int x = 0; x < height; x++)
+ {
+ //// The following loop section is a simulated line scan
+ for (int y = 0; y < width; y++)
+ {
+ pixelValues[posScan++] = rawValues[posReal++];
+ }
+ posScan += offset; //At the end of the line scan, move the target position pointer over that "gap"
+ }
+
+ //// Use Marshal's Copy method to copy the just obtained memory byte array into BitmapData.
+ System.Runtime.InteropServices.Marshal.Copy(pixelValues, 0, iptr, scanBytes);
+ bmp.UnlockBits(bmpData); // Unlock the memory area
+
+ //// The following code is to modify the index table of the generated bitmap, from pseudo color to grayscale
+ // A throwaway 1x1 indexed bitmap is the standard way to obtain a mutable
+ // ColorPalette instance, since ColorPalette has no public constructor.
+ ColorPalette tempPalette;
+ using (Bitmap tempBmp = new Bitmap(1, 1, PixelFormat.Format8bppIndexed))
+ {
+ tempPalette = tempBmp.Palette;
+ }
+ for (int i = 0; i < 256; i++)
+ {
+ tempPalette.Entries[i] = Color.FromArgb(i, i, i);
+ }
+
+ bmp.Palette = tempPalette;
+
+ //// The algorithm ends here and returns the result.
+ return bmp;
+ }
+
+ // Background worker: for every image listed in the form-level `path` array,
+ // run tongue detection (Detection_Normal.pb), pix2pix segmentation, mask
+ // post-processing (RemoveSmallRegion DLL), and tongue-region extraction,
+ // updating the UI via Invoke and appending bounding-box/area rows to a CSV.
+ // Relies heavily on form-level fields (path, mask, ii, P1/P2, rectangle,
+ // mat_* buffers, mmp/pmm, sw, manualReset, ...).
+ private void BackgroundWorker1_DoWork_Image(object sender, DoWorkEventArgs e)
+ {
+
+ // Writer for the bounding-box info CSV.
+ // NOTE(review): this writer is only closed in Form1_FormClosing — rows may
+ // stay buffered until the form closes.
+ sw = new StreamWriter(fileName_info, false, System.Text.Encoding.GetEncoding("shift_jis"));
+ sw.WriteLine(
+ "image" + "," +
+ "top left X" + "," + "top left Y" + "," + "bottom right X" + "," + "bottom right Y" + "," +
+ "Width" + "," + "Height" + "," + "Area" + "," + "Gloss Count");
+
+ using (MemoryStream ms = new MemoryStream())
+ {
+ for (int a = 0; a < path.Length; a++)
+ {
+ // Blocks here while the user has pressed Pause (see Button_pause_Click).
+ manualReset.WaitOne();
+ // Reset the write index into the shared 256*256 `mask` buffer.
+ ii = 0;
+ basepath = Directory.GetCurrentDirectory();
+ imageFile = System.Text.RegularExpressions.Regex.Replace(path[a], "data", "");
+ Invoke((MethodInvoker)delegate
+ {
+ label_processingFileName.Text = "Processing File: " + imageFile;
+ count = a + 1;
+ label_totalProgress.Text = "Total Progress: " + count + "/" + path.Length;
+ });
+
+ mat_input = Cv2.ImRead(basepath + "\\data" + imageFile, ImreadModes.Color);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_input.Image = bitmap_bitch;
+ pictureBox_input.Refresh();
+ });
+
+ label8.BackColor = Color.White;
+ label1.BackColor = Color.Red;
+ mat_drawBox = mat_input.Clone();
+ mat_cropped = new Mat(mat_input.Size(), MatType.CV_8UC3, 1);
+ byte_inputDetection = Bitmap2Byte(bitmap_bitch);
+
+ // Tongue detection with an object-detection model (SSD-style outputs).
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(Directory.GetCurrentDirectory() + "/Detection_Normal.pb");
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil.CreateTensorFromImageFile(byte_inputDetection, TFDataType.UInt8);
+
+ var runner = session.GetRunner();
+ runner
+
+ .AddInput(graph["image_tensor"][0], tensor)
+ .Fetch("detection_boxes", "detection_scores", "detection_classes", "num_detections");
+
+ var output = runner.Run();
+ var boxes = (float[,,])output[0].GetValue();
+ var scores = (float[,])output[1].GetValue();
+ var classes = (float[,])output[2].GetValue();
+ var detections = (float[])output[3].GetValue();
+
+ check_detection = 0;
+ max_score = 0;
+
+ // NOTE(review): scores is float[1,N]; Length equals N only because the
+ // first dimension is 1 — this would over-index if batch size were > 1.
+ for (int i = 0; i < scores.Length; i++)
+ {
+ // Keep the highest-scoring box above the 0.5 confidence threshold.
+ if ((scores[0, i] > 0.5) && (scores[0, i] > max_score))
+ {
+ max_score = scores[0, i];
+ // Boxes are normalized [ymin, xmin, ymax, xmax]; scale to pixels.
+ float y_min = boxes[0, i, 0] * (float)bitmap_bitch.Height;
+ float x_min = boxes[0, i, 1] * (float)bitmap_bitch.Width;
+ float y_max = boxes[0, i, 2] * (float)bitmap_bitch.Height;
+ float x_max = boxes[0, i, 3] * (float)bitmap_bitch.Width;
+ P1.X = (int)x_min;
+ P1.Y = (int)y_min;
+ P2.X = (int)x_max;
+ P2.Y = (int)y_max;
+ Cv2.Rectangle(mat_drawBox, P1, P2, new Scalar(0, 255, 0), 5);
+ rectangle.X = (int)x_min;
+ rectangle.Y = (int)y_min;
+ rectangle.Width = (int)(x_max - x_min);
+ rectangle.Height = (int)(y_max - y_min);
+
+ check_detection = 1;
+ }
+ }
+ }
+ }
+
+ // If no tongue was detected, the crop-resize model (trained on detected
+ // crops) would be inappropriate, so fall back to the older model that was
+ // trained without detection.
+ if (check_detection == 0)
+ {
+ MessageBox.Show("Error: Sorry can not detect any tongue in this image.\nPress [OK] to skip preprocessing.",
+ "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ time = DateTime.Now.ToLocalTime().ToString();
+ // NOTE(review): filename has a trailing space ("DetectionFailedLog.txt ") —
+ // likely unintended; the log file is created with that trailing space.
+ File.AppendAllText("DetectionFailedLog.txt ", time + " " + imageFile + "\n");
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_detection.Image = null;
+ pictureBox_detection.Refresh();
+ pictureBox_cropResized.Image = null;
+ pictureBox_cropResized.Refresh();
+ pictureBox_output.Image = null;
+ pictureBox_output.Refresh();
+ pictureBox_outputSRG.Image = null;
+ pictureBox_outputSRG.Refresh();
+ pictureBox_maskSRG.Image = null;
+ pictureBox_maskSRG.Refresh();
+ pictureBox_extraction.Image = null;
+ pictureBox_extraction.Refresh();
+ pictureBox_last.Image = null;
+ pictureBox_last.Refresh();
+ });
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+
+ Thread.Sleep(1000);
+ modelFile = DownloadDefaultModel_noBoxPix2Pix(basepath);
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(modelFile);
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+
+ var runner = session.GetRunner();
+ runner
+ .AddInput(graph["generator/input_image"][0], tensor)
+ .Fetch(graph["generator/prediction"][0]);
+ var output = runner.Run();
+ float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ // Threshold the generator output at 0 into a binary 256x256 mask.
+ for (int p = 0; p < 256; p++)
+ {
+ for (int q = 0; q < 256; q++)
+ {
+ float check = resultfloat[0, p, q, 0];
+ if (check < 0)
+ {
+ mask[ii] = 0;
+ }
+ else
+ {
+ mask[ii] = 255;
+ }
+ ii++;
+ }
+ }
+ }
+ }
+ Thread.Sleep(1000);
+ bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ mat_outputNoBox = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputNoBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_output.Image = bitmap_bitch;
+ pictureBox_output.Refresh();
+ label1.BackColor = Color.White;
+ label4.BackColor = Color.Red;
+ });
+
+ mat_outputNoBox.SaveImage(basepath + "\\output256" + imageFile);
+ // Noise removal via region growing (external DLL), two passes.
+ try
+ {
+ RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_outputSRGNoBox = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRGNoBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_outputSRG.Image = bitmap_bitch;
+ pictureBox_outputSRG.Refresh();
+ label4.BackColor = Color.White;
+ label5.BackColor = Color.Red;
+ });
+ // Scale the mask back to the original image size.
+ Cv2.Resize(mat_outputSRGNoBox, mat_mask, mat_input.Size());
+ mat_mask.SaveImage(basepath + "\\mask" + imageFile);
+ try
+ {
+ RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
+ // Re-binarize: the resize interpolation introduces intermediate gray levels.
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ label5.BackColor = Color.White;
+ label6.BackColor = Color.Red;
+ });
+ // Apply the mask: masked-out pixels become white; count tongue pixels.
+ mat_extraction = mat_input.Clone();
+ areaCount = 0;
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At(i, j);
+ if (mat_maskSRG.At(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At(i, j).Item2);
+ mat_extraction.Set(i, j, pix);
+ areaCount++;
+ }
+ }
+ }
+ mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_extraction.Image = bitmap_bitch;
+ pictureBox_extraction.Refresh();
+ });
+ label6.BackColor = Color.White;
+
+
+
+ label7.BackColor = Color.Red;
+ label7.BackColor = Color.White;
+
+
+ label8.BackColor = Color.Red;
+ // Save bounding-box info to the CSV ("None" columns: no box was detected).
+ sw.WriteLine(
+ imageFile.Substring(1) + ","
+ + "None" + "," + "None" + ","
+ + "None" + "," + "None" + ","
+ + "None" + "," + "None" + "," + areaCount.ToString()
+ );
+
+ continue;
+ }
+
+ // Processing when a tongue was detected successfully.
+ else
+ {
+ // Save the image with the detected bounding box drawn on it.
+ mat_drawBox.SaveImage(basepath + "\\detection" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_drawBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_detection.Image = bitmap_bitch;
+ pictureBox_detection.Refresh();
+ });
+ label1.BackColor = Color.White;
+
+ // Crop to the detected region and resize to 256*256 for display.
+ label2.BackColor = Color.Red;
+ for (int i = P1.Y; i < P2.Y; i++)
+ {
+ for (int j = P1.X; j < P2.X; j++)
+ {
+ // Copy the box interior; everything outside stays at the fill value.
+ Vec3b pix = mat_input.At(i, j);
+ mat_cropped.Set(i, j, pix);
+ }
+ }
+ mat_cropped.SaveImage(basepath + "\\cropped" + imageFile);
+ // Cut out the detected region as an ROI.
+ OpenCvSharp.Size size_roi = new OpenCvSharp.Size();
+ size_roi.Height = rectangle.Height;
+ size_roi.Width = rectangle.Width;
+ roi = new Rect(P1, size_roi);
+ mat_roisize = mat_input.Clone(roi);
+ // Resize to 256*256 for segmentation.
+ Cv2.Resize(mat_roisize, mat_roi, mat_roi256.Size());
+ mat_roi.SaveImage(basepath + "\\cropresized" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_roi);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_cropResized.Image = bitmap_bitch;
+ pictureBox_cropResized.Refresh();
+ });
+ label2.BackColor = Color.White;
+
+ // Run segmentation.
+ label3.BackColor = Color.Red;
+ byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+ Thread.Sleep(1000);
+ modelFile = DownloadDefaultModel(basepath);
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(modelFile);
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+ var runner = session.GetRunner();
+ runner
+ .AddInput(graph["generator/input_image"][0], tensor)
+ .Fetch(graph["generator/prediction"][0]);
+
+ //.AddInput(graph["input_image"][0], tensor)
+ //.Fetch(graph["generator1/decoder_1/Tanh"][0]);
+
+ var output = runner.Run();
+ float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ // Threshold the generator output at 0 into a binary 256x256 mask.
+ for (int p = 0; p < 256; p++)
+ {
+ for (int q = 0; q < 256; q++)
+ {
+ float check = resultfloat[0, p, q, 0];
+ if (check < 0)
+ {
+ mask[ii] = 0;
+ }
+ else
+ {
+ mask[ii] = 255;
+ }
+ ii++;
+ }
+ }
+ }
+ }
+ GC.Collect();
+ Thread.Sleep(1000);
+ bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ mat_output = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_output);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_output.Image = bitmap_bitch;
+ pictureBox_output.Refresh();
+ });
+ label3.BackColor = Color.White;
+
+ // Post-process the segmentation result.
+ label4.BackColor = Color.Red;
+ // Save the raw segmentation result.
+ mat_output.SaveImage(basepath + "\\output256" + imageFile);
+ // Noise removal via region growing (external DLL), two passes.
+ try
+ {
+ RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_outputSRG = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_outputSRG.Image = bitmap_bitch;
+ pictureBox_outputSRG.Refresh();
+ });
+ label4.BackColor = Color.White;
+ label5.BackColor = Color.Red;
+
+ // Resize the segmentation result back to the bounding-box size.
+ // (the result may no longer be strictly binary after this interpolation)
+ Cv2.Resize(mat_outputSRG, mat_outputChanged, mat_roisize.Size());
+ mat_outputChanged.SaveImage(basepath + "\\output_resized" + imageFile);
+ // Paste the tongue mask back at the detected position in a full-size mask.
+ // NOTE(review): mmp/pmm are form-level counters; unlike the calibration
+ // variant below, there are no bounds checks against mat_outputChanged here.
+ mat_mask = new Mat(mat_input.Size(), MatType.CV_8UC1, 1);
+ for (int i = P1.Y; i < P2.Y; i++)
+ {
+ for (int j = P1.X; j < P2.X; j++)
+ {
+ int pix = mat_outputChanged.At(mmp, pmm);
+ mat_mask.Set(i, j, pix);
+ pmm++;
+ }
+ mmp++;
+ pmm = 0;
+ }
+ mmp = 0;
+ Cv2.Resize(mat_mask, mat_mask, mat_input.Size());
+ mat_mask.SaveImage(basepath + "\\mask" + imageFile);
+ // Noise removal.
+ try
+ {
+ RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
+ RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+
+ mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+ // Final binary mask.
+ mat_maskSRG.SaveImage(basepath + "\\mask_final" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ });
+ label5.BackColor = Color.White;
+
+ // Combine the original image and the mask to extract the tongue region.
+ label6.BackColor = Color.Red;
+ mat_extraction = mat_input.Clone();
+ areaCount = 0;
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At(i, j);
+ if (mat_maskSRG.At(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At(i, j).Item2);
+ mat_extraction.Set(i, j, pix);
+ areaCount++;
+ }
+ }
+ }
+ mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_extraction.Image = bitmap_bitch;
+ pictureBox_extraction.Refresh();
+ });
+
+ label6.BackColor = Color.White;
+
+ // Gloss extraction step.
+ // The extraction result is not used here because handling (255,255,255)
+ // pixels is cumbersome.
+ label7.BackColor = Color.Red;
+ label7.BackColor = Color.White;
+
+ // Record that this image finished processing.
+ label8.BackColor = Color.Red;
+ time = DateTime.Now.ToLocalTime().ToString();
+ // NOTE(review): filename has a trailing space ("Log.txt ") — likely unintended.
+ File.AppendAllText("Log.txt ", time + " " + imageFile + " Done!\n");
+
+
+ // Save bounding-box info to the CSV.
+ sw.WriteLine(
+ imageFile.Substring(1) + ","
+ + P1.X.ToString() + "," + P1.Y.ToString() + ","
+ + P2.X.ToString() + "," + P2.Y.ToString() + ","
+ + Math.Abs(P1.X - P2.X).ToString() + "," + Math.Abs(P1.Y - P2.Y).ToString() + ","
+ + areaCount.ToString() + ","
+ );
+ }
+ GC.Collect();
+ }
+ MessageBox.Show("Finished!");
+
+ Invoke((MethodInvoker)delegate
+ {
+ button_start.Enabled = true;
+ button_pause.Enabled = false;
+ label_processingFileName.Text = "Processing File: None";
+ });
+ }
+ }
+
+ // Background worker variant that processes per-sample directories under
+ // data\errors, each containing one image (bmp/png) and one calibration CSV.
+ // Pipeline: detection -> pix2pix segmentation -> mask post-processing ->
+ // extraction -> 5-point-click color extraction (RGB -> XYZ -> Lab) -> CSV output.
+ private void BackgroundWorker1_DoWork_ImageandCalib(object sender, DoWorkEventArgs e)
+ {
+
+ // Writer for the bounding-box info CSV.
+ sw = new StreamWriter(fileName_info, false, System.Text.Encoding.GetEncoding("shift_jis"));
+ // Prepare the output CSV header: 5 click points then 8 area points.
+ sw.Write(
+ "image" + "," +
+ "top left X" + "," + "top left Y" + "," + "bottom right X" + "," + "bottom right Y" + ","
+ );
+ for (int i = 0; i < 1; i++)
+ for (int j = 0; j < 5; j++)
+ sw.Write("x" + j.ToString() + "," + "y" + j.ToString() + ",");
+ for (int i = 0; i < 1; i++)
+ for (int j = 0; j < 8; j++)
+ sw.Write("x" + j.ToString() + "," + "y" + j.ToString() + ",");
+ sw.WriteLine();
+ sw.Close();
+
+ using (MemoryStream ms = new MemoryStream())
+ {
+ // Enumerate the per-sample directories.
+ var glob_dir = Directory.GetDirectories(@"data\errors");
+
+ for (int a = 0; a < glob_dir.Length; a++)
+ {
+ manualReset.WaitOne();
+
+ // Paths to the image and calibration file.
+ // NOTE(review): assumes each directory contains at least one csv and one
+ // bmp/png; [0] will throw otherwise.
+ var glob_file = Directory.GetFiles(glob_dir[a]);
+ var path_calib = glob_file.Where(n => n.Contains("csv")).ToList()[0];
+ var path_image = glob_file.Where(n => n.Contains("bmp") || n.Contains("png")).ToList()[0];
+ var path_base = Directory.GetCurrentDirectory();
+ var name_image = Path.GetFileName(path_image);
+ var name_dir = Path.GetFileName(glob_dir[a]);
+
+ // Display status.
+ Invoke((MethodInvoker)delegate
+ {
+ label_processingFileName.Text = "Processing File: " + "\\" + name_dir;
+ count = a + 1;
+ label_totalProgress.Text = "Total Progress: " + count + "/" + glob_dir.Length;
+ });
+
+ // Load and display the input image.
+ mat_input = Cv2.ImRead(path_image, ImreadModes.Color);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_input.Image = bitmap_bitch;
+ pictureBox_input.Refresh();
+ });
+
+ // Detection
+ label8.BackColor = Color.White;
+ label1.BackColor = Color.Red;
+ mat_drawBox = mat_input.Clone();
+ mat_cropped = new Mat(mat_input.Size(), MatType.CV_8UC3, 1);
+ byte_inputDetection = Bitmap2Byte(bitmap_bitch);
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(path_base + "/Detection_Normal.pb");
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil.CreateTensorFromImageFile(byte_inputDetection, TFDataType.UInt8);
+
+ var runner = session.GetRunner();
+ runner
+ .AddInput(graph["image_tensor"][0], tensor)
+ .Fetch("detection_boxes", "detection_scores", "detection_classes", "num_detections");
+
+ var output = runner.Run();
+ var boxes = (float[,,])output[0].GetValue();
+ var scores = (float[,])output[1].GetValue();
+ var classes = (float[,])output[2].GetValue();
+ var detections = (float[])output[3].GetValue();
+ check_detection = 0;
+ max_score = 0;
+ // Keep the highest-scoring box above the 0.5 confidence threshold.
+ for (int i = 0; i < scores.Length; i++)
+ {
+ if ((scores[0, i] > 0.5) && (scores[0, i] > max_score))
+ {
+ max_score = scores[0, i];
+ // Boxes are normalized [ymin, xmin, ymax, xmax]; scale to pixels.
+ float y_min = boxes[0, i, 0] * (float)bitmap_bitch.Height;
+ float x_min = boxes[0, i, 1] * (float)bitmap_bitch.Width;
+ float y_max = boxes[0, i, 2] * (float)bitmap_bitch.Height;
+ float x_max = boxes[0, i, 3] * (float)bitmap_bitch.Width;
+ P1.X = (int)x_min;
+ P1.Y = (int)y_min;
+ P2.X = (int)x_max;
+ P2.Y = (int)y_max;
+ Cv2.Rectangle(mat_drawBox, P1, P2, new Scalar(0, 255, 0), 5);
+ rectangle.X = (int)x_min;
+ rectangle.Y = (int)y_min;
+ rectangle.Width = (int)(x_max - x_min);
+ rectangle.Height = (int)(y_max - y_min);
+
+ check_detection = 1;
+ }
+ }
+ }
+ }
+ // Display and save the detection result.
+ // NOTE(review): unlike BackgroundWorker1_DoWork_Image, there is no
+ // check_detection == 0 fallback here — a failed detection proceeds with
+ // the stale P1/P2 from the previous iteration.
+ mat_drawBox.SaveImage(path_base + "\\detection" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_drawBox);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_detection.Image = bitmap_bitch;
+ pictureBox_detection.Refresh();
+ });
+ label1.BackColor = Color.White;
+
+ // Crop with the detected tongue region.
+ label2.BackColor = Color.Red;
+ for (int i = P1.Y; i < P2.Y; i++)
+ {
+ for (int j = P1.X; j < P2.X; j++)
+ {
+ Vec3b pix = mat_input.At(i, j);
+ mat_cropped.Set(i, j, pix);
+ }
+ }
+ mat_cropped.SaveImage(path_base + "\\cropped" + "\\" + name_dir + ".bmp");
+
+ // Resize the detected region to the model input size.
+ OpenCvSharp.Size size_roi = new OpenCvSharp.Size();
+ size_roi.Height = rectangle.Height;
+ size_roi.Width = rectangle.Width;
+ roi = new Rect(P1, size_roi);
+ mat_roisize = mat_input.Clone(roi);
+ Cv2.Resize(mat_roisize, mat_roi, mat_roi256.Size());
+ mat_roi.SaveImage(path_base + "\\cropresized" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_roi);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_cropResized.Image = bitmap_bitch;
+ pictureBox_cropResized.Refresh();
+ });
+ label2.BackColor = Color.White;
+
+ // Segmenation
+ label3.BackColor = Color.Red;
+ byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+ Thread.Sleep(1000);
+ modelFile = DownloadDefaultModel(path_base);
+ // Reset the write index into the shared 256*256 `mask` buffer.
+ ii = 0;
+ using (var graph = new TFGraph())
+ {
+ var model = File.ReadAllBytes(modelFile);
+ graph.Import(model, "");
+
+ using (var session = new TFSession(graph))
+ {
+ var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+ var runner = session.GetRunner();
+ runner
+ //.AddInput(graph["generator/input_image"][0], tensor)
+ //.Fetch(graph["generator/prediction"][0]);
+
+ .AddInput(graph["input_image"][0], tensor)
+ .Fetch(graph["generator1/decoder_1/Tanh"][0]);
+
+ var output = runner.Run();
+ float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ // Threshold the generator output at 0 into a binary 256x256 mask.
+ for (int p = 0; p < 256; p++)
+ {
+ for (int q = 0; q < 256; q++)
+ {
+ float check = resultfloat[0, p, q, 0];
+ if (check < 0)
+ {
+ mask[ii] = 0;
+ }
+ else
+ {
+ mask[ii] = 255;
+ }
+ ii++;
+ }
+ }
+ }
+ }
+ GC.Collect();
+ Thread.Sleep(1000);
+
+ // Display and save the segmentation result.
+ bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ mat_output = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_output);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_output.Image = bitmap_bitch;
+ pictureBox_output.Refresh();
+ });
+ label3.BackColor = Color.White;
+ label4.BackColor = Color.Red;
+ mat_output.SaveImage(path_base + "\\output256" + "\\" + name_dir + ".bmp");
+
+ // Noise removal via region growing (external DLL), two passes.
+ try
+ {
+ RemoveSmallRegion(path_base + "\\output256" + "\\" + name_dir + ".bmp", path_base + "\\output_changed1" + "\\" + name_dir + ".bmp", 500, 1, 1);
+ RemoveSmallRegion(path_base + "\\output_changed1" + "\\" + name_dir + ".bmp", path_base + "\\output_changed2" + "\\" + name_dir + ".bmp", 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_outputSRG = new Mat(path_base + "\\output_changed2" + "\\" + name_dir + ".bmp", ImreadModes.GrayScale);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_outputSRG.Image = bitmap_bitch;
+ pictureBox_outputSRG.Refresh();
+ });
+ label4.BackColor = Color.White;
+ label5.BackColor = Color.Red;
+
+ // Resize back to the bounding-box size.
+ Cv2.Resize(mat_outputSRG, mat_outputChanged, mat_roisize.Size());
+ mat_outputChanged.SaveImage(path_base + "\\output_resized" + "\\" + name_dir + ".bmp");
+
+ // Build a mask the same size as the input. Occasionally unstable: the
+ // resize can change the size slightly and overrun the range, hence the
+ // explicit bounds checks below. The Sleep/GC is a best-effort mitigation.
+ Thread.Sleep(100);
+ GC.Collect();
+
+ mat_mask = new Mat(mat_input.Size(), MatType.CV_8UC1, 0);
+ var y_mask = 0;
+ for (int y = P1.Y; y < P2.Y; y++)
+ {
+ if (y_mask >= mat_outputChanged.Height)
+ break;
+
+ var x_mask = 0;
+ for (int x = P1.X; x < P2.X; x++)
+ {
+ if (x_mask >= mat_outputChanged.Width)
+ break;
+
+ int pix = mat_outputChanged.At(y_mask, x_mask);
+ mat_mask.Set(y, x, pix);
+ x_mask++;
+ }
+ y_mask++;
+ }
+ mmp = 0;
+ Cv2.Resize(mat_mask, mat_mask, mat_input.Size());
+ mat_mask.SaveImage(path_base + "\\mask" + "\\" + name_dir + ".bmp");
+
+ // Noise removal.
+ try
+ {
+ RemoveSmallRegion(path_base + "\\mask" + "\\" + name_dir + ".bmp", path_base + "\\mask_changed1" + "\\" + name_dir + ".bmp", 500, 1, 1);
+ RemoveSmallRegion(path_base + "\\mask_changed1" + "\\" + name_dir + ".bmp", path_base + "\\mask_changed2" + "\\" + name_dir + ".bmp", 500, 0, 0);
+ }
+ catch
+ {
+ MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ break;
+ }
+ mat_maskSRG = new Mat(path_base + "\\mask_changed2" + "\\" + name_dir + ".bmp", ImreadModes.GrayScale);
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+
+ // Final binary mask.
+ mat_maskSRG.SaveImage(path_base + "\\mask_final" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ });
+ label5.BackColor = Color.White;
+
+ // Combine the original image and the mask to extract the tongue region.
+ // todo: opencv mask operations and area computation instead of per-pixel loop
+ label6.BackColor = Color.Red;
+ mat_extraction = mat_input.Clone();
+ areaCount = 0;
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At(i, j);
+ if (mat_maskSRG.At(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At(i, j).Item2);
+ mat_extraction.Set(i, j, pix);
+ areaCount++;
+ }
+ }
+ }
+ mat_extraction.SaveImage(path_base + "\\extraction" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_extraction.Image = bitmap_bitch;
+ pictureBox_extraction.Refresh();
+ });
+ label6.BackColor = Color.White;
+
+ // Color extraction based on the 5-point click method.
+ var path_colorMatrixXYZ = "xyz.txt";
+
+ // Color-extraction helper class.
+ ColorExtractor ce = new ColorExtractor();
+
+ // Create the masked tongue-region image.
+ Mat mat_finalMask = mat_maskSRG.Clone();
+ Mat mat_maskedImg = new Mat();
+ mat_input.CopyTo(mat_maskedImg, mat_finalMask);
+
+ // Five-point click method (Ishikawa, 2010).
+ List list_5points_3 = ce.Get5points(mat_finalMask, ColorExtractor.FivePointMethod.Method3);
+
+ // Obtain the 8 regions.
+ List list_8area_3 = ce.Get8area(list_5points_3);
+
+ // Visualize the regions.
+ var mat_areaDicision = ce.ShowResult(mat_input.Clone(), list_5points_3, list_8area_3);
+
+ // Extract colors.
+ List list_8Bgr = ce.Get8colors(mat_maskedImg, list_8area_3);
+
+ // Color conversion (RGB -> XYZ -> Lab).
+ List list_8Lab = ce.Calc8Lab(list_8Bgr, path_calib, path_colorMatrixXYZ);
+
+ // Save (truncate any existing file first).
+ string CSVfilename = path_base + "\\color" + "\\" + name_dir + ".csv";
+ FileStream CSV_file = File.Open(CSVfilename, FileMode.OpenOrCreate, FileAccess.Write);
+ CSV_file.Seek(0, SeekOrigin.Begin);
+ CSV_file.SetLength(0);
+ CSV_file.Close();
+
+ StreamWriter CSV_data = new StreamWriter(CSVfilename);
+ // NOTE(review): last header column "B" is probably meant to be "b" (Lab).
+ CSV_data.WriteLine("Area,R,G,B,L,a,B");
+ for (int i = 0; i < list_8Bgr.Count(); i++)
+ {
+ string str = (i + 1).ToString() + ",";
+ // BGR stored Val0..Val2; written here in R,G,B order.
+ str +=
+ list_8Bgr[i].Val2.ToString("0.0000") + "," +
+ list_8Bgr[i].Val1.ToString("0.0000") + "," +
+ list_8Bgr[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val0.ToString("0.0000") + "," +
+ list_8Lab[i].Val1.ToString("0.0000") + "," +
+ list_8Lab[i].Val2.ToString("0.0000");
+ CSV_data.WriteLine(str);
+ }
+ CSV_data.Close();
+
+ // Dispose intermediates.
+ mat_finalMask.Dispose();
+ mat_maskedImg.Dispose();
+ GC.Collect();
+ System.Threading.Thread.Sleep(100);
+
+ mat_areaDicision.SaveImage(path_base + "\\autoAreaDecision" + "\\" + name_dir + ".bmp");
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_areaDicision);
+ Invoke((MethodInvoker)delegate
+ {
+ pictureBox_last.Image = bitmap_bitch;
+ pictureBox_last.Refresh();
+ });
+ label7.BackColor = Color.White;
+
+ // Processing log.
+ // NOTE(review): filename has a trailing space ("Log.txt ") — likely unintended.
+ label8.BackColor = Color.Red;
+ time = DateTime.Now.ToLocalTime().ToString();
+ File.AppendAllText("Log.txt ", time + " " + "\\" + name_dir + ".bmp" + " Done!\n");
+
+ // Append the info row (writer reopened in append mode each iteration).
+ sw = new StreamWriter(fileName_info, true, System.Text.Encoding.GetEncoding("shift_jis"));
+ sw.Write(
+ name_dir.ToString() + ","
+ // bounding box
+ + P1.X.ToString() + "," + P1.Y.ToString() + ","
+ + P2.X.ToString() + "," + P2.Y.ToString() + ","
+ // area
+ //+ areaCount.ToString() + ","
+ );
+ foreach (var n in list_5points_3)
+ sw.Write(n.X + "," + n.Y + ",");
+ foreach (var n in list_8area_3)
+ sw.Write(n.X + "," + n.Y + ",");
+ sw.Write("\n");
+
+ sw.Close();
+ // Cleanup
+
+ }
+ GC.Collect();
+ MessageBox.Show("Finished!");
+
+ Invoke((MethodInvoker)delegate
+ {
+ button_start.Enabled = true;
+ button_pause.Enabled = false;
+ label_processingFileName.Text = "Processing File: None";
+ });
+ }
+ }
+
+ // Toggle the worker loop between paused and running; the button caption
+ // doubles as the state flag.
+ private void Button_pause_Click(object sender, EventArgs e)
+ {
+ bool requestingPause = button_pause.Text == "Pause";
+ if (requestingPause)
+ {
+ // Worker threads block on manualReset.WaitOne() until resumed.
+ manualReset.Reset();
+ button_pause.Text = "Continue";
+ }
+ else
+ {
+ // Release any worker waiting on the event.
+ manualReset.Set();
+ button_pause.Text = "Pause";
+ }
+ }
+
+ // Ensure the info CSV writer is flushed and closed before the form exits.
+ private void Form1_FormClosing(object sender, FormClosingEventArgs e)
+ {
+ Console.WriteLine("file closing");
+ // Null-conditional call: equivalent to the explicit null check.
+ sw?.Close();
+ Console.WriteLine("file closed");
+ }
+
+ // Designer-wired event handler; intentionally left empty.
+ private void comboBox1_SelectedIndexChanged(object sender, EventArgs e)
+ {
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/Main/Tongue extraction/Form1.resx b/Main/Tongue extraction/Form1.resx
new file mode 100644
index 0000000..e1424ee
--- /dev/null
+++ b/Main/Tongue extraction/Form1.resx
@@ -0,0 +1,1080 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ 17, 17
+
+
+ 58
+
+
+
+
+ AAABAAEAa4AAAAEAIAAo3gAAFgAAACgAAABrAAAAAAEAAAEAIAAAAAAAANYAAGM4AABjOAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYA5eTmA+Xk5gLl5OYB5ePlAOjs6gDl5OUA4dznAOfn
+ 5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXj5gDl5OYA5eTmAOXj5gHl5OYA5enrAObe4QDl5OYC5eTmBOXk
+ 5gTl5OYD5eTmAebk5wDk5OcA5+XkAOXj6ADl5ecA5+HnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5QDl4+UA5eTlCeXk5kPl5OYl5uXmBujl
+ 5gDn5eYA3OTmAOXl5QDl5eUA5ebjAOXk5gPl5OYE5eTmA+Xk5gLl5OYB5uPmAO3o7gDj5OQA6+LsAObl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmA+Xk5gDl5OZN5eTm/+Xk
+ 5vjl5Obe5eTmuuXk5pDl5OZj5eTmOeXk5hbx5ewB/+T/AOfk5wDl6OoA5OPlAOTk5QDl5OYB5eTmBOXk
+ 5gTl5OYD5eTmAuXk5gHk4eYA5uHkAObk5QDk4ugA5eXlAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOTj5QDl5OYD5eTmAOXk
+ 5qfl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm8eXk5s/l5Oap5eTmfeXk5lDl5OYq5eTmCubn
+ 5gDm5uYA5d7mAOXm5QDk6OQA5OjkAOXl5gPl5OYE5eTmBOXk5gLl5OYB5eTmAObo5gDt8+IA5ubmAOXl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm4+YA5eTmAeXk
+ 5gDl5OYW5eTm7OXk5v/l5Ob95eTm/eXk5vzl5Ob75eTm++Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5vzl5Obl5eTmwOXk5pbl5OZp5eTmP+Xk5hvm5+YC5+jmAObl5gDr5OYA5OTnAOTk5wDm5OcB5eTmBOXk
+ 5gTl5OYD5eTmAuXk5gHl5OcA4uXkAOTl5QDq5eoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYD5eTmAOXk5lfl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm++Xk
+ 5vvl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm9eXk5tXl5Oau5eTmg+Xk5lbl5OYu5eTnDuPk
+ 5QDj5OUA5+XoAOXj6ADg89UA4e7XAOXk5gLl5OYE5eTmBOXk5gLl5OYB5eTmAObl5gDl5OUA5uTmAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADj4uMA5uTmAOXk5gPl5OYA5eTmpOXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/eXk5vzl5Ob75eTm++Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v7l5Obp5eTmx+Xk5p7l5OZw5eTmROXk5h/m5eYE5eXmAObk5gDl5OYA5eTmAOXk5gDl5OYB5eTmA+Xk
+ 5gTl5OYD5eTmAuXk5gHl5OUA6tneAOTm5wDk4uQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOTk5QDl5OYB5eTmAOXk5hDl5Obl5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm/OXk
+ 5vvl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm+OXk5tvl5Oa25eTmiuXk5l3l5OYz5uPmEsvl
+ zQDg5eIA5+PnAObk5gDm5OYA5uTmAOXk5wHp4+UA6uPlALb09QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eXnAOXk5gPl5OYA5eTmSeXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5vzl5Ob75eTm++Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Obt5eTmzOXk5qPl5OZ25eTmS+Xk5iXl5ecH5eXnAOPj5QDf398A39/fAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOji6ADk4+UA5eTmBOXk5gDl5OaT5eTm/+Xk5vvl5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm/OXk
+ 5vvl5Ob85eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm+eXk5ufl5OaY5eXnAOXl5wHl4+UAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5OTlAOXk5wHl5ecA5eXoB+Xk5tjl5Ob/5eTm/eXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v3l5Ob75eTm++Xk5v3l5Ob75eTm/+Xk5n3l5OYA5eTmBOXm
+ 6AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmA+Xk5gDl5OY95eTm/+Xk
+ 5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obr5eTmGOXk
+ 5gDl5OYB4ODgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA7entAOXk5gDl5OYE5eTmAOXk
+ 5onl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk
+ 5pbl5OYA5eTmA+Xk5QDm5ugAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmAePk
+ 5gDj5OYD5eTm0+Xk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTmNeXk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYD5eTmAOXk5j3l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob85eTm/+Xk5r3l5OYA5eTlAeXk5wDl5OUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADl5OcA6OTlAOXk5gTl5OYA5eTmkeXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmW+Xk5gDl5OYD5eTmAObm6gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eXmAOXj5gDl5OYB5uXmAObk5hLl5Obl5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5uPl5OYP5eTmAOXk5gHk5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAObk5gDk5OYA5OPmAOXk5gbl5OYA5eTmcuXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmleXk5gDl5OYD5eXmAOfm5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXl5QDl5OYA5uTmAObk5QDl5OYF5eTmAOXk5jfl5Oby5eTm/+Xk5v7l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5OZD5eTmAOXk5gPl5OcAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOTj5wDl5OUA5OPmAOTj5gDl5OYC5eTmBeXk5gDl5OY35eTm6+Xk5v/l5Ob+5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm3+Tj5grk4+YA5eTmAeXl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA6ungAOfm4wDi4ekA5OTnAOTk5wDl5OYA5uTmAOXj
+ 5QDl4+UA5eXnAOXk5gHl5OYB5eTmAeXk5gLl5OYE5eTmA+Xk5gHl5OYA5eTmVuXk5vTl5Ob/5eTm/eXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oaf5eTmAOXk
+ 5gPl5OYA5eXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5ecA3eHjAN3g4wDl5OYC5eTmBOXk5gPm5OUC5+TlAeTk
+ 5QHi5OYB5unrAOPs7gDT2dsA0NvdANzl5wAAAAAA5eTmAOXk5gHk5OUA5eTmHeXk5qHl5Ob/5eTm/eXk
+ 5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk
+ 5l7l5OYA5eTmBOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAObl5gDm5eYA5eTmAuPh5QDj4uUA5OPmAAAA
+ AAAAAAAAAAAAAAAAAACl5OYA/+DmAP/R2wDg4OUB5uTmBeXk5hHl5OYp5eTmWOXk5qbl5Ob05eTm/+Xk
+ 5vzl5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
+ 5v/l5Ob+5eTmK+Xk5gDl5OYC5OTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDl5OYA4eLlAOTk5gLj5OUA5eTmLOXk
+ 5oHl5Oat5eTmvuXk5sTl5ObG5eTmyOXk5svl5ObM5eTmz+Xk5tTl5Obd5eTm7OXk5vzl5Ob/5eTm/+Xk
+ 5v7l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob95eTm/+Xk5uTl5OYM5eTmAOXk5gEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePmAObl5gDl5OYC5eTmAOXk
+ 5nXl5Ob/5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5vzl5Ob75eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmyuTk5QDk5OUB5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OUA5eTmA+Xk
+ 5gDl5OZM5eTm/+Xk5v3l5Ob75eTm++Xk5vzl5Ob85eTm/OXk5vzl5Ob85eTm/eXk5v3l5Ob95eTm/eXk
+ 5v7l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oa15eTmAOXk5gLl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYC5eTmAOXk5rjl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5q3l5OYA5eTmA+bk5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAebk5gDm5OYX5eTm7uXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmsOXk5gDl5OYD5eTmAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXl5gDl5OYD5eTmAOXk5kXl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oa/5eTmAOXk
+ 5gLl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gTl5OYA5eTmcuXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5tbm5eUA5eTmAOXk5gEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5uUA5eTmBOXk5gDl5Oac5eTm/+Xk5vvl5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v7l5Ob/5eTm7+Xk5hfl5OYA5eTmAebm5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gDl5OYC5eTmAOXk5rvl5Ob/5eTm/OXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTmPOXk5gDl5OYD5eXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOjm7ADj4uIA5eTm1OXk
+ 5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OZ15eTmAOXk5gTk4+QAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYB5uPmAObj
+ 5gvl5Obj5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5rXl5OYA5eTmAuXl5gDo6OgAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gHl5OYA5eTmFuXk5u/l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm7uXk5hbl5OYA5eTmAeXk
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAuXk5gDl5OYi5eTm++Xk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmXOXk
+ 5gDl5OYD5eTmAObm5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OYC5eTmAOXk5i/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
+ 5v/l5Oaz5eTmAOXk5gLl5OYA5+PnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gPl5OYA5eTmPuXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob+5eTm/+Xk5vfl5OYn5eTmAOXk5gLl4+YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmA+Xk5gDl5OZM5eTm/+Xk5v3l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5oPl5OYA5eTmBOjj5gDl5OUAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOvr6wDl5OYE5eTmAOXk5lzl5Ob/5eTm++Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm3+Xk5g3l5OYA5eTmAeXk5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePlAOXk5gTl5OYA5eTmcuXk
+ 5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmYOXk5gDl5OYD5eTlAOXl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADf3+wA5eTmBOXk
+ 5gDl5OaK5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5ObJ5NG+AOTm
+ 7ADl5OUA5OPnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObk
+ 5gDl5OYE5eTmAOXk5qPl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5v/l5OZI5eTmAOXk5gPl5eYA5ubmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAOXk5gHl5OYA5eTmw+Xk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob85eTm/+Xk5rLl5OYA5eTmAubk5gDl4+YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OYB5eTnAOTk5wrl5Obi5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm/eXk5jPl5OYA5eTmAuXk5wDm4uYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5OTmAOXk5gLl5OYA5eTmK+Xk5v7l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmmuXk5gDl5OYD5OPmAOXl5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmBOXk5gDl5OZe5eTm/+Xk5vzl5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obx5ePmHuXk5gDl5OYC5eTlAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6OjoAOXl5QDl5OYD5eTmAOXk5qLl5Ob/5eTm++Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/k4+b/4+Lm/+Pj5v/j4+b/4+Pm/+Pj5v/j4+b/4+Pm/+Pi5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lm/+Pj5v/j4+b/4+Pm/+Pj5v/j4+b/4+Pm/+Pi
+ 5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OZ65eTmAOXk
+ 5gTn5OUA4+LjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANvb2wDl5OYA5eTmAeXk5gDl5OYQ5eTm5eXk
+ 5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPm/+nn5v/w7OX/7erm/+7q5v/u6ub/7urm/+7q5v/u6ub/7uvm/+bl
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+bk5v/u6+b/7urm/+7q5v/u6ub/7urm/+7q
+ 5v/t6ub/8Ozl/+nn5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5trl5OUK5eTlAOXk5QHl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj4+gA5eTmAOXk5gDl5OYE5eTmAOXk
+ 5ljl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5v/p5+b/0NTn/2WF7f9df+3/X4Ht/1+A7f9fgO3/X4Ht/1t+
+ 7f9ujOz/3t/m/+bl5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/m5eb/4ODm/3CN7P9bfu3/X4Ht/1+A
+ 7f9fgO3/X4Ht/15/7f9khO3/ztPn/+ro5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob85eTm/+Xk5lrl5OYA5eTmA+Xk5gDk4eQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj4wDl5eYA5uXoAN7e1wDl5OcB5eTmBOTk
+ 5QLj5OUA5eTmwuXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ht5f+ot+n/Ekfx/x5R8P8cT/H/HE/x/xxP
+ 8f8dT/H/HE/x/xxP8f/Gzej/7Onm/+Li5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm/+vp5v/Jz+f/HlDw/xtO
+ 8f8dT/H/HE/x/xxP8f8cT/H/H1Hw/xBG8f+ktOn/8u3l/+Li5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmvubk5gDm5OYB5eTmAObk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePmAOXh5gDl5uUA5ePlAOXk5gLl5OYE5eTmA+Xk
+ 5gHn5eIA5ubiAuXk5pXl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6u56f8gUvD/K1rw/ypZ
+ 8P8pWfD/KVnw/ylZ8P8sW/D/HlHw/2yL7P/07+X/5+Xm/+Hh5v/i4ub/4uLm/+Hh5v/m5eb/9fDl/3GO
+ 7P8eUfD/LFvw/ylZ8P8pWfD/KVnw/ylZ8P8sW/D/HlDw/6i26f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTmOuXk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADq6esA5eTmAOXk5gDm5OcA5eTmA+Xk5gTm5ugB5eXnAOXl
+ 5wDn5+gA5uXnB+Xk5k/l5ObN5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q
+ 8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8oWPD/HlHw/3eT7P/h4eb/8u7l//Ht5f/x7eX/8u7l/+Lh
+ 5v97lez/H1Hw/yhY8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ypZ8P8bTvH/p7bp//Ht5f/i4ub/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oac5eTmAOXk5gPm4+YA5OLlAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk5gDl5OYA5eTmAuXk5QLl6OcA5d3kAOXg
+ 5QPl5OYl5eTmXeXk5pzl5Obf5eTm/+Xk5v7l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds
+ 5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8oWPD/HE/x/zxn7/95k+z/mKvq/5mr
+ 6v96lOz/PWjv/xxP8f8oWPD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/Klnw/xxO8f+mten/7uvm/+Dg
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5u/m5OYb5eTmAOXk
+ 5gHm5OcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5gDl5OYA5eTmAOXk5gLl5eYB7OHuAOXk
+ 5jrl5OaT5eTm0+Xk5vrl5Ob/5eTm/+Xk5v/l5Ob85eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/IlPw/xpN
+ 8f8aTvH/Gk7x/xpN8f8iU/D/Klnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/G07x/626
+ 6f/+9+X/7erm/+Tk5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk
+ 5nTl5OYA5eTmBOXk5wDm5eYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTnAOXk5wDl5OYC5eTmAOXk
+ 5ifl5Oa95eTm/+Xk5v7l5Ob/5eTm/+Xk5vzl5Ob75eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8oWPD/Klnw/ypZ8P8qWfD/Klnw/yhY8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/yhY
+ 8P8kVPD/T3Tu/3iT7P+ruen/6efm/+nn5v/j4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v3l5Ob/5eTm0eff6APo3+gA5uPmAeXj5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTmAuXk
+ 5gDl5OY05eTm7eXk5v/l5Ob85eTm++Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/yhY8P8gUvD/Gk3x/xxO8f9Ye+3/2dvn/+nn5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTmR+Xk5gDl5OYD5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5wHl5ecA5eXoBuXk5tHl5Ob/5eTm+uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ylY8P8qWfD/K1rw/xZK8f9ZfO3/6efm/+Tj5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oaj5eTmAOXk5gPl4+YA5eTlAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmA+Xk5gDl5OZC5eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/Klnw/yBR8P+5w+j/7uvm/+Pj
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5u7l5OYa5eTmAOXk5gHm5OYAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OYE5eTmAOXk5lzl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/HU/x/6Oz
+ 6f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5mfl5OYA5eTmBOXk
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gPl5OYA5eTmR+Xk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q5
+ 6f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/KFjw/yhY
+ 8P8kVfD/xczo/+zp5v/j4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmveXk
+ 5wDl5OcB5eTlAOXj5gAAAAAAAAAAAAAAAAAAAAAA5eTmAeXk5gDl5OYa5eTm8OXk5v/l5Ob+5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5OPl/+Pi4//i4eP/4+Lk/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li
+ 5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/KVjw/ypZ
+ 8P8mVvD/FEnx/3iT7P/v6+b/4+Pm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
+ 5v/l5Ob55eTmJ+Xk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmAuXk5gDl5Oat5eTm/+Xk
+ 5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Pi5P/m5ef/7+/y//Hx9P/s7O//5OLk/+Tj5f/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/yhY
+ 8P8eUfD/G07x/yxb8P+Cm+v/6Obm/+bl5v/k5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm++Xk5v/l5OZw5eTmAOXk5gTm5OcAAAAAAAAAAAAAAAAAAAAAAOXl5wDl5OYD5eTmAOXk
+ 5kfl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/j4uT/6+rt/+Hg4v+tqKb/nJWS/8C9vP/s7O//5uXn/+Tj5f/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8oWPD/IVLw/2yK7P+ntun/z9Tn//Ht5f/l5Ob/5OPm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5r7l5eUA5eXmAeXj5gDj4egAAAAAAAAAAAAAAAAA5eTmAOXk
+ 5gDk5OYB5OTnAOXk5rrl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+vq7f/Oy8v/VkpC/zEiGP8yIxj/NSYc/4F4c//o5+n/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ypZ8P8bTvH/sL3p//735f/n5eb/4uLm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm9eXk5h/l5OYA5eTmAubl5QAAAAAAAAAAAAAA
+ AADk5OcA5ePmAOXk5gLl5OYA5eTmN+Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5ef/4uHj/1dLQ/8wIRb/QDIo/z8xJ/8+MCb/KxwQ/5KK
+ h//w8PL/4+Hj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/Klnw/xxO8f+ltOn/7uvm/+Hh5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmWuXk5gDl5OYE5OXnAAAA
+ AAAAAAAAAAAAAAAAAADl4+UA5+nnAOXk5gPl5OYA5eTmkOXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/k4+X/4uHj/+Lh4//i4eP/4N/g/+3t8P+knZv/MCEW/0AyKf88LiT/PC4k/z0v
+ Jv86LCL/Rjkw/9fV1v/l5Ob/4uDi/+Lh4//i4eP/4uHj/+Xk5v/j4ub/8Ozl/6u56f8eUPD/KVjw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/G07x/6e26f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oae5eTmAOXk
+ 5gPk5OYAAAAAAAAAAAAAAAAAAAAAAOno6QDm5OYA5eTlAeTk5ADl5OUN5eTm1uXk5v/l5Ob95eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+rq7P/x8fT/8PDz//Hw8//u7vH/+/z//3lvaf8xIhj/PzEn/zwu
+ JP88LiT/PC4k/z4wJv8zJRr/u7e2//v8///u7vH/8fDz//Hx8//w8PP/5+bo/+Li5v/w7Ob/qrjp/yBS
+ 8P8sW/D/Klnw/ypZ8P8qWfD/Klnw/ypZ8P8qWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ypZ8P8qWfD/Klnw/ypZ8P8qWfD/Klnw/y1b8P8eUPD/p7bp//Ht5f/i4ub/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5tXk4+YD5eTmAOXk5gHm5uYAAAAAAAAAAAAAAAAAAAAAAOXk5QDl5OYA5eTmA+Xk5gDl5OZA5eTm/eXk
+ 5v7l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/p6Ov/ycXF/6CZlv+knpv/o52a/6Kcmf+noZ//VkpC/zco
+ Hv89LyX/PC4k/zwuJP88LiT/PTAm/zUnHP98c27/qqSi/6KbmP+jnZv/opyZ/6ein//d3N3/5OTo//Ds
+ 5f+suun/Ekjx/xxP8f8aTvH/Gk3x/xpN8f8aTfH/Gk3x/xtO8f8mV/D/J1fw/ydX8P8nV/D/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/G07x/xpN8f8aTfH/Gk3x/xpN8f8aTvH/HE/x/xBG8f+pt+n/8e3l/+Li
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v7l5Ob/5eTm++Xk5ifl5OYA5eTmAuXk5gAAAAAAAAAAAAAAAAAAAAAA5+fnAOXk5gDl5OYA5eTmBOXk
+ 5gDl5OZ85eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+7t8P9rYFn/JxcM/zMkGv8xIhf/MSIX/zAh
+ F/85KyH/PS8l/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zUnHP8wIRb/MSIX/zEiF/8zJBn/KBkN/6ym
+ pP/v7/L/5ePj/9zd5v+Tp+n/iqDp/4yh6f+Loen/i6Hp/4uh6f+Noun/hJvq/y5c8P8mVvD/J1fw/ydX
+ 8P8nV/D/J1fw/ydX8P8nV/D/Jlbw/yxb8P+Dm+r/jaPp/4uh6f+Loen/i6Hp/4yh6f+KoOn/kqbp/9rc
+ 5v/n5ub/5OTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmV+Xk5gDl5OYD5eTnAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk
+ 5gDl5OYA5ePmAuHi5ADl5Oay5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4uT/7Ozu/2pgWf81Jx3/QDMp/z4x
+ J/8+MSf/PzEn/z0vJf88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjAm/z8xJ/8+MSf/PjEn/0E0
+ Kv8zJBr/qKKg//Dw8//i4eP/5+bm//Pv6//z7+3/8+/s//Pv7P/z7+z/8+/s//Pv7f/08Ov/SXHu/x9R
+ 8P8rWvD/J1fw/ydX8P8nV/D/J1fw/ypZ8P8gUvD/RW3u//Lu6//08O3/8+/s//Pv7P/z7+z/8+/s//Pv
+ 7f/z7+v/5+bm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OaG5eTmAOXk5gTl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmAOTj5QDl5OYC5OPlAOTk5hbl5Obc5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/s7O//a2BZ/zMk
+ Gf8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PjEn/zEiF/+po6H/8PDz/+Ph4//l5Of/3tzM/93bxf/d28f/3dvH/93bx//d28f/2tnG/+nk
+ zP+ntun/Gk7x/yNU8P8qWvD/Klnw/ypZ8P8qWvD/JFTw/xlN8f+js+n/6eXM/9rZxv/d28f/3dvH/93b
+ x//d28f/3dvF/97czP/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5rTl5OYA5eTmA+bl5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYD5eTmAOXk5jnl5Ob25eTm/+Xk5v7l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+zs
+ 7/9qYFn/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP8+MSf/MSIX/6ijoP/w8PH/5OPs/97buv/GvSz/x74v/8e+L//Hvi7/x74u/8e+
+ L//GvSv/x788/+zo2P+gsez/Ml/v/xtO8f8aTvH/Gk7x/xtO8f8xXu//nK7s/+zo2v/Hvzz/xr0r/8e+
+ L//Hvi7/x74u/8e+L//Hvi//xr0r/97buf/m5u//5eTk/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm1OPg5QHk4uUA5eTmAQAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5ecA5eTmAOTk5gDl5OYE5eTmAOXk5mHl5Ob/5eTm/OXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/j4uT/7Ozv/2pgWf8zJBn/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/qKOg//Dw8P/k5O7/29ir/8W8Jf/HvzL/x74w/8e+
+ MP/HvjD/x74w/8e+Mv/GvCf/2teu//Lv9P/X2eH/n7Dp/3+Z6/9/mOv/nq/p/9XY4f/y7/T/2tev/8a8
+ J//HvjL/x74w/8e+MP/HvjD/x74w/8e/M//FvCT/29ep/+fm8v/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obt5eTmEuXk
+ 5gDl5OYBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5eUA5eTmAP///wDl5OYE5eTmAOXk
+ 5ovl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Pi5P/s6+7/al9Y/zMkGv8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PzEn/zIjGP+po6H/8PDw/+Tk7v/c2K7/xrwn/8e+
+ Mv/HvjD/x74w/8e+MP/HvjD/x74y/8a9K//LwkX/397U/+vq9f/y7uj/8u7l//Lu5f/y7uj/6+r1/+Df
+ 1f/Lw0b/xr0r/8e+Mv/HvjD/x74w/8e+MP/HvjD/x78z/8W8Jv/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk
+ 5vzl5OYp5eTmAOXk5gIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5uPlAOXk
+ 5gHl4+YC3NfmAOXk5rTl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/k4+X/4eDi/+/v8f9sYVr/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z0vJf9AMyn/Lh8U/6agnv/w8PH/5OPt/9vY
+ rP/FvCf/x74y/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8a9Kf/KwUD/2dWh/+Df2P/i4uj/4uLo/+Df
+ 2P/Z1aL/ysJA/8a9Kf/HvjL/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob95eTm/+Xk5jrl5OYA5eTmAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADl5OYA5eTmAOXk5gLj4+YA5OPmE+Xk5tfl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+bl5//u7fD/4+Lk/2RaUv80JRv/PjAm/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP89LyX/OSsh/zUmHP8/MSj/tK+s/+7t
+ 7P/l5fP/3Nmy/8W8Jv/HvjL/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8a9LP/FvCX/ycA8/8zF
+ UP/MxVD/ycA8/8W8Jf/GvSz/x74y/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm
+ 8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmSuXk5gDl5OYDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAPT09QDl5OYA5eTmAOXk5gPm5OYA5eTmMuXk5vPl5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5uj/5eTm/4qCff9KPTT/PTAm/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zgqIP9ENi3/pqCe/9rY
+ 2P/m5e3/5uXs/+Ti3v/Y1Jj/xrwo/8e+Mv/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74x/8e/
+ Mv/HvS3/xrwp/8a8Kf/HvS3/x74y/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8
+ Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5OZW5eTmAOXk5gMAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDl5OYA5eTmAOXk5gTl5OYA5eTmXuXk5v/l5Ob85eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+zr7v95cGr/KxsQ/zkrIf88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJf87LSP/PC4k/725
+ uf/19ff/6Ojx/97cvv/PyGH/ycA5/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+Mf/HvjL/x74y/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5lvl5OYA5eTmBAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5ADl5OYA4+flAOXk5gTl5OYA5eTmluXk
+ 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/u7vH/trGw/zIjGP8/MSj/PS8l/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PzEn/zEi
+ F/+DenX/8/P0/+Lh6v/b16v/xr0q/8W8Jf/Hvi7/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmW+Xk5gDl5OYEAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5wD/+eIA5eTmAebk
+ 5QDm5OYN5eTm1uXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Hj//Dv8v97cm3/MSIY/z8x
+ J/88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP89LyX/Nige/725t//t7fL/4d/Y/8nAPP/GvSv/yL80/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5OZW5eTmAOXk
+ 5gMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYA5eTmA+Xk5gDl5OZL5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4+X/6urs/2JX
+ UP80Jhv/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PS8l/zssIv9BMyr/0c/O/+vr9v/b167/xrwo/8e/M//HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
+ 5knl5OYA5eTmAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5+bnAOXk5gDl5OYA5uTkAeTj7QHl5ObQ5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj
+ 5f/q6uz/Y1hR/zQlG/8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LyX/Oywi/0EzKv/Rzs3/6+v1/9vYsf/GvCj/x78z/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v3l5Ob/5eTmOeXk5gDl5OYDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5eXlAOXk5gDl5OYC5eTmAOXk5rTl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/4+Hj//Dw8v99dG//MSIX/z8xJ/88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP89LyX/Nicd/7u3tf/t7fH/4uDd/8rCRP/GvCf/yL81/8e+
+ Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj
+ 4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob+5eTm/+Xk5vzl5OYn5eTmAOXk5gIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4+TjAOXk5gTl5OYA5eTmmuXk5v/l5Ob75eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/j4uT/7u3w/7m1tP8zJBn/PzEn/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/f3Zx//Pz9P/i4en/3dq6/8i/
+ Nf/FuyP/xr0r/8e+Lv/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vY
+ rP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm7eXk5hHl5OYA5eTmAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmBOXk5gDl5OZ05eTm/+Xk
+ 5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4uT/7ezv/392cf8rGxD/OCof/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP86LCL/uLOy//X2
+ 9//p6fL/4d/P/9PNdv/Lw0j/yMA3/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5ObRk73YAOHi5QDl5OYBAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5ADl5OYD5eTmAOXk
+ 5k7l5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5ef/5+bp/5GKhv9QQzv/PzEn/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zkr
+ If9AMyn/npiV/9XS0f/k4+n/5+bw/+bl7P/a1qP/xrwn/8e+Mv/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5q3l5OYA5eTmA+Tj5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gLl5OYA5eTmJeXk5vvl5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/l5Ob/7u7w/+jn
+ 6f9mW1T/NCUa/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PS8l/zosIv80JRv/Oiwi/7Ktqv/u7ez/5eTw/9zZsP/FvCb/x74y/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmf+Xk5gDl5OYE5uXnAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5uTmAefj5wDo4+gF5eTm2uXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xj
+ 5f/h4OL/7u7w/2thWv8zJBn/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PS8l/0EzKf8vIBX/p6Ge//Dw8f/k4+3/29is/8W8J//HvjL/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5OZK5eTmAOXk
+ 5gPj4ucAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADl5OcA5eTmA+Xk5gDl5Oan5eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Pi5P/s6+7/al9Z/zMkGv8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjEn/zIjGP+po6H/8PDw/+Tk7v/c2K7/xrwo/8e/
+ M//HvjH/x74x/8e+Mf/HvjH/x74x/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74x/8e+Mf/HvjH/x74x/8e+Mf/HvjH/yL8z/8W8Jv/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm8eXk
+ 5hnl5OYA5eTmAeXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXj5gDl5OYE5eTmAOXk5mbl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+zs7/9qYFn/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP8+MSf/MSIX/6ijoP/w8PD/5OTv/9vX
+ qv/FuyL/x74v/8e9Lf/HvS3/x70t/8e9Lf/HvS3/x70u/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvS7/x70t/8e9Lf/HvS3/x70t/8e9Lf/HvjD/xLsh/9vXqf/n5vL/5ePj/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
+ 5v/l5ObD5eTmAOXk5wHl5OYA////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5+XlAOXk5gLl5OYA5eTmJuXk5vjl5Ob/5eTm/uXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/j4uT/7Ozv/2pgWf8zJBn/PjAm/zwuJP88LiT/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/qKOg//Dw
+ 8f/k4+v/39zA/8nAO//JwDz/ycE8/8nAPP/JwDz/ycA7/8nBPP/JwDr/x74x/8e+MP/HvjD/x74w/8e+
+ MP/HvjD/x74w/8e+MP/HvjD/x74x/8nAOv/JwTz/ycA7/8nAPP/JwDz/ycE8/8nAPP/JwDr/39y//+bm
+ 7v/l5OT/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob75eTm/+Xk5nvl5OYA5eTmBObk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzP8A5uTmAOTj5gHk4+YA5eTmwuXk
+ 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/s7O//a2BZ/zMkGv8+MCb/PC4k/zwu
+ JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjEn/zEi
+ GP+po6H/8PDz/+Lh4//l5ej/4+Lb/+Lh1v/i4df/4uHW/+Lh1v/i4db/4+HZ/+Hfzv/JwT7/x70u/8e+
+ Mf/HvjD/x74w/8e+MP/HvjD/x74x/8e+Lv/JwTz/4d/N/+Ph2f/i4db/4uHW/+Lh1v/i4df/4uHW/+Pi
+ 2//l5Oj/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTmM+Xk5gDl5OYC5uXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmBOXk
+ 5gDl5OZy5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5OLk/+zs7v9qX1j/NScc/0Ay
+ Kf8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4w
+ Jv9BMyn/MiQZ/6ehn//w8PP/4uHj/+Xk5f/l5en/5uXq/+bl6v/m5er/5uXq/+bl6v/l5Oj/5+fz/9HL
+ bv/EuyD/yL82/8e+Mf/HvjD/x74w/8e+Mf/Ivzb/xLsg/9HLbP/n5/P/5eTo/+bl6v/m5er/5uXq/+bl
+ 6v/m5er/5eXp/+Xk5f/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk5s3m3t4A4P7/AOXk5gDk4+UAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObl
+ 5gDl5OYC5eTnAOXk5yHl5Obz5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/j4uT/7u7w/21j
+ XP8oGQ3/NSYb/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIk
+ Gf8yJBn/MiQZ/zQlG/8qGxD/raim//Dv8v/j4eP/5eTm/+Xk5f/l5OX/5eTl/+Xk5f/l5OX/5eTl/+Xk
+ 5P/m5en/4uHW/8vESv/EuyD/xr0s/8e+L//Hvi//xr0s/8S7IP/Lw0j/4uDV/+bl6v/l5OT/5eTl/+Xk
+ 5f/l5OX/5eTl/+Xk5f/l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmeOXk5gDl5OYE5eTmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5OTlAObk5wDl5OYD5eTmAOXk5qjl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj
+ 5f/p6Or/zcrL/6iioP+rpqT/q6Wj/6ulo/+rpaP/q6Wj/6ulo/+rpaP/q6Wj/6ulo/+rpaP/q6Wj/6ul
+ o/+rpaP/q6Wj/6ulo/+rpqP/qqSi/6+qqP/f3t//5uXo/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5P/m5ez/4+Lc/9TPgP/KwkX/yL80/8i/NP/KwkX/1M9//+Pi2//m5ez/5eTk/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5vXl5OYj5eTmAOXk5gLl5OcAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5+XlAOXk5gPl5OYA5eTmQuXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Tj5f/q6ev/8PDz//Dv8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw
+ 8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw8v/w8PL/7+/y/+bl6P/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5P/l5en/5+fy/+Tj4f/h387/4d/N/+Tj4f/n5/L/5eXp/+Xk
+ 5P/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmpeXk5gDl5OYD5eTmAObl
+ 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTmAObk5gHm5OYA5eTmwOXk5v/l5Ob85eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/i4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph
+ 4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4uP/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5f/l4+P/5eTn/+bl7P/m5ez/5eTn/+Xj
+ 4//l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5OY+5eTmAOXk
+ 5gPl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmA+Xk5gDl5OZM5eTm/+Xk
+ 5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5OX/5eTl/+Xk
+ 5f/l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmtOXk
+ 5gDl5OYC5eTmAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gDn5eYA4+PlAeLj
+ 5QDl5Oa55eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
+ 5v/l5OY/5eTmAOXk5gPl5OcA4uLiAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ubmAOXk
+ 5gDl5OYC5eTmAOXk5jnl5Ob95eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5vzl5Ob/5eTmquXk5gDl5OYC5eTnAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5eTmANzi8QDl5OYD5eTmAOXk5pLl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob+5eTm/+Xk5vXl5OYq5eTmAOXk5gLl5OYA5uTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAADr6usA5eTmAOXk5gHl5eYA5eXmEeXk5tvl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmfuXk5gDl5OYD5+TlAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTnAOXk5gPl5OYA5eTmSOXk5v/l5Ob95eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5svl4+YG6ODkAOXk5gHl5OYA5ubmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTlAOXk5gPl5OYA5eTmh+Xk
+ 5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Ob35eTmNOXk5gDl5OYC5eXmAOXk5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj5QDm5OYA5eTmAebl
+ 5gHi4OcC5eTmueXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5m7l5OYA5eTmBObj5gDl5OYA6OjoAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5gDl5OYA5eTmAuXk5gDl5OYW5eTm2eXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oaf5eTlAOXk5gPl5ecA5eTmAObm
+ 5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXk5gDl5OYA5eTmA+Xk5gDl5OYp5eTm6uXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmxOTl5giU7fsA5eTmAePk
+ 5wDl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5OTmAOXk5gDl5OYA5eTmA+Xk5gDl5OY45eTm8OXk5v/l5Ob85eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5tbl5eYW5eXmAOXk
+ 5gLl5OYA5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5uXmAOXk5gDl5OYA5eTmBOXk5gDl5OY75eTm8OXk
+ 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Obf5eTmI+bk
+ 5gDl5OYD5eTmAOXk5gDl5ecAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYA5eTmBOXk
+ 5gDl5OY05eTm5+Xk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTm3uXk
+ 5iTl5OYA5eTmA+Xk5gDl5OYA4+PoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXl
+ 5wDl5ecA5eTmBOXk5gDl5OYi5eTm0+Xk5v/l5Ob75eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk
+ 5tLl5OYf5eTmAOXk5gPl5OYA5eTmAOfl5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA5OPlAOXl5gDl5eYA5eTmA+fk5gDm5OYO5eTmr+Xk5v/l5Ob85eTm/eXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm++Xk
+ 5v/l5Oa65uTmEObj5QDl5OYD5eTmAOXk5gDm5ucAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAA5uPmAOXk5gDl5OYA5eTmAuXk5wLi4+cA5eTmeOXk5vzl5Ob/5eTm++Xk
+ 5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
+ 5v3l5Ob/5eTmjePm6QLm5OUB5eTmAuXk5gDl5OYA4eHjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk5gDl5OYA5eTmAeXk5gTl5OYA5eTmN+Xk
+ 5tTl5Ob/5eTm/OXk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5vvl5Ob/5eTm6+Xk5lTl5OYA5eTmA+Xk5gLl5OYA5eTmAObj5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eXlAOXk5gDl5OYA5eTmAeXk
+ 5gTm4+cA5ePmCOXk5oTl5Ob45eTm/+Xk5vzl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5vzl5Ob+5eTm/+Xk5rHm5OYb5eTmAOXk5gTl5OYB5eTmAOXk5gDf398AAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
+ 5wDn5OQA5uTlAOXk5gPl5OYD5eTmAOXk5inl5Oa05eTm/+Xk5v/l5Ob95eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
+ 5vzl5Ob+5eTm/+Xk5uDl5OZU4+LlAOTj5gHl5OYD5eTmAOXk5gDl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOLi6QDl5OYA5eTmAOXk5gHl5OYD5OLmAeTi5wDl5OZC5eTmw+Xk5v/l5Ob/5eTm/uXk
+ 5vvl5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/OXk
+ 5vzl5Ob+5eTm/+Xk5uzl5OZ55uTmC+bk5gDl5OYD5eTmAuXk5gDl4+YA5uXmAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5ecA5OPlAOTj5QDl5OYC5eTmA/Hs4gD//9kA5eTmQuXk
+ 5rfl5Ob75eTm/+Xk5v/l5Ob95eTm++Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob85eTm++Xk
+ 5v/l5Ob+5eTm/+Xk5uPl5OZ65ePlE+Xk5gDl5OcC5eTmA+bk5gDm5OYA5eTmAP///wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj4+MA5eTmAOXl5gDl5eUA5eTmA+Xk
+ 5gLk4+EA5OPjAOXk5ivl5OaL5eTm3+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm++Xk5vzl5Ob95eTm/uXk
+ 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v7l5Ob95eTm/OXk5vvl5Ob95eTm/+Xk
+ 5v/l5Ob/5eTm+eXk5r7l5OZa5eTmC+Xj5gDl5OYB5eTmA+Xk5gHm5OYA5uTmAOTk5AAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6ujqAOXk
+ 5gDm5eYA5uXmAOXk5gPl5OYC5ebnAOXl5gDl5eYI5eTmReXk5pLl5ObW5eTm/OXk5v/l5Ob/5eTm/+Xk
+ 5v/l5Ob/5eTm/+Xk5v3l5Ob85eTm/OXk5vzl5Ob85eTm/eXk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
+ 5v/l5Obw5eTmu+Xk5nLl4+Yj5ersAObn6gDk5OYB5eTmBOXk5gHl5OYA5eTmAObk5gAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOXk5wDl5OYA5eTnAOXk5wDl5OYC5eTmBOXl5wDl4eYA5eDnAOTh6QTl5OYq5eTmXuXk
+ 5pDl5Oa+5eTm3uXk5vPl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Obu5eTm0uXk
+ 5q7l5OZ+5eTmRubk5hXk5OYA5OTmAOTk5gDl5eYC5eTmBOXk5gHj4+UA5OPmAOfl5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm4+YA5eTmAOXl5wDk5ugA5eTmAeXk5gPl5OYE5uPmAObj
+ 5gDk5OYA5+TmAOnm5gDl5eYF5eTmGuXk5i/l5OY+5eTmT+Xk5lXl5OZY5eTmVOXk5kjl5OY65eTmJ+Xk
+ 5hLo4eYC6ODnAObj5gDl5OcA5eTnAObk5gLl5OYE5eTmAuXi5QDk4+MA5OTlAOfj5wAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDk5OUA5uHoAODy
+ 3ADl4+cB5eTmAuXk5gTl5OYE5OTmAubi5QDm4uYA4ejmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AADl5eQA5eXjAOXl4wDl5OYD5eTmBOXk5gPl5OYB5OTmAOfl6wDm5ecA5ePlAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAOTk5ADo5usA5OLkAP///wDm4+YA5eTmAeXk5gHl5OYC5eTmA+Xk5gPl5OYD5eTmA+Xk
+ 5gPl5OYD5eTmA+Xk5gLl5OYB5ePmAObk5wDi598A6OPoAOTk5gDm5uYAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAA/////////////////+AAAP///////oD////////gAAD///////6YB///////4AAA///////8
+ hEAf/////+AAAP///////IAVAX/////gAAD///////yAAIgH////4AAA///////5AAACoC///+AAAP//
+ ////+QAAABCAv//gAAD///////kAAAAAVAX/4AAA///////yAAAAAAIl/+AAAP//////8gAAAAAACv/g
+ AAD///////IAAAAAAAT/4AAA///////0AAAAAAAE/+AAAP//////5AAAAAAABf/gAAD//////+QAAAAA
+ AAn/4AAA///////IAAAAAAAJ/+AAAP//////yAAAAAAAEf/gAAD//////4gAAAAAABP/4AAA//////+Q
+ AAAAAAAT/+AAAP/////+UAAAAAAAJ//gAAD//////SAAAAAAACf/4AAA//////JAAAAAAAAn/+AAAP//
+ /wAAgAAAAAAAT//gAAD///IAcgAAAAAAAE//4AAA///ovIAAAAAAAABP/+AAAP//6gAAAAAAAAAAX//g
+ AAD//8QAAAAAAAAAAJ//4AAA///IAAAAAAAAAACf/+AAAP//yAAAAAAAAAAAn//gAAD//9AAAAAAAAAA
+ AJ//4AAA///QAAAAAAAAAACf/+AAAP//0AAAAAAAAAAAX//gAAD//5AAAAAAAAAAAF//4AAA//+QAAAA
+ AAAAAABP/+AAAP//sAAAAAAAAAAAT//gAAD//6AAAAAAAAAAAE//4AAA//+gAAAAAAAAAAAn/+AAAP//
+ oAAAAAAAAAAAJ//gAAD//6AAAAAAAAAAACP/4AAA//+gAAAAAAAAAAAT/+AAAP//oAAAAAAAAAAAE//g
+ AAD//6AAAAAAAAAAAAn/4AAA//8gAAAAAAAAAAAJ/+AAAP//oAAAAAAAAAAAAP/gAAD//yAAAAAAAAAA
+ AAT/4AAA//8gAAAAAAAAAAAEf+AAAP//QAAAAAAAAAAAAn/gAAD//kAAAAAAAAAAAAJ/4AAA//5AAAAA
+ AAAAAAABP+AAAP/+QAAAAAAAAAAAAT/gAAD//IAAAAAAAAAAAACf4AAA//qAAAAAAAAAAAAAn+AAAP/A
+ gAAAAAAAAAAAAI/gAAD9AgAAAAAAAAAAAABP4AAA9CgAAAAAAAAAAAAAT+AAAOmAAAAAAAAAAAAAACfg
+ AADQAAAAAAAAAAAAAAAn4AAAqAAAAAAAAAAAAAAAE+AAAJAAAAAAAAAAAAAAABPgAACgAAAAAAAAAAAA
+ AAAT4AAAoAAAAAAAAAAAAAAACeAAAKAAAAAAAAAAAAAAAAngAACgAAAAAAAAAAAAAAAJ4AAAoAAAAAAA
+ AAAAAAAABOAAAJAAAAAAAAAAAAAAAATgAACQAAAAAAAAAAAAAAAE4AAAiAAAAAAAAAAAAAAAAuAAAMgA
+ AAAAAAAAAAAAAAJgAADEAAAAAAAAAAAAAAACYAAA5AAAAAAAAAAAAAAAAWAAAOoAAAAAAAAAAAAAAAFg
+ AAD1AAAAAAAAAAAAAAABIAAA+IAAAAAAAAAAAAAAASAAAPqAAAAAAAAAAAAAAAEgAAD9QAAAAAAAAAAA
+ AAAAoAAA/qAAAAAAAAAAAAAAAKAAAP9QAAAAAAAAAAAAAACgAAD/SAAAAAAAAAAAAAAAoAAA/6gAAAAA
+ AAAAAAAAAKAAAP/UAAAAAAAAAAAAAACgAAD/6gAAAAAAAAAAAAAAoAAA//EAAAAAAAAAAAAAAKAAAP/1
+ AAAAAAAAAAAAAACgAAD/+oAAAAAAAAAAAAAAoAAA//wAAAAAAAAAAAAAAKAAAP/+QAAAAAAAAAAAAACg
+ AAD//kAAAAAAAAAAAAAAoAAA//9AAAAAAAAAAAAAAKAAAP//QAAAAAAAAAAAAAEgAAD//0AAAAAAAAAA
+ AAABIAAA//9AAAAAAAAAAAAAAWAAAP//IAAAAAAAAAAAAAFgAAD//yAAAAAAAAAAAAACYAAA//8gAAAA
+ AAAAAAAAAmAAAP//kAAAAAAAAAAAAAJgAAD//5AAAAAAAAAAAAAG4AAA//+QAAAAAAAAAAAABOAAAP//
+ yAAAAAAAAAAAAATgAAD//8gAAAAAAAAAAAAJ4AAA///EAAAAAAAAAAAACeAAAP//5AAAAAAAAAAAABHg
+ AAD//+IAAAAAAAAAAAAT4AAA///yAAAAAAAAAAAAI+AAAP//8QAAAAAAAAAAACfgAAD///kAAAAAAAAA
+ AABX4AAA///6gAAAAAAAAAAAT+AAAP///UAAAAAAAAAAAK/gAAD///4AAAAAAAAAAAFf4AAA///+oAAA
+ AAAAAAACP+AAAP///1AAAAAAAAAAAr/gAAD///+oAAAAAAAAAAV/4AAA////1AAAAAAAAAAK/+AAAP//
+ /+oAAAAAAAAAFf/gAAD////lAAAAAAAAACv/4AAA////+oAAAAAAAABX/+AAAP////0gAAAAAAAAL//g
+ AAD////+kAAAAAAAAl//4AAA/////0gAAAAAAAS//+AAAP////+SAAAAAAARf//gAAD/////6IAAAAAA
+ Jf//4AAA//////JAAAAAAIv//+AAAP/////9CAAAAAIv///gAAD//////0IAAAAQn///4AAA///////Q
+ QAAAov///+AAAP//////+hIACQv////gAAD///////8Bf6Bf////4AAA////////9AAD/////+AAAP//
+ ///////////////gAAA=
+
+
+
\ No newline at end of file
diff --git a/Main/Tongue extraction/Program.cs b/Main/Tongue extraction/Program.cs
new file mode 100644
index 0000000..c2f1ac0
--- /dev/null
+++ b/Main/Tongue extraction/Program.cs
@@ -0,0 +1,66 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+using System.Text;
+
+namespace Tongue_extraction
+{
+ static class Program
+ {
+ [STAThread]
+ static void Main()
+ {
+ try
+ {
+ Application.SetUnhandledExceptionMode(UnhandledExceptionMode.CatchException);
+ Application.ThreadException += new System.Threading.ThreadExceptionEventHandler(Application_ThreadException);
+ AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException);
+
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new Form1());
+ }
+ catch (Exception ex)
+ {
+ string str = GetExceptionMsg(ex, string.Empty);
+ MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ }
+ }
+
+
+ static void Application_ThreadException(object sender, System.Threading.ThreadExceptionEventArgs e)
+ {
+ string str = GetExceptionMsg(e.Exception, e.ToString());
+ MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ //LogManager.WriteLog(str);
+ }
+
+ static void CurrentDomain_UnhandledException(object sender, UnhandledExceptionEventArgs e)
+ {
+ string str = GetExceptionMsg(e.ExceptionObject as Exception, e.ToString());
+ MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ //LogManager.WriteLog(str);
+ }
+
+ static string GetExceptionMsg(Exception ex, string backStr)
+ {
+ StringBuilder sb = new StringBuilder();
+ sb.AppendLine("****************************Exception Text****************************");
+ sb.AppendLine("【Time】:" + DateTime.Now.ToString());
+ if (ex != null)
+ {
+ sb.AppendLine("【Exception Type】:" + ex.GetType().Name);
+ sb.AppendLine("【Exception Information】:" + ex.Message);
+ sb.AppendLine("【Stack Call】:" + ex.StackTrace);
+ }
+ else
+ {
+ sb.AppendLine("【Unhandled Exception】:" + backStr);
+ }
+ sb.AppendLine("***************************************************************");
+ return sb.ToString();
+ }
+ }
+}
diff --git a/Main/Tongue extraction/Properties/AssemblyInfo.cs b/Main/Tongue extraction/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..604a626
--- /dev/null
+++ b/Main/Tongue extraction/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// アセンブリに関する一般情報は以下の属性セットをとおして制御されます。
+// アセンブリに関連付けられている情報を変更するには、
+// これらの属性値を変更してください。
+[assembly: AssemblyTitle("Tongue extraction")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Tongue extraction")]
+[assembly: AssemblyCopyright("Copyright © 2018")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// ComVisible を false に設定すると、その型はこのアセンブリ内で COM コンポーネントから
+// 参照不可能になります。COM からこのアセンブリ内の型にアクセスする場合は、
+// その型の ComVisible 属性を true に設定してください。
+[assembly: ComVisible(false)]
+
+// このプロジェクトが COM に公開される場合、次の GUID が typelib の ID になります
+[assembly: Guid("d382f9e7-a41d-4d82-a59b-cf4095134d6b")]
+
+// アセンブリのバージョン情報は次の 4 つの値で構成されています:
+//
+// メジャー バージョン
+// マイナー バージョン
+// ビルド番号
+// Revision
+//
+// すべての値を指定するか、下のように '*' を使ってビルドおよびリビジョン番号を
+// 既定値にすることができます:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Main/Tongue extraction/Properties/Resources.Designer.cs b/Main/Tongue extraction/Properties/Resources.Designer.cs
new file mode 100644
index 0000000..84d1885
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace Tongue_extraction.Properties {
+ using System;
+
+
+ ///
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ ///
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ ///
+ /// Returns the cached ResourceManager instance used by this class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Tongue_extraction.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ ///
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/Main/Tongue extraction/Properties/Resources.resx b/Main/Tongue extraction/Properties/Resources.resx
new file mode 100644
index 0000000..af7dbeb
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Resources.resx
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
\ No newline at end of file
diff --git a/Main/Tongue extraction/Properties/Settings.Designer.cs b/Main/Tongue extraction/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..15b8fb1
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Settings.Designer.cs
@@ -0,0 +1,26 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace Tongue_extraction.Properties {
+
+
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default {
+ get {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/Main/Tongue extraction/Properties/Settings.settings b/Main/Tongue extraction/Properties/Settings.settings
new file mode 100644
index 0000000..3964565
--- /dev/null
+++ b/Main/Tongue extraction/Properties/Settings.settings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/ColorSubdivision.csproj b/Tongue extraction_cropresizemethod/ColorSubdivision/ColorSubdivision.csproj
deleted file mode 100644
index 9f1ebea..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/ColorSubdivision.csproj
+++ /dev/null
@@ -1,151 +0,0 @@
-
-
-
-
- Debug
- AnyCPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}
- WinExe
- ColorSubdivision
- ColorSubdivision
- v4.7.1
- 512
- true
- true
-
- publish\
- true
- Disk
- false
- Foreground
- 7
- Days
- false
- false
- true
- 0
- 1.0.0.%2a
- false
- false
- true
-
-
- x64
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE
- prompt
- 4
-
-
- x64
- pdbonly
- true
- bin\Release\
- TRACE
- prompt
- 4
-
-
- true
- bin\x64\Debug\
- DEBUG;TRACE
- full
- x64
- 7.3
- prompt
- MinimumRecommendedRules.ruleset
- true
-
-
- bin\x64\Release\
- TRACE
- true
- pdbonly
- x64
- 7.3
- prompt
- MinimumRecommendedRules.ruleset
- true
-
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.dll
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Blob.dll
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.Extensions.dll
-
-
- ..\..\..\..\..\..\system\sdk\OpenCVsharp\net461\OpenCvSharp.UserInterface.dll
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Form
-
-
- Form1.cs
-
-
-
-
- Form1.cs
-
-
- ResXFileCodeGenerator
- Resources.Designer.cs
- Designer
-
-
- True
- Resources.resx
- True
-
-
- SettingsSingleFileGenerator
- Settings.Designer.cs
-
-
- True
- Settings.settings
- True
-
-
-
-
-
-
-
- Always
-
-
-
-
- False
- Microsoft .NET Framework 4.7.1 %28x86 and x64%29
- true
-
-
- False
- .NET Framework 3.5 SP1
- false
-
-
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.Designer.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.Designer.cs
deleted file mode 100644
index 1b0e6fd..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.Designer.cs
+++ /dev/null
@@ -1,84 +0,0 @@
-namespace ColorSubdivision
-{
- partial class Form1
- {
- ///
- /// Required designer variable.
- ///
- private System.ComponentModel.IContainer components = null;
-
- ///
- /// Clean up any resources being used.
- ///
- /// true if managed resources should be disposed; otherwise, false.
- protected override void Dispose(bool disposing)
- {
- if (disposing && (components != null))
- {
- components.Dispose();
- }
- base.Dispose(disposing);
- }
-
- #region Windows Form Designer generated code
-
- ///
- /// Required method for Designer support - do not modify
- /// the contents of this method with the code editor.
- ///
- private void InitializeComponent()
- {
- this.button1 = new System.Windows.Forms.Button();
- this.button2 = new System.Windows.Forms.Button();
- this.RichTextBox1 = new System.Windows.Forms.RichTextBox();
- this.SuspendLayout();
- //
- // button1
- //
- this.button1.Location = new System.Drawing.Point(115, 47);
- this.button1.Name = "button1";
- this.button1.Size = new System.Drawing.Size(75, 23);
- this.button1.TabIndex = 0;
- this.button1.Text = "Button1";
- this.button1.UseVisualStyleBackColor = true;
- this.button1.Click += new System.EventHandler(this.Button1_Click);
- //
- // button2
- //
- this.button2.Location = new System.Drawing.Point(115, 105);
- this.button2.Name = "button2";
- this.button2.Size = new System.Drawing.Size(75, 23);
- this.button2.TabIndex = 1;
- this.button2.Text = "Button2";
- this.button2.UseVisualStyleBackColor = true;
- this.button2.Click += new System.EventHandler(this.Button2_Click);
- //
- // RichTextBox1
- //
- this.RichTextBox1.Location = new System.Drawing.Point(370, 138);
- this.RichTextBox1.Name = "RichTextBox1";
- this.RichTextBox1.Size = new System.Drawing.Size(100, 96);
- this.RichTextBox1.TabIndex = 2;
- this.RichTextBox1.Text = "";
- //
- // Form1
- //
- this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
- this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
- this.ClientSize = new System.Drawing.Size(800, 450);
- this.Controls.Add(this.RichTextBox1);
- this.Controls.Add(this.button2);
- this.Controls.Add(this.button1);
- this.Name = "Form1";
- this.ResumeLayout(false);
-
- }
-
- #endregion
-
- private System.Windows.Forms.Button button1;
- private System.Windows.Forms.Button button2;
- private System.Windows.Forms.RichTextBox RichTextBox1;
- }
-}
-
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.cs
deleted file mode 100644
index 9d64c49..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.cs
+++ /dev/null
@@ -1,56 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Data;
-using System.Drawing;
-using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using System.Windows.Forms;
-using OpenCvSharp;
-
-namespace ColorSubdivision
-{
- public partial class Form1 : Form
- {
- public Form1()
- {
- InitializeComponent();
- }
-
- private void Button1_Click(object sender, EventArgs e)
- {
- var path = @"D:\kei2\Solutions\DeepTongue\LocalRepository\Tongue extraction_cropresizemethod\Tongue extraction\bin\x64\Debug\mask_final\20180315093610.jpg";
- using (Mat mat_input = Cv2.ImRead(path, ImreadModes.Grayscale))
- {
- var mat_dst = new Mat(mat_input.Size(), mat_input.Type());
- bool isEdge = false;
- for (int i = 0; i < mat_input.Height; i++)
- {
- if(!isEdge)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- if (mat_input.At(i, j) > 200)
- {
- mat_dst.Set(i, j, 100);
- isEdge = true;
- }
- }
- }
- }
- Cv2.ImShow("input", mat_input);
- Cv2.ImShow("dst", mat_dst);
- mat_dst.Dispose();
- }
- GC.Collect();
-
- }
-
- private void Button2_Click(object sender, EventArgs e)
- {
-
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.resx b/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.resx
deleted file mode 100644
index 1af7de1..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Form1.resx
+++ /dev/null
@@ -1,120 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Program.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Program.cs
deleted file mode 100644
index b7233a7..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Program.cs
+++ /dev/null
@@ -1,22 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading.Tasks;
-using System.Windows.Forms;
-
-namespace ColorSubdivision
-{
- static class Program
- {
- ///
- /// The main entry point for the application.
- ///
- [STAThread]
- static void Main()
- {
- Application.EnableVisualStyles();
- Application.SetCompatibleTextRenderingDefault(false);
- Application.Run(new Form1());
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/AssemblyInfo.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/AssemblyInfo.cs
deleted file mode 100644
index 87b468f..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("ColorSubdivision")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("ColorSubdivision")]
-[assembly: AssemblyCopyright("Copyright © 2020")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible
-// to COM components. If you need to access a type in this assembly from
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("ad42a573-7ac3-4714-9d53-db9921815cbb")]
-
-// Version information for an assembly consists of the following four values:
-//
-// Major Version
-// Minor Version
-// Build Number
-// Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.Designer.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.Designer.cs
deleted file mode 100644
index 6ca7260..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.Designer.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace ColorSubdivision.Properties {
- using System;
-
-
- ///
- /// A strongly-typed resource class, for looking up localized strings, etc.
- ///
- // This class was auto-generated by the StronglyTypedResourceBuilder
- // class via a tool like ResGen or Visual Studio.
- // To add or remove a member, edit your .ResX file then rerun ResGen
- // with the /str option, or rebuild your VS project.
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
- [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- internal class Resources {
-
- private static global::System.Resources.ResourceManager resourceMan;
-
- private static global::System.Globalization.CultureInfo resourceCulture;
-
- [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
- internal Resources() {
- }
-
- ///
- /// Returns the cached ResourceManager instance used by this class.
- ///
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Resources.ResourceManager ResourceManager {
- get {
- if (object.ReferenceEquals(resourceMan, null)) {
- global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("ColorSubdivision.Properties.Resources", typeof(Resources).Assembly);
- resourceMan = temp;
- }
- return resourceMan;
- }
- }
-
- ///
- /// Overrides the current thread's CurrentUICulture property for all
- /// resource lookups using this strongly typed resource class.
- ///
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Globalization.CultureInfo Culture {
- get {
- return resourceCulture;
- }
- set {
- resourceCulture = value;
- }
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.resx b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.resx
deleted file mode 100644
index af7dbeb..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Resources.resx
+++ /dev/null
@@ -1,117 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.Designer.cs b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.Designer.cs
deleted file mode 100644
index 41f48bd..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.Designer.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace ColorSubdivision.Properties {
-
-
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
- internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
-
- private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
-
- public static Settings Default {
- get {
- return defaultInstance;
- }
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.settings b/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.settings
deleted file mode 100644
index 3964565..0000000
--- a/Tongue extraction_cropresizemethod/ColorSubdivision/Properties/Settings.settings
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction.sln b/Tongue extraction_cropresizemethod/Tongue extraction.sln
deleted file mode 100644
index 81c772d..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction.sln
+++ /dev/null
@@ -1,50 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Version 16
-VisualStudioVersion = 16.0.30011.22
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DeepTIAS1.9", "Tongue extraction\DeepTIAS1.9.csproj", "{AFD610B1-8D23-423A-AA0F-B09BA769BDD7}"
-EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ColorSubdivision", "ColorSubdivision\ColorSubdivision.csproj", "{AD42A573-7AC3-4714-9D53-DB9921815CBB}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Any CPU = Debug|Any CPU
- Debug|x64 = Debug|x64
- Debug|x86 = Debug|x86
- Release|Any CPU = Release|Any CPU
- Release|x64 = Release|x64
- Release|x86 = Release|x86
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x64.ActiveCfg = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.ActiveCfg = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Debug|x86.Build.0 = Debug|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|Any CPU.Build.0 = Release|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.ActiveCfg = Release|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x64.Build.0 = Release|x64
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.ActiveCfg = Release|Any CPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}.Release|x86.Build.0 = Release|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Debug|x64.ActiveCfg = Debug|x64
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Debug|x64.Build.0 = Debug|x64
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Debug|x86.ActiveCfg = Debug|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Debug|x86.Build.0 = Debug|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Release|Any CPU.Build.0 = Release|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Release|x64.ActiveCfg = Release|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Release|x64.Build.0 = Release|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Release|x86.ActiveCfg = Release|Any CPU
- {AD42A573-7AC3-4714-9D53-DB9921815CBB}.Release|x86.Build.0 = Release|Any CPU
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
- GlobalSection(ExtensibilityGlobals) = postSolution
- SolutionGuid = {33F6C697-859B-4D55-9D28-998267FD09AE}
- EndGlobalSection
-EndGlobal
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/App.config b/Tongue extraction_cropresizemethod/Tongue extraction/App.config
deleted file mode 100644
index 8fc0551..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/App.config
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/DeepTIAS1.9.csproj b/Tongue extraction_cropresizemethod/Tongue extraction/DeepTIAS1.9.csproj
deleted file mode 100644
index dae0f1c..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/DeepTIAS1.9.csproj
+++ /dev/null
@@ -1,190 +0,0 @@
-
-
-
-
-
- Debug
- AnyCPU
- {AFD610B1-8D23-423A-AA0F-B09BA769BDD7}
- WinExe
- Properties
- Tongue_extraction
- DeepTIAS1.9
- v4.7.1
- 512
- true
-
-
-
- publish\
- true
- Disk
- false
- Foreground
- 7
- Days
- false
- false
- true
- 0
- 1.0.0.%2a
- false
- false
- true
-
-
- AnyCPU
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE
- prompt
- 4
-
-
- AnyCPU
- pdbonly
- true
- bin\Release\
- TRACE
- prompt
- 4
-
-
- true
- bin\x64\Debug\
- DEBUG;TRACE
- full
- x64
- prompt
- MinimumRecommendedRules.ruleset
- true
-
-
- bin\x64\Release\
- TRACE
- true
- pdbonly
- x64
- prompt
- MinimumRecommendedRules.ruleset
- true
-
-
- DeepTongue_Icon.ico
-
-
-
- Form
-
-
- Form1.cs
-
-
-
-
-
- Form1.cs
-
-
- ResXFileCodeGenerator
- Resources.Designer.cs
- Designer
-
-
- True
- Resources.resx
- True
-
-
-
- SettingsSingleFileGenerator
-
-
-
-
-
-
-
-
- Always
-
-
-
-
- False
- Microsoft .NET Framework 4.7.1 %28x86 および x64%29
- true
-
-
- False
- .NET Framework 3.5 SP1
- false
-
-
-
-
- False
- ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.dll
-
-
- False
- ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.Blob.dll
-
-
- False
- ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.Extensions.dll
-
-
- ..\..\..\packages\OpenCvSharp3-AnyCPU.3.2.0.20170107\lib\net40\OpenCvSharp.UserInterface.dll
-
-
- C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.dll
-
-
- C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.Composition.dll
-
-
- C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.Composition.Registration.dll
-
-
- C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.ComponentModel.DataAnnotations.dll
-
-
-
-
-
-
-
-
-
- C:\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.7.1\System.Reflection.Context.dll
-
-
- ..\..\..\packages\System.ValueTuple.4.4.0\lib\net461\System.ValueTuple.dll
-
-
-
-
- False
- ..\..\..\packages\TensorFlowSharp.1.11.0\lib\net471\TensorFlowSharp.dll
-
-
-
-
-
- このプロジェクトは、このコンピューター上にない NuGet パッケージを参照しています。それらのパッケージをダウンロードするには、[NuGet パッケージの復元] を使用します。詳細については、http://go.microsoft.com/fwlink/?LinkID=322105 を参照してください。見つからないファイルは {0} です。
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/DeepTongue_Icon.ico b/Tongue extraction_cropresizemethod/Tongue extraction/DeepTongue_Icon.ico
deleted file mode 100644
index 85fef71..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/DeepTongue_Icon.ico
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Form1.Designer.cs b/Tongue extraction_cropresizemethod/Tongue extraction/Form1.Designer.cs
deleted file mode 100644
index 473b399..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Form1.Designer.cs
+++ /dev/null
@@ -1,434 +0,0 @@
-namespace Tongue_extraction
-{
- partial class Form1
- {
- ///
- /// 必要なデザイナー変数です。
- ///
- private System.ComponentModel.IContainer components = null;
-
- ///
- /// 使用中のリソースをすべてクリーンアップします。
- ///
- /// マネージ リソースを破棄する場合は true を指定し、その他の場合は false を指定します。
- protected override void Dispose(bool disposing)
- {
- if (disposing && (components != null))
- {
- components.Dispose();
- }
- base.Dispose(disposing);
- }
-
- #region Windows フォーム デザイナーで生成されたコード
-
- ///
- /// デザイナー サポートに必要なメソッドです。このメソッドの内容を
- /// コード エディターで変更しないでください。
- ///
- private void InitializeComponent()
- {
- System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(Form1));
- this.button_start = new System.Windows.Forms.Button();
- this.pictureBox_input = new System.Windows.Forms.PictureBox();
- this.pictureBox_detection = new System.Windows.Forms.PictureBox();
- this.pictureBox_cropResized = new System.Windows.Forms.PictureBox();
- this.pictureBox_extraction = new System.Windows.Forms.PictureBox();
- this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
- this.panel1 = new System.Windows.Forms.Panel();
- this.textBox1 = new System.Windows.Forms.TextBox();
- this.label1 = new System.Windows.Forms.Label();
- this.label2 = new System.Windows.Forms.Label();
- this.label3 = new System.Windows.Forms.Label();
- this.label4 = new System.Windows.Forms.Label();
- this.label5 = new System.Windows.Forms.Label();
- this.label6 = new System.Windows.Forms.Label();
- this.label7 = new System.Windows.Forms.Label();
- this.label8 = new System.Windows.Forms.Label();
- this.pictureBox_output = new System.Windows.Forms.PictureBox();
- this.pictureBox_outputSRG = new System.Windows.Forms.PictureBox();
- this.pictureBox_maskSRG = new System.Windows.Forms.PictureBox();
- this.pictureBox_gloss = new System.Windows.Forms.PictureBox();
- this.label_processingFileName = new System.Windows.Forms.Label();
- this.label_totalProgress = new System.Windows.Forms.Label();
- this.panel2 = new System.Windows.Forms.Panel();
- this.button_pause = new System.Windows.Forms.Button();
- this.backgroundWorker1 = new System.ComponentModel.BackgroundWorker();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_input)).BeginInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_detection)).BeginInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_cropResized)).BeginInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_extraction)).BeginInit();
- this.tableLayoutPanel1.SuspendLayout();
- this.panel1.SuspendLayout();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_output)).BeginInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_outputSRG)).BeginInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_maskSRG)).BeginInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_gloss)).BeginInit();
- this.panel2.SuspendLayout();
- this.SuspendLayout();
- //
- // button_start
- //
- this.button_start.Dock = System.Windows.Forms.DockStyle.Fill;
- this.button_start.Location = new System.Drawing.Point(0, 0);
- this.button_start.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.button_start.Name = "button_start";
- this.button_start.Size = new System.Drawing.Size(758, 49);
- this.button_start.TabIndex = 0;
- this.button_start.Text = "Start";
- this.button_start.UseVisualStyleBackColor = true;
- this.button_start.Click += new System.EventHandler(this.Button_start_Click);
- //
- // pictureBox_input
- //
- this.pictureBox_input.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_input.Location = new System.Drawing.Point(3, 4);
- this.pictureBox_input.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.pictureBox_input.Name = "pictureBox_input";
- this.pictureBox_input.Size = new System.Drawing.Size(250, 209);
- this.pictureBox_input.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_input.TabIndex = 1;
- this.pictureBox_input.TabStop = false;
- //
- // pictureBox_detection
- //
- this.pictureBox_detection.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_detection.Location = new System.Drawing.Point(257, 4);
- this.pictureBox_detection.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.pictureBox_detection.Name = "pictureBox_detection";
- this.pictureBox_detection.Size = new System.Drawing.Size(250, 209);
- this.pictureBox_detection.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_detection.TabIndex = 2;
- this.pictureBox_detection.TabStop = false;
- //
- // pictureBox_cropResized
- //
- this.pictureBox_cropResized.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_cropResized.Location = new System.Drawing.Point(511, 4);
- this.pictureBox_cropResized.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.pictureBox_cropResized.Name = "pictureBox_cropResized";
- this.pictureBox_cropResized.Size = new System.Drawing.Size(250, 209);
- this.pictureBox_cropResized.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_cropResized.TabIndex = 3;
- this.pictureBox_cropResized.TabStop = false;
- //
- // pictureBox_extraction
- //
- this.pictureBox_extraction.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_extraction.Location = new System.Drawing.Point(511, 241);
- this.pictureBox_extraction.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.pictureBox_extraction.Name = "pictureBox_extraction";
- this.pictureBox_extraction.Size = new System.Drawing.Size(250, 209);
- this.pictureBox_extraction.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_extraction.TabIndex = 4;
- this.pictureBox_extraction.TabStop = false;
- //
- // tableLayoutPanel1
- //
- this.tableLayoutPanel1.CellBorderStyle = System.Windows.Forms.TableLayoutPanelCellBorderStyle.Outset;
- this.tableLayoutPanel1.ColumnCount = 4;
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 25F));
- this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 4);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_detection, 1, 0);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_input, 0, 0);
- this.tableLayoutPanel1.Controls.Add(this.label1, 0, 1);
- this.tableLayoutPanel1.Controls.Add(this.label2, 1, 1);
- this.tableLayoutPanel1.Controls.Add(this.label3, 2, 1);
- this.tableLayoutPanel1.Controls.Add(this.label4, 3, 1);
- this.tableLayoutPanel1.Controls.Add(this.label5, 0, 3);
- this.tableLayoutPanel1.Controls.Add(this.label6, 1, 3);
- this.tableLayoutPanel1.Controls.Add(this.label7, 2, 3);
- this.tableLayoutPanel1.Controls.Add(this.label8, 3, 3);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_cropResized, 2, 0);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_extraction, 2, 2);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_output, 3, 0);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_outputSRG, 0, 2);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_maskSRG, 1, 2);
- this.tableLayoutPanel1.Controls.Add(this.pictureBox_gloss, 3, 2);
- this.tableLayoutPanel1.Controls.Add(this.label_processingFileName, 0, 5);
- this.tableLayoutPanel1.Controls.Add(this.label_totalProgress, 2, 5);
- this.tableLayoutPanel1.Controls.Add(this.panel2, 3, 4);
- this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
- this.tableLayoutPanel1.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.tableLayoutPanel1.Name = "tableLayoutPanel1";
- this.tableLayoutPanel1.RowCount = 6;
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 44.44445F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 44.44444F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 11.11111F));
- this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
- this.tableLayoutPanel1.Size = new System.Drawing.Size(1018, 554);
- this.tableLayoutPanel1.TabIndex = 5;
- //
- // panel1
- //
- this.tableLayoutPanel1.SetColumnSpan(this.panel1, 3);
- this.panel1.Controls.Add(this.textBox1);
- this.panel1.Controls.Add(this.button_start);
- this.panel1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel1.Location = new System.Drawing.Point(3, 478);
- this.panel1.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.panel1.Name = "panel1";
- this.panel1.Size = new System.Drawing.Size(758, 49);
- this.panel1.TabIndex = 6;
- //
- // textBox1
- //
- this.textBox1.Location = new System.Drawing.Point(486, -1);
- this.textBox1.Multiline = true;
- this.textBox1.Name = "textBox1";
- this.textBox1.Size = new System.Drawing.Size(269, 60);
- this.textBox1.TabIndex = 1;
- this.textBox1.Visible = false;
- //
- // label1
- //
- this.label1.AutoSize = true;
- this.label1.BackColor = System.Drawing.Color.White;
- this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label1.Location = new System.Drawing.Point(5, 217);
- this.label1.Name = "label1";
- this.label1.Size = new System.Drawing.Size(246, 20);
- this.label1.TabIndex = 7;
- this.label1.Text = "Input";
- this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label2
- //
- this.label2.AutoSize = true;
- this.label2.BackColor = System.Drawing.Color.White;
- this.label2.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label2.Location = new System.Drawing.Point(259, 217);
- this.label2.Name = "label2";
- this.label2.Size = new System.Drawing.Size(246, 20);
- this.label2.TabIndex = 8;
- this.label2.Text = "Detection";
- this.label2.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label3
- //
- this.label3.AutoSize = true;
- this.label3.BackColor = System.Drawing.Color.White;
- this.label3.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label3.Location = new System.Drawing.Point(513, 217);
- this.label3.Name = "label3";
- this.label3.Size = new System.Drawing.Size(246, 20);
- this.label3.TabIndex = 9;
- this.label3.Text = "Crop+Resize";
- this.label3.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label4
- //
- this.label4.AutoSize = true;
- this.label4.BackColor = System.Drawing.Color.White;
- this.label4.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label4.Location = new System.Drawing.Point(767, 217);
- this.label4.Name = "label4";
- this.label4.Size = new System.Drawing.Size(246, 20);
- this.label4.TabIndex = 10;
- this.label4.Text = "Output";
- this.label4.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label5
- //
- this.label5.AutoSize = true;
- this.label5.BackColor = System.Drawing.Color.White;
- this.label5.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label5.Location = new System.Drawing.Point(5, 454);
- this.label5.Name = "label5";
- this.label5.Size = new System.Drawing.Size(246, 20);
- this.label5.TabIndex = 11;
- this.label5.Text = "Output+SRG";
- this.label5.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label6
- //
- this.label6.AutoSize = true;
- this.label6.BackColor = System.Drawing.Color.White;
- this.label6.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label6.Location = new System.Drawing.Point(259, 454);
- this.label6.Name = "label6";
- this.label6.Size = new System.Drawing.Size(246, 20);
- this.label6.TabIndex = 12;
- this.label6.Text = "Mask+SRG";
- this.label6.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label7
- //
- this.label7.AutoSize = true;
- this.label7.BackColor = System.Drawing.Color.White;
- this.label7.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label7.Location = new System.Drawing.Point(513, 454);
- this.label7.Name = "label7";
- this.label7.Size = new System.Drawing.Size(246, 20);
- this.label7.TabIndex = 13;
- this.label7.Text = "Extraction";
- this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // label8
- //
- this.label8.AutoSize = true;
- this.label8.BackColor = System.Drawing.Color.White;
- this.label8.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label8.Location = new System.Drawing.Point(767, 454);
- this.label8.Name = "label8";
- this.label8.Size = new System.Drawing.Size(246, 20);
- this.label8.TabIndex = 14;
- this.label8.Text = "Gloss";
- this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
- //
- // pictureBox_output
- //
- this.pictureBox_output.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_output.Location = new System.Drawing.Point(767, 5);
- this.pictureBox_output.Name = "pictureBox_output";
- this.pictureBox_output.Size = new System.Drawing.Size(246, 207);
- this.pictureBox_output.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_output.TabIndex = 15;
- this.pictureBox_output.TabStop = false;
- //
- // pictureBox_outputSRG
- //
- this.pictureBox_outputSRG.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_outputSRG.Location = new System.Drawing.Point(5, 242);
- this.pictureBox_outputSRG.Name = "pictureBox_outputSRG";
- this.pictureBox_outputSRG.Size = new System.Drawing.Size(246, 207);
- this.pictureBox_outputSRG.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_outputSRG.TabIndex = 16;
- this.pictureBox_outputSRG.TabStop = false;
- //
- // pictureBox_maskSRG
- //
- this.pictureBox_maskSRG.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_maskSRG.Location = new System.Drawing.Point(259, 242);
- this.pictureBox_maskSRG.Name = "pictureBox_maskSRG";
- this.pictureBox_maskSRG.Size = new System.Drawing.Size(246, 207);
- this.pictureBox_maskSRG.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_maskSRG.TabIndex = 17;
- this.pictureBox_maskSRG.TabStop = false;
- //
- // pictureBox_gloss
- //
- this.pictureBox_gloss.Dock = System.Windows.Forms.DockStyle.Fill;
- this.pictureBox_gloss.Location = new System.Drawing.Point(767, 242);
- this.pictureBox_gloss.Name = "pictureBox_gloss";
- this.pictureBox_gloss.Size = new System.Drawing.Size(246, 207);
- this.pictureBox_gloss.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
- this.pictureBox_gloss.TabIndex = 18;
- this.pictureBox_gloss.TabStop = false;
- //
- // label_processingFileName
- //
- this.label_processingFileName.AutoSize = true;
- this.label_processingFileName.BackColor = System.Drawing.Color.White;
- this.tableLayoutPanel1.SetColumnSpan(this.label_processingFileName, 2);
- this.label_processingFileName.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label_processingFileName.Location = new System.Drawing.Point(5, 531);
- this.label_processingFileName.Name = "label_processingFileName";
- this.label_processingFileName.Size = new System.Drawing.Size(500, 21);
- this.label_processingFileName.TabIndex = 19;
- this.label_processingFileName.Text = "Processing File: None";
- this.label_processingFileName.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
- //
- // label_totalProgress
- //
- this.label_totalProgress.AutoSize = true;
- this.label_totalProgress.BackColor = System.Drawing.Color.White;
- this.tableLayoutPanel1.SetColumnSpan(this.label_totalProgress, 2);
- this.label_totalProgress.Dock = System.Windows.Forms.DockStyle.Fill;
- this.label_totalProgress.Location = new System.Drawing.Point(513, 531);
- this.label_totalProgress.Name = "label_totalProgress";
- this.label_totalProgress.Size = new System.Drawing.Size(500, 21);
- this.label_totalProgress.TabIndex = 20;
- this.label_totalProgress.Text = "Total Progress: 0/0";
- this.label_totalProgress.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
- //
- // panel2
- //
- this.panel2.Controls.Add(this.button_pause);
- this.panel2.Dock = System.Windows.Forms.DockStyle.Fill;
- this.panel2.Location = new System.Drawing.Point(767, 479);
- this.panel2.Name = "panel2";
- this.panel2.Size = new System.Drawing.Size(246, 47);
- this.panel2.TabIndex = 21;
- //
- // button_pause
- //
- this.button_pause.Dock = System.Windows.Forms.DockStyle.Fill;
- this.button_pause.Enabled = false;
- this.button_pause.Location = new System.Drawing.Point(0, 0);
- this.button_pause.Name = "button_pause";
- this.button_pause.Size = new System.Drawing.Size(246, 47);
- this.button_pause.TabIndex = 0;
- this.button_pause.Text = "Pause";
- this.button_pause.UseVisualStyleBackColor = true;
- this.button_pause.Click += new System.EventHandler(this.Button_pause_Click);
- //
- // backgroundWorker1
- //
- this.backgroundWorker1.WorkerSupportsCancellation = true;
- this.backgroundWorker1.DoWork += new System.ComponentModel.DoWorkEventHandler(this.BackgroundWorker1_DoWork);
- //
- // Form1
- //
- this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
- this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
- this.ClientSize = new System.Drawing.Size(1018, 554);
- this.Controls.Add(this.tableLayoutPanel1);
- this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
- this.Margin = new System.Windows.Forms.Padding(1, 2, 1, 2);
- this.Name = "Form1";
- this.Text = "DeepTIAS1.8";
- this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.Form1_FormClosing);
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_input)).EndInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_detection)).EndInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_cropResized)).EndInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_extraction)).EndInit();
- this.tableLayoutPanel1.ResumeLayout(false);
- this.tableLayoutPanel1.PerformLayout();
- this.panel1.ResumeLayout(false);
- this.panel1.PerformLayout();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_output)).EndInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_outputSRG)).EndInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_maskSRG)).EndInit();
- ((System.ComponentModel.ISupportInitialize)(this.pictureBox_gloss)).EndInit();
- this.panel2.ResumeLayout(false);
- this.ResumeLayout(false);
-
- }
-
- #endregion
-
- private System.Windows.Forms.Button button_start;
- private System.Windows.Forms.PictureBox pictureBox_input;
- private System.Windows.Forms.PictureBox pictureBox_detection;
- private System.Windows.Forms.PictureBox pictureBox_cropResized;
- private System.Windows.Forms.PictureBox pictureBox_extraction;
- private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
- private System.Windows.Forms.Panel panel1;
- private System.ComponentModel.BackgroundWorker backgroundWorker1;
- private System.Windows.Forms.Label label1;
- private System.Windows.Forms.Label label2;
- private System.Windows.Forms.Label label3;
- private System.Windows.Forms.Label label4;
- private System.Windows.Forms.Label label5;
- private System.Windows.Forms.Label label6;
- private System.Windows.Forms.Label label7;
- private System.Windows.Forms.Label label8;
- private System.Windows.Forms.PictureBox pictureBox_output;
- private System.Windows.Forms.PictureBox pictureBox_outputSRG;
- private System.Windows.Forms.PictureBox pictureBox_maskSRG;
- private System.Windows.Forms.PictureBox pictureBox_gloss;
- private System.Windows.Forms.Label label_processingFileName;
- private System.Windows.Forms.Label label_totalProgress;
- private System.Windows.Forms.Panel panel2;
- private System.Windows.Forms.Button button_pause;
- private System.Windows.Forms.TextBox textBox1;
- }
-}
-
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Form1.cs b/Tongue extraction_cropresizemethod/Tongue extraction/Form1.cs
deleted file mode 100644
index 6192e82..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Form1.cs
+++ /dev/null
@@ -1,959 +0,0 @@
-using System;
-using System.Windows.Forms;
-using System.IO;
-using System.Drawing;
-using System.Drawing.Imaging;
-using OpenCvSharp;
-using System.ComponentModel;
-using System.Runtime.InteropServices;
-using TensorFlow;
-using System.Threading;
-using System.Diagnostics;
-
-namespace Tongue_extraction
-{
- public partial class Form1 : Form
- {
- //Use the RemoveSmallRegionDLL
- [DllImport(@"RemoveSmallRegionDLL.dll", EntryPoint = "RemoveSmallRegion", SetLastError = true, CharSet = CharSet.Ansi, ExactSpelling = false, CallingConvention = CallingConvention.StdCall)]
- extern static void RemoveSmallRegion(string name, string name2, int AreaLimit, int CheckMode, int NeihborMode);
-
- Mat mat_drawBox = new Mat(1024, 1280, MatType.CV_8UC3, 1);
- Mat mat_roi = new Mat(1024, 1280, MatType.CV_8UC3, 1);
- Mat mat_input = new Mat(1024, 1280, MatType.CV_8UC3, 1);
- Mat mat_roi256 = new Mat(256, 256, MatType.CV_8UC3, 1);
- Mat mat_roisize = new Mat(1024, 1280, MatType.CV_8UC3, 1);
- Mat mat_output = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_outputNoBox = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_outputChanged = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_mask = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_extraction = new Mat(1024, 1280, MatType.CV_8UC3, 1);
- Mat mat_cropped;
- Mat mat_outputSRGNoBox = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_outputSRG = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_maskSRG = new Mat(1024, 1280, MatType.CV_8UC1, 1);
- Mat mat_gloss = new Mat(1024, 1280, MatType.CV_8UC1, 1);
-
- public static Bitmap bitmap_bitch;
- string[] path;
-
- OpenCvSharp.Rect rectangle;
-
- byte[] byte_inputDetection;
- byte[] byte_inputSegmentation;
- OpenCvSharp.Point P1 = new OpenCvSharp.Point();
- OpenCvSharp.Point P2 = new OpenCvSharp.Point();
-
- byte[] mask = new byte[200000];
-
- string basepath;
- string imageFile;
- string time;
- string modelFile;
- int ii = 0;
- int count;
- int areaCount;
-
- Rect roi = new Rect();
- int mmp = 0;
- int pmm = 0;
-
- int check_detection = 0;
- float max_score = 0;
-
- private ManualResetEvent manualReset = new ManualResetEvent(true);
-
- string fileName_info = Directory.GetCurrentDirectory() + "//info//" + DateTime.Now.ToLocalTime().ToString("yyyyMMddhhmmss") + ".csv";
- StreamWriter sw;
-
- public Form1()
- {
- InitializeComponent();
-
- // boundingboxなどのinfo出力用
- sw = new StreamWriter(fileName_info, false, System.Text.Encoding.GetEncoding("shift_jis"));
- sw.WriteLine(
- "image" + "," +
- "top left X" + "," + "top left Y" + "," + "bottom right X" + "," + "bottom right Y" + "," +
- "Width" + "," + "Height" + "," + "Area" + "," + "Gloss Count");
- }
-
- [Conditional("DEBUG")]
- private void ShowDebugBox()
- {
- textBox1.Visible = true;
- }
-
- private void Button_start_Click(object sender, EventArgs e)
- {
- ShowDebugBox();
- path = Directory.GetFiles("data");
- button_start.Enabled = false;
- button_pause.Enabled = true;
- pictureBox_input.Image = null;
- pictureBox_input.Refresh();
- pictureBox_detection.Image = null;
- pictureBox_detection.Refresh();
- pictureBox_cropResized.Image = null;
- pictureBox_cropResized.Refresh();
- pictureBox_output.Image = null;
- pictureBox_output.Refresh();
- pictureBox_outputSRG.Image = null;
- pictureBox_outputSRG.Refresh();
- pictureBox_maskSRG.Image = null;
- pictureBox_maskSRG.Refresh();
- pictureBox_extraction.Image = null;
- pictureBox_extraction.Refresh();
- pictureBox_gloss.Image = null;
- pictureBox_gloss.Refresh();
- backgroundWorker1.RunWorkerAsync();
- }
-
- public static class ImageUtil
- {
- // Convert the image in filename to a Tensor suitable as input to the Inception model.
- public static TFTensor CreateTensorFromImageFile(byte[] contents, TFDataType destinationDataType = TFDataType.UInt8)
- {
- // DecodeJpeg uses a scalar String-valued tensor as input.
- var tensor = TFTensor.CreateString(contents);
-
- TFGraph graph;
- TFOutput input, output;
-
- // Construct a graph to normalize the image
- ConstructGraphToNormalizeImage(out graph, out input, out output, destinationDataType);
-
- // Execute that graph to normalize this one image
- using (var session = new TFSession(graph))
- {
- var normalized = session.Run(
- inputs: new[] { input },
- inputValues: new[] { tensor },
- outputs: new[] { output });
-
- if (session != null)
- {
- session.Dispose();
- }
- if (tensor != null)
- {
- tensor.Dispose();
- }
- if (graph != null)
- {
- graph.Dispose();
- }
- GC.Collect();
- GC.WaitForPendingFinalizers();
- GC.Collect();
-
- return normalized[0];
- }
- }
-
- // The inception model takes as input the image described by a Tensor in a very
- // specific normalized format (a particular image size, shape of the input tensor,
- // normalized pixel values etc.).
- //
- // This function constructs a graph of TensorFlow operations which takes as
- // input a JPEG-encoded string and returns a tensor suitable as input to the
- // inception model.
-
- public static void ConstructGraphToNormalizeImage(out TFGraph graph, out TFOutput input, out TFOutput output, TFDataType destinationDataType = TFDataType.UInt8)
- {
- const int W = 256;
- const int H = 256;
- const float Mean = 0;
- const float Scale = 1;
- graph = new TFGraph();
- input = graph.Placeholder(TFDataType.String);
- output = graph.Cast(graph.Div(
- x: graph.Sub(
- x: graph.ResizeBilinear(
- images: graph.ExpandDims(
- input: graph.Cast(
- graph.DecodeJpeg(contents: input, channels: 3), DstT: destinationDataType),
- dim: graph.Const(0, "make_batch")),
- size: graph.Const(new int[] { W, H }, "size")),
- y: graph.Const(Mean, "mean")),
- y: graph.Const(Scale, "scale")), destinationDataType);
- GC.Collect();
- GC.WaitForPendingFinalizers();
- GC.Collect();
- }
- }
-
- public static class ImageUtil2
- {
- // Convert the image in filename to a Tensor suitable as input to the Inception model.
- public static TFTensor CreateTensorFromImageFile(byte[] contents, TFDataType destinationDataType = TFDataType.Float)
- {
- // DecodeJpeg uses a scalar String-valued tensor as input.
- var tensor = TFTensor.CreateString(contents);
-
- TFGraph graph;
- TFOutput input, output;
-
- // Construct a graph to normalize the image
- ConstructGraphToNormalizeImage(out graph, out input, out output, destinationDataType);
-
- // Execute that graph to normalize this one image
- using (var session = new TFSession(graph))
- {
- var normalized = session.Run(
- inputs: new[] { input },
- inputValues: new[] { tensor },
- outputs: new[] { output });
-
- if (session != null)
- {
- session.Dispose();
- }
- if (tensor != null)
- {
- tensor.Dispose();
- }
- if (graph != null)
- {
- graph.Dispose();
- }
- GC.Collect();
- GC.WaitForPendingFinalizers();
- GC.Collect();
-
- return normalized[0];
- }
- }
-
- // The inception model takes as input the image described by a Tensor in a very
- // specific normalized format (a particular image size, shape of the input tensor,
- // normalized pixel values etc.).
- //
- // This function constructs a graph of TensorFlow operations which takes as
- // input a JPEG-encoded string and returns a tensor suitable as input to the
- // inception model.
-
- public static void ConstructGraphToNormalizeImage(out TFGraph graph, out TFOutput input, out TFOutput output, TFDataType destinationDataType = TFDataType.Float)
- {
- const int W = 256;
- const int H = 256;
- //const int W = 512;
- //const int H = 512;
-
- const float Mean = 128;
- const float Scale = 128;
- graph = new TFGraph();
- input = graph.Placeholder(TFDataType.String);
- output = graph.Cast(graph.Div(
- x: graph.Sub(
- x: graph.ResizeBilinear(
- images: graph.ExpandDims(
- input: graph.Cast(
- graph.DecodeJpeg(contents: input, channels: 3), DstT: TFDataType.Float),
- dim: graph.Const(0, "make_batch")),
- size: graph.Const(new int[] { W, H }, "size")),
- y: graph.Const(Mean, "mean")),
- y: graph.Const(Scale, "scale")), destinationDataType);
- GC.Collect();
- GC.WaitForPendingFinalizers();
- GC.Collect();
- }
- }
-
- public static byte[] Bitmap2Byte(Bitmap bitmap)
- {
- using (MemoryStream stream = new MemoryStream())
- {
- bitmap.Save(stream, ImageFormat.Jpeg);
- byte[] data = new byte[stream.Length];
- stream.Seek(0, SeekOrigin.Begin);
- stream.Read(data, 0, Convert.ToInt32(stream.Length));
- return data;
- }
- }
-
- private static string DownloadDefaultModel(string dir)
- {
- var modelFile = Path.Combine(dir, "2120_256_64_42999_enhancment_L1loss0.03435.pb");
- return modelFile;
- }
-
- private static string DownloadDefaultModel_noBoxPix2Pix(string dir)
- {
- var modelFile = Path.Combine(dir, "424_256_64_5999_scale300_enhancment_L1loss0.02001.pb");
- return modelFile;
- }
-
- public static Bitmap ToGrayBitmap(byte[] rawValues, int width, int height)
- {
- //// Apply for a target bitmap variable and lock its memory area
- Bitmap bmp = new Bitmap(width, height, PixelFormat.Format8bppIndexed);
- BitmapData bmpData = bmp.LockBits(new Rectangle(0, 0, width, height),
- ImageLockMode.WriteOnly, PixelFormat.Format8bppIndexed);
-
- //// Get image parameters
- int stride = bmpData.Stride; // Width of the scan line
- int offset = stride - width; // Show gap between width and scan line width
- IntPtr iptr = bmpData.Scan0; // Get the memory start position of bmpData
- int scanBytes = stride * height;// Use stride width to indicate that this is the size of the memory area
-
- //// The following is to convert the original display size byte array to the byte array actually stored in memory.
- int posScan = 0, posReal = 0;// Set two position pointers respectively, pointing to the source array and the target array
- byte[] pixelValues = new byte[scanBytes]; //Allocate memory for the target array
-
- for (int x = 0; x < height; x++)
- {
- //// The following loop section is a simulated line scan
- for (int y = 0; y < width; y++)
- {
- pixelValues[posScan++] = rawValues[posReal++];
- }
- posScan += offset; //At the end of the line scan, move the target position pointer over that "gap"
- }
-
- //// Use Marshal's Copy method to copy the just obtained memory byte array into BitmapData.
- System.Runtime.InteropServices.Marshal.Copy(pixelValues, 0, iptr, scanBytes);
- bmp.UnlockBits(bmpData); // Unlock the memory area
-
- //// The following code is to modify the index table of the generated bitmap, from pseudo color to grayscale
- ColorPalette tempPalette;
- using (Bitmap tempBmp = new Bitmap(1, 1, PixelFormat.Format8bppIndexed))
- {
- tempPalette = tempBmp.Palette;
- }
- for (int i = 0; i < 256; i++)
- {
- tempPalette.Entries[i] = Color.FromArgb(i, i, i);
- }
-
- bmp.Palette = tempPalette;
-
- //// The algorithm ends here and returns the result.
- return bmp;
- }
-
- private void BackgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
- {
- using (MemoryStream ms = new MemoryStream())
- {
- for (int a = 0; a < path.Length; a++)
- {
- manualReset.WaitOne();
- ii = 0;
- basepath = Directory.GetCurrentDirectory();
- imageFile = System.Text.RegularExpressions.Regex.Replace(path[a], "data", "");
- Invoke((MethodInvoker)delegate
- {
- label_processingFileName.Text = "Processing File: " + imageFile;
- count = a + 1;
- label_totalProgress.Text = "Total Progress: " + count + "/" + path.Length;
- });
-
- mat_input = Cv2.ImRead(basepath + "\\data" + imageFile, ImreadModes.Color);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_input.Image = bitmap_bitch;
- pictureBox_input.Refresh();
- });
-
- label8.BackColor = Color.White;
- label1.BackColor = Color.Red;
- mat_drawBox = mat_input.Clone();
- mat_cropped = new Mat(mat_input.Size(), MatType.CV_8UC3, 1);
- byte_inputDetection = Bitmap2Byte(bitmap_bitch);
-
- using (var graph = new TFGraph())
- {
- var model = File.ReadAllBytes(Directory.GetCurrentDirectory() + "/Detection_Normal.pb");
- graph.Import(model, "");
-
- using (var session = new TFSession(graph))
- {
- var tensor = ImageUtil.CreateTensorFromImageFile(byte_inputDetection, TFDataType.UInt8);
-
- var runner = session.GetRunner();
- runner
-
- .AddInput(graph["image_tensor"][0], tensor)
- .Fetch("detection_boxes", "detection_scores", "detection_classes", "num_detections");
-
- var output = runner.Run();
- var boxes = (float[,,])output[0].GetValue();
- var scores = (float[,])output[1].GetValue();
- var classes = (float[,])output[2].GetValue();
- var detections = (float[])output[3].GetValue();
-
- check_detection = 0;
- max_score = 0;
-
- for (int i = 0; i < scores.Length; i++)
- {
- if ((scores[0, i] > 0.5) && (scores[0, i] > max_score))
- {
- max_score = scores[0, i];
- float y_min = boxes[0, i, 0] * (float)bitmap_bitch.Height;
- float x_min = boxes[0, i, 1] * (float)bitmap_bitch.Width;
- float y_max = boxes[0, i, 2] * (float)bitmap_bitch.Height;
- float x_max = boxes[0, i, 3] * (float)bitmap_bitch.Width;
- P1.X = (int)x_min;
- P1.Y = (int)y_min;
- P2.X = (int)x_max;
- P2.Y = (int)y_max;
- Cv2.Rectangle(mat_drawBox, P1, P2, new Scalar(0, 255, 0), 5);
- rectangle.X = (int)x_min;
- rectangle.Y = (int)y_min;
- rectangle.Width = (int)(x_max - x_min);
- rectangle.Height = (int)(y_max - y_min);
-
- check_detection = 1;
- }
- }
- }
- }
-
- // 舌が検出されなかった場合,Detectionされた画像で学習したモデル(CropResize)を使用するのはまずいので
- // 以前のモデル(Detectionせずに学習)を使用する
- if (check_detection == 0)
- {
- MessageBox.Show("Error: Sorry can not detect any tongue in this image.\nPress [OK] to skip preprocessing.",
- "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- time = DateTime.Now.ToLocalTime().ToString();
- File.AppendAllText("DetectionFailedLog.txt ", time + " " + imageFile + "\n");
- Invoke((MethodInvoker)delegate
- {
- pictureBox_detection.Image = null;
- pictureBox_detection.Refresh();
- pictureBox_cropResized.Image = null;
- pictureBox_cropResized.Refresh();
- pictureBox_output.Image = null;
- pictureBox_output.Refresh();
- pictureBox_outputSRG.Image = null;
- pictureBox_outputSRG.Refresh();
- pictureBox_maskSRG.Image = null;
- pictureBox_maskSRG.Refresh();
- pictureBox_extraction.Image = null;
- pictureBox_extraction.Refresh();
- pictureBox_gloss.Image = null;
- pictureBox_gloss.Refresh();
- });
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
- byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
-
- Thread.Sleep(1000);
- modelFile = DownloadDefaultModel_noBoxPix2Pix(basepath);
- using (var graph = new TFGraph())
- {
- var model = File.ReadAllBytes(modelFile);
- graph.Import(model, "");
-
- using (var session = new TFSession(graph))
- {
- var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
-
- var runner = session.GetRunner();
- runner
- .AddInput(graph["generator/input_image"][0], tensor)
- .Fetch(graph["generator/prediction"][0]);
- var output = runner.Run();
- float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
-
- for (int p = 0; p < 256; p++)
- {
- for (int q = 0; q < 256; q++)
- {
- float check = resultfloat[0, p, q, 0];
- if (check < 0)
- {
- mask[ii] = 0;
- }
- else
- {
- mask[ii] = 255;
- }
- ii++;
- }
- }
- }
- }
- Thread.Sleep(1000);
- bitmap_bitch = ToGrayBitmap(mask, 256, 256);
- mat_outputNoBox = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputNoBox);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_output.Image = bitmap_bitch;
- pictureBox_output.Refresh();
- label1.BackColor = Color.White;
- label4.BackColor = Color.Red;
- });
-
- mat_outputNoBox.SaveImage(basepath + "\\output256" + imageFile);
- try
- {
- RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
- RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
- }
- catch
- {
- MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- break;
- }
- mat_outputSRGNoBox = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRGNoBox);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_outputSRG.Image = bitmap_bitch;
- pictureBox_outputSRG.Refresh();
- label4.BackColor = Color.White;
- label5.BackColor = Color.Red;
- });
- Cv2.Resize(mat_outputSRGNoBox, mat_mask, mat_input.Size());
- mat_mask.SaveImage(basepath + "\\mask" + imageFile);
- try
- {
- RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
- RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
- }
- catch
- {
- MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- break;
- }
- mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
- Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_maskSRG.Image = bitmap_bitch;
- pictureBox_maskSRG.Refresh();
- label5.BackColor = Color.White;
- label6.BackColor = Color.Red;
- });
- mat_extraction = mat_input.Clone();
- areaCount = 0;
- for (int i = 0; i < mat_input.Height; i++)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- Vec3b pix = mat_extraction.At(i, j);
- if (mat_maskSRG.At(i, j) == 0)
- {
- pix[0] = (byte)(255);
- pix[1] = (byte)(255);
- pix[2] = (byte)(255);
- mat_extraction.Set(i, j, pix);
- }
- else
- {
- pix[0] = (byte)(mat_extraction.At(i, j).Item0);
- pix[1] = (byte)(mat_extraction.At(i, j).Item1);
- pix[2] = (byte)(mat_extraction.At(i, j).Item2);
- mat_extraction.Set(i, j, pix);
- areaCount++;
- }
- }
- }
- mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_extraction.Image = bitmap_bitch;
- pictureBox_extraction.Refresh();
- });
- label6.BackColor = Color.White;
-
-
-
- label7.BackColor = Color.Red;
- mat_gloss = mat_input.Clone();
- double sum = 0.0;
- double sumsq = 0.0;
- double avg = 0.0;
- double stdev = 0.0;
- for (int i = 0; i < mat_input.Height; i++)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- var g_value = mat_gloss.At(i, j).Item1;
- if (mat_maskSRG.At(i, j) == 0)
- {
- }
- else
- {
- sum += g_value; // G チャンネル の和
- sumsq += g_value * g_value; // G チャンネル の平方和
- }
- }
- }
- avg = (double)(sum / areaCount); // G チャンネル の平均
- stdev = Math.Sqrt(Math.Abs((sumsq / areaCount) - (avg * avg))); // 標準偏差
- double thresh = avg + (stdev * 2.0);
- int glossCount = 0;
- for (int i = 0; i < mat_input.Height; i++)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- if (mat_maskSRG.At(i, j) == 0)
- {
- }
- else
- {
- Vec3b pix = mat_gloss.At(i, j);
- if (pix.Item1 > thresh)
- {
- glossCount++;
- pix[0] = (byte)(255);
- pix[1] = (byte)(0);
- pix[2] = (byte)(0);
- mat_gloss.Set(i, j, pix);
- }
- }
-
- }
- }
- mat_gloss.SaveImage(basepath + "\\gloss" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_gloss);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_gloss.Image = bitmap_bitch;
- pictureBox_gloss.Refresh();
- });
- label7.BackColor = Color.White;
-
-
- label8.BackColor = Color.Red;
- //csvにboundingbox情報を保存
- sw.WriteLine(
- imageFile.Substring(1) + ","
- + "None" + "," + "None" + ","
- + "None" + "," + "None" + ","
- + "None" + "," + "None" + "," + areaCount.ToString()
- );
-
- continue;
- }
-
- // 舌が正常にDetectionされた場合の処理
- else
- {
- // 検出されたバウンディングボックス画像を保存
- mat_drawBox.SaveImage(basepath + "\\detection" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_drawBox);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_detection.Image = bitmap_bitch;
- pictureBox_detection.Refresh();
- });
- label1.BackColor = Color.White;
-
- // 検出領域でcropし,256*256にリサイズして表示
- label2.BackColor = Color.Red;
- for (int i = P1.Y; i < P2.Y; i++)
- {
- for (int j = P1.X; j < P2.X; j++)
- {
- // 舌領域以外を黒へ
- Vec3b pix = mat_input.At(i, j);
- mat_cropped.Set(i, j, pix);
- }
- }
- mat_cropped.SaveImage(basepath + "\\cropped" + imageFile);
- // 検出領域の範囲を切り出す
- OpenCvSharp.Size size_roi = new OpenCvSharp.Size();
- size_roi.Height = rectangle.Height;
- size_roi.Width = rectangle.Width;
- roi = new Rect(P1, size_roi);
- mat_roisize = mat_input.Clone(roi);
- // セグメンテーションのため,256*256にリサイズ
- Cv2.Resize(mat_roisize, mat_roi, mat_roi256.Size());
- mat_roi.SaveImage(basepath + "\\cropresized" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_roi);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_cropResized.Image = bitmap_bitch;
- pictureBox_cropResized.Refresh();
- });
- label2.BackColor = Color.White;
-
- // セグメンテーションを行う
- label3.BackColor = Color.Red;
- byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
- Thread.Sleep(1000);
- modelFile = DownloadDefaultModel(basepath);
- using (var graph = new TFGraph())
- {
- var model = File.ReadAllBytes(modelFile);
- graph.Import(model, "");
-
- using (var session = new TFSession(graph))
- {
- var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
- var runner = session.GetRunner();
- runner
- .AddInput(graph["generator/input_image"][0], tensor)
- .Fetch(graph["generator/prediction"][0]);
-
- //.AddInput(graph["input_image"][0], tensor)
- //.Fetch(graph["generator1/decoder_1/Tanh"][0]);
-
- var output = runner.Run();
- float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
-
- for (int p = 0; p < 256; p++)
- {
- for (int q = 0; q < 256; q++)
- {
- float check = resultfloat[0, p, q, 0];
- if (check < 0)
- {
- mask[ii] = 0;
- }
- else
- {
- mask[ii] = 255;
- }
- ii++;
- }
- }
- }
- }
- GC.Collect();
- Thread.Sleep(1000);
- bitmap_bitch = ToGrayBitmap(mask, 256, 256);
- mat_output = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_output);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_output.Image = bitmap_bitch;
- pictureBox_output.Refresh();
- });
- label3.BackColor = Color.White;
-
- // 舌分割結果の後処理
- label4.BackColor = Color.Red;
- // 舌分割結果を保存
- mat_output.SaveImage(basepath + "\\output256" + imageFile);
- // 後処理(領域拡張法)でノイズ除去
- try
- {
- RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
- RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
- }
- catch
- {
- MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- break;
- }
- mat_outputSRG = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRG);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_outputSRG.Image = bitmap_bitch;
- pictureBox_outputSRG.Refresh();
- });
- label4.BackColor = Color.White;
-
- label5.BackColor = Color.Red;
- // バウンディングボックスのサイズに舌分割結果をリサイズ
- // この時に2値じゃなくなってるみたい
- Cv2.Resize(mat_outputSRG, mat_outputChanged, mat_roisize.Size());
- mat_outputChanged.SaveImage(basepath + "\\output_resized" + imageFile);
- // 舌検出された領域において舌領域を切り出す
- mat_mask = new Mat(mat_input.Size(), MatType.CV_8UC1, 1);
- for (int i = P1.Y; i < P2.Y; i++)
- {
- for (int j = P1.X; j < P2.X; j++)
- {
- int pix = mat_outputChanged.At(mmp, pmm);
- mat_mask.Set(i, j, pix);
- pmm++;
- }
- mmp++;
- pmm = 0;
- }
- mmp = 0;
- Cv2.Resize(mat_mask, mat_mask, mat_input.Size());
- mat_mask.SaveImage(basepath + "\\mask" + imageFile);
- // ノイズ処理
- try
- {
- RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
- RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
- }
- catch
- {
- MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- break;
- }
-
- mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
- Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
- // 2値マスクの最終結果
- mat_maskSRG.SaveImage(basepath + "\\mask_final" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_maskSRG.Image = bitmap_bitch;
- pictureBox_maskSRG.Refresh();
- });
- label5.BackColor = Color.White;
-
- // 元画像とマスクを合わせて,舌領域を抜き出す
- label6.BackColor = Color.Red;
- mat_extraction = mat_input.Clone();
- areaCount = 0;
- for (int i = 0; i < mat_input.Height; i++)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- Vec3b pix = mat_extraction.At(i, j);
- if (mat_maskSRG.At(i, j) == 0)
- {
- pix[0] = (byte)(255);
- pix[1] = (byte)(255);
- pix[2] = (byte)(255);
- mat_extraction.Set(i, j, pix);
- }
- else
- {
- pix[0] = (byte)(mat_extraction.At(i, j).Item0);
- pix[1] = (byte)(mat_extraction.At(i, j).Item1);
- pix[2] = (byte)(mat_extraction.At(i, j).Item2);
- mat_extraction.Set(i, j, pix);
- areaCount++;
- }
- }
- }
- mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_extraction.Image = bitmap_bitch;
- pictureBox_extraction.Refresh();
- });
-
- label6.BackColor = Color.White;
-
- // Gloss抽出処理
- // extraction結果は使用しない(255,255,255の扱いが煩雑なため)
- label7.BackColor = Color.Red;
- mat_gloss = mat_input.Clone();
- double sum = 0.0;
- double sumsq = 0.0;
- double avg = 0.0;
- double stdev = 0.0;
- // 2010石川さんの手法にならい,Gchannelのavgとstdを計算
- for (int i = 0; i < mat_input.Height; i++)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- var g_value = mat_gloss.At(i, j).Item1;
- if (mat_maskSRG.At(i, j) == 0)
- {
- }
- else
- {
- sum += g_value; // G チャンネル の和
- sumsq += g_value * g_value; // G チャンネル の平方和
- }
- }
- }
- avg = (double)(sum / areaCount); // G チャンネル の平均
- stdev = Math.Sqrt(Math.Abs((sumsq / areaCount) - (avg * avg))); // 標準偏差
-
- // 閾値を決定し,glossをカウント
- double thresh = avg + (stdev * 2.0);
- int glossCount = 0;
- for (int i = 0; i < mat_input.Height; i++)
- {
- for (int j = 0; j < mat_input.Width; j++)
- {
- Vec3b pix = mat_gloss.At(i, j);
- if (mat_maskSRG.At(i, j) == 0)
- {
- pix[0] = (byte)((pix[0] + pix[1] + pix[2]) / 3);
- pix[1] = (byte)pix[0];
- pix[2] = (byte)pix[0];
- mat_gloss.Set(i, j, pix);
- }
- else
- {
- if (pix.Item1 > thresh)
- {
- glossCount++;
- pix[0] = (byte)(255);
- pix[1] = (byte)(0);
- pix[2] = (byte)(255);
- mat_gloss.Set(i, j, pix);
- }
- else
- {
- pix[0] = (byte)((pix[0] + pix[1] + pix[2]) / 3);
- pix[1] = (byte)pix[0];
- pix[2] = (byte)pix[0];
- mat_gloss.Set(i, j, pix);
- }
- }
-
- }
- }
- mat_gloss.SaveImage(basepath + "\\gloss" + imageFile);
- bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_gloss);
- Invoke((MethodInvoker)delegate
- {
- pictureBox_gloss.Image = bitmap_bitch;
- pictureBox_gloss.Refresh();
- });
- label7.BackColor = Color.White;
-
- // 処理が終わった画像を記録する
- label8.BackColor = Color.Red;
- time = DateTime.Now.ToLocalTime().ToString();
- File.AppendAllText("Log.txt ", time + " " + imageFile + " Done!\n");
-
-
- //csvにboundingbox情報を保存
- sw.WriteLine(
- imageFile.Substring(1) + ","
- + P1.X.ToString() + "," + P1.Y.ToString() + ","
- + P2.X.ToString() + "," + P2.Y.ToString() + ","
- + Math.Abs(P1.X - P2.X).ToString() + "," + Math.Abs(P1.Y - P2.Y).ToString() + ","
- + areaCount.ToString() + "," + glossCount.ToString()
- );
- }
- GC.Collect();
- }
- MessageBox.Show("Finished!");
-
- Invoke((MethodInvoker)delegate
- {
- button_start.Enabled = true;
- button_pause.Enabled = false;
- label_processingFileName.Text = "Processing File: None";
- });
- }
- }
-
- private void Button_pause_Click(object sender, EventArgs e)
- {
- if (button_pause.Text == "Pause")
- {
- manualReset.Reset();
- button_pause.Text = "Continue";
- }
- else
- {
- manualReset.Set();
- button_pause.Text = "Pause";
- }
- }
-
- private void Form1_FormClosing(object sender, FormClosingEventArgs e)
- {
- Console.WriteLine("file closing");
- sw.Close();
- Console.WriteLine("file closed");
- }
- }
-}
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Form1.resx b/Tongue extraction_cropresizemethod/Tongue extraction/Form1.resx
deleted file mode 100644
index e1424ee..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Form1.resx
+++ /dev/null
@@ -1,1080 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- 17, 17
-
-
- 58
-
-
-
-
- AAABAAEAa4AAAAEAIAAo3gAAFgAAACgAAABrAAAAAAEAAAEAIAAAAAAAANYAAGM4AABjOAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYA5eTmA+Xk5gLl5OYB5ePlAOjs6gDl5OUA4dznAOfn
- 5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXj5gDl5OYA5eTmAOXj5gHl5OYA5enrAObe4QDl5OYC5eTmBOXk
- 5gTl5OYD5eTmAebk5wDk5OcA5+XkAOXj6ADl5ecA5+HnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5QDl4+UA5eTlCeXk5kPl5OYl5uXmBujl
- 5gDn5eYA3OTmAOXl5QDl5eUA5ebjAOXk5gPl5OYE5eTmA+Xk5gLl5OYB5uPmAO3o7gDj5OQA6+LsAObl
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmA+Xk5gDl5OZN5eTm/+Xk
- 5vjl5Obe5eTmuuXk5pDl5OZj5eTmOeXk5hbx5ewB/+T/AOfk5wDl6OoA5OPlAOTk5QDl5OYB5eTmBOXk
- 5gTl5OYD5eTmAuXk5gHk4eYA5uHkAObk5QDk4ugA5eXlAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOTj5QDl5OYD5eTmAOXk
- 5qfl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm8eXk5s/l5Oap5eTmfeXk5lDl5OYq5eTmCubn
- 5gDm5uYA5d7mAOXm5QDk6OQA5OjkAOXl5gPl5OYE5eTmBOXk5gLl5OYB5eTmAObo5gDt8+IA5ubmAOXl
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm4+YA5eTmAeXk
- 5gDl5OYW5eTm7OXk5v/l5Ob95eTm/eXk5vzl5Ob75eTm++Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5vzl5Obl5eTmwOXk5pbl5OZp5eTmP+Xk5hvm5+YC5+jmAObl5gDr5OYA5OTnAOTk5wDm5OcB5eTmBOXk
- 5gTl5OYD5eTmAuXk5gHl5OcA4uXkAOTl5QDq5eoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gDl5OYD5eTmAOXk5lfl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm++Xk
- 5vvl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm9eXk5tXl5Oau5eTmg+Xk5lbl5OYu5eTnDuPk
- 5QDj5OUA5+XoAOXj6ADg89UA4e7XAOXk5gLl5OYE5eTmBOXk5gLl5OYB5eTmAObl5gDl5OUA5uTmAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AADj4uMA5uTmAOXk5gPl5OYA5eTmpOXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/eXk5vzl5Ob75eTm++Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v7l5Obp5eTmx+Xk5p7l5OZw5eTmROXk5h/m5eYE5eXmAObk5gDl5OYA5eTmAOXk5gDl5OYB5eTmA+Xk
- 5gTl5OYD5eTmAuXk5gHl5OUA6tneAOTm5wDk4uQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOTk5QDl5OYB5eTmAOXk5hDl5Obl5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm/OXk
- 5vvl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm+OXk5tvl5Oa25eTmiuXk5l3l5OYz5uPmEsvl
- zQDg5eIA5+PnAObk5gDm5OYA5uTmAOXk5wHp4+UA6uPlALb09QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5eXnAOXk5gPl5OYA5eTmSeXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5vzl5Ob75eTm++Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Obt5eTmzOXk5qPl5OZ25eTmS+Xk5iXl5ecH5eXnAOPj5QDf398A39/fAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOji6ADk4+UA5eTmBOXk5gDl5OaT5eTm/+Xk5vvl5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob95eTm/OXk
- 5vvl5Ob85eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm+eXk5ufl5OaY5eXnAOXl5wHl4+UAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5OTlAOXk5wHl5ecA5eXoB+Xk5tjl5Ob/5eTm/eXk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v3l5Ob75eTm++Xk5v3l5Ob75eTm/+Xk5n3l5OYA5eTmBOXm
- 6AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmA+Xk5gDl5OY95eTm/+Xk
- 5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obr5eTmGOXk
- 5gDl5OYB4ODgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA7entAOXk5gDl5OYE5eTmAOXk
- 5onl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk
- 5pbl5OYA5eTmA+Xk5QDm5ugAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmAePk
- 5gDj5OYD5eTm0+Xk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTmNeXk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gDl5OYD5eTmAOXk5j3l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob85eTm/+Xk5r3l5OYA5eTlAeXk5wDl5OUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AADl5OcA6OTlAOXk5gTl5OYA5eTmkeXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmW+Xk5gDl5OYD5eTmAObm6gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eXmAOXj5gDl5OYB5uXmAObk5hLl5Obl5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5uPl5OYP5eTmAOXk5gHk5OYAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAObk5gDk5OYA5OPmAOXk5gbl5OYA5eTmcuXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmleXk5gDl5OYD5eXmAOfm5wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOXl5QDl5OYA5uTmAObk5QDl5OYF5eTmAOXk5jfl5Oby5eTm/+Xk5v7l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5OZD5eTmAOXk5gPl5OcAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOTj5wDl5OUA5OPmAOTj5gDl5OYC5eTmBeXk5gDl5OY35eTm6+Xk5v/l5Ob+5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm3+Tj5grk4+YA5eTmAeXl
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA6ungAOfm4wDi4ekA5OTnAOTk5wDl5OYA5uTmAOXj
- 5QDl4+UA5eXnAOXk5gHl5OYB5eTmAeXk5gLl5OYE5eTmA+Xk5gHl5OYA5eTmVuXk5vTl5Ob/5eTm/eXk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oaf5eTmAOXk
- 5gPl5OYA5eXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5ecA3eHjAN3g4wDl5OYC5eTmBOXk5gPm5OUC5+TlAeTk
- 5QHi5OYB5unrAOPs7gDT2dsA0NvdANzl5wAAAAAA5eTmAOXk5gHk5OUA5eTmHeXk5qHl5Ob/5eTm/eXk
- 5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk
- 5l7l5OYA5eTmBOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAObl5gDm5eYA5eTmAuPh5QDj4uUA5OPmAAAA
- AAAAAAAAAAAAAAAAAACl5OYA/+DmAP/R2wDg4OUB5uTmBeXk5hHl5OYp5eTmWOXk5qbl5Ob05eTm/+Xk
- 5vzl5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
- 5v/l5Ob+5eTmK+Xk5gDl5OYC5OTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDl5OYA4eLlAOTk5gLj5OUA5eTmLOXk
- 5oHl5Oat5eTmvuXk5sTl5ObG5eTmyOXk5svl5ObM5eTmz+Xk5tTl5Obd5eTm7OXk5vzl5Ob/5eTm/+Xk
- 5v7l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob95eTm/+Xk5uTl5OYM5eTmAOXk5gEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePmAObl5gDl5OYC5eTmAOXk
- 5nXl5Ob/5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5vzl5Ob75eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmyuTk5QDk5OUB5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OUA5eTmA+Xk
- 5gDl5OZM5eTm/+Xk5v3l5Ob75eTm++Xk5vzl5Ob85eTm/OXk5vzl5Ob85eTm/eXk5v3l5Ob95eTm/eXk
- 5v7l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oa15eTmAOXk5gLl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gDl5OYC5eTmAOXk5rjl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5q3l5OYA5eTmA+bk5gAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eTmAebk5gDm5OYX5eTm7uXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmsOXk5gDl5OYD5eTmAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOXl5gDl5OYD5eTmAOXk5kXl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oa/5eTmAOXk
- 5gLl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gTl5OYA5eTmcuXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
- 5tbm5eUA5eTmAOXk5gEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5uUA5eTmBOXk5gDl5Oac5eTm/+Xk5vvl5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v7l5Ob/5eTm7+Xk5hfl5OYA5eTmAebm5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gDl5OYC5eTmAOXk5rvl5Ob/5eTm/OXk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTmPOXk5gDl5OYD5eXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOjm7ADj4uIA5eTm1OXk
- 5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OZ15eTmAOXk5gTk4+QAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYB5uPmAObj
- 5gvl5Obj5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5rXl5OYA5eTmAuXl5gDo6OgAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gHl5OYA5eTmFuXk5u/l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm7uXk5hbl5OYA5eTmAeXk
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eTmAuXk5gDl5OYi5eTm++Xk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmXOXk
- 5gDl5OYD5eTmAObm5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAADl5OYC5eTmAOXk5i/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
- 5v/l5Oaz5eTmAOXk5gLl5OYA5+PnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gPl5OYA5eTmPuXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob+5eTm/+Xk5vfl5OYn5eTmAOXk5gLl4+YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmA+Xk5gDl5OZM5eTm/+Xk5v3l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5oPl5OYA5eTmBOjj5gDl5OUAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOvr6wDl5OYE5eTmAOXk5lzl5Ob/5eTm++Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm3+Xk5g3l5OYA5eTmAeXk5gAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePlAOXk5gTl5OYA5eTmcuXk
- 5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmYOXk5gDl5OYD5eTlAOXl
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADf3+wA5eTmBOXk
- 5gDl5OaK5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5ObJ5NG+AOTm
- 7ADl5OUA5OPnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObk
- 5gDl5OYE5eTmAOXk5qPl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
- 5v/l5OZI5eTmAOXk5gPl5eYA5ubmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eTmAOXk5gHl5OYA5eTmw+Xk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob85eTm/+Xk5rLl5OYA5eTmAubk5gDl4+YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAADl5OYB5eTnAOTk5wrl5Obi5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm/eXk5jPl5OYA5eTmAuXk5wDm4uYAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5OTmAOXk5gLl5OYA5eTmK+Xk5v7l5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmmuXk5gDl5OYD5OPmAOXl5wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm5OYA5eTmBOXk5gDl5OZe5eTm/+Xk5vzl5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obx5ePmHuXk5gDl5OYC5eTlAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6OjoAOXl5QDl5OYD5eTmAOXk5qLl5Ob/5eTm++Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/k4+b/4+Lm/+Pj5v/j4+b/4+Pm/+Pj5v/j4+b/4+Pm/+Pi5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lm/+Pj5v/j4+b/4+Pm/+Pj5v/j4+b/4+Pm/+Pi
- 5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OZ65eTmAOXk
- 5gTn5OUA4+LjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANvb2wDl5OYA5eTmAeXk5gDl5OYQ5eTm5eXk
- 5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPm/+nn5v/w7OX/7erm/+7q5v/u6ub/7urm/+7q5v/u6ub/7uvm/+bl
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+bk5v/u6+b/7urm/+7q5v/u6ub/7urm/+7q
- 5v/t6ub/8Ozl/+nn5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
- 5trl5OUK5eTlAOXk5QHl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj4+gA5eTmAOXk5gDl5OYE5eTmAOXk
- 5ljl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5v/p5+b/0NTn/2WF7f9df+3/X4Ht/1+A7f9fgO3/X4Ht/1t+
- 7f9ujOz/3t/m/+bl5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/m5eb/4ODm/3CN7P9bfu3/X4Ht/1+A
- 7f9fgO3/X4Ht/15/7f9khO3/ztPn/+ro5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob85eTm/+Xk5lrl5OYA5eTmA+Xk5gDk4eQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj4wDl5eYA5uXoAN7e1wDl5OcB5eTmBOTk
- 5QLj5OUA5eTmwuXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ht5f+ot+n/Ekfx/x5R8P8cT/H/HE/x/xxP
- 8f8dT/H/HE/x/xxP8f/Gzej/7Onm/+Li5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm/+vp5v/Jz+f/HlDw/xtO
- 8f8dT/H/HE/x/xxP8f8cT/H/H1Hw/xBG8f+ktOn/8u3l/+Li5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmvubk5gDm5OYB5eTmAObk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ePmAOXh5gDl5uUA5ePlAOXk5gLl5OYE5eTmA+Xk
- 5gHn5eIA5ubiAuXk5pXl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6u56f8gUvD/K1rw/ypZ
- 8P8pWfD/KVnw/ylZ8P8sW/D/HlHw/2yL7P/07+X/5+Xm/+Hh5v/i4ub/4uLm/+Hh5v/m5eb/9fDl/3GO
- 7P8eUfD/LFvw/ylZ8P8pWfD/KVnw/ylZ8P8sW/D/HlDw/6i26f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTmOuXk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADq6esA5eTmAOXk5gDm5OcA5eTmA+Xk5gTm5ugB5eXnAOXl
- 5wDn5+gA5uXnB+Xk5k/l5ObN5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q
- 8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8oWPD/HlHw/3eT7P/h4eb/8u7l//Ht5f/x7eX/8u7l/+Lh
- 5v97lez/H1Hw/yhY8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ypZ8P8bTvH/p7bp//Ht5f/i4ub/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oac5eTmAOXk5gPm4+YA5OLlAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk5gDl5OYA5eTmAuXk5QLl6OcA5d3kAOXg
- 5QPl5OYl5eTmXeXk5pzl5Obf5eTm/+Xk5v7l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds
- 5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8oWPD/HE/x/zxn7/95k+z/mKvq/5mr
- 6v96lOz/PWjv/xxP8f8oWPD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/Klnw/xxO8f+mten/7uvm/+Dg
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5u/m5OYb5eTmAOXk
- 5gHm5OcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5gDl5OYA5eTmAOXk5gLl5eYB7OHuAOXk
- 5jrl5OaT5eTm0+Xk5vrl5Ob/5eTm/+Xk5v/l5Ob85eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/IlPw/xpN
- 8f8aTvH/Gk7x/xpN8f8iU/D/Klnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/G07x/626
- 6f/+9+X/7erm/+Tk5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk
- 5nTl5OYA5eTmBOXk5wDm5eYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTnAOXk5wDl5OYC5eTmAOXk
- 5ifl5Oa95eTm/+Xk5v7l5Ob/5eTm/+Xk5vzl5Ob75eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8oWPD/Klnw/ypZ8P8qWfD/Klnw/yhY8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/yhY
- 8P8kVPD/T3Tu/3iT7P+ruen/6efm/+nn5v/j4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v3l5Ob/5eTm0eff6APo3+gA5uPmAeXj5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTmAuXk
- 5gDl5OY05eTm7eXk5v/l5Ob85eTm++Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/yhY8P8gUvD/Gk3x/xxO8f9Ye+3/2dvn/+nn5v/k4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTmR+Xk5gDl5OYD5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5wHl5ecA5eXoBuXk5tHl5Ob/5eTm+uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ylY8P8qWfD/K1rw/xZK8f9ZfO3/6efm/+Tj5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5Oaj5eTmAOXk5gPl4+YA5eTlAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eTmA+Xk5gDl5OZC5eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/Klnw/yBR8P+5w+j/7uvm/+Pj
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5u7l5OYa5eTmAOXk5gHm5OYAAAAAAAAA
- AAAAAAAAAAAAAAAAAADl5OYE5eTmAOXk5lzl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/HU/x/6Oz
- 6f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5mfl5OYA5eTmBOXk
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gPl5OYA5eTmR+Xk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q5
- 6f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/KFjw/yhY
- 8P8kVfD/xczo/+zp5v/j4+b/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmveXk
- 5wDl5OcB5eTlAOXj5gAAAAAAAAAAAAAAAAAAAAAA5eTmAeXk5gDl5OYa5eTm8OXk5v/l5Ob+5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5OPl/+Pi4//i4eP/4+Lk/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li
- 5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/KVjw/ypZ
- 8P8mVvD/FEnx/3iT7P/v6+b/4+Pm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
- 5v/l5Ob55eTmJ+Xk5gDl5OYC5eTmAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmAuXk5gDl5Oat5eTm/+Xk
- 5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Pi5P/m5ef/7+/y//Hx9P/s7O//5OLk/+Tj5f/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/yhY
- 8P8eUfD/G07x/yxb8P+Cm+v/6Obm/+bl5v/k5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm++Xk5v/l5OZw5eTmAOXk5gTm5OcAAAAAAAAAAAAAAAAAAAAAAOXl5wDl5OYD5eTmAOXk
- 5kfl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/j4uT/6+rt/+Hg4v+tqKb/nJWS/8C9vP/s7O//5uXn/+Tj5f/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/i4ub/8Ozl/6q56f8eUPD/KVnw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8oWPD/IVLw/2yK7P+ntun/z9Tn//Ht5f/l5Ob/5OPm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5r7l5eUA5eXmAeXj5gDj4egAAAAAAAAAAAAAAAAA5eTmAOXk
- 5gDk5OYB5OTnAOXk5rrl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+vq7f/Oy8v/VkpC/zEiGP8yIxj/NSYc/4F4c//o5+n/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Li5v/w7OX/qrnp/x5Q8P8pWfD/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ypZ8P8bTvH/sL3p//735f/n5eb/4uLm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm9eXk5h/l5OYA5eTmAubl5QAAAAAAAAAAAAAA
- AADk5OcA5ePmAOXk5gLl5OYA5eTmN+Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5ef/4uHj/1dLQ/8wIRb/QDIo/z8xJ/8+MCb/KxwQ/5KK
- h//w8PL/4+Hj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4uLm//Ds5f+quen/HlDw/ylZ8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/Klnw/xxO8f+ltOn/7uvm/+Hh5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmWuXk5gDl5OYE5OXnAAAA
- AAAAAAAAAAAAAAAAAADl4+UA5+nnAOXk5gPl5OYA5eTmkOXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/k4+X/4uHj/+Lh4//i4eP/4N/g/+3t8P+knZv/MCEW/0AyKf88LiT/PC4k/z0v
- Jv86LCL/Rjkw/9fV1v/l5Ob/4uDi/+Lh4//i4eP/4uHj/+Xk5v/j4ub/8Ozl/6u56f8eUPD/KVjw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX8P8qWfD/G07x/6e26f/x7eX/4uLm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oae5eTmAOXk
- 5gPk5OYAAAAAAAAAAAAAAAAAAAAAAOno6QDm5OYA5eTlAeTk5ADl5OUN5eTm1uXk5v/l5Ob95eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+rq7P/x8fT/8PDz//Hw8//u7vH/+/z//3lvaf8xIhj/PzEn/zwu
- JP88LiT/PC4k/z4wJv8zJRr/u7e2//v8///u7vH/8fDz//Hx8//w8PP/5+bo/+Li5v/w7Ob/qrjp/yBS
- 8P8sW/D/Klnw/ypZ8P8qWfD/Klnw/ypZ8P8qWfD/J1fw/ydX8P8nV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ypZ8P8qWfD/Klnw/ypZ8P8qWfD/Klnw/y1b8P8eUPD/p7bp//Ht5f/i4ub/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
- 5tXk4+YD5eTmAOXk5gHm5uYAAAAAAAAAAAAAAAAAAAAAAOXk5QDl5OYA5eTmA+Xk5gDl5OZA5eTm/eXk
- 5v7l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/p6Ov/ycXF/6CZlv+knpv/o52a/6Kcmf+noZ//VkpC/zco
- Hv89LyX/PC4k/zwuJP88LiT/PTAm/zUnHP98c27/qqSi/6KbmP+jnZv/opyZ/6ein//d3N3/5OTo//Ds
- 5f+suun/Ekjx/xxP8f8aTvH/Gk3x/xpN8f8aTfH/Gk3x/xtO8f8mV/D/J1fw/ydX8P8nV/D/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/G07x/xpN8f8aTfH/Gk3x/xpN8f8aTvH/HE/x/xBG8f+pt+n/8e3l/+Li
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v7l5Ob/5eTm++Xk5ifl5OYA5eTmAuXk5gAAAAAAAAAAAAAAAAAAAAAA5+fnAOXk5gDl5OYA5eTmBOXk
- 5gDl5OZ85eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+7t8P9rYFn/JxcM/zMkGv8xIhf/MSIX/zAh
- F/85KyH/PS8l/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zUnHP8wIRb/MSIX/zEiF/8zJBn/KBkN/6ym
- pP/v7/L/5ePj/9zd5v+Tp+n/iqDp/4yh6f+Loen/i6Hp/4uh6f+Noun/hJvq/y5c8P8mVvD/J1fw/ydX
- 8P8nV/D/J1fw/ydX8P8nV/D/Jlbw/yxb8P+Dm+r/jaPp/4uh6f+Loen/i6Hp/4yh6f+KoOn/kqbp/9rc
- 5v/n5ub/5OTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmV+Xk5gDl5OYD5eTnAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk
- 5gDl5OYA5ePmAuHi5ADl5Oay5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4uT/7Ozu/2pgWf81Jx3/QDMp/z4x
- J/8+MSf/PzEn/z0vJf88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjAm/z8xJ/8+MSf/PjEn/0E0
- Kv8zJBr/qKKg//Dw8//i4eP/5+bm//Pv6//z7+3/8+/s//Pv7P/z7+z/8+/s//Pv7f/08Ov/SXHu/x9R
- 8P8rWvD/J1fw/ydX8P8nV/D/J1fw/ypZ8P8gUvD/RW3u//Lu6//08O3/8+/s//Pv7P/z7+z/8+/s//Pv
- 7f/z7+v/5+bm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5OaG5eTmAOXk5gTl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eTmAOTj5QDl5OYC5OPlAOTk5hbl5Obc5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/s7O//a2BZ/zMk
- Gf8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PjEn/zEiF/+po6H/8PDz/+Ph4//l5Of/3tzM/93bxf/d28f/3dvH/93bx//d28f/2tnG/+nk
- zP+ntun/Gk7x/yNU8P8qWvD/Klnw/ypZ8P8qWvD/JFTw/xlN8f+js+n/6eXM/9rZxv/d28f/3dvH/93b
- x//d28f/3dvF/97czP/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5rTl5OYA5eTmA+bl5wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYD5eTmAOXk5jnl5Ob25eTm/+Xk5v7l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+zs
- 7/9qYFn/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP8+MSf/MSIX/6ijoP/w8PH/5OPs/97buv/GvSz/x74v/8e+L//Hvi7/x74u/8e+
- L//GvSv/x788/+zo2P+gsez/Ml/v/xtO8f8aTvH/Gk7x/xtO8f8xXu//nK7s/+zo2v/Hvzz/xr0r/8e+
- L//Hvi7/x74u/8e+L//Hvi//xr0r/97buf/m5u//5eTk/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v3l5Ob/5eTm1OPg5QHk4uUA5eTmAQAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5ecA5eTmAOTk5gDl5OYE5eTmAOXk5mHl5Ob/5eTm/OXk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/j4uT/7Ozv/2pgWf8zJBn/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/qKOg//Dw8P/k5O7/29ir/8W8Jf/HvzL/x74w/8e+
- MP/HvjD/x74w/8e+Mv/GvCf/2teu//Lv9P/X2eH/n7Dp/3+Z6/9/mOv/nq/p/9XY4f/y7/T/2tev/8a8
- J//HvjL/x74w/8e+MP/HvjD/x74w/8e/M//FvCT/29ep/+fm8v/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Obt5eTmEuXk
- 5gDl5OYBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5eUA5eTmAP///wDl5OYE5eTmAOXk
- 5ovl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Pi5P/s6+7/al9Y/zMkGv8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PzEn/zIjGP+po6H/8PDw/+Tk7v/c2K7/xrwn/8e+
- Mv/HvjD/x74w/8e+MP/HvjD/x74y/8a9K//LwkX/397U/+vq9f/y7uj/8u7l//Lu5f/y7uj/6+r1/+Df
- 1f/Lw0b/xr0r/8e+Mv/HvjD/x74w/8e+MP/HvjD/x78z/8W8Jv/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk
- 5vzl5OYp5eTmAOXk5gIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5uPlAOXk
- 5gHl4+YC3NfmAOXk5rTl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/k4+X/4eDi/+/v8f9sYVr/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z0vJf9AMyn/Lh8U/6agnv/w8PH/5OPt/9vY
- rP/FvCf/x74y/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8a9Kf/KwUD/2dWh/+Df2P/i4uj/4uLo/+Df
- 2P/Z1aL/ysJA/8a9Kf/HvjL/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob95eTm/+Xk5jrl5OYA5eTmAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AADl5OYA5eTmAOXk5gLj4+YA5OPmE+Xk5tfl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+bl5//u7fD/4+Lk/2RaUv80JRv/PjAm/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP89LyX/OSsh/zUmHP8/MSj/tK+s/+7t
- 7P/l5fP/3Nmy/8W8Jv/HvjL/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8a9LP/FvCX/ycA8/8zF
- UP/MxVD/ycA8/8W8Jf/GvSz/x74y/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm
- 8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmSuXk5gDl5OYDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAPT09QDl5OYA5eTmAOXk5gPm5OYA5eTmMuXk5vPl5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5uj/5eTm/4qCff9KPTT/PTAm/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zgqIP9ENi3/pqCe/9rY
- 2P/m5e3/5uXs/+Ti3v/Y1Jj/xrwo/8e+Mv/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74x/8e/
- Mv/HvS3/xrwp/8a8Kf/HvS3/x74y/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8
- Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5OZW5eTmAOXk5gMAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDl5OYA5eTmAOXk5gTl5OYA5eTmXuXk5v/l5Ob85eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5OPl/+zr7v95cGr/KxsQ/zkrIf88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJf87LSP/PC4k/725
- uf/19ff/6Ojx/97cvv/PyGH/ycA5/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+Mf/HvjL/x74y/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5lvl5OYA5eTmBAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5ADl5OYA4+flAOXk5gTl5OYA5eTmluXk
- 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/u7vH/trGw/zIjGP8/MSj/PS8l/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PzEn/zEi
- F/+DenX/8/P0/+Lh6v/b16v/xr0q/8W8Jf/Hvi7/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmW+Xk5gDl5OYEAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5wD/+eIA5eTmAebk
- 5QDm5OYN5eTm1uXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Hj//Dv8v97cm3/MSIY/z8x
- J/88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP89LyX/Nige/725t//t7fL/4d/Y/8nAPP/GvSv/yL80/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk5v/l5OZW5eTmAOXk
- 5gMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gDl5OYA5eTmA+Xk5gDl5OZL5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4+X/6urs/2JX
- UP80Jhv/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PS8l/zssIv9BMyr/0c/O/+vr9v/b167/xrwo/8e/M//HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk
- 5knl5OYA5eTmAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5+bnAOXk5gDl5OYA5uTkAeTj7QHl5ObQ5eTm/+Xk5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj
- 5f/q6uz/Y1hR/zQlG/8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LyX/Oywi/0EzKv/Rzs3/6+v1/9vYsf/GvCj/x78z/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v3l5Ob/5eTmOeXk5gDl5OYDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5eXlAOXk5gDl5OYC5eTmAOXk5rTl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/4+Hj//Dw8v99dG//MSIX/z8xJ/88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP89LyX/Nicd/7u3tf/t7fH/4uDd/8rCRP/GvCf/yL81/8e+
- Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj
- 4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob+5eTm/+Xk5vzl5OYn5eTmAOXk5gIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4+TjAOXk5gTl5OYA5eTmmuXk5v/l5Ob75eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/j4uT/7u3w/7m1tP8zJBn/PzEn/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/f3Zx//Pz9P/i4en/3dq6/8i/
- Nf/FuyP/xr0r/8e+Lv/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vY
- rP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm7eXk5hHl5OYA5eTmAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmBOXk5gDl5OZ05eTm/+Xk
- 5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/k4uT/7ezv/392cf8rGxD/OCof/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP86LCL/uLOy//X2
- 9//p6fL/4d/P/9PNdv/Lw0j/yMA3/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5ObRk73YAOHi5QDl5OYBAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOTk5ADl5OYD5eTmAOXk
- 5k7l5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/m5ef/5+bp/5GKhv9QQzv/PzEn/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PS8l/zkr
- If9AMyn/npiV/9XS0f/k4+n/5+bw/+bl7P/a1qP/xrwn/8e+Mv/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74y/8W8Jf/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5q3l5OYA5eTmA+Tj5gAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gLl5OYA5eTmJeXk5vvl5Ob/5eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/l5Ob/7u7w/+jn
- 6f9mW1T/NCUa/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PS8l/zosIv80JRv/Oiwi/7Ktqv/u7ez/5eTw/9zZsP/FvCb/x74y/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjL/xbwl/9vYrP/n5vH/5ePj/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmf+Xk5gDl5OYE5uXnAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5uTmAefj5wDo4+gF5eTm2uXk5v/l5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xj
- 5f/h4OL/7u7w/2thWv8zJBn/PjAm/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PS8l/0EzKf8vIBX/p6Ge//Dw8f/k4+3/29is/8W8J//HvjL/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+Mv/FvCX/29is/+fm8f/l4+P/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk5v/l5OZK5eTmAOXk
- 5gPj4ucAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAADl5OcA5eTmA+Xk5gDl5Oan5eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Pi5P/s6+7/al9Z/zMkGv8+MCb/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjEn/zIjGP+po6H/8PDw/+Tk7v/c2K7/xrwo/8e/
- M//HvjH/x74x/8e+Mf/HvjH/x74x/8e+Mf/HvjD/x74w/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74x/8e+Mf/HvjH/x74x/8e+Mf/HvjH/yL8z/8W8Jv/b2Kz/5+bx/+Xj4//l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob/5eTm8eXk
- 5hnl5OYA5eTmAeXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXj5gDl5OYE5eTmAOXk5mbl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/4+Lk/+zs7/9qYFn/MyQZ/z4wJv88LiT/PC4k/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP8+MSf/MSIX/6ijoP/w8PD/5OTv/9vX
- qv/FuyL/x74v/8e9Lf/HvS3/x70t/8e9Lf/HvS3/x70u/8e+MP/HvjD/x74w/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvS7/x70t/8e9Lf/HvS3/x70t/8e9Lf/HvjD/xLsh/9vXqf/n5vL/5ePj/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
- 5v/l5ObD5eTmAOXk5wHl5OYA////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5+XlAOXk5gLl5OYA5eTmJuXk5vjl5Ob/5eTm/uXk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/j4uT/7Ozv/2pgWf8zJBn/PjAm/zwuJP88LiT/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/z4xJ/8xIhf/qKOg//Dw
- 8f/k4+v/39zA/8nAO//JwDz/ycE8/8nAPP/JwDz/ycA7/8nBPP/JwDr/x74x/8e+MP/HvjD/x74w/8e+
- MP/HvjD/x74w/8e+MP/HvjD/x74x/8nAOv/JwTz/ycA7/8nAPP/JwDz/ycE8/8nAPP/JwDr/39y//+bm
- 7v/l5OT/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob75eTm/+Xk5nvl5OYA5eTmBObk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzP8A5uTmAOTj5gHk4+YA5eTmwuXk
- 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Pi5P/s7O//a2BZ/zMkGv8+MCb/PC4k/zwu
- JP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PC4k/zwuJP88LiT/PjEn/zEi
- GP+po6H/8PDz/+Lh4//l5ej/4+Lb/+Lh1v/i4df/4uHW/+Lh1v/i4db/4+HZ/+Hfzv/JwT7/x70u/8e+
- Mf/HvjD/x74w/8e+MP/HvjD/x74x/8e+Lv/JwTz/4d/N/+Ph2f/i4db/4uHW/+Lh1v/i4df/4uHW/+Pi
- 2//l5Oj/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTmM+Xk5gDl5OYC5uXnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmBOXk
- 5gDl5OZy5eTm/+Xk5vvl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5OLk/+zs7v9qX1j/NScc/0Ay
- Kf8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4wJv8+MCb/PjAm/z4w
- Jv9BMyn/MiQZ/6ehn//w8PP/4uHj/+Xk5f/l5en/5uXq/+bl6v/m5er/5uXq/+bl6v/l5Oj/5+fz/9HL
- bv/EuyD/yL82/8e+Mf/HvjD/x74w/8e+Mf/Ivzb/xLsg/9HLbP/n5/P/5eTo/+bl6v/m5er/5uXq/+bl
- 6v/m5er/5eXp/+Xk5f/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm/+Xk5s3m3t4A4P7/AOXk5gDk4+UAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObl
- 5gDl5OYC5eTnAOXk5yHl5Obz5eTm/+Xk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/j4uT/7u7w/21j
- XP8oGQ3/NSYb/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIkGf8yJBn/MiQZ/zIk
- Gf8yJBn/MiQZ/zQlG/8qGxD/raim//Dv8v/j4eP/5eTm/+Xk5f/l5OX/5eTl/+Xk5f/l5OX/5eTl/+Xk
- 5P/m5en/4uHW/8vESv/EuyD/xr0s/8e+L//Hvi//xr0s/8S7IP/Lw0j/4uDV/+bl6v/l5OT/5eTl/+Xk
- 5f/l5OX/5eTl/+Xk5f/l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmeOXk5gDl5OYE5eTmAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5OTlAObk5wDl5OYD5eTmAOXk5qjl5Ob/5eTm/OXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj
- 5f/p6Or/zcrL/6iioP+rpqT/q6Wj/6ulo/+rpaP/q6Wj/6ulo/+rpaP/q6Wj/6ulo/+rpaP/q6Wj/6ul
- o/+rpaP/q6Wj/6ulo/+rpqP/qqSi/6+qqP/f3t//5uXo/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5P/m5ez/4+Lc/9TPgP/KwkX/yL80/8i/NP/KwkX/1M9//+Pi2//m5ez/5eTk/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/+Xk5vXl5OYj5eTmAOXk5gLl5OcAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5+XlAOXk5gPl5OYA5eTmQuXk5v/l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Tj5f/q6ev/8PDz//Dv8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw
- 8v/w8PL/8PDy//Dw8v/w8PL/8PDy//Dw8v/w8PL/7+/y/+bl6P/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5P/l5en/5+fy/+Tj4f/h387/4d/N/+Tj4f/n5/L/5eXp/+Xk
- 5P/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmpeXk5gDl5OYD5eTmAObl
- 5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTmAObk5gHm5OYA5eTmwOXk5v/l5Ob85eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Tj5f/i4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph
- 4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4eP/4+Hj/+Ph4//j4uP/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5f/l4+P/5eTn/+bl7P/m5ez/5eTn/+Xj
- 4//l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5OY+5eTmAOXk
- 5gPl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTmA+Xk5gDl5OZM5eTm/+Xk
- 5v3l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5OX/5eTl/+Xk
- 5f/l5OX/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vzl5Ob/5eTmtOXk
- 5gDl5OYC5eTmAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk5gDn5eYA4+PlAeLj
- 5QDl5Oa55eTm/+Xk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk
- 5v/l5OY/5eTmAOXk5gPl5OcA4uLiAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5ubmAOXk
- 5gDl5OYC5eTmAOXk5jnl5Ob95eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5vzl5Ob/5eTmquXk5gDl5OYC5eTnAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5eTmANzi8QDl5OYD5eTmAOXk5pLl5Ob/5eTm++Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob+5eTm/+Xk5vXl5OYq5eTmAOXk5gLl5OYA5uTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAADr6usA5eTmAOXk5gHl5eYA5eXmEeXk5tvl5Ob/5eTm/eXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmfuXk5gDl5OYD5+TlAOXk5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADk5OcA5eTnAOXk5gPl5OYA5eTmSOXk5v/l5Ob95eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm/+Xk5svl4+YG6ODkAOXk5gHl5OYA5ubmAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5OYA5eTlAOXk5gPl5OYA5eTmh+Xk
- 5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v/l5Ob35eTmNOXk5gDl5OYC5eXmAOXk5wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj5QDm5OYA5eTmAebl
- 5gHi4OcC5eTmueXk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5m7l5OYA5eTmBObj5gDl5OYA6OjoAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5gDl5OYA5eTmAuXk5gDl5OYW5eTm2eXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Oaf5eTlAOXk5gPl5ecA5eTmAObm
- 5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOXk5gDl5OYA5eTmA+Xk5gDl5OYp5eTm6uXk5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTmxOTl5giU7fsA5eTmAePk
- 5wDl5OYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5OTmAOXk5gDl5OYA5eTmA+Xk5gDl5OY45eTm8OXk5v/l5Ob85eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk5tbl5eYW5eXmAOXk
- 5gLl5OYA5eTmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5uXmAOXk5gDl5OYA5eTmBOXk5gDl5OY75eTm8OXk
- 5v/l5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm++Xk5v/l5Obf5eTmI+bk
- 5gDl5OYD5eTmAOXk5gDl5ecAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXk5gDl5OYA5eTmBOXk
- 5gDl5OY05eTm5+Xk5v/l5Ob75eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Ob/5eTm3uXk
- 5iTl5OYA5eTmA+Xk5gDl5OYA4+PoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTmAOXl
- 5wDl5ecA5eTmBOXk5gDl5OYi5eTm0+Xk5v/l5Ob75eTm/uXk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob75eTm/+Xk
- 5tLl5OYf5eTmAOXk5gPl5OYA5eTmAOfl5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA5OPlAOXl5gDl5eYA5eTmA+fk5gDm5OYO5eTmr+Xk5v/l5Ob85eTm/eXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob95eTm++Xk
- 5v/l5Oa65uTmEObj5QDl5OYD5eTmAOXk5gDm5ucAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAA5uPmAOXk5gDl5OYA5eTmAuXk5wLi4+cA5eTmeOXk5vzl5Ob/5eTm++Xk
- 5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/OXk
- 5v3l5Ob/5eTmjePm6QLm5OUB5eTmAuXk5gDl5OYA4eHjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eTlAOXk5gDl5OYA5eTmAeXk5gTl5OYA5eTmN+Xk
- 5tTl5Ob/5eTm/OXk5vzl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
- 5vvl5Ob/5eTm6+Xk5lTl5OYA5eTmA+Xk5gLl5OYA5eTmAObj5gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5eXlAOXk5gDl5OYA5eTmAeXk
- 5gTm4+cA5ePmCOXk5oTl5Ob45eTm/+Xk5vzl5Ob85eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
- 5vzl5Ob+5eTm/+Xk5rHm5OYb5eTmAOXk5gTl5OYB5eTmAOXk5gDf398AAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXk
- 5wDn5OQA5uTlAOXk5gPl5OYD5eTmAOXk5inl5Oa05eTm/+Xk5v/l5Ob95eTm/OXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/eXk
- 5vzl5Ob+5eTm/+Xk5uDl5OZU4+LlAOTj5gHl5OYD5eTmAOXk5gDl5OYAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOLi6QDl5OYA5eTmAOXk5gHl5OYD5OLmAeTi5wDl5OZC5eTmw+Xk5v/l5Ob/5eTm/uXk
- 5vvl5Ob95eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob+5eTm/OXk
- 5vzl5Ob+5eTm/+Xk5uzl5OZ55uTmC+bk5gDl5OYD5eTmAuXk5gDl4+YA5uXmAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADl5ecA5OPlAOTj5QDl5OYC5eTmA/Hs4gD//9kA5eTmQuXk
- 5rfl5Ob75eTm/+Xk5v/l5Ob95eTm++Xk5v3l5Ob+5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v7l5Ob85eTm++Xk
- 5v/l5Ob+5eTm/+Xk5uPl5OZ65ePlE+Xk5gDl5OcC5eTmA+bk5gDm5OYA5eTmAP///wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADj4+MA5eTmAOXl5gDl5eUA5eTmA+Xk
- 5gLk4+EA5OPjAOXk5ivl5OaL5eTm3+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob85eTm++Xk5vzl5Ob95eTm/uXk
- 5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/uXk5v7l5Ob95eTm/OXk5vvl5Ob95eTm/+Xk
- 5v/l5Ob/5eTm+eXk5r7l5OZa5eTmC+Xj5gDl5OYB5eTmA+Xk5gHm5OYA5uTmAOTk5AAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6ujqAOXk
- 5gDm5eYA5uXmAOXk5gPl5OYC5ebnAOXl5gDl5eYI5eTmReXk5pLl5ObW5eTm/OXk5v/l5Ob/5eTm/+Xk
- 5v/l5Ob/5eTm/+Xk5v3l5Ob85eTm/OXk5vzl5Ob85eTm/eXk5v7l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk
- 5v/l5Obw5eTmu+Xk5nLl4+Yj5ersAObn6gDk5OYB5eTmBOXk5gHl5OYA5eTmAObk5gAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOXk5wDl5OYA5eTnAOXk5wDl5OYC5eTmBOXl5wDl4eYA5eDnAOTh6QTl5OYq5eTmXuXk
- 5pDl5Oa+5eTm3uXk5vPl5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5v/l5Ob/5eTm/+Xk5vvl5Obu5eTm0uXk
- 5q7l5OZ+5eTmRubk5hXk5OYA5OTmAOTk5gDl5eYC5eTmBOXk5gHj4+UA5OPmAOfl5wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADm4+YA5eTmAOXl5wDk5ugA5eTmAeXk5gPl5OYE5uPmAObj
- 5gDk5OYA5+TmAOnm5gDl5eYF5eTmGuXk5i/l5OY+5eTmT+Xk5lXl5OZY5eTmVOXk5kjl5OY65eTmJ+Xk
- 5hLo4eYC6ODnAObj5gDl5OcA5eTnAObk5gLl5OYE5eTmAuXi5QDk4+MA5OTlAOfj5wAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAObm5gDk5OUA5uHoAODy
- 3ADl4+cB5eTmAuXk5gTl5OYE5OTmAubi5QDm4uYA4ejmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AADl5eQA5eXjAOXl4wDl5OYD5eTmBOXk5gPl5OYB5OTmAOfl6wDm5ecA5ePlAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAOTk5ADo5usA5OLkAP///wDm4+YA5eTmAeXk5gHl5OYC5eTmA+Xk5gPl5OYD5eTmA+Xk
- 5gPl5OYD5eTmA+Xk5gLl5OYB5ePmAObk5wDi598A6OPoAOTk5gDm5uYAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
- AAAAAAAA/////////////////+AAAP///////oD////////gAAD///////6YB///////4AAA///////8
- hEAf/////+AAAP///////IAVAX/////gAAD///////yAAIgH////4AAA///////5AAACoC///+AAAP//
- ////+QAAABCAv//gAAD///////kAAAAAVAX/4AAA///////yAAAAAAIl/+AAAP//////8gAAAAAACv/g
- AAD///////IAAAAAAAT/4AAA///////0AAAAAAAE/+AAAP//////5AAAAAAABf/gAAD//////+QAAAAA
- AAn/4AAA///////IAAAAAAAJ/+AAAP//////yAAAAAAAEf/gAAD//////4gAAAAAABP/4AAA//////+Q
- AAAAAAAT/+AAAP/////+UAAAAAAAJ//gAAD//////SAAAAAAACf/4AAA//////JAAAAAAAAn/+AAAP//
- /wAAgAAAAAAAT//gAAD///IAcgAAAAAAAE//4AAA///ovIAAAAAAAABP/+AAAP//6gAAAAAAAAAAX//g
- AAD//8QAAAAAAAAAAJ//4AAA///IAAAAAAAAAACf/+AAAP//yAAAAAAAAAAAn//gAAD//9AAAAAAAAAA
- AJ//4AAA///QAAAAAAAAAACf/+AAAP//0AAAAAAAAAAAX//gAAD//5AAAAAAAAAAAF//4AAA//+QAAAA
- AAAAAABP/+AAAP//sAAAAAAAAAAAT//gAAD//6AAAAAAAAAAAE//4AAA//+gAAAAAAAAAAAn/+AAAP//
- oAAAAAAAAAAAJ//gAAD//6AAAAAAAAAAACP/4AAA//+gAAAAAAAAAAAT/+AAAP//oAAAAAAAAAAAE//g
- AAD//6AAAAAAAAAAAAn/4AAA//8gAAAAAAAAAAAJ/+AAAP//oAAAAAAAAAAAAP/gAAD//yAAAAAAAAAA
- AAT/4AAA//8gAAAAAAAAAAAEf+AAAP//QAAAAAAAAAAAAn/gAAD//kAAAAAAAAAAAAJ/4AAA//5AAAAA
- AAAAAAABP+AAAP/+QAAAAAAAAAAAAT/gAAD//IAAAAAAAAAAAACf4AAA//qAAAAAAAAAAAAAn+AAAP/A
- gAAAAAAAAAAAAI/gAAD9AgAAAAAAAAAAAABP4AAA9CgAAAAAAAAAAAAAT+AAAOmAAAAAAAAAAAAAACfg
- AADQAAAAAAAAAAAAAAAn4AAAqAAAAAAAAAAAAAAAE+AAAJAAAAAAAAAAAAAAABPgAACgAAAAAAAAAAAA
- AAAT4AAAoAAAAAAAAAAAAAAACeAAAKAAAAAAAAAAAAAAAAngAACgAAAAAAAAAAAAAAAJ4AAAoAAAAAAA
- AAAAAAAABOAAAJAAAAAAAAAAAAAAAATgAACQAAAAAAAAAAAAAAAE4AAAiAAAAAAAAAAAAAAAAuAAAMgA
- AAAAAAAAAAAAAAJgAADEAAAAAAAAAAAAAAACYAAA5AAAAAAAAAAAAAAAAWAAAOoAAAAAAAAAAAAAAAFg
- AAD1AAAAAAAAAAAAAAABIAAA+IAAAAAAAAAAAAAAASAAAPqAAAAAAAAAAAAAAAEgAAD9QAAAAAAAAAAA
- AAAAoAAA/qAAAAAAAAAAAAAAAKAAAP9QAAAAAAAAAAAAAACgAAD/SAAAAAAAAAAAAAAAoAAA/6gAAAAA
- AAAAAAAAAKAAAP/UAAAAAAAAAAAAAACgAAD/6gAAAAAAAAAAAAAAoAAA//EAAAAAAAAAAAAAAKAAAP/1
- AAAAAAAAAAAAAACgAAD/+oAAAAAAAAAAAAAAoAAA//wAAAAAAAAAAAAAAKAAAP/+QAAAAAAAAAAAAACg
- AAD//kAAAAAAAAAAAAAAoAAA//9AAAAAAAAAAAAAAKAAAP//QAAAAAAAAAAAAAEgAAD//0AAAAAAAAAA
- AAABIAAA//9AAAAAAAAAAAAAAWAAAP//IAAAAAAAAAAAAAFgAAD//yAAAAAAAAAAAAACYAAA//8gAAAA
- AAAAAAAAAmAAAP//kAAAAAAAAAAAAAJgAAD//5AAAAAAAAAAAAAG4AAA//+QAAAAAAAAAAAABOAAAP//
- yAAAAAAAAAAAAATgAAD//8gAAAAAAAAAAAAJ4AAA///EAAAAAAAAAAAACeAAAP//5AAAAAAAAAAAABHg
- AAD//+IAAAAAAAAAAAAT4AAA///yAAAAAAAAAAAAI+AAAP//8QAAAAAAAAAAACfgAAD///kAAAAAAAAA
- AABX4AAA///6gAAAAAAAAAAAT+AAAP///UAAAAAAAAAAAK/gAAD///4AAAAAAAAAAAFf4AAA///+oAAA
- AAAAAAACP+AAAP///1AAAAAAAAAAAr/gAAD///+oAAAAAAAAAAV/4AAA////1AAAAAAAAAAK/+AAAP//
- /+oAAAAAAAAAFf/gAAD////lAAAAAAAAACv/4AAA////+oAAAAAAAABX/+AAAP////0gAAAAAAAAL//g
- AAD////+kAAAAAAAAl//4AAA/////0gAAAAAAAS//+AAAP////+SAAAAAAARf//gAAD/////6IAAAAAA
- Jf//4AAA//////JAAAAAAIv//+AAAP/////9CAAAAAIv///gAAD//////0IAAAAQn///4AAA///////Q
- QAAAov///+AAAP//////+hIACQv////gAAD///////8Bf6Bf////4AAA////////9AAD/////+AAAP//
- ///////////////gAAA=
-
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Program.cs b/Tongue extraction_cropresizemethod/Tongue extraction/Program.cs
deleted file mode 100644
index c2f1ac0..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Program.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading.Tasks;
-using System.Windows.Forms;
-using System.Text;
-
-namespace Tongue_extraction
-{
- static class Program
- {
- [STAThread]
- static void Main()
- {
- try
- {
- Application.SetUnhandledExceptionMode(UnhandledExceptionMode.CatchException);
- Application.ThreadException += new System.Threading.ThreadExceptionEventHandler(Application_ThreadException);
- AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException);
-
- Application.EnableVisualStyles();
- Application.SetCompatibleTextRenderingDefault(false);
- Application.Run(new Form1());
- }
- catch (Exception ex)
- {
- string str = GetExceptionMsg(ex, string.Empty);
- MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- }
- }
-
-
- static void Application_ThreadException(object sender, System.Threading.ThreadExceptionEventArgs e)
- {
- string str = GetExceptionMsg(e.Exception, e.ToString());
- MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- //LogManager.WriteLog(str);
- }
-
- static void CurrentDomain_UnhandledException(object sender, UnhandledExceptionEventArgs e)
- {
- string str = GetExceptionMsg(e.ExceptionObject as Exception, e.ToString());
- MessageBox.Show(str, "System Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
- //LogManager.WriteLog(str);
- }
-
- static string GetExceptionMsg(Exception ex, string backStr)
- {
- StringBuilder sb = new StringBuilder();
- sb.AppendLine("****************************Exception Text****************************");
- sb.AppendLine("【Time】:" + DateTime.Now.ToString());
- if (ex != null)
- {
- sb.AppendLine("【Exception Type】:" + ex.GetType().Name);
- sb.AppendLine("【Exception Information】:" + ex.Message);
- sb.AppendLine("【Stack Call】:" + ex.StackTrace);
- }
- else
- {
- sb.AppendLine("【Unhandled Exception】:" + backStr);
- }
- sb.AppendLine("***************************************************************");
- return sb.ToString();
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/AssemblyInfo.cs b/Tongue extraction_cropresizemethod/Tongue extraction/Properties/AssemblyInfo.cs
deleted file mode 100644
index 604a626..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// アセンブリに関する一般情報は以下の属性セットをとおして制御されます。
-// アセンブリに関連付けられている情報を変更するには、
-// これらの属性値を変更してください。
-[assembly: AssemblyTitle("Tongue extraction")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("Tongue extraction")]
-[assembly: AssemblyCopyright("Copyright © 2018")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// ComVisible を false に設定すると、その型はこのアセンブリ内で COM コンポーネントから
-// 参照不可能になります。COM からこのアセンブリ内の型にアクセスする場合は、
-// その型の ComVisible 属性を true に設定してください。
-[assembly: ComVisible(false)]
-
-// このプロジェクトが COM に公開される場合、次の GUID が typelib の ID になります
-[assembly: Guid("d382f9e7-a41d-4d82-a59b-cf4095134d6b")]
-
-// アセンブリのバージョン情報は次の 4 つの値で構成されています:
-//
-// メジャー バージョン
-// マイナー バージョン
-// ビルド番号
-// Revision
-//
-// すべての値を指定するか、下のように '*' を使ってビルドおよびリビジョン番号を
-// 既定値にすることができます:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Resources.Designer.cs b/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Resources.Designer.cs
deleted file mode 100644
index 84d1885..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Resources.Designer.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace Tongue_extraction.Properties {
- using System;
-
-
- ///
- /// A strongly-typed resource class, for looking up localized strings, etc.
- ///
- // This class was auto-generated by the StronglyTypedResourceBuilder
- // class via a tool like ResGen or Visual Studio.
- // To add or remove a member, edit your .ResX file then rerun ResGen
- // with the /str option, or rebuild your VS project.
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")]
- [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- internal class Resources {
-
- private static global::System.Resources.ResourceManager resourceMan;
-
- private static global::System.Globalization.CultureInfo resourceCulture;
-
- [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
- internal Resources() {
- }
-
- ///
- /// Returns the cached ResourceManager instance used by this class.
- ///
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Resources.ResourceManager ResourceManager {
- get {
- if (object.ReferenceEquals(resourceMan, null)) {
- global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Tongue_extraction.Properties.Resources", typeof(Resources).Assembly);
- resourceMan = temp;
- }
- return resourceMan;
- }
- }
-
- ///
- /// Overrides the current thread's CurrentUICulture property for all
- /// resource lookups using this strongly typed resource class.
- ///
- [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
- internal static global::System.Globalization.CultureInfo Culture {
- get {
- return resourceCulture;
- }
- set {
- resourceCulture = value;
- }
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Resources.resx b/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Resources.resx
deleted file mode 100644
index af7dbeb..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Resources.resx
+++ /dev/null
@@ -1,117 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- text/microsoft-resx
-
-
- 2.0
-
-
- System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
- System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
-
-
\ No newline at end of file
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Settings.Designer.cs b/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Settings.Designer.cs
deleted file mode 100644
index 15b8fb1..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Settings.Designer.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-//------------------------------------------------------------------------------
-//
-// This code was generated by a tool.
-// Runtime Version:4.0.30319.42000
-//
-// Changes to this file may cause incorrect behavior and will be lost if
-// the code is regenerated.
-//
-//------------------------------------------------------------------------------
-
-namespace Tongue_extraction.Properties {
-
-
- [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.5.0.0")]
- internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
-
- private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
-
- public static Settings Default {
- get {
- return defaultInstance;
- }
- }
- }
-}
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Settings.settings b/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Settings.settings
deleted file mode 100644
index 3964565..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/Properties/Settings.settings
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/cropped/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/cropped/20180419045627.jpg
deleted file mode 100644
index f43b7ad..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/cropped/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/cropresized/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/cropresized/20180419045627.jpg
deleted file mode 100644
index aafc381..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/cropresized/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/detection/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/detection/20180419045627.jpg
deleted file mode 100644
index 034c23b..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/detection/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/extraction/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/extraction/20180419045627.jpg
deleted file mode 100644
index 459eb3f..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/extraction/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/mask/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/mask/20180419045627.jpg
deleted file mode 100644
index a5b6106..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/mask/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/mask_changed2/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/mask_changed2/20180419045627.jpg
deleted file mode 100644
index b7ffdb2..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/mask_changed2/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output256/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output256/20180419045627.jpg
deleted file mode 100644
index b29964a..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output256/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_changed1/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_changed1/20180419045627.jpg
deleted file mode 100644
index 353282e..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_changed1/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_changed2/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_changed2/20180419045627.jpg
deleted file mode 100644
index 77ad378..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_changed2/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_resized/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_resized/20180419045627.jpg
deleted file mode 100644
index d2034d3..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Debug/output_resized/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/cropped/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/cropped/20180419045627.jpg
deleted file mode 100644
index f43b7ad..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/cropped/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/cropresized/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/cropresized/20180419045627.jpg
deleted file mode 100644
index aafc381..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/cropresized/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/detection/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/detection/20180419045627.jpg
deleted file mode 100644
index 034c23b..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/detection/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask/20180419045627.jpg
deleted file mode 100644
index 155c1b8..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask_changed1/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask_changed1/20180419045627.jpg
deleted file mode 100644
index 78f9625..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask_changed1/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask_changed2/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask_changed2/20180419045627.jpg
deleted file mode 100644
index 1c24e76..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/mask_changed2/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output256/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output256/20180419045627.jpg
deleted file mode 100644
index b29964a..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output256/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_changed1/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_changed1/20180419045627.jpg
deleted file mode 100644
index 353282e..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_changed1/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_changed2/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_changed2/20180419045627.jpg
deleted file mode 100644
index 77ad378..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_changed2/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_resized/20180419045627.jpg b/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_resized/20180419045627.jpg
deleted file mode 100644
index d2034d3..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/bin/x64/Release/output_resized/20180419045627.jpg
+++ /dev/null
Binary files differ
diff --git a/Tongue extraction_cropresizemethod/Tongue extraction/packages.config b/Tongue extraction_cropresizemethod/Tongue extraction/packages.config
deleted file mode 100644
index 6725092..0000000
--- a/Tongue extraction_cropresizemethod/Tongue extraction/packages.config
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/reference/Analysis/Analysis.cpp b/reference/Analysis/Analysis.cpp
new file mode 100644
index 0000000..e68e032
--- /dev/null
+++ b/reference/Analysis/Analysis.cpp
@@ -0,0 +1,497 @@
+#include "Analysis.h"
+#include "ProcessList.h"
+
+//-------------------------------------------------------------------------------
+// �R���X�g���N�^
+CAnalysis::CAnalysis(void)
+{
+ m_RefProc = new CReferenceProc;
+ m_Casmatch = new CDetectCasmatch;
+ m_Tracker = new CTracking;
+ m_NumProcFrames = 0;
+}
+
+//-------------------------------------------------------------------------------
+// �f�X�g���N�^
+CAnalysis::~CAnalysis(void)
+{
+ SAFE_DELETE(m_RefProc);
+ SAFE_DELETE(m_Casmatch);
+ SAFE_DELETE(m_Tracker);
+
+ cvDestroyAllWindows();
+}
+
+//-------------------------------------------------------------------------------
+// �S�ʓI�ȏ�����
+bool CAnalysis::GlobalInit()
+{
+#ifndef DEBUG_TRACK_ONLY
+ CALL(m_RefProc->Init());
+ CALL(m_Casmatch->Init());
+#endif
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �S�̏���
+bool CAnalysis::GlobalProc()
+{
+ CALL(this->GlobalInit());
+
+ CProcessList pl;
+ CALL(pl.Init());
+
+ FILE *fp;
+ char filename[PATH_LEN];
+ sprintf_s(filename, PATH_LEN, "output\\GlobalLog20110113.csv");
+ fopen_s(&fp, filename, "w");
+ if (!fp) ERROR_RET("Can't open Global log file.");
+ fprintf(fp, "���t, ����, �팱��, ��, �G���[, ��������, ����, "
+ "L1(t0), a1(t0), b1(t0), L1(t10), a1(t10), b1(t10), "
+ "L1(t20), a1(t20), b1(t20), L1(t30), a1(t30), b1(t30), "
+ "L2(t0), a2(t0), b2(t0), L2(t10), a2(t10), b2(t10), "
+ "L2(t20), a2(t20), b2(t20), L2(t30), a2(t30), b2(t30), "
+ "L3(t0), a3(t0), b3(t0), L3(t10), a3(t10), b3(t10), "
+ "L3(t20), a3(t20), b3(t20), L3(t30), a3(t30), b3(t30), "
+ "L4(t0), a4(t0), b4(t0), L4(t10), a4(t10), b4(t10), "
+ "L4(t20), a4(t20), b4(t20), L4(t30), a4(t30), b4(t30), \n");
+
+ char workpath[PATH_LEN];
+ bool noRefInfo = false;
+ do
+ {
+#ifndef DEBUG_TRACK_ONLY
+ if (pl.IsFirstTOD())
+ {
+ CALL(pl.GetRefDir(workpath));
+ printf("\n***** RefProc: %s **********\n\n", workpath);
+
+ if (m_RefProc->CalcMatrix(workpath, pl.CurrentRefFile())) noRefInfo = false;
+ else noRefInfo = true;
+ }
+#endif
+ if (noRefInfo) continue;
+
+ if(!pl.GetFrameDir(workpath)) continue;
+ printf("\n***** MeasurementProc: %s **********\n\n", workpath);
+
+ int err = this->MeasurementProc(workpath, pl.CurrentImgFile());
+ if (err == -1) continue;
+
+ // ���O�ɏo��
+ char buffer[256];
+ pl.MakeParamStr(buffer, sizeof(buffer));
+ fprintf(fp, "%s, %d, %d, %f, ",
+ buffer, err, m_NumProcFrames, m_Tracker->TotalMovement() / m_NumProcFrames);
+
+ // ���O�ɏo��(�̈�1)
+ for (int i = 0; i < ((m_NumProcFrames - 1) / m_FrameScale) + 1; i++)
+ {
+ fprintf(fp, "%.3f, %.3f, %.3f, ",
+ m_ROILabT1[i].val[0], m_ROILabT1[i].val[1], m_ROILabT1[i].val[2]);
+ }
+ for (int i = ((m_NumProcFrames - 1) / m_FrameScale) + 1; i < 4; i++)
+ {
+ fprintf(fp, " , , , ");
+ }
+
+ // ���O�ɏo��(�̈�2)
+ for (int i = 0; i < ((m_NumProcFrames - 1) / m_FrameScale) + 1; i++)
+ {
+ fprintf(fp, "%.3f, %.3f, %.3f, ",
+ m_ROILabT2[i].val[0], m_ROILabT2[i].val[1], m_ROILabT2[i].val[2]);
+ }
+ for (int i = ((m_NumProcFrames - 1) / m_FrameScale) + 1; i < 4; i++)
+ {
+ fprintf(fp, " , , , ");
+ }
+ // ���O�ɏo��(�̈�3)
+ for (int i = 0; i < ((m_NumProcFrames - 1) / m_FrameScale) + 1; i++)
+ {
+ fprintf(fp, "%.3f, %.3f, %.3f, ",
+ m_ROILabT3[i].val[0], m_ROILabT3[i].val[1], m_ROILabT3[i].val[2]);
+ }
+ for (int i = ((m_NumProcFrames - 1) / m_FrameScale) + 1; i < 4; i++)
+ {
+ fprintf(fp, " , , , ");
+ }
+
+ // ���O�ɏo��(�̈�4)
+ for (int i = 0; i < ((m_NumProcFrames - 1) / m_FrameScale) + 1; i++)
+ {
+ fprintf(fp, "%.3f, %.3f, %.3f, ",
+ m_ROILabT4[i].val[0], m_ROILabT4[i].val[1], m_ROILabT4[i].val[2]);
+ }
+ for (int i = ((m_NumProcFrames - 1) / m_FrameScale) + 1; i < 4; i++)
+ {
+ fprintf(fp, " , , , ");
+ }
+
+ fprintf(fp, "\n");
+
+ } while(pl.MoveNext());
+
+ fclose(fp);
+
+ printf("\nFinished.\n");
+ if (GShowImage(NULL, 0) < 1)
+ {
+ printf("Press key on terminal.\n");
+ getchar();
+ }
+ else
+ {
+ printf("Press key on window.\n");
+ cvWaitKey(0);
+ }
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �B�e����
+// �߂�l �G���[�l int 0=OK 1=�����_�ʒu�s�� 2=�����_���� 3=�蓮�G���[
+// -1=�v���I�ȃG���[ -2=�摜�Ȃ��G���[
+//
+int CAnalysis::MeasurementProc(const char *path, const char *file)
+{
+ // �摜�����Ǝ蓮���o�p�����[�^�̎擾
+ int numFrames = GetNumFrames(path, file);
+ if (numFrames < 1)
+ {
+ printf("No measurement file found.\n");
+ return -1;
+ }
+ int frame0 = ReadParam(path);
+ if (frame0 < 1)
+ {
+ printf("Read param failed.\n");
+ return -1;
+ }
+
+ // �����摜�����̌���
+ int procFrames = (numFrames < (PROC_FRAMES_S + PROC_FRAMES_L) / 2 ?
+ PROC_FRAMES_S : PROC_FRAMES_L);
+ m_FrameScale = (procFrames == PROC_FRAMES_S ? 10 : 100);
+#ifdef DEBUG_NUM_FRAMES
+ procFrames = DEBUG_NUM_FRAMES;
+#endif
+
+ // �����摜���J��
+ char filename[PATH_LEN], filenam2[PATH_LEN];
+ sprintf_s(filenam2, PATH_LEN, file, frame0);
+ sprintf_s(filename, PATH_LEN, "%s\\%s", path, filenam2);
+
+#ifndef DEBUG_TRACK_ONLY
+ // �L���X�}�b�`�̌��o�C�p�b�`�F�擾
+ CALL(m_Casmatch->SetImage(filename));
+ CALL(m_Casmatch->Detect());
+#endif
+
+ // �g���b�L���O�̏�����
+ IplImage *img = cvLoadImage(filename);
+ m_Tracker->Init(m_InitTrack, TRACK_POINTS, img);
+// m_Tracker->DrawPoints(img);
+// GShowImage(img, 1, "Init Tracker");
+ SAFE_RELEASEIMG(img);
+
+ // ���ʏo�̓f�B���N�g���̍쐬
+ sprintf_s(filename, PATH_LEN, "%s\\%s", path, OUTPUT_DIR);
+ if (!GFileExists(filename)) CreateDirectory(filename, NULL);
+
+ // ���O�̏���
+ FILE *fpLog;
+ sprintf_s(filename, PATH_LEN, "%s\\result20101221.csv", path);
+ fopen_s(&fpLog, filename, "w");
+ if (!fpLog) ERROR_RET("Can't open local log file.");
+ fprintf(fpLog, "�t���[��, ����, L*1, a*1, b*1, L*2, a*2, b*2, L*3, a*3, b*3, L*4, a*4, b*4\n");
+
+ // �e�t���[���̏���
+ int err = 0;
+ m_NumProcFrames = 0;
+ for (int frame = frame0; frame < frame0 + procFrames && err == 0; frame ++)
+ {
+ printf("\n* Frame %d\n", m_NumProcFrames);
+
+ sprintf_s(filenam2, PATH_LEN, file, frame);
+ sprintf_s(filename, PATH_LEN, "%s\\%s", path, filenam2);
+
+ err = this->FrameProc(filename, path, m_NumProcFrames, fpLog);
+ if (err > -1) m_NumProcFrames ++;
+ }
+
+#ifdef MANUAL_TRACKING_EVALUATION
+ // �I����Ɏ蓮�Ńg���b�L���O���ʂ�]������
+ if (err == 0)
+ {
+ printf("Evaluate result pressing key [F]ailed or key else on the win:\n");
+ if (cvWaitKey(0) == 'f') err = 3;
+ }
+#endif // MANUAL_TRACKING_EVALUATION
+
+ fclose(fpLog);
+
+ return err;
+}
+
+//-------------------------------------------------------------------------------
+// �t���[������
+// �߂�l �G���[�l int 0=OK 1=�����_�ʒu�s�� 2=�����_����
+// -1=�v���I�ȃG���[ -2=�摜�Ȃ��G���[
+//
+int CAnalysis::FrameProc(const char *inputfile, const char *path,
+ const int frame, FILE *fpLog)
+{
+ int trackErr = 0;
+ CHQTime timer;
+
+ // �t�@�C�����J��
+ IplImage *imgCam = cvLoadImage(inputfile);
+ if (!imgCam)
+ {
+ printf("Error: No camera file.\n");
+ return -2;
+ }
+
+ // �g���b�L���O
+ if (!m_Tracker->Frame(imgCam))
+ {
+ printf("Track point missing.\n");
+ trackErr = 2;
+ }
+ fprintf(fpLog, "%d, %f, ", frame, m_Tracker->Movement());
+ if (!m_Tracker->IsVaildate())
+ {
+ printf("Track position invalidate.\n");
+ trackErr = 1;
+ }
+ timer.LapTime("tracking");
+
+#ifndef DEBUG_TRACK_ONLY
+ // �L���X�}�b�`�̃p�b�`�F�擾
+ CvMat *crgbFC = NULL;
+ CALL(m_Casmatch->SetImage(inputfile));
+ CALL(m_Casmatch->CalcPatchColor9(&crgbFC));
+#ifdef SHOW_RGB_VALUES
+ GShowMat(crgbFC, "crgbFC", "%5.1f");
+#endif // SHOW_RGB_VALUES
+
+ // ���`��
+ CvMat *lrgbFC = m_RefProc->GenLinearize(crgbFC);
+#ifdef SHOW_RGB_VALUES
+ GShowMat(lrgbFC, "lrgbFC", "%6.3f");
+#endif // SHOW_RGB_VALUES
+
+ // �t���[���ԐF��s��̎Z�o
+ CMRegressionRGB ccmFR(FRAME_REF_CONVERT_DIM);
+#ifdef SHOW_REGRESSION_COEF
+ printf("ccmFR ");
+#endif // SHOW_REGRESSION_COEF
+ ccmFR.CalcCoef(lrgbFC, m_RefProc->lrgbRC());
+ timer.LapTime("coef calculation");
+
+ if (trackErr == 0)
+ {
+ // ROI�̌v�Z(�}�X�N1)
+ CvScalar roiCam1 = m_Tracker->ROIColor1(imgCam);
+ CvScalar roiLin1 = m_RefProc->ScalarLinearize(roiCam1);
+ CvScalar roiCor1 = ccmFR.ScalarConvert(roiLin1);
+ CvScalar roiXYZ1 = m_RefProc->ScalarConvertXYZ(roiCor1);
+ CvScalar roiLab1 = GXYZtoLab(roiXYZ1);
+ printf("Lab1 = %f, %f, %f\n",
+ roiLab1.val[0], roiLab1.val[1], roiLab1.val[2]);
+ fprintf(fpLog, "%f, %f, %f, ",
+ roiLab1.val[0], roiLab1.val[1], roiLab1.val[2]);
+
+ if (frame % m_FrameScale == 0)
+ m_ROILabT1[frame / m_FrameScale] = roiLab1;
+
+ // ROI�̌v�Z(�}�X�N2)
+ CvScalar roiCam2 = m_Tracker->ROIColor2(imgCam);
+ CvScalar roiLin2 = m_RefProc->ScalarLinearize(roiCam2);
+ CvScalar roiCor2 = ccmFR.ScalarConvert(roiLin2);
+ CvScalar roiXYZ2 = m_RefProc->ScalarConvertXYZ(roiCor2);
+ CvScalar roiLab2 = GXYZtoLab(roiXYZ2);
+ printf("Lab2 = %f, %f, %f\n",
+ roiLab2.val[0], roiLab2.val[1], roiLab2.val[2]);
+ fprintf(fpLog, "%f, %f, %f, ",
+ roiLab2.val[0], roiLab2.val[1], roiLab2.val[2]);
+
+ if (frame % m_FrameScale == 0)
+ m_ROILabT2[frame / m_FrameScale] = roiLab2;
+
+ // ROI�̌v�Z(�}�X�N3)
+ CvScalar roiCam3 = m_Tracker->ROIColor3(imgCam);
+ CvScalar roiLin3 = m_RefProc->ScalarLinearize(roiCam3);
+ CvScalar roiCor3 = ccmFR.ScalarConvert(roiLin3);
+ CvScalar roiXYZ3 = m_RefProc->ScalarConvertXYZ(roiCor3);
+ CvScalar roiLab3 = GXYZtoLab(roiXYZ3);
+ printf("Lab3 = %f, %f, %f\n",
+ roiLab3.val[0], roiLab3.val[1], roiLab3.val[2]);
+ fprintf(fpLog, "%f, %f, %f, ",
+ roiLab3.val[0], roiLab3.val[1], roiLab3.val[2]);
+
+ if (frame % m_FrameScale == 0)
+ m_ROILabT3[frame / m_FrameScale] = roiLab3;
+
+ // ROI�̌v�Z(�}�X�N4)
+ CvScalar roiCam4 = m_Tracker->ROIColor4(imgCam);
+ CvScalar roiLin4 = m_RefProc->ScalarLinearize(roiCam4);
+ CvScalar roiCor4 = ccmFR.ScalarConvert(roiLin4);
+ CvScalar roiXYZ4 = m_RefProc->ScalarConvertXYZ(roiCor4);
+ CvScalar roiLab4 = GXYZtoLab(roiXYZ4);
+ printf("Lab4 = %f, %f, %f\n",
+ roiLab4.val[0], roiLab4.val[1], roiLab4.val[2]);
+ fprintf(fpLog, "%f, %f, %f, ",
+ roiLab4.val[0], roiLab4.val[1], roiLab4.val[2]);
+
+ if (frame % m_FrameScale == 0)
+ m_ROILabT4[frame / m_FrameScale] = roiLab4;
+ }
+
+#ifdef CONVERT_IMAGE
+ // �摜�̐F�ϊ�
+ IplImage *imgLin = m_RefProc->GenLinearize(imgCam);
+ timer.LapTime("image linearize");
+
+ IplImage *imgCor = ccmFR.GenConvert(imgLin);
+ timer.LapTime("image correction");
+
+ // XYZ�ɕϊ�
+ IplImage *imgXYZ = m_RefProc->GenConvertXYZ(imgCor);
+ timer.LapTime("image conversion to XYZ");
+
+ // ���j�^�[�p�ɕϊ�
+ IplImage *imgDisp = m_RefProc->GenConvertDisp(imgXYZ);
+ timer.LapTime("image conversion to Monitor RGB");
+
+ IplImage *imgGamma = GGenAddGamma(imgDisp);
+ timer.LapTime("image conversion adding Gamma");
+#endif // CONVERT_IMAGE
+
+#endif // DEBUG_TRACK_ONLY
+
+ // ��͌��ʂ̕\��
+#ifndef DEBUG_TRACK_ONLY
+
+ IplImage *imgCV = m_Casmatch->GenPatchedImage();
+ m_Tracker->DrawPoints(imgCV);
+ if (trackErr == 0) m_Tracker->DrawROI(imgCV);
+#ifdef SHOW_CV_IMAGE
+ GShowImage(imgCV, 1, "Detection result");
+#endif // SHOW_CV_IMAGE
+#ifdef CONVERT_IMAGE
+ GShowImage(imgGamma, 2, "Calibrated");
+#endif // CONVERT_IMAGE
+
+#ifdef SHOW_XYZ_IMAGE
+ //IplImage *imgX = cvCreateImage(cvGetSize(imgXYZ), IPL_DEPTH_64F, 1);
+ //IplImage *imgY = cvCreateImage(cvGetSize(imgXYZ), IPL_DEPTH_64F, 1);
+ //IplImage *imgZ = cvCreateImage(cvGetSize(imgXYZ), IPL_DEPTH_64F, 1);
+ //cvSplit(imgXYZ, imgX, imgY, imgZ, NULL);
+ //GShowImage(imgX, 3, "X image");
+ //GShowImage(imgY, 4, "Y image");
+ //GShowImage(imgZ, 5, "Z image");
+#endif // SHOW_XYZ_IMAGE
+#else // DEBUG_TRACK_ONLY
+ m_Tracker->DrawPoints(imgCam);
+ if (trackErr < 1) GShowImage(imgCam, 1, "Detection result", 1);
+ else GShowImage(imgCam, 1, "Tracking failed", 1);
+
+#endif // DEBUG_TRACK_ONLY
+ timer.LapTime("display");
+
+ // ��͌��ʂ̕ۑ�
+ char filename[PATH_LEN];
+ sprintf_s(filename, PATH_LEN, OUTPUT_CV, path, frame);
+ cvSaveImage(filename, imgCV);
+ timer.LapTime("save image");
+
+ // �v�Z�Ɏg�p�����s��̉��
+ SAFE_RELEASEIMG(imgCam);
+ SAFE_RELEASEIMG(imgCV);
+#ifndef DEBUG_TRACK_ONLY
+ SAFE_RELEASEMAT(crgbFC);
+ SAFE_RELEASEMAT(lrgbFC);
+#ifdef CONVERT_IMAGE
+ SAFE_RELEASEIMG(imgLin);
+ SAFE_RELEASEIMG(imgCor);
+ SAFE_RELEASEIMG(imgXYZ);
+ SAFE_RELEASEIMG(imgDisp);
+ SAFE_RELEASEIMG(imgGamma);
+#endif // CONVERT_IMAGE
+#ifdef SHOW_XYZ_IMAGE
+ SAFE_RELEASEIMG(imgX);
+ SAFE_RELEASEIMG(imgY);
+ SAFE_RELEASEIMG(imgZ);
+#endif // SHOW_XYZ_IMAGE
+#endif // DEBUG_TRACK_ONLY
+
+ fprintf(fpLog, "\n");
+ timer.CheckTime("FrameProc");
+
+ return trackErr;
+}
+
+//-------------------------------------------------------------------------------
+// �t���[�����̎擾
+int CAnalysis::GetNumFrames(const char *path, const char *file)
+{
+ char filename[PATH_LEN], filenam2[PATH_LEN];
+ sprintf_s(filenam2, PATH_LEN, file, 0);
+ sprintf_s(filename, PATH_LEN, "%s\\%s", path, filenam2);
+
+ int frame = GFileExists(filename) ? -1 : 0;
+ do
+ {
+ sprintf_s(filenam2, PATH_LEN, file, ++ frame);
+ sprintf_s(filename, PATH_LEN, "%s\\%s", path, filenam2);
+ } while (GFileExists(filename));
+
+ printf("Found %d frames\n", frame);
+
+ return frame;
+}
+
+//-------------------------------------------------------------------------------
+// �p�����[�^��ǂݍ���
+int CAnalysis::ReadParam(const char *path)
+{
+ // �p�����[�^�t�@�C�����J��
+ char filename[PATH_LEN];
+ sprintf_s(filename, PATH_LEN, "%s\\" PARAM_FILE, path);
+
+ // �p�����[�^��ǂݍ���
+ FILE *fp = NULL;
+ fopen_s(&fp, filename, "r");
+ if (fp == NULL) ERROR_RET("Can't open parameter file.");
+
+ const int BUFFER_LEN = 1024;
+ char buffer[BUFFER_LEN] = {0};
+
+ // �J�n�t���[���擾
+ fgets(buffer, BUFFER_LEN, fp);
+ int firstFrame = atoi(buffer);
+#ifdef SHOW_FILELOAD
+ printf("First frame %d\n", firstFrame);
+#endif // SHOW_FILELOAD
+
+ // ��`��|�C���g�̎擾
+ for (int i = 0; i < TRACK_POINTS; i++)
+ {
+ fgets(buffer, BUFFER_LEN, fp);
+ char *pt = buffer;
+ for (; *pt != '\0' && *pt != ','; pt ++);
+ if (*pt != ',') ERROR_RET("Parameter file error.");
+ *pt = '\0';
+ m_InitTrack[i].x = atoi(buffer) * 2; // �蓮���W��1/2����Ă���
+ m_InitTrack[i].y = atoi(pt + 1) * 2;
+#ifdef SHOW_FILELOAD
+ printf("Init. track point (%d, %d)\n", m_InitTrack[i].x, m_InitTrack[i].y);
+#endif // SHOW_FILELOAD
+ }
+
+ return firstFrame;
+}
diff --git a/reference/Analysis/Analysis.h b/reference/Analysis/Analysis.h
new file mode 100644
index 0000000..eac2687
--- /dev/null
+++ b/reference/Analysis/Analysis.h
@@ -0,0 +1,44 @@
+#pragma once
+
+#include "stdafx.h"
+#include "ReferenceProc.h"
+#include "MRegressionRGB.h"
+#include "Tracking.h"
+
+#define PARAM_FILE "Info.csv"
+#define OUTPUT_DIR "output20101221"
+#define OUTPUT_CV "%s\\" OUTPUT_DIR "\\cv%03d.jpg"
+#define TRACK_POINTS 5 // ��`�� �ǐՃ|�C���g��
+#define PROC_FRAMES_S 31 // ���������i�Z�j
+#define PROC_FRAMES_L 301 // ���������i���j
+#define FRAME_REF_CONVERT_DIM 4 // �t���[������Q�ƂF�ϊ����鎟��
+
+class CAnalysis
+{
+private:
+// �p�����[�^�̐錾
+ CReferenceProc *m_RefProc;
+ CDetectCasmatch *m_Casmatch;
+ CTracking *m_Tracker;
+ CvPoint m_InitTrack[TRACK_POINTS]; // ��`�� �������W
+ int m_NumProcFrames;
+ int m_FrameScale;
+ CvScalar m_ROILabT1[4];
+ CvScalar m_ROILabT2[4];
+ CvScalar m_ROILabT3[4];
+ CvScalar m_ROILabT4[4];
+
+// ���\�b�h�̐錾
+private:
+ bool GlobalInit();
+ bool ReadProcList();
+ int MeasurementProc(const char *path, const char *file);
+ int FrameProc(const char *inputfile, const char *path, const int frame, FILE *fpLog);
+ int ReadParam(const char *path);
+ int GetNumFrames(const char *path, const char *file);
+
+public:
+ CAnalysis(void);
+ ~CAnalysis(void);
+ bool GlobalProc();
+};
diff --git a/reference/Analysis/Detect.cpp b/reference/Analysis/Detect.cpp
new file mode 100644
index 0000000..5b3040d
--- /dev/null
+++ b/reference/Analysis/Detect.cpp
@@ -0,0 +1,182 @@
+#include "Detect.h"
+
+//-------------------------------------------------------------------------------
+// �R���X�g���N�^
+CDetect::CDetect(void)
+{
+ m_Input = NULL;
+ m_Template = NULL;
+ m_PatchPos = NULL;
+}
+
+//-------------------------------------------------------------------------------
+// �f�X�g���N�^
+CDetect::~CDetect(void)
+{
+ SAFE_RELEASEIMG(m_Input);
+ SAFE_RELEASEIMG(m_Template);
+ SAFE_DELETEA(m_PatchPos);
+}
+
+//-------------------------------------------------------------------------------
+// �����摜���Z�b�g
+bool CDetect::SetImage(const char *filename)
+{
+ SAFE_RELEASEIMG(m_Input);
+ m_Input = cvLoadImage(filename);
+ printf("Reading : %s\n", filename);
+ if (!m_Input) ERROR_RET("Can't open input file for detection.");
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �e���v���[�g�摜���Z�b�g
+bool CDetect::SetTemplate(const char *filename)
+{
+ SAFE_RELEASEIMG(m_Template);
+ m_Template = cvLoadImage(filename);
+ if (!m_Template) ERROR_RET("Can't open template file.");
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// ���o
+bool CDetect::Matching(CvPoint *matchPos, double *matchAngle)
+{
+ // �摜�T�C�Y(ROI�ɑΉ��j
+ int width, height;
+ if (m_Input->roi) width = m_Input->roi->width, height = m_Input->roi->height;
+ else width = m_Input->width, height = m_Input->height;
+
+ // �����ʃe���v���[�g�}�b�`���O
+ double gMinC = -1.0;
+ double startTime = omp_get_wtime();
+
+ // �p�x�̃��[�v���������
+#ifdef _OPENMP
+#pragma omp parallel for schedule(dynamic)
+#endif
+ for(int i = 0; i < (int)(360.0 / m_RotateStep); i++)
+ {
+ double angle = m_RotateStep * i;
+
+ // �}�b�`���O�̏���
+ CvMat *rotateMat = cvCreateMat(2, 3, CV_32FC1);
+ IplImage *rotateTemplate = cvCreateImage(cvGetSize(m_Template),
+ IPL_DEPTH_8U, COLOR);
+ IplImage *match = cvCreateImage(
+ cvSize(width - m_Template->width + 1, height - m_Template->height + 1),
+ IPL_DEPTH_32F , 1);
+
+ // �e���v���[�g�̉�]
+ cv2DRotationMatrix(cvPoint2D32f(m_Template->width/2.0,
+ m_Template->height/2.0), angle, 1.0, rotateMat);
+ cvWarpAffine(m_Template, rotateTemplate, rotateMat,
+ CV_WARP_FILL_OUTLIERS, cvScalarAll(255.0));
+
+ // �}�b�`���O
+ cvMatchTemplate(m_Input, rotateTemplate, match, CV_TM_SQDIFF_NORMED);
+ CvPoint minPos, maxPos;
+ double minC, maxC;
+ cvMinMaxLoc(match, &minC, &maxC, &minPos, &maxPos);
+
+ // �ő告�ւ̌��o�i���L�ϐ��ɏ������ނ̂ŃN���e�B�J���ɂ���j
+ #pragma omp critical(c1)
+ {
+ if (minC < gMinC || gMinC < 0)
+ {
+ *matchAngle = angle;
+ gMinC = minC;
+ matchPos->x = minPos.x + (m_Template->width / 2);
+ matchPos->y = minPos.y + (m_Template->height / 2);
+ if (m_Input->roi)
+ {
+ matchPos->x += m_Input->roi->xOffset;
+ matchPos->y += m_Input->roi->yOffset;
+ }
+ }
+ }
+
+ // �摜�o�b�t�@�̉��
+ cvReleaseImage(&rotateTemplate);
+ cvReleaseImage(&match);
+ cvReleaseMat(&rotateMat);
+ }
+
+ double elapsedTime = (omp_get_wtime() - startTime) * 1000.0;
+ printf("Matching Angle %3.0f, Coef %.4f, Position(%4d, %4d), Time %.0fms\n",
+ *matchAngle, gMinC, matchPos->x, matchPos->y, elapsedTime);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �p�b�`�̕��ϐF�̌v�Z
+bool CDetect::CalcPatchColor(CvMat **rgb)
+{
+ SAFE_RELEASEMAT(*rgb);
+ *rgb = cvCreateMat(m_NumPatch, COLOR, CV_32FC1);
+
+ for (int i = 0; i < m_NumPatch; i++)
+ {
+ int x1 = m_PatchPos[i].x - m_PatchSize;
+ int y1 = m_PatchPos[i].y - m_PatchSize;
+ int x2 = m_PatchPos[i].x + m_PatchSize;
+ int y2 = m_PatchPos[i].y + m_PatchSize;
+ int psize = m_PatchSize * 2 + 1;
+ CvScalar mean, stdev;
+
+ if ( (x1 < 0 || x1 >= m_Input->width || y1 < 0 || y1 > m_Input->height)
+ && (x2 < 0 || x2 >= m_Input->width || y2 < 0 || y2 > m_Input->height))
+ {
+ // �p�b�`���摜��
+ mean.val[0] = mean.val[1] = mean.val[2] = -1.0;
+ }
+ else
+ {
+ // �p�b�`ROI�ݒ�
+ cvSetImageROI(m_Input, cvRect(x1, y1, psize, psize));
+
+ // �p�b�`�̕��ςƕW�������v�Z
+ cvAvgSdv(m_Input, &mean, &stdev);
+ }
+
+ // �s��ɑ��
+ for(int c=0; cDrawROI(patched);
+
+ return patched;
+}
diff --git a/reference/Analysis/Detect.h b/reference/Analysis/Detect.h
new file mode 100644
index 0000000..9c0c8d0
--- /dev/null
+++ b/reference/Analysis/Detect.h
@@ -0,0 +1,27 @@
+#pragma once
+
+#include "stdafx.h"
+
+class CDetect
+{
+protected:
+ IplImage *m_Input;
+ IplImage *m_Template;
+ CvPoint *m_PatchPos;
+ double m_RotateStep;
+ int m_NumPatch;
+ int m_PatchSize;
+
+protected:
+ bool SetTemplate(const char *filename);
+ bool Matching(CvPoint *pos, double *angle);
+
+public:
+ CDetect(void);
+ ~CDetect(void);
+ bool SetImage(const char *filename);
+ virtual bool Init() = NULL;
+ bool CalcPatchColor(CvMat **rgb);
+ bool DrawROI(IplImage *img);
+ IplImage* GenPatchedImage();
+};
diff --git a/reference/Analysis/DetectCasmatch.cpp b/reference/Analysis/DetectCasmatch.cpp
new file mode 100644
index 0000000..9820db0
--- /dev/null
+++ b/reference/Analysis/DetectCasmatch.cpp
@@ -0,0 +1,111 @@
+#include "DetectCasmatch.h"
+
+//-------------------------------------------------------------------------------
+// �R���X�g���N�^
+CDetectCasmatch::CDetectCasmatch(void)
+{
+}
+
+//-------------------------------------------------------------------------------
+// �f�X�g���N�^
+CDetectCasmatch::~CDetectCasmatch(void)
+{
+}
+
+//-------------------------------------------------------------------------------
+// ������
+bool CDetectCasmatch::Init()
+{
+ // �L���X�}�b�`�̎Q�ƃe���v���[�g�摜��ǂݍ���
+ CALL(this->SetTemplate(CASMATCH));
+
+ m_NumPatch = C_NUM_PATCH * NUM_CASMATCH;
+ m_RotateStep = C_ROTATE_STEP;
+ m_PatchSize = C_PATCH_SIZE;
+
+ SAFE_DELETEA(m_PatchPos);
+ m_PatchPos = new CvPoint [m_NumPatch];
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// ���o����
+bool CDetectCasmatch::Detect()
+{
+ // ROI���g���ĉE��ƍ����p�b�`����������
+ CvRect roi;
+ CvPoint pos;
+ double angle;
+
+ roi = cvRect(m_Input->width-SEARCH_AREA, 0, SEARCH_AREA, SEARCH_AREA);
+ cvSetImageROI(m_Input, roi);
+ CALL(CDetect::Matching(&pos, &angle));
+ CALL(CalcPatchPosition(pos, angle, 0));
+
+ roi = cvRect(0, m_Input->height-SEARCH_AREA, SEARCH_AREA, SEARCH_AREA);
+ cvSetImageROI(m_Input, roi);
+ CALL(CDetect::Matching(&pos, &angle));
+ CALL(CalcPatchPosition(pos, angle, 1));
+
+ // ROI����
+ cvResetImageROI(m_Input);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �e�p�b�`�̈ʒu�Ɗp�x���v�Z����
+bool CDetectCasmatch::CalcPatchPosition(CvPoint pos, double angle, int place)
+{
+ // �e�p�b�`�̒��S���W���v�Z
+ m_PatchPos[place * C_NUM_PATCH] = pos;
+ for(int i=0; i < C_NUM_PATCH - 1; i++)
+ {
+ m_PatchPos[place * C_NUM_PATCH + i + 1].x =
+ cvRound(pos.x + BLACK_LEN * (i%2 ? cvSqrt(2.0) : 1.0) *
+ cos((BLACK_ANGLE - angle + i*45.0)/180.0*CV_PI));
+ m_PatchPos[place * C_NUM_PATCH + i + 1].y =
+ cvRound(pos.y + BLACK_LEN * (i%2 ? cvSqrt(2.0) : 1.0) *
+ sin((BLACK_ANGLE - angle + i*45.0)/180.0*CV_PI));
+ }
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// 9�p�b�`�̕��ϐF�̌v�Z
+bool CDetectCasmatch::CalcPatchColor9(CvMat **rgb)
+{
+ SAFE_RELEASEMAT(*rgb);
+ *rgb = cvCreateMat(C_NUM_PATCH, COLOR, CV_32FC1);
+
+ CvMat *rgb18 = NULL;
+ this->CalcPatchColor(&rgb18);
+
+ // ���ς��v�Z
+ for(int i = 0; i < C_NUM_PATCH; i ++)
+ {
+ for (int c = 0; c < COLOR; c ++)
+ {
+ // �e�L���X�}�b�`����F���擾
+ double sum = 0;
+ int count = 0;
+ for (int j = 0; j < NUM_CASMATCH; j++)
+ {
+ double val = GcvmGet(rgb18, i + C_NUM_PATCH * j, c);
+ if (val >= 0) sum += val, count ++;
+ }
+
+ // ���ς��Z�o�i�v���s�p�b�`���l���j
+ double mean;
+ if (count < 1) mean = -1.0;
+ else mean = sum / count;
+ cvmSet(*rgb, i, c, mean);
+ }
+ }
+
+ SAFE_RELEASEMAT(rgb18);
+
+ return true;
+}
diff --git a/reference/Analysis/DetectCasmatch.h b/reference/Analysis/DetectCasmatch.h
new file mode 100644
index 0000000..f8a3242
--- /dev/null
+++ b/reference/Analysis/DetectCasmatch.h
@@ -0,0 +1,34 @@
+#pragma once
+
+#include "detect.h"
+
+#define CASMATCH "data\\CasmatchTmpl_v2.jpg"
+#define SEARCH_AREA 400 // �L���X�}�b�`��T������R�[�i�[�̈�̃T�C�Y
+#define C_ROTATE_STEP 2.0 // �L���X�}�b�`����]�T������p�x���݁i�x�j
+#define C_PATCH_SIZE 3 // �p�b�`�̐F�ς���̈�T�C�Y
+#define NUM_CASMATCH 2 // �L���X�}�b�`�̐�
+#define C_NUM_PATCH 9 // �L���X�}�b�`���p�b�`��
+
+//// �L���X�}�b�`�摜 CasmatchTmpl.jpg ����v�Z
+//// ���S�p�b�`���W 39,39 ���p�b�`���W 20,45 (-19,+6)
+//#define BLACK_ANGLE 160.0 // =atan(+6/-19) (degree)
+//#define BLACK_LEN 19.5 // =sqrt((+6)^2 + (-19)^2)
+// �L���X�}�b�`�摜 CasmatchTmpl_v2.jpg ����v�Z
+// ���S�p�b�`���W 25,25 ���p�b�`���W 39,23 (+14,-2)
+#define BLACK_ANGLE 175.0 // =atan(-2/+14) (degree)
+#define BLACK_LEN 14.1 // =sqrt((-2)^2 + (+14)^2)
+
+
+class CDetectCasmatch :
+ public CDetect
+{
+private:
+ bool CalcPatchPosition(CvPoint pos, double angle, int place);
+
+public:
+ CDetectCasmatch(void);
+ ~CDetectCasmatch(void);
+ bool Init();
+ bool Detect();
+ bool CalcPatchColor9(CvMat **rgb);
+};
diff --git a/reference/Analysis/DetectMacbeth.cpp b/reference/Analysis/DetectMacbeth.cpp
new file mode 100644
index 0000000..55b8449
--- /dev/null
+++ b/reference/Analysis/DetectMacbeth.cpp
@@ -0,0 +1,67 @@
+#include "DetectMacbeth.h"
+
+//-------------------------------------------------------------------------------
+// �R���X�g���N�^
+CDetectMacbeth::CDetectMacbeth(void)
+{
+}
+
+//-------------------------------------------------------------------------------
+// �f�X�g���N�^
+CDetectMacbeth::~CDetectMacbeth(void)
+{
+}
+
+//-------------------------------------------------------------------------------
+// ������
+bool CDetectMacbeth::Init()
+{
+ // �}�N�x�X�̎Q�ƃe���v���[�g�摜��ǂݍ���
+ CALL(this->SetTemplate(MACBETH));
+
+ m_NumPatch = M_NUM_PATCH;
+ m_RotateStep = M_ROTATE_STEP;
+ m_PatchSize = M_PATCH_SIZE;
+
+ SAFE_DELETEA(m_PatchPos);
+ m_PatchPos = new CvPoint [m_NumPatch];
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// ���o����
+bool CDetectMacbeth::Detect()
+{
+ // ROI����
+ cvResetImageROI(m_Input);
+
+ // �}�N�x�X�̌��o�ƃp�b�`�ʒu�擾
+ CvPoint pos;
+ double angle;
+ CALL(CDetect::Matching(&pos, &angle));
+ CALL(this->CalcPatchPosition(pos, angle));
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �e�p�b�`�̈ʒu�Ɗp�x���v�Z����
+bool CDetectMacbeth::CalcPatchPosition(CvPoint pos, double angle)
+{
+ // �p�b�`�̌����i�㉺�j
+ int dir = angle < 90 ? 1 : -1;
+ int startX = pos.x - dir * (M_PATCH_SPACE * 3) / 2;
+ int startY = pos.y + dir * (M_PATCH_SPACE * 5) / 2;
+
+ for (int i = 0; i < 4; i ++)
+ {
+ for (int j = 0; j < 6; j ++)
+ {
+ m_PatchPos[i * 6 + j].x = startX + dir * i * M_PATCH_SPACE;
+ m_PatchPos[i * 6 + j].y = startY - dir * j * M_PATCH_SPACE;
+ }
+ }
+
+ return true;
+}
\ No newline at end of file
diff --git a/reference/Analysis/DetectMacbeth.h b/reference/Analysis/DetectMacbeth.h
new file mode 100644
index 0000000..e96f510
--- /dev/null
+++ b/reference/Analysis/DetectMacbeth.h
@@ -0,0 +1,27 @@
+#pragma once
+
+#include "detect.h"
+
+//#define MACBETH "data\\MacbethTmpl.bmp"
+#define MACBETH "data\\MacbethTmpl_v4.bmp"
+#define M_ROTATE_STEP 180.0 // �}�N�x�X����]�T������p�x���݁i�x�j
+#define M_PATCH_SIZE 10 // �p�b�`�̐F�ς���̈�T�C�Y
+#define M_NUM_PATCH 24 // �}�N�x�X���p�b�`��
+//#define M_PATCH_SPACE 90 // �p�b�`�Ԃ̋����i��f��, MacbethTmpl.bmp���v�Z�j
+#define M_PATCH_SPACE 68 // �p�b�`�Ԃ̋����i��f��, MacbethTmpl_v2.bmp���v�Z�j
+
+
+class CDetectMacbeth :
+ public CDetect
+{
+private:
+ bool CalcPatchPosition(CvPoint pos, double angle);
+
+public:
+ CDetectMacbeth(void);
+ ~CDetectMacbeth(void);
+ bool Init();
+ bool Detect();
+ bool CalcPatchColor9(CvMat **rgb);
+
+};
diff --git a/reference/Analysis/HQTime.cpp b/reference/Analysis/HQTime.cpp
new file mode 100644
index 0000000..5f1bdfd
--- /dev/null
+++ b/reference/Analysis/HQTime.cpp
@@ -0,0 +1,30 @@
+#include "HQTime.h"
+#include
+#include
+
+CHQTime::CHQTime(void)
+{
+ m_StartTime = m_LastTime = omp_get_wtime();
+}
+
+CHQTime::~CHQTime(void)
+{
+}
+
+void CHQTime::LapTime(const char *disp)
+{
+ double curTime = omp_get_wtime();
+#ifdef SHOW_TIME
+ printf("Lap time for %s : %.2f ms\n", disp, (curTime - m_LastTime) * 1000.0);
+#endif // SHOW_TIME
+ m_LastTime = curTime;
+}
+
+void CHQTime::CheckTime(const char *disp)
+{
+ double curTime = omp_get_wtime();
+
+#ifdef SHOW_TIME
+ printf("Total time for %s : %.2f ms\n", disp, (curTime - m_StartTime) * 1000.0);
+#endif // SHOW_TIME
+}
diff --git a/reference/Analysis/HQTime.h b/reference/Analysis/HQTime.h
new file mode 100644
index 0000000..f70ac15
--- /dev/null
+++ b/reference/Analysis/HQTime.h
@@ -0,0 +1,16 @@
+#pragma once
+
+//#define SHOW_TIME
+
+class CHQTime
+{
+private:
+ double m_StartTime;
+ double m_LastTime;
+
+public:
+ CHQTime(void);
+ ~CHQTime(void);
+ void LapTime(const char *disp);
+ void CheckTime(const char *disp);
+};
diff --git a/reference/Analysis/MRegression.cpp b/reference/Analysis/MRegression.cpp
new file mode 100644
index 0000000..74be4a8
--- /dev/null
+++ b/reference/Analysis/MRegression.cpp
@@ -0,0 +1,240 @@
+#include "MRegression.h"
+
+//-------------------------------------------------------------------------------
+// Constructor: record the model dimensionality; matrices are allocated lazily.
+CMRegression::CMRegression(const int dim)
+{
+ m_Coef = NULL;
+ m_Error = NULL;
+ m_Dim = dim;
+}
+
+//-------------------------------------------------------------------------------
+// Destructor: release the coefficient and error matrices.
+CMRegression::~CMRegression(void)
+{
+ SAFE_RELEASEMAT(m_Coef);
+ SAFE_RELEASEMAT(m_Error);
+}
+
+//-------------------------------------------------------------------------------
+// Fit the regression coefficients by least squares (normal equations).
+bool CMRegression::CalcCoef(const CvMat *data, const CvMat *observ)
+{
+ int numSample = data->rows; // number of samples
+ int numCh = observ->cols; // number of observation channels
+
+ // Work matrices for the normal equations
+ CvMat *matData = cvCreateMat(m_Dim, m_Dim, CV_64FC1);
+ CvMat *matDataInv = cvCreateMat(m_Dim, m_Dim, CV_64FC1);
+ CvMat *matObsv = cvCreateMat(m_Dim, numCh, CV_64FC1);
+
+ // Build the Gram matrix X^T * X (regressor terms come from GetX)
+ for (int row = 0; row < m_Dim; row ++)
+ {
+ for (int col = 0; col < m_Dim; col ++)
+ {
+ double val = 0;
+ for (int sample = 0; sample < numSample; sample ++)
+ val += (GetX(data, sample, row) * GetX(data, sample, col));
+ cvmSet(matData, row, col, val);
+ }
+ }
+
+ // Invert the Gram matrix
+ cvInvert(matData, matDataInv, CV_LU);
+
+ // Build the right-hand side X^T * Y
+ for (int row = 0; row < m_Dim; row ++)
+ {
+ for (int col = 0; col < numCh; col ++)
+ {
+ double val = 0;
+ for (int sample = 0; sample < numSample; sample ++)
+ val += (GetX(data, sample, row) * GcvmGet(observ, sample, col));
+ cvmSet(matObsv, row, col, val);
+ }
+ }
+
+ // Coefficients: (X^T X)^-1 * (X^T Y)
+ SAFE_RELEASEMAT(m_Coef);
+ m_Coef = cvCreateMat(m_Dim, numCh, CV_64FC1);
+ cvMatMul(matDataInv, matObsv, m_Coef);
+
+ // Release the work matrices
+ SAFE_RELEASEMAT(matData);
+ SAFE_RELEASEMAT(matDataInv);
+ SAFE_RELEASEMAT(matObsv);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Expand the samples into the design matrix: one row per sample, one column per GetX term.
+CvMat * CMRegression::GenDataMat(const CvMat *data)
+{
+ // Allocate the design matrix
+ CvMat *matData = cvCreateMat(data->rows, m_Dim, CV_64FC1);
+
+#ifdef _OPENMP
+#pragma omp parallel for schedule(dynamic)
+#endif
+ for (int sample = 0; sample < data->rows; sample ++)
+ {
+ for (int i = 0; i < m_Dim; i++)
+ {
+ cvmSet(matData, sample, i, GetX(data, sample, i));
+ }
+ }
+ return matData;
+}
+
+//-------------------------------------------------------------------------------
+// Compute the per-channel RMSE between the model's estimates and the observations.
+bool CMRegression::CalcError(const CvMat *data, const CvMat *observ)
+{
+ int numSample = data->rows; // number of samples
+ int numCh = observ->cols; // number of observation channels
+
+ // Estimates from the fitted model
+ CvMat *estim = GenConvert(data);
+
+ // RMSE per channel, stored in m_Error (1 x numCh)
+ SAFE_RELEASEMAT(m_Error);
+ m_Error = cvCreateMat(1, numCh, CV_64FC1);
+ for (int col = 0; col < numCh; col ++)
+ {
+ double err = 0;
+ for (int sample = 0; sample < numSample; sample ++)
+ {
+ err += pow(GcvmGet(observ, sample, col) - GcvmGet(estim, sample, col),
+ 2.0);
+ }
+ cvmSet(m_Error, 0, col, sqrt(err / (double)numSample));
+ }
+
+ // Release the work matrix
+ SAFE_RELEASEMAT(estim);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Apply the fitted model to matrix data (CalcCoef must have been called first).
+CvMat *CMRegression::GenConvert(const CvMat *data)
+{
+// CHQTime timer;
+ // Build the design matrix
+ CvMat *matData = GenDataMat(data);
+// timer.LapTime("GenDataMat");
+
+ // Estimates = design matrix * coefficients
+ CvMat *convert = cvCreateMat(data->rows, data->cols, CV_64FC1); // NOTE(review): output sized by data->cols -- assumes m_Coef->cols == data->cols, confirm
+ cvMatMul(matData, m_Coef, convert);
+// timer.LapTime("cvMatMul ");
+
+ // Release the work matrix
+ SAFE_RELEASEMAT(matData);
+// timer.LapTime("RELEASEMAT");
+
+ return convert;
+}
+
+//-------------------------------------------------------------------------------
+// Apply the fitted model to an image (coefficients must already be computed).
+IplImage *CMRegression::GenConvert(const IplImage *dataImg)
+{
+ // View the pixel buffer as an N x COLOR matrix (one row per pixel, no copy)
+ CvMat dataMat = cvMat(dataImg->width * dataImg->height, COLOR,
+ (dataImg->depth == IPL_DEPTH_8U ? CV_8UC1 : CV_64F),
+ dataImg->imageData);
+
+ // Regression conversion
+ CvMat *convMat = this->GenConvert(&dataMat);
+
+ // Copy the converted matrix back into a 64-bit float image
+ IplImage *convImg =
+ cvCreateImage(cvGetSize(dataImg), IPL_DEPTH_64F, COLOR);
+ memcpy(convImg->imageData, convMat->data.ptr, convImg->imageSize);
+
+ // Release the work matrix
+ cvReleaseMat(&convMat);
+
+ return convImg;
+}
+// Debug scatter plot of the 4913-sample monitor data set (RGB vs. XYZ values).
+bool CMRegression::DrawGraph4913(CvMat *rgb, CvMat *xyz)
+{
+ IplImage *graph = cvCreateImage(cvSize(DISP_W, DISP_H), IPL_DEPTH_8U, COLOR);
+ cvSet(graph, cvScalarAll(255.0)); // white background
+
+ CvMat *data = cvCreateMat(256, COLOR, CV_64F);
+ for (int b = 0; b < 256; b ++) cvmSet(data, b, 0, pow((double)b, 2.2) / 1000.0);
+ cvCreateMat(256, COLOR, CV_64F); // NOTE(review): result discarded -- leaked allocation or leftover line, confirm
+
+ double yScale = 10.0;
+ double xScale = 2.0;
+ double xMin, xMax; // NOTE(review): assigned below but never read
+
+ for (int z = 0; z < 1; z ++)
+ {
+// double zv = (z > 15 ? 255.0 : z * 16.0);
+ int iRGB = 0;
+ for (int y = 0; y < 17; y ++)
+ {
+// double yv = (y > 15 ? 255.0 : y * 16.0);
+ for (int x = 0; x < 17; x ++)
+ {
+ double x1 = cvmGet(xyz, x + y * 17 + z * (17 * 17), 0);
+ double y1 = cvmGet(rgb, x + y * 17 + z * (17 * 17), iRGB);
+ if (x == 0) xMin = x1;
+ if (x == 16) xMax = x1;
+ int dx1 = 100 + (int)(x1 * xScale);
+ int dy1 = 412 - (int)(y1 * yScale);
+ cvDrawCircle(graph, cvPoint(dx1, dy1), 3,
+ CV_RGB(0, (x > 15 ? 255.0 : x * 16.0), 0), CV_FILLED);
+ }
+
+ //for (int b = 0; b < 256; b ++)
+ //{
+ // cvmSet(data, b, 1, pow((double)gv, 2.2) / 1000.0);
+ // cvmSet(data, b, 2, pow((double)rv, 2.2) / 1000.0);
+ //}
+ //GShowMat(data, "data for graph");
+ //CvMat *est = this->GenConvert(data);
+ //for (int b = 0; b < 255; b ++)
+ //{
+ // double y1 = cvmGet(est, b, iXYZ);
+ // int dx1 = 100 + b * 2;
+ // int dy1 = 412 - (int)(y1 * yScale);
+ // double y2 = cvmGet(est, b + 1, iXYZ);
+ // int dx2 = 100 + (b + 1) * 2;
+ // int dy2 = 412 - (int)(y2 * yScale);
+ // cvDrawLine(graph, cvPoint(dx1, dy1), cvPoint(dx2, dy2), CV_RGB(0, (g>15 ? 255 : g*16), 0));
+ //}
+ //SAFE_RELEASEMAT(est);
+ }
+ GShowImage(graph, 1, "X : r = 0", 0);
+ }
+
+ SAFE_RELEASEIMG(graph);
+ SAFE_RELEASEMAT(data);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Apply the fitted model to a single CvScalar (coefficients must already be computed).
+CvScalar CMRegression::ScalarConvert(const CvScalar data)
+{
+ double dataA[3];
+ dataA[0] = data.val[0];
+ dataA[1] = data.val[1];
+ dataA[2] = data.val[2];
+
+ CvMat dataMat = cvMat(1, COLOR, CV_64F, dataA);
+
+ CvMat *convMat = this->GenConvert(&dataMat); // NOTE(review): convMat is never released -- leak?
+
+ return cvScalar(cvmGet(convMat, 0, 0), cvmGet(convMat, 0, 1), cvmGet(convMat, 0, 2));
+}
diff --git a/reference/Analysis/MRegression.h b/reference/Analysis/MRegression.h
new file mode 100644
index 0000000..4dbe871
--- /dev/null
+++ b/reference/Analysis/MRegression.h
@@ -0,0 +1,27 @@
+#pragma once
+
+#include "stdafx.h"
+// Base class for multiple-regression color conversion; subclasses define the
+// regressor terms via GetX().
+class CMRegression
+{
+protected:
+ int m_Dim; // number of regressor terms (model dimensionality)
+ int m_N; // number of samples
+ CvMat *m_Coef; // coefficient matrix (m_Dim x channels)
+ CvMat *m_Error; // per-channel estimation error (RMSE)
+
+protected:
+ CvMat *GenDataMat(const CvMat *data); // expand samples into the design matrix
+
+public:
+ CMRegression(const int dim);
+ ~CMRegression(void);
+ virtual bool CalcCoef (const CvMat *data, const CvMat *observ);
+ virtual bool CalcError(const CvMat *data, const CvMat *observ);
+ virtual double GetX(const CvMat *mat, const int sample, const int index) = NULL; // pure virtual (MSVC accepts NULL for 0)
+ virtual CvMat *GenConvert(const CvMat *data);
+ virtual IplImage *GenConvert(const IplImage *data);
+ bool DrawGraph4913(CvMat *rgb, CvMat *xyz);
+ CvMat *Coef() { return m_Coef; };
+ CvScalar ScalarConvert(const CvScalar data);
+};
diff --git a/reference/Analysis/MRegressionLinear.cpp b/reference/Analysis/MRegressionLinear.cpp
new file mode 100644
index 0000000..189aab6
--- /dev/null
+++ b/reference/Analysis/MRegressionLinear.cpp
@@ -0,0 +1,206 @@
+#include "MRegressionLinear.h"
+
+//-------------------------------------------------------------------------------
+// Constructor: one polynomial fit per RGB channel, so the dimension is tripled.
+CMRegressionLinear::CMRegressionLinear(void) : CMRegression(LINEARIZE_DIM * 3)
+{
+}
+
+//-------------------------------------------------------------------------------
+// Destructor: nothing beyond the base class.
+CMRegressionLinear::~CMRegressionLinear(void)
+{
+}
+
+//-------------------------------------------------------------------------------
+// Regressor term: powers 0..LINEARIZE_DIM-1 of the channel selected by index.
+double CMRegressionLinear::GetX(const CvMat *mat, const int sample, const int index)
+{
+ CvScalar v = cvGet2D(mat, sample, index / LINEARIZE_DIM);
+ return pow(v.val[0], (double)(index % LINEARIZE_DIM));
+}
+
+//-------------------------------------------------------------------------------
+// Fit the per-channel linearization coefficients from the achromatic patches.
+bool CMRegressionLinear::CalcCoef(const CvMat *macbethX)
+{
+ // Extract the achromatic patches (rows 18..23, i.e. patches 19-24)
+ CvMat rgbX = cvMat(NUM_PATCH_LINEARIZE, macbethX->cols, CV_64FC1, NULL);
+ cvGetRows(macbethX, &rgbX, 18, 24);
+
+ double reflectance[] = REFLECTANCE_LIST;
+ CvMat y = cvMat(NUM_PATCH_LINEARIZE, 1, CV_64FC1, reflectance);
+ CvMat x = cvMat(NUM_PATCH_LINEARIZE, 1, CV_64FC1, NULL);
+ CvMat *coef = cvCreateMat(m_Dim, rgbX.cols, CV_64FC1);
+ cvSetZero(coef);
+
+ // Process each color channel separately
+ m_Dim = LINEARIZE_DIM; // temporarily drop the dimension to one channel's worth
+ for (int col = 0; col < rgbX.cols; col ++)
+ {
+ // Extract one channel
+ cvGetCol(&rgbX, &x, col);
+
+ // Fit coefficients for this channel
+ CALL(CMRegression::CalcCoef(&x, &y));
+
+ // Copy the fitted coefficients into the block-diagonal matrix
+ for(int i = 0; i < m_Coef->rows; i ++)
+ cvmSet(coef, i + col * LINEARIZE_DIM, col, GcvmGet(m_Coef, i, 0));
+
+ cvReleaseData(&x);
+ }
+
+ // Restore the dimension to all three channels
+ m_Dim = LINEARIZE_DIM * 3;
+
+ // Save the assembled coefficient matrix into the member variable
+ SAFE_RELEASEMAT(m_Coef);
+ m_Coef = (CvMat*)cvClone(coef);
+#ifdef SHOW_REGRESSION_COEF
+ GShowMat(m_Coef, "CoefLinearize", "%9.6f");
+
+ // Error computation (debug)
+ double reflectance3[] = REFLECTANCE_LIST3;
+ CvMat y3 = cvMat(NUM_PATCH_LINEARIZE, 3, CV_64FC1, reflectance3);
+ CALL(CMRegression::CalcError(&rgbX, &y3));
+ GShowMat(m_Error, "Estimation error", "%9.6f");
+#endif // SHOW_REGRESSION_COEF
+
+ // Release work matrices
+ SAFE_RELEASEMAT(coef);
+ cvReleaseData(&rgbX);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Evaluate and display the fitting curve over inputs 0..255 (debug aid).
+bool CMRegressionLinear::CalcCurve()
+{
+ // Build the input design matrix for every input level
+ const int Steps = 256;
+ CvMat *fitData = cvCreateMat(Steps, m_Dim, CV_64FC1);
+ for (int sample = 0; sample < Steps; sample ++)
+ {
+ for (int i = 0; i < m_Dim; i++)
+ {
+ cvmSet(fitData, sample, i,
+ pow((double)sample, (double)(i % LINEARIZE_DIM)));
+ }
+ }
+
+ // Compute and display the fitting curve
+ CvMat *fitCurve = cvCreateMat(Steps, m_Coef->cols, CV_64FC1);
+ cvMatMul(fitData, m_Coef, fitCurve);
+ GShowMat(fitCurve, "Fitting Curve", "%7.4f");
+
+ // Release work matrices
+ SAFE_RELEASEMAT(fitData);
+ SAFE_RELEASEMAT(fitCurve);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data; dispatches to the fast hard-coded-cubic path.
+CvMat *CMRegressionLinear::GenConvert(const CvMat *data)
+{
+ // Compute without GetX (fast path)
+ return GenConvertNoGetX(data);
+ // Conventional (base-class) processing:
+// return CMRegression::GenConvert(data);
+ // Per-RGB-channel processing (slow):
+// return GenConvertEachCh(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data, processing each RGB channel separately (slow variant).
+CvMat *CMRegressionLinear::GenConvertEachCh(const CvMat *data)
+{
+ // Temporarily change m_Dim to a single channel's dimension
+ m_Dim = LINEARIZE_DIM;
+
+ // Allocate matrices
+ CvMat *convert = cvCreateMat(data->rows, data->cols, CV_64FC1);
+ CvMat *sCoef = cvCreateMat(LINEARIZE_DIM, 1, CV_64F);
+ CvMat *sConvert = cvCreateMat(data->rows, 1, CV_64F);
+
+ // Process each color channel
+ for (int col = 0; col < COLOR; col ++)
+ {
+ // Extract this channel's coefficients
+ for (int i = 0; i < LINEARIZE_DIM; i ++)
+ cvmSet(sCoef, i, 0, cvmGet(m_Coef, col * LINEARIZE_DIM + i, col));
+
+ // Extract this channel's data
+ CvMat sData = cvMat(data->rows, 1, data->type);
+ cvGetCol(data, &sData, col);
+
+ // Build the design matrix
+ CvMat *matData = GenDataMat(&sData);
+
+ // Compute the estimates
+ cvMatMul(matData, sCoef, sConvert);
+
+ // Write the estimates into the output matrix
+ for (int i = 0; i < data->rows; i ++)
+ cvmSet(convert, i, col, cvmGet(sConvert, i, 0));
+
+ SAFE_RELEASEMAT(matData);
+ }
+
+ // Restore m_Dim
+ m_Dim = LINEARIZE_DIM * 3;
+
+ // Release the work matrices
+ SAFE_RELEASEMAT(sCoef);
+ SAFE_RELEASEMAT(sConvert);
+
+ return convert;
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data without going through GetX (hard-coded cubic, fast path).
+CvMat *CMRegressionLinear::GenConvertNoGetX(const CvMat *data)
+{
+ assert(LINEARIZE_DIM == 4); // the unrolled polynomial below assumes a cubic
+
+ // Allocate the output matrix
+ CvMat *convert = cvCreateMat(data->rows, data->cols, CV_64FC1);
+
+ // Fetch the per-channel polynomial coefficients
+ double b0[COLOR], b1[COLOR], b2[COLOR], b3[COLOR];
+ for (int col = 0; col < data->cols; col ++)
+ {
+ b0[col] = cvmGet(m_Coef, col * LINEARIZE_DIM , col);
+ b1[col] = cvmGet(m_Coef, col * LINEARIZE_DIM + 1, col);
+ b2[col] = cvmGet(m_Coef, col * LINEARIZE_DIM + 2, col);
+ b3[col] = cvmGet(m_Coef, col * LINEARIZE_DIM + 3, col);
+ }
+
+#ifdef _OPENMP
+#pragma omp parallel for schedule(dynamic)
+#endif
+ for (int sample = 0; sample < data->rows; sample ++)
+ {
+ for (int col = 0; col < data->cols; col ++)
+ {
+ CvScalar v = cvGet2D(data, sample, col);
+// double x = cvmGet(data, sample, col);
+ double y = b0[col] + b1[col] * v.val[0]
+ + b2[col] * v.val[0] * v.val[0]
+ + b3[col] * v.val[0] * v.val[0] * v.val[0];
+ cvmSet(convert, sample, col, y);
+ }
+ }
+
+ return convert;
+}
+
+//-------------------------------------------------------------------------------
+// Convert an image (coefficients must already be computed); delegates to the base class.
+IplImage *CMRegressionLinear::GenConvert(const IplImage *dataImg)
+{
+ return CMRegression::GenConvert(dataImg);
+}
diff --git a/reference/Analysis/MRegressionLinear.h b/reference/Analysis/MRegressionLinear.h
new file mode 100644
index 0000000..575ba8d
--- /dev/null
+++ b/reference/Analysis/MRegressionLinear.h
@@ -0,0 +1,25 @@
+#pragma once
+#include "mregression.h"
+
+#define LINEARIZE_DIM (3+1) // cubic fit (3rd degree plus constant) for linearization
+#define NUM_PATCH_LINEARIZE 6 // number of achromatic patches used for linearization
+#define REFLECTANCE_LIST {1.5, 1.05, 0.7, 0.44, 0.23, 0.05} // reference reflectances of the achromatic patches
+#define REFLECTANCE_LIST3 {1.5, 1.5, 1.5, 1.05, 1.05, 1.05, \
+ 0.7, 0.7, 0.7, 0.44, 0.44, 0.44, 0.23, 0.23, 0.23, 0.05, 0.05, 0.05}
+
+// Per-channel polynomial linearization of camera RGB, fitted from the
+// Macbeth chart's achromatic patches.
+class CMRegressionLinear :
+ public CMRegression
+{
+private:
+ virtual CvMat *GenConvertEachCh(const CvMat *data); // per-channel variant (slow)
+ virtual CvMat *GenConvertNoGetX(const CvMat *data); // unrolled-cubic variant (fast)
+
+public:
+ CMRegressionLinear(void);
+ ~CMRegressionLinear(void);
+ bool CalcCoef(const CvMat *rgbX);
+ double GetX(const CvMat *mat, const int sample, const int index);
+ bool CalcCurve(); // display the fitting curve (debug)
+ virtual CvMat *GenConvert(const CvMat *data);
+ virtual IplImage *GenConvert(const IplImage *data);
+};
diff --git a/reference/Analysis/MRegressionRGB.cpp b/reference/Analysis/MRegressionRGB.cpp
new file mode 100644
index 0000000..35144e2
--- /dev/null
+++ b/reference/Analysis/MRegressionRGB.cpp
@@ -0,0 +1,107 @@
+#include "MRegressionRGB.h"
+
+//-------------------------------------------------------------------------------
+// Constructor: dimension chosen by the caller (number of polynomial terms used).
+CMRegressionRGB::CMRegressionRGB(const int dim) : CMRegression(dim)
+{
+}
+
+//-------------------------------------------------------------------------------
+// Destructor: nothing beyond the base class.
+CMRegressionRGB::~CMRegressionRGB(void)
+{
+}
+
+//-------------------------------------------------------------------------------
+// Regressor term: polynomial cross-terms of the three channels, up to cubic.
+double CMRegressionRGB::GetX(const CvMat *mat, const int sample, const int index)
+{
+ CvScalar v0 = cvGet2D(mat, sample, 0);
+ CvScalar v1 = cvGet2D(mat, sample, 1);
+ CvScalar v2 = cvGet2D(mat, sample, 2);
+
+ switch (index)
+ {
+ case 0: return 1.0;
+ case 1: return v0.val[0];
+ case 2: return v1.val[0];
+ case 3: return v2.val[0];
+ case 4: return v0.val[0] * v1.val[0];
+ case 5: return v1.val[0] * v2.val[0];
+ case 6: return v2.val[0] * v0.val[0];
+ case 7: return v0.val[0] * v0.val[0];
+ case 8: return v1.val[0] * v1.val[0];
+ case 9: return v2.val[0] * v2.val[0];
+ case 10: return v0.val[0] * v1.val[0] * v2.val[0];
+ case 11: return v0.val[0] * v0.val[0] * v0.val[0];
+ case 12: return v1.val[0] * v1.val[0] * v1.val[0];
+ case 13: return v2.val[0] * v2.val[0] * v2.val[0];
+ }
+
+ return 0; // index out of range: contribute nothing
+}
+
+//-------------------------------------------------------------------------------
+// Fit the coefficients via the base class; optionally display matrix and error.
+bool CMRegressionRGB::CalcCoef(const CvMat *data, const CvMat *observ)
+{
+ CALL(CMRegression::CalcCoef(data, observ));
+
+#ifdef SHOW_REGRESSION_COEF
+ GShowMat(m_Coef, "Conversion Matrix", "%8.3f");
+
+ CALL(CMRegression::CalcError(data, observ));
+ GShowMat(m_Error, "Estimation error", "%9.5f");
+#endif // SHOW_REGRESSION_COEF
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data; uses the base-class (design-matrix) path.
+CvMat *CMRegressionRGB::GenConvert(const CvMat *data)
+{
+ // Conventional processing
+ return CMRegression::GenConvert(data);
+ // Variant that avoids GenDataMat (slower):
+ //return GenConvertNoGetX(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data without building the design matrix (slow variant).
+CvMat *CMRegressionRGB::GenConvertNoGenMat(const CvMat *data)
+{
+ // Allocate the output matrix
+ CvMat *convert = cvCreateMat(data->rows, data->cols, CV_64FC1);
+
+ // Fetch the coefficients into plain arrays
+ double *b[COLOR];
+ for (int col = 0; col < data->cols; col ++)
+ {
+ b[col] = new double [m_Dim]; // NOTE(review): never deleted -- leaks on every call
+ for (int i = 0; i < m_Dim; i ++) b[col][i] = cvmGet(m_Coef, i, col);
+ }
+
+#ifdef _OPENMP
+#pragma omp parallel for schedule(dynamic)
+#endif
+ for (int sample = 0; sample < data->rows; sample ++)
+ {
+ for (int col = 0; col < data->cols; col ++)
+ {
+ double y = 0;
+ for (int i = 0; i < m_Dim; i ++)
+ y += GetX(data, sample, i) * b[col][i];
+ cvmSet(convert, sample, col, y);
+ }
+ }
+
+ return convert;
+}
+
+//-------------------------------------------------------------------------------
+// Convert an image (coefficients must already be computed); delegates to the base class.
+IplImage *CMRegressionRGB::GenConvert(const IplImage *dataImg)
+{
+ return CMRegression::GenConvert(dataImg);
+}
diff --git a/reference/Analysis/MRegressionRGB.h b/reference/Analysis/MRegressionRGB.h
new file mode 100644
index 0000000..20d2476
--- /dev/null
+++ b/reference/Analysis/MRegressionRGB.h
@@ -0,0 +1,17 @@
+#pragma once
+#include "mregression.h"
+// Polynomial RGB cross-term regression (terms up to cubic, see GetX).
+class CMRegressionRGB :
+ public CMRegression
+{
+private:
+ virtual CvMat *GenConvertNoGenMat(const CvMat *data); // slow variant that skips the design matrix
+
+public:
+ CMRegressionRGB(const int dim);
+ ~CMRegressionRGB(void);
+ double GetX(const CvMat *mat, const int sample, const int index);
+ bool CalcCoef(const CvMat *data, const CvMat *observ);
+ virtual CvMat *GenConvert(const CvMat *data);
+ virtual IplImage *GenConvert(const IplImage *data);
+};
diff --git a/reference/Analysis/ProcessList.cpp b/reference/Analysis/ProcessList.cpp
new file mode 100644
index 0000000..7416560
--- /dev/null
+++ b/reference/Analysis/ProcessList.cpp
@@ -0,0 +1,187 @@
+#include "ProcessList.h"
+
+//-------------------------------------------------------------------------------
+// Constructor: the parameter array is allocated on demand by ReadFile.
+CProcessList::CProcessList(void)
+{
+ m_Param = NULL;
+}
+
+//-------------------------------------------------------------------------------
+// Destructor: release the parameter array.
+CProcessList::~CProcessList(void)
+{
+ SAFE_DELETEA(m_Param);
+}
+
+//-------------------------------------------------------------------------------
+// Load the process list and reset the iteration cursor to the first item.
+bool CProcessList::Init()
+{
+ if (!this->ReadFile()) return false;
+ m_CurDate = 0;
+ m_CurTOD = 0;
+ m_CurSubject = 0;
+ m_CurCount = 0;
+ m_FirstTOD = true;
+ m_EndList = false;
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Advance to the next work item: count, then subject, then time-of-day, then date.
+bool CProcessList::MoveNext()
+{
+ if (m_EndList) return false;
+ m_FirstTOD = false;
+
+ if (++ m_CurCount < m_Param[m_CurDate].count ) return true;
+ m_CurCount = 0;
+
+ if (++ m_CurSubject < m_Param[m_CurDate].subject) return true;
+ m_CurSubject = 0;
+ m_FirstTOD = true; // first item of a new time-of-day block
+
+ if (++ m_CurTOD < m_Param[m_CurDate].tod ) return true;
+ m_CurTOD = 0;
+
+ if (++ m_CurDate < m_NumParam ) return true;
+ m_EndList = true; // exhausted the whole list
+
+ return false;
+}
+
+//-------------------------------------------------------------------------------
+// Build the directory path of the current frame images into pathStr.
+bool CProcessList::GetFrameDir(char *pathStr)
+{
+ const char *TOD[3] = TOD_STRING;
+
+ if (m_Param[m_CurDate].dateStr[4] == '-') // two directory layouts keyed on the date-string format
+ {
+ sprintf_s(pathStr, PATH_LEN, "%s\\%s\\%s\\S%02d_C%02d",
+ m_Param[m_CurDate].baseDir, m_Param[m_CurDate].dateStr,
+ TOD[m_CurTOD], m_CurSubject + 1, m_CurCount + 1);
+ }
+ else
+ {
+ sprintf_s(pathStr, PATH_LEN, "%s\\%s\\%s%d-%d",
+ m_Param[m_CurDate].baseDir, m_Param[m_CurDate].dateStr,
+ TOD[m_CurTOD], m_CurSubject + 1, m_CurCount + 1);
+ }
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Build the directory path of the current reference (calibration) images into pathStr.
+bool CProcessList::GetRefDir(char *pathStr)
+{
+ const char *TOD[3] = TOD_STRING;
+
+
+ if (m_Param[m_CurDate].dateStr[4] == '-') // same layout switch as GetFrameDir
+ {
+ sprintf_s(pathStr, PATH_LEN, "%s\\%s\\%s",
+ m_Param[m_CurDate].baseDir, m_Param[m_CurDate].dateStr, TOD[m_CurTOD]);
+ }
+ else
+ {
+ sprintf_s(pathStr, PATH_LEN, "%s\\%s\\macbeth_%s",
+ m_Param[m_CurDate].baseDir, m_Param[m_CurDate].dateStr, TOD[m_CurTOD]);
+ }
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Read the process-list file in two passes: first count entries, then load them.
+bool CProcessList::ReadFile()
+{
+ // Open the process-list file
+ FILE *fp = NULL;
+ fopen_s(&fp, PROCESS_LIST, "r");
+ if (!fp) ERROR_RET("Can't open Process List file.");
+
+ // First pass: count parameter lines
+ char buffer[1024];
+ int num = 0;
+ while (!feof(fp)) // NOTE(review): feof-controlled loop can re-process the last line when fgets fails -- confirm
+ {
+ fgets(buffer, sizeof(buffer), fp);
+
+ // Comment and command lines are not counted
+ if (!strncmp(buffer, "//", 2)) continue;
+ if (!strncmp(buffer, "BASEDIR=", 8)) continue;
+ if (!strncmp(buffer, "IMGFILE=", 8)) continue;
+ if (!strncmp(buffer, "REFFILE=", 8)) continue;
+ if (!strncmp(buffer, "END", 3)) break;
+
+ Param p;
+ sscanf_s(buffer, "%s %d %d %d",
+ p.dateStr, DATE_STRING, &p.tod, &p.subject, &p.count);
+ if (p.tod > 0) num ++;
+ }
+ if (num < 1) ERROR_RET("No parameter data is found in file.");
+ m_NumParam = num;
+// SPRLOG "Parameter list: %d", m_numParam); WriteLog(true);
+
+ // Second pass: load every entry
+ SAFE_DELETEA(m_Param);
+ m_Param = new Param [m_NumParam];
+ int i = 0;
+ char curBaseDir[PATH_LEN] = DEFAULT_BASEDIR;
+ char curImgFile[PATH_LEN] = DEFAULT_IMGFILE;
+ char curRefFile[PATH_LEN] = DEFAULT_REFFILE;
+ fseek(fp, 0, SEEK_SET);
+ while (!feof(fp))
+ {
+ fgets(buffer, sizeof(buffer), fp);
+
+ if (!strncmp(buffer, "//", 2)) continue; // comment line
+ if (!strncmp(buffer, "END", 3)) break; // end-of-list command
+
+ // Directory/file override commands
+ if ( !strncmp(buffer, "BASEDIR=", 8)
+ || !strncmp(buffer, "IMGFILE=", 8)
+ || !strncmp(buffer, "REFFILE=", 8))
+ {
+ char *pt = buffer + 8;
+ GTrimStr(&pt);
+ switch (*buffer) // dispatch on the first letter of the command
+ {
+ case 'B': strcpy_s(curBaseDir, PATH_LEN, pt); break;
+ case 'I': strcpy_s(curImgFile, PATH_LEN, pt); break;
+ case 'R': strcpy_s(curRefFile, PATH_LEN, pt); break;
+ }
+ continue;
+ }
+
+ sscanf_s(buffer, "%s %d %d %d",
+ m_Param[i].dateStr, DATE_STRING,
+ &m_Param[i].tod, &m_Param[i].subject, &m_Param[i].count);
+ strcpy_s(m_Param[i].baseDir, PATH_LEN, curBaseDir);
+ strcpy_s(m_Param[i].imgFile, PATH_LEN, curImgFile);
+ strcpy_s(m_Param[i].refFile, PATH_LEN, curRefFile);
+ if (m_Param[i].tod > 0) i ++;
+ }
+
+ // Close the file
+ fclose(fp);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Build a CSV-formatted parameter string: date, time-of-day, subject, count.
+bool CProcessList::MakeParamStr(char *buffer, int len)
+{
+ const char *TOD[3] = TOD_STRING;
+
+ sprintf_s(buffer, len, "%s, %s, %d, %d",
+ m_Param[m_CurDate].dateStr, TOD[m_CurTOD],
+ m_CurSubject + 1, m_CurCount + 1);
+
+ return true;
+}
diff --git a/reference/Analysis/ProcessList.h b/reference/Analysis/ProcessList.h
new file mode 100644
index 0000000..65a4614
--- /dev/null
+++ b/reference/Analysis/ProcessList.h
@@ -0,0 +1,52 @@
+#pragma once
+
+#include "stdafx.h"
+
+#define PROCESS_LIST "ProcList.txt" // file that describes the processing list
+#define DATE_STRING 64 // length of the date-string buffer
+#define TOD_STRING {"morning", "daytime", "evening"}
+#define DEFAULT_BASEDIR "."
+#define DEFAULT_IMGFILE "snapshot%d.bmp"
+#define DEFAULT_REFFILE "snapshot0.bmp"
+
+// One entry of the processing list: where the images live plus how many
+// time-of-day / subject / count combinations exist for that date.
+struct Param
+{
+ char baseDir[PATH_LEN];
+ char imgFile[PATH_LEN];
+ char refFile[PATH_LEN];
+ char dateStr[DATE_STRING];
+ int tod;
+ int subject;
+ int count;
+ Param() // constructor (zero-initialize)
+ { *baseDir = *imgFile = *refFile = *dateStr = '\0'; tod = subject = count = 0; }
+};
+
+// Iterates over every (date, time-of-day, subject, count) item of ProcList.txt.
+class CProcessList
+{
+private:
+ Param *m_Param; // loaded entries
+ int m_NumParam; // number of entries
+ int m_CurDate; // iteration cursor: date index
+ int m_CurTOD; // iteration cursor: time-of-day index
+ int m_CurSubject; // iteration cursor: subject index
+ int m_CurCount; // iteration cursor: count index
+ bool m_FirstTOD; // true at the first item of a time-of-day block
+ bool m_EndList; // true once the list is exhausted
+
+private:
+ bool ReadFile();
+
+public:
+ CProcessList(void);
+ ~CProcessList(void);
+ bool Init();
+ bool MoveNext();
+ bool GetFrameDir(char *pathStr);
+ bool GetRefDir(char *pathStr);
+ bool MakeParamStr(char *buffer, int len);
+
+ bool IsFirstTOD() { return m_FirstTOD; };
+ char *CurrentImgFile() { return m_Param[m_CurDate].imgFile; };
+ char *CurrentRefFile() { return m_Param[m_CurDate].refFile; };
+};
diff --git a/reference/Analysis/ReferenceProc.cpp b/reference/Analysis/ReferenceProc.cpp
new file mode 100644
index 0000000..6ad7c6e
--- /dev/null
+++ b/reference/Analysis/ReferenceProc.cpp
@@ -0,0 +1,237 @@
+#include "ReferenceProc.h"
+
+//-------------------------------------------------------------------------------
+// Constructor: create the detectors and regressors; result matrices start empty.
+CReferenceProc::CReferenceProc(void)
+{
+ m_Casmatch = new CDetectCasmatch;
+ m_Macbeth = new CDetectMacbeth;
+ m_ccmCL = new CMRegressionLinear;
+ m_ccmLX = new CMRegressionRGB (LIN_XYZ_CONVERT_DIM);
+ m_ccmXD = new CMRegressionRGB (XYZ_DISP_CONVERT_DIM);
+ m_crgbRC = NULL;
+ m_lrgbRC = NULL;
+ m_crgbRM = NULL;
+ m_lrgbRM = NULL;
+ m_xyzM = NULL;
+}
+
+//-------------------------------------------------------------------------------
+// Destructor: release the detectors, regressors, and result matrices.
+CReferenceProc::~CReferenceProc(void)
+{
+ SAFE_DELETE(m_Casmatch);
+ SAFE_DELETE(m_Macbeth);
+ SAFE_DELETE(m_ccmCL);
+ SAFE_DELETE(m_ccmLX);
+ SAFE_DELETE(m_ccmXD);
+ SAFE_RELEASEMAT(m_crgbRC);
+ SAFE_RELEASEMAT(m_lrgbRC);
+ SAFE_RELEASEMAT(m_crgbRM);
+ SAFE_RELEASEMAT(m_lrgbRM);
+ SAFE_RELEASEMAT(m_xyzM);
+}
+
+//-------------------------------------------------------------------------------
+// Initialize detectors, load the Macbeth XYZ reference, and fit the XYZ->display matrix.
+bool CReferenceProc::Init()
+{
+ CALL(m_Casmatch->Init());
+ CALL(m_Macbeth->Init());
+
+ SAFE_RELEASEMAT(m_xyzM);
+#ifdef XYZ_FROM_SPECTRAL_DATA
+ m_xyzM = this->GenMacbethXYZSpect(); // compute reference XYZ from spectral data
+#else
+ m_xyzM = GLoadCsv(MEASURED_MACBETH_XYZ, 24, 3, 2, 2); // or load measured XYZ from CSV
+#endif
+ CALL(m_xyzM);
+
+ // XYZ -> display-RGB conversion matrix, fitted from 4913 measured samples
+ CvMat *monitorRGB = GLoadCsv(MONITOR_RGB, 4913, 3, 2, 1);
+ CALL(monitorRGB);
+ CvMat *monitorXYZ = GLoadCsv(MONITOR_XYZ, 4913, 3, 2, 1);
+ CALL(monitorXYZ);
+#ifdef SHOW_REGRESSION_COEF
+ printf("ccmXD ");
+#endif // SHOW_REGRESSION_COEF
+ CALL(m_ccmXD->CalcCoef(monitorXYZ, monitorRGB));
+ CALL(m_ccmXD->CalcError(monitorXYZ, monitorRGB));
+// m_ccmXD->DrawGraph4913(monitorRGB, monitorXYZ);
+ SAFE_RELEASEMAT(monitorRGB);
+ SAFE_RELEASEMAT(monitorXYZ);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Compute the Macbeth patches' XYZ values from spectral data.
+CvMat *CReferenceProc::GenMacbethXYZSpect()
+{
+ // Load spectral data: light source, patch reflectances, color-matching functions
+ CvMat *light = GLoadCsv(LIGHTSOURCE_SPECTRAL, SPECT_DIM, 1, 2, 2);
+ CALL(light);
+ CvMat *macbeth = GLoadCsv(MACBETH_SPECTRAL, SPECT_DIM, 24, 2, 2);
+ CALL(macbeth);
+ CvMat *cmf = GLoadCsv(CMF_SPECTRAL, SPECT_DIM, 3, 2, 2);
+ CALL(cmf);
+
+ // Row vector of 24 ones, used to replicate the light-source spectrum
+ double value[24] = {0.0};
+ CvMat ones = cvMat(1, 24, CV_64F, value);
+ cvAddS(&ones, cvScalar(1.0), &ones);
+
+ // Replicate the light-source spectrum into 24 columns
+ CvMat *light24 = cvCreateMat(SPECT_DIM, 24, CV_64F);
+ cvMatMul(light, &ones, light24);
+
+ // Multiply the Macbeth spectral reflectances by the light source
+ CvMat *refMacbeth = cvCreateMat(SPECT_DIM, 24, CV_64F);
+ cvMul(macbeth, light24, refMacbeth);
+
+ // Normalization constant k so that the perfect white has Y = 100
+ CvMat *xyzWhite = cvCreateMat(1, 3, CV_64F);
+ cvGEMM(light, cmf, 1.0, NULL, 1.0, xyzWhite, CV_GEMM_A_T);
+ double k = 100.0 / cvmGet(xyzWhite, 0, 1);
+
+ CvMat *xyzM = cvCreateMat(24, 3, CV_64F);
+ cvGEMM(refMacbeth, cmf, k, NULL, 1.0, xyzM, CV_GEMM_A_T);
+ GShowMat(xyzM, "xyzM", "%8.4f");
+
+ SAFE_RELEASEMAT(light);
+ SAFE_RELEASEMAT(macbeth);
+ SAFE_RELEASEMAT(cmf);
+ SAFE_RELEASEMAT(light24);
+ SAFE_RELEASEMAT(refMacbeth);
+ SAFE_RELEASEMAT(xyzWhite);
+
+ return xyzM;
+}
+
+//-------------------------------------------------------------------------------
+// Load a reference image and fit every conversion matrix from its charts.
+bool CReferenceProc::CalcMatrix(const char *path, const char *file)
+{
+ // Build the reference image path
+ char filename[PATH_LEN];
+ sprintf_s(filename, PATH_LEN, "%s\\%s", path, file);
+
+ // Detect the Macbeth chart and sample its patch colors
+ CALL(m_Macbeth->SetImage(filename));
+ CALL(m_Macbeth->Detect());
+ CALL(m_Macbeth->CalcPatchColor(&m_crgbRM));
+#ifdef SHOW_RGB_VALUES
+ GShowMat(m_crgbRM, "crgbRM");
+#endif // SHOW_RGB_VALUES
+
+ // Fit the linearization coefficients from the Macbeth patches
+ m_ccmCL->CalcCoef(m_crgbRM);
+ //m_ccmCL->CalcCurve(); // dump the fitting curve (debug)
+
+ // Detect the Casmatch chart and sample its patch colors
+ CALL(m_Casmatch->SetImage(filename));
+ CALL(m_Casmatch->Detect());
+ CALL(m_Casmatch->CalcPatchColor9(&m_crgbRC));
+#ifdef SHOW_RGB_VALUES
+ GShowMat(m_crgbRC, "crgbRC");
+#endif // SHOW_RGB_VALUES
+
+ // Linearize the patch colors
+ SAFE_RELEASEMAT(m_lrgbRM);
+ m_lrgbRM = m_ccmCL->GenConvert(m_crgbRM);
+ SAFE_RELEASEMAT(m_lrgbRC);
+ m_lrgbRC = m_ccmCL->GenConvert(m_crgbRC);
+#ifdef SHOW_RGB_VALUES
+ GShowMat(m_lrgbRM, "lrgbRM");
+ GShowMat(m_lrgbRC, "lrgbRC");
+#endif // SHOW_RGB_VALUES
+
+ // Fit the linear-RGB -> XYZ conversion coefficients
+#ifdef SHOW_REGRESSION_COEF
+ printf("ccmLX ");
+#endif // SHOW_REGRESSION_COEF
+ m_ccmLX->CalcCoef(m_lrgbRM, m_xyzM);
+
+#ifdef SHOW_CV_IMAGE
+ // Draw the detection results
+ IplImage *patched = m_Macbeth->GenPatchedImage();
+ CALL(m_Casmatch->DrawROI(patched));
+ GShowImage(patched, 1, "Detection result");
+ SAFE_RELEASEIMG(patched);
+#endif // SHOW_CV_IMAGE
+
+#ifdef SHOW_CALIBRATED_MACBETH
+ // Show the color-corrected Macbeth image
+ IplImage *imgCam = cvLoadImage(filename);
+ IplImage *imgLin = m_ccmCL->GenConvert(imgCam);
+ IplImage *imgXYZ = m_ccmLX->GenConvert(imgLin);
+ IplImage *imgXD = m_ccmXD->GenConvert(imgXYZ);
+ IplImage *imgDisp = GGenAddGamma(imgXD); // NOTE(review): imgCam..imgDisp are never released in this debug path
+ GShowImage(imgDisp, 2, "Calibrated Macbeth");
+#endif // SHOW_CALIBRATED_MACBETH
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Linearize matrix data (camera RGB -> linear RGB).
+CvMat *CReferenceProc::GenLinearize(const CvMat *data)
+{
+ return m_ccmCL->GenConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Linearize image data (camera RGB -> linear RGB).
+IplImage *CReferenceProc::GenLinearize(const IplImage *data)
+{
+ return m_ccmCL->GenConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data to XYZ (linear RGB -> XYZ).
+CvMat *CReferenceProc::GenConvertXYZ(const CvMat *data)
+{
+ return m_ccmLX->GenConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert image data to XYZ (linear RGB -> XYZ).
+IplImage *CReferenceProc::GenConvertXYZ(const IplImage *data)
+{
+ return m_ccmLX->GenConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert matrix data to monitor colors (XYZ -> display RGB).
+CvMat *CReferenceProc::GenConvertDisp(const CvMat *data)
+{
+ return m_ccmXD->GenConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert image data to monitor colors (XYZ -> display RGB).
+IplImage *CReferenceProc::GenConvertDisp(const IplImage *data)
+{
+ return m_ccmXD->GenConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Linearize a single scalar value.
+CvScalar CReferenceProc::ScalarLinearize(const CvScalar data)
+{
+ return m_ccmCL->ScalarConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert a single scalar value to XYZ.
+CvScalar CReferenceProc::ScalarConvertXYZ(const CvScalar data)
+{
+ return m_ccmLX->ScalarConvert(data);
+}
+
+//-------------------------------------------------------------------------------
+// Convert a single scalar value to monitor colors.
+CvScalar CReferenceProc::ScalarConvertDisp(const CvScalar data)
+{
+ return m_ccmXD->ScalarConvert(data);
+}
diff --git a/reference/Analysis/ReferenceProc.h b/reference/Analysis/ReferenceProc.h
new file mode 100644
index 0000000..cc00047
--- /dev/null
+++ b/reference/Analysis/ReferenceProc.h
@@ -0,0 +1,50 @@
+#pragma once
+
+#include "stdafx.h"
+#include "DetectCasmatch.h"
+#include "DetectMacbeth.h"
+#include "MRegressionLinear.h"
+#include "MRegressionRGB.h"
+
+// Spectral / measurement data files used to build the colour-conversion models.
+#define LIGHTSOURCE_SPECTRAL "Data\\ArtificalSunlightSpectrum.csv"
+#define MACBETH_SPECTRAL "Data\\MacbethSpectrum.csv"
+#define CMF_SPECTRAL "Data\\ColorMatchFuncSpectrum.csv"
+#define MEASURED_MACBETH_XYZ "Data\\MeasuredMacbethXYZ.csv"
+#define MONITOR_RGB "Data\\MonitorRGB4913DG.csv"
+#define MONITOR_XYZ "Data\\MonitorXYZ4913N.csv"
+#define SPECT_DIM 61
+//#define XYZ_FROM_SPECTRAL_DATA
+// NOTE(review): presumably the polynomial order/dimension of the two RGB
+// regressions -- confirm against CMRegressionRGB.
+#define LIN_XYZ_CONVERT_DIM 4
+#define XYZ_DISP_CONVERT_DIM 4
+
+// Colour-reference processing: detects the reference charts in a frame and
+// builds/applies the camera -> linear -> XYZ -> display conversion chain.
+class CReferenceProc
+{
+private:
+ CDetectCasmatch *m_Casmatch; // Casmatch chart detector
+ CDetectMacbeth *m_Macbeth; // Macbeth chart detector
+ CMRegressionLinear *m_ccmCL; // camera RGB -> linear RGB model
+ CMRegressionRGB *m_ccmLX; // linear RGB -> XYZ model
+ CMRegressionRGB *m_ccmXD; // XYZ -> monitor RGB model
+ CvMat *m_crgbRC; // Casmatch patch colours (camera RGB)
+ CvMat *m_lrgbRC; // Casmatch patch colours (linearized)
+ CvMat *m_crgbRM; // Macbeth patch colours (camera RGB)
+ CvMat *m_lrgbRM; // Macbeth patch colours (linearized)
+ CvMat *m_xyzM; // Macbeth patch colours (XYZ)
+
+public:
+ CReferenceProc(void);
+ ~CReferenceProc(void);
+ bool Init();
+ bool CalcMatrix(const char *path, const char *file);
+ CvMat *GenMacbethXYZSpect();
+ CvMat *GenLinearize (const CvMat *data); // camera -> linear
+ IplImage *GenLinearize (const IplImage *data);
+ CvMat *GenConvertXYZ(const CvMat *data); // linear -> XYZ
+ IplImage *GenConvertXYZ(const IplImage *data);
+ CvMat *GenConvertDisp(const CvMat *data); // XYZ -> monitor RGB
+ IplImage *GenConvertDisp(const IplImage *data);
+ CvMat *lrgbRC() { return m_lrgbRC; }; // accessor: linearized Casmatch colours
+ CvScalar ScalarLinearize(const CvScalar data);
+ CvScalar ScalarConvertXYZ(const CvScalar data);
+ CvScalar ScalarConvertDisp(const CvScalar data);
+};
diff --git a/reference/Analysis/TongueAnalysis.cpp b/reference/Analysis/TongueAnalysis.cpp
new file mode 100644
index 0000000..0f08c17
--- /dev/null
+++ b/reference/Analysis/TongueAnalysis.cpp
@@ -0,0 +1,18 @@
+// TongueAnalysis.cpp : �R���\�[�� �A�v���P�[�V�����̃G���g�� �|�C���g���`���܂��B
+//
+
+#include "stdafx.h"
+#include "Analysis.h"
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+ CAnalysis analysis;
+ if (!analysis.GlobalProc())
+ {
+ getchar();
+ return 1;
+ }
+
+ return 0;
+}
diff --git a/reference/Analysis/Tracking.cpp b/reference/Analysis/Tracking.cpp
new file mode 100644
index 0000000..0beecd8
--- /dev/null
+++ b/reference/Analysis/Tracking.cpp
@@ -0,0 +1,295 @@
+#include "Tracking.h"
+
+//-------------------------------------------------------------------------------
+// Constructor: null out every owned pointer so Release() is safe before Init().
+CTracking::CTracking(void)
+{
+ m_Point[0] = NULL;
+ m_Point[1] = NULL;
+ m_PointInit = NULL;
+ m_Gray = NULL;
+ m_GrayPre = NULL;
+ m_Pyramid = NULL;
+ m_PyramidPre = NULL;
+ m_Mask1 = NULL;
+ m_Mask2 = NULL;
+ m_Mask3 = NULL;
+ m_Mask4 = NULL;
+ m_Status = NULL;
+}
+
+//-------------------------------------------------------------------------------
+// Destructor: free all tracking buffers.
+CTracking::~CTracking(void)
+{
+ this->Release();
+}
+
+//-------------------------------------------------------------------------------
+// ���������
+void CTracking::Release(void)
+{
+ SAFE_DELETEA(m_Point[0]);
+ SAFE_DELETEA(m_Point[1]);
+ SAFE_DELETEA(m_PointInit);
+ SAFE_RELEASEIMG(m_Gray);
+ SAFE_RELEASEIMG(m_GrayPre);
+ SAFE_RELEASEIMG(m_Pyramid);
+ SAFE_RELEASEIMG(m_PyramidPre);
+ SAFE_RELEASEIMG(m_Mask1);
+ SAFE_RELEASEIMG(m_Mask2);
+ SAFE_RELEASEIMG(m_Mask3);
+ SAFE_RELEASEIMG(m_Mask4);
+ SAFE_DELETEA(m_Status)
+}
+
+//-------------------------------------------------------------------------------
+// Initialize tracking.
+//
+// point    : initial tracking point positions (copied; refined below)
+// numPoint : number of points
+// img      : first frame (assumed BGR colour -- converted to grayscale here)
+//
+// Allocates all per-frame working buffers sized to the frame, then refines the
+// given corner positions to sub-pixel accuracy on the first grayscale frame.
+bool CTracking::Init(const CvPoint *point, const int numPoint,
+ const IplImage *img)
+{
+ this->Release(); // drop any buffers from a previous run
+
+ m_NumPoint = numPoint;
+ m_Criteria = cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03);
+ m_PyrFlag = 0;
+ m_TotalMovement = 0;
+
+ m_Point[0] = new CvPoint2D32f [numPoint];
+ m_Point[1] = new CvPoint2D32f [numPoint];
+ m_PointInit = new CvPoint [numPoint];
+ m_Gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_GrayPre = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Pyramid = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_PyramidPre = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask1 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask2 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask3 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask4 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Status = new char [numPoint];
+
+ // keep the initial layout for reference drawing, and seed the LK tracker
+ for (int i = 0; i < numPoint; i++)
+ {
+ m_PointInit[i] = point[i];
+ m_Point[0][i] = cvPointTo32f(point[i]);
+ }
+
+ cvCvtColor(img, m_GrayPre, CV_BGR2GRAY);
+
+ // refine the starting points to sub-pixel corner positions
+ cvFindCornerSubPix(m_GrayPre, m_Point[0], numPoint,
+ cvSize(TRACK_WIN_SIZE, TRACK_WIN_SIZE), cvSize(-1, -1),
+ m_Criteria);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Per-frame step: run pyramidal Lucas-Kanade optical flow from the previous
+// frame, accumulate how far the points moved, validate the point layout, and
+// rebuild the ROI masks when the layout is still valid.
+// Returns false as soon as the tracker loses any point.
+bool CTracking::Frame(const IplImage *img)
+{
+ // convert to grayscale (assumes a BGR colour input)
+ cvCvtColor(img, m_Gray, CV_BGR2GRAY);
+
+ // pyramidal LK optical flow: m_Point[0] (previous) -> m_Point[1] (current)
+ cvCalcOpticalFlowPyrLK(m_GrayPre, m_Gray, m_PyramidPre, m_Pyramid,
+ m_Point[0], m_Point[1], m_NumPoint,
+ cvSize(TRACK_WIN_SIZE, TRACK_WIN_SIZE),
+ 3, m_Status, 0, m_Criteria, m_PyrFlag);
+
+ // from now on reuse the pyramid already computed for the previous frame
+ m_PyrFlag |= CV_LKFLOW_PYR_A_READY;
+
+ // swap buffers so "previous" now refers to this frame
+ IplImage *swap;
+ CV_SWAP(m_Gray, m_GrayPre, swap);
+ CV_SWAP(m_Pyramid, m_PyramidPre, swap);
+ CvPoint2D32f *swapP;
+ CV_SWAP(m_Point[0], m_Point[1], swapP);
+
+ // accumulate per-point displacement into the running total
+ // NB: after the loop m_Movement holds only the LAST point's displacement
+ for (int i = 0; i < m_NumPoint; i ++)
+ {
+ m_Movement = sqrt(
+ pow((double)(m_Point[0][i].x - m_Point[1][i].x), 2.0) +
+ pow((double)(m_Point[0][i].y - m_Point[1][i].y), 2.0));
+ m_TotalMovement += m_Movement;
+ }
+
+ // fail if the flow lost any point; otherwise validate geometry and
+ // recompute the ROI masks only while the layout is plausible
+ for (int i = 0; i < m_NumPoint; i ++) if (!m_Status[i]) return false;
+ this->ValidatePoints();
+ if (m_Validate) this->CalcROIMask();
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Draw the current tracking points (filled circles, blue intensity encodes the
+// point index) connected as a closed polygon, plus the initial polygon in green.
+bool CTracking::DrawPoints(IplImage *img)
+{
+ // current points and their connecting polygon (blue)
+ for (int i = 0; i < m_NumPoint; i++)
+ {
+ cvCircle(img, cvPointFrom32f(m_Point[0][i]), 5,
+ CV_RGB(0, 0, i*60), CV_FILLED);
+ cvLine(img, cvPointFrom32f(m_Point[0][i]),
+ cvPointFrom32f(m_Point[0][(i+1) % m_NumPoint]), CV_RGB(0, 0, 255));
+ }
+
+ // initial point polygon (green), for visual drift comparison
+ for (int i = 0; i < m_NumPoint; i++)
+ {
+ cvLine(img, m_PointInit[i],
+ m_PointInit[(i+1) % m_NumPoint], CV_RGB(0, 255, 0));
+ }
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// ROI��`��
+bool CTracking::DrawROI(IplImage *img)
+{
+ cvSet(img, CV_RGB(255, 0, 0), m_Mask1);
+ cvSet(img, CV_RGB(0, 255, 0), m_Mask2);
+ cvSet(img, CV_RGB(0, 0, 255), m_Mask3);
+ cvSet(img, CV_RGB(244, 163, 46), m_Mask4);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// Sanity-check the tracked layout: the index pairs below encode the expected
+// strict ordering of x coordinates (conX) and y coordinates (conY) among the
+// five points; any violation clears m_Validate.
+// NOTE(review): conY contains the pair {4,1} twice -- possibly a typo for a
+// different pair in the original; confirm against the intended geometry.
+void CTracking::ValidatePoints()
+{
+ int conX[8][2] = {{0,2},{0,3},{0,4},{1,2},{1,3},{1,4},{2,3},{2,4}};
+ int conY[8][2] = {{0,1},{0,2},{0,3},{1,2},{3,2},{4,1},{4,1},{4,3}};
+
+ m_Validate = true;
+ for (int i = 0; i < 8; i++)
+ {
+ if (m_Point[0][conX[i][0]].x >= m_Point[0][conX[i][1]].x) m_Validate = false;
+ if (m_Point[0][conY[i][0]].y >= m_Point[0][conY[i][1]].y) m_Validate = false;
+ }
+}
+
+//-------------------------------------------------------------------------------
+// Rebuild the four ROI mask images from the current point positions.
+// Point layout (per ValidatePoints ordering): 0 = top-left, 4 = top-right,
+// 1 = left, 3 = right, 2 = bottom tip. Each mask gets two filled circles of
+// radius CALC_ROI_RADIUS with value 1 on a zeroed background.
+void CTracking::CalcROIMask()
+{
+ // Mask 1: lower-middle pair. Centre = midpoint of (midpoint of the first
+ // two listed points) and the third point, i.e. between edge 0-2 and point 1,
+ // and between edge 4-2 and point 3.
+
+ int points1[2][3] = {{0, 2, 1}, {4, 2, 3}};
+
+ cvSet(m_Mask1, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ float cx1 = (m_Point[0][points1[i][0]].x + m_Point[0][points1[i][1]].x) / 2;
+ float cy1 = (m_Point[0][points1[i][0]].y + m_Point[0][points1[i][1]].y) / 2;
+ roi.x = (m_Point[0][points1[i][2]].x + cx1) / 2;
+ roi.y = (m_Point[0][points1[i][2]].y + cy1) / 2;
+ cvDrawCircle(m_Mask1, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+ // Mask 2: upper pair, near the top edge. Centre = midpoint of (the quarter
+ // point from the first listed point toward the second) and (the midpoint of
+ // the last two listed points, i.e. the top edge 0-4).
+
+ int points2[2][4] = {{0, 3, 0, 4}, {4, 1, 4, 0}};
+
+ cvSet(m_Mask2, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ float cx1 = m_Point[0][points2[i][0]].x + (m_Point[0][points2[i][1]].x - m_Point[0][points2[i][0]].x) / 4;
+ float cy1 = m_Point[0][points2[i][0]].y + (m_Point[0][points2[i][1]].y - m_Point[0][points2[i][0]].y) / 4;
+ float cx2 = (m_Point[0][points2[i][2]].x + m_Point[0][points2[i][3]].x) / 2 ;
+ float cy2 = (m_Point[0][points2[i][2]].y + m_Point[0][points2[i][3]].y) / 2;
+ roi.x = (cx1 + cx2) / 2;
+ roi.y = (cy1 + cy2) / 2;
+ cvDrawCircle(m_Mask2, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+ // Mask 3: side pair near points 1 and 3. Centre = midpoint of (the quarter
+ // point from the first listed point toward the second) and the third point.
+
+ int points3[2][3] = {{0, 3, 2}, {4, 1, 2}};
+
+ cvSet(m_Mask3, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ float cx1 = m_Point[0][points3[i][0]].x + (m_Point[0][points3[i][1]].x - m_Point[0][points3[i][0]].x) / 4;
+ float cy1 = m_Point[0][points3[i][0]].y + (m_Point[0][points3[i][1]].y - m_Point[0][points3[i][0]].y) / 4;
+ roi.x = (m_Point[0][points3[i][2]].x + cx1) / 2;
+ roi.y = (m_Point[0][points3[i][2]].y + cy1) / 2;
+ cvDrawCircle(m_Mask3, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+ // Mask 4: bottom-tip pair. Centre = 7/8 of the way from point 0 (resp. 4)
+ // toward point 2, i.e. just above the bottom tip.
+
+ int points4[2][2] = {{0, 2}, {4, 2}};
+
+ cvSet(m_Mask4, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ roi.x = m_Point[0][points4[i][0]].x + (m_Point[0][points4[i][1]].x - m_Point[0][points4[i][0]].x) * 7 / 8;
+ roi.y = m_Point[0][points4[i][0]].y + (m_Point[0][points4[i][1]].y - m_Point[0][points4[i][0]].y) * 7 / 8;
+ cvDrawCircle(m_Mask4, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+
+}
+
+//-------------------------------------------------------------------------------
+// Mean colour of img inside each ROI mask (cvAvg with the mask as weight).
+CvScalar CTracking::ROIColor1(IplImage *img)
+{
+ return cvAvg(img, m_Mask1);
+}
+
+CvScalar CTracking::ROIColor2(IplImage *img)
+{
+ return cvAvg(img, m_Mask2);
+}
+
+CvScalar CTracking::ROIColor3(IplImage *img)
+{
+ return cvAvg(img, m_Mask3);
+}
+
+CvScalar CTracking::ROIColor4(IplImage *img)
+{
+ return cvAvg(img, m_Mask4);
+}
diff --git a/reference/Analysis/Tracking.h b/reference/Analysis/Tracking.h
new file mode 100644
index 0000000..ee567e9
--- /dev/null
+++ b/reference/Analysis/Tracking.h
@@ -0,0 +1,49 @@
+#pragma once
+
+#include "stdafx.h"
+
+#define TRACK_WIN_SIZE 10 // LK search / corner-refinement window size
+#define CALC_ROI_RADIUS 10 // radius of each ROI mask circle
+
+// Optical-flow point tracker: follows a small set of points across frames
+// with pyramidal Lucas-Kanade and derives four circular ROI masks from them.
+class CTracking
+{
+private:
+ int m_NumPoint; // number of tracked points
+ CvPoint2D32f *m_Point[2]; // previous/current point buffers (swapped per frame)
+ CvPoint *m_PointInit; // initial positions, kept for reference drawing
+ IplImage *m_Gray; // current grayscale frame
+ IplImage *m_GrayPre; // previous grayscale frame
+ IplImage *m_Pyramid; // LK pyramid workspace (current)
+ IplImage *m_PyramidPre; // LK pyramid workspace (previous)
+ IplImage *m_Mask1; // ROI masks rebuilt by CalcROIMask()
+ IplImage *m_Mask2;
+ IplImage *m_Mask3;
+ IplImage *m_Mask4;
+ char *m_Status; // per-point LK tracking status flags
+ CvTermCriteria m_Criteria; // termination criteria for LK / corner refinement
+ int m_PyrFlag; // CV_LKFLOW_PYR_A_READY once the first frame is done
+ double m_Movement; // displacement of the last point in the last frame
+ double m_TotalMovement; // sum of all point displacements over all frames
+ bool m_Validate; // last result of ValidatePoints()
+
+private:
+ void Release();
+ void ValidatePoints();
+ void CalcROIMask();
+
+public:
+ CTracking(void);
+ ~CTracking(void);
+ bool Init(const CvPoint *point, const int numPoint,
+ const IplImage *img);
+ bool Frame(const IplImage *img);
+ bool DrawPoints(IplImage *img);
+ bool DrawROI(IplImage *img);
+ double Movement() { return m_Movement; }
+ double TotalMovement() { return m_TotalMovement; }
+ // NOTE(review): "Vaildate" is a typo for "Validate"; kept because renaming
+ // would break existing callers.
+ bool IsVaildate() { return m_Validate; }
+ CvScalar ROIColor1(IplImage *img);
+ CvScalar ROIColor2(IplImage *img);
+ CvScalar ROIColor3(IplImage *img);
+ CvScalar ROIColor4(IplImage *img);
+};
diff --git a/reference/Analysis/resource.h b/reference/Analysis/resource.h
new file mode 100644
index 0000000..1750e5d
--- /dev/null
+++ b/reference/Analysis/resource.h
@@ -0,0 +1,14 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by TongueAnalysis.rc
+
+// Default next values for new resource objects (IDE-managed; do not edit by hand)
+//
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE 101
+#define _APS_NEXT_COMMAND_VALUE 40001
+#define _APS_NEXT_CONTROL_VALUE 1001
+#define _APS_NEXT_SYMED_VALUE 101
+#endif
+#endif
diff --git a/reference/Analysis/stdafx.cpp b/reference/Analysis/stdafx.cpp
new file mode 100644
index 0000000..3bfafe9
--- /dev/null
+++ b/reference/Analysis/stdafx.cpp
@@ -0,0 +1,328 @@
+#include "stdafx.h"
+
+//-------------------------------------------------------------------------------
+// �s���W���o�͂ɕ\��
+void GShowMat(const CvMat *mat, const char *name, const char *format)
+{
+ printf("%s[%d,%d] = \n", name, mat->rows, mat->cols);
+ for(int row = 0; row < mat->rows && row < SHOW_MAX_ROW; row ++)
+ {
+ printf("");
+ for(int col=0; colcols; col ++)
+ {
+ CvScalar v = cvGet2D(mat, row, col);
+ printf(format, v.val[0]);
+ if (col < mat->cols - 1) printf(", ");
+ }
+ printf("\n");
+ }
+ if (mat->rows > SHOW_MAX_ROW) printf("continue....\n");
+ printf("\n");
+}
+
+//-------------------------------------------------------------------------------
+// �t�@�C���̗L���肷��
+bool GFileExists(const char *path)
+{
+ WIN32_FIND_DATA ffd;
+
+ return (FindFirstFile(path, &ffd) != INVALID_HANDLE_VALUE);
+}
+
+//-------------------------------------------------------------------------------
+// �C�Ӄ`���l���C�C�ӌ`���̍s��f�[�^�����o��
+double GcvmGet(const CvMat *mat, const int row, const int col, const int ch)
+{
+ CvScalar v = cvGet2D(mat, row, col);
+ return v.val[ch];
+}
+
+//-------------------------------------------------------------------------------
+// Show a reduced copy of img in a numbered result window, creating windows up
+// to `num` on demand. A title banner is burned into the image; " [WAIT]" is
+// appended when wait < 1 (cvWaitKey blocks). Pressing ESC exits the program.
+//
+// Returns the cvWaitKey code (or, when num < 1, just the current window count).
+int GShowImage(const IplImage *img, const int num, const char *title,
+ const int wait)
+{
+ static int numDisp = 0; // number of windows created so far (persists)
+
+ if (num < 1) return numDisp;
+
+ // build the DISP_W x DISP_H display copy; double images are resized with
+ // normalisation (GcvResizeD), 8-bit images with plain cvResize
+ IplImage *show = cvCreateImage(cvSize(DISP_W, DISP_H),
+ img->depth, img->nChannels);
+
+ if (img->depth == IPL_DEPTH_8U) cvResize(img, show);
+ else GcvResizeD(img, show);
+
+ // draw the title banner on a black box in the top-left corner
+ char titleM[256];
+ sprintf_s(titleM, sizeof(titleM), "%s%s", title,
+ (wait < 1 ? " [WAIT]" : ""));
+ CvFont font;
+ CvSize textSize;
+ int baseline;
+ cvInitFont(&font, CV_FONT_HERSHEY_COMPLEX, 0.7, 0.7, 0, 1);
+ cvGetTextSize(titleM, &font, &textSize, &baseline);
+ cvRectangle(show, cvPoint(0, 0),
+ cvPoint(textSize.width + 10, textSize.height + 10), cvScalarAll(0),
+ CV_FILLED);
+ cvPutText(show, titleM, cvPoint(5, textSize.height + 5), &font, cvScalarAll(255));
+
+ // lazily create any missing windows up to index `num`
+ char dispname[256];
+ while (numDisp < num)
+ {
+ numDisp ++;
+ sprintf_s(dispname, sizeof(dispname), "��͌��� %d", numDisp);
+ cvNamedWindow(dispname);
+ }
+
+ // show in window `num` and poll/wait for a key; ESC (27) aborts the program
+ sprintf_s(dispname, sizeof(dispname), "��͌��� %d", num);
+ cvShowImage(dispname, show);
+ int key = cvWaitKey(wait);
+ if (key == 27) exit(1);
+
+ SAFE_RELEASEIMG(show);
+
+ return key;
+}
+
+//-------------------------------------------------------------------------------
+// Nearest-neighbour resize for floating-point images, normalising values by
+// the global min/max across all channels so the result spans roughly [0, 2].
+// NOTE(review): no guard against gmax == gmin (constant image -> division by
+// zero); the cvScalar() rebuild also drops a 4th channel if one exists.
+void GcvResizeD(const IplImage *src, IplImage *dst)
+{
+ const double xScale = (double)dst->width / (double)src->width;
+ const double yScale = (double)dst->height / (double)src->height;
+
+ double max, min, gmax, gmin, sc;
+
+ // find the global min/max over all channels for normalisation
+ if (src->nChannels > 1)
+ {
+ IplImage *ch0 = cvCreateImage(cvGetSize(src), src->depth, 1);
+ IplImage *ch1 = cvCreateImage(cvGetSize(src), src->depth, 1);
+ IplImage *ch2 = cvCreateImage(cvGetSize(src), src->depth, 1);
+ cvSplit(src, ch0, ch1, ch2, NULL);
+ cvMinMaxLoc(ch0, &gmin, &gmax);
+ cvMinMaxLoc(ch1, &min, &max);
+ gmin = (min < gmin ? min : gmin);
+ gmax = (max > gmax ? max : gmax);
+ cvMinMaxLoc(ch2, &min, &max);
+ gmin = (min < gmin ? min : gmin);
+ gmax = (max > gmax ? max : gmax);
+ SAFE_RELEASEIMG(ch0);
+ SAFE_RELEASEIMG(ch1);
+ SAFE_RELEASEIMG(ch2);
+ }
+ else
+ {
+ cvMinMaxLoc(src, &gmin, &gmax);
+ }
+ sc = 2.0 / (gmax - gmin);
+
+ // scale with OpenMP: each destination row sampled from the nearest source row
+#ifdef _OPENMP
+#pragma omp parallel for schedule(dynamic)
+#endif
+ for (int dy = 0; dy < dst->height; dy ++)
+ {
+ for (int dx = 0; dx < dst->width; dx ++)
+ {
+ int sx = (int)((double)dx / xScale + 0.5);
+ int sy = (int)((double)dy / yScale + 0.5);
+ sx = sx >= src->width ? src->width - 1 : sx;
+ sy = sy >= src->height ? src->height - 1 : sy;
+ CvScalar v = cvGet2D(src, sy, sx);
+ v = cvScalar((v.val[0] - gmin) * sc,
+ (v.val[1] - gmin) * sc,
+ (v.val[2] - gmin) * sc);
+ cvSet2D(dst, dy, dx, v);
+ }
+ }
+}
+
+//-------------------------------------------------------------------------------
+// CSV�t�@�C����ǂݍ���ōs�������
+//
+// const char *filename CSV�t�@�C����
+// const int rows �ǂݍ��ލs��
+// const int cols �ǂݍ��ޗ�
+// const int startRow �ǂݍ��݊J�n�s�i1���琔����j
+// const int startCol �ǂݍ��݊J�n��i1���琔����j
+//
+CvMat *GLoadCsv(const char *filename, const int rows, const int cols,
+ const int startRow, const int startCol)
+{
+ // CSV�t�@�C�����J��
+ FILE *fp;
+ fopen_s(&fp, filename, "r");
+ if (!fp)
+ {
+ printf("Can't open csv file: %s\n", filename);
+ return NULL;
+ }
+
+ // �ǂݍ��݃o�b�t�@�̏���
+ char buffer[1024];
+ CvMat *mat = cvCreateMat(rows, cols, CV_64F);
+ int readRow = 0, readCol = 0;
+
+ // �f�[�^�ǂݍ���
+ for (int row = 0; row < rows + startRow - 1; row ++)
+ {
+ fgets(buffer, sizeof(buffer), fp);
+ char *begin = buffer;
+
+ if (row < startRow - 1) continue;
+
+ readCol = 0;
+ for (int col = 0; col < cols + startCol - 1; col ++)
+ {
+ char *pt = begin;
+ for (; *pt != ',' && *pt != '\n' && *pt != '\0'; pt ++);
+ if (*pt == '\0')
+ {
+ printf("Csv file error: %s\n", filename);
+ return NULL;
+ }
+ *pt = '\0';
+
+ if (col >= startCol - 1)
+ {
+ cvmSet(mat, readRow, readCol, atof(begin));
+ readCol ++;
+ }
+
+ begin = pt + 1;
+ }
+ readRow ++;
+ }
+
+ // �t�@�C�������
+ fclose(fp);
+
+ // �f�[�^�\��
+#ifdef SHOW_FILELOAD
+ GShowMat(mat, filename, "%8.4f");
+#endif // SHOW_FILELOAD
+
+ return mat;
+}
+
+//-------------------------------------------------------------------------------
+// �摜�̐��l�f�[�^��W���o�͂Ƀ_���v�\��
+void GImageDumpD(const IplImage *src, const int num, const char *name,
+ const char *format)
+{
+ if (src->depth != IPL_DEPTH_64F) return;
+
+ printf("Image dump '%s'\n", name);
+ for (int i = 0; i < num; i ++)
+ {
+ double val = *(((double*)src->imageData)+i);
+ printf(format, val);
+ printf(", ");
+ }
+ printf("\n");
+}
+
+//-------------------------------------------------------------------------------
+// ��������g���~���O����
+// �������NULL���������ށi�����ʒu���ς��j�̂Œ���
+void GTrimStr(char **buf)
+{
+ for (; GIsSpace(**buf); *buf ++);
+ char *pt;
+ for (pt = *buf + strlen(*buf) - 1; GIsSpace(*pt) && pt >= *buf; pt --);
+ pt ++;
+ if (GIsSpace(*pt)) *pt = '\0';
+}
+
//-------------------------------------------------------------------------------
// True for blank characters (space, tab, CR, LF).
// NB: exactly like the original strchr-based test, the terminator '\0' also
// reports true, because strchr(" \t\r\n", '\0') finds the terminating null.
bool GIsSpace(const char ch)
{
	return ch == ' ' || ch == '\t' || ch == '\r' || ch == '\n' || ch == '\0';
}
+
+//-------------------------------------------------------------------------------
+// Matrix multiply dst = src1 * src2, parallelised with OpenMP.
+// Per the original author's note, cvMatMul was dramatically slower here
+// (suspected cvmGet/Set overhead), hence this hand-rolled version.
+// The outer (parallel) loop runs over whichever result dimension is larger.
+void GcvPMatMul(const CvMat *src1, const CvMat *src2, CvMat *dst)
+{
+ // dimension check: inner sizes must agree
+ assert( src1->cols == src2->rows );
+
+ // parallelise over the larger of (result rows, result cols)
+ const bool mode = (src1->rows < src2->cols);
+ const int nLoop1 = mode ? src2->cols : src1->rows;
+ const int nLoop2 = !mode ? src2->cols : src1->rows;
+ //const double *data1 = (double*)src1->data.db;
+ //const double *data2 = (double*)src2->data.db;
+ //double *dataD = (double*)dst ->data.db;
+
+ // outer loop distributed across OpenMP threads
+#ifdef _OPENMP
+#pragma omp parallel for schedule(dynamic)
+#endif
+ for (int loop1 = 0; loop1 < nLoop1; loop1 ++)
+ {
+ for (int loop2 = 0; loop2 < nLoop2; loop2 ++)
+ {
+ const int r1 = mode ? loop2 : loop1;
+ const int c2 = mode ? loop1 : loop2;
+
+ // dot product of row r1 of src1 with column c2 of src2
+ double sum = 0;
+ for (int i = 0; i < src1->cols; i ++)
+ sum += cvmGet(src1, r1, i) * cvmGet(src2, i, c2);
+ cvmSet(dst, r1, c2, sum);
+
+ // sum += data1[r1 * src1->cols + i] * data2[i * src2->cols + c2];
+ //dataD[r1 * dst->cols + c2] = sum;
+ }
+ }
+}
+
+//-------------------------------------------------------------------------------
+// Apply a 1/2.2 gamma to an image, returning a new 8-bit COLOR-channel image.
+// Values are scaled by 1000 before the gamma and clamped to [0, 255].
+// NOTE(review): the x1000 pre-scale suggests the input values are small linear
+// quantities -- confirm the expected input range against the callers.
+// NOTE(review): output pixels are written through a running index into
+// imageData, which assumes widthStep == width * COLOR (no row padding).
+IplImage *GGenAddGamma(const IplImage *img)
+{
+ IplImage *gamma = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, COLOR);
+
+ const double g = 1.0 / 2.2;
+ int idx = 0;
+ for (int y = 0; y < img->height; y ++)
+ {
+ for (int x = 0; x < img->width; x ++)
+ {
+ CvScalar v = cvGet2D(img, y, x);
+ for (int c = 0; c < COLOR; c ++)
+ {
+ v.val[c] = pow(v.val[c] * 1000.0, g);
+ v.val[c] = v.val[c] < 0 ? 0 : (v.val[c] > 255.0 ? 255.0 : v.val[c]);
+ gamma->imageData[idx ++] = (BYTE)v.val[c];
+ }
+ }
+ }
+
+ return gamma;
+}
+
+CvScalar GXYZtoLab(CvScalar xyz)
+{
+ // ���������Ŏg�p�ł��郍�[�J�������`
+ struct local {
+ static double F(double x)
+ { return x > 0.008856 ? pow(x, 1.0/3.0) : 7.787 * x + (16.0/116.0); }
+ };
+
+ double white[3] = {92.219, 100.0, 95.965};
+ double fx = local::F(xyz.val[0] / white[0]);
+ double fy = local::F(xyz.val[1] / white[1]);
+ double fz = local::F(xyz.val[2] / white[2]);
+ CvScalar lab;
+ lab.val[0] = 116.0 * fy - 16.0;
+ lab.val[1] = 500.0 * (fx - fy);
+ lab.val[2] = 200.0 * (fy - fz);
+
+ return lab;
+}
diff --git a/reference/Analysis/stdafx.h b/reference/Analysis/stdafx.h
new file mode 100644
index 0000000..0d443b4
--- /dev/null
+++ b/reference/Analysis/stdafx.h
@@ -0,0 +1,76 @@
+// stdafx.h : project-wide precompiled header -- system includes, common
+// constants, macros and helper prototypes shared by the analysis sources.
+//
+
+#pragma once
+
+// common system headers
+// NOTE(review): the <...> targets of the following #include lines were lost in
+// text extraction (angle-bracket content stripped); restore from the original
+// file (likely stdio/tchar/math/windows plus the OpenCV headers).
+
+#include "targetver.h"
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+
+#include "HQTime.h"
+
+// common constants
+
+#define PATH_LEN 1024 // max length of a file-path string
+#define COLOR 3 // number of colour channels
+#define DISP_W 640
+#define DISP_H 512
+#define SHOW_MAX_ROW 100
+//#define DEBUG_NUM_FRAMES 40 // limit processed frames (debug)
+//#define DEBUG_TRACK_ONLY
+//#define CONVERT_IMAGE
+//#define SHOW_CV_IMAGE
+//#define SHOW_CALIBRATED_MACBETH
+//#define SHOW_XYZ_IMAGE
+//#define SHOW_RGB_VALUES
+//#define SHOW_REGRESSION_COEF
+//#define SHOW_FILELOAD
+//#define MANUAL_TRACKING_EVALUATION
+
+// common macros
+// NOTE(review): CALL(a) expands to "if (!a) return false;" -- the argument is
+// not parenthesised, so passing a compound expression changes precedence.
+
+#define SAFE_DELETE(a) if (a) { delete a; a = NULL; }
+#define SAFE_DELETEA(a) if (a) { delete [] a; a = NULL; }
+#define SAFE_RELEASEIMG(a) if (a) { cvReleaseImage(&a); a = NULL; }
+#define SAFE_RELEASEMAT(a) if (a) { cvReleaseMat(&a); a = NULL; }
+#define CALL(a) if (!a) return false;
+
+// error reporting: stderr for console builds, debugger output otherwise
+#ifdef _CONSOLE
+#define ERROR_RET(msg) { \
+fprintf(stderr, "%s\n", msg); \
+return false; }
+#else
+#define ERROR_RET(msg) { \
+OutputDebugString(msg); \
+OutputDebugString("\n"); \
+return false; }
+#endif // _CONSOLE
+
+// common helper prototypes (implemented in stdafx.cpp)
+
+void GShowMat(const CvMat *mat, const char *name, const char *format = "%6.2f");
+bool GFileExists(const char *path);
+double GcvmGet(const CvMat *mat, const int row, const int col, const int ch = 0);
+int GShowImage(const IplImage *img, const int num = 1,
+ const char *title = NULL, const int wait = 100);
+void GcvResizeD(const IplImage *src, IplImage *dst);
+CvMat *GLoadCsv(const char *filename, const int rows, const int cols,
+ const int startRow = 1, const int startCol = 1);
+void GImageDumpD(const IplImage *src, const int num = 10, const char *name = "",
+ const char *format = "%6.2f");
+void GTrimStr(char **buf);
+bool GIsSpace(const char ch);
+void GcvPMatMul(const CvMat *src1, const CvMat *src2, CvMat *dst);
+IplImage *GGenAddGamma(const IplImage *img);
+CvScalar GXYZtoLab(CvScalar xyz);
diff --git a/reference/Analysis/targetver.h b/reference/Analysis/targetver.h
new file mode 100644
index 0000000..b18cde4
--- /dev/null
+++ b/reference/Analysis/targetver.h
@@ -0,0 +1,13 @@
+#pragma once
+
+// Standard Visual Studio "targetver" boilerplate: defines the minimum
+// supported Windows platform. Enabling a version makes all features available
+// on that version and earlier platforms where they exist.
+
+// Change the definition below if an earlier target platform is required;
+// see MSDN for the values corresponding to other Windows versions.
+#ifndef _WIN32_WINNT // minimum supported platform: Windows Vista (0x0600)
+#define _WIN32_WINNT 0x0600 // change to target other versions of Windows
+#endif
+
diff --git a/reference/ColorCorrection/Analysis.cpp b/reference/ColorCorrection/Analysis.cpp
new file mode 100644
index 0000000..fda1af8
--- /dev/null
+++ b/reference/ColorCorrection/Analysis.cpp
@@ -0,0 +1,653 @@
+#include "Analysis.h"
+
+//------------------------------------------------------------------------------
+// Constructor
+// NOTE(review): this region is corrupted by text extraction -- several "<..."
+// comparisons were stripped, merging the end of the constructor, the
+// destructor, and the start of GlobalProc() into single lines (see the
+// "for (int i=0; iCloseLogFile" / "for (int i=0; iInit()" lines below).
+// The code is preserved byte-for-byte; restore from the original file.
+Analysis::Analysis(void)
+{
+ // null out owned pointers so the destructor is safe in any state
+ m_macbeth = NULL;
+ m_camera = NULL;
+ m_show = NULL;
+ m_log = NULL;
+ m_gLog = NULL;
+ m_param = NULL;
+ m_bMat = NULL;
+ for (int i=0; iCloseLogFile(false);
+ if (m_gLog != NULL) this->CloseLogFile(true);
+ if (m_param != NULL) delete [] m_param;
+ if (m_bMat != NULL) cvReleaseMat(&m_bMat);
+ for (int i=0; iInit() ) return false;
+
+ // GlobalProc: iterate the processing list -- for every parameter set,
+ // time-of-day slot, subject and repetition, process the reference chart
+ // once and then every capture folder.
+ for (int param=0; param < m_numParam; param ++)
+ {
+ for (int timeMDE=0; timeMDE < m_param[param].timeMDE; timeMDE++)
+ {
+ bool firstTime = true;
+ for (int subject=1; subject <= m_param[param].subject; subject ++)
+ {
+ for (int count=1; count <= m_param[param].count; count ++)
+ {
+ if (!this->SetPath(m_param[param].dateStr, timeMDE, subject, count))
+ return false;
+
+ if (firstTime)
+ {
+ // process the reference (Macbeth) images once per time slot
+ SPRLOG "Ref processing: %s", m_refPath); WriteLog(true);
+ if (!this->OpenLogFile(false, true)) return false;
+ m_numFrames = 1;
+ if (!this->GetPatchPosition(true)) return false;
+ if (!this->GetAllPatchColor(true)) return false;
+ if (!this->CloseLogFile() ) return false;
+ firstTime = false;
+ }
+
+ // process the capture folder
+ SPRLOG "Start processing: %s", m_filePath); WriteLog(true);
+ if (!this->OpenLogFile() ) return false;
+ if (!this->GetNumFrames() ) return false;
+ if (!this->GetPatchPosition() ) return false;
+ if (!this->GetAllPatchColor() ) return false;
+ if (!this->CorrectionMat() ) return false;
+ if (!this->CloseLogFile() ) return false;
+ }
+ }
+ }
+ }
+
+ this->CloseLogFile(true);
+ cvWaitKey(0);
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// Initialization: load the mini-Macbeth template, open the global log, and
+// read the processing list file (two passes: count entries, then load them).
+// Lines starting with "//" in the list file are skipped as comments.
+// NOTE(review): the "while (!feof(fp))" pattern can re-process the last line
+// once at EOF (fgets result is not checked) -- confirm against the list format.
+bool Analysis::Init()
+{
+ // load the mini-Macbeth reference template image
+ m_macbeth = cvLoadImage(MINIMACBETH);
+ if (!m_macbeth) ERROR_RET("Can't open Macbeth image file.");
+
+ // open the global log
+ this->OpenLogFile(true);
+
+ // open the processing-list file
+ FILE *fp = NULL;
+ fopen_s(&fp, PROCESS_LIST, "r");
+ if (!fp) ERROR_RET("Can't open Process List file.");
+
+ // first pass: count valid (non-comment, timeMDE > 0) entries
+ char buffer[1024];
+ int num = 0;
+ while (!feof(fp))
+ {
+ fgets(buffer, sizeof(buffer), fp);
+ if (strncmp(buffer, "//", 2))
+ {
+ Param p;
+ sscanf_s(buffer, "%s %d %d %d",
+ p.dateStr, DATE_STRING, &p.timeMDE, &p.subject, &p.count);
+ if (p.timeMDE > 0) num ++;
+ }
+ }
+ if (num < 1) ERROR_RET("No parameter data is found in file.");
+ m_numParam = num;
+ SPRLOG "Parameter list: %d", m_numParam); WriteLog(true);
+
+ // second pass: load every entry into m_param
+ m_param = new Param [m_numParam];
+ int i = 0;
+ fseek(fp, 0, SEEK_SET);
+ while (!feof(fp))
+ {
+ fgets(buffer, sizeof(buffer), fp);
+ if (strncmp(buffer, "//", 2))
+ {
+ sscanf_s(buffer, "%s %d %d %d",
+ m_param[i].dateStr, DATE_STRING,
+ &m_param[i].timeMDE, &m_param[i].subject, &m_param[i].count);
+ if (m_param[i].timeMDE > 0) i ++;
+ }
+ }
+
+ cvNamedWindow(DISP1);
+ cvSetMouseCallback(DISP1, OnMouse1, this);
+#ifdef DISP2
+ cvNamedWindow(DISP2);
+#endif
+#ifdef DISP3
+ cvNamedWindow(DISP3);
+#endif
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// ��������t�H���_�̃p�X������
+bool Analysis::SetPath(const char *dateStr, int timeMDE, int subject, int count)
+{
+ const char *timeString[3] = {"morning", "daytime", "evening"};
+
+ sprintf_s(m_filePath, MAX_PATH_LEN, "%s\\%s\\%s%d-%d",
+ BASE_PATH, dateStr, timeString[timeMDE], subject, count);
+
+ sprintf_s(m_refPath, MAX_PATH_LEN, "%s\\%s\\macbeth_%s",
+ BASE_PATH, dateStr, timeString[timeMDE]);
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// Open a log file and write its header (name + timestamp).
+//
+// gLog : true opens the single global log; false opens a per-folder log
+// ref  : with gLog false, selects the reference folder instead of the capture
+//        folder as the log location
+bool Analysis::OpenLogFile(bool gLog, bool ref)
+{
+ char filename[MAX_PATH_LEN];
+ SYSTEMTIME st;
+ GetLocalTime(&st);
+
+ if (gLog)
+ {
+ sprintf_s(filename, MAX_PATH_LEN, "%s", GLOBAL_LOG);
+ fopen_s(&m_gLog, filename, "w");
+ if (!m_gLog) ERROR_RET("Can't create global log file.");
+ }
+ else
+ {
+ sprintf_s(filename, MAX_PATH_LEN, "%s\\%s",
+ (ref ? m_refPath : m_filePath), LOCAL_LOG);
+ fopen_s(&m_log, filename, "w");
+ if (!m_log) ERROR_RET("Can't create log file.");
+ }
+
+ SPRLOG "Log file, \"%s\"", filename); WriteLog(gLog);
+ SPRLOG "Start, %02d-%02d-%02d %02d:%02d:%02d",
+ st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); WriteLog(gLog);
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// Write an end-timestamp and close the selected log (global or per-folder),
+// resetting the corresponding FILE* to NULL.
+bool Analysis::CloseLogFile(bool gLog)
+{
+ SYSTEMTIME st;
+ GetLocalTime(&st);
+ SPRLOG ""); WriteLog(gLog);
+ SPRLOG "End, %02d-%02d-%02d %02d:%02d:%02d",
+ st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); WriteLog(gLog);
+
+ if (gLog) { fclose(m_gLog); m_gLog = NULL; }
+ else { fclose(m_log ); m_log = NULL; }
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// Count the sequentially numbered camera-frame files in the capture folder by
+// probing CAMERA_FILE names until one is missing.
+// NOTE(review): each FindFirstFile call leaks its search handle (no FindClose).
+// NOTE(review): the tail of this function and the head of GetPatchPosition()
+// were merged by text extraction (stripped "<..." comparisons) -- see the
+// "for (int i=0; iReadImage(0, ref);" line; restore from the original file.
+bool Analysis::GetNumFrames()
+{
+ char filename[MAX_PATH_LEN];
+ int frame = -1;
+ WIN32_FIND_DATA ffd;
+ do
+ {
+ sprintf_s(filename, MAX_PATH_LEN, "%s\\" CAMERA_FILE, m_filePath, ++ frame);
+ } while (FindFirstFile(filename, &ffd) != INVALID_HANDLE_VALUE);
+
+ if (frame < 1) ERROR_RET("No camera file found.");
+ m_numFrames = frame;
+#ifdef DEBUG_STOP_FRAME
+ m_numFrames = DEBUG_STOP_FRAME
+#endif
+
+ SPRLOG "Frames, %d", m_numFrames); WriteLog();
+
+ // allocate per-frame arrays (line corrupted: merged with GetPatchPosition)
+ for (int i=0; iReadImage(0, ref);
+ this->ShowImage(DISP3);
+
+ // NOTE(review): the signature/opening of GetPatchPosition() was lost to
+ // text-extraction corruption above; this is its interior. It locates the two
+ // mini-Macbeth charts by rotated template matching and derives the patch
+ // centre coordinates from the best match.
+
+ // correlation-map buffer for template matching
+ IplImage *match = cvCreateImage(
+ cvSize(SEARCH_AREA - m_macbeth->width + 1,
+ SEARCH_AREA - m_macbeth->height + 1),
+ IPL_DEPTH_32F , 1);
+
+ // matching work buffers: rotation matrix and rotated template
+ CvMat *rotateMat = cvCreateMat(2, 3, CV_32FC1);
+ IplImage *rotateMacbeth = cvCreateImage(cvGetSize(m_macbeth), IPL_DEPTH_8U, 3);
+ CvRect roi;
+ double gAngle = 0;
+ double minC, maxC, gMinC = -1.0;
+ CvPoint minPos, maxPos, gMinPos;
+
+ SPRLOG ""); WriteLog();
+ SPRLOG "Mini Macbeth searching"); WriteLog();
+ SPRLOG "Place, Angle, min value, x, y"); WriteLog();
+
+ // search the two chart locations: place 0 = top-right corner region,
+ // place 1 = bottom-left corner region
+ for (int place = 0; place < NUM_MACBETH; place ++)
+ {
+ // restrict the search to the SEARCH_AREA square in that corner
+ if (place==0) roi = cvRect(m_camera->width-SEARCH_AREA, 0, SEARCH_AREA, SEARCH_AREA);
+ else roi = cvRect(0, m_camera->height-SEARCH_AREA, SEARCH_AREA, SEARCH_AREA);
+ cvSetImageROI(m_camera, roi);
+
+ // rotated template matching: try every angle, keep the best (lowest
+ // normalised squared difference) and its centre position
+ gMinC = -1.0;
+ int count = 0;
+ for(double angle = 0; angle < 360.0; angle += ROTATE_RESOLUTION)
+ {
+ cv2DRotationMatrix(cvPoint2D32f(m_macbeth->width/2.0, m_macbeth->height/2.0),
+ angle, 1.0, rotateMat);
+ cvWarpAffine(m_macbeth, rotateMacbeth, rotateMat,
+ CV_WARP_FILL_OUTLIERS, cvScalarAll(255.0));
+
+ cvMatchTemplate(m_camera, rotateMacbeth, match, CV_TM_SQDIFF_NORMED);
+ cvMinMaxLoc(match, &minC, &maxC, &minPos, &maxPos);
+ if (minC < gMinC || gMinC < 0)
+ {
+ gAngle = angle;
+ gMinPos.x = minPos.x + (m_macbeth->width / 2) + roi.x;
+ gMinPos.y = minPos.y + (m_macbeth->height / 2) + roi.y;
+ gMinC = minC;
+ }
+
+ if ((++ count) % 10 == 0) cvWaitKey(1);
+ }
+ // NOTE(review): the "UR"/"BL" labels look swapped relative to the ROI
+ // placement above (place==0 searches the upper-right) -- confirm.
+ SPRLOG "%s, %3.0f, %f, %4d, %4d", (place==1 ? "UR" : "BL"),
+ gAngle, gMinC, gMinPos.x, gMinPos.y); WriteLog();
+
+ // derive each patch centre from the found corner patch: the remaining
+ // patches lie at 45-degree steps around it, at BLACK_LEN (or sqrt(2)x)
+ m_patchPos[place * NUM_PATCH] = gMinPos;
+ for(int i=0; i < NUM_PATCH - 1; i++)
+ {
+ m_patchPos[place * NUM_PATCH + i + 1].x =
+ cvRound(gMinPos.x + BLACK_LEN * (i%2 ? cvSqrt(2.0) : 1.0) *
+ cos((BLACK_ANGLE - gAngle + i*45.0)/180.0*CV_PI));
+ m_patchPos[place * NUM_PATCH + i + 1].y =
+ cvRound(gMinPos.y + BLACK_LEN * (i%2 ? cvSqrt(2.0) : 1.0) *
+ sin((BLACK_ANGLE - gAngle + i*45.0)/180.0*CV_PI));
+ }
+
+ // restore the full-frame ROI
+ cvResetImageROI(m_camera);
+ }
+
+ // release the matching work buffers
+ cvReleaseImage(&rotateMacbeth);
+ cvReleaseImage(&match);
+ cvReleaseMat(&rotateMat);
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// �p�b�`�̐F���擾
+bool Analysis::GetPatchColor(int frame, bool ref)
+{
+ // �p�b�`��f�l�̕��ςƕW�������Z�o
+ for(int i=0; iWriteLog(false, true);
+
+ // ���ʉ摜�̕\��
+ cvSetImageROI(m_show,
+ cvRect(m_camera->width-SEARCH_AREA, 0, SEARCH_AREA, SEARCH_AREA));
+ this->ShowImage();
+ cvSetImageROI(m_show,
+ cvRect(0, m_camera->height-SEARCH_AREA, SEARCH_AREA, SEARCH_AREA));
+ this->ShowImage(DISP2);
+
+ cvResetImageROI(m_show);
+
+ cvWaitKey(1);
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// �p�b�`�̐F���擾
+bool Analysis::GetAllPatchColor(bool ref)
+{
+ // ���O�Ƀw�b�_�[����������
+ SPRLOG ""); WriteLog();
+ SPRLOG "Patch Color"); WriteLog();
+ const char *rgb = "RGB";
+ const char *ms = "ms";
+ SPRLOG "Frame, ");
+ for(int i=0; iReadImage(frame, ref);
+
+ if (!this->GetPatchColor(frame, ref)) return false;
+ }
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// �F��W�����Z�o
+bool Analysis::CorrectionMat()
+{
+ // ���O�Ƀw�b�_�[����������
+ SPRLOG ""); WriteLog();
+ SPRLOG "Color Correction"); WriteLog();
+
+ CvMat *s = cvCreateMat(SSIZE, SSIZE, CV_64FC1);
+ CvMat *y = cvCreateMat(SSIZE, COLOR, CV_64FC1);
+ CvMat *si = cvCreateMat(SSIZE, SSIZE, CV_64FC1);
+ CvMat *x = cvCreateMat(1, SSIZE, CV_64FC1);
+ CvMat *c = cvCreateMat(1, COLOR, CV_64FC1);
+
+ for (int frame = 0; frame < m_numFrames; frame ++)
+ {
+ // S�s������߂�
+ for (int row = 0; row < SSIZE; row ++)
+ {
+ for (int col = 0; col < SSIZE; col ++)
+ {
+ double val = 0;
+ for (int patch = 0; patch < NUM_PATCH; patch ++)
+ val += (GetMRVal(patch, frame, row) * GetMRVal(patch, frame, col));
+ cvmSet(s, row, col, val);
+ }
+ }
+
+ // Y�s������߂�
+ for (int row = 0; row < SSIZE; row ++)
+ {
+ for (int col = 0; col < COLOR; col ++)
+ {
+ double val = 0;
+ for (int patch = 0; patch < NUM_PATCH; patch ++)
+ val += (GetMRVal(patch, frame, row) * m_miniPatchRef[patch].val[col]);
+ cvmSet(y, row, col, val);
+ }
+ }
+
+ // S�̋t�s������߂�
+ cvInvert(s, si, CV_LU);
+
+ // �W���s������߂�
+ if (m_bMat != NULL) cvReleaseMat(&m_bMat);
+ m_bMat = cvCreateMat(SSIZE, COLOR, CV_64FC1);
+ cvMatMul(si, y, m_bMat);
+
+ SPRLOG "B Matrix of frame %d", frame); WriteLog();
+ for(int col = 0; col < COLOR; col ++)
+ {
+ *m_logString = '\0';
+ for (int row = 0; row < SSIZE; row ++)
+ {
+ SPRLOG "%s%.4f, ", m_logString, cvmGet(m_bMat, row, col));
+ }
+ WriteLog();
+ }
+
+ // �摜�ǂݍ���
+ this->ReadImage(frame, false);
+
+ // �F�ϊ�
+ for (int i = 0; i < m_camera->width * m_camera->height * COLOR; i += COLOR)
+ {
+ double v0 = (double)((BYTE)m_camera->imageData[i ]);
+ double v1 = (double)((BYTE)m_camera->imageData[i+1]);
+ double v2 = (double)((BYTE)m_camera->imageData[i+2]);
+ for (int col = 0; col < SSIZE; col ++)
+ cvmSet(x,0, col, GetMRVal2(v0, v1, v2, col));
+ cvMatMul(x, m_bMat, c);
+ m_camera->imageData[i ] = ClipValue(cvmGet(c, 0, 0));
+ m_camera->imageData[i+1] = ClipValue(cvmGet(c, 0, 1));
+ m_camera->imageData[i+2] = ClipValue(cvmGet(c, 0, 2));
+
+ // DEBUG
+ if (((i/3) % m_camera->width == 400) && ((i/3) / m_camera->width == 500))
+ {
+ printf ("400,500 : %f %f %f -> %d %d %d\n",
+ v0, v1, v2, (BYTE)m_camera->imageData[i ], (BYTE)m_camera->imageData[i+1],
+ (BYTE)m_camera->imageData[i+2]);
+ }
+ }
+
+ // �ϊ���̃p�b�`�F�v�Z
+ this->GetPatchColor(frame);
+
+ // �\��
+ cvCopy(m_camera, m_show);
+ this->ShowImage();
+ cvWaitKey(100);
+ }
+
+ cvReleaseMat(&s);
+ cvReleaseMat(&y);
+ cvReleaseMat(&si);
+ cvReleaseMat(&x);
+ cvReleaseMat(&c);
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// �d��A����s��̗v�f�l��Ԃ�
+double Analysis::GetMRVal(int patch, int frame, int index)
+{
+ return GetMRVal2(m_miniPatch[patch][frame].val[0],
+ m_miniPatch[patch][frame].val[1], m_miniPatch[patch][frame].val[2], index);
+
+ return 0;
+}
+
+//------------------------------------------------------------------------------
+// �d��A����s��̗v�f�l��Ԃ�
+double Analysis::GetMRVal2(double v0, double v1, double v2, int index)
+{
+ switch (index)
+ {
+ case 0: return 1.0;
+ case 1: return v0;
+ case 2: return v1;
+ case 3: return v2;
+ case 4: return v0 * v1;
+ case 5: return v1 * v2;
+ case 6: return v2 * v0;
+ case 7: return v0 * v0;
+ case 8: return v1 * v1;
+ case 9: return v2 * v2;
+ case 10: return v0 * v1 * v2;
+ }
+ return 0;
+}
+
+//------------------------------------------------------------------------------
+// �Z�o�l�̃N���b�s���O
+char Analysis::ClipValue(double val)
+{
+ // �l�̌ܓ��Ŋۂ߂�
+ int intVal = cvRound(val);
+
+ // �N���b�s���O
+ if (intVal > 255) intVal = 255;
+ if (intVal < 0 ) intVal = 0;
+
+ return (char)intVal;
+}
+
+//------------------------------------------------------------------------------
+// ��͉摜��\��
+bool Analysis::ShowImage(char *window)
+{
+ // �\���̃X�P�[�����O
+ int width, height;
+ if (m_show->roi) width = m_show->roi->width, height = m_show->roi->height;
+ else width = m_show->width, height = m_show->height;
+ int showHeight = height * SHOW_WIDTH / width;
+ if (!strcmp(window, DISP1)) m_showScale[0] = (float)SHOW_WIDTH / width;
+
+ // �\��
+ IplImage *temp = cvCreateImage(cvSize(SHOW_WIDTH, showHeight), IPL_DEPTH_8U, 3);
+ cvResize(m_show, temp, CV_INTER_LINEAR);
+ cvShowImage(window, temp);
+
+ // �㏈��
+ cvWaitKey(1);
+ cvReleaseImage(&temp);
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// ���O���o��
+bool Analysis::WriteLog(bool gLog, bool fileOnly)
+{
+ fprintf(gLog ? m_gLog : m_log, "%s\n", m_logString);
+ fflush(gLog ? m_gLog : m_log);
+
+ if (fileOnly) return true;
+
+#ifdef _CONSOLE
+ if (gLog && strlen(m_logString) > 0) fprintf(stdout, "***** ");
+ fprintf(stdout, "%s\n", m_logString);
+ fflush(stdout);
+#else
+ if (gLog) OutputDebugString("***** ");
+ OutputDebugString(m_logString);
+#endif
+
+ return true;
+}
+
+//------------------------------------------------------------------------------
+// �}�E�X�N���b�N�R�[���o�b�N
+void Analysis::OnMouse(int disp, int ev, int x, int y, int flags)
+{
+ if (ev == CV_EVENT_LBUTTONDOWN)
+ {
+ printf("Disp:%d x,y=%d,%d\n",
+ disp, (int)(x/m_showScale[disp-1]), (int)(y/m_showScale[disp-1]));
+ }
+}
+
+//------------------------------------------------------------------------------
+// �}�E�X�N���b�N�R�[���o�b�N�i�O���[�o���j
+void OnMouse1(int ev, int x, int y, int flags, void* param)
+{
+ ((Analysis*)param)->OnMouse(1, ev, x, y, flags);
+}
diff --git a/reference/ColorCorrection/Analysis.h b/reference/ColorCorrection/Analysis.h
new file mode 100644
index 0000000..08123a8
--- /dev/null
+++ b/reference/ColorCorrection/Analysis.h
@@ -0,0 +1,107 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+#define PROCESS_LIST "ProcList.txt"
+#define GLOBAL_LOG "global_log.txt"
+#define LOCAL_LOG "analysis_log.csv"
+#define BASE_PATH "D:\\usr\\work\\��data"
+#define MINIMACBETH "MiniMacbeth.jpg"
+#define CAMERA_FILE "snapshot%d.jpg"
+#define MAX_PATH_LEN 1024 // �t�@�C���p�X������̍ő咷
+#define LOG_STRING 2048 // ���O������̒���
+#define DATE_STRING 64 // ���t������̒���
+#define SEARCH_AREA 200 // �}�N�x�X��T������R�[�i�[�̈�̃T�C�Y
+#define ROTATE_RESOLUTION 2.0 // �}�N�x�X����]�T������p�x���݁i�x�j
+#define PATCH_SIZE 3 // �p�b�`�̐F�ς���̈�T�C�Y
+#define NUM_MACBETH 2 // �~�j�}�N�x�X�̐�
+#define NUM_PATCH 9 // �}�N�x�X���p�b�`��
+#define COLOR 3 // �F�`���l����
+#define SSIZE 7 // r,g,b ��2���̏d��A����s��̃T�C�Y
+#define SHOW_WIDTH 400 // �\���摜�̕�
+#define DISP1 "��͌���1"
+#define DISP2 "��͌���2"
+#define DISP3 "��͌���3"
+//#define DEBUG_STOP_FRAME 3 // �������f����t���[�����i�f�o�b�O�j
+
+// �~�j�}�N�x�X�摜 MiniMacbeth0.jpg ����v�Z
+// ���S�p�b�`���W 39,39 ���p�b�`���W 20,45 (-19,+6)
+#define BLACK_ANGLE 160.0 // =atan(+6/-19) (degree)
+#define BLACK_LEN 19.5 // =sqrt((+6)^2 + (-19)^2)
+
+// �}�N��
+#define SPRLOG sprintf_s(m_logString, LOG_STRING,
+
+#ifdef _CONSOLE
+#define ERROR_RET(msg) { \
+SPRLOG "%s", msg); \
+getchar(); \
+return false; }
+#else
+#define ERROR_RET(msg) { \
+SPRLOG "%s", msg); \
+return false; }
+#endif // _CONSOLE
+
+void OnMouse1(int ev, int x, int y, int flags, void* param);
+
+struct Param
+{
+ char dateStr[DATE_STRING];
+ int timeMDE;
+ int subject;
+ int count;
+ Param() { *dateStr = '\0'; timeMDE = subject = count = 0; }
+};
+
+class Analysis
+{
+private:
+ IplImage *m_macbeth;
+ IplImage *m_camera;
+ IplImage *m_show;
+ FILE *m_log;
+ FILE *m_gLog;
+ Param *m_param;
+ CvMat *m_bMat;
+ int m_numParam;
+ char m_filePath[MAX_PATH_LEN];
+ char m_refPath[MAX_PATH_LEN];
+ char m_logString[LOG_STRING];
+ int m_numFrames;
+ CvPoint m_patchPos[NUM_PATCH * NUM_MACBETH];
+ CvScalar m_mean [NUM_PATCH * NUM_MACBETH];
+ CvScalar m_stdev [NUM_PATCH * NUM_MACBETH];
+ CvScalar m_meanRef [NUM_PATCH * NUM_MACBETH];
+ CvScalar m_stdevRef[NUM_PATCH * NUM_MACBETH];
+ CvScalar *m_miniPatch[NUM_PATCH];
+ CvScalar m_miniPatchRef[NUM_PATCH];
+ float m_showScale[3];
+
+public:
+ Analysis(void);
+ ~Analysis(void);
+ bool Process();
+ void OnMouse(int disp, int ev, int x, int y, int flags);
+
+private:
+ bool Init();
+ bool ReadImage(int frame, bool ref = false);
+ bool OpenLogFile(bool gLog = false, bool ref = false);
+ bool CloseLogFile(bool gLog = false);
+ bool SetPath(const char *dateStr, int timeMDE, int subject, int count);
+ bool GetNumFrames();
+ bool GetPatchPosition(bool ref = false);
+ bool GetAllPatchColor(bool ref = false);
+ bool GetPatchColor(int frame, bool ref = false);
+ bool ShowImage(char *window = DISP1);
+ bool WriteLog(bool gLog = false, bool fileOnly = false);
+ bool CorrectionMat();
+ double GetMRVal(int patch, int frame, int index);
+ double GetMRVal2(double v0, double v1, double v2, int index);
+ char ClipValue(double val);
+};
diff --git a/reference/ColorCorrection/ColorCorrection.cpp b/reference/ColorCorrection/ColorCorrection.cpp
new file mode 100644
index 0000000..5475275
--- /dev/null
+++ b/reference/ColorCorrection/ColorCorrection.cpp
@@ -0,0 +1,14 @@
+// ColorCorrection.cpp : �R���\�[�� �A�v���P�[�V�����̃G���g�� �|�C���g���`���܂��B
+//
+
+#include "stdafx.h"
+#include "Analysis.h"
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+ Analysis ana;
+ if (!ana.Process()) return 1;
+
+ return 0;
+}
+
diff --git a/reference/ColorCorrection/MiniMacbeth.cpp b/reference/ColorCorrection/MiniMacbeth.cpp
new file mode 100644
index 0000000..f12577f
--- /dev/null
+++ b/reference/ColorCorrection/MiniMacbeth.cpp
@@ -0,0 +1,162 @@
+#include "MiniMacbeth.h"
+
+MiniMacbeth::MiniMacbeth(void)
+{
+ m_macbeth = NULL;
+ m_camera = NULL;
+ m_show = NULL;
+ m_fpOutput = NULL;
+}
+
+MiniMacbeth::~MiniMacbeth(void)
+{
+ if (m_macbeth != NULL) cvReleaseImage(&m_macbeth);
+ if (m_camera != NULL) cvReleaseImage(&m_macbeth);
+ if (m_show != NULL) cvReleaseImage(&m_show);
+ if (m_fpOutput != NULL) fclose(m_fpOutput);
+
+ cvDestroyAllWindows();
+}
+
+bool MiniMacbeth::Init()
+{
+ m_macbeth = cvLoadImage(MACBETH_PATH);
+ if (!m_macbeth) return false;
+
+ cvNamedWindow(DISP1);
+// cvNamedWindow(DISP2);
+
+ return true;
+}
+
+bool MiniMacbeth::ReadImage(char *filename)
+{
+ // �J�����摜��荞��
+ if (m_camera != NULL) cvReleaseImage(&m_camera);
+ m_camera = cvLoadImage(filename);
+ if (!m_camera) return false;
+
+ // �\���p�摜�o�b�t�@
+ if (m_show != NULL) cvReleaseImage(&m_show);
+ m_show = (IplImage*)cvClone(m_camera);
+
+ return true;
+}
+
+bool MiniMacbeth::OpenOutputFile(char *filename)
+{
+ fopen_s(&m_fpOutput, filename, "w");
+ if (!m_fpOutput) return false;
+
+ const char *rgb = "RGB";
+ const char *ms = "ms";
+ for(int i=0; i<18*6; i++) fprintf(m_fpOutput, "p%d_%cm, ", (i%54)/3, rgb[i%3], ms[i/54]);
+ fprintf(m_fpOutput, "\n");
+
+ return true;
+}
+
+bool MiniMacbeth::Process()
+{
+ if (!m_camera || !m_macbeth) return false;
+
+ // �T�����ʗp�A���C�쐬
+ IplImage *match = cvCreateImage(
+ cvSize(SEARCH_AREA - m_macbeth->width + 1,
+ SEARCH_AREA - m_macbeth->height + 1),
+ IPL_DEPTH_32F , 1);
+
+ // �}�b�`���O�̏���
+ CvMat *rotateMat = cvCreateMat(2, 3, CV_32FC1);
+ IplImage *rotateMacbeth = cvCreateImage(cvGetSize(m_macbeth), IPL_DEPTH_8U, 3);
+ CvRect roi;
+ double gAngle = 0;
+ double minC, maxC, gMinC = -1.0;
+ CvPoint minPos, maxPos, gMinPos;
+ CvScalar ave[18], std[18];
+
+ // �E��ƍ����̃}�N�x�X����������
+ for (int place=0; place<2; place ++)
+ {
+ // �}�N�x�X���ӗ̈��ROI��ݒ�
+ if (place==0) roi = cvRect(m_camera->width-SEARCH_AREA, 0, SEARCH_AREA, SEARCH_AREA);
+ else roi = cvRect(0, m_camera->height-SEARCH_AREA, SEARCH_AREA, SEARCH_AREA);
+ cvSetImageROI(m_camera, roi);
+
+ // �����ʃe���v���[�g�}�b�`���O
+ gMinC = -1.0;
+ for(double angle = 0; angle < 360.0; angle += ROTATE_RESOLUTION)
+ {
+ cv2DRotationMatrix(cvPoint2D32f(m_macbeth->width/2.0, m_macbeth->height/2.0),
+ angle, 1.0, rotateMat);
+ cvWarpAffine(m_macbeth, rotateMacbeth, rotateMat);
+
+ cvMatchTemplate(m_camera, rotateMacbeth, match, CV_TM_SQDIFF);
+ cvMinMaxLoc(match, &minC, &maxC, &minPos, &maxPos);
+ if (minC < gMinC || gMinC < 0)
+ {
+ gAngle = angle;
+ gMinPos.x = minPos.x + (m_macbeth->width / 2) + roi.x;
+ gMinPos.y = minPos.y + (m_macbeth->height / 2) + roi.y;
+ gMinC = minC;
+ }
+ }
+
+ // �e�p�b�`�̕��ϒl�ƕW�������v�Z
+ printf("Best Angle:%.0f min:%f x,y=%d,%d\n", gAngle, gMinC, gMinPos.x, gMinPos.y);
+ this->PatchProc(gMinPos.x, gMinPos.y, &ave[place*9], &std[place*9]);
+
+ for(int i=0; i<8; i++)
+ {
+ int bx = cvRound(gMinPos.x + BLACK_LEN * (i%2 ? cvSqrt(2.0) : 1.0) *
+ cos((BLACK_ANGLE - gAngle + i*45.0)/180.0*CV_PI));
+ int by = cvRound(gMinPos.y + BLACK_LEN * (i%2 ? cvSqrt(2.0) : 1.0) *
+ sin((BLACK_ANGLE - gAngle + i*45.0)/180.0*CV_PI));
+ this->PatchProc(bx, by, &ave[i+1+place*9], &std[i+1+place*9]);
+ }
+ }
+
+ // ���ʂ̕\��
+ for(int i=0; i<18; i++)
+ {
+ fprintf(m_fpOutput, "%.2f, %.2f, %.2f, %.2f, %.2f, %.2f, ",
+ ave[i].val[2], ave[i].val[1], ave[i].val[0],
+ std[i].val[2], std[i].val[1], std[i].val[0]);
+ }
+ fprintf(m_fpOutput, "\n");
+ this->ShowImage();
+
+ // �摜�o�b�t�@�̉��
+ cvReleaseImage(&rotateMacbeth);
+ cvReleaseMat(&rotateMat);
+ cvReleaseImage(&match);
+
+ return true;
+}
+
+bool MiniMacbeth::PatchProc(int x, int y, CvScalar *ave, CvScalar *stdev)
+{
+ // ���ʉ摜�Ƀp�b�`��`��
+ cvRectangle(m_show, cvPoint(x-PATCH_SIZE, y-PATCH_SIZE),
+ cvPoint(x+PATCH_SIZE, y+PATCH_SIZE),
+ cvScalar(255,0,0),CV_FILLED);
+
+ // �p�b�`ROI�ݒ�
+ cvSetImageROI(m_camera, cvRect(x-PATCH_SIZE, y-PATCH_SIZE,
+ PATCH_SIZE*2+1, PATCH_SIZE*2+1));
+
+ // �p�b�`�̕��ςƕW�������v�Z
+ cvAvgSdv(m_camera, ave, stdev);
+
+ return true;
+}
+
+bool MiniMacbeth::ShowImage()
+{
+ IplImage *temp = cvCreateImage(cvSize(m_show->width/2, m_show->height/2), IPL_DEPTH_8U, 3);
+ cvResize(m_show, temp, CV_INTER_LINEAR);
+ cvShowImage(DISP1, temp);
+ cvWaitKey(1);
+ cvReleaseImage(&temp);
+ return true;
+}
\ No newline at end of file
diff --git a/reference/ColorCorrection/MiniMacbeth.h b/reference/ColorCorrection/MiniMacbeth.h
new file mode 100644
index 0000000..e514bb5
--- /dev/null
+++ b/reference/ColorCorrection/MiniMacbeth.h
@@ -0,0 +1,39 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+
+#define MACBETH_PATH "D:\\usr\\work\\��data\\MiniMacbeth0.jpg"
+#define MAX_PATH_LEN 1024
+#define SEARCH_AREA 200
+#define PATCH_SIZE 4
+#define DISP1 "DISPLAY1"
+#define DISP2 "DISPLAY2"
+#define ROTATE_RESOLUTION 2.0 // Searching rotation interval in degree
+
+// �~�j�}�N�x�X�摜 MiniMacbeth0.jpg ����v�Z
+// ���S�p�b�`���W 39,39 ���p�b�`���W 20,45 (-19,+6)
+#define BLACK_ANGLE 161.0 // =atan(+6/-19) (degree)
+#define BLACK_LEN 20.0 // =sqrt((+6)^2 + (-19)^2)
+
+
+class MiniMacbeth
+{
+private:
+ IplImage *m_macbeth;
+ IplImage *m_camera;
+ IplImage *m_show;
+ FILE *m_fpOutput;
+
+public:
+ MiniMacbeth(void);
+ ~MiniMacbeth(void);
+ bool Init();
+ bool ReadImage(char *filename);
+ bool OpenOutputFile(char *filename);
+ bool Process();
+ bool PatchProc(int x, int y, CvScalar *ave, CvScalar *stdev);
+ bool ShowImage();
+};
diff --git a/reference/ColorCorrection/stdafx.h b/reference/ColorCorrection/stdafx.h
new file mode 100644
index 0000000..93c07ff
--- /dev/null
+++ b/reference/ColorCorrection/stdafx.h
@@ -0,0 +1,15 @@
+// stdafx.h : �W���̃V�X�e�� �C���N���[�h �t�@�C���̃C���N���[�h �t�@�C���A�܂���
+// �Q�Ɖ������A�����܂�ύX����Ȃ��A�v���W�F�N�g��p�̃C���N���[�h �t�@�C��
+// ���L�q���܂��B
+//
+
+#pragma once
+
+#include "targetver.h"
+
+#include
+#include
+
+
+
+// TODO: �v���O�����ɕK�v�Ȓlj��w�b�_�[�������ŎQ�Ƃ��Ă��������B
diff --git a/reference/ColorCorrection/targetver.h b/reference/ColorCorrection/targetver.h
new file mode 100644
index 0000000..b18cde4
--- /dev/null
+++ b/reference/ColorCorrection/targetver.h
@@ -0,0 +1,13 @@
+#pragma once
+
+// �ȉ��̃}�N���́A�Œ���K�v�ȃv���b�g�t�H�[�����`���܂��B�Œ���K�v�ȃv���b�g�t�H�[���Ƃ́A
+// �A�v���P�[�V���������s���邽�߂ɕK�v�ȋ@�\��������ł��Â��o�[�W������ Windows �� Internet Explorer �Ȃ�
+// �������܂��B�����̃}�N���́A�w�肵���o�[�W�����ƁA����ȑO�̃o�[�W�����̃v���b�g�t�H�[����ŗ��p�ł��邷�ׂĂ̋@�\��L���ɂ��邱�Ƃɂ����
+// ���삵�܂��B
+
+// ���Ŏw�肳�ꂽ��`�̑O�ɑΏۃv���b�g�t�H�[�����w�肵�Ȃ���Ȃ�Ȃ��ꍇ�A�ȉ��̒�`��ύX���Ă��������B
+// �قȂ�v���b�g�t�H�[���ɑΉ�����l�Ɋւ���ŐV���ɂ��ẮAMSDN ���Q�Ƃ��Ă��������B
+#ifndef _WIN32_WINNT // �Œ���K�v�ȃv���b�g�t�H�[���� Windows Vista �ł��邱�Ƃ��w�肵�܂��B
+#define _WIN32_WINNT 0x0600 // ����� Windows �̑��̃o�[�W���������ɓK�Ȓl�ɕύX���Ă��������B
+#endif
+
diff --git a/reference/Tracking.cpp b/reference/Tracking.cpp
new file mode 100644
index 0000000..0beecd8
--- /dev/null
+++ b/reference/Tracking.cpp
@@ -0,0 +1,295 @@
+#include "Tracking.h"
+
+//-------------------------------------------------------------------------------
+// �R���X�g���N�^
+CTracking::CTracking(void)
+{
+ m_Point[0] = NULL;
+ m_Point[1] = NULL;
+ m_PointInit = NULL;
+ m_Gray = NULL;
+ m_GrayPre = NULL;
+ m_Pyramid = NULL;
+ m_PyramidPre = NULL;
+ m_Mask1 = NULL;
+ m_Mask2 = NULL;
+ m_Mask3 = NULL;
+ m_Mask4 = NULL;
+ m_Status = NULL;
+}
+
+//-------------------------------------------------------------------------------
+// �f�X�g���N�^
+CTracking::~CTracking(void)
+{
+ this->Release();
+}
+
+//-------------------------------------------------------------------------------
+// ���������
+void CTracking::Release(void)
+{
+ SAFE_DELETEA(m_Point[0]);
+ SAFE_DELETEA(m_Point[1]);
+ SAFE_DELETEA(m_PointInit);
+ SAFE_RELEASEIMG(m_Gray);
+ SAFE_RELEASEIMG(m_GrayPre);
+ SAFE_RELEASEIMG(m_Pyramid);
+ SAFE_RELEASEIMG(m_PyramidPre);
+ SAFE_RELEASEIMG(m_Mask1);
+ SAFE_RELEASEIMG(m_Mask2);
+ SAFE_RELEASEIMG(m_Mask3);
+ SAFE_RELEASEIMG(m_Mask4);
+ SAFE_DELETEA(m_Status)
+}
+
+//-------------------------------------------------------------------------------
+// ������
+bool CTracking::Init(const CvPoint *point, const int numPoint,
+ const IplImage *img)
+{
+ this->Release();
+
+ m_NumPoint = numPoint;
+ m_Criteria = cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03);
+ m_PyrFlag = 0;
+ m_TotalMovement = 0;
+
+ m_Point[0] = new CvPoint2D32f [numPoint];
+ m_Point[1] = new CvPoint2D32f [numPoint];
+ m_PointInit = new CvPoint [numPoint];
+ m_Gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_GrayPre = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Pyramid = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_PyramidPre = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask1 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask2 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask3 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Mask4 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
+ m_Status = new char [numPoint];
+
+ for (int i = 0; i < numPoint; i++)
+ {
+ m_PointInit[i] = point[i];
+ m_Point[0][i] = cvPointTo32f(point[i]);
+ }
+
+ cvCvtColor(img, m_GrayPre, CV_BGR2GRAY);
+
+ cvFindCornerSubPix(m_GrayPre, m_Point[0], numPoint,
+ cvSize(TRACK_WIN_SIZE, TRACK_WIN_SIZE), cvSize(-1, -1),
+ m_Criteria);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �t���[�������i�g���b�L���O�j
+bool CTracking::Frame(const IplImage *img)
+{
+ // �O���[�X�P�[���ϊ�
+ cvCvtColor(img, m_Gray, CV_BGR2GRAY);
+
+ // �I�v�e�B�J���t���[���o
+ cvCalcOpticalFlowPyrLK(m_GrayPre, m_Gray, m_PyramidPre, m_Pyramid,
+ m_Point[0], m_Point[1], m_NumPoint,
+ cvSize(TRACK_WIN_SIZE, TRACK_WIN_SIZE),
+ 3, m_Status, 0, m_Criteria, m_PyrFlag);
+
+ // ����O�t���[���̃s���~�b�h�v�Z���ȗ�����
+ m_PyrFlag |= CV_LKFLOW_PYR_A_READY;
+
+ // �o�b�t�@�̌���
+ IplImage *swap;
+ CV_SWAP(m_Gray, m_GrayPre, swap);
+ CV_SWAP(m_Pyramid, m_PyramidPre, swap);
+ CvPoint2D32f *swapP;
+ CV_SWAP(m_Point[0], m_Point[1], swapP);
+
+ // �ړ��ʎZ�o
+ for (int i = 0; i < m_NumPoint; i ++)
+ {
+ m_Movement = sqrt(
+ pow((double)(m_Point[0][i].x - m_Point[1][i].x), 2.0) +
+ pow((double)(m_Point[0][i].y - m_Point[1][i].y), 2.0));
+ m_TotalMovement += m_Movement;
+ }
+
+ // �g���b�L���O����
+ for (int i = 0; i < m_NumPoint; i ++) if (!m_Status[i]) return false;
+ this->ValidatePoints();
+ if (m_Validate) this->CalcROIMask();
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �ǐՓ_��`��
+bool CTracking::DrawPoints(IplImage *img)
+{
+ // �_�̕`��
+ for (int i = 0; i < m_NumPoint; i++)
+ {
+ cvCircle(img, cvPointFrom32f(m_Point[0][i]), 5,
+ CV_RGB(0, 0, i*60), CV_FILLED);
+ cvLine(img, cvPointFrom32f(m_Point[0][i]),
+ cvPointFrom32f(m_Point[0][(i+1) % m_NumPoint]), CV_RGB(0, 0, 255));
+ }
+
+ // �����_�̕`��
+ for (int i = 0; i < m_NumPoint; i++)
+ {
+ cvLine(img, m_PointInit[i],
+ m_PointInit[(i+1) % m_NumPoint], CV_RGB(0, 255, 0));
+ }
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// ROI��`��
+bool CTracking::DrawROI(IplImage *img)
+{
+ cvSet(img, CV_RGB(255, 0, 0), m_Mask1);
+ cvSet(img, CV_RGB(0, 255, 0), m_Mask2);
+ cvSet(img, CV_RGB(0, 0, 255), m_Mask3);
+ cvSet(img, CV_RGB(244, 163, 46), m_Mask4);
+
+ return true;
+}
+
+//-------------------------------------------------------------------------------
+// �ǐՓ_��`��
+void CTracking::ValidatePoints()
+{
+ int conX[8][2] = {{0,2},{0,3},{0,4},{1,2},{1,3},{1,4},{2,3},{2,4}};
+ int conY[8][2] = {{0,1},{0,2},{0,3},{1,2},{3,2},{4,1},{4,1},{4,3}};
+
+ m_Validate = true;
+ for (int i = 0; i < 8; i++)
+ {
+ if (m_Point[0][conX[i][0]].x >= m_Point[0][conX[i][1]].x) m_Validate = false;
+ if (m_Point[0][conY[i][0]].y >= m_Point[0][conY[i][1]].y) m_Validate = false;
+ }
+}
+
+//-------------------------------------------------------------------------------
+// ROI�}�X�N�摜�����
+void CTracking::CalcROIMask()
+{
+ // ROI�}�X�N�摜1
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1|�� ��|3
+ // ��@�@�@ /
+ // ��_______/
+ // 2
+
+ int points1[2][3] = {{0, 2, 1}, {4, 2, 3}};
+
+ cvSet(m_Mask1, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ float cx1 = (m_Point[0][points1[i][0]].x + m_Point[0][points1[i][1]].x) / 2;
+ float cy1 = (m_Point[0][points1[i][0]].y + m_Point[0][points1[i][1]].y) / 2;
+ roi.x = (m_Point[0][points1[i][2]].x + cx1) / 2;
+ roi.y = (m_Point[0][points1[i][2]].y + cy1) / 2;
+ cvDrawCircle(m_Mask1, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+ // ROI�}�X�N�摜2
+ // 0____________ 4
+ // | �� �� |
+ // | |
+ // | |
+ // 1| |3
+ // ��@�@�@ /
+ // ��_______/
+ // 2
+
+ int points2[2][4] = {{0, 3, 0, 4}, {4, 1, 4, 0}};
+
+ cvSet(m_Mask2, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ float cx1 = m_Point[0][points2[i][0]].x + (m_Point[0][points2[i][1]].x - m_Point[0][points2[i][0]].x) / 4;
+ float cy1 = m_Point[0][points2[i][0]].y + (m_Point[0][points2[i][1]].y - m_Point[0][points2[i][0]].y) / 4;
+ float cx2 = (m_Point[0][points2[i][2]].x + m_Point[0][points2[i][3]].x) / 2 ;
+ float cy2 = (m_Point[0][points2[i][2]].y + m_Point[0][points2[i][3]].y) / 2;
+ roi.x = (cx1 + cx2) / 2;
+ roi.y = (cy1 + cy2) / 2;
+ cvDrawCircle(m_Mask2, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+ // ROI�}�X�N�摜3
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| �� �� |3
+ // ��@�@�@ /
+ // ��_______/
+ // 2
+
+ int points3[2][3] = {{0, 3, 2}, {4, 1, 2}};
+
+ cvSet(m_Mask3, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ float cx1 = m_Point[0][points3[i][0]].x + (m_Point[0][points3[i][1]].x - m_Point[0][points3[i][0]].x) / 4;
+ float cy1 = m_Point[0][points3[i][0]].y + (m_Point[0][points3[i][1]].y - m_Point[0][points3[i][0]].y) / 4;
+ roi.x = (m_Point[0][points3[i][2]].x + cx1) / 2;
+ roi.y = (m_Point[0][points3[i][2]].y + cy1) / 2;
+ cvDrawCircle(m_Mask3, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+ // ROI�}�X�N�摜3
+ // 0____________ 4
+ // | |
+ // | |
+ // | |
+ // 1| |3
+ // ��@�@�@ /
+ // ��_��_��_/
+ // 2
+
+ int points4[2][2] = {{0, 2}, {4, 2}};
+
+ cvSet(m_Mask4, cvScalar(0));
+ for (int i = 0; i < 2; i ++)
+ {
+ CvPoint2D32f roi;
+ roi.x = m_Point[0][points4[i][0]].x + (m_Point[0][points4[i][1]].x - m_Point[0][points4[i][0]].x) * 7 / 8;
+ roi.y = m_Point[0][points4[i][0]].y + (m_Point[0][points4[i][1]].y - m_Point[0][points4[i][0]].y) * 7 / 8;
+ cvDrawCircle(m_Mask4, cvPointFrom32f(roi), CALC_ROI_RADIUS, cvScalar(1), CV_FILLED);
+ }
+
+
+}
+
+//-------------------------------------------------------------------------------
+// ROI�̕��ϐF���v�Z
+CvScalar CTracking::ROIColor1(IplImage *img)
+{
+ return cvAvg(img, m_Mask1);
+}
+
+CvScalar CTracking::ROIColor2(IplImage *img)
+{
+ return cvAvg(img, m_Mask2);
+}
+
+CvScalar CTracking::ROIColor3(IplImage *img)
+{
+ return cvAvg(img, m_Mask3);
+}
+
+CvScalar CTracking::ROIColor4(IplImage *img)
+{
+ return cvAvg(img, m_Mask4);
+}
diff --git a/reference/Tracking.h b/reference/Tracking.h
new file mode 100644
index 0000000..ee567e9
--- /dev/null
+++ b/reference/Tracking.h
@@ -0,0 +1,49 @@
+#pragma once
+
+#include "stdafx.h"
+
+#define TRACK_WIN_SIZE 10
+#define CALC_ROI_RADIUS 10
+
+class CTracking
+{
+private:
+ int m_NumPoint;
+ CvPoint2D32f *m_Point[2];
+ CvPoint *m_PointInit;
+ IplImage *m_Gray;
+ IplImage *m_GrayPre;
+ IplImage *m_Pyramid;
+ IplImage *m_PyramidPre;
+ IplImage *m_Mask1;
+ IplImage *m_Mask2;
+ IplImage *m_Mask3;
+ IplImage *m_Mask4;
+ char *m_Status;
+ CvTermCriteria m_Criteria;
+ int m_PyrFlag;
+ double m_Movement;
+ double m_TotalMovement;
+ bool m_Validate;
+
+private:
+ void Release();
+ void ValidatePoints();
+ void CalcROIMask();
+
+public:
+ CTracking(void);
+ ~CTracking(void);
+ bool Init(const CvPoint *point, const int numPoint,
+ const IplImage *img);
+ bool Frame(const IplImage *img);
+ bool DrawPoints(IplImage *img);
+ bool DrawROI(IplImage *img);
+ double Movement() { return m_Movement; }
+ double TotalMovement() { return m_TotalMovement; }
+ bool IsVaildate() { return m_Validate; }
+ CvScalar ROIColor1(IplImage *img);
+ CvScalar ROIColor2(IplImage *img);
+ CvScalar ROIColor3(IplImage *img);
+ CvScalar ROIColor4(IplImage *img);
+};
diff --git a/reference/cvimage_01.cpp b/reference/cvimage_01.cpp
new file mode 100644
index 0000000..780c01a
--- /dev/null
+++ b/reference/cvimage_01.cpp
@@ -0,0 +1,211 @@
+//---------------------------------------------------------
+// �T�v : �摜��\������
+// File Name : cvimage_01.cpp
+//
+// �����Ӂ�
+// OpenCV�́yRGB�z�̏��Ԃł͂Ȃ��C�yBGR�z�̏��ɂȂ��Ă���D
+//
+//
+// 2009/06/15 Yuya Ishikawa
+// 2009/06/22 T.Nakaguchi
+//---------------------------------------------------------
+
+#include
+#include
+#include
+#include
+
+#define MAX_POINTS 5
+
+// �ϐ��錾
+IplImage *src_img; // ���͉摜
+IplImage **dst_img; // �o�͉摜
+IplImage *g_showImg; // �\���p�摜
+
+
+int levels = 0; // �g���b�N�o�[�̒l���͂��鐔�l�̍ŏ��l
+int g_numClick = 0;
+CvPoint g_click[MAX_POINTS];
+
+
+char g_path[1024];
+
+//-------------------------------------------
+//
+// �g���b�N�o�[�̒l���ς�������ɌĂ���
+//
+// �����@pos�F�g���b�N�o�[�̒l
+//
+//-------------------------------------------
+void on_change(int pos)
+{
+ // �N���b�N��`��
+ cvCopy(dst_img[pos], g_showImg);
+ for (int i=0; iwidth/2, src_img->height/2),IPL_DEPTH_8U, 3);
+
+ // ���T�C�Y
+// cvResize(src_img, dst_img[i], CV_INTER_CUBIC);
+ cvResize(src_img, dst_img[i], CV_INTER_NN);
+ //cvResize��3�Ԗڂ̈����ɂ���Ԗ@���w��ł���
+ //[��Ԗ@]------------------------------------------
+ // CV_INTER_NN : �ŋߖT���
+ // CV_INTER_LINEAR : �o�C���j�A��ԁi�o������ԁj
+ // CV_INTER_AREA : �s�N�Z���̈�̊W��p�������T���v�����O�D
+ // ( �g��� CV_INTER_NN ���l�D�k���̓��A��������ł����@ )
+ // CV_INTER_CUBIC : �o�O�����
+ //--------------------------------------------------
+
+ cvReleaseImage(&src_img);
+ }
+
+ // �\���p�摜�̍쐬
+ g_showImg = cvCreateImage(cvSize(dst_img[0]->width, dst_img[0]->height),IPL_DEPTH_8U, 3);
+
+ // on_change����0�b�ڂ��Ăяo���ĕ\��
+ on_change(0);
+
+ // �g���b�N�o�[���쐬����
+ cvCreateTrackbar("�B�e�摜��", "dst_img", &levels, numFrames-1, on_change);
+
+ // �}�E�X�C�x���g
+ cvSetMouseCallback("dst_img", on_click, 0);
+
+ // �L�[���͂�҂�
+ cvWaitKey( 0 );
+
+ // ���������������
+ cvReleaseImage(&g_showImg);
+ for(int j=0; j 0.5) && (scores[0, i] > max_score))
+ // {
+ // max_score = scores[0, i];
+ // float y_min = boxes[0, i, 0] * (float)bitmap_bitch.Height;
+ // float x_min = boxes[0, i, 1] * (float)bitmap_bitch.Width;
+ // float y_max = boxes[0, i, 2] * (float)bitmap_bitch.Height;
+ // float x_max = boxes[0, i, 3] * (float)bitmap_bitch.Width;
+ // P1.X = (int)x_min;
+ // P1.Y = (int)y_min;
+ // P2.X = (int)x_max;
+ // P2.Y = (int)y_max;
+ // Cv2.Rectangle(mat_drawBox, P1, P2, new Scalar(0, 255, 0), 5);
+ // rectangle.X = (int)x_min;
+ // rectangle.Y = (int)y_min;
+ // rectangle.Width = (int)(x_max - x_min);
+ // rectangle.Height = (int)(y_max - y_min);
+
+ // check_detection = 1;
+ // }
+ // }
+ // }
+ //}
+ //if (check_detection == 1)
+ //{
+ // mat_drawBox.SaveImage(basepath + "\\detection" + imageFile);
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_drawBox);
+ // pictureBox_detection.Image = bitmap_bitch;
+ // pictureBox_detection.Refresh();
+ // label1.BackColor = Color.White;
+ // label2.BackColor = Color.Red;
+ //}
+ //else
+ //{
+ // MessageBox.Show("Error: Sorry can not detect any tongue in this image. Press [OK] to skip preprocessing.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ // time = DateTime.Now.ToLocalTime().ToString();
+ // File.AppendAllText("DetectionFailedLog.txt ", time + " " + imageFile + "\n");
+ // pictureBox_detection.Image = null;
+ // pictureBox_detection.Refresh();
+ // pictureBox_cropResized.Image = null;
+ // pictureBox_cropResized.Refresh();
+ // pictureBox_output.Image = null;
+ // pictureBox_output.Refresh();
+ // pictureBox_outputSRG.Image = null;
+ // pictureBox_outputSRG.Refresh();
+ // pictureBox_maskSRG.Image = null;
+ // pictureBox_maskSRG.Refresh();
+ // pictureBox_extraction.Image = null;
+ // pictureBox_extraction.Refresh();
+ // pictureBox_contract.Image = null;
+ // pictureBox_contract.Refresh();
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ // byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+
+ // Thread.Sleep(1000);
+ // modelFile = DownloadDefaultModel_noBoxPix2Pix(basepath);
+ // using (var graph = new TFGraph())
+ // {
+ // var model = File.ReadAllBytes(modelFile);
+ // graph.Import(model, "");
+
+ // using (var session = new TFSession(graph))
+ // {
+ // var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+
+ // var runner = session.GetRunner();
+ // runner
+ // .AddInput(graph["generator/input_image"][0], tensor)
+ // .Fetch(graph["generator/prediction"][0]);
+ // var output = runner.Run();
+ // float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ // for (int p = 0; p < 256; p++)
+ // {
+ // for (int q = 0; q < 256; q++)
+ // {
+ // float check = resultfloat[0, p, q, 0];
+ // if (check < 0)
+ // {
+ // mask[ii] = 0;
+ // }
+ // else
+ // {
+ // mask[ii] = 255;
+ // }
+ // ii++;
+ // }
+ // }
+ // }
+ // }
+ // Thread.Sleep(1000);
+ // bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ // mat_outputNoBox = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputNoBox);
+ // pictureBox_output.Image = bitmap_bitch;
+ // pictureBox_output.Refresh();
+ // label1.BackColor = Color.White;
+ // label4.BackColor = Color.Red;
+ // mat_outputNoBox.SaveImage(basepath + "\\output256" + imageFile);
+ // try
+ // {
+ // RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
+ // RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
+ // }
+ // catch
+ // {
+ // MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ // break;
+ // }
+ // mat_outputSRGNoBox = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRGNoBox);
+ // pictureBox_outputSRG.Image = bitmap_bitch;
+ // pictureBox_outputSRG.Refresh();
+ // label4.BackColor = Color.White;
+ // label5.BackColor = Color.Red;
+ // Cv2.Resize(mat_outputSRGNoBox, mat_mask, mat_input.Size());
+ // mat_mask.SaveImage(basepath + "\\mask" + imageFile);
+ // try
+ // {
+ // RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
+ // RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
+ // }
+ // catch
+ // {
+ // MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ // break;
+ // }
+ // mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
+ // Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ // pictureBox_maskSRG.Image = bitmap_bitch;
+ // pictureBox_maskSRG.Refresh();
+ // label5.BackColor = Color.White;
+ // label6.BackColor = Color.Red;
+ // mat_extraction = mat_input.Clone();
+ // for (int i = 0; i < mat_input.Height; i++)
+ // {
+ // for (int j = 0; j < mat_input.Width; j++)
+ // {
+ // Vec3b pix = mat_extraction.At(i, j);
+ // if (mat_maskSRG.At(i, j) == 0)
+ // {
+ // pix[0] = (byte)(255);
+ // pix[1] = (byte)(255);
+ // pix[2] = (byte)(255);
+ // mat_extraction.Set(i, j, pix);
+ // }
+ // else
+ // {
+ // pix[0] = (byte)(mat_extraction.At(i, j).Item0);
+ // pix[1] = (byte)(mat_extraction.At(i, j).Item1);
+ // pix[2] = (byte)(mat_extraction.At(i, j).Item2);
+ // mat_extraction.Set(i, j, pix);
+ // }
+ // }
+ // }
+ // mat_extraction.SaveImage(basepath + "\\extraction" + imageFile);
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_extraction);
+ // pictureBox_extraction.Image = bitmap_bitch;
+ // pictureBox_extraction.Refresh();
+ // label6.BackColor = Color.White;
+ // label7.BackColor = Color.Red;
+ // bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_input);
+ // pictureBox_contract.Image = bitmap_bitch;
+ // pictureBox_contract.Refresh();
+ // label7.BackColor = Color.White;
+ // label8.BackColor = Color.Red;
+ // continue;
+ //}
+ //for (int i = P1.Y; i < P2.Y; i++)
+ //{
+ // for (int j = P1.X; j < P2.X; j++)
+ // {
+ // Vec3b pix = mat_input.At(i, j);
+ // mat_cropped.Set(i, j, pix);
+ // }
+ //}
+ //mat_cropped.SaveImage(basepath + "\\cropped" + imageFile);
+ //OpenCvSharp.Size size_roi = new OpenCvSharp.Size();
+ //size_roi.Height = rectangle.Height;
+ //size_roi.Width = rectangle.Width;
+ //roi = new Rect(P1, size_roi);
+ //mat_roisize = mat_input.Clone(roi);
+ //Cv2.Resize(mat_roisize, mat_roi, mat_roi256.Size());
+ //mat_roi.SaveImage(basepath + "\\cropresized" + imageFile);
+ //bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_roi);
+ //pictureBox_cropResized.Image = bitmap_bitch;
+ //pictureBox_cropResized.Refresh();
+ //label2.BackColor = Color.White;
+ //label3.BackColor = Color.Red;
+ //byte_inputSegmentation = Bitmap2Byte(bitmap_bitch);
+ //Thread.Sleep(1000);
+ //modelFile = DownloadDefaultModel(basepath);
+ //using (var graph = new TFGraph())
+ //{
+ // var model = File.ReadAllBytes(modelFile);
+ // graph.Import(model, "");
+
+ // using (var session = new TFSession(graph))
+ // {
+ // var tensor = ImageUtil2.CreateTensorFromImageFile(byte_inputSegmentation);
+ // var runner = session.GetRunner();
+ // runner
+ // //.AddInput(graph["generator/input_image"][0], tensor)
+ // //.Fetch(graph["generator/prediction"][0]);
+
+ // .AddInput(graph["input_image"][0], tensor)
+ // .Fetch(graph["generator1/decoder_1/Tanh"][0]);
+
+ // var output = runner.Run();
+ // float[,,,] resultfloat = (float[,,,])output[0].GetValue(jagged: false);
+
+ // for (int p = 0; p < 256; p++)
+ // {
+ // for (int q = 0; q < 256; q++)
+ // {
+ // float check = resultfloat[0, p, q, 0];
+ // if (check < 0)
+ // {
+ // mask[ii] = 0;
+ // }
+ // else
+ // {
+ // mask[ii] = 255;
+ // }
+ // ii++;
+ // }
+ // }
+ // }
+ //}
+ //Thread.Sleep(1000);
+ //bitmap_bitch = ToGrayBitmap(mask, 256, 256);
+ //mat_output = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap_bitch);
+ //bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_output);
+ //pictureBox_output.Image = bitmap_bitch;
+ //pictureBox_output.Refresh();
+ //label3.BackColor = Color.White;
+ //label4.BackColor = Color.Red;
+ //mat_output.SaveImage(basepath + "\\output256" + imageFile);
+ //try
+ //{
+ // RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\output_changed1" + imageFile, 500, 1, 1);
+ // RemoveSmallRegion(basepath + "\\output_changed1" + imageFile, basepath + "\\output_changed2" + imageFile, 500, 0, 0);
+ //}
+ //catch
+ //{
+ // MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ // break;
+ //}
+ //mat_outputSRG = new Mat(basepath + "\\output_changed2" + imageFile, ImreadModes.GrayScale);
+ //bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_outputSRG);
+ //pictureBox_outputSRG.Image = bitmap_bitch;
+ //pictureBox_outputSRG.Refresh();
+ //label4.BackColor = Color.White;
+ //label5.BackColor = Color.Red;
+ //Cv2.Resize(mat_outputSRG, mat_outputChanged, mat_roisize.Size());
+ //mat_outputChanged.SaveImage(basepath + "\\output_resized" + imageFile);
+ //mat_mask = new Mat(mat_input.Size(), MatType.CV_8UC1, 1);
+ //for (int i = P1.Y; i < P2.Y; i++)
+ //{
+ // for (int j = P1.X; j < P2.X; j++)
+ // {
+ // int pix = mat_outputChanged.At(mmp, pmm);
+ // mat_mask.Set(i, j, pix);
+ // pmm++;
+ // }
+ // mmp++;
+ // pmm = 0;
+ //}
+ //mmp = 0;
+ //Cv2.Resize(mat_mask, mat_mask, mat_input.Size());
+ //mat_mask.SaveImage(basepath + "\\mask" + imageFile);
+ //try
+ //{
+ // RemoveSmallRegion(basepath + "\\mask" + imageFile, basepath + "\\mask_changed1" + imageFile, 500, 1, 1);
+ // RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 500, 0, 0);
+ //}
+ //catch
+ //{
+ // MessageBox.Show("Error: Unable to reprocess! Please check is there [RemoveSmallRegionDLL.dll] file in floder?", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
+ // break;
+ //}
+ RemoveSmallRegion(basepath + "\\output256" + imageFile, basepath + "\\mask_changed1" + imageFile, 2000, 1, 1);
+ RemoveSmallRegion(basepath + "\\mask_changed1" + imageFile, basepath + "\\mask_changed2" + imageFile, 2000, 0, 0);
+ mat_maskSRG = new Mat(basepath + "\\mask_changed2" + imageFile, ImreadModes.GrayScale);
+ Cv2.Threshold(mat_maskSRG, mat_maskSRG, 128, 255, ThresholdTypes.Binary);
+ bitmap_bitch = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat_maskSRG);
+ pictureBox_maskSRG.Image = bitmap_bitch;
+ pictureBox_maskSRG.Refresh();
+ label5.BackColor = Color.White;
+ label6.BackColor = Color.Red;
+ mat_extraction = mat_input.Clone();
+ for (int i = 0; i < mat_input.Height; i++)
+ {
+ for (int j = 0; j < mat_input.Width; j++)
+ {
+ Vec3b pix = mat_extraction.At(i, j);
+ if (mat_maskSRG.At(i, j) == 0)
+ {
+ pix[0] = (byte)(255);
+ pix[1] = (byte)(255);
+ pix[2] = (byte)(255);
+ mat_extraction.Set(i, j, pix);
+ }
+ else
+ {
+ pix[0] = (byte)(mat_extraction.At(i, j).Item0);
+ pix[1] = (byte)(mat_extraction.At(i, j).Item1);
+ pix[2] = (byte)(mat_extraction.At