diff --git a/embeddings_and_difficulty/__pycache__/optimizers.cpython-38.pyc b/embeddings_and_difficulty/__pycache__/optimizers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2c631684c6b59f7b05282c4625b048553f49d005
Binary files /dev/null and b/embeddings_and_difficulty/__pycache__/optimizers.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/configs/config_augmentations/standard_augmenter.yaml b/embeddings_and_difficulty/configs/config_augmentations/standard_augmenter.yaml
index 0fe125e8d86ee6662222c60442ea26e67f62bd8a..9b79c3b1b43a7b2c5737eac026ff25316d65223b 100644
--- a/embeddings_and_difficulty/configs/config_augmentations/standard_augmenter.yaml
+++ b/embeddings_and_difficulty/configs/config_augmentations/standard_augmenter.yaml
@@ -1,9 +1,9 @@
-input_size: [224,224]
-random_resize: True
-same_size: False
-mean: [0.0,0.0,0.0]
-std: [1.0,1.0,1.0]
-full_rot: 180
-scale: (0.8, 1.2)
-shear: 10
+input_size: [224,224]
+random_resize: True
+same_size: False
+mean: [0.0,0.0,0.0]
+std: [1.0,1.0,1.0]
+full_rot: 180
+scale: [0.8, 1.2]
+shear: 10
 cutout: 16
\ No newline at end of file
diff --git a/embeddings_and_difficulty/configs/config_losses/backbone_losses.yaml b/embeddings_and_difficulty/configs/config_losses/backbone_losses.yaml
index 405eec91f5e821f0b888ea4ce575fb4b4567f673..65ce77964a2ab1f5384d6cb32ecc956ab24916a2 100644
--- a/embeddings_and_difficulty/configs/config_losses/backbone_losses.yaml
+++ b/embeddings_and_difficulty/configs/config_losses/backbone_losses.yaml
@@ -1,14 +1,14 @@
-
-TripletMargin:
-  triplets_per_anchor: all
-  margin: 0.09610074859813894
-  sampler:
-    MPerClassSampler:
-      m: 4
-Contrastive:
-  pos_margin: 0.26523381895861114
-  neg_margin: 0.5409405918690342
-  sampler:
-    MPerClassSampler:
-      m: 4
-
+
+TripletMargin:
+  triplets_per_anchor: all
+  margin: 0.09610074859813894
+  sampler:
+    MPerClassSampler:
+      m: 4
+Contrastive:
+  pos_margin: 0.26523381895861114
+  neg_margin: 0.5409405918690342
+  sampler:
+    MPerClassSampler:
+      m: 4
+
diff --git a/embeddings_and_difficulty/configs/config_losses/header_losses.yaml b/embeddings_and_difficulty/configs/config_losses/header_losses.yaml
index b4fc26964283baba3ce3943bcfd495380498e693..f4fa3cecb9c34b98862889c572dc7cc42d0f1a3f 100644
--- a/embeddings_and_difficulty/configs/config_losses/header_losses.yaml
+++ b/embeddings_and_difficulty/configs/config_losses/header_losses.yaml
@@ -1,7 +1,7 @@
-
-LeastSquares:
-  reduction: mean
-L1Loss:
-  reduction: mean
-KendallsTau:
-  SomeParameter: 0
+
+LeastSquares:
+  reduction: mean
+L1Loss:
+  reduction: mean
+KendallsTau:
+  SomeParameter: 0
diff --git a/embeddings_and_difficulty/configs/config_optimizers/config_optim.yaml b/embeddings_and_difficulty/configs/config_optimizers/config_optim.yaml
index 04b99b07dca203b7921d20febeb09f907ba9258e..74217e74c478dbfc47ca848511317367446c133d 100644
--- a/embeddings_and_difficulty/configs/config_optimizers/config_optim.yaml
+++ b/embeddings_and_difficulty/configs/config_optimizers/config_optim.yaml
@@ -1,11 +1,11 @@
-ADAM:
-    lr: 0.001
-    betas: (0.9, 0.999)
-    eps: 1e-08
-    weight_decay: 0
-
-SGD:
-    lr: 0.01
-    momentum: 0
-    dampening: 0
-    weight_decay: 0
+ADAM:
+    lr: 0.001
+    betas: [0.9, 0.999]
+    eps: 1.0e-08
+    weight_decay: 0
+
+SGD:
+    lr: 0.01
+    momentum: 0
+    dampening: 0
+    weight_decay: 0
diff --git a/embeddings_and_difficulty/configs/general.yaml b/embeddings_and_difficulty/configs/general.yaml
index 01c2de3ba7e2aca46bc5089ada36a152abe9da0d..3f86700e1d95bc512919e8953ea9dc1fe7b3f5aa 100644
--- a/embeddings_and_difficulty/configs/general.yaml
+++ b/embeddings_and_difficulty/configs/general.yaml
@@ -1,55 +1,60 @@
-TRAIN:
-  ENABLE: True
-  DATASET: AISC
-  BATCH_SIZE: 32
-  EVAL_PERIOD: 2
-  CHECKPOINT_PERIOD: 2
-  AUTO_RESUME: True
-DATA:
-  PATH_TO_DATA: r"C:\Users\ptrkm\PycharmProjects\BachelorDeeplearning\Embeddings\New_Embeddings"
-  PATH_TO_LABEL: r"C:\Users\ptrkm\PycharmProjects\BachelorDeeplearning\Embeddings\New_Embeddings"
-  PATH_TO_DIFFICULTIES: r"C:\Users\ptrkm\PycharmProjects\BachelorDeeplearning\Embeddings\New_Embeddings"
-  PATH_TO_SPLIT: r"C:\Users\ptrkm\PycharmProjects\BachelorDeeplearning\Embeddings\New_Embeddings"
-NETWORK:
-  PATH_TO_SAVED: None
-  BACKBONE:
-    NAME: 'efficientnet-b5'
-    OUTPUT_DIM: 128
-    ALREADY_TRAINED: False
-    FREEZE_BATCHNORM: True
-  HEAD:
-    STRUCTURE: [128, 64, 16, 1]
-    ACTIVATION: sigmoid
-    BATCH_NORM_STRUCTURE: [False, False, False, False]
-TRAINING:
-  BACKBONE:
-    MAX_EPOCH: 100
-    LOSS: contrastive
-    EARLY_STOP_PATIENCE: 3
-  HEAD:
-    MAX_EPOCH: 20
-    LOSS: least_squares
-    EARLY_STOP_PATIENCE: 2
-  COMBINED:
-    MAX_EPOCH: 10
-    ALPHA: 0.5
-    EARLY_STOP_PATIENCE: 1
-SOLVER:
-  BASE_LR: 0.1
-  MOMENTUM: 0.9
-  WEIGHT_DECAY: 1e-4
-  WARMUP_START_LR: 0.01
-  OPTIMIZING_METHOD: ADAM
-AUGMENTATION:
-  NAME: ngessert
-  CONFIG: standard_augmenter.yaml
-TEST:
-  ENABLE: True
-  BATCH_SIZE: 64
-DATA_LOADER:
-  NUM_WORKERS: 8
-  PIN_MEMORY: True
-NUM_GPUS: 1 # Not set up to handle more currently
-NUM_SHARDS: 1
-RNG_SEED: 0
-OUTPUT_DIR: r"C:\Users\ptrkm\PycharmProjects\BachelorDeeplearning\Embeddings\New_Embeddings"
\ No newline at end of file
+TRAIN:
+  ENABLE: True
+  DATASET: AISC
+  BATCH_SIZE: 32
+  EVAL_PERIOD: 2
+  CHECKPOINT_PERIOD: 2
+  AUTO_RESUME: True
+DATA:
+  PATH_TO_DATA: [data/processed/additional-dermoscopic-images,
+                 data/processed/main-dermoscopic-images]
+  PATH_TO_LABEL: data/processed/labels.csv
+  PATH_TO_DIFFICULTIES: data/processed/difficulties.pkl
+  PATH_TO_SPLIT: data/processed/splits.pkl
+NETWORK:
+  PATH_TO_SAVED: None  # NOTE(review): YAML loads this as the string "None", not null — confirm the loader expects that
+  BACKBONE:
+    NAME: 'efficientnet-b5'
+    OUTPUT_DIM: 128
+    ALREADY_TRAINED: False
+    FREEZE_BATCHNORM: True
+  HEAD:
+    STRUCTURE: [128, 64, 16, 1]
+    ACTIVATION: sigmoid
+    BATCH_NORM_STRUCTURE: [False, False, False, False]
+TRAINING:
+  BACKBONE:
+    MAX_EPOCH: 100
+    LOSS: Contrastive
+    EARLY_STOP_PATIENCE: 3
+  HEAD:
+    MAX_EPOCH: 20
+    LOSS: LeastSquares
+    EARLY_STOP_PATIENCE: 2
+  COMBINED:
+    MAX_EPOCH: 10
+    ALPHA: 0.5
+    EARLY_STOP_PATIENCE: 1
+SOLVER:
+  BASE_LR: 0.1
+  MOMENTUM: 0.9
+  WEIGHT_DECAY: 1.0e-4
+  WARMUP_START_LR: 0.01
+  OPTIMIZER: ADAM
+  ALPHA: 0.5
+AUGMENTATION:
+  NAME: ngessert
+  CONFIG: config_augmentations/standard_augmenter.yaml
+TEST:
+  ENABLE: True
+  BATCH_SIZE: 64
+DATA_LOADER:
+  NUM_WORKERS: 0
+  PIN_MEMORY: True
+EVAL_METRICS:
+  BACKBONE: knn
+  HEAD: MSE
+NUM_GPUS: 1 # Not set up to handle more currently
+NUM_SHARDS: 1
+RNG_SEED: 0
+OUTPUT_DIR: data/output
\ No newline at end of file
diff --git a/embeddings_and_difficulty/data_augmentations/__pycache__/augmentations.cpython-38.pyc b/embeddings_and_difficulty/data_augmentations/__pycache__/augmentations.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..299c627f33acac000e9f03e9df00cde490f5f7ad
Binary files /dev/null and b/embeddings_and_difficulty/data_augmentations/__pycache__/augmentations.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/data_augmentations/augmentations.py b/embeddings_and_difficulty/data_augmentations/augmentations.py
index ad1d5a89911ba0eecb8ee84fa7bfebe371e96c81..4e2a288b8853b33d04460c4957994f99eb9ae181 100644
--- a/embeddings_and_difficulty/data_augmentations/augmentations.py
+++ b/embeddings_and_difficulty/data_augmentations/augmentations.py
@@ -1,174 +1,173 @@
-import numpy as np
-import torch.nn.functional as F
-import torch.nn as nn
-from torch.autograd import Variable
-from torchvision import transforms, utils
-import math
-from PIL import Image
-from numba import jit
-import color_constancy as cc
-import pickle
-from argparse import Namespace
-
-model_params = {}
-model_params['input_size'] = [224, 224, 3]
-model_params['random_resize'] = True
-model_params['same_size'] = False
-
-
-model_params['mean'] = np.array([0.0,0.0,0.0])
-model_params['std'] = np.array([1.0,1.0,1.0])
-model_params['full_rot'] = 180
-model_params['scale'] = (0.8,1.2)
-model_params['shear'] = 10
-model_params['cutout'] = 16
-
-class DataAugmentISIC_AISC:
-    def __init__(self, model_params):
-        """
-        To initialize all transformations in the correct order, subsequently applied in method "apply"
-        :param model_params: (Dict)  of chosen hyperparameters for the data augmentation.
-        random_resize, same_size and input_size are the only parameters, with no default values
-        """
-        assert model_params.get('random_resize', False) + model_params.get('same_size', False) == 1
-
-        self.random_resize = model_params.get('random_resize', False)
-        self.same_size = model_params.get('same_size', False)
-        self.input_size = model_params.get('input_size')
-
-        all_transforms = []
-        if self.same_size:
-            all_transforms.append(transforms.RandomCrop(self.input_size, padding_mode='reflect', pad_if_needed=True))
-        elif self.random_resize:
-            all_transforms.append(transforms.RandomResizedCrop(self.input_size[0], scale=(0.08, 1.0)))
-
-        all_transforms.append(cc.general_color_constancy(gaussian_differentiation=0, minkowski_norm=6, sigma=0))
-        all_transforms.append(transforms.RandomHorizontalFlip())
-        all_transforms.append(transforms.RandomVerticalFlip())
-
-        all_transforms.append(transforms.RandomChoice([transforms.RandomAffine(model_params.get('full_rot',180),
-                                                                               scale=model_params.get('scale', (0.8,1.2)),
-                                                                               shear=model_params.get('shear', 10),
-                                                                               interpolation=Image.NEAREST),
-                                                       transforms.RandomAffine(model_params.get('full_rot',180),
-                                                                               scale=model_params.get('scale',(0.8,1.2)),
-                                                                               shear=model_params.get('shear', 10),
-                                                                               interpolation=Image.BICUBIC),
-                                                       transforms.RandomAffine(model_params.get('full_rot',180),
-                                                                               scale=model_params.get('scale',(0.8,1.2)),
-                                                                               shear=model_params.get('shear', 10),
-                                                                               interpolation=Image.BILINEAR)]))
-
-        all_transforms.append(transforms.ColorJitter(brightness=32. /255., saturation=0.5))
-        all_transforms.append(RandomCutOut(n_holes=1, length=model_params.get('cutout',16), prob = 0.5))
-
-        all_transforms.append(transforms.ToTensor())
-        all_transforms.append(transforms.Normalize(np.float32(model_params.get('mean', np.array([0.0,0.0,0.0]))),
-                                                   np.float32(model_params.get('std', np.array([1.0,1.0,1.0])))))
-
-        self.composed_train = transforms.Compose(all_transforms)
-
-        self.composed_eval = transforms.Compose([
-            cc.general_color_constancy(gaussian_differentiation=0, minkowski_norm=6, sigma = 0),
-            transforms.Resize(self.input_size),
-            transforms.ToTensor(),
-            transforms.Normalize(np.float32(model_params.get('mean', np.array([0.0, 0.0, 0.0]))),
-                                 np.float32(model_params.get('std', np.array([1.0, 1.0, 1.0]))))
-        ])
-
-    def __call__(self, image, mode):
-        """
-        Applies the composite of all transforms as seen in __init__
-        :param image: Image of type PIL.Image
-        :return: A torch.Tensor of the input image on which all augmentations have been applied
-        """
-        if mode == 'train':
-            return self.composed_train(image)
-        else:
-            return self.composed_eval(image)
-
-
-class RandomCutOut(object):
-
-    """
-    Randomly mask out zero or more patches from an image
-    """
-
-    def __init__(self, n_holes = 1, length = 16, prob = 0.5):
-        self.prob = prob
-        self.cutout = Cutout_v0(n_holes, length)
-    def __call__(self, img):
-        if np.random.uniform() < self.prob:
-            return self.cutout(img)
-        else:
-            return img
-
-
-class Cutout_v0(object):
-    """Randomly mask out one or more patches from an image.
-    Args:
-        n_holes (int): Number of patches to cut out of each image.
-        length (int): The length (in pixels) of each square patch.
-    """
-    def __init__(self, n_holes, length):
-        self.n_holes = n_holes
-        self.length = length
-
-    def __call__(self, img):
-        """
-        Args:
-            img (Tensor): Tensor image of size (C, H, W).
-        Returns:
-            Tensor: Image with n_holes of dimension length x length cut out of it.
-        """
-        img = np.array(img)
-        #print(img.shape)
-        h = img.shape[0]
-        w = img.shape[1]
-
-        mask = np.ones((h, w), np.uint8)
-
-        for n in range(self.n_holes):
-            y = np.random.randint(h)
-            x = np.random.randint(w)
-
-            y1 = np.clip(y - self.length // 2, 0, h)
-            y2 = np.clip(y + self.length // 2, 0, h)
-            x1 = np.clip(x - self.length // 2, 0, w)
-            x2 = np.clip(x + self.length // 2, 0, w)
-
-            mask[y1: y2, x1: x2] = 0.
-
-        #mask = torch.from_numpy(mask)
-        #mask = mask.expand_as(img)
-        img = img * np.expand_dims(mask,axis=2)
-        img = Image.fromarray(img)
-        return img
-
-DATA_AUGMENTERS = {'ngessert': DataAugmentISIC_AISC}
-
-def get_data_augmenter(augment_params):
-    return DATA_AUGMENTERS[augment_params.NAME](augment_params.vals)
-
-
-if __name__ == "__main__":
-    model_params = {}
-    model_params['input_size'] = [224, 224]
-    model_params['random_resize'] = True
-    model_params['same_size'] = False
-
-    model_params['mean'] = np.array([0.0, 0.0, 0.0])
-    model_params['std'] = np.array([1.0, 1.0, 1.0])
-    model_params['full_rot'] = 180
-    model_params['scale'] = (0.8, 1.2)
-    model_params['shear'] = 10
-    model_params['cutout'] = 16
-    model_params['name'] = 'ngessert'
-
-
-    data_aug = DataAugmentISIC_AISC(model_params)
-    test = Image.open(r'C:\Users\ptrkm\Downloads\3-non-polariset.jpeg')
-    trans = transforms.ToPILImage()
-    test_new = data_aug(test, 'train')
-
+import numpy as np
+import torch.nn.functional as F
+import torch.nn as nn
+from torch.autograd import Variable
+from torchvision import transforms, utils
+import math
+from PIL import Image
+from numba import jit
+# import color_constancy as cc
+import pickle
+from argparse import Namespace
+
+model_params = {}
+model_params['input_size'] = [224, 224, 3]
+model_params['random_resize'] = True
+model_params['same_size'] = False
+
+
+model_params['mean'] = np.array([0.0,0.0,0.0])
+model_params['std'] = np.array([1.0,1.0,1.0])
+model_params['full_rot'] = 180
+model_params['scale'] = (0.8,1.2)
+model_params['shear'] = 10
+model_params['cutout'] = 16
+
+class DataAugmentISIC_AISC:
+    def __init__(self, model_params):
+        """
+        To initialize all transformations in the correct order, subsequently applied in method "apply"
+        :param model_params: (Dict)  of chosen hyperparameters for the data augmentation.
+        random_resize, same_size and input_size are the only parameters, with no default values
+        """
+        assert model_params.get('random_resize', False) + model_params.get('same_size', False) == 1
+
+        self.random_resize = model_params.get('random_resize', False)
+        self.same_size = model_params.get('same_size', False)
+        self.input_size = model_params.get('input_size')
+
+        all_transforms = []
+        if self.same_size:
+            all_transforms.append(transforms.RandomCrop(self.input_size, padding_mode='reflect', pad_if_needed=True))
+        elif self.random_resize:
+            all_transforms.append(transforms.RandomResizedCrop(self.input_size[0], scale=(0.08, 1.0)))
+
+        # all_transforms.append(cc.general_color_constancy(gaussian_differentiation=0, minkowski_norm=6, sigma=0))
+        all_transforms.append(transforms.RandomHorizontalFlip())
+        all_transforms.append(transforms.RandomVerticalFlip())
+        all_transforms.append(transforms.RandomChoice([transforms.RandomAffine(model_params.get('full_rot',180),
+                                                                               scale=model_params.get('scale', (0.8,1.2)),
+                                                                               shear=model_params.get('shear', 10),
+                                                                               interpolation=Image.NEAREST),
+                                                       transforms.RandomAffine(model_params.get('full_rot',180),
+                                                                               scale=model_params.get('scale',(0.8,1.2)),
+                                                                               shear=model_params.get('shear', 10),
+                                                                               interpolation=Image.BICUBIC),
+                                                       transforms.RandomAffine(model_params.get('full_rot',180),
+                                                                               scale=model_params.get('scale',(0.8,1.2)),
+                                                                               shear=model_params.get('shear', 10),
+                                                                               interpolation=Image.BILINEAR)]))
+
+        all_transforms.append(transforms.ColorJitter(brightness=32. /255., saturation=0.5))
+        all_transforms.append(RandomCutOut(n_holes=1, length=model_params.get('cutout',16), prob = 0.5))
+
+        all_transforms.append(transforms.ToTensor())
+        all_transforms.append(transforms.Normalize(np.float32(model_params.get('mean', np.array([0.0,0.0,0.0]))),
+                                                   np.float32(model_params.get('std', np.array([1.0,1.0,1.0])))))
+
+        self.composed_train = transforms.Compose(all_transforms)
+
+        self.composed_eval = transforms.Compose([
+            # cc.general_color_constancy(gaussian_differentiation=0, minkowski_norm=6, sigma = 0),
+            transforms.Resize(self.input_size),
+            transforms.ToTensor(),
+            transforms.Normalize(np.float32(model_params.get('mean', np.array([0.0, 0.0, 0.0]))),
+                                 np.float32(model_params.get('std', np.array([1.0, 1.0, 1.0]))))
+        ])
+
+    def __call__(self, image, mode):
+        """
+        Applies the composite of all transforms as seen in __init__
+        :param image: Image of type PIL.Image
+        :return: A torch.Tensor of the input image on which all augmentations have been applied
+        """
+        if mode == 'train':
+            return self.composed_train(image)
+        else:
+            return self.composed_eval(image)
+
+
+class RandomCutOut(object):
+
+    """
+    Randomly mask out zero or more patches from an image
+    """
+
+    def __init__(self, n_holes = 1, length = 16, prob = 0.5):
+        self.prob = prob
+        self.cutout = Cutout_v0(n_holes, length)
+    def __call__(self, img):
+        if np.random.uniform() < self.prob:
+            return self.cutout(img)
+        else:
+            return img
+
+
+class Cutout_v0(object):
+    """Randomly mask out one or more patches from an image.
+    Args:
+        n_holes (int): Number of patches to cut out of each image.
+        length (int): The length (in pixels) of each square patch.
+    """
+    def __init__(self, n_holes, length):
+        self.n_holes = n_holes
+        self.length = length
+
+    def __call__(self, img):
+        """
+        Args:
+            img (Tensor): Tensor image of size (C, H, W).
+        Returns:
+            Tensor: Image with n_holes of dimension length x length cut out of it.
+        """
+        img = np.array(img)
+        #print(img.shape)
+        h = img.shape[0]
+        w = img.shape[1]
+
+        mask = np.ones((h, w), np.uint8)
+
+        for n in range(self.n_holes):
+            y = np.random.randint(h)
+            x = np.random.randint(w)
+
+            y1 = np.clip(y - self.length // 2, 0, h)
+            y2 = np.clip(y + self.length // 2, 0, h)
+            x1 = np.clip(x - self.length // 2, 0, w)
+            x2 = np.clip(x + self.length // 2, 0, w)
+
+            mask[y1: y2, x1: x2] = 0.
+
+        #mask = torch.from_numpy(mask)
+        #mask = mask.expand_as(img)
+        img = img * np.expand_dims(mask,axis=2)
+        img = Image.fromarray(img)
+        return img
+
+DATA_AUGMENTERS = {'ngessert': DataAugmentISIC_AISC}
+
+def get_data_augmenter(augment_params):
+    return DATA_AUGMENTERS[augment_params.name](augment_params.vals)  # NOTE(review): config key is NAME (uppercase) — confirm attribute case
+
+
+if __name__ == "__main__":
+    model_params = {}
+    model_params['input_size'] = [224, 224]
+    model_params['random_resize'] = True
+    model_params['same_size'] = False
+
+    model_params['mean'] = np.array([0.0, 0.0, 0.0])
+    model_params['std'] = np.array([1.0, 1.0, 1.0])
+    model_params['full_rot'] = 180
+    model_params['scale'] = (0.8, 1.2)
+    model_params['shear'] = 10
+    model_params['cutout'] = 16
+    model_params['name'] = 'ngessert'
+
+
+    data_aug = DataAugmentISIC_AISC(model_params)
+    test = Image.open(r'C:\Users\ptrkm\Downloads\3-non-polariset.jpeg')
+    trans = transforms.ToPILImage()
+    test_new = data_aug(test, 'train')
+
     breakpoint()
\ No newline at end of file
diff --git a/embeddings_and_difficulty/dataloaders/AISC.py b/embeddings_and_difficulty/dataloaders/AISC.py
index e9c1ab276018d480540810d92662c31b7f39c1bf..ca798754c011178732afaf72c15db8da340a340b 100644
--- a/embeddings_and_difficulty/dataloaders/AISC.py
+++ b/embeddings_and_difficulty/dataloaders/AISC.py
@@ -1,130 +1,137 @@
-import torch
-from torch.utils.data import Dataset
-from PIL import Image
-import os
-import pickle
-import pandas as pd
-from Embeddings.New_Embeddings.data_augmentations import augmentations as aug
-
-
-class AISC(Dataset):
-    def __init__(self, dataset_params):
-        self.path_to_data = dataset_params.PATH_TO_DATA
-        self.path_to_labels = dataset_params.PATH_TO_LABELS
-        self.path_to_difficulties = dataset_params.PATH_TO_DIFFICULTIES
-        self.path_to_split = dataset_params.PATH_TO_SPLIT
-        self.difficulties = None
-        self.name_to_file_label_difficulty = self.read_data_labels_and_difficulty()
-        self.name_to_file_label_difficulty, self.loading_order = self.split_dataset()
-
-        self.mode = 'train'
-        self.data_augmenter = aug.get_data_augmenter(dataset_params.data_augmentation)
-
-    def __len__(self):
-        return len(self.name_to_file_label_difficulty[self.mode])
-
-    def read_data_labels_and_difficulty(self):
-        self.difficulties = self.read_difficulties()
-        file_names_to_file = self.read_data()
-        label_names, labels = self.read_labels()
-
-        if not all(name in file_names_to_file for name in label_names):
-            raise ValueError("Not all names in the labels file are present in the image path")
-
-        return self.ensure_order(file_names_to_file, label_names, labels)
-
-    def ensure_order(self, file_names_to_file, label_names, labels):
-        """
-        Function to ensure that the file order corresponds to the label order
-        :param file_names_to_file: (dict) image_name to full path to image
-        :param label_names: (list) of file names, not full path
-        :param labels: (np.ndarray) of size (N, C) where C is the number of classes, one-hot encoded
-        :return: (dict) with keys equal to label_names
-        """
-
-        name_to_file_label_difficulty = dict()
-
-        for idx, name in enumerate(label_names):
-            name_to_file_label_difficulty[name] = {
-                'path': file_names_to_file[name],
-                'label': labels[idx],
-                'difficulty': self.difficulties[name],
-                'has_difficulty': self.difficulties[name] == -1
-            }
-
-        return name_to_file_label_difficulty
-
-    def read_data(self):
-        if not all(os.path.isdir(path) for path in self.path_to_data):
-            raise ValueError("The path to data attribute is not a directory on this device")
-
-        file_name_to_file = {}
-        for p in self.path_to_data:
-            for file in os.listdir(p):
-                if file not in file_name_to_file:
-                    file_name_to_file[file] = os.path.join(p, file)
-
-        return file_name_to_file
-
-    def read_labels(self):
-        """
-        Function to read labels assuming it is saved as csv
-        :return:
-        """
-        if not os.path.isfile(self.path_to_labels):
-            raise ValueError("Path to labels is not a path to file on this device")
-
-        labels = pd.read_csv(self.path_to_labels)
-        label_names = list(labels['names'])
-        labels = labels.drop('names', axis=1).values()
-
-        return label_names, labels
-
-    def read_difficulties(self):
-        """
-        Function to read difficulty estimates for images
-        :return: (dict) with image names as keys (not full path) and difficulty as value
-        """
-        if not os.path.isfile(self.path_to_difficulties):
-            raise ValueError("Chosen path to difficulties is not a file on this device")
-
-        difficulties = pickle.load(open(self.path_to_difficulties, 'rb'))
-        return difficulties
-
-    def split_dataset(self):
-        """
-        Function to split the dataset into the number of splits, specified in dataset_params.path_to_split
-        :return: (dict) with names, labels and difficulties for the splits
-        """
-        split = pickle.load(open(self.path_to_split, 'rb'))
-
-        temp = dict()
-        loading_order = dict()
-        for mode, names in split.items():
-            temp[mode] = {
-                name: self.name_to_file_label_difficulty[name]
-                for name in names
-            }
-            loading_order[mode] = names
-        return temp, loading_order
-
-    def __getitem__(self, item):
-
-        """
-
-        :param item: (int) conforming to the index of names
-        :return: (tuple) of (torch.Tensor, torch.Tensor, torch.Tensor) of image, label and difficulty
-        """
-        file, label, difficulty, has_diff = self.name_to_file_label_difficulty[
-            self.loading_order[self.mode][item]
-        ]
-
-        image = Image.open(file)
-        image = self.data_augmenter(image, self.mode)
-        label = torch.tensor(label)
-        difficulty = torch.tensor(difficulty)
-
-        if self.mode == 'train':
-            return image, label, difficulty, has_diff
-        else:
-            return image, label, difficulty, file, has_diff
+import torch
+from torch.utils.data import Dataset
+from PIL import Image
+import os
+import pickle
+import pandas as pd
+from data_augmentations import augmentations as aug
+
+
+class AISC(Dataset):
+    def __init__(self, dataset_params):
+
+        self.path_to_data = dataset_params.PATH_TO_DATA
+        self.path_to_labels = dataset_params.PATH_TO_LABEL
+        self.path_to_difficulties = dataset_params.PATH_TO_DIFFICULTIES
+        self.path_to_split = dataset_params.PATH_TO_SPLIT
+        self.difficulties = None
+        self.name_to_file_label_difficulty = self.read_data_labels_and_difficulty()
+        self.name_to_file_label_difficulty, self.loading_order = self.split_dataset()
+
+        self.mode = 'train'
+        self.data_augmenter = aug.get_data_augmenter(dataset_params.data_augmentation)
+
+    def __len__(self):
+        return len(self.name_to_file_label_difficulty[self.mode])
+
+    def read_data_labels_and_difficulty(self):
+        self.difficulties = self.read_difficulties()
+        file_names_to_file = self.read_data()
+        label_names, labels = self.read_labels()
+
+        if not all(name in file_names_to_file for name in label_names):
+            raise ValueError("Not all names in the labels file are present in the image path")
+
+        return self.ensure_order(file_names_to_file, label_names, labels)
+
+    def ensure_order(self, file_names_to_file, label_names, labels):
+        """
+        Function to ensure that the file order corresponds to the label order
+        :param file_names_to_file: (dict) image_name to full path to image
+        :param label_names: (list) of file names, not full path
+        :param labels: (np.ndarray) of size (N, C) where C is the number of classes, one-hot encoded
+        :return: (dict) with keys equal to label_names
+        """
+
+        name_to_file_label_difficulty = dict()
+
+        for idx, name in enumerate(label_names):
+            name_to_file_label_difficulty[name] = {
+                'path': file_names_to_file[name],
+                'label': labels[idx],
+                'difficulty': self.difficulties[name],
+                'has_difficulty': self.difficulties[name] == -1  # NOTE(review): -1 reads as a missing-value sentinel, so this looks inverted (!= -1?) — confirm
+            }
+
+        return name_to_file_label_difficulty
+
+    def read_data(self):
+        if not all(os.path.isdir(path) for path in self.path_to_data):
+            raise ValueError("The path to data attribute is not a directory on this device")
+
+        file_name_to_file = {}
+        for p in self.path_to_data:
+            for file in os.listdir(p):
+                if file not in file_name_to_file:
+                    file_name_to_file[file] = os.path.join(p, file)
+
+        return file_name_to_file
+
+    def read_labels(self):
+        """
+        Function to read labels assuming it is saved as csv
+        :return:
+        """
+        if not os.path.isfile(self.path_to_labels):
+            raise ValueError("Path to labels is not a path to file on this device")
+
+        labels = pd.read_csv(self.path_to_labels)
+        label_names = list(labels['names'])
+        labels = labels.drop('names', axis=1)
+        labels = labels.values
+        return label_names, labels
+
+    def read_difficulties(self):
+        """
+        Function to read difficulty estimates for images
+        :return: (dict) with image names as keys (not full path) and difficulty as value
+        """
+        if not os.path.isfile(self.path_to_difficulties):
+            # (stray breakpoint() debug hook removed from this error path)
+            raise ValueError("Chosen path to difficulties is not a file on this device")
+
+        difficulties = pickle.load(open(self.path_to_difficulties, 'rb'))
+        return difficulties
+
+    def split_dataset(self):
+        """
+        Function to split the dataset into the number of splits, specified in dataset_params.path_to_split
+        :return: (dict) with names, labels and difficulties for the splits
+        """
+        split = pickle.load(open(self.path_to_split, 'rb'))
+
+        temp = dict()
+        loading_order = dict()
+        for fold, val in split.items():  # NOTE(review): later folds overwrite earlier modes in temp/loading_order — confirm single-fold pickle
+            for mode, names in val.items():
+                temp[mode] = {
+                    name: self.name_to_file_label_difficulty[name]
+                    for name in names
+                }
+                loading_order[mode] = names
+
+
+        return temp, loading_order
+
+    def __getitem__(self, item):
+
+        """
+
+        :param item: (int) conforming to the index of names
+        :return: (tuple) of (torch.Tensor, torch.Tensor, torch.Tensor) of image, label and difficulty
+        """
+
+        file, label, difficulty, has_diff = (self.name_to_file_label_difficulty[self.mode][
+            self.loading_order[self.mode][item]
+        ]).values()
+
+
+        image = Image.open(file)
+        image = self.data_augmenter(image, self.mode)
+        label = torch.tensor(label).reshape(-1)
+        difficulty = torch.tensor(difficulty)
+
+        if self.mode == 'train':
+            return image, label, difficulty
+        else:
+            return image, label, difficulty, file, has_diff
diff --git a/embeddings_and_difficulty/dataloaders/BaseAISC.py b/embeddings_and_difficulty/dataloaders/BaseAISC.py
index bc0b63e74189e7aca0c247a3f413a164376a879f..49364729906ce4b458cf20eb8ad53f4127a8560a 100644
--- a/embeddings_and_difficulty/dataloaders/BaseAISC.py
+++ b/embeddings_and_difficulty/dataloaders/BaseAISC.py
@@ -1,111 +1,111 @@
-import os
-import pickle
-import numpy as np
-import pandas as pd
-
-
-class BaseAISC:
-    def __init__(self, dataset_params):
-        self.path_to_data = dataset_params.path_to_data
-        self.path_to_labels = dataset_params.path_to_label
-        self.path_to_difficulties = dataset_params.path_to_difficulties
-        self.path_to_split = dataset_params.path_to_split
-
-        self.name_to_file_label_difficulty = self.read_data_labels_and_difficulty()
-        self.name_to_file_label_difficulty = self.split_dataset()
-
-        self.mode = 'train'
-
-    def __len__(self):
-        return len(self.name_to_file_label_difficulty[self.mode])
-
-    def read_data_labels_and_difficulty(self):
-        file_names_to_file = self.read_data()
-        label_names, labels = self.read_labels()
-
-        if not all(name in file_names_to_file for name in label_names):
-            raise ValueError("Not all names in the labels file are present in the image path")
-
-        return self.ensure_order(file_names_to_file, label_names, labels)
-
-
-    def ensure_order(self, file_names_to_file, label_names, labels):
-        """
-        Function to ensure that the file order corresponds to the label order
-        :param file_names_to_file: (dict) image_name to full path to image
-        :param label_names: (list) of file names, not full path
-        :param labels: (np.ndarray) of size (N, C) where C is the number of classes, one-hot encoded
-        :return: (dict) with keys equal to label_names
-        """
-
-        name_to_file_label_difficulty = dict()
-
-        for idx, name in enumerate(label_names):
-            name_to_file_label_difficulty[name] = {
-                'path': file_names_to_file[name],
-                'label': labels[idx],
-                'difficulty': self.difficulties[name]
-            }
-
-        return name_to_file_label_difficulty
-
-
-    def read_data(self):
-        if not os.path.isdir(self.path_to_data):
-            raise ValueError("The path to data attribute is not a directory on this device")
-
-        file_names_to_file = {
-            file: os.path.join(self.path_to_data, file) for file in os.listdir(self.path_to_data)
-        }
-        return file_names_to_file
-
-
-    def read_labels(self):
-        """
-        Function to read labels assuming it is saved as csv
-        :return:
-        """
-        if not os.path.isfile(self.path_to_labels):
-            raise ValueError("Path to labels is not a path to file on this device")
-
-        labels = pd.read_csv(self.path_to_labels)
-        label_names = list(labels['names'])
-        labels = labels.drop('names', axis=1).values()
-
-        return label_names, labels
-
-
-    def read_difficulties(self):
-        """
-        Function to read difficulty estimates for images
-        :return: (dict) with image names as keys (not full path) and difficulty as value
-        """
-        if not os.path.isfile(self.path_to_difficulties):
-            raise ValueError("Chosen path to difficulties is not a file on this device")
-
-        difficulties_all = pickle.load(open(self.path_to_difficulties, 'rb'))
-
-        difficulties = dict()
-        for lesion_uid, val in difficulties_all.items():
-            if len(val['image']) > 1:
-                for idx, name in enumerate(val['image']):
-                    difficulties[name] = val['diff'][idx]
-
-        return difficulties
-
-
-    def split_dataset(self):
-        """
-        Function to split the dataset into the number of splits, specified in dataset_params.path_to_split
-        :return: (dict) with names, labels and difficulties for the splits
-        """
-        split = pickle.load(open(self.path_to_split, 'rb'))
-
-        temp = dict()
-        for mode, names in split.items():
-            temp[mode] = {
-                name: self.name_to_file_label_difficulty[name]
-                for name in names
-            }
-
+import os
+import pickle
+import numpy as np
+import pandas as pd
+
+
class BaseAISC:
    """
    Base dataset helper that joins image files on disk with their labels
    (csv) and per-image difficulty estimates (pickle), then partitions
    the joined records into splits (e.g. train/val) via a split pickle.
    """

    def __init__(self, dataset_params):
        """
        :param dataset_params: config object exposing path_to_data (image
            directory), path_to_label (csv file), path_to_difficulties
            (pickle file) and path_to_split (pickle file)
        """
        self.path_to_data = dataset_params.path_to_data
        self.path_to_labels = dataset_params.path_to_label
        self.path_to_difficulties = dataset_params.path_to_difficulties
        self.path_to_split = dataset_params.path_to_split

        # ensure_order() looks every name up in self.difficulties, so it
        # must be populated first (the original never assigned it, which
        # made construction fail with AttributeError).
        self.difficulties = self.read_difficulties()
        self.name_to_file_label_difficulty = self.read_data_labels_and_difficulty()
        self.name_to_file_label_difficulty = self.split_dataset()

        self.mode = 'train'

    def __len__(self):
        # Length of the currently selected split only, not the whole dataset.
        return len(self.name_to_file_label_difficulty[self.mode])

    def read_data_labels_and_difficulty(self):
        """
        Join image paths with their labels and difficulty estimates.

        :return: (dict) name -> {'path', 'label', 'difficulty'}
        :raises ValueError: if a labelled name has no image file on disk
        """
        file_names_to_file = self.read_data()
        label_names, labels = self.read_labels()

        if not all(name in file_names_to_file for name in label_names):
            raise ValueError("Not all names in the labels file are present in the image path")

        return self.ensure_order(file_names_to_file, label_names, labels)

    def ensure_order(self, file_names_to_file, label_names, labels):
        """
        Function to ensure that the file order corresponds to the label order
        :param file_names_to_file: (dict) image_name to full path to image
        :param label_names: (list) of file names, not full path
        :param labels: (np.ndarray) of size (N, C) where C is the number of classes, one-hot encoded
        :return: (dict) with keys equal to label_names
        """
        name_to_file_label_difficulty = dict()

        for idx, name in enumerate(label_names):
            name_to_file_label_difficulty[name] = {
                'path': file_names_to_file[name],
                'label': labels[idx],
                # Raises KeyError if a labelled image has no difficulty
                # entry — see the filtering in read_difficulties().
                'difficulty': self.difficulties[name]
            }

        return name_to_file_label_difficulty

    def read_data(self):
        """
        Map every file name in the image directory to its full path.
        :return: (dict) file name -> full path
        :raises ValueError: if path_to_data is not a directory
        """
        if not os.path.isdir(self.path_to_data):
            raise ValueError("The path to data attribute is not a directory on this device")

        file_names_to_file = {
            file: os.path.join(self.path_to_data, file) for file in os.listdir(self.path_to_data)
        }
        return file_names_to_file

    def read_labels(self):
        """
        Read labels, assuming a csv with a 'names' column plus label columns.
        :return: (tuple) of (list of names, np.ndarray of labels)
        :raises ValueError: if path_to_labels is not a file
        """
        if not os.path.isfile(self.path_to_labels):
            raise ValueError("Path to labels is not a path to file on this device")

        labels = pd.read_csv(self.path_to_labels)
        label_names = list(labels['names'])
        # DataFrame.values is a property, not a method — the original wrote
        # `.values()`, which raised TypeError ("ndarray is not callable").
        labels = labels.drop('names', axis=1).to_numpy()

        return label_names, labels

    def read_difficulties(self):
        """
        Function to read difficulty estimates for images
        :return: (dict) with image names as keys (not full path) and difficulty as value
        :raises ValueError: if path_to_difficulties is not a file
        """
        if not os.path.isfile(self.path_to_difficulties):
            raise ValueError("Chosen path to difficulties is not a file on this device")

        # Context manager closes the handle (the original leaked it).
        with open(self.path_to_difficulties, 'rb') as handle:
            difficulties_all = pickle.load(handle)

        difficulties = dict()
        for lesion_uid, val in difficulties_all.items():
            # NOTE(review): lesions with a single image are skipped, so
            # their names never receive a difficulty and ensure_order()
            # will KeyError on them — confirm this filter is intentional.
            if len(val['image']) > 1:
                for idx, name in enumerate(val['image']):
                    difficulties[name] = val['diff'][idx]

        return difficulties

    def split_dataset(self):
        """
        Partition the joined records according to the split pickle, which
        is expected to map mode (e.g. 'train') -> list of image names.
        :return: (dict) mode -> {name: file/label/difficulty entry}
        """
        with open(self.path_to_split, 'rb') as handle:
            split = pickle.load(handle)

        temp = dict()
        for mode, names in split.items():
            temp[mode] = {
                name: self.name_to_file_label_difficulty[name]
                for name in names
            }

        return temp
\ No newline at end of file
diff --git a/embeddings_and_difficulty/dataloaders/__pycache__/AISC.cpython-38.pyc b/embeddings_and_difficulty/dataloaders/__pycache__/AISC.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1fc648a54bbb6e979f796fa98071c2ea362ca33c
Binary files /dev/null and b/embeddings_and_difficulty/dataloaders/__pycache__/AISC.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/dataloaders/init_data_stuff.ipynb b/embeddings_and_difficulty/dataloaders/init_data_stuff.ipynb
index b30576743399b6f3a5d7b9b14ef588245c8d623e..91c55984f9329582847a73886a3e9967f93bbb04 100644
--- a/embeddings_and_difficulty/dataloaders/init_data_stuff.ipynb
+++ b/embeddings_and_difficulty/dataloaders/init_data_stuff.ipynb
@@ -1,350 +1,350 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "import pandas as pd\n",
-    "import pickle\n",
-    "import os\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "outputs": [],
-   "source": [
-    "\n",
-    "data = pd.read_csv(r'C:\\Users\\ptrkm\\data_aisc\\training-assessments.csv', sep = \";\")\n",
-    "data = data.dropna(subset = ['correctDiagnosisName'])"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "outputs": [],
-   "source": [],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "0\n"
-     ]
-    }
-   ],
-   "source": [
-    "correct_diagnosis = data['correctDiagnosisName']\n",
-    "image_name = data['dermoscopicImageName']\n",
-    "training_id = data['trainingCaseId']\n",
-    "user_id = data['userId']\n",
-    "correct_assesments = data['assessedCorrectly']\n",
-    "print(len(image_name.unique()) - len(training_id.unique()))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 46,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Nevus                              37088\n",
-      "Melanoma                           35165\n",
-      "Seb. keratosis/ Lentigo solaris    32615\n",
-      "Dermatofibroma                     16307\n",
-      "Basal cell carcinoma               16142\n",
-      "Hemangioma                         15508\n",
-      "Squamous cell carcinoma            15387\n",
-      "Lentigo                              815\n",
-      "Vascular/Hemorrhage                  725\n",
-      "Actinic keratosis                    336\n",
-      "Other                                102\n",
-      "Bowen's disease                      100\n",
-      "Vascular lesion                       16\n",
-      "Seborrheic keratosis                  11\n",
-      "Lentigo solaris                        5\n",
-      "Name: correctDiagnosisName, dtype: int64\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(correct_diagnosis.value_counts())\n",
-    "diagnosis_aisc_isic = {\n",
-    "    'Melanoma': 'MEL',\n",
-    "    'Nevus': 'NV',\n",
-    "    'Seb. keratosis/ Lentigo solaris': 'BKL',\n",
-    "    'Actinic keratosis': 'AK',\n",
-    "    'Dermatofibroma': 'DF',\n",
-    "    'Basal cell carcinoma': 'BCC',\n",
-    "    'Hemangioma': 'VASC',\n",
-    "    'Squamous cell carcinoma': 'SCC',\n",
-    "    'Lentigo': 'BKL',\n",
-    "    'Lentigo solaris': 'BKL',\n",
-    "    'Vascular/Hemorrhage': 'VASC',\n",
-    "    'Vascular lesion': 'VASC',\n",
-    "    \"Bowen's disease\": 'SCC',\n",
-    "    'Seborrheic keratosis': 'BKL'\n",
-    "}\n",
-    "\n",
-    "isic_label_names =['MEL', 'NV', 'BCC', 'AK', 'BKL', 'DF', 'VASC', 'SCC']\n",
-    "isic_idxs = dict(zip(isic_label_names, range(len(isic_label_names))))\n",
-    "\n",
-    "diags = []\n",
-    "for diag in correct_diagnosis:\n",
-    "    if diag in diagnosis_aisc_isic:\n",
-    "        diags.append(isic_idxs[diagnosis_aisc_isic[diag]])\n",
-    "    else:\n",
-    "        diags.append(None)\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "170322\n"
-     ]
-    }
-   ],
-   "source": [
-    "from sklearn.metrics import accuracy_score\n",
-    "def calculate_difficulty(answers, labels):\n",
-    "    return accuracy_score(labels, answers)\n",
-    "\n",
-    "def run_through_all(id,ans):\n",
-    "\n",
-    "    test = pd.DataFrame(columns = ['id', 'ans'])\n",
-    "    test['id'] = id\n",
-    "    test['ans'] = ans\n",
-    "    print(len(test))\n",
-    "    difficulty = {}\n",
-    "    for i in test['id']:\n",
-    "        if i not in difficulty:\n",
-    "            ans = test[test['id'] == i]['ans'].values\n",
-    "            lab = np.ones((len(ans,)))\n",
-    "            if len(ans) > 5:\n",
-    "                difficulty[i] = calculate_difficulty(ans, lab)\n",
-    "\n",
-    "    return difficulty\n",
-    "\n",
-    "\n",
-    "difficulty = run_through_all(image_name, correct_assesments)\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "outputs": [],
-   "source": [
-    "for name in data['dermoscopicImageName'].unique():\n",
-    "    if name not in difficulty:\n",
-    "        difficulty[name] = -1\n",
-    "\n",
-    "\n",
-    "\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "352\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": "38507"
-     },
-     "execution_count": 35,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "specific_diagnosis_to_diagnosis = {}\n",
-    "\n",
-    "for spc, diag in zip(data['correctSpecificDiagnosisName'], data['correctDiagnosisName']):\n",
-    "    if spc not in specific_diagnosis_to_diagnosis:\n",
-    "        specific_diagnosis_to_diagnosis[spc] = diag\n",
-    "\n",
-    "all_imgs = pd.read_csv(r'C:\\Users\\ptrkm\\data_aisc\\additional-dermoscopic-images.csv', sep = \";\")\n",
-    "cdiags = []\n",
-    "not_there = []\n",
-    "for spc, img_name in zip(all_imgs['correctSpecificDiagnosisName'], all_imgs['dermoscopicImageName']):\n",
-    "    if spc not in specific_diagnosis_to_diagnosis:\n",
-    "        not_there.append(img_name)\n",
-    "        cdiags.append(-1)\n",
-    "    else:\n",
-    "        cdiags.append(specific_diagnosis_to_diagnosis[spc])\n",
-    "print(len(not_there))\n",
-    "all_imgs['correctDiagnosisName'] = cdiags\n",
-    "all_imgs = all_imgs[all_imgs['correctDiagnosisName'] != -1]\n",
-    "len(all_imgs['dermoscopicImageName'].unique())"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "outputs": [],
-   "source": [
-    "for name in all_imgs['dermoscopicImageName']:\n",
-    "    if name not in difficulty:\n",
-    "        difficulty[name] = -1"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "outputs": [],
-   "source": [
-    "with open(r'C:\\Users\\ptrkm\\data_aisc\\difficulties.pkl', 'wb') as handle:\n",
-    "    pickle.dump(difficulty, handle, protocol=pickle.HIGHEST_PROTOCOL)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 52,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "208416\n",
-      "208416\n"
-     ]
-    }
-   ],
-   "source": [
-    "all_images_ = list(all_imgs['dermoscopicImageName']) + list(data['dermoscopicImageName'])\n",
-    "all_labels = list(all_imgs['correctDiagnosisName']) + list(data['correctDiagnosisName'])\n",
-    "diags = []\n",
-    "labels = []\n",
-    "for name, lab in zip(all_images_, all_labels):\n",
-    "    if lab != 'Other':\n",
-    "        diags.append(isic_idxs[diagnosis_aisc_isic[lab]])\n",
-    "        labels.append(name)\n",
-    "\n",
-    "\n",
-    "print(len(labels))\n",
-    "print(len(diags))\n",
-    "\n",
-    "labels_csv = pd.DataFrame()\n",
-    "labels_csv['names'] = labels\n",
-    "labels_csv['labels'] = diags\n",
-    "\n",
-    "labels_csv.to_csv(r'C:\\Users\\ptrkm\\data_aisc\\labels.csv', index = None)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "outputs": [],
-   "source": [],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.6"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "import pickle\n",
+    "import os\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "outputs": [],
+   "source": [
+    "\n",
+    "data = pd.read_csv(r'C:\\Users\\ptrkm\\data_aisc\\training-assessments.csv', sep = \";\")\n",
+    "data = data.dropna(subset = ['correctDiagnosisName'])"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "outputs": [],
+   "source": [],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0\n"
+     ]
+    }
+   ],
+   "source": [
+    "correct_diagnosis = data['correctDiagnosisName']\n",
+    "image_name = data['dermoscopicImageName']\n",
+    "training_id = data['trainingCaseId']\n",
+    "user_id = data['userId']\n",
+    "correct_assesments = data['assessedCorrectly']\n",
+    "print(len(image_name.unique()) - len(training_id.unique()))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 46,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Nevus                              37088\n",
+      "Melanoma                           35165\n",
+      "Seb. keratosis/ Lentigo solaris    32615\n",
+      "Dermatofibroma                     16307\n",
+      "Basal cell carcinoma               16142\n",
+      "Hemangioma                         15508\n",
+      "Squamous cell carcinoma            15387\n",
+      "Lentigo                              815\n",
+      "Vascular/Hemorrhage                  725\n",
+      "Actinic keratosis                    336\n",
+      "Other                                102\n",
+      "Bowen's disease                      100\n",
+      "Vascular lesion                       16\n",
+      "Seborrheic keratosis                  11\n",
+      "Lentigo solaris                        5\n",
+      "Name: correctDiagnosisName, dtype: int64\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(correct_diagnosis.value_counts())\n",
+    "diagnosis_aisc_isic = {\n",
+    "    'Melanoma': 'MEL',\n",
+    "    'Nevus': 'NV',\n",
+    "    'Seb. keratosis/ Lentigo solaris': 'BKL',\n",
+    "    'Actinic keratosis': 'AK',\n",
+    "    'Dermatofibroma': 'DF',\n",
+    "    'Basal cell carcinoma': 'BCC',\n",
+    "    'Hemangioma': 'VASC',\n",
+    "    'Squamous cell carcinoma': 'SCC',\n",
+    "    'Lentigo': 'BKL',\n",
+    "    'Lentigo solaris': 'BKL',\n",
+    "    'Vascular/Hemorrhage': 'VASC',\n",
+    "    'Vascular lesion': 'VASC',\n",
+    "    \"Bowen's disease\": 'SCC',\n",
+    "    'Seborrheic keratosis': 'BKL'\n",
+    "}\n",
+    "\n",
+    "isic_label_names =['MEL', 'NV', 'BCC', 'AK', 'BKL', 'DF', 'VASC', 'SCC']\n",
+    "isic_idxs = dict(zip(isic_label_names, range(len(isic_label_names))))\n",
+    "\n",
+    "diags = []\n",
+    "for diag in correct_diagnosis:\n",
+    "    if diag in diagnosis_aisc_isic:\n",
+    "        diags.append(isic_idxs[diagnosis_aisc_isic[diag]])\n",
+    "    else:\n",
+    "        diags.append(None)\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "170322\n"
+     ]
+    }
+   ],
+   "source": [
+    "from sklearn.metrics import accuracy_score\n",
+    "def calculate_difficulty(answers, labels):\n",
+    "    return accuracy_score(labels, answers)\n",
+    "\n",
+    "def run_through_all(id,ans):\n",
+    "\n",
+    "    test = pd.DataFrame(columns = ['id', 'ans'])\n",
+    "    test['id'] = id\n",
+    "    test['ans'] = ans\n",
+    "    print(len(test))\n",
+    "    difficulty = {}\n",
+    "    for i in test['id']:\n",
+    "        if i not in difficulty:\n",
+    "            ans = test[test['id'] == i]['ans'].values\n",
+    "            lab = np.ones((len(ans,)))\n",
+    "            if len(ans) > 5:\n",
+    "                difficulty[i] = calculate_difficulty(ans, lab)\n",
+    "\n",
+    "    return difficulty\n",
+    "\n",
+    "\n",
+    "difficulty = run_through_all(image_name, correct_assesments)\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "outputs": [],
+   "source": [
+    "for name in data['dermoscopicImageName'].unique():\n",
+    "    if name not in difficulty:\n",
+    "        difficulty[name] = -1\n",
+    "\n",
+    "\n",
+    "\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 35,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "352\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": "38507"
+     },
+     "execution_count": 35,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "specific_diagnosis_to_diagnosis = {}\n",
+    "\n",
+    "for spc, diag in zip(data['correctSpecificDiagnosisName'], data['correctDiagnosisName']):\n",
+    "    if spc not in specific_diagnosis_to_diagnosis:\n",
+    "        specific_diagnosis_to_diagnosis[spc] = diag\n",
+    "\n",
+    "all_imgs = pd.read_csv(r'C:\\Users\\ptrkm\\data_aisc\\additional-dermoscopic-images.csv', sep = \";\")\n",
+    "cdiags = []\n",
+    "not_there = []\n",
+    "for spc, img_name in zip(all_imgs['correctSpecificDiagnosisName'], all_imgs['dermoscopicImageName']):\n",
+    "    if spc not in specific_diagnosis_to_diagnosis:\n",
+    "        not_there.append(img_name)\n",
+    "        cdiags.append(-1)\n",
+    "    else:\n",
+    "        cdiags.append(specific_diagnosis_to_diagnosis[spc])\n",
+    "print(len(not_there))\n",
+    "all_imgs['correctDiagnosisName'] = cdiags\n",
+    "all_imgs = all_imgs[all_imgs['correctDiagnosisName'] != -1]\n",
+    "len(all_imgs['dermoscopicImageName'].unique())"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "outputs": [],
+   "source": [
+    "for name in all_imgs['dermoscopicImageName']:\n",
+    "    if name not in difficulty:\n",
+    "        difficulty[name] = -1"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "outputs": [],
+   "source": [
+    "with open(r'C:\\Users\\ptrkm\\data_aisc\\difficulties.pkl', 'wb') as handle:\n",
+    "    pickle.dump(difficulty, handle, protocol=pickle.HIGHEST_PROTOCOL)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 52,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "208416\n",
+      "208416\n"
+     ]
+    }
+   ],
+   "source": [
+    "all_images_ = list(all_imgs['dermoscopicImageName']) + list(data['dermoscopicImageName'])\n",
+    "all_labels = list(all_imgs['correctDiagnosisName']) + list(data['correctDiagnosisName'])\n",
+    "diags = []\n",
+    "labels = []\n",
+    "for name, lab in zip(all_images_, all_labels):\n",
+    "    if lab != 'Other':\n",
+    "        diags.append(isic_idxs[diagnosis_aisc_isic[lab]])\n",
+    "        labels.append(name)\n",
+    "\n",
+    "\n",
+    "print(len(labels))\n",
+    "print(len(diags))\n",
+    "\n",
+    "labels_csv = pd.DataFrame()\n",
+    "labels_csv['names'] = labels\n",
+    "labels_csv['labels'] = diags\n",
+    "\n",
+    "labels_csv.to_csv(r'C:\\Users\\ptrkm\\data_aisc\\labels.csv', index = None)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "outputs": [],
+   "source": [],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
 }
\ No newline at end of file
diff --git a/embeddings_and_difficulty/dataloaders/test_for_fun.ipynb b/embeddings_and_difficulty/dataloaders/test_for_fun.ipynb
index 72a1bf4ea547b887aa593ec753e1fa126742d149..084046f07dcdef4de332422410c8cc1c0ac2db2e 100644
--- a/embeddings_and_difficulty/dataloaders/test_for_fun.ipynb
+++ b/embeddings_and_difficulty/dataloaders/test_for_fun.ipynb
@@ -1,354 +1,354 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "from PIL import Image\n",
-    "import cv2\n",
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "outputs": [],
-   "source": [
-    "pth = r\"C:\\Users\\ptrkm\\Downloads\"\n",
-    "polarized = [os.path.join(pth, str(i)+\"_polariseret.jpeg\") for i in range(1, 3)] + [os.path.join(pth, str(i)+\"-polariseret.jpeg\") for i in range(3, 7)]\n",
-    "non_polarized = [os.path.join(pth, str(i)+\"-non-polariset.jpeg\") for i in range(1, 7)]\n",
-    "\n",
-    "polarized = [np.asarray(Image.open(pol)) for pol in polarized]\n",
-    "non_polarized = [np.asarray(Image.open(pol)) for pol in non_polarized]"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "outputs": [
-    {
-     "data": {
-      "text/plain": "<Figure size 1440x720 with 18 Axes>",
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAABHcAAAJKCAYAAACmm6lSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABOqklEQVR4nO39f6ytd1kn/L8vW2ASqVE8lenTH55KitrxG6GeIDMQMiF5FCqhOk906iiSDPH8IxPIiKbIHz2dZJ6MTiRowpip0gSVoXECauOgiEof9RlBTrH8aDstFVDaqbQ8okDGEZDr+8e6T7s5nP3rnL3Wve77fr2SlbP2WmvvdV37s/fnOvu6P/fnru4OAAAAANP0VWMHAAAAAMD509wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmLC1NXeq6raqerSqPnzA1/9AVd1bVfdU1X9ZV1wAbAd1AoDdqBEAh1PdvZ4vXPWCJJ9L8svd/W37vPaaJL+W5IXd/emq+obufnQtgQGwFdQJAHajRgAcztpW7nT3Hyb5652PVdUzqup3ququqvqjqvqW4akfTfLG7v708LkmY4CZUycA2I0aAXA4m95z59Yk/6a7vyPJa5L8p+HxZyZ5ZlX9v1X1nqp60YbjAmA7qBMA7EaNANjFxZt6o6p6apJ/luS/VtWZh5+yI45rkvzzJFck+cOq+v91999sKj4AxqVOALAbNQJgbxtr7mS1SuhvuvtZ53juoSTv7e4vJPlYVT2Q1QT9vg3GB8C41AkAdqNGAOxhY6dldfdnsppsvz9JauXbh6d/I6tOe6rqWFZLKz+6qdgAGJ86AcBu1AiAva3zUuhvTfInSb65qh6qqlck+aEkr6iqDyS5J8kNw8vfmeT/q6p7k7w7yU909/+3rtgAGJ86AcBu1AiAw1nbpdABWJaquijJ6SQPd/dLxo4HAACWYtNXywJgvl6V5L6xgwAAgKXR3AHgglXVFUm+J8kvjR0LAAAszVqulnXs2LE+fvz4Or40wKTdddddn+ruS8eOYw3ekOQnk1yy3wvVCIDdzbhOHJg6AbC73erEWpo7x48fz+nTp9fxpQEmrar+YuwYjlpVvSTJo919V1X9811eczLJySS56qqr1AiAXcyxThyWvyUAdrdbnXBaFgAX6nlJXlpVH09ye5IXVtWv7nxBd9/a3Se6+8Slly76gDQAABw5zR0ALkh3v7a7r+ju40luTPIH3f3DI4cFAACLsZbTsoBlqVtqlPftm3uU9wW4UDXOtJk2bQJMgjrBYWnuAHBkuvvOJHeOHAYAACyK07IAAAAAJkxzBwAAAGDCNHcAAAAAJkxzBwAAAGDCNHcAAAAAJkxzBwAAAGDCNHcAAAAAJuzisQOAuara/Ht2b/49AQCAeahbRvgjJknf7A+ZC2XlDgAAAMCEae4AAAAATJjmDgAAAMCE2XMHAIC1socDAKyXlTsAAAAAE6a5AwAAADBhTsuCGRnj8utJklMjvS8AhzLW6VEAwHpZuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABN28dgBAACwIad6pPetcd4XABZCcwcAAAAYTd0yzkGAvnmkgx5r4LQsAAAAgAnbt7lTVVdW1bur6t6quqeqXrWJwAAAAADY30FW7nwxyY9397VJnpvkx6rq2vWGBcBUOAgAwF7UCYD123fPne5+JMkjw/3PVtV9SS5Pcu+aY9sY5/cBXJAzBwHeX1WXJLmrqt7V3bOpEwBckNnXCX9PAGM71J47VXU8ybOTvHct0QAwOd39SHe/f7j/2SRnDgIAgDoBsAEHvlpWVT01yduSvLq7P3OO508mOZkkV1111ZEFCMB0zPUggCOyAEdjrnUCYGwHWrlTVU/KqrHzlu5++7le0923dveJ7j5
x6aWXHmWMAEzAXgcBqupkVZ2uqtOPPfbYOAECMCp1AmB9DnK1rErypiT3dffr1x8SAFOz30EABwAAlk2dAFivg6zceV6SlyV5YVXdPdyuX3NcAEyEgwAA7EWdAFi/g1wt64+TjLPZAABTcOYgwIeq6u7hsZ/q7neMFxIAW0SdAFizA2+oPGunRtqw8uZx3hbgKDkIAMBe1AmA9dPcAQAAgHMobUkm4kBXywIAAABgO2nuAAAAAEyY5g4AAADAhGnuAAAAAEyY5g4AAADAhGnuAAAAAEyYS6EDAAAAyake6X1dc/5CWbkDAAAAMGFW7gDAlqqRDmL1SAftAAA4P1vV3KlbxlqK5X+xAAAAwDQ5LQsAAABgwrZq5Q4A7GW0FZ5jbS4IAHsY6/TdsThtGHanuQMAAMDWW1ozCw5ju5o7joyyBooAAAAAc2bPHQAAAIAJ266VOwAAG2aFJ8zHeFffBRiX5g4A0+H0XQAA+ApOywIAAACYMCt32BjL3gEAAODoae6MaKxmRzurAYAt5CAAMFlOGwZGprkDAADMgyYLsFD23AEAAACYMM0dAAAAgAnT3AEAAACYMHvuABdurPPbbx7nbWHubGwMADAtmjsAAKyXgwAAsFaaOwAAAMB4HAS4YJo7C2S5PQAAAEs31t/GvYZelg2VAQAAACbsQM2dqnpRVd1fVQ9W1U3rDgqAaVEnANiLOgGwXvuellVVFyV5Y5L/M8lDSd5XVXd0973rDg5gL3XLOOso++aRzgneUuoEsK3Uie2gTgCs30H23HlOkge7+6NJUlW3J7khickYgESdALaVDTq3hToBsGYHae5cnuQTOz5+KMl3riccgEPwn/ZtoU4AsBd1AmDNjuxqWVV1MsnJ4cPPVdX95/FljiX51FHFtOXkOj9LyTNZTq7nzPMCd9X/xgv67Ik6ohqRLOdnL1lOrkvJM1lOrkvJM1Enjow6cV6WkutS8kyWk+tS8kw2WCcO0tx5OMmVOz6+Ynjsy3T3rUluPa/QBlV1urtPXMjXmAq5zs9S8kyWk+tS8jwC+9aJo6gRybLGZCm5LiXPZDm5LiXPZFm5XiB1Yg2WkutS8kyWk+tS8kw2m+tBrpb1viTXVNXVVfXkJDcmuWO9YQEwIeoEAHtRJwDWbN+VO939xap6ZZJ3JrkoyW3dfc/aIwNgEtQJAPaiTgCs30FW7qS739Hdz+zuZ3T3vz/I51TVbVX1aFV9+ICv/4Ek/0dV3VNV/+UgnzNxF7zsdEKWkutS8kyWk+tS8rxg6sRaLOXnbyl5JsvJdSl5JsvK9YIctk6cT42oqnuzqhNLqBHJcn7+lpJnspxcl5JnssFcq3s9V5upqhck+VySX+7ub9vntdck+bUkL+zuT1fVN3T3o2sJDICtoE4AsBs1AuBwDrRy53x09x8m+eudj1XVM6rqd6rqrqr6o6r6luGpH03yxu7+9PC5JmOAmVMnANiNGgFwOGtr7uzi1iT/pru/I8lrkvyn4fFnJvnuqvq7qvrfVXXbhuNau6r6eFV9qKrurqrTw2NPq6p3VdVHhn+/buw4D+tcS2Z3y6tWfr6qHqyqD1bVdeNFfni75Hqqqh4exvXuqrp+x3OvHXK9v6q+e5yoD6+qrqyqd1fVvcPpL68aHp/VuO6R5+zGdGIWWSfmWiMSdWKOc4o6Mb8xnZC9asQzh3H6u6r6n1V102hRroE6Me35JFlOjUjUidHGtbvXdktyPMmHh/tPTfJ3Se7ecbtveO63slp2+czh9vkk37nO2DZ9S/LxJMfOeuxnktw03L8pyU+PHed55PWCJNedGee98kpyfZLfTlJJnpvkvWPHfwS5nkrymnO89tokH0jylCRXJ/nzJBeNncMB87wsyXXD/UuSPDDkM6tx3SPP2Y3pNt/Uice/D7OsEUPs6sTM5hR1Yn5juq23Q9aIXx++5y9I8okkH0py7dg5HOH3Qp3o6c4ne+Q5y/lEnRhnXDe5cuerkvxNdz9rx+1
bh+e+kORj3f1Adz+Q5C+TvGyDsY3lhiRvHu6/Ocn3jhfK+elzLJnN7nndkNV5093d70nytVV12UYCPQK75LqbG5Lc3t1/390fS/JgkuesLbgj1N2PdPf7h/ufTXJfksszs3HdI8/dTHZMJ0Sd+HKTrxGJOrGHyc4p6sSuJjumE7FXjXgoyT1JHhx+Dx9I8v9kNSZzpk5MZD5JllMjEnVij09Z67hurLnT3Z9J8rGq+v7k8aVX3z48fU9W3atU1bEkX5fkH20qtg3pJL9bq3OETw6PPb27Hxnu/1WSp48T2pHbLa/LszqScsZD2fuHfypeOSwfvG3HcthZ5FpVx5M8O8l7M+NxPSvPZMZjus0WXieWVCOSGc8nu5jtnKJOzG9Mt9U+NeI3kjw/ySeGGvHMrOrGnL7/6sTKHH/PZj2fqBObG9e1NXeq6q1J/iTJN1fVQ1X1iiQ/lOQVVfWBrCbcM930u5P8fa0uX/juJLcn+d/rim0kz+/u65K8OMmP1eoKAI/r1Tqt9Vy6bERzzWuHX0jyjCTPSvJIkp8dNZojVFVPTfK2JK8e/kP1uDmN6znynO2Ybht14sssskYk885tMNs5RZ2Y35huk0PWiHcm+WyS/yurGvETWZ3KOyfqxDzNej5RJzY7rhev6wt39w/u8tSLzvHYw0n+Z3d/d7LaZGhdcY2lux8e/n20qn49q+VXn6yqy7r7kWHZ2Vx29t8tr4eTXLnjdVcMj01Wd3/yzP2q+sWszvlOJp5rVT0pqwnqLd399uHh2Y3rufKc65huI3XiCQurEckM55PdzHVOUScef342Y7ptDlMjurur6v9OcuqsOjGb7786Mf355FzmPJ+oE48/v7Fx3fTVsnbzviTXVNXVVfXkJDcmuWPkmI5MVX11VV1y5n6S70ry4axyfPnwspcn+c1xIjxyu+V1R5IfGZbRPjfJ3+5YljdJZ50L+n1ZjWuyyvXGqnpKVV2d5Jokf7rp+M5HVVWSN2W1SeHrdzw1q3HdLc85julMzLZOLLBGJDObT/YyxzlFnZjfmM6EOqFOTM5c5xN1YqRx7S3YZbqf2CH7gax2jH7d2PEccW7flNWu2GeWkL5uePzrk/x+ko8k+b0kTxs71vPI7a1ZLTX7QlbnDL5it7yy2v38jcMYfyjJibHjP4Jcf2XI5YPDL+tlO17/uiHX+5O8eOz4D5Hn87NaIvnBPHE1iuvnNq575Dm7MZ3Lba51Ys41YshDnZjZnKJOzG9M53JTJ9SJbb4tpUYMsasTI4xrDW9wpI4dO9bHjx8/8q8LMHV33XXXp7r70rHjGJMaAbA7dUKdANjLbnViLXvuHD9+PKdPn17HlwaYtKr6i7FjGJsaAbA7dUKdANjLbnViW/bcAQAAAOA8aO4AAAAATNjaLoUOS1e1+fdcwxZaAKzBGDUiUScYV1VdlOR0koe7+yVjxwPbTJ3gsA68cqeqLqqqP6uq39r/1QAAAF/mVUnuGzsIgDk6zGlZJmMAAODQquqKJN+T5JfGjgVgjg7U3DEZAwAAF+ANSX4yyZdGjgNglg66cucNMRkDAACHVFUvSfJod9+1x2tOVtXpqjr92GOPbTA6gHnYt7lzkMl4eJ0JGQAAONvzkry0qj6e5PYkL6yqX935gu6+tbtPdPeJSy+9dIwYASbtICt39p2MExMywNLZeB+Ac+nu13b3Fd19PMmNSf6gu3945LAAZmXf5o7JGIADsvE+AACM4OKxAwBg+nZsvP/vk/zbkcMBYEt1951J7hw5DGAXdUuN8r59c4/yvnNyqOaOyRiAXbwhq433Lxk5DgAAWJyDXi0LAM7JVVAAAGBcTssCLpjlm4t3ZuP965P8oyRfU1W/unN/tu6+NcmtSXLixAkDBwAAR8jKHQAuiI33AQBgXJo7AAAAABPmtCwAjoyN9wEAYPOs3AEAAACYMM0dAAAAgAnT3AEAAACYMHvuAACwVnVLjfK+fXOP8r4AsGlW7gAAAABMmJU
7AAALMdYKGgBgvazcAQAAAJgwK3dgRhyRBQAAWB4rdwAAAAAmTHMHAAAAYMI0dwAAAAAmzJ47AAAAwGjG2ju0b+5R3ncdrNwBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJcyl0mJNTI13K79Q4ly4EAADAyh0AAACASdPcAQAA1qaqrqyqd1fVvVV1T1W9auyYAOZm3+aOyRgAALgAX0zy4919bZLnJvmxqrp25JgAZuUge+6cmYzfX1WXJLmrqt7V3feuObaNqVvG2S+kbx5pfxQAANiQ7n4kySPD/c9W1X1JLk8ym78nAMa2b3PHZAzAXqrqyiS/nOTpSTrJrd39c+NGBcA2qqrjSZ6d5L0jh3KkHCwGxnaoPXfmOhkDcEEstwdgX1X11CRvS/Lq7v7MWc+drKrTVXX6scceGydAgAk78KXQ95qMh+dPJjmZJFddddWRBQjAdlvCCk9HZAEuTFU9Kau/Jd7S3W8/+/nuvjXJrUly4sQJkx/AIR2oubPfZJyYkAGwwhO23qmR/ot2apwGKduhqirJm5Lc192vHzsegDk6yNWyTMYA7MtyewB28bwkL0vywqq6e7hdP3ZQAHNykJU7ZybjD1XV3cNjP9Xd71hbVABMiuX2AOymu/84ieVbAGt0kKtlmYwB2JUVngDAXJW/hJmIQ10tCwDOwXJ7AAAY0YGvljVrY20uePM4bwtwlKzwBACAcVm5AwAAADBhmjsAAAAAE6a5AwAAADBhmjsAAAAAE2ZDZQAAgAka6zLdPdL1aNiAsS42dMq1OS6UlTsAAAAAE6a5AwAAADBhmjsAAAAAE7ZVe+7ULWOdZ+ekUQC2j70UAAA4iK1q7gDAVhprc0EAADiA7Wru+M8zAAAAwKFsV3MHAPbg9F0AAPhKmjvM3lh7VgBrYIUnAHsY7yDAstgTDraP5g4AADAPDgIAC6W5AwAsmhWeAMDUae6MyHJGAAAA4EJp7gAAALD1rLSE3WnusDEmYwAAADh6mjsAwFZwEAAA4Pxo7gAAX0aTBQBgWr5q7AAAAAAAOH9W7gAAsF6nRrpU583jvC0AbJrmDnDh/KcdAAA4XyP9PVGnRnnb9BrSPdBpWVX1oqq6v6oerKqbjj4MNqlqnBswX+oEAHtRJwDWa9/mTlVdlOSNSV6c5NokP1hV1647MACmQZ0AYC/qBMD6HWTlznOSPNjdH+3uzye5PckN6w0LgAlRJwDYizoBsGYH2XPn8iSf2PHxQ0m+cz3hABzcWKf7reMc2YlTJ4CtpE5sDXUCYM2ObEPlqjqZ5OTw4eeq6v7z+DLHknzqqGLacnKdn6XkmSwn13PmeYF/LHzjBX32RB1RjUiW87OXLCfXpeSZLCfXpeSZqBNHRp04L0vJdSl5JsvJdSl5JhusEwdp7jyc5ModH18xPPZluvvWJLeeV2iDqjrd3Scu5GtMhVznZyl5JsvJdSl5HoF968RR1IhkWWOylFyXkmeynFyXkmeyrFwvkDqxBkvJdSl5JsvJdSl5JpvN9SB77rwvyTVVdXVVPTnJjUnuWG9YAEyIOgHAXtQJgDXbd+VOd3+xql6Z5J1JLkpyW3ffs/bIAJgEdQKAvagTAOt3oD13uvsdSd6x5liSI1iKOSFynZ+l5JksJ9el5HnB1Im1WEquS8kzWU6uS8kzWVauF0SdWIul5LqUPJPl5LqUPJMN5lq9pu38q+q2JC9J8mh3f9sBXv8DSU4l6SQf6O5/tZbAANgK6gQAu1EjAA5nnc2dFyT5XJJf3m9Crqprkvxakhd296er6hu6+9G1BAbAVlAnANiNGgFwOAfZUPm8dPcfJvnrnY9V1TOq6neq6q6q+qOq+pbhqR9NcmeS91TVg0n+9briGktVfbyqPlRVd1fV6eGxp1XVu6rqI8O/Xzd2nIdVVbdV1aNV9eEdj50zr1r5+ap6sKo+WFXXjRf54e2S66mqengY17ur6vodz712yPX
+qvrucaI+vKq6sqreXVX3VtU9VfWq4fFZjeseec5uTLeVOvGEudaIRJ2Y45yiTsxvTLfRedSINyb5zlpdQv2/V9VNGw55rdSJac8nyXJqRKJOjDau3b22W5LjST684+PfT3LNcP87k/zBcP83knw6yekk703y0STXrjO2Td+SfDzJsbMe+5kkNw33b0ry02PHeR55vSDJdWeN8znzSnJ9kt9OUkmem+S9Y8d/BLmeSvKac7z22iQfSPKUJFcn+fMkF42dwwHzvCzJdcP9S5I8MOQzq3HdI8/Zjek239SJx/OeZY0YYlcnZjanqBPzG9NtvR2yRvzHJH+X5M+yOp3rA+rENG5LqRNLqRFD/OrECOO6tpU7Z6uqpyb5Z0n+a1XdneQ/D9+MJDmW5PNJ/mlWl0Z8WpJ/uanYRnRDkjcP99+c5HvHC+X89DmOqmT3vG7Iamltd/d7knxtVV2Widgl193ckOT27v777v5YkgeTPGdtwR2h7n6ku98/3P9skvuSXJ6Zjeseee5msmM6FerEV5h8jUjUiT1Mdk5RJ3Y12TGdgn1qxMVZfa//KMm/SPILWTV8bth4oJulTkxkPkmWUyMSdWKPT1nruG6suTO8199097N23L51eO5/Jfkf3f2FIcmHk3zrrl9pmjrJ7w7LSE8Ojz29ux8Z7v9VkqePE9qR2y2vy5N8YsfrHsreP/xT8cph+eBtO5bDziLXqjqe5NlZrZSY7bielWcy4zHdckuuE0uqEcmM55NdzHZOUSfmN6ZbbK8a8VCSDyf5y6FGPJDkHzKv7786sTLH37NZzyfqxObGdWPNne7+TJKPVdX3J4+fV/ftw9N/mqHzXlXHkvzjJJ/ZVGwb8vzuvi7Ji5P8WK02iXtcr9ZprWd36xHNNa8dfiHJM5I8K8kjSX521GiO0HCE7G1JXj38/j5uTuN6jjxnO6bbbuF1YpE1Ipl3boPZzinqxPzGdJvtUyN+I8k/GR4/luSZSR4bI841UifmadbziTqx2XFdW3Onqt6a5E+SfHNVPVRVr0jyQ0leUVUfSHJPnlgq+d+SPLmq7k3y7iS/m9X5Z7PR3Q8P/z6a5NezWn71yTPLzYZ/57Kr/255PZzkyh2vu2J4bLK6+5Pd/Q/d/aUkv5gnltVNOteqelJWE9Rbuvvtw8OzG9dz5TnXMd1G6sQTFlYjkhnOJ7uZ65yiTsxvTLfNIWvEO7P6A+rGrGrETyT52szo+69OTH8+OZc5zyfqxObHdZ1Xy/rB7r6su5/U3Vd095u6+2Pd/aLu/vbuvra7/93w8vcl+VKS70nyHVltNHTHumLbtKr66qq65Mz9JN+V1dLRO5K8fHjZy5P85jgRHrnd8rojyY8MR1qem+RvdyzLm6SzzgX9vqzGNVnlemNVPaWqrk5yTVYrD7ZeVVWSNyW5r7tfv+OpWY3rbnnOcUy3lTqxssAakcxsPtnLHOcUdWJ+Y7qNDlMjhhUAL8vqD8WXJnl7Vo0edWK6ZjWf7Gau84k6MdK49hbsMt1P7JD9QFZHYl83djxHnNs3ZbUr9pmjDK8bHv/6rHb9/0iS30vytLFjPY/c3prVkZIvZHXO4Ct2yyur3c/fOIzxh5KcGDv+I8j1V4ZcPjj8sl624/WvG3K9P8mLx47/EHk+P6slkh9Mcvdwu35u47pHnrMb07nc5lon5lwjhjzUiZnNKerE/MZ0Ljd1Qp3Y5ttSasQQuzoxwrjW8AYAAAAATNDF6/iix44d6+PHj6/jSwNM2l133fWp7r507DjGpEYA7E6dUCcA9rJbnVhLc+f48eM5ffr0Or40wKRV1V+MHcPY1AiA3akT6gTAXnarExu7FDoAAAAAR28tK3eApGrz72kLLYBpGKNGJOoEwFSoExyW5g5wweqWcapP36z6AMBUVNVFSU4nebi7XzJ2PABz4rQsAABgE16V5L6xgwCYI80dAABgrarqiiTfk+SXxo4FYI40dwAAgHV7Q5KfTPKlkeMAmCX
NHQAAYG2q6iVJHu3uu/Z4zcmqOl1Vpx977LENRgcwD5o7AADAOj0vyUur6uNJbk/ywqr61Z0v6O5bu/tEd5+49NJLx4gRYNJcLQsAgLVyVcVl6+7XJnltklTVP0/ymu7+4TFjApgbK3cAAAAAJuzAzZ2quqiq/qyqfmudAQEAAPPU3Xd290vGjgNgbg6zcudVSe5bVyAATJuDAAAAMI4DNXeq6ook35Pkl9YbDgAT5iAAAACM4KArd96Q5CeTfGl9oQAwVQ4CAADAePa9WlZVvSTJo91917C7/W6vO5nkZJJcddVVRxUfANPwhqwOAlwychwAAEyMqypeuIOs3HlekpdW1ceT3J7khVX1q2e/qLtv7e4T3X3i0ksvPeIwAdhWOw8C7PGak1V1uqpOP/bYYxuMDgAA5m/f5k53v7a7r+ju40luTPIH3f3Da48MgKnY9yCAAwAAALA+h7laFgB8BQcBAABgXPvuubNTd9+Z5M61RAIAAADAoR2quQMAe3EQAAAANs9pWQAAAAATprkDAAAAMGGaOwAAAAATZs8dAAAAIHVLjR0C58nKHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAADWpqqurKp3V9W9VXVPVb1q7JgA5ubisQMAAGAz6pYaOwSW6YtJfry7319VlyS5q6re1d33jh0YwFxYuQMAAKxNdz/S3e8f7n82yX1JLh83KoB50dwBAAA2oqqOJ3l2kveOHArArOzb3HGOLAB7UScAOIiqemqStyV5dXd/5qznTlbV6ao6/dhjj40TIMCEHWTPHefIArAXdQKAPVXVk7Jq7Lylu99+9vPdfWuSW5PkxIkTveHwLthY+1n1zZP7VgFrsm9zp7sfSfLIcP+zVXXmHFn/aYctY6NMxqBOALCXqqokb0pyX3e/fux4AOboUFfLco4sAHtRJwA4h+cleVmSD1XV3cNjP9Xd7xgvJDiYcuyUiThwc2evc2SH508mOZkkV1111ZEFuAmWUQJcuP32UshEawQAF6a7/ziJP5EB1uhAzZ39zpFNpn+eLADnz14K6+EgAAAAB3GQq2U5RxaAXakTAAAwrn2bO3niHNkXVtXdw+36NccFwHSoEwAAMKKDXC3LObIA7EqdAACAcR1k5Q4AAAAAW+pQl0KfrVMjbVh58zhvCwAAAMyH5g4AAMAE1UgnRbeLOcLWcVoWAAAAwIRZuQNzMtYphqfspQsAADAWK3cAAAAAJkxzBwAAAGDCnJbF7I210RwwI66qyFw4fRcAZklzBwBYNAcBAICp26rmjv9cAcATXOIWAICDsOcOAJNRNc4NgGlQJ4Cl2qqVOwAAAJMz1n5WC6ORtgH2ZpsszR0AAAAOTJMFto/mzojspQAAAABcKHvuAAAAAEyYlTtsjOWbAOxFnQAAOD9W7gAAAABMmOYOAAAAwIQ5LQsA+DJOjwIAmBbNHQAAAGBx5nQFa82dBXJEFgAAgK1xag3djoU5UHOnql6U5OeSXJTkl7r7P6w1KmBaxpqMbx7nbflK6gSwJ3Vi8dQJgPXad0PlqrooyRuTvDjJtUl+sKquXXdgAEyDOgHAXtQJgPU7yNWynpPkwe7+aHd/PsntSW5Yb1gATIg6AWylqnFufAV1AmDNDtLcuTzJJ3Z8/NDwGMCo/Kd9a6gTAOxFnQBYsyPbULmqTiY5OXz4uaq6/zy+zLEknzqqmLacXOdnKXkmy8n1nHleYIPnGy/osyfqiGpEspyfvWQ5uS4lz2Q5uS4lz0SdODLqxHlZSq5LyTNZTq5LyTPZYJ04SHPn4SRX7vj4iuGxL9Pdtya59bxCG1TV6e4+cSFfYyrkOj9LyTNZTq5LyfMI7FsnjqJGJMsak6XkupQ8k+XkupQ8k2XleoHUiTVYSq5LyTNZTq5
LyTPZbK4HOS3rfUmuqaqrq+rJSW5Mcsd6wwJgQtQJAPaiTgCs2b4rd7r7i1X1yiTvzOrShbd19z1rjwyASVAnANiLOgGwfgfac6e735HkHWuOJTmCpZgTItf5WUqeyXJyXUqeF0ydWIul5LqUPJPl5LqUPJNl5XpB1Im1WEquS8kzWU6uS8kz2WCu1d3r+cJVtyV5SZJHu/vbDvD6H0hyKkkn+UB3/6u1BAbAVlAnANiNGgFwOOts7rwgyeeS/PJ+E3JVXZPk15K8sLs/XVXf0N2PriUwALaCOgHAbtQIgMM5yIbK56W7/zDJX+98rKqeUVW/U1V3VdUfVdW3DE/9aJI7k7ynqh5M8q/XFddYqurjVfWhqrq7qk4Pjz2tqt5VVR8Z/v26seM8rKq6raoeraoP73jsnHnVys9X1YNV9cGqum68yA9vl1xPVdXDw7jeXVXX73jutUOu91fVd48T9eFV1ZVV9e6qureq7qmqVw2Pz2pc98hzdmO6rdSJJ8y1RiTqxBznFHVifmO6jc6jRrwxyXfW6hLq/72qbtpwyGulTkx7PkmWUyMSdWK0ce3utd2SHE/y4R0f/36Sa4b735nkD4b7v5Hk00lOJ3lvko8muXadsW36luTjSY6d9djPJLlpuH9Tkp8eO87zyOsFSa47a5zPmVeS65P8dpJK8twk7x07/iPI9VSS15zjtdcm+UCSpyS5OsmfJ7lo7BwOmOdlSa4b7l+S5IEhn1mN6x55zm5Mt/mmTjye9yxrxBC7OjGzOUWdmN+YbuvtkDXiPyb5uyR/ltXpXB9QJ6ZxW0qdWEqNGOJXJ0YY17Wt3DlbVT01yT9L8l+r6u4k/3n4ZiTJsSSfT/JPs7o04tOS/MtNxTaiG5K8ebj/5iTfO14o56fPcVQlu+d1Q1ZLa7u735Pka6vqskzELrnu5oYkt3f333f3x5I8mOQ5awvuCHX3I939/uH+Z5Pcl+TyzGxc98hzN5Md06lQJ77C5GtEok7sYbJzijqxq8mO6RTsUyMuzup7/UdJ/kWSX8iq4XPDxgPdLHViIvNJspwakagTe3zKWsd1Y82d4b3+prufteP2rcNz/yvJ/+juLwxJPpzkW3f9StPUSX53WEZ6cnjs6d39yHD/r5I8fZzQjtxueV2e5BM7XvdQ9v7hn4pXDssHb9uxHHYWuVbV8STPzmqlxGzH9aw8kxmP6ZZbcp1YUo1IZjyf7GK2c4o6Mb8x3WJ71YiHknw4yV8ONeKBJP+QeX3/1YmVOf6ezXo+USc2N64ba+5092eSfKyqvj95/Ly6bx+e/tMMnfeqOpbkHyf5zKZi25Dnd/d1SV6c5MdqtUnc43q1Tms9u1uPaK557fALSZ6R5FlJHknys6NGc4SGI2RvS/Lq4ff3cXMa13PkOdsx3XYLrxOLrBHJvHMbzHZOUSfmN6bbbJ8a8RtJ/snw+LEkz0zy2BhxrpE6MU+znk/Uic2O69qaO1X11iR/kuSbq+qhqnpFkh9K8oqq+kCSe/LEUsn/luTJVXVvkncn+d2szj+bje5+ePj30SS/ntXyq0+eWW42/DuXXf13y+vhJFfueN0Vw2OT1d2f7O5/6O4vJfnFPLGsbtK5VtWTspqg3tLdbx8ent24nivPuY7pNlInnrCwGpHMcD7ZzVznFHVifmO6bQ5ZI96Z1R9QN2ZVI34iyddmRt9/dWL688m5zHk+USc2P67rvFrWD3b3Zd39pO6+orvf1N0f6+4Xdfe3d/e13f3vhpe/L8mXknxPku/IaqOhO9YV26ZV1VdX1SVn7if5rqyWjt6R5OXDy16e5DfHifDI7ZbXHUl+ZDjS8twkf7tjWd4knXUu6PdlNa7JKtcbq+opVXV1kmuyWnmw9aqqkrwpyX3d/fodT81qXHfLc45juq3UiZUF1ohkZvPJXuY4p6gT8xvTbXSYGjGsAHhZVn8ovjTJ27Nq9KgT0zWr+WQ3c51P1ImRxrUPvhP0RVntPv9bB/2cw9y
y2iH7gayOxL5uHe8x1i3JN2W1K/aZowyvGx7/+qx2/f9Ikt9L8rSxYz2P3N6a1ZGSL2R1zuArdssrq93P3ziM8YeSnBg7/iPI9VeGXD44/LJetuP1rxtyvT/Ji8eO/xB5Pj+rJZIfTHL3cLt+buO6R56zG9O53OZaJ+ZcI4Y81ImZzSnqxPzGdC43dUKd2ObbUmrEELs6McK41vAG+6qqf5vkRJKv6e6XHOiTAAAAAFiriw/yoqq6Iqul8P8+yb/d7/XHjh3r48ePX1hkADN01113faq7Lx07jjGpEQC7UyfUCYC97FYnDtTcSfKGJD+Z5JKDvPj48eM5ffr0waMDWIiq+ouxYxibGgGwO3VCnQDYy251Yt8NlavqJUke7e679nndyao6XVWnH3tsblceBAAAANhOB1m587wkL62q65P8oyRfU1W/2t0/vPNF3X1rkluT5MSJE7O4Xj1MTd1SG3/PvtmvOytVdVGS00ketjcb7K02P10nSQ641SIAI1MnOKx9mzvd/dokr02SqvrnSV5zdmMHAJK8Ksl9Sb5m7ECAcxvjIEDiQAAArNu+p2UB56dq8zcYy46N939p7FgAAGBpDrqhcpKku+9McudaIgFgyt6QQ2y8DwAAHJ1DNXcA4Gw7N94fTt8912tOJjmZJFddddXmggMA4MCcvjtdTssC4EKd2Xj/40luT/LCqvrVnS/o7lu7+0R3n7j00kvHiBEAAGZLcweAC9Ldr+3uK7r7eJIbk/yBjfcBOFtVXVRVf1ZVvzV2LABzo7kDAABswpmrKgJwxDR3ADgy3X1nd79k7DgA2C6uqgiwXpo7AADAur0hq6sqfmnkOABmSXMHAABYm51XVdzjNSer6nRVnX7sscc2GB3APGjuAAAA6+SqigBrprkDAACsjasqAqyf5g4AAADAhF08dgAAAMAydPedSe4cOQyA2bFyBwAAAGDCNHcAAAAAJsxpWTAnp3rjb1mnkt782wIAADCwcgcAAABgwjR3AAAAACZMcwcAAABgwjR3AAAAACZMcwcAAABgwjR3AAAAACZMcwcAAABgwi4eOwBg+uqW2vh79s298fcEAADYRlbuAAAAAEyY5g4AAADAhDktCwBgKU6NdErrzeO8LQAshZU7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYftuqFxVVyb55SRPT9JJbu3un1t3YAAAzEPdUqO8b9880gbSALBhB7la1heT/Hh3v7+qLklyV1W9q7vvXXNsG+M/HAAAAMBU7XtaVnc/0t3vH+5/Nsl9SS5fd2AATENVXVlV766qe6vqnqp61dgxAQDAkhxk5c7jqup4kmcnee9aogFgiqzwXBMrPAEAOIgDb6hcVU9N8rYkr+7uz5zj+ZNVdbqqTj/22GNHGSMAW8wKTwAAGNeBmjtV9aSsGjtv6e63n+s13X1rd5/o7hOXXnrpUcYIwERY4QnA2Zy+C7B+B7laViV5U5L7uvv16w8JgCnaa4VnVZ1McjJJrrrqqhGiA2BETt9dE6fvAmccZOXO85K8LMkLq+ru4Xb9muMCYEL2W+FpdSfAcjl9F2D99l25091/nGScVjQAW88KTwAOyum7AOtxqKtlzdapkZYz3jzO2wIcsTMrPD9UVXcPj/1Ud79jvJAA2DZO3wVYH80dAC6IFZ4A7Ocgp+8muTVJTpw4YSMZgEM68KXQAQAADsvpuwDrZ+UOAACwTk7fhamwZclkae4AAABr4/RdgPVzWhYAAADAhGnuAAAAAEyY5g4AAADAhGnuAAAAAEyYDZUBYD+uHAEAwBbT3AEAAABGU7eMc0G9vnmkA3hrsFXNnXKBRAAAAIBDsecOrMFYnWcAAACWZ6tW7gDAXsZrnM5nyS4A8zHWmQ+tLMLW0dwBYDrG2th4JP7TDgDjsnUIU6G5w+yNMyH7ywgAAIDN0NwBAABmYbTTdxe2shTYPjZUBgAAAJgwK3cAgEWznwIAMHWaOwAAwDw4PQp
YKKdlAQAAAEyY5g6zNtqmegAAALAhTssa0Vjn+LfVqgAAADAbmjsAAAAcmIPUsH00d9iYcYqACgAAoxtrk9ubx3lbANg0zR0AYCu4JDkAe1EnYHc2VAYAAACYMCt32IjVVaucIjVbIyy3r1POuwYAAEgO2Nypqhcl+bkkFyX5pe7+D2uNCoBJUSfmxbJ34KipE8Ce7M12wfZt7lTVRUnemOT/TPJQkvdV1R3dfe+6g2M9bGwMHCV1AoC9qBPAtprTld8OsufOc5I82N0f7e7PJ7k9yQ1HHwoAE6VOALAXdQJgzQ7S3Lk8ySd2fPzQ8BgAJOoEsKWqxrnxFdQJgDU7sg2Vq+pkkpPDh5+rqvvP48scS/Kpo4ppy8l1fpaSZ7IluW7gP9DryPMbj/jrTcIR1YhkS372NmQpuS4lz2Q5uS4lz2SXXC+wPqkT6sRBLSXXpeSZLCfXpeSZbLBOHKS583CSK3d8fMXw2Jfp7luT3HpeoQ2q6nR3n7iQrzEVcp2fpeSZLCfXpeR5BPatE0dRI5JljclScl1Knslycl1Knsmycr1A6sQaLCXXpeSZLCfXpeSZbDbXg5yW9b4k11TV1VX15CQ3JrljvWEBMCHqBAB7UScA1mzflTvd/cWqemWSd2Z16cLbuvuetUcGwCSoEwDsRZ0AWL8D7bnT3e9I8o41x5IcwVLMCZHr/Cwlz2Q5uS4lzwumTqzFUnJdSp7JcnJdSp7JsnK9IOrEWiwl16XkmSwn16XkmWww1+p1XGA9SVXdluQlSR7t7m87wOt/IMmpJJ3kA939r9YSGABbQZ0AYDdqBMDhrLO584Ikn0vyy/tNyFV1TZJfS/LC7v50VX1Ddz+6lsAA2ArqBAC7USMADucgGyqfl+7+wyR/vfOxqnpGVf1OVd1VVX9UVd8yPPWjSe5M8p6qejDJv15XXGOpqo9X1Yeq6u6qOj089rSqeldVfWT49+vGjvOwquq2qnq0qj6847Fz5lUrP19VD1bVB6vquvEiP7xdcj1VVQ8P43p3VV2/47nXDrneX1XfPU7Uh1dVV1bVu6vq3qq6p6peNTw+q3HdI8/Zjem2UieeMNcakagTc5xT1In5jek2Oo8a8cYk31mrS6j/96q6acMhr5U6Me35JFlOjUjUidHGtbvXdktyPMmHd3z8+0muGe5/Z5I/GO7/RpJPJzmd5L1JPprk2nXGtulbko8nOXbWYz+T5Kbh/k1JfnrsOM8jrxckue6scT5nXkmuT/LbSSrJc5O8d+z4jyDXU0lec47XXpvkA0mekuTqJH+e5KKxczhgnpcluW64f0mSB4Z8ZjWue+Q5uzHd5ps68Xjes6wRQ+zqxMzmFHVifmO6rbdD1oj/mOTvkvxZVqdzfUCdmMZtKXViKTViiF+dGGFc17Zy52xV9dQk/yzJf62qu5P85+GbkSTHknw+yT/N6tKIT0vyLzcV24huSPLm4f6bk3zveKGcnz7HUZXsntcNWS2t7e5+T5KvrarLMhG75LqbG5Lc3t1/390fS/JgkuesLbgj1N2PdPf7h/ufTXJfksszs3HdI8/dTHZMp0Kd+AqTrxGJOrGHyc4p6sSuJjumU7BPjbg4q+/1HyX5F0l+IauGzw0bD3Sz1ImJzCfJcmpEok7s8SlrHdeNNXeG9/qb7n7Wjtu3Ds/9ryT/o7u/MCT5cJJv3fUrTVMn+d1hGenJ4bGnd/cjw/2/SvL0cUI7crvldXmST+x43UPZ+4d/Kl45LB+8bcdy2FnkWlXHkzw7q5USsx3Xs/JMZjymW27JdWJJNSKZ8Xyyi9nOKerE/MZ0i+1VIx5K8uEkfznUiAeS/EPm9f1XJ1bm+Hs26/lEndjcuG6sudPdn0nysar6/uTx8+q+fXj6TzN03qvqWJJ/nOQzm4ptQ57f3dcleXGSH6vVJnGP69U6rfXsbj2iuea1wy8keUaSZyV5JMnPjhrNERqOkL0tyauH39/HzWlcz5HnbMd02y28TiyyRiT
zzm0w2zlFnZjfmG6zfWrEbyT5J8Pjx5I8M8ljY8S5RurEPM16PlEnNjuua2vuVNVbk/xJkm+uqoeq6hVJfijJK6rqA0nuyRNLJf9bkidX1b1J3p3kd7M6/2w2uvvh4d9Hk/x6VsuvPnlmudnw71x29d8tr4eTXLnjdVcMj01Wd3+yu/+hu7+U5BfzxLK6SedaVU/KaoJ6S3e/fXh4duN6rjznOqbbSJ14wsJqRDLD+WQ3c51T1In5jem2OWSNeGdWf0DdmFWN+IkkX5sZff/VienPJ+cy5/lEndj8uK7zalk/2N2XdfeTuvuK7n5Td3+su1/U3d/e3dd2978bXv6+JF9K8j1JviOrjYbuWFdsm1ZVX11Vl5y5n+S7slo6ekeSlw8ve3mS3xwnwiO3W153JPmR4UjLc5P87Y5leZN01rmg35fVuCarXG+sqqdU1dVJrslq5cHWq6pK8qYk93X363c8Natx3S3POY7ptlInVhZYI5KZzSd7meOcok7Mb0y30WFqxLAC4GVZ/aH40iRvz6rRo05M16zmk93MdT5RJ0Ya196CXab7iR2yH8jqSOzrxo7niHP7pqx2xT5zlOF1w+Nfn9Wu/x9J8ntJnjZ2rOeR21uzOlLyhazOGXzFbnlltfv5G4cx/lCSE2PHfwS5/sqQyweHX9bLdrz+dUOu9yd58djxHyLP52e1RPKDSe4ebtfPbVz3yHN2YzqX21zrxJxrxJCHOjGzOUWdmN+YzuWmTqgT23xbSo0YYlcnRhjXGt4AAAAAgAm6eB1f9NixY338+PF1fGmASbvrrrs+1d2Xjh3HmNQIgN2pE+oEwF52qxNrae4cP348p0+fXseXBpi0qvqLsWMYmxoBsDt1Qp0A2MtudWJjl0IHAAAA4Ohp7gAAAABM2FpOywLGUbfUKO/bN9uYHWAK1AmAaahxpuu43tJ0WbkDAAAAMGFW7gAAbNhYR2RzaqT3BWASrPCcLit3AAAAACZMcwcAAFi7qrqoqv6sqn5r7FgA5kZzBwAA2IRXJblv7CAA5khzBwAAWKuquiLJ9yT5pbFjAZgjzR1Yk6rN3wAAttQbkvxkki+NHAfALGnuAAAAa1NVL0nyaHfftcdrTlbV6ao6/dhjj20wOoB50NwB4EjYKBOAXTwvyUur6uNJbk/ywqr61Z0v6O5bu/tEd5+49NJLx4gRYNI0dwA4KjbKBOArdPdru/uK7j6e5MYkf9DdPzxyWACzorkDwAWzUSYAAIzn4rEDAGAW3pDVRpmXjBwHAFusu+9McufIYQDMjpU7AFwQG2UCAMC4NHcAuFA2ygQAgBEduLnjKigAnIuNMgEAYFyHWbnjKigAAAAAW+ZAzR1XQQHgILr7zu5+ydhxAADAkhx05c4bsroKypfWFwoAAAAAh7Vvc+cgV0EZXudKKAAAAAAbdpCVO/teBSVxJRQAAACAMezb3HEVFAAAAIDtdZirZQEAAACwZS4+zIu7+84kd64lEgAAAAAOzcodAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAm7eOwAAADYkFM9zvvePM7bsh2q6sokv5zk6Uk6ya3d/XPjRgUwL5o7AADAOn0xyY939/ur6pIkd1XVu7r73rEDOyp1S43yvn3zSA1bYOs4LQuAC1JVV1bVu6vq3qq6p6peNXZMAGyP7n6ku98/3P9skvuSXD5uVADzYuUOABfKEdk1cUQWmJuqOp7k2UneO3IoALNi5Q4AF8QRWQAOoqqemuRtSV7d3Z8567mTVXW6qk4/9thj4wQIMGH7NncstwfgoByRBeBcqupJWTV23tLdbz/7+e6+tbtPdPeJSy+9dPMBAkzcQVbunFluf22S5yb5saq6dr1hATA1jsgCcC5VVUnelOS+7n792PEAzNG+e+509yN
JHhnuf7aqziy3t5fCBbKXAkfOJW4ZyUGOyCa5NUlOnDhh8gNYlucleVmSD1XV3cNjP9Xd7xgvJIB5OdSGypbbA3A2R2QB2Et3/3GScY6mAizEgTdU3mu5/fC8JfcAy3TmiOwLq+ru4Xb92EEBAMBSHGjlzn7L7RNL7gGWyhFZYD9OgQeA9dq3uWO5PQAAACyAPTwn6yCnZVluDwAAALClDnK1rPkvt9edBAAA4Cw177+EmZEDb6gMAAAAwPbR3AEAAACYMM0dAAAAgAk70KXQAWDR7M0GwBYaaz+YHqksArvT3AEuWN0yzv8s+mb/swAAgKnz98SFc1oWAAAAwIRp7gAAAABM2FadljXWOaMATMPS6oS9FAAAOIitau4AAACcr7H27Rht432AgeYOAAAwD5oswELZcwcAAABgwqzcYfaWtkcHAIejTgAAU6e5AwAAwIHZ8B+2j9OyAAAAACZMcwcAAABgwpyWNSLLGQEAAA7GHmmwO80dNsZkDAAAAEdPcwcA2AoOAszYqZGWDd88ztsCcEjqxAXT3AEAAAAWZ05bpRyouVNVL0ryc0kuSvJL3f0fjj4UYLJ02hdPnZgXK2iAo6ZOAKzXvlfLqqqLkrwxyYuTXJvkB6vq2nUHxvpUjXMD5kmdAGAv6gTA+h3kUujPSfJgd3+0uz+f5PYkN6w3LAAmRJ0AtpIDWltDnQBYs4M0dy5P8okdHz80PAYwKv9p3xrqBAB7UScA1uzINlSuqpNJTg4ffq6q7j+PL3MsyaeOKqYtJ9f5WUqeyXJyPWeeF9jg+cYL+uyJOqIakSznZy9ZTq5LyTNZTq5LyTNRJ46MOnFelpLrUvJMlpPrUvJMNlgnDtLceTjJlTs+vmJ47Mt0961Jbj2v0AZVdbq7T1zI15gKuc7PUvJMlpPrUvI8AvvWiaOoEcmyxmQpuS4lz2Q5uS4lz2RZuV4gdWINlpLrUvJMlpPrUvJMNpvrQU7Lel+Sa6rq6qp6cpIbk9yx3rAAmBB1AoC9qBMAa7bvyp3u/mJVvTLJO7O6dOFt3X3P2iMDYBLUCQD2ok4ArN9BVu6ku9/R3c/s7md0978/yOdU1W1V9WhVffiAr/+BJP9HVd1TVf/lIJ8zcRe87HRClpLrUvJMlpPrUvK8YOrEWizl528peSbLyXUpeSbLyvWCHLZOnE+NqKp7s6oTS6gRyXJ+/paSZ7KcXJeSZ7LBXKu71/OFq16Q5HNJfrm7v22f116T5NeSvLC7P11V39Ddj64lMAC2gjoBwG7UCIDDOdDKnfPR3X+Y5K93PlZVz6iq36mqu6rqj6rqW4anfjTJG7v708PnmowBZk6dAGA3agTA4aytubOLW5P8m+7+jiSvSfKfhsefmeS7q+rvqup/V9VtG45r7arq41X1oaq6u6pOD489rareVVUfGf79urHjPKxzLZndLa9a+fmqerCqPlhV140X+eHtkuupqnp4GNe7q+r6Hc+9dsj1/qr67nGiPryqurKq3l1V9w6nv7xqeHxW47pHnrMb04lZZJ2Ya41I1Ik5zinqxPzGdEL2qhHPHMbp76rqf1bVTaNFuQbqxLTnk2Q5NSJRJ0Yb1+5e2y3J8SQfHu4/NcnfJbl7x+2+4bnfymrZ5TOH2+eTfOc6Y9v0LcnHkxw767GfSXLTcP+mJD89dpznkdcLklx3Zpz3yivJ9Ul+O0kleW6S944d/xHkeirJa87x2muTfCDJU5JcneTPk1w0dg4HzPOyJNcN9y9J8sCQz6zGdY88Zzem23xTJx7/PsyyRgyxqxMzm1PUifmN6bbeDlkjfn34nr8gySeSfCjJtWPncITfC3Wipzuf7JHnLOcTdWKccd3kyp2vSvI33f2sHbdvHZ77QpKPdfcD3f1Akr9M8rINxjaWG5K8ebj/5iTfO14o56fPsWQ2u+d1Q1bnTXd3vyfJ11bVZRsJ9Ajskutubkhye3f/fXd/LMmDSZ6ztuCOUHc/0t3vH+5/Nsl9SS7
PzMZ1jzx3M9kxnRB14stNvkYk6sQeJjunqBO7muyYTsReNeKhJPckeXD4PXwgyf+T1ZjMmToxkfkkWU6NSNSJPT5lreO6seZOd38myceq6vuTx5deffvw9D1Zda9SVceSfF2Sf7Sp2Dakk/xurc4RPjk89vTufmS4/1dJnj5OaEdut7wuz+pIyhkPZe8f/ql45bB88LYdy2FnkWtVHU/y7CTvzYzH9aw8kxmP6TZbeJ1YUo1IZjyf7GK2c4o6Mb8x3Vb71IjfSPL8JJ8YasQzs6obc/r+qxMrc/w9m/V8ok5sblzX1typqrcm+ZMk31xVD1XVK5L8UJJXVNUHsppwz3TT707y97W6fOG7k9ye5H+vK7aRPL+7r0vy4iQ/VqsrADyuV+u01nPpshHNNa8dfiHJM5I8K8kjSX521GiOUFU9Ncnbkrx6+A/V4+Y0rufIc7Zjum3UiS+zyBqRzDu3wWznFHVifmO6TQ5ZI96Z5LNJ/q+sasRPZHUq75yoE/M06/lEndjsuF68ri/c3T+4y1MvOsdjDyf5n9393clqk6F1xTWW7n54+PfRqvr1rJZffbKqLuvuR4ZlZ3PZ2X+3vB5OcuWO110xPDZZ3f3JM/er6hezOuc7mXiuVfWkrCaot3T324eHZzeu58pzrmO6jdSJJyysRiQznE92M9c5RZ14/PnZjOm2OUyN6O6uqv87yamz6sRsvv/qxPTnk3OZ83yiTjz+/MbGddNXy9rN+5JcU1VXV9WTk9yY5I6RYzoyVfXVVXXJmftJvivJh7PK8eXDy16e5DfHifDI7ZbXHUl+ZFhG+9wkf7tjWd4knXUu6PdlNa7JKtcbq+opVXV1kmuS/Omm4zsfVVVJ3pTVJoWv3/HUrMZ1tzznOKYzMds6scAakcxsPtnLHOcUdWJ+YzoT6oQ6MTlznU/UiZHGtbdgl+l+YofsB7LaMfp1Y8dzxLl9U1a7Yp9ZQvq64fGvT/L7ST6S5PeSPG3sWM8jt7dmtdTsC1mdM/iK3fLKavfzNw5j/KEkJ8aO/why/ZUhlw8Ov6yX7Xj964Zc70/y4rHjP0Sez89qieQH88TVKK6f27jukefsxnQut7nWiTnXiCEPdWJmc4o6Mb8xnctNnVAntvm2lBoxxK5OjDCuNbzBkTp27FgfP378yL8uwNTdddddn+ruS8eOY0xqBMDu1Al1AmAvu9WJtey5c/z48Zw+fXodXxpg0qrqL8aOYWxqBMDu1Al1AmAvu9WJbdlzBwAAAIDzcODmTlVdVFV/VlW/tf+rAQAAANiEw5yW9aok9yX5mjXFArNStfn3XMMWWnBgVXVRktNJHu7ul4wdD2yzMWpEok4wLnUCDk6d4LAOtHKnqq5I8j1Jfmm94QAwYWcOAgDAuagTAGty0NOy3pDkJ5N8aX2hADBVDgIAsBd1AmC99j0tq6pekuTR7r6rqv75Hq87meRkklx11VVHFR8A0/CGrA4CXDJyHABspzdEnYCtV7eMcz5Y3+x8sAt1kJU7z0vy0qr6eJLbk7ywqn717Bd1963dfaK7T1x66Vdcch2Amdp5EGCP15ysqtNVdfqxxx7bYHQAjE2dAFi/fZs73f3a7r6iu48nuTHJH3T3D689MgCmYt+DAA4AACyaOgGwZge+FDoAnIuDAADsRZ0AWL/DXAo93X1nkjvXEgkAAAAAh3ao5g6w3WyAxtgcBABgL+oEwHo4LQsAAABgwjR3AAAAACbMaVkAAKyV04YBYL2s3AEAAACYMCt3AAAWYqwVNADAelm5AwAAADBhmjsAAAAAE6a5AwAAADBhmjsAAAAAE6a5AwAAADBhrpYFXLCxrr7SN/co7wsAALBNrNwBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmLCLxw4AOEKneqT3rXHeF4DDUScA2EJ1yzh1om8eqS6ugZU7AAAAABOmuQMAAAAwYZo7AAAAABOmuQM
AAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABO2b3Onqq6sqndX1b1VdU9VvWoTgQEwDeoEAHtRJwDW7+IDvOaLSX68u99fVZckuauq3tXd9645NgCmYfZ1om6pUd63b+5R3hfgiKkTa6JOAGfs29zp7keSPDLc/2xV3Zfk8iQm4wtkMgbmYAl1AoDzp04ArN+h9typquNJnp3kvWuJBoBJUycA2Is6AbAeBzktK0lSVU9N8rYkr+7uz5zj+ZNJTibJVVdddWQBAjANe9UJNQIAdQIm4NRIZ5ecGudsmjk50MqdqnpSVhPxW7r77ed6TXff2t0nuvvEpZdeepQxArDl9qsTagTAsqkTAOt1kKtlVZI3Jbmvu1+//pAAmBJ1AoC9qBMA63eQlTvPS/KyJC+sqruH2/VrjguA6VAnANiLOgGwZge5WtYfJ3ECHADnpE4AsBd1AmD9DnW1LAAAAAC2i+YOAAAAwIRp7gAAAABM2L577izCqR7nfW8e520BAACA+bByBwAAAGDCNHcAAAAAJkxzBwAAAGDCNHcAAAAAJkxzBwAAAGDCtupqWXVLjfTOI10tCwD2UCOVxVYWASZBnQDO2KrmzmiXJAeAvahPAABsse1q7gDAHqzwBACAr6S5AwAAAOcw1qlvcFiaOwBMh9OjANjDeCs8AcaluQMAAMCB2cgZto/mDrNnKSUAAGtlZSkwMs2dEel4AwDAEdJkARbqq8YOAAAAAIDzZ+UOALBoTt8FAKbOyh0AAACACbNyh41xZBQAADhf/p6A3Vm5AwAAADBhVu4AAF/GkVEAgGmxcgcAAABgwqzcAQBgvU71OO978zhvC8AhqRMXTHNngSy358iZjAEAgIkZ62/jXsOfTwc6LauqXlRV91fVg1V109GHAcCUqRMA7EWdAFivfZs7VXVRkjcmeXGSa5P8YFVdu+7AAJgGdQKAvagTAOt3kJU7z0nyYHd/tLs/n+T2JDesNywAJkSdALZS1Tg3voI6AbBmB2nuXJ7kEzs+fmh4DGBU/tO+NdQJAPaiTgCs2ZFtqFxVJ5OcHD78XFXdfx5f5liSTx1VTFtOrvOzlDyT5eR6zjwvsMHzjRf02RN1RDUiWc7PXrKcXJeSZ7KcXJeSZ6JOHBl14rwsJdel5JksJ9el5JlssE4cpLnzcJIrd3x8xfDYl+nuW5Pcel6hDarqdHefuJCvMRVynZ+l5JksJ9el5HkE9q0TR1EjkmWNyVJyXUqeyXJyXUqeybJyvUDqxBosJdel5JksJ9el5JlsNteDnJb1viTXVNXVVfXkJDcmuWO9YQEwIeoEAHtRJwDWbN+VO939xap6ZZJ3JrkoyW3dfc/aIwNgEtQJAPaiTgCs34H23OnudyR5x5pjSY5gKeaEyHV+lpJnspxcl5LnBVMn1mIpuS4lz2Q5uS4lz2RZuV4QdWItlpLrUvJMlpPrUvJMNphrdfem3gsAAACAI3aQPXcAAAAA2FJb09ypqhdV1f1V9WBV3TR2PEetqj5eVR+qqrur6vTw2NOq6l1V9ZHh368bO87DqqrbqurRqvrwjsfOmVet/Pwwxh+squvGi/zwdsn1VFU9PIzr3VV1/Y7nXjvken9Vffc4UR9eVV1ZVe+uqnur6p6qetXw+KzGdY88ZzemczHnOjHXGpGoE3OcU9SJ+Y3pXKgT6sQ2W0qNSNSJ0ca1u0e/ZbWx2p8n+aYkT07ygSTXjh3XEef48STHznrsZ5LcNNy/KclPjx3neeT1giTXJfnwfnkluT7JbyepJM9N8t6x4z+CXE8lec05Xnvt8HP8lCRXDz/fF42dwwHzvCzJdcP9S5I8MOQzq3HdI8/ZjekcbnOvE3OtEUPs6sTM5hR1Yn5jOoebOqFObPttKTViiF+dGGFct2XlznOSPNjdH+3uzye5PckNI8e0CTckefNw/81Jvne8UM5Pd/9hkr8+6+Hd8rohyS/3ynuSfG1VXbaRQI/ALrnu5oYkt3f333f3x5I
8mNXP+dbr7ke6+/3D/c8muS/J5ZnZuO6R524mO6YzscQ6MfkakagTe5jsnKJO7GqyYzoT6oQ6sdWWUiMSdWKPT1nruG5Lc+fyJJ/Y8fFD2fubMkWd5Her6q6qOjk89vTufmS4/1dJnj5OaEdut7zmOs6vHJYP3rZjOewscq2q40meneS9mfG4npVnMuMxnbC5f/+XVCOSGc8nu5jtnKJOzG9MJ2zu3391YmWO4zzr+USd2Ny4bktzZwme393XJXlxkh+rqhfsfLJX67Rmd+myuea1wy8keUaSZyV5JMnPjhrNEaqqpyZ5W5JXd/dndj43p3E9R56zHVO22iJrRDLv3AaznVPUifmNKVtNnZinWc8n6sRmx3VbmjsPJ7lyx8dXDI/NRnc/PPz7aJJfz2r51SfPLDcb/n10vAiP1G55zW6cu/uT3f0P3f2lJL+YJ5bVTTrXqnpSVhPUW7r77cPDsxvXc+U51zGdgVl//xdWI5IZzie7meucok7Mb0xnYNbff3Vi+vPJucx5PlEnNj+u29LceV+Sa6rq6qp6cpIbk9wxckxHpqq+uqouOXM/yXcl+XBWOb58eNnLk/zmOBEeud3yuiPJjwy7oT83yd/uWJY3SWedC/p9WY1rssr1xqp6SlVdneSaJH+66fjOR1VVkjclua+7X7/jqVmN6255znFMZ2K2dWKBNSKZ2XyylznOKerE/MZ0JtQJdWJy5jqfqBMjjWtvwS7T/cQO2Q9ktWP068aO54hz+6asdsX+QJJ7zuSX5OuT/H6SjyT5vSRPGzvW88jtrVktNftCVucMvmK3vLLa/fyNwxh/KMmJseM/glx/Zcjlg8Mv62U7Xv+6Idf7k7x47PgPkefzs1oi+cEkdw+36+c2rnvkObsxncttrnVizjViyEOdmNmcok7Mb0znclMn1Iltvi2lRgyxqxMjjGsNbwAAAADABG3LaVkAAAAAnAfNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ+/8DAB3N4yUyTBgAAAAASUVORK5CYII=\n"
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "\n",
-    "fig, ax = plt.subplots(6, 3, figsize = (20, 10))\n",
-    "\n",
-    "for j in range(6):\n",
-    "    for i in range(3):\n",
-    "        ax[j, i].hist(polarized[j][:, :, i].reshape(-1), density = False, color = \"green\")\n",
-    "        ax[j, i].hist(non_polarized[j][:, :, i].reshape(-1), density = False, color = \"blue\")\n",
-    "\n",
-    "plt.show()"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "outputs": [
-    {
-     "data": {
-      "text/plain": "<Figure size 720x432 with 1 Axes>",
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlMAAAFlCAYAAADPim3FAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAToklEQVR4nO3db4xld33f8c83XptEAWGCR8SyvaxbrFYkKsbdukZUEQKRGIpwqzqVURscRLQthTZRUzU4D8yftg+o1JASEJYLDoaSgOX86RaZUksgJXmAYe3YBtugLoTIaznxYoOJSwJa8u2De0zHw4zn7v7O7N7Zfb2kqz333N/e+9ufzozfvv9OdXcAADgxP3SqJwAAsJuJKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAF7TtUDn3feeb1v375T9fAAAEu78847v97da5vddspiat++fTl06NCpengAgKVV1Z9udZuX+QAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAHbxlRV/XBVfa6q7qmq+6rqHZuMeUZVfbyqDlfVHVW1b0dmCwCwYpZ5Zuo7SV7e3S9KcmmSK6vqig1j3pjkG939giTvTvKuWWcJALCito2pXnhiunr2dOkNw65KcvO0fWuSV1RVzTZLAIAVtWeZQVV1VpI7k7wgyfu6+44NQy5I8mCSdPexqno8yXOTfH3D/RxIciBJ9u7dOzZz2CG74X8DeuP/zgCcRPWO1fpF2W87tb8Ul3oDend/r7svTXJhksur6idP5MG6+8bu3t/d+9fW1k7kLgAAVspxfZqvu7+Z5DNJrtxw00NJLkqSqtqT5NlJHp1hfgAAK22ZT/OtVdW50/aPJHllki9tGHYwybXT9tVJPt3thQgA4PS3zHumzk9y8/S+qR9Kckt3f6Kq3pnkUHcfTPLBJB+pqsNJHktyzY7NGABghWwbU919b5IXb7L/+nXbf5XkZ+edGgDA6vMN6AAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBg25iqqouq6jNVdX9V3VdVv7jJmJdV1eNVdfd0uX5npgsAsFr2LDHmWJJf7u67qupZSe6sqtu7+/4N4/6wu18z/xQBAFbXts9MdffD3X3XtP0XSR5IcsFOTwwAYDc4rvdMVdW+JC9OcscmN7+kqu6pqk9W1U9s8fcPVNWhqjp09OjR458tAMCKWTqmquqZSX4nyS9197c23HxXkud394uS/EaS39/sPrr7xu7e393719bWTnDKAACrY6mYqqqzswipj3b37268vbu/1d1PTNu3JTm7qs6bdaYAACtomU/zVZIPJnmgu39tizE/Po1LVV0+3e+jc04UAGAVLfNpvpcm+bkkX6iqu6d9v5pkb5J09w1Jrk7ypqo6luQvk1zT3T3/dAEAVsu2MdXdf5Skthnz3iTvnWtSAAC7hW9ABwAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGbBtTVXVRVX2mqu6vqvu
q6hc3GVNV9Z6qOlxV91bVZTszXQCA1bJniTHHkvxyd99VVc9KcmdV3d7d968b86okl0yXv5/k/dOfAACntW2fmeruh7v7rmn7L5I8kOSCDcOuSvLhXvhsknOr6vzZZwsAsGKO6z1TVbUvyYuT3LHhpguSPLju+pH8YHABAJx2lnmZL0lSVc9M8jtJfqm7v3UiD1ZVB5IcSJK9e/eeyF2cdqrmvb/uee8vmX+OjNsNxw2cCvUOv7A4+ZZ6Zqqqzs4ipD7a3b+7yZCHkly07vqF076n6O4bu3t/d+9fW1s7kfkCAKyUZT7NV0k+mOSB7v61LYYdTPL66VN9VyR5vLsfnnGeAAAraZmX+V6a5OeSfKGq7p72/WqSvUnS3TckuS3Jq5McTvLtJG+YfaYAACto25jq7j9K8rQvQnd3J3nzXJMCANgtfAM6AMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADNg2pqrqpqp6pKq+uMXtL6uqx6vq7uly/fzTBABYTXuWGPOhJO9N8uGnGfOH3f2aWWYEALCLbPvMVHf/QZLHTsJcAAB2nbneM/WSqrqnqj5ZVT+x1aCqOlBVh6rq0NGjR2d6aACAU2eOmLoryfO7+0VJfiPJ7281sLtv7O793b1/bW1thocGADi1hmOqu7/V3U9M27clObuqzhueGQDALjAcU1X141VV0/bl030+Onq/AAC7wbaf5quq307ysiTnVdWRJG9LcnaSdPcNSa5O8qaqOpbkL5Nc0929YzMGAFgh28ZUd79um9vfm8VXJwAAnHF8AzoAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAM2Damquqmqnqkqr64xe1VVe+pqsNVdW9VXTb/NAEAVtMyz0x9KMmVT3P7q5JcMl0OJHn/+LQAAHaHbWOqu/8gyWNPM+SqJB/uhc8mObeqzp9rggAAq2zPDPdxQZIH110/Mu17eOPAqjqQxbNX2bt37wwPvb2qee+ve977m9vc/17g9FXvmOcXRr9tnl+Mc80HTraT+gb07r6xu/d39/61tbWT+dAAADtijph6KMlF665fOO0DADjtzRFTB5O8fvpU3xVJHu/uH3iJDwDgdLTte6aq6reTvCzJeVV1JMnbkpydJN19Q5Lbkrw6yeEk307yhp2aLADAqtk2prr7ddvc3knePNuMAAB2Ed+ADgAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAOWiqmqurKqvlxVh6vqrZvc/vNVdbSq7p4uvzD/VAEAVs+e7QZU1VlJ3pfklUmOJPl8VR3s7vs
3DP14d79lB+YIALCylnlm6vIkh7v7q9393SQfS3LVzk4LAGB3WCamLkjy4LrrR6Z9G/2Tqrq3qm6tqos2u6OqOlBVh6rq0NGjR09gugAAq2WuN6D/zyT7uvvvJLk9yc2bDeruG7t7f3fvX1tbm+mhAQBOnWVi6qEk659punDa933d/Wh3f2e6+oEkf3ee6QEArLZlYurzSS6pqour6pwk1yQ5uH5AVZ2/7uprkzww3xQBAFbXtp/m6+5jVfWWJJ9KclaSm7r7vqp6Z5JD3X0wyb+pqtcmOZbksSQ/v4NzBgBYGdvGVJJ0921Jbtuw7/p129cluW7eqQEArD7fgA4AMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADFgqpqrqyqr6clUdrqq3bnL7M6rq49Ptd1TVvtlnCgCwgraNqao6K8n7krwqyQuTvK6qXrhh2BuTfKO7X5Dk3UneNfdEAQBW0TLPTF2e5HB3f7W7v5vkY0mu2jDmqiQ3T9u3JnlFVdV80wQAWE3LxNQFSR5cd/3ItG/TMd19LMnjSZ47xwQBAFbZnpP5YFV1IMmB6eoTVfVokq+fzDmM2kXPt52XXba2u8xptb4rdlyfVmu7gmZf33r7ah1Ap5Bjd2dtub4n6Rh8/lY3LBNTDyW5aN31C6d9m405UlV7kjw7yaMb76i7b0xy45PXq+pQd+9fYg4cJ2u7s6zvzrG2O8v67hxru7NWeX2XeZnv80kuqaqLq+qcJNckObhhzMEk107bVyf5dHf3fNMEAFhN2z4z1d3HquotST6V5KwkN3X3fVX1ziSHuvtgkg8m+UhVHU7yWBbBBQBw2lvqPVPdfVuS2zbsu37d9l8l+dkTePwbtx/CCbK2O8v67hxru7Os786xtjtrZde3vBoHAHDinE4GAGDArDFVVT9cVZ+rqnuq6r6qese0/+LpNDOHp9POnDPt3/I0NFV13bT/y1X1M3POczd6mrX9UFX9SVXdPV0unfZXVb1nWsN7q+qydfd1bVX9n+ly7RYPeUaqqrOq6o+r6hPTdcfuTDZZW8fuTKrqa1X1hWkdD037fqyqbp/W6vaqes603/oehy3W9u1V9dC6Y/fV68Zv+vNf25yW7UxVVedW1a1V9aWqeqCqXrIrj93unu2SpJI8c9o+O8kdSa5IckuSa6b9NyR507T9r5LcMG1fk+Tj0/YLk9yT5BlJLk7ylSRnzTnX3XZ5mrX9UJKrNxn/6iSfnP7eFUnumPb/WJKvTn8+Z9p+zqn+963KJcm/TfJbST4xXXfs7tzaOnbnW9uvJTlvw77/nOSt0/Zbk7zL+s62tm9P8u82Gbvpz/90+UqSv5HknGnMC0/1v20VLlmcPeUXpu1zkpy7G4/dWZ+Z6oUnpqtnT5dO8vIsTjPz5ML9o2l7q9PQXJXkY939ne7+kySHszitzRnradZ2K1cl+fD09z6b5NyqOj/JzyS5vbsf6+5vJLk9yZU7OffdoqouTPIPk3xgul5x7M5i49puw7E7j/XH6MZj1/rujK1+/pc5LdsZp6qeneSnsvhGgHT3d7v7m9mFx+7s75mansq/O8kjWfyDvpLkm704zUzy1NPRbHUammVOYXPG2bi23X3HdNN/mp7yfHdVPWPat9UaWtut/XqSf5/kr6frz41jdy6/nqeu7ZMcu/PoJP+7qu6sxZkmkuR53f3wtP1nSZ43bVvf47PZ2ibJW6Zj96YnX4aKtT1eFyc5muQ3p7cAfKCqfjS78NidPaa6+3vdfWkW35R+eZK
/PfdjnKk2rm1V/WSS67JY47+XxVOcv3LqZrh7VdVrkjzS3Xee6rmcbp5mbR278/kH3X1ZklcleXNV/dT6G3vxWoiPbp+Yzdb2/Un+ZpJLkzyc5L+cuuntanuSXJbk/d394iT/N4uX9b5vtxy7O/Zpvumpus8keUkWT8U9+Z1W609H8/1T1dRTT0OzzClszljr1vbK7n54esrzO0l+M///JaWt1tDabu6lSV5bVV/L4in4lyf5r3HszuEH1raq/rtjdz7d/dD05yNJfi+Ltfzz6SWQTH8+Mg23vsdhs7Xt7j+f/uf2r5P8tzh2T9SRJEfWvcpyaxZxteuO3bk/zbdWVedO2z+S5JVJHsjiP/xXT8OuTfI/pu2tTkNzMMk1tfjE1MVJLknyuTnnuttssbZfWnfAVRavK39x+isHk7x++vTDFUken542/VSSn66q50xPTf/0tO+M1t3XdfeF3b0vizeUf7q7/1kcu8O2WNt/7tidR1X9aFU968ntLNbli3nqMbrx2LW+S9hqbZ88dif/OE89djf7+V/mtGxnnO7+syQPVtXfmna9Isn92YXH7lLfgH4czk9yc1WdlUWo3dLdn6iq+5N8rKr+Y5I/zvRms2xxGppenK7mliwW9ViSN3f392ae626z1dp+uqrWsvh0w91J/uU0/rYsPvlwOMm3k7whSbr7sar6D1n8cCfJO7v7sZP3z9h1fiWO3Z3yUcfuLJ6X5PcWTZo9SX6ru/9XVX0+yS1V9cYkf5rkn07jre/ytlrbj9Tiqzw6i0/7/Yvk6X/+a5PTsp3kf8uq+tdZ/C44J4tP4b0h03/jdtOx6xvQAQAG+AZ0AIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAG/D9myjn6CDJd1wAAAABJRU5ErkJggg==\n"
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "\n",
-    "pol = []\n",
-    "non_pol = []\n",
-    "for j in range(6):\n",
-    "    for i in range(3):\n",
-    "        pol.append(polarized[j][:,:, i].var())\n",
-    "        non_pol.append(non_polarized[j][:,:, i].var())\n",
-    "\n",
-    "fig, ax = plt.subplots(1,1, figsize = (10, 6))\n",
-    "ax.hist(pol, color = \"green\")\n",
-    "ax.hist(non_pol, color = \"blue\")\n",
-    "plt.show()"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[6039.948706249593, 5894.693889706704, 5506.752148360592, 5898.551808247116, 5818.329513453474, 5495.461078784447, 6014.040060886491, 5980.951874902513, 5653.274391287833, 5648.983016042914, 5548.381099502415, 5257.680483140387, 5353.952319166475, 5323.796165140175, 5016.745985959243, 5809.600562122754, 5851.178305706107, 5556.992416721962]\n",
-      "[4223.73249762943, 3863.3349727941827, 3142.630990175311, 4060.9031071145787, 3809.180706647902, 3374.0058052150257, 3872.650070489109, 3772.7925729848907, 3446.6755561809537, 4299.032363256617, 4143.829968257108, 3705.2661585803226, 4127.691935971295, 4014.4101585219423, 3577.012812597752, 4154.935083286517, 4009.442123252845, 3651.851893411681]\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(pol)\n",
-    "print(non_pol)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "markdown",
-   "source": [],
-   "metadata": {
-    "collapsed": false
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "outputs": [
-    {
-     "data": {
-      "text/plain": "<Figure size 720x432 with 1 Axes>",
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkkAAAFlCAYAAAD/BnzkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAQ80lEQVR4nO3de6yteV3f8c/XOQOoUC7ODiEO48HW0BBTYdwSjIREjDjQRtqEP8akFS/JSawaSDQthETgj/7RJrWXlGhGRbBS0aJEYlSkZQwxKYPn4IAzDMhwMQ5B5yBy8w8o+O0f6zlxz+l3n732zF57rX14vZKV/axnPXvt3/rtZ+3zPs+6VXcHAICH+pptDwAAYBeJJACAgUgCABiIJACAgUgCABiIJACAwblNXOlNN93U58+f38RVAwCcqEuXLn2qu/euXr+RSDp//nwuXry4iasGADhRVfXn03oPtwEADEQSAMBAJAEADEQSAMBAJAEADEQSAMBAJAEADEQSAMBAJAEADEQSAMBgrUiqqidU1Vuq6oNVdV9VfeemBwYAsE3rfnbbf0ny+939kqp6VJKv2+CYAAC27shIqqrHJ3lekh9Kku7+UpIvbXZYAADbtc6RpKcluZzkl6vq25JcSvKy7v7bgxtV1YUkF5LklltuOelxXpfqtfWIvr9f3Sc0EoBZPbI/U/+f9meLM2Sd5ySdS3Jrkp/r7mcl+dskr7h6o+6+o7v3u3t/b2/vhIcJAHC61omkB5I80N13LeffklU0AQBct46MpO7+yyR/UVVPX1Z9T5IPbHRUAABbtu6r234yyZuWV7Z9NMkPb25IAADbt1YkdffdSfY3OxQAgN3hHbcBAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAbn1tmoqj6e5PNJvpLky929v8lBAQBs21qRtPju7v7UxkYCALBDPNwGADBYN5I6yR9U1aWqujBtUFUXqupiVV28fPnyyY0QAGAL1o2k53b3rUlemOTHq+p5V2/Q3Xd093537+/t7Z3oIAEATttakdTdn1i+PpjkrUmevclBAQBs25GRVFVfX1WPu7Kc5AVJ7tn0wAAAtmmdV7c9Oclbq+rK9v+ju39/o6MCANiyIyOpuz+a5NtOYSwAADvDWwAAAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAYO1IqqobqupPqup3NjkgAIBdcJwjSS9Lct+mBgIAsEvWiqSqujnJP03yi5sdDgDAblj3SNJ/TvJvkvzd5oYCALA7joykqvpnSR7s7ktHbHehqi5W1cXLly+f2AABALZhnSNJ35Xk+6vq40nenOT5VfWrV2/U3Xd093537+/t7Z3wMAEATteRkdTdr+zum7v7fJLbk7yzu//lxkcGALBF3icJAGBw7jgbd/cfJvnDjYwEAGCHOJIEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADA
QSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADA4MpKq6jFV9Z6qel9V3VtVrz2NgQEAbNO5Nbb5YpLnd/cXqurGJH9UVb/X3e/e8NgAALbmyEjq7k7yheXsjcupNzkoAIBtW+dIUqrqhiSXkvyjJK/r7ruGbS4kuZAkt9xyy0mOcR7Ta+sRfX+/+pF33iMdAyczh4/0d7kL+xJf3cqfkuvWSf9ue8f/3Fxvt3etJ25391e6+5lJbk7y7Kr61mGbO7p7v7v39/b2TniYAACn61ivbuvuzyS5M8ltGxkNAMCOWOfVbXtV9YRl+WuTfG+SD254XAAAW7XOc5KekuSNy/OSvibJb3T372x2WAAA27XOq9ven+RZpzAWAICd4R23AQAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGR0ZSVT21qu6sqg9U1b1V9bLTGBgAwDadW2ObLyf5qe5+b1U9LsmlqnpHd39gw2MDANiaI48kdfcnu/u9y/Lnk9yX5Bs3PTAAgG061nOSqup8kmcluWu47EJVXayqi5cvXz6h4QEAbMfakVRVj03ym0le3t2fu/ry7r6ju/e7e39vb+8kxwgAcOrWiqSqujGrQHpTd//WZocEALB967y6rZL8UpL7uvtnNz8kAIDtW+dI0ncl+VdJnl9Vdy+nF214XAAAW3XkWwB09x8lqVMYCwDAzvCO2wAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAgyMjqapeX1UPVtU9pzEgAIBdsM6RpDckuW3D4wAA2ClHRlJ3vyvJp09hLAAAO+PEnpNUVReq6mJVXbx8+fJJXS0AwFacWCR19x3dvd/d+3t7eyd1tQAAW+HVbQAAA5EEADBY5y0Afi3J/0ny9Kp6oKp+dPPDAgDYrnNHbdDdP3AaAwEA2CUebgMAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAIDBWpFUVbdV1Yeq6v6qesWmBwUAsG1HRlJV3ZDkdUlemOQZSX6gqp6x6YEBAGzTOkeSnp3k/u7+aHd/Kcmbk7x4s8MCANiudSLpG5P8xYHzDyzrAACuW+dO6oqq6kKSC8vZL1TVh07qujehXlMneXU3JfnUSV7hOk74NpymE52vbc/DKfz8rexfZ5w5O55Tm686s3+2HmJr+9cZnb+HPV+neHu/aVq5TiR9IslTD5y/eVn3EN19R5I7HtbQzriqutjd+9sex1lhvo7HfB2fOTse83U85ut4zvJ8rfNw2x8
n+ZaqelpVPSrJ7UnettlhAQBs15FHkrr7y1X1E0nenuSGJK/v7ns3PjIAgC1a6zlJ3f27SX53w2M5y74qH2Z8BMzX8Ziv4zNnx2O+jsd8Hc+Zna/q7m2PAQBg5/hYEgCAgUg6oKo+XlV/WlV3V9XFZd2TquodVfXh5esTl/VVVf91+aiW91fVrQeu56XL9h+uqpceWP/ty/Xfv3zvmXsxZ1W9vqoerKp7Dqzb+Bwd9jN23SHz9Zqq+sSyn91dVS86cNkrl9v+oar6vgPrx48GWl5Qcdey/teXF1ekqh69nL9/ufz8Kd3kh62qnlpVd1bVB6rq3qp62bLe/nWIa8yZfWxQVY+pqvdU1fuW+Xrtsv7Yt/Gk5nGXXWO+3lBVHzuwfz1zWX/93Se722k5Jfl4kpuuWvcfkrxiWX5Fkn+/LL8oye8lqSTPSXLXsv5JST66fH3isvzE5bL3LNvW8r0v3PZtfhhz9Lwktya55zTn6LCfseunQ+brNUl+etj2GUnel+TRSZ6W5CNZvVjihmX5m5M8atnmGcv3/EaS25fln0/yY8vyv07y88vy7Ul+fdtzscZcPSXJrcvy45L82TIn9q/jz5l9bJ6vSvLYZfnGJHct+8OxbuNJzuMun64xX29I8pJh++vuPulI0tFenOSNy/Ibk/zzA+t/pVfeneQJVfWUJN+X5B3d/enu/psk70hy23LZP+jud/fqt/4rB67rzOjudyX59FWrT2OODvsZO+2Q+TrMi5O8ubu/2N0fS3J/Vh8LNH400PI/rucnecvy/VfP/ZX5ekuS77nyP7Rd1d2f7O73LsufT3JfVu/ub/86xDXm7DBf7ftYd/cXlrM3LqfO8W/jSc7jzrrGfB3murtPiqSH6iR/UFWXavUO4kny5O7+5LL8l0mevCwf9nEt11r/wLD+enAac3TYzzirfmI5HP36A4eRjztf35DkM9395avWP+S6lss/u2x/JiwPazwrq/+52r/WcNWcJfaxUVXdUFV3J3kwq3+sP5Lj38aTnMeddvV8dfeV/evfLfvXf6qqRy/rrrv7pEh6qOd2961JXpjkx6vqeQcvXErXywGv4TTm6Dr4Pfxckn+Y5JlJPpnkP251NDumqh6b5DeTvLy7P3fwMvvXbJgz+9ghuvsr3f3MrD494tlJ/vF2R7Tbrp6vqvrWJK/Mat6+I6uH0P7thsewtfukSDqguz+xfH0wyVuzugP91XJIMMvXB5fND/u4lmutv3lYfz04jTk67GecOd39V8sfnr9L8gtZ7WfJ8efrr7M6nH3uqvUPua7l8scv2++0qroxq3/s39Tdv7Wstn9dwzRn9rGjdfdnktyZ5Dtz/Nt4kvN4JhyYr9uWh3m7u7+Y5Jfz8Pevnb9PiqRFVX19VT3uynKSFyS5J6uPYLnyTPyXJvntZfltSX5weTb/c5J8djk0+PYkL6iqJy6HuF+Q5O3LZZ+rqucsj0//4IHrOutOY44O+xlnzpU7/uJfZLWfJavbeHutXlHztCTfktWTGsePBlr+d3Vnkpcs33/13F+Zr5ckeeey/c5afue/lOS+7v7ZAxfZvw5x2JzZx2ZVtVdVT1iWvzbJ92b1PK7j3saTnMeddch8ffBAvFRWzxU6uH9dX/fJ3oFn0O/CKatXI7xvOd2b5FXL+m9I8r+TfDjJ/0rypP77Z/2/LqvHs/80yf6B6/qRrJ7Id3+SHz6wfj+rnekjSf5bljfzPEunJL+W1eH7/5vV48c/ehpzdNjP2PXTIfP135f5eH9WfwiecmD7Vy23/UM58OrHrF418mfLZa+6ar99zzKP/zPJo5f1j1nO379c/s3bnos15uq5WR1Sf3+Su5fTi+xfD2vO7GPzfP2TJH+yzMs9SX7m4d7Gk5rHXT5dY77euexf9yT51fz9K+Cuu/ukd9wGABh4uA0AYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAG/w8g/cpoYoLvXgAAAABJRU5ErkJ
ggg==\n"
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "1317075\n",
-      "5015553\n"
-     ]
-    }
-   ],
-   "source": [
-    "pol_ = []\n",
-    "non_pol_ = []\n",
-    "for j in range(6):\n",
-    "    for i in range(3):\n",
-    "        pol_.append(np.sum(polarized[j]>240))\n",
-    "        non_pol_.append(np.sum(non_polarized[j] > 240))\n",
-    "\n",
-    "fig, ax = plt.subplots(1,1,figsize = (10,6))\n",
-    "ax.hist(pol_, color = \"green\")\n",
-    "ax.hist(non_pol_, color = \"blue\")\n",
-    "plt.show()\n",
-    "print(np.sum(pol_))\n",
-    "print(np.sum(non_pol_))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(1506, 1506, 3)\n",
-      "(1506, 1506, 3)\n",
-      "(1506, 1506, 3)\n",
-      "(1492, 1492, 3)\n",
-      "(1492, 1492, 3)\n",
-      "(4032, 3024, 3)\n",
-      "(4032, 3024, 3)\n",
-      "(4032, 3024, 3)\n",
-      "(4032, 3024, 3)\n",
-      "Prediction for variance method [False, False, False, False, True, True, True, True, True]\n",
-      "prediction for point method  [True, True, True, False, False, True, False, True, False]\n",
-      "['image (1).png', 'image (2).png', 'image (3).png', 'image (4).png', 'image (5).png', 'IMG_2062.jpeg', 'IMG_2063.jpeg', 'IMG_2057.jpeg', 'IMG_2059.jpeg']\n"
-     ]
-    }
-   ],
-   "source": [
-    "\n",
-    "\n",
-    "def decide_var(img):\n",
-    "    var = np.zeros((3, ))\n",
-    "    for i in range(img.shape[-1]):\n",
-    "        var[i] = (img[:, :, i].var())\n",
-    "\n",
-    "    if any(var > 3000):\n",
-    "        return True\n",
-    "    else:\n",
-    "        return False\n",
-    "\n",
-    "def decide_point(img):\n",
-    "\n",
-    "    if np.sum(img > 240) < 180000:\n",
-    "        return True\n",
-    "    else:\n",
-    "        return False\n",
-    "\n",
-    "\n",
-    "images = [os.path.join(pth, f\"image ({i}).png\") for i in range(1,6)] + [os.path.join(pth, p) for p in [\"IMG_2062.jpeg\",\"IMG_2063.jpeg\",\"IMG_2057.jpeg\", \"IMG_2059.jpeg\"]]\n",
-    "imgs = [np.asarray(Image.open(img)) for img in images]\n",
-    "\n",
-    "var_method = []\n",
-    "point_method = []\n",
-    "\n",
-    "for img in imgs:\n",
-    "    if img.shape[-1] > 3:\n",
-    "        img = img[:, :,:-1]\n",
-    "    print(img.shape)\n",
-    "    var_method.append(decide_var(img))\n",
-    "    point_method.append(decide_point(img))\n",
-    "\n",
-    "print(\"Prediction for variance method\", var_method)\n",
-    "print(\"prediction for point method \", point_method)\n",
-    "print([os.path.basename(imag) for imag in images])\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "outputs": [
-    {
-     "data": {
-      "text/plain": "<Figure size 720x432 with 2 Axes>",
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkkAAAFlCAYAAAD/BnzkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAXXUlEQVR4nO3de6xsZ3kf4N+Lj7kUEAZ8RCxfOFBQIjeKLzlyjUCIgqCGIqyqRLLVAqFUR6KmMSpRhEHi9h+VShICwnGBcgnhUi6pa5kQGhwRpGJybHw3Dgfkynad2JhgQy5Qw9s/Zplub3/77Nnbe2afOX4eaWmvWevba95vzexv/faaNTPV3QEA4MEetdsFAAAciYQkAIABIQkAYEBIAgAYEJIAAAaEJACAgT2L2Ojxxx/f+/btW8SmgSPQVVdd9b3u3rvbdewE4xc88mw0hi0kJO3bty8HDx5cxKaBI1BV/e/drmGnGL/gkWejMczLbQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwMBcIamqjquqz1bVt6rq5qp6zqILAzicqnpsVX2jqq6tqhur6p2DNo+pqk9X1aGqurKq9u1CqcCKmvdM0u8m+ePu/qUkpyW5eXElAczlx0le2N2nJTk9yTlVdfa6Nq9L8jfd/awkv53k3cstEVhlm4akqnpSkucn+VCSdPdPuvsHC64L4LB65kfTzWOnqdc1OzfJR6f5zyZ5UVXVkkoEVtyeOdo8I8ndSf5rVZ2W5KokF3b3365tVFUHkhxIklNOOWXuAt750DPkO+bt/faFbXtV614k+2T5Hun7vKqOyWxMelaS93f3leuanJjktiTp7vur6t4kT03yvXXb2db4BTwci/x/Zf3/S9szz8tte5KcmeQD3X1Gkr9N8uaHlNN9SXfv7+79e/fu3ZHiAA6nu3/a3acnOSnJWVX1y9vcjvELeIh5QtLtSW5f8x/aZzMLTQBHhOkSgCuSnLNu1R1JTk6SqtqT5ElJ7llqccDK2jQkdfdfJbmtqn5xWvSiJDcttCqATVTV3qo6bpp/XJIXJ/nWumaXJnnNNP/KJF/p7p05Dw8c9ea5JilJ/kOST1TVo5N8N8lrF1cSwFxOSPLR6bqkRyX5THdfVlXvSnKwuy/N7A0nH6+qQ0m+n+S83SsXWDVzhaTuvibJ/sWWAjC/7r4uyRmD5W9bM/8PSX5tmXUBRw+fuA0AMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADc4Wkqrq1qq6vqmuq6uCiiwLYTFWdXFVXVNVNVXVjVV04aPOCqrp3Gruuqaq37UatwGras4W2/6y7v7ewSgC25v4kb+ruq6vqiUmuqqovd/dN69r9eXe/fBfqA1acl9uAldTdd3b31dP8D5PcnOTE3a0KOJrMG5I6yZ9U1VVVdWDUoKoOVNXBqjp4991371yFAJuoqn1Jzkhy5WD1c6rq2qr6YlX9kw1+3/gFPMS8Iel53X1mkpcmuaCqnr++QXdf0t37u3v/3r17d7RIgI1U1ROSfC7JG7v7vnWrr07y9O4+LcnvJfmj0TaMX8DIXCGpu++Yft6V5AtJzlpkUQDzqKpjMwtIn+juz69f3933dfePpvnLkxxbVccvuUxgRW0akqrq8dNFkamqxyd5SZIbFl0YwOFUVSX5UJKbu/s9G7T5haldquqszMa8e5ZXJbDK5nl329OSfGEaZ/Yk+cPu/uO
FVgWwuecmeVWS66vqmmnZW5KckiTdfXGSVyZ5fVXdn+Tvk5zX3b0LtQIraNOQ1N3fTXLaEmoBmFt3fy1JbdLmfUnet5yKgKONjwAAABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAaEJACAASEJAGBASAIAGBCSAAAGhCQAgAEhCQBgQEgCABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAaEJACAASEJAGBASAIAGBCSAAAGhCQAgAEhCQBgQEgCABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAaEJACAASEJAGBASAIAGBCSAAAGhCQAgAEhCQBgQEgCABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAbmDklVdUxVfbOqLltkQQDzqKqTq+qKqrqpqm6sqgsHbaqq3ltVh6rquqo6czdqBVbTVs4kXZjk5kUVArBF9yd5U3efmuTsJBdU1anr2rw0ybOn6UCSDyy3RGCVzRWSquqkJP8iyQcXWw7AfLr7zu6+epr/YWb/xJ24rtm5ST7WM19PclxVnbDkUoEVNe+ZpN9J8ltJfra4UgC2p6r2JTkjyZXrVp2Y5LY1t2/PQ4MUwNCmIamqXp7kru6+apN2B6rqYFUdvPvuu3esQIDDqaonJPlckjd2933b3MY2x69a8ATspnnOJD03ySuq6tYkn0rywqr6g/WNuvuS7t7f3fv37t27w2UCPFRVHZtZQPpEd39+0OSOJCevuX3StOxBjF/AyKYhqbsv6u6TuntfkvOSfKW7/83CKwM4jKqqJB9KcnN3v2eDZpcmefX0Lrezk9zb3XcurUhgpe3Z7QIAtum5SV6V5PqqumZa9pYkpyRJd1+c5PIkL0tyKMnfJXnt8ssEVtWWQlJ3/1mSP1tIJQBb0N1fyyYX7nR3J7lgORUBRxufuA0AMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADm4akqnpsVX2jqq6tqhur6p3LKAzgcKrqw1V1V1XdsMH6F1TVvVV1zTS9bdk1Aqttzxxtfpzkhd39o6o6NsnXquqL3f31BdcGcDgfSfK+JB87TJs/7+6XL6cc4Giz6ZmknvnRdPPYaeqFVgWwie7+apLv73YdwNFrnjNJqapjklyV5FlJ3t/dVw7aHEhyIElOOeWUnaxx297plUF4pHtOVV2b5P8k+c3uvnHU6Egcv4DdN9eF29390+4+PclJSc6qql8etLmku/d39/69e/fucJkAW3Z1kqd392lJfi/JH23U0PgFjGzp3W3d/YMkVyQ5ZyHVAOyQ7r7vgUsFuvvyJMdW1fG7XBawQuZ5d9veqjpumn9ckhcn+daC6wJ4WKrqF6qqpvmzMhvv7tndqoBVMs81SSck+eh0XdKjknymuy9bbFkAh1dVn0zygiTHV9XtSd6e2RtL0t0XJ3llktdX1f1J/j7Jed3tTSfA3DYNSd19XZIzllALwNy6+/xN1r8vs48IANgWn7gNADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA5uGpKo6uaquqKq
bqurGqrpwGYUBHE5Vfbiq7qqqGzZYX1X13qo6VFXXVdWZy64RWG3znEm6P8mbuvvUJGcnuaCqTl1sWQCb+kiScw6z/qVJnj1NB5J8YAk1AUeRTUNSd9/Z3VdP8z9McnOSExddGMDhdPdXk3z/ME3OTfKxnvl6kuOq6oTlVAccDfZspXFV7UtyRpIrB+sOZPbfWk455ZSdqA3g4TgxyW1rbt8+LbtzfcMjd/yq3S5gm3qB217kPllk3atqVZ+DO2PuC7er6glJPpfkjd193/r13X1Jd+/v7v179+7dyRoBFsr4BYzMFZKq6tjMAtInuvvziy0JYEfckeTkNbdPmpYBzGWed7dVkg8lubm737P4kgB2xKVJXj29y+3sJPd290NeagPYyDzXJD03yauSXF9V10zL3tLdly+sKoBNVNUnk7wgyfFVdXuStyc5Nkm6++Iklyd5WZJDSf4uyWt3p1JgVW0akrr7a3mkX7kFHHG6+/xN1neSC5ZUDnAU8onbAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMLBpSKqqD1fVXVV1wzIKAphXVZ1TVbdU1aGqevNg/a9X1d1Vdc00/bvdqBNYTfOcSfpIknMWXAfAllTVMUnen+SlSU5Ncn5VnTpo+unuPn2aPrjUIoGVtmlI6u6vJvn+EmoB2Iqzkhzq7u9290+SfCrJubtcE3AU2bFrkqrqQFUdrKqDd999905tFmAjJya5bc3t26dl6/2rqrquqj5bVSePNmT8AkZ2LCR19yXdvb+79+/du3enNgvwcPyPJPu6+1eSfDnJR0eNjF/AiHe3AavqjiRrzwydNC37ue6+p7t/PN38YJJfXVJtwFFASAJW1V8keXZVPaOqHp3kvCSXrm1QVSesufmKJDcvsT5gxc3zEQCfTPK/kvxiVd1eVa9bfFkAh9fd9yd5Q5IvZRZ+PtPdN1bVu6rqFVOz36iqG6vq2iS/keTXd6daYBXt2axBd5+/jEIAtqq7L09y+bplb1szf1GSi5ZdF3B08HIbAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAwV0iqqnOq6paqOlRVb150UQDz2GxsqqrHVNWnp/VXVtW+XSgTWFGbhqSqOibJ+5O8NMmpSc6vqlMXXRjA4cw5Nr0uyd9097OS/HaSdy+3SmCVzXMm6awkh7r7u939kySfSnLuYssC2NQ8Y9O5ST46zX82yYuqqpZYI7DC5glJJya5bc3t26dlALtpnrHp5226+/4k9yZ56lKqA1benp3aUFUdSHJguvmjqronyfd2avtHiOOzhD69o96x6LtYayl9eri2uE9Wok9btPQ+bXGfP31BZSzFYPy6ZRubWbXn3QLr3fGTdUvatztSt+fB4myh1i0/lsMxbJ6QdEeSk9fcPmla9iDdfUmSSx64XVUHu3v/Fos8ounTatCnR4x5xqYH2txeVXuSPCnJPes3tH782o5Ve4xWqV61Ls4q1bsbtc7zcttfJHl2VT2jqh6d5Lwkly62LIBNzTM2XZrkNdP8K5N
8pbt7iTUCK2zTM0ndfX9VvSHJl5Ick+TD3X3jwisDOIyNxqaqeleSg919aZIPJfl4VR1K8v3MghTAXOa6Jqm7L09y+Ra3/bBOXR+h9Gk16NMjxGhs6u63rZn/hyS/tqRyVu0xWqV61bo4q1Tv0mstZ54BAB7K15IAAAxsKSRV1Yer6q6qumHNsqdU1Zer6tvTzydPy6uq3jt9HcB1VXXmmt95zdT+21X1mtF9LUNVnVxVV1TVTVV1Y1VdeBT06bFV9Y2qunbq0zun5c+Yvpbh0PQ1DY+elm/4tQ1VddG0/Jaq+ue71KWfq6pjquqbVXXZdHul+1RVt1bV9VV1TVUdnJat7HPvaLHox6WqfnXa/qHpd7f0XuVa8Di8UX0b3cc2an1HVd0x7d9rqupla9YN/z5rg6+f2c4YcJhaF3482OF9u1G9R9z+rSUcl7bah7l199xTkucnOTPJDWuW/ackb57m35zk3dP8y5J8MbMPKzg7yZXT8qck+e7088nT/JO3UsdOTUlOSHLmNP/EJH+Z2dcbrHKfKskTpvljk1w51fqZJOdNyy9O8vpp/t8nuXiaPy/Jp6f5U5Ncm+QxSZ6R5DtJjtmNPq3p239M8odJLptur3Sfktya5Ph1y1b2uXe0TIt+XJJ8Y2pb0+++dIv1LXQc3qi+je5jG7W+I8lvDtoO/z6n6TtJnpnk0VObU6ff2dIYsEmtCz8e7PC+3ajeI27/ZsHHpe30Ye6/t20MIPvy4Cf8LUlOWPOg3TLN/36S89e3S3J+kt9fs/xB7XZzSvLfk7z4aOlTkn+U5Ook/zSzD+DaMy1/TpIvTfNfSvKcaX7P1K6SXJTkojXb+nm7XerLSUn+NMkLk1w21bjqfbo1Dz0YHxXPvVWeFvm4TOu+tWb5g9ptocZ9WcA4fLj6NrqPbdT6jowP4sO/z7V/22vbbWcM2OI+3tHjwSL27Qb1HtH7Nws4Lm2nD/NOO3FN0tO6+85p/q+SPG2a3+grA47IrzmZTuedkVnCXek+1exlqWuS3JXky5kl7B/07GsZkgfXt9HXNhxRfUryO0l+K8nPpttPzer3qZP8SVVdVbNPfE5W/Ll3lFjk43LiNL9++cO1jPo2uo/teMP0EtWH17y0tNVatzMGzGVBx4OF7dt19SZH4P5d8HFpO32Yy45euN2zqNY7uc1lqKonJPlckjd2931r161in7r7p919emZnX85K8ku7W9HDU1UvT3JXd1+127XssOd195mZfYv9BVX1/LUrV/G5d5RY6cdlGfU9zPv4QJJ/nOT0JHcm+c87VNaO2O3jwVbvY1DvEbl/V/W4tBMh6a+r6oQkmX7eNS3f6CsD5vqak2WpqmMze4J9ors/Py1e6T49oLt/kOSKzE4xHlezr2VIHlzfz2uvB39tw5HUp+cmeUVV3ZrZN72/MMnvZrX7lO6+Y/p5V5IvZDZwHBXPvVW24Mfljml+/fKHaxn1bXQfW9Ldfz0dMH+W5L9ktn+3U+s92foYcFgLPh7s+L4d1Xsk79+pvh9k549L2+nDXHYiJK392P/XZPa66APLX10zZye5dzqd+KUkL6mqJ0+nAV8yLVu6qqrMPpH35u5+z5pVq9ynvVV13DT/uMxeo745syflK6dm6/v0QF/Xfm3DpUnOm95l8Iwkz87sosOl6+6Luvuk7t6X2UV8X+nuf50V7lNVPb6qnvjAfGbPmRuyws+9o8GiH5dp3X1VdfY0/rx6zbYejmXUt9F9bMkDYWDyLzPbvw9sf/T3Ofz6melveqtjwOHqWujxYKf37Ub1Hon7dwnHpe30YT5buYApySczO333fzN7be91mb3m96dJvp3kfyZ5ytS2krw/s9cdr0+yf812/m2SQ9P02q3UsJNTkudldlrzuiTXTNPLVrxPv5Lkm1Ofbkjytmn5M6cn06Ek/y3JY6blj51uH5rWP3PNtt469fWWbPEdOAvs3wvy/9/dtrJ9mmq/dppuTPLWafnKPveOhmk
Zj0uS/dPf5neSvC9bv6B4oePwRvVtdB/bqPXjUy3XZXbQO2FN++HfZ2bj8l9O69667vHa0hhwmFoXfjzY4X27Ub1H3P7NEo5LW+3DvJNP3AYAGPCJ2wAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADPw/y2eL/LADdJkAAAAASUVORK5CYII=\n"
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "fig, ax = plt.subplots(1,2, figsize = (10,6))\n",
-    "\n",
-    "vars = []\n",
-    "for img in imgs:\n",
-    "    for i in range(3):\n",
-    "        vars.append(img.var())\n",
-    "\n",
-    "ax[0].hist(vars, color = \"purple\")\n",
-    "ax[1].hist([np.sum(img[:,:,:3] > 240) for img in imgs], color = \"yellow\")\n",
-    "\n",
-    "plt.show()"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 48,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(1506, 1506, 4)\n",
-      "[1051, 1618, 142, 14, 31, 363, 3084, 126, 1906]\n",
-      "['C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (1).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (2).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (3).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (4).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (5).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2062.jpeg', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2063.jpeg', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2057.jpeg', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2059.jpeg']\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": "<Figure size 720x432 with 1 Axes>",
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlMAAAFlCAYAAADPim3FAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAU5ElEQVR4nO3df6zldX3n8dd7Z8YfWYmo3LQEGEdXko1tLNAJi7FpjEaLaGA31QSTrejaTOJKqkk3G2gTWv2rbrK6cTUSVkjRNYpFtzu1GJddaax/iA50QH6UOnXdAGHLCAqSVrvjvveP86V7vdzLPXc+53LPDI9HcnK/5/v9cM7nfuZ7yTPnZ3V3AAA4Pv9opycAAHAiE1MAAAPEFADAADEFADBATAEADBBTAAADdu/UHZ922mm9b9++nbp7AIC53Xbbbd/v7pX1ju1YTO3bty+HDh3aqbsHAJhbVf2vjY55mg8AYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABgwd0xV1a6q+ouq+tI6x55bVTdU1ZGqurWq9i10lgAAS2orj0y9L8m9Gxx7d5IfdPcrknwkyYdGJwYAcCKYK6aq6swkb07yyQ2GXJLk+mn7xiSvr6oanx4AwHLbPee4/5Dk3yY5ZYPjZyS5P0m6+1hVPZbkJUm+v3pQVR1IciBJ9u7dexzT3bp9V/zpM3I/2+17f/DmnZ4CALCOTR+Zqqq3JHm4u28bvbPuvqa793f3/pWVldGbAwDYcfM8zfeaJBdX1feSfC7J66rqP68Z82CSs5KkqnYneWGSRxY4TwCApbRpTHX3ld19ZnfvS3Jpkq92979cM+xgksum7bdOY3qhMwUAWELzvmbqKarqg0kOdffBJNcm+XRVHUnyaGbRBQBw0ttSTHX3nyX5s2n7qlX7f5zkbYucGADAicAnoAMADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAAzaNqap6XlV9s6ruqKq7q+oD64x5Z1UdrarD0+U3t2e6AADLZfccY36S5HXd/URV7Uny9ar6cnd/Y824G7r78sVPEQBgeW0aU93dSZ6Yru6ZLr2dkwIAOFHM9ZqpqtpVVYeTPJzk5u6+dZ1hv15Vd1bVjVV11ga3c6CqDlXVoaNHjx7/rAEAlsRcMdXdP+3uc5KcmeT8qvrFNUP+JMm+7n5VkpuTXL/B7VzT3fu7e//KysrAtAEAlsOW3s3X3T9MckuSC9fsf6S7fzJd/WSSX17I7AAAltw87+ZbqapTp+3nJ3lDkr9cM+b0VVcvTnLvAucIALC05nk33+lJrq+qXZnF1+e7+0tV9cEkh7r7YJLfqqqLkxxL8miSd27XhAEAlsk87+a7M8m56+y/atX2lUmuXOzUAACWn09ABwAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGLBpTFXV86rqm1V1R1XdXVUfWGfMc6vqhqo6UlW3VtW+bZktAMCSmeeRqZ8keV13/1KSc5JcWFUXrBnz7iQ/6O5XJPlIkg8tdJYAAEtq05jqmSemq3umS68ZdkmS66ftG5O8vqpqYbMEAFhSc71mqqp2VdXhJA8nubm7b10z5Iwk9ydJdx9L8liSlyxwngAAS2mumOrun3b3OUnOTHJ+Vf3i8dxZVR2oqkNVdejo0aPHcxM
AAEtlS+/m6+4fJrklyYVrDj2Y5KwkqardSV6Y5JF1/vtrunt/d+9fWVk5rgkDACyTed7Nt1JVp07bz0/yhiR/uWbYwSSXTdtvTfLV7l77uioAgJPO7jnGnJ7k+qralVl8fb67v1RVH0xyqLsPJrk2yaer6kiSR5Ncum0zBgBYIpvGVHffmeTcdfZftWr7x0nettipAQAsP5+ADgAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAM2DSmquqsqrqlqu6pqrur6n3rjHltVT1WVYeny1XbM10AgOWye44xx5L8dnffXlWnJLmtqm7u7nvWjPvz7n7L4qcIALC8Nn1kqrsf6u7bp+0fJbk3yRnbPTEAgBPBll4zVVX7kpyb5NZ1Dr+6qu6oqi9X1S8sYnIAAMtunqf5kiRV9YIkX0jy/u5+fM3h25O8tLufqKqLkvxxkrPXuY0DSQ4kyd69e493zgAAS2OuR6aqak9mIfWZ7v7i2uPd/Xh3PzFt35RkT1Wdts64a7p7f3fvX1lZGZw6AMDOm+fdfJXk2iT3dveHNxjz89O4VNX50+0+ssiJAgAso3me5ntNkt9I8u2qOjzt+50ke5Oku69O8tYk76mqY0n+Lsml3d2Lny4AwHLZNKa6++tJapMxH0vysUVNCgDgROET0AEABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAZvGVFWdVVW3VNU9VXV3Vb1vnTFVVR+tqiNVdWdVnbc90wUAWC675xhzLMlvd/ftVXVKktuq6ubuvmfVmDclOXu6/LMkn5h+AgCc1DZ9ZKq7H+ru26ftHyW5N8kZa4ZdkuRTPfONJKdW1ekLny0AwJLZ0mumqmpfknOT3Lrm0BlJ7l91/YE8NbhSVQeq6lBVHTp69OgWpwoAsHzmjqmqekGSLyR5f3c/fjx31t3XdPf+7t6/srJyPDcBALBU5oqpqtqTWUh9pru/uM6QB5Octer6mdM+AICT2jzv5qsk1ya5t7s/vMGwg0neMb2r74Ikj3X3QwucJwDAUprn3XyvSfIbSb5dVYenfb+TZG+SdPfVSW5KclGSI0n+Nsm7Fj5TAIAltGlMdffXk9QmYzrJexc1KQCAE4VPQAcAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABmwaU1V1XVU9XFV3bXD8tVX1WFUdni5XLX6aAADLafccY/4wyceSfOppxvx5d79lITMCADiBbPrIVHd/Lcmjz8BcAABOOIt6zdSrq+qOqvpyVf3CRoOq6kBVHaqqQ0ePHl3QXQMA7JxFxNTtSV7a3b+U5D8m+eONBnb3Nd29v7v3r6ysLOCuAQB21nBMdffj3f3EtH1Tkj1VddrwzAAATgDDMVVVP19VNW2fP93mI6O3CwBwItj03XxV9dkkr01yWlU9kOT3kuxJku6+Oslbk7ynqo4l+bskl3Z3b9uMAQCWyKYx1d1v3+T4xzL76AQAgGcdn4AOADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAP
EFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAzYNKaq6rqqeriq7trgeFXVR6vqSFXdWVXnLX6aAADLaZ5Hpv4wyYVPc/xNSc6eLgeSfGJ8WgAAJ4ZNY6q7v5bk0acZckmST/XMN5KcWlWnL2qCAADLbPcCbuOMJPevuv7AtO+htQOr6kBmj15l7969C7jrZ499V/zpTk9hYb73B2/e6SkszMny7+LfhO12Mp1jJ4uT6W9lp8+vZ/QF6N19TXfv7+79Kysrz+RdAwBsi0XE1INJzlp1/cxpHwDASW8RMXUwyTumd/VdkOSx7n7KU3wAACejTV8zVVWfTfLaJKdV1QNJfi/JniTp7quT3JTkoiRHkvxtkndt12QBAJbNpjHV3W/f5Hgnee/CZgQAcALxCegAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwIC5YqqqLqyq+6rqSFVdsc7xd1bV0ao6PF1+c/FTBQBYPrs3G1BVu5J8PMkbkjyQ5FtVdbC771kz9Ibuvnwb5ggAsLTmeWTq/CRHuvu73f33ST6X5JLtnRYAwIlhnpg6I8n9q64/MO1b69er6s6qurGqzlrvhqrqQFUdqqpDR48ePY7pAgAsl0W9AP1Pkuzr7lcluTnJ9esN6u5runt/d+9fWVlZ0F0DAOyceWLqwSSrH2k6c9r3D7r7ke7+yXT1k0l+eTHTAwBYbvPE1LeSnF1VL6uq5yS5NMnB1QOq6vRVVy9Ocu/ipggAsLw2fTdfdx+rqsuTfCXJriTXdffdVfXBJIe6+2CS36qqi5McS/Jokndu45wBAJbGpjGVJN19U5Kb1uy7atX2lUmuXOzUAACWn09ABwAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGDBXTFXVhVV1X1Udqaor1jn+3Kq6YTp+a1XtW/hMAQCW0KYxVVW7knw8yZuSvDLJ26vqlWuGvTvJD7r7FUk+kuRDi54oAMAymueRqfOTHOnu73b33yf5XJJL1oy5JMn10/aNSV5fVbW4aQIALKd5YuqMJPevuv7AtG/dMd19LMljSV6yiAkCACyz3c/knVXVgSQHpqtPVNV923RXpyX5/jbd9rPNwteynp1PAi/1OXmC/Zss9VqeYJ6xtTzBzrGtck4uznGt5TN0fr10owPzxNSDSc5adf3Mad96Yx6oqt1JXpjkkbU31N3XJLlmjvscUlWHunv/dt/Ps4G1XAzruDjWcnGs5WJYx8U5Uddynqf5vpXk7Kp6WVU9J8mlSQ6uGXMwyWXT9luTfLW7e3HTBABYTps+MtXdx6rq8iRfSbIryXXdfXdVfTDJoe4+mOTaJJ+uqiNJHs0suAAATnpzvWaqu29KctOafVet2v5xkrctdmpDtv2pxGcRa7kY1nFxrOXiWMvFsI6Lc0KuZXk2DgDg+Pk6GQCAASddTG321Tf8rKr6XlV9u6oOV9Whad+Lq+rmqvrO9PNF0/6qqo9Oa3tnVZ23s7PfWVV1XVU9XFV3rdq35bWrqsum8d+pqsvWu6+T3QZr+ftV9eB0bh6uqotWHbtyWsv7qurXVu1/Vv/9V9VZVXVLVd1TVXdX1fum/c7LLXiadXROblFVPa+qvllVd0xr+YF
p/8umr587UrOvo3vOtH/Dr6fbaI2XQnefNJfMXiD/10lenuQ5Se5I8sqdntcyX5J8L8lpa/b9uyRXTNtXJPnQtH1Rki8nqSQXJLl1p+e/w2v3q0nOS3LX8a5dkhcn+e7080XT9ot2+ndbkrX8/ST/Zp2xr5z+tp+b5GXT3/wuf/+dJKcnOW/aPiXJX03r5bxczDo6J7e+lpXkBdP2niS3Tufa55NcOu2/Osl7pu1/neTqafvSJDc83Rrv9O/35OVke2Rqnq++YXOrvx7o+iT/fNX+T/XMN5KcWlWn78D8lkJ3fy2zd6+uttW1+7UkN3f3o939gyQ3J7lw2ye/ZDZYy41ckuRz3f2T7v6fSY5k9rf/rP/77+6Huvv2aftHSe7N7BsqnJdb8DTruBHn5Aamc+uJ6eqe6dJJXpfZ188lTz0n1/t6uo3WeCmcbDE1z1ff8LM6yX+rqttq9gn1SfJz3f3QtP2/k/zctG19N7fVtbOmT+/y6emn6558airWci7T0yPnZvZIgPPyOK1Zx8Q5uWVVtauqDid5OLMw/+skP+zZ188lP7suG3093VKv5ckWU2zdr3T3eUnelOS9VfWrqw/27PFVb/k8DtZu2CeS/JMk5yR5KMm/39HZnECq6gVJvpDk/d39+Opjzsv5rbOOzsnj0N0/7e5zMvsGlfOT/NOdndHinWwxNc9X37BKdz84/Xw4yX/J7ET/myefvpt+PjwNt76b2+raWdMNdPffTP8T/r9J/lP+/0P61vJpVNWezALgM939xWm383KL1ltH5+SY7v5hkluSvDqzp5Sf/KzL1evyD2tWP/v1dEu9lidbTM3z1TdMquofV9UpT24neWOSu/KzXw90WZL/Om0fTPKO6R1AFyR5bNVTB8xsde2+kuSNVfWi6SmDN077nvXWvB7vX2R2biaztbx0etfPy5KcneSb8fef6bUl1ya5t7s/vOqQ83ILNlpH5+TWVdVKVZ06bT8/yRsyew3aLZl9/Vzy1HNyva+n22iNl8NOvwJ+0ZfM3p3yV5k9J/u7Oz2fZb5k9g6TO6bL3U+uV2bPT/+PJN9J8t+TvHjaX0k+Pq3tt5Ps3+nfYYfX77OZPdT/fzJ7/v7dx7N2Sf5VZi+mPJLkXTv9ey3RWn56Wqs7M/sf6emrxv/utJb3JXnTqv3P6r//JL+S2VN4dyY5PF0ucl4ubB2dk1tfy1cl+Ytpze5KctW0/+WZxdCRJH+U5LnT/udN149Mx1++2Rovw8UnoAMADDjZnuYDAHhGiSkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAY8P8AI4X8cRbJkc4AAAAASUVORK5CYII=\n"
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "import skimage.measure as m\n",
-    "imgs_gray = [cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) for img in imgs]\n",
-    "imgs_gray = [img > 250 for img in imgs_gray]\n",
-    "\n",
-    "nums = []\n",
-    "for img in imgs_gray:\n",
-    "    labels, num = m.label(img, return_num=True)\n",
-    "    nums.append(num)\n",
-    "\n",
-    "fig, ax = plt.subplots(1,1, figsize = (10,6 ))\n",
-    "ax.hist(nums)\n",
-    "print(imgs[0].shape)\n",
-    "print(nums)\n",
-    "print(images)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.6"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "from PIL import Image\n",
+    "import cv2\n",
+    "import os"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "outputs": [],
+   "source": [
+    "pth = r\"C:\\Users\\ptrkm\\Downloads\"\n",
+    "polarized = [os.path.join(pth, str(i)+\"_polariseret.jpeg\") for i in range(1, 3)] + [os.path.join(pth, str(i)+\"-polariseret.jpeg\") for i in range(3, 7)]\n",
+    "non_polarized = [os.path.join(pth, str(i)+\"-non-polariset.jpeg\") for i in range(1, 7)]\n",
+    "\n",
+    "polarized = [np.asarray(Image.open(pol)) for pol in polarized]\n",
+    "non_polarized = [np.asarray(Image.open(pol)) for pol in non_polarized]"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "outputs": [
+    {
+     "data": {
+      "text/plain": "<Figure size 1440x720 with 18 Axes>",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAABHcAAAJKCAYAAACmm6lSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABOqklEQVR4nO39f6ytd1kn/L8vW2ASqVE8lenTH55KitrxG6GeIDMQMiF5FCqhOk906iiSDPH8IxPIiKbIHz2dZJ6MTiRowpip0gSVoXECauOgiEof9RlBTrH8aDstFVDaqbQ8okDGEZDr+8e6T7s5nP3rnL3Wve77fr2SlbP2WmvvdV37s/fnOvu6P/fnru4OAAAAANP0VWMHAAAAAMD509wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmLC1NXeq6raqerSqPnzA1/9AVd1bVfdU1X9ZV1wAbAd1AoDdqBEAh1PdvZ4vXPWCJJ9L8svd/W37vPaaJL+W5IXd/emq+obufnQtgQGwFdQJAHajRgAcztpW7nT3Hyb5652PVdUzqup3ququqvqjqvqW4akfTfLG7v708LkmY4CZUycA2I0aAXA4m95z59Yk/6a7vyPJa5L8p+HxZyZ5ZlX9v1X1nqp60YbjAmA7qBMA7EaNANjFxZt6o6p6apJ/luS/VtWZh5+yI45rkvzzJFck+cOq+v91999sKj4AxqVOALAbNQJgbxtr7mS1SuhvuvtZ53juoSTv7e4vJPlYVT2Q1QT9vg3GB8C41AkAdqNGAOxhY6dldfdnsppsvz9JauXbh6d/I6tOe6rqWFZLKz+6qdgAGJ86AcBu1AiAva3zUuhvTfInSb65qh6qqlck+aEkr6iqDyS5J8kNw8vfmeT/q6p7k7w7yU909/+3rtgAGJ86AcBu1AiAw1nbpdABWJaquijJ6SQPd/dLxo4HAACWYtNXywJgvl6V5L6xgwAAgKXR3AHgglXVFUm+J8kvjR0LAAAszVqulnXs2LE+fvz4Or40wKTdddddn+ruS8eOYw3ekOQnk1yy3wvVCIDdzbhOHJg6AbC73erEWpo7x48fz+nTp9fxpQEmrar+YuwYjlpVvSTJo919V1X9811eczLJySS56qqr1AiAXcyxThyWvyUAdrdbnXBaFgAX6nlJXlpVH09ye5IXVtWv7nxBd9/a3Se6+8Slly76gDQAABw5zR0ALkh3v7a7r+ju40luTPIH3f3DI4cFAACLsZbTsoBlqVtqlPftm3uU9wW4UDXOtJk2bQJMgjrBYWnuAHBkuvvOJHeOHAYAACyK07IAAAAAJkxzBwAAAGDCNHcAAAAAJkxzBwAAAGDCNHcAAAAAJkxzBwAAAGDCNHcAAAAAJuzisQOAuara/Ht2b/49AQCAeahbRvgjJknf7A+ZC2XlDgAAAMCEae4AAAAATJjmDgAAAMCE2XMHAIC1socDAKyXlTsAAAAAE6a5AwAAADBhTsuCGRnj8utJklMjvS8AhzLW6VEAwHpZuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABN28dgBAACwIad6pPetcd4XABZCcwcAAAAYTd0yzkGAvnmkgx5r4LQsAAAAgAnbt7lTVVdW1bur6t6quqeqXrWJwAAAAADY30FW7nwxyY9397VJnpvkx6rq2vWGBcBUOAgAwF7UCYD123fPne5+JMkjw/3PVtV9SS5Pcu+aY9sY5/cBXJAzBwHeX1WXJLmrqt7V3bOpEwBckNnXCX9PAGM71J47VXU8ybOTvHct0QAwOd39SHe/f7j/2SRnDgIAgDoBsAEHvlpWVT01yduSvLq7P3OO508mOZkkV1111ZEFCMB0zPUggCOyAEdjrnUCYGwHWrlTVU/KqrHzlu5++7le0923dveJ7j5
x6aWXHmWMAEzAXgcBqupkVZ2uqtOPPfbYOAECMCp1AmB9DnK1rErypiT3dffr1x8SAFOz30EABwAAlk2dAFivg6zceV6SlyV5YVXdPdyuX3NcAEyEgwAA7EWdAFi/g1wt64+TjLPZAABTcOYgwIeq6u7hsZ/q7neMFxIAW0SdAFizA2+oPGunRtqw8uZx3hbgKDkIAMBe1AmA9dPcAQAAgHMobUkm4kBXywIAAABgO2nuAAAAAEyY5g4AAADAhGnuAAAAAEyY5g4AAADAhGnuAAAAAEyYS6EDAAAAyake6X1dc/5CWbkDAAAAMGFW7gDAlqqRDmL1SAftAAA4P1vV3KlbxlqK5X+xAAAAwDQ5LQsAAABgwrZq5Q4A7GW0FZ5jbS4IAHsY6/TdsThtGHanuQMAAMDWW1ozCw5ju5o7joyyBooAAAAAc2bPHQAAAIAJ266VOwAAG2aFJ8zHeFffBRiX5g4A0+H0XQAA+ApOywIAAACYMCt32BjL3gEAAODoae6MaKxmRzurAYAt5CAAMFlOGwZGprkDAADMgyYLsFD23AEAAACYMM0dAAAAgAnT3AEAAACYMHvuABdurPPbbx7nbWHubGwMADAtmjsAAKyXgwAAsFaaOwAAAMB4HAS4YJo7C2S5PQAAAEs31t/GvYZelg2VAQAAACbsQM2dqnpRVd1fVQ9W1U3rDgqAaVEnANiLOgGwXvuellVVFyV5Y5L/M8lDSd5XVXd0973rDg5gL3XLOOso++aRzgneUuoEsK3Uie2gTgCs30H23HlOkge7+6NJUlW3J7khickYgESdALaVDTq3hToBsGYHae5cnuQTOz5+KMl3riccgEPwn/ZtoU4AsBd1AmDNjuxqWVV1MsnJ4cPPVdX95/FljiX51FHFtOXkOj9LyTNZTq7nzPMCd9X/xgv67Ik6ohqRLOdnL1lOrkvJM1lOrkvJM1Enjow6cV6WkutS8kyWk+tS8kw2WCcO0tx5OMmVOz6+Ynjsy3T3rUluPa/QBlV1urtPXMjXmAq5zs9S8kyWk+tS8jwC+9aJo6gRybLGZCm5LiXPZDm5LiXPZFm5XiB1Yg2WkutS8kyWk+tS8kw2m+tBrpb1viTXVNXVVfXkJDcmuWO9YQEwIeoEAHtRJwDWbN+VO939xap6ZZJ3JrkoyW3dfc/aIwNgEtQJAPaiTgCs30FW7qS739Hdz+zuZ3T3vz/I51TVbVX1aFV9+ICv/4Ek/0dV3VNV/+UgnzNxF7zsdEKWkutS8kyWk+tS8rxg6sRaLOXnbyl5JsvJdSl5JsvK9YIctk6cT42oqnuzqhNLqBHJcn7+lpJnspxcl5JnssFcq3s9V5upqhck+VySX+7ub9vntdck+bUkL+zuT1fVN3T3o2sJDICtoE4AsBs1AuBwDrRy53x09x8m+eudj1XVM6rqd6rqrqr6o6r6luGpH03yxu7+9PC5JmOAmVMnANiNGgFwOGtr7uzi1iT/pru/I8lrkvyn4fFnJvnuqvq7qvrfVXXbhuNau6r6eFV9qKrurqrTw2NPq6p3VdVHhn+/buw4D+tcS2Z3y6tWfr6qHqyqD1bVdeNFfni75Hqqqh4exvXuqrp+x3OvHXK9v6q+e5yoD6+qrqyqd1fVvcPpL68aHp/VuO6R5+zGdGIWWSfmWiMSdWKOc4o6Mb8xnZC9asQzh3H6u6r6n1V102hRroE6Me35JFlOjUjUidHGtbvXdktyPMmHh/tPTfJ3Se7ecbtveO63slp2+czh9vkk37nO2DZ9S/LxJMfOeuxnktw03L8pyU+PHed55PWCJNedGee98kpyfZLfTlJJnpvkvWPHfwS5nkrymnO89tokH0jylCRXJ/nzJBeNncMB87wsyXXD/UuSPDDkM6tx3SPP2Y3pNt/Uice/D7OsEUPs6sTM5hR1Yn5juq23Q9aIXx++5y9I8okkH0py7dg5HOH3Qp3o6c4ne+Q5y/lEnRhnXDe5cuerkvxNdz9rx+1
bh+e+kORj3f1Adz+Q5C+TvGyDsY3lhiRvHu6/Ocn3jhfK+elzLJnN7nndkNV5093d70nytVV12UYCPQK75LqbG5Lc3t1/390fS/JgkuesLbgj1N2PdPf7h/ufTXJfksszs3HdI8/dTHZMJ0Sd+HKTrxGJOrGHyc4p6sSuJjumE7FXjXgoyT1JHhx+Dx9I8v9kNSZzpk5MZD5JllMjEnVij09Z67hurLnT3Z9J8rGq+v7k8aVX3z48fU9W3atU1bEkX5fkH20qtg3pJL9bq3OETw6PPb27Hxnu/1WSp48T2pHbLa/LszqScsZD2fuHfypeOSwfvG3HcthZ5FpVx5M8O8l7M+NxPSvPZMZjus0WXieWVCOSGc8nu5jtnKJOzG9Mt9U+NeI3kjw/ySeGGvHMrOrGnL7/6sTKHH/PZj2fqBObG9e1NXeq6q1J/iTJN1fVQ1X1iiQ/lOQVVfWBrCbcM930u5P8fa0uX/juJLcn+d/rim0kz+/u65K8OMmP1eoKAI/r1Tqt9Vy6bERzzWuHX0jyjCTPSvJIkp8dNZojVFVPTfK2JK8e/kP1uDmN6znynO2Ybht14sssskYk885tMNs5RZ2Y35huk0PWiHcm+WyS/yurGvETWZ3KOyfqxDzNej5RJzY7rhev6wt39w/u8tSLzvHYw0n+Z3d/d7LaZGhdcY2lux8e/n20qn49q+VXn6yqy7r7kWHZ2Vx29t8tr4eTXLnjdVcMj01Wd3/yzP2q+sWszvlOJp5rVT0pqwnqLd399uHh2Y3rufKc65huI3XiCQurEckM55PdzHVOUScef342Y7ptDlMjurur6v9OcuqsOjGb7786Mf355FzmPJ+oE48/v7Fx3fTVsnbzviTXVNXVVfXkJDcmuWPkmI5MVX11VV1y5n6S70ry4axyfPnwspcn+c1xIjxyu+V1R5IfGZbRPjfJ3+5YljdJZ50L+n1ZjWuyyvXGqnpKVV2d5Jokf7rp+M5HVVWSN2W1SeHrdzw1q3HdLc85julMzLZOLLBGJDObT/YyxzlFnZjfmM6EOqFOTM5c5xN1YqRx7S3YZbqf2CH7gax2jH7d2PEccW7flNWu2GeWkL5uePzrk/x+ko8k+b0kTxs71vPI7a1ZLTX7QlbnDL5it7yy2v38jcMYfyjJibHjP4Jcf2XI5YPDL+tlO17/uiHX+5O8eOz4D5Hn87NaIvnBPHE1iuvnNq575Dm7MZ3Lba51Ys41YshDnZjZnKJOzG9M53JTJ9SJbb4tpUYMsasTI4xrDW9wpI4dO9bHjx8/8q8LMHV33XXXp7r70rHjGJMaAbA7dUKdANjLbnViLXvuHD9+PKdPn17HlwaYtKr6i7FjGJsaAbA7dUKdANjLbnViW/bcAQAAAOA8aO4AAAAATNjaLoUOS1e1+fdcwxZaAKzBGDUiUScYV1VdlOR0koe7+yVjxwPbTJ3gsA68cqeqLqqqP6uq39r/1QAAAF/mVUnuGzsIgDk6zGlZJmMAAODQquqKJN+T5JfGjgVgjg7U3DEZAwAAF+ANSX4yyZdGjgNglg66cucNMRkDAACHVFUvSfJod9+1x2tOVtXpqjr92GOPbTA6gHnYt7lzkMl4eJ0JGQAAONvzkry0qj6e5PYkL6yqX935gu6+tbtPdPeJSy+9dIwYASbtICt39p2MExMywNLZeB+Ac+nu13b3Fd19PMmNSf6gu3945LAAZmXf5o7JGIADsvE+AACM4OKxAwBg+nZsvP/vk/zbkcMBYEt1951J7hw5DGAXdUuN8r59c4/yvnNyqOaOyRiAXbwhq433Lxk5DgAAWJyDXi0LAM7JVVAAAGBcTssCLpjlm4t3ZuP965P8oyRfU1W/unN/tu6+NcmtSXLixAkDBwAAR8jKHQAuiI33AQBgXJo7AAAAABPmtCwAjoyN9wEAYPOs3AEAAACYMM0dAAAAgAnT3AEAAACYMHvuAACwVnVLjfK+fXOP8r4AsGlW7gAAAABMmJU
7AAALMdYKGgBgvazcAQAAAJgwK3dgRhyRBQAAWB4rdwAAAAAmTHMHAAAAYMI0dwAAAAAmzJ47AAAAwGjG2ju0b+5R3ncdrNwBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJcyl0mJNTI13K79Q4ly4EAADAyh0AAACASdPcAQAA1qaqrqyqd1fVvVV1T1W9auyYAOZm3+aOyRgAALgAX0zy4919bZLnJvmxqrp25JgAZuUge+6cmYzfX1WXJLmrqt7V3feuObaNqVvG2S+kbx5pfxQAANiQ7n4kySPD/c9W1X1JLk8ym78nAMa2b3PHZAzAXqrqyiS/nOTpSTrJrd39c+NGBcA2qqrjSZ6d5L0jh3KkHCwGxnaoPXfmOhkDcEEstwdgX1X11CRvS/Lq7v7MWc+drKrTVXX6scceGydAgAk78KXQ95qMh+dPJjmZJFddddWRBQjAdlvCCk9HZAEuTFU9Kau/Jd7S3W8/+/nuvjXJrUly4sQJkx/AIR2oubPfZJyYkAGwwhO23qmR/ot2apwGKduhqirJm5Lc192vHzsegDk6yNWyTMYA7MtyewB28bwkL0vywqq6e7hdP3ZQAHNykJU7ZybjD1XV3cNjP9Xd71hbVABMiuX2AOymu/84ieVbAGt0kKtlmYwB2JUVngDAXJW/hJmIQ10tCwDOwXJ7AAAY0YGvljVrY20uePM4bwtwlKzwBACAcVm5AwAAADBhmjsAAAAAE6a5AwAAADBhmjsAAAAAE2ZDZQAAgAka6zLdPdL1aNiAsS42dMq1OS6UlTsAAAAAE6a5AwAAADBhmjsAAAAAE7ZVe+7ULWOdZ+ekUQC2j70UAAA4iK1q7gDAVhprc0EAADiA7Wru+M8zAAAAwKFsV3MHAPbg9F0AAPhKmjvM3lh7VgBrYIUnAHsY7yDAstgTDraP5g4AADAPDgIAC6W5AwAsmhWeAMDUae6MyHJGAAAA4EJp7gAAALD1rLSE3WnusDEmYwAAADh6mjsAwFZwEAAA4Pxo7gAAX0aTBQBgWr5q7AAAAAAAOH9W7gAAsF6nRrpU583jvC0AbJrmDnDh/KcdAAA4XyP9PVGnRnnb9BrSPdBpWVX1oqq6v6oerKqbjj4MNqlqnBswX+oEAHtRJwDWa9/mTlVdlOSNSV6c5NokP1hV1647MACmQZ0AYC/qBMD6HWTlznOSPNjdH+3uzye5PckN6w0LgAlRJwDYizoBsGYH2XPn8iSf2PHxQ0m+cz3hABzcWKf7reMc2YlTJ4CtpE5sDXUCYM2ObEPlqjqZ5OTw4eeq6v7z+DLHknzqqGLacnKdn6XkmSwn13PmeYF/LHzjBX32RB1RjUiW87OXLCfXpeSZLCfXpeSZqBNHRp04L0vJdSl5JsvJdSl5JhusEwdp7jyc5ModH18xPPZluvvWJLeeV2iDqjrd3Scu5GtMhVznZyl5JsvJdSl5HoF968RR1IhkWWOylFyXkmeynFyXkmeyrFwvkDqxBkvJdSl5JsvJdSl5JpvN9SB77rwvyTVVdXVVPTnJjUnuWG9YAEyIOgHAXtQJgDXbd+VOd3+xql6Z5J1JLkpyW3ffs/bIAJgEdQKAvagTAOt3oD13uvsdSd6x5liSI1iKOSFynZ+l5JksJ9el5HnB1Im1WEquS8kzWU6uS8kzWVauF0SdWIul5LqUPJPl5LqUPJMN5lq9pu38q+q2JC9J8mh3f9sBXv8DSU4l6SQf6O5/tZbAANgK6gQAu1EjAA5nnc2dFyT5XJJf3m9Crqprkvxakhd296er6hu6+9G1BAbAVlAnANiNGgFwOAfZUPm8dPcfJvnrnY9V1TOq6neq6q6q+qOq+pbhqR9NcmeS91TVg0n+9briGktVfbyqPlRVd1fV6eGxp1XVu6rqI8O/Xzd2nIdVVbdV1aNV9eEdj50zr1r5+ap6sKo+WFXXjRf54e2S66mqengY17ur6vodz712yPX
+qvrucaI+vKq6sqreXVX3VtU9VfWq4fFZjeseec5uTLeVOvGEudaIRJ2Y45yiTsxvTLfRedSINyb5zlpdQv2/V9VNGw55rdSJac8nyXJqRKJOjDau3b22W5LjST684+PfT3LNcP87k/zBcP83knw6yekk703y0STXrjO2Td+SfDzJsbMe+5kkNw33b0ry02PHeR55vSDJdWeN8znzSnJ9kt9OUkmem+S9Y8d/BLmeSvKac7z22iQfSPKUJFcn+fMkF42dwwHzvCzJdcP9S5I8MOQzq3HdI8/Zjek239SJx/OeZY0YYlcnZjanqBPzG9NtvR2yRvzHJH+X5M+yOp3rA+rENG5LqRNLqRFD/OrECOO6tpU7Z6uqpyb5Z0n+a1XdneQ/D9+MJDmW5PNJ/mlWl0Z8WpJ/uanYRnRDkjcP99+c5HvHC+X89DmOqmT3vG7Iamltd/d7knxtVV2Widgl193ckOT27v777v5YkgeTPGdtwR2h7n6ku98/3P9skvuSXJ6Zjeseee5msmM6FerEV5h8jUjUiT1Mdk5RJ3Y12TGdgn1qxMVZfa//KMm/SPILWTV8bth4oJulTkxkPkmWUyMSdWKPT1nruG6suTO8199097N23L51eO5/Jfkf3f2FIcmHk3zrrl9pmjrJ7w7LSE8Ojz29ux8Z7v9VkqePE9qR2y2vy5N8YsfrHsreP/xT8cph+eBtO5bDziLXqjqe5NlZrZSY7bielWcy4zHdckuuE0uqEcmM55NdzHZOUSfmN6ZbbK8a8VCSDyf5y6FGPJDkHzKv7786sTLH37NZzyfqxObGdWPNne7+TJKPVdX3J4+fV/ftw9N/mqHzXlXHkvzjJJ/ZVGwb8vzuvi7Ji5P8WK02iXtcr9ZprWd36xHNNa8dfiHJM5I8K8kjSX521GiO0HCE7G1JXj38/j5uTuN6jjxnO6bbbuF1YpE1Ipl3boPZzinqxPzGdJvtUyN+I8k/GR4/luSZSR4bI841UifmadbziTqx2XFdW3Onqt6a5E+SfHNVPVRVr0jyQ0leUVUfSHJPnlgq+d+SPLmq7k3y7iS/m9X5Z7PR3Q8P/z6a5NezWn71yTPLzYZ/57Kr/255PZzkyh2vu2J4bLK6+5Pd/Q/d/aUkv5gnltVNOteqelJWE9Rbuvvtw8OzG9dz5TnXMd1G6sQTFlYjkhnOJ7uZ65yiTsxvTLfNIWvEO7P6A+rGrGrETyT52szo+69OTH8+OZc5zyfqxObHdZ1Xy/rB7r6su5/U3Vd095u6+2Pd/aLu/vbuvra7/93w8vcl+VKS70nyHVltNHTHumLbtKr66qq65Mz9JN+V1dLRO5K8fHjZy5P85jgRHrnd8rojyY8MR1qem+RvdyzLm6SzzgX9vqzGNVnlemNVPaWqrk5yTVYrD7ZeVVWSNyW5r7tfv+OpWY3rbnnOcUy3lTqxssAakcxsPtnLHOcUdWJ+Y7qNDlMjhhUAL8vqD8WXJnl7Vo0edWK6ZjWf7Gau84k6MdK49hbsMt1P7JD9QFZHYl83djxHnNs3ZbUr9pmjDK8bHv/6rHb9/0iS30vytLFjPY/c3prVkZIvZHXO4Ct2yyur3c/fOIzxh5KcGDv+I8j1V4ZcPjj8sl624/WvG3K9P8mLx47/EHk+P6slkh9Mcvdwu35u47pHnrMb07nc5lon5lwjhjzUiZnNKerE/MZ0Ljd1Qp3Y5ttSasQQuzoxwrjW8AYAAAAATNDF6/iix44d6+PHj6/jSwNM2l133fWp7r507DjGpEYA7E6dUCcA9rJbnVhLc+f48eM5ffr0Or40wKRV1V+MHcPY1AiA3akT6gTAXnarExu7FDoAAAAAR28tK3eApGrz72kLLYBpGKNGJOoEwFSoExyW5g5wweqWcapP36z6AMBUVNVFSU4nebi7XzJ2PABz4rQsAABgE16V5L6xgwCYI80dAABgrarqiiTfk+SXxo4FYI40dwAAgHV7Q5KfTPKlkeMAmCX
NHQAAYG2q6iVJHu3uu/Z4zcmqOl1Vpx977LENRgcwD5o7AADAOj0vyUur6uNJbk/ywqr61Z0v6O5bu/tEd5+49NJLx4gRYNJcLQsAgLVyVcVl6+7XJnltklTVP0/ymu7+4TFjApgbK3cAAAAAJuzAzZ2quqiq/qyqfmudAQEAAPPU3Xd290vGjgNgbg6zcudVSe5bVyAATJuDAAAAMI4DNXeq6ook35Pkl9YbDgAT5iAAAACM4KArd96Q5CeTfGl9oQAwVQ4CAADAePa9WlZVvSTJo91917C7/W6vO5nkZJJcddVVRxUfANPwhqwOAlwychwAAEyMqypeuIOs3HlekpdW1ceT3J7khVX1q2e/qLtv7e4T3X3i0ksvPeIwAdhWOw8C7PGak1V1uqpOP/bYYxuMDgAA5m/f5k53v7a7r+ju40luTPIH3f3Da48MgKnY9yCAAwAAALA+h7laFgB8BQcBAABgXPvuubNTd9+Z5M61RAIAAADAoR2quQMAe3EQAAAANs9pWQAAAAATprkDAAAAMGGaOwAAAAATZs8dAAAAIHVLjR0C58nKHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAADWpqqurKp3V9W9VXVPVb1q7JgA5ubisQMAAGAz6pYaOwSW6YtJfry7319VlyS5q6re1d33jh0YwFxYuQMAAKxNdz/S3e8f7n82yX1JLh83KoB50dwBAAA2oqqOJ3l2kveOHArArOzb3HGOLAB7UScAOIiqemqStyV5dXd/5qznTlbV6ao6/dhjj40TIMCEHWTPHefIArAXdQKAPVXVk7Jq7Lylu99+9vPdfWuSW5PkxIkTveHwLthY+1n1zZP7VgFrsm9zp7sfSfLIcP+zVXXmHFn/aYctY6NMxqBOALCXqqokb0pyX3e/fux4AOboUFfLco4sAHtRJwA4h+cleVmSD1XV3cNjP9Xd7xgvJDiYcuyUiThwc2evc2SH508mOZkkV1111ZEFuAmWUQJcuP32UshEawQAF6a7/ziJP5EB1uhAzZ39zpFNpn+eLADnz14K6+EgAAAAB3GQq2U5RxaAXakTAAAwrn2bO3niHNkXVtXdw+36NccFwHSoEwAAMKKDXC3LObIA7EqdAACAcR1k5Q4AAAAAW+pQl0KfrVMjbVh58zhvCwAAAMyH5g4AAMAE1UgnRbeLOcLWcVoWAAAAwIRZuQNzMtYphqfspQsAADAWK3cAAAAAJkxzBwAAAGDCnJbF7I210RwwI66qyFw4fRcAZklzBwBYNAcBAICp26rmjv9cAcATXOIWAICDsOcOAJNRNc4NgGlQJ4Cl2qqVOwAAAJMz1n5WC6ORtgH2ZpsszR0AAAAOTJMFto/mzojspQAAAABcKHvuAAAAAEyYlTtsjOWbAOxFnQAAOD9W7gAAAABMmOYOAAAAwIQ5LQsA+DJOjwIAmBbNHQAAAGBx5nQFa82dBXJEFgAAgK1xag3djoU5UHOnql6U5OeSXJTkl7r7P6w1KmBaxpqMbx7nbflK6gSwJ3Vi8dQJgPXad0PlqrooyRuTvDjJtUl+sKquXXdgAEyDOgHAXtQJgPU7yNWynpPkwe7+aHd/PsntSW5Yb1gATIg6AWylqnFufAV1AmDNDtLcuTzJJ3Z8/NDwGMCo/Kd9a6gTAOxFnQBYsyPbULmqTiY5OXz4uaq6/zy+zLEknzqqmLacXOdnKXkmy8n1nHleYIPnGy/osyfqiGpEspyfvWQ5uS4lz2Q5uS4lz0SdODLqxHlZSq5LyTNZTq5LyTPZYJ04SHPn4SRX7vj4iuGxL9Pdtya59bxCG1TV6e4+cSFfYyrkOj9LyTNZTq5LyfMI7FsnjqJGJMsak6XkupQ8k+XkupQ8k2XleoHUiTVYSq5LyTNZTq5
LyTPZbK4HOS3rfUmuqaqrq+rJSW5Mcsd6wwJgQtQJAPaiTgCs2b4rd7r7i1X1yiTvzOrShbd19z1rjwyASVAnANiLOgGwfgfac6e735HkHWuOJTmCpZgTItf5WUqeyXJyXUqeF0ydWIul5LqUPJPl5LqUPJNl5XpB1Im1WEquS8kzWU6uS8kz2WCu1d3r+cJVtyV5SZJHu/vbDvD6H0hyKkkn+UB3/6u1BAbAVlAnANiNGgFwOOts7rwgyeeS/PJ+E3JVXZPk15K8sLs/XVXf0N2PriUwALaCOgHAbtQIgMM5yIbK56W7/zDJX+98rKqeUVW/U1V3VdUfVdW3DE/9aJI7k7ynqh5M8q/XFddYqurjVfWhqrq7qk4Pjz2tqt5VVR8Z/v26seM8rKq6raoeraoP73jsnHnVys9X1YNV9cGqum68yA9vl1xPVdXDw7jeXVXX73jutUOu91fVd48T9eFV1ZVV9e6qureq7qmqVw2Pz2pc98hzdmO6rdSJJ8y1RiTqxBznFHVifmO6jc6jRrwxyXfW6hLq/72qbtpwyGulTkx7PkmWUyMSdWK0ce3utd2SHE/y4R0f/36Sa4b735nkD4b7v5Hk00lOJ3lvko8muXadsW36luTjSY6d9djPJLlpuH9Tkp8eO87zyOsFSa47a5zPmVeS65P8dpJK8twk7x07/iPI9VSS15zjtdcm+UCSpyS5OsmfJ7lo7BwOmOdlSa4b7l+S5IEhn1mN6x55zm5Mt/mmTjye9yxrxBC7OjGzOUWdmN+YbuvtkDXiPyb5uyR/ltXpXB9QJ6ZxW0qdWEqNGOJXJ0YY17Wt3DlbVT01yT9L8l+r6u4k/3n4ZiTJsSSfT/JPs7o04tOS/MtNxTaiG5K8ebj/5iTfO14o56fPcVQlu+d1Q1ZLa7u735Pka6vqskzELrnu5oYkt3f333f3x5I8mOQ5awvuCHX3I939/uH+Z5Pcl+TyzGxc98hzN5Md06lQJ77C5GtEok7sYbJzijqxq8mO6RTsUyMuzup7/UdJ/kWSX8iq4XPDxgPdLHViIvNJspwakagTe3zKWsd1Y82d4b3+prufteP2rcNz/yvJ/+juLwxJPpzkW3f9StPUSX53WEZ6cnjs6d39yHD/r5I8fZzQjtxueV2e5BM7XvdQ9v7hn4pXDssHb9uxHHYWuVbV8STPzmqlxGzH9aw8kxmP6ZZbcp1YUo1IZjyf7GK2c4o6Mb8x3WJ71YiHknw4yV8ONeKBJP+QeX3/1YmVOf6ezXo+USc2N64ba+5092eSfKyqvj95/Ly6bx+e/tMMnfeqOpbkHyf5zKZi25Dnd/d1SV6c5MdqtUnc43q1Tms9u1uPaK557fALSZ6R5FlJHknys6NGc4SGI2RvS/Lq4ff3cXMa13PkOdsx3XYLrxOLrBHJvHMbzHZOUSfmN6bbbJ8a8RtJ/snw+LEkz0zy2BhxrpE6MU+znk/Uic2O69qaO1X11iR/kuSbq+qhqnpFkh9K8oqq+kCSe/LEUsn/luTJVXVvkncn+d2szj+bje5+ePj30SS/ntXyq0+eWW42/DuXXf13y+vhJFfueN0Vw2OT1d2f7O5/6O4vJfnFPLGsbtK5VtWTspqg3tLdbx8ent24nivPuY7pNlInnrCwGpHMcD7ZzVznFHVifmO6bQ5ZI96Z1R9QN2ZVI34iyddmRt9/dWL688m5zHk+USc2P67rvFrWD3b3Zd39pO6+orvf1N0f6+4Xdfe3d/e13f3vhpe/L8mXknxPku/IaqOhO9YV26ZV1VdX1SVn7if5rqyWjt6R5OXDy16e5DfHifDI7ZbXHUl+ZDjS8twkf7tjWd4knXUu6PdlNa7JKtcbq+opVXV1kmuyWnmw9aqqkrwpyX3d/fodT81qXHfLc45juq3UiZUF1ohkZvPJXuY4p6gT8xvTbXSYGjGsAHhZVn8ovjTJ27Nq9KgT0zWr+WQ3c51P1ImRxrUPvhP0RVntPv9bB/2cw9y
y2iH7gayOxL5uHe8x1i3JN2W1K/aZowyvGx7/+qx2/f9Ikt9L8rSxYz2P3N6a1ZGSL2R1zuArdssrq93P3ziM8YeSnBg7/iPI9VeGXD44/LJetuP1rxtyvT/Ji8eO/xB5Pj+rJZIfTHL3cLt+buO6R56zG9O53OZaJ+ZcI4Y81ImZzSnqxPzGdC43dUKd2ObbUmrEELs6McK41vAG+6qqf5vkRJKv6e6XHOiTAAAAAFiriw/yoqq6Iqul8P8+yb/d7/XHjh3r48ePX1hkADN01113faq7Lx07jjGpEQC7UyfUCYC97FYnDtTcSfKGJD+Z5JKDvPj48eM5ffr0waMDWIiq+ouxYxibGgGwO3VCnQDYy251Yt8NlavqJUke7e679nndyao6XVWnH3tsblceBAAAANhOB1m587wkL62q65P8oyRfU1W/2t0/vPNF3X1rkluT5MSJE7O4Xj1MTd1SG3/PvtmvOytVdVGS00ketjcb7K02P10nSQ641SIAI1MnOKx9mzvd/dokr02SqvrnSV5zdmMHAJK8Ksl9Sb5m7ECAcxvjIEDiQAAArNu+p2UB56dq8zcYy46N939p7FgAAGBpDrqhcpKku+9McudaIgFgyt6QQ2y8DwAAHJ1DNXcA4Gw7N94fTt8912tOJjmZJFddddXmggMA4MCcvjtdTssC4EKd2Xj/40luT/LCqvrVnS/o7lu7+0R3n7j00kvHiBEAAGZLcweAC9Ldr+3uK7r7eJIbk/yBjfcBOFtVXVRVf1ZVvzV2LABzo7kDAABswpmrKgJwxDR3ADgy3X1nd79k7DgA2C6uqgiwXpo7AADAur0hq6sqfmnkOABmSXMHAABYm51XVdzjNSer6nRVnX7sscc2GB3APGjuAAAA6+SqigBrprkDAACsjasqAqyf5g4AAADAhF08dgAAAMAydPedSe4cOQyA2bFyBwAAAGDCNHcAAAAAJsxpWTAnp3rjb1mnkt782wIAADCwcgcAAABgwjR3AAAAACZMcwcAAABgwjR3AAAAACZMcwcAAABgwjR3AAAAACZMcwcAAABgwi4eOwBg+uqW2vh79s298fcEAADYRlbuAAAAAEyY5g4AAADAhDktCwBgKU6NdErrzeO8LQAshZU7AAAAABOmuQMAAAAwYZo7AAAAABOmuQMAAAAwYftuqFxVVyb55SRPT9JJbu3un1t3YAAAzEPdUqO8b9880gbSALBhB7la1heT/Hh3v7+qLklyV1W9q7vvXXNsG+M/HAAAAMBU7XtaVnc/0t3vH+5/Nsl9SS5fd2AATENVXVlV766qe6vqnqp61dgxAQDAkhxk5c7jqup4kmcnee9aogFgiqzwXBMrPAEAOIgDb6hcVU9N8rYkr+7uz5zj+ZNVdbqqTj/22GNHGSMAW8wKTwAAGNeBmjtV9aSsGjtv6e63n+s13X1rd5/o7hOXXnrpUcYIwERY4QnA2Zy+C7B+B7laViV5U5L7uvv16w8JgCnaa4VnVZ1McjJJrrrqqhGiA2BETt9dE6fvAmccZOXO85K8LMkLq+ru4Xb9muMCYEL2W+FpdSfAcjl9F2D99l25091/nGScVjQAW88KTwAOyum7AOtxqKtlzdapkZYz3jzO2wIcsTMrPD9UVXcPj/1Ud79jvJAA2DZO3wVYH80dAC6IFZ4A7Ocgp+8muTVJTpw4YSMZgEM68KXQAQAADsvpuwDrZ+UOAACwTk7fhamwZclkae4AAABr4/RdgPVzWhYAAADAhGnuAAAAAEyY5g4AAADAhGnuAAAAAEyYDZUBYD+uHAEAwBbT3AEAAABGU7eMc0G9vnmkA3hrsFXNnXKBRAAAAIBDsecOrMFYnWcAAACWZ6tW7gDAXsZrnM5nyS4A8zHWmQ+tLMLW0dwBYDrG2th4JP7TDgDjsnUIU6G5w+yNMyH7ywgAAIDN0NwBAABmYbTTdxe2shTYPjZUBgAAAJgwK3cAgEWznwIAMHWaOwAAwDw4PQp
YKKdlAQAAAEyY5g6zNtqmegAAALAhTssa0Vjn+LfVqgAAADAbmjsAAAAcmIPUsH00d9iYcYqACgAAoxtrk9ubx3lbANg0zR0AYCu4JDkAe1EnYHc2VAYAAACYMCt32IjVVaucIjVbIyy3r1POuwYAAEgO2Nypqhcl+bkkFyX5pe7+D2uNCoBJUSfmxbJ34KipE8Ce7M12wfZt7lTVRUnemOT/TPJQkvdV1R3dfe+6g2M9bGwMHCV1AoC9qBPAtprTld8OsufOc5I82N0f7e7PJ7k9yQ1HHwoAE6VOALAXdQJgzQ7S3Lk8ySd2fPzQ8BgAJOoEsKWqxrnxFdQJgDU7sg2Vq+pkkpPDh5+rqvvP48scS/Kpo4ppy8l1fpaSZ7IluW7gP9DryPMbj/jrTcIR1YhkS372NmQpuS4lz2Q5uS4lz2SXXC+wPqkT6sRBLSXXpeSZLCfXpeSZbLBOHKS583CSK3d8fMXw2Jfp7luT3HpeoQ2q6nR3n7iQrzEVcp2fpeSZLCfXpeR5BPatE0dRI5JljclScl1Knslycl1Knsmycr1A6sQaLCXXpeSZLCfXpeSZbDbXg5yW9b4k11TV1VX15CQ3JrljvWEBMCHqBAB7UScA1mzflTvd/cWqemWSd2Z16cLbuvuetUcGwCSoEwDsRZ0AWL8D7bnT3e9I8o41x5IcwVLMCZHr/Cwlz2Q5uS4lzwumTqzFUnJdSp7JcnJdSp7JsnK9IOrEWiwl16XkmSwn16XkmWww1+p1XGA9SVXdluQlSR7t7m87wOt/IMmpJJ3kA939r9YSGABbQZ0AYDdqBMDhrLO584Ikn0vyy/tNyFV1TZJfS/LC7v50VX1Ddz+6lsAA2ArqBAC7USMADucgGyqfl+7+wyR/vfOxqnpGVf1OVd1VVX9UVd8yPPWjSe5M8p6qejDJv15XXGOpqo9X1Yeq6u6qOj089rSqeldVfWT49+vGjvOwquq2qnq0qj6847Fz5lUrP19VD1bVB6vquvEiP7xdcj1VVQ8P43p3VV2/47nXDrneX1XfPU7Uh1dVV1bVu6vq3qq6p6peNTw+q3HdI8/Zjem2UieeMNcakagTc5xT1In5jek2Oo8a8cYk31mrS6j/96q6acMhr5U6Me35JFlOjUjUidHGtbvXdktyPMmHd3z8+0muGe5/Z5I/GO7/RpJPJzmd5L1JPprk2nXGtulbko8nOXbWYz+T5Kbh/k1JfnrsOM8jrxckue6scT5nXkmuT/LbSSrJc5O8d+z4jyDXU0lec47XXpvkA0mekuTqJH+e5KKxczhgnpcluW64f0mSB4Z8ZjWue+Q5uzHd5ps68Xjes6wRQ+zqxMzmFHVifmO6rbdD1oj/mOTvkvxZVqdzfUCdmMZtKXViKTViiF+dGGFc17Zy52xV9dQk/yzJf62qu5P85+GbkSTHknw+yT/N6tKIT0vyLzcV24huSPLm4f6bk3zveKGcnz7HUZXsntcNWS2t7e5+T5KvrarLMhG75LqbG5Lc3t1/390fS/JgkuesLbgj1N2PdPf7h/ufTXJfksszs3HdI8/dTHZMp0Kd+AqTrxGJOrGHyc4p6sSuJjumU7BPjbg4q+/1HyX5F0l+IauGzw0bD3Sz1ImJzCfJcmpEok7s8SlrHdeNNXeG9/qb7n7Wjtu3Ds/9ryT/o7u/MCT5cJJv3fUrTVMn+d1hGenJ4bGnd/cjw/2/SvL0cUI7crvldXmST+x43UPZ+4d/Kl45LB+8bcdy2FnkWlXHkzw7q5USsx3Xs/JMZjymW27JdWJJNSKZ8Xyyi9nOKerE/MZ0i+1VIx5K8uEkfznUiAeS/EPm9f1XJ1bm+Hs26/lEndjcuG6sudPdn0nysar6/uTx8+q+fXj6TzN03qvqWJJ/nOQzm4ptQ57f3dcleXGSH6vVJnGP69U6rfXsbj2iuea1wy8keUaSZyV5JMnPjhrNERqOkL0tyauH39/HzWlcz5HnbMd02y28TiyyRiT
zzm0w2zlFnZjfmG6zfWrEbyT5J8Pjx5I8M8ljY8S5RurEPM16PlEnNjuua2vuVNVbk/xJkm+uqoeq6hVJfijJK6rqA0nuyRNLJf9bkidX1b1J3p3kd7M6/2w2uvvh4d9Hk/x6VsuvPnlmudnw71x29d8tr4eTXLnjdVcMj01Wd3+yu/+hu7+U5BfzxLK6SedaVU/KaoJ6S3e/fXh4duN6rjznOqbbSJ14wsJqRDLD+WQ3c51T1In5jem2OWSNeGdWf0DdmFWN+IkkX5sZff/VienPJ+cy5/lEndj8uK7zalk/2N2XdfeTuvuK7n5Td3+su1/U3d/e3dd2978bXv6+JF9K8j1JviOrjYbuWFdsm1ZVX11Vl5y5n+S7slo6ekeSlw8ve3mS3xwnwiO3W153JPmR4UjLc5P87Y5leZN01rmg35fVuCarXG+sqqdU1dVJrslq5cHWq6pK8qYk93X363c8Natx3S3POY7ptlInVhZYI5KZzSd7meOcok7Mb0y30WFqxLAC4GVZ/aH40iRvz6rRo05M16zmk93MdT5RJ0Ya196CXab7iR2yH8jqSOzrxo7niHP7pqx2xT5zlOF1w+Nfn9Wu/x9J8ntJnjZ2rOeR21uzOlLyhazOGXzFbnlltfv5G4cx/lCSE2PHfwS5/sqQyweHX9bLdrz+dUOu9yd58djxHyLP52e1RPKDSe4ebtfPbVz3yHN2YzqX21zrxJxrxJCHOjGzOUWdmN+YzuWmTqgT23xbSo0YYlcnRhjXGt4AAAAAgAm6eB1f9NixY338+PF1fGmASbvrrrs+1d2Xjh3HmNQIgN2pE+oEwF52qxNrae4cP348p0+fXseXBpi0qvqLsWMYmxoBsDt1Qp0A2MtudWJjl0IHAAAA4Ohp7gAAAABM2FpOywLGUbfUKO/bN9uYHWAK1AmAaahxpuu43tJ0WbkDAAAAMGFW7gAAbNhYR2RzaqT3BWASrPCcLit3AAAAACZMcwcAAFi7qrqoqv6sqn5r7FgA5kZzBwAA2IRXJblv7CAA5khzBwAAWKuquiLJ9yT5pbFjAZgjzR1Yk6rN3wAAttQbkvxkki+NHAfALGnuAAAAa1NVL0nyaHfftcdrTlbV6ao6/dhjj20wOoB50NwB4EjYKBOAXTwvyUur6uNJbk/ywqr61Z0v6O5bu/tEd5+49NJLx4gRYNI0dwA4KjbKBOArdPdru/uK7j6e5MYkf9DdPzxyWACzorkDwAWzUSYAAIzn4rEDAGAW3pDVRpmXjBwHAFusu+9McufIYQDMjpU7AFwQG2UCAMC4NHcAuFA2ygQAgBEduLnjKigAnIuNMgEAYFyHWbnjKigAAAAAW+ZAzR1XQQHgILr7zu5+ydhxAADAkhx05c4bsroKypfWFwoAAAAAh7Vvc+cgV0EZXudKKAAAAAAbdpCVO/teBSVxJRQAAACAMezb3HEVFAAAAIDtdZirZQEAAACwZS4+zIu7+84kd64lEgAAAAAOzcodAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAnT3AEAAACYMM0dAAAAgAm7eOwAAADYkFM9zvvePM7bsh2q6sokv5zk6Uk6ya3d/XPjRgUwL5o7AADAOn0xyY939/ur6pIkd1XVu7r73rEDOyp1S43yvn3zSA1bYOs4LQuAC1JVV1bVu6vq3qq6p6peNXZMAGyP7n6ku98/3P9skvuSXD5uVADzYuUOABfKEdk1cUQWmJuqOp7k2UneO3IoALNi5Q4AF8QRWQAOoqqemuRtSV7d3Z8567mTVXW6qk4/9thj4wQIMGH7NncstwfgoByRBeBcqupJWTV23tLdbz/7+e6+tbtPdPeJSy+9dPMBAkzcQVbunFluf22S5yb5saq6dr1hATA1jsgCcC5VVUnelOS+7n792PEAzNG+e+509yN
JHhnuf7aqziy3t5fCBbKXAkfOJW4ZyUGOyCa5NUlOnDhh8gNYlucleVmSD1XV3cNjP9Xd7xgvJIB5OdSGypbbA3A2R2QB2Et3/3GScY6mAizEgTdU3mu5/fC8JfcAy3TmiOwLq+ru4Xb92EEBAMBSHGjlzn7L7RNL7gGWyhFZYD9OgQeA9dq3uWO5PQAAACyAPTwn6yCnZVluDwAAALClDnK1rPkvt9edBAAA4Cw177+EmZEDb6gMAAAAwPbR3AEAAACYMM0dAAAAgAk70KXQAWDR7M0GwBYaaz+YHqksArvT3AEuWN0yzv8s+mb/swAAgKnz98SFc1oWAAAAwIRp7gAAAABM2FadljXWOaMATMPS6oS9FAAAOIitau4AAACcr7H27Rht432AgeYOAAAwD5oswELZcwcAAABgwqzcYfaWtkcHAIejTgAAU6e5AwAAwIHZ8B+2j9OyAAAAACZMcwcAAABgwpyWNSLLGQEAAA7GHmmwO80dNsZkDAAAAEdPcwcA2AoOAszYqZGWDd88ztsCcEjqxAXT3AEAAAAWZ05bpRyouVNVL0ryc0kuSvJL3f0fjj4UYLJ02hdPnZgXK2iAo6ZOAKzXvlfLqqqLkrwxyYuTXJvkB6vq2nUHxvpUjXMD5kmdAGAv6gTA+h3kUujPSfJgd3+0uz+f5PYkN6w3LAAmRJ0AtpIDWltDnQBYs4M0dy5P8okdHz80PAYwKv9p3xrqBAB7UScA1uzINlSuqpNJTg4ffq6q7j+PL3MsyaeOKqYtJ9f5WUqeyXJyPWeeF9jg+cYL+uyJOqIakSznZy9ZTq5LyTNZTq5LyTNRJ46MOnFelpLrUvJMlpPrUvJMNlgnDtLceTjJlTs+vmJ47Mt0961Jbj2v0AZVdbq7T1zI15gKuc7PUvJMlpPrUvI8AvvWiaOoEcmyxmQpuS4lz2Q5uS4lz2RZuV4gdWINlpLrUvJMlpPrUvJMNpvrQU7Lel+Sa6rq6qp6cpIbk9yx3rAAmBB1AoC9qBMAa7bvyp3u/mJVvTLJO7O6dOFt3X3P2iMDYBLUCQD2ok4ArN9BVu6ku9/R3c/s7md0978/yOdU1W1V9WhVffiAr/+BJP9HVd1TVf/lIJ8zcRe87HRClpLrUvJMlpPrUvK8YOrEWizl528peSbLyXUpeSbLyvWCHLZOnE+NqKp7s6oTS6gRyXJ+/paSZ7KcXJeSZ7LBXKu71/OFq16Q5HNJfrm7v22f116T5NeSvLC7P11V39Ddj64lMAC2gjoBwG7UCIDDOdDKnfPR3X+Y5K93PlZVz6iq36mqu6rqj6rqW4anfjTJG7v708PnmowBZk6dAGA3agTA4aytubOLW5P8m+7+jiSvSfKfhsefmeS7q+rvqup/V9VtG45r7arq41X1oaq6u6pOD489rareVVUfGf79urHjPKxzLZndLa9a+fmqerCqPlhV140X+eHtkuupqnp4GNe7q+r6Hc+9dsj1/qr67nGiPryqurKq3l1V9w6nv7xqeHxW47pHnrMb04lZZJ2Ya41I1Ik5zinqxPzGdEL2qhHPHMbp76rqf1bVTaNFuQbqxLTnk2Q5NSJRJ0Yb1+5e2y3J8SQfHu4/NcnfJbl7x+2+4bnfymrZ5TOH2+eTfOc6Y9v0LcnHkxw767GfSXLTcP+mJD89dpznkdcLklx3Zpz3yivJ9Ul+O0kleW6S944d/xHkeirJa87x2muTfCDJU5JcneTPk1w0dg4HzPOyJNcN9y9J8sCQz6zGdY88Zzem23xTJx7/PsyyRgyxqxMzm1PUifmN6bbeDlkjfn34nr8gySeSfCjJtWPncITfC3Wipzuf7JHnLOcTdWKccd3kyp2vSvI33f2sHbdvHZ77QpKPdfcD3f1Akr9M8rINxjaWG5K8ebj/5iTfO14o56fPsWQ2u+d1Q1bnTXd3vyfJ11bVZRsJ9Ajskutubkhye3f/fXd/LMmDSZ6ztuCOUHc/0t3vH+5/Nsl9SS7
PzMZ1jzx3M9kxnRB14stNvkYk6sQeJjunqBO7muyYTsReNeKhJPckeXD4PXwgyf+T1ZjMmToxkfkkWU6NSNSJPT5lreO6seZOd38myceq6vuTx5deffvw9D1Zda9SVceSfF2Sf7Sp2Dakk/xurc4RPjk89vTufmS4/1dJnj5OaEdut7wuz+pIyhkPZe8f/ql45bB88LYdy2FnkWtVHU/y7CTvzYzH9aw8kxmP6TZbeJ1YUo1IZjyf7GK2c4o6Mb8x3Vb71IjfSPL8JJ8YasQzs6obc/r+qxMrc/w9m/V8ok5sblzX1typqrcm+ZMk31xVD1XVK5L8UJJXVNUHsppwz3TT707y97W6fOG7k9ye5H+vK7aRPL+7r0vy4iQ/VqsrADyuV+u01nPpshHNNa8dfiHJM5I8K8kjSX521GiOUFU9Ncnbkrx6+A/V4+Y0rufIc7Zjum3UiS+zyBqRzDu3wWznFHVifmO6TQ5ZI96Z5LNJ/q+sasRPZHUq75yoE/M06/lEndjsuF68ri/c3T+4y1MvOsdjDyf5n9393clqk6F1xTWW7n54+PfRqvr1rJZffbKqLuvuR4ZlZ3PZ2X+3vB5OcuWO110xPDZZ3f3JM/er6hezOuc7mXiuVfWkrCaot3T324eHZzeu58pzrmO6jdSJJyysRiQznE92M9c5RZ14/PnZjOm2OUyN6O6uqv87yamz6sRsvv/qxPTnk3OZ83yiTjz+/MbGddNXy9rN+5JcU1VXV9WTk9yY5I6RYzoyVfXVVXXJmftJvivJh7PK8eXDy16e5DfHifDI7ZbXHUl+ZFhG+9wkf7tjWd4knXUu6PdlNa7JKtcbq+opVXV1kmuS/Omm4zsfVVVJ3pTVJoWv3/HUrMZ1tzznOKYzMds6scAakcxsPtnLHOcUdWJ+YzoT6oQ6MTlznU/UiZHGtbdgl+l+YofsB7LaMfp1Y8dzxLl9U1a7Yp9ZQvq64fGvT/L7ST6S5PeSPG3sWM8jt7dmtdTsC1mdM/iK3fLKavfzNw5j/KEkJ8aO/why/ZUhlw8Ov6yX7Xj964Zc70/y4rHjP0Sez89qieQH88TVKK6f27jukefsxnQut7nWiTnXiCEPdWJmc4o6Mb8xnctNnVAntvm2lBoxxK5OjDCuNbzBkTp27FgfP378yL8uwNTdddddn+ruS8eOY0xqBMDu1Al1AmAvu9WJtey5c/z48Zw+fXodXxpg0qrqL8aOYWxqBMDu1Al1AmAvu9WJbdlzBwAAAIDzcODmTlVdVFV/VlW/tf+rAQAAANiEw5yW9aok9yX5mjXFArNStfn3XMMWWnBgVXVRktNJHu7ul4wdD2yzMWpEok4wLnUCDk6d4LAOtHKnqq5I8j1Jfmm94QAwYWcOAgDAuagTAGty0NOy3pDkJ5N8aX2hADBVDgIAsBd1AmC99j0tq6pekuTR7r6rqv75Hq87meRkklx11VVHFR8A0/CGrA4CXDJyHABspzdEnYCtV7eMcz5Y3+x8sAt1kJU7z0vy0qr6eJLbk7ywqn717Bd1963dfaK7T1x66Vdcch2Amdp5EGCP15ysqtNVdfqxxx7bYHQAjE2dAFi/fZs73f3a7r6iu48nuTHJH3T3D689MgCmYt+DAA4AACyaOgGwZge+FDoAnIuDAADsRZ0AWL/DXAo93X1nkjvXEgkAAAAAh3ao5g6w3WyAxtgcBABgL+oEwHo4LQsAAABgwjR3AAAAACbMaVkAAKyV04YBYL2s3AEAAACYMCt3AAAWYqwVNADAelm5AwAAADBhmjsAAAAAE6a5AwAAADBhmjsAAAAAE6a5AwAAADBhrpYFXLCxrr7SN/co7wsAALBNrNwBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmLCLxw4AOEKneqT3rXHeF4DDUScA2EJ1yzh1om8eqS6ugZU7AAAAABOmuQMAAAAwYZo7AAAAABOmuQM
AAAAwYZo7AAAAABOmuQMAAAAwYZo7AAAAABO2b3Onqq6sqndX1b1VdU9VvWoTgQEwDeoEAHtRJwDW7+IDvOaLSX68u99fVZckuauq3tXd9645NgCmYfZ1om6pUd63b+5R3hfgiKkTa6JOAGfs29zp7keSPDLc/2xV3Zfk8iQm4wtkMgbmYAl1AoDzp04ArN+h9typquNJnp3kvWuJBoBJUycA2Is6AbAeBzktK0lSVU9N8rYkr+7uz5zj+ZNJTibJVVdddWQBAjANe9UJNQIAdQIm4NRIZ5ecGudsmjk50MqdqnpSVhPxW7r77ed6TXff2t0nuvvEpZdeepQxArDl9qsTagTAsqkTAOt1kKtlVZI3Jbmvu1+//pAAmBJ1AoC9qBMA63eQlTvPS/KyJC+sqruH2/VrjguA6VAnANiLOgGwZge5WtYfJ3ECHADnpE4AsBd1AmD9DnW1LAAAAAC2i+YOAAAAwIRp7gAAAABM2L577izCqR7nfW8e520BAACA+bByBwAAAGDCNHcAAAAAJkxzBwAAAGDCNHcAAAAAJkxzBwAAAGDCtupqWXVLjfTOI10tCwD2UCOVxVYWASZBnQDO2KrmzmiXJAeAvahPAABsse1q7gDAHqzwBACAr6S5AwAAAOcw1qlvcFiaOwBMh9OjANjDeCs8AcaluQMAAMCB2cgZto/mDrNnKSUAAGtlZSkwMs2dEel4AwDAEdJkARbqq8YOAAAAAIDzZ+UOALBoTt8FAKbOyh0AAACACbNyh41xZBQAADhf/p6A3Vm5AwAAADBhVu4AAF/GkVEAgGmxcgcAAABgwqzcAQBgvU71OO978zhvC8AhqRMXTHNngSy358iZjAEAgIkZ62/jXsOfTwc6LauqXlRV91fVg1V109GHAcCUqRMA7EWdAFivfZs7VXVRkjcmeXGSa5P8YFVdu+7AAJgGdQKAvagTAOt3kJU7z0nyYHd/tLs/n+T2JDesNywAJkSdALZS1Tg3voI6AbBmB2nuXJ7kEzs+fmh4DGBU/tO+NdQJAPaiTgCs2ZFtqFxVJ5OcHD78XFXdfx5f5liSTx1VTFtOrvOzlDyT5eR6zjwvsMHzjRf02RN1RDUiWc7PXrKcXJeSZ7KcXJeSZ6JOHBl14rwsJdel5JksJ9el5JlssE4cpLnzcJIrd3x8xfDYl+nuW5Pcel6hDarqdHefuJCvMRVynZ+l5JksJ9el5HkE9q0TR1EjkmWNyVJyXUqeyXJyXUqeybJyvUDqxBosJdel5JksJ9el5JlsNteDnJb1viTXVNXVVfXkJDcmuWO9YQEwIeoEAHtRJwDWbN+VO939xap6ZZJ3JrkoyW3dfc/aIwNgEtQJAPaiTgCs34H23OnudyR5x5pjSY5gKeaEyHV+lpJnspxcl5LnBVMn1mIpuS4lz2Q5uS4lz2RZuV4QdWItlpLrUvJMlpPrUvJMNphrdfem3gsAAACAI3aQPXcAAAAA2FJb09ypqhdV1f1V9WBV3TR2PEetqj5eVR+qqrur6vTw2NOq6l1V9ZHh368bO87DqqrbqurRqvrwjsfOmVet/Pwwxh+squvGi/zwdsn1VFU9PIzr3VV1/Y7nXjvken9Vffc4UR9eVV1ZVe+uqnur6p6qetXw+KzGdY88ZzemczHnOjHXGpGoE3OcU9SJ+Y3pXKgT6sQ2W0qNSNSJ0ca1u0e/ZbWx2p8n+aYkT07ygSTXjh3XEef48STHznrsZ5LcNNy/KclPjx3neeT1giTXJfnwfnkluT7JbyepJM9N8t6x4z+CXE8lec05Xnvt8HP8lCRXDz/fF42dwwHzvCzJdcP9S5I8MOQzq3HdI8/ZjekcbnOvE3OtEUPs6sTM5hR1Yn5jOoebOqFObPttKTViiF+dGGFct2XlznOSPNjdH+3uzye5PckNI8e0CTckefNw/81Jvne8UM5Pd/9hkr8+6+Hd8rohyS/3ynuSfG1VXbaRQI/ALrnu5oYkt3f333f3x5I
8mNXP+dbr7ke6+/3D/c8muS/J5ZnZuO6R524mO6YzscQ6MfkakagTe5jsnKJO7GqyYzoT6oQ6sdWWUiMSdWKPT1nruG5Lc+fyJJ/Y8fFD2fubMkWd5Her6q6qOjk89vTufmS4/1dJnj5OaEdut7zmOs6vHJYP3rZjOewscq2q40meneS9mfG4npVnMuMxnbC5f/+XVCOSGc8nu5jtnKJOzG9MJ2zu3391YmWO4zzr+USd2Ny4bktzZwme393XJXlxkh+rqhfsfLJX67Rmd+myuea1wy8keUaSZyV5JMnPjhrNEaqqpyZ5W5JXd/dndj43p3E9R56zHVO22iJrRDLv3AaznVPUifmNKVtNnZinWc8n6sRmx3VbmjsPJ7lyx8dXDI/NRnc/PPz7aJJfz2r51SfPLDcb/n10vAiP1G55zW6cu/uT3f0P3f2lJL+YJ5bVTTrXqnpSVhPUW7r77cPDsxvXc+U51zGdgVl//xdWI5IZzie7meucok7Mb0xnYNbff3Vi+vPJucx5PlEnNj+u29LceV+Sa6rq6qp6cpIbk9wxckxHpqq+uqouOXM/yXcl+XBWOb58eNnLk/zmOBEeud3yuiPJjwy7oT83yd/uWJY3SWedC/p9WY1rssr1xqp6SlVdneSaJH+66fjOR1VVkjclua+7X7/jqVmN6255znFMZ2K2dWKBNSKZ2XyylznOKerE/MZ0JtQJdWJy5jqfqBMjjWtvwS7T/cQO2Q9ktWP068aO54hz+6asdsX+QJJ7zuSX5OuT/H6SjyT5vSRPGzvW88jtrVktNftCVucMvmK3vLLa/fyNwxh/KMmJseM/glx/Zcjlg8Mv62U7Xv+6Idf7k7x47PgPkefzs1oi+cEkdw+36+c2rnvkObsxncttrnVizjViyEOdmNmcok7Mb0znclMn1Iltvi2lRgyxqxMjjGsNbwAAAADABG3LaVkAAAAAnAfNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ09wBAAAAmDDNHQAAAIAJ+/8DAB3N4yUyTBgAAAAASUVORK5CYII=\n"
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "\n",
+    "fig, ax = plt.subplots(6, 3, figsize = (20, 10))\n",
+    "\n",
+    "for j in range(6):\n",
+    "    for i in range(3):\n",
+    "        ax[j, i].hist(polarized[j][:, :, i].reshape(-1), density = False, color = \"green\")\n",
+    "        ax[j, i].hist(non_polarized[j][:, :, i].reshape(-1), density = False, color = \"blue\")\n",
+    "\n",
+    "plt.show()"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "outputs": [
+    {
+     "data": {
+      "text/plain": "<Figure size 720x432 with 1 Axes>",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlMAAAFlCAYAAADPim3FAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAToklEQVR4nO3db4xld33f8c83XptEAWGCR8SyvaxbrFYkKsbdukZUEQKRGIpwqzqVURscRLQthTZRUzU4D8yftg+o1JASEJYLDoaSgOX86RaZUksgJXmAYe3YBtugLoTIaznxYoOJSwJa8u2De0zHw4zn7v7O7N7Zfb2kqz333N/e+9ufzozfvv9OdXcAADgxP3SqJwAAsJuJKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAF7TtUDn3feeb1v375T9fAAAEu78847v97da5vddspiat++fTl06NCpengAgKVV1Z9udZuX+QAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAHbxlRV/XBVfa6q7qmq+6rqHZuMeUZVfbyqDlfVHVW1b0dmCwCwYpZ5Zuo7SV7e3S9KcmmSK6vqig1j3pjkG939giTvTvKuWWcJALCito2pXnhiunr2dOkNw65KcvO0fWuSV1RVzTZLAIAVtWeZQVV1VpI7k7wgyfu6+44NQy5I8mCSdPexqno8yXOTfH3D/RxIciBJ9u7dOzZz2CG74X8DeuP/zgCcRPWO1fpF2W87tb8Ul3oDend/r7svTXJhksur6idP5MG6+8bu3t/d+9fW1k7kLgAAVspxfZqvu7+Z5DNJrtxw00NJLkqSqtqT5NlJHp1hfgAAK22ZT/OtVdW50/aPJHllki9tGHYwybXT9tVJPt3thQgA4PS3zHumzk9y8/S+qR9Kckt3f6Kq3pnkUHcfTPLBJB+pqsNJHktyzY7NGABghWwbU919b5IXb7L/+nXbf5XkZ+edGgDA6vMN6AAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBg25iqqouq6jNVdX9V3VdVv7jJmJdV1eNVdfd0uX5npgsAsFr2LDHmWJJf7u67qupZSe6sqtu7+/4N4/6wu18z/xQBAFbXts9MdffD3X3XtP0XSR5IcsFOTwwAYDc4rvdMVdW+JC9OcscmN7+kqu6pqk9W1U9s8fcPVNWhqjp09OjR458tAMCKWTqmquqZSX4nyS9197c23HxXkud394uS/EaS39/sPrr7xu7e393719bWTnDKAACrY6mYqqqzswipj3b37268vbu/1d1PTNu3JTm7qs6bdaYAACtomU/zVZIPJnmgu39tizE/Po1LVV0+3e+jc04UAGAVLfNpvpcm+bkkX6iqu6d9v5pkb5J09w1Jrk7ypqo6luQvk1zT3T3/dAEAVsu2MdXdf5Skthnz3iTvnWtSAAC7hW9ABwAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGbBtTVXVRVX2mqu6vqvu
q6hc3GVNV9Z6qOlxV91bVZTszXQCA1bJniTHHkvxyd99VVc9KcmdV3d7d968b86okl0yXv5/k/dOfAACntW2fmeruh7v7rmn7L5I8kOSCDcOuSvLhXvhsknOr6vzZZwsAsGKO6z1TVbUvyYuT3LHhpguSPLju+pH8YHABAJx2lnmZL0lSVc9M8jtJfqm7v3UiD1ZVB5IcSJK9e/eeyF2cdqrmvb/uee8vmX+OjNsNxw2cCvUOv7A4+ZZ6Zqqqzs4ipD7a3b+7yZCHkly07vqF076n6O4bu3t/d+9fW1s7kfkCAKyUZT7NV0k+mOSB7v61LYYdTPL66VN9VyR5vLsfnnGeAAAraZmX+V6a5OeSfKGq7p72/WqSvUnS3TckuS3Jq5McTvLtJG+YfaYAACto25jq7j9K8rQvQnd3J3nzXJMCANgtfAM6AMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADNg2pqrqpqp6pKq+uMXtL6uqx6vq7uly/fzTBABYTXuWGPOhJO9N8uGnGfOH3f2aWWYEALCLbPvMVHf/QZLHTsJcAAB2nbneM/WSqrqnqj5ZVT+x1aCqOlBVh6rq0NGjR2d6aACAU2eOmLoryfO7+0VJfiPJ7281sLtv7O793b1/bW1thocGADi1hmOqu7/V3U9M27clObuqzhueGQDALjAcU1X141VV0/bl030+Onq/AAC7wbaf5quq307ysiTnVdWRJG9LcnaSdPcNSa5O8qaqOpbkL5Nc0929YzMGAFgh28ZUd79um9vfm8VXJwAAnHF8AzoAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAM2Damquqmqnqkqr64xe1VVe+pqsNVdW9VXTb/NAEAVtMyz0x9KMmVT3P7q5JcMl0OJHn/+LQAAHaHbWOqu/8gyWNPM+SqJB/uhc8mObeqzp9rggAAq2zPDPdxQZIH110/Mu17eOPAqjqQxbNX2bt37wwPvb2qee+ve977m9vc/17g9FXvmOcXRr9tnl+Mc80HTraT+gb07r6xu/d39/61tbWT+dAAADtijph6KMlF665fOO0DADjtzRFTB5O8fvpU3xVJHu/uH3iJDwDgdLTte6aq6reTvCzJeVV1JMnbkpydJN19Q5Lbkrw6yeEk307yhp2aLADAqtk2prr7ddvc3knePNuMAAB2Ed+ADgAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAOWiqmqurKqvlxVh6vqrZvc/vNVdbSq7p4uvzD/VAEAVs+e7QZU1VlJ3pfklUmOJPl8VR3s7vs
3DP14d79lB+YIALCylnlm6vIkh7v7q9393SQfS3LVzk4LAGB3WCamLkjy4LrrR6Z9G/2Tqrq3qm6tqos2u6OqOlBVh6rq0NGjR09gugAAq2WuN6D/zyT7uvvvJLk9yc2bDeruG7t7f3fvX1tbm+mhAQBOnWVi6qEk659punDa933d/Wh3f2e6+oEkf3ee6QEArLZlYurzSS6pqour6pwk1yQ5uH5AVZ2/7uprkzww3xQBAFbXtp/m6+5jVfWWJJ9KclaSm7r7vqp6Z5JD3X0wyb+pqtcmOZbksSQ/v4NzBgBYGdvGVJJ0921Jbtuw7/p129cluW7eqQEArD7fgA4AMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADFgqpqrqyqr6clUdrqq3bnL7M6rq49Ptd1TVvtlnCgCwgraNqao6K8n7krwqyQuTvK6qXrhh2BuTfKO7X5Dk3UneNfdEAQBW0TLPTF2e5HB3f7W7v5vkY0mu2jDmqiQ3T9u3JnlFVdV80wQAWE3LxNQFSR5cd/3ItG/TMd19LMnjSZ47xwQBAFbZnpP5YFV1IMmB6eoTVfVokq+fzDmM2kXPt52XXba2u8xptb4rdlyfVmu7gmZf33r7ah1Ap5Bjd2dtub4n6Rh8/lY3LBNTDyW5aN31C6d9m405UlV7kjw7yaMb76i7b0xy45PXq+pQd+9fYg4cJ2u7s6zvzrG2O8v67hxru7NWeX2XeZnv80kuqaqLq+qcJNckObhhzMEk107bVyf5dHf3fNMEAFhN2z4z1d3HquotST6V5KwkN3X3fVX1ziSHuvtgkg8m+UhVHU7yWBbBBQBw2lvqPVPdfVuS2zbsu37d9l8l+dkTePwbtx/CCbK2O8v67hxru7Os786xtjtrZde3vBoHAHDinE4GAGDArDFVVT9cVZ+rqnuq6r6qese0/+LpNDOHp9POnDPt3/I0NFV13bT/y1X1M3POczd6mrX9UFX9SVXdPV0unfZXVb1nWsN7q+qydfd1bVX9n+ly7RYPeUaqqrOq6o+r6hPTdcfuTDZZW8fuTKrqa1X1hWkdD037fqyqbp/W6vaqes603/oehy3W9u1V9dC6Y/fV68Zv+vNf25yW7UxVVedW1a1V9aWqeqCqXrIrj93unu2SpJI8c9o+O8kdSa5IckuSa6b9NyR507T9r5LcMG1fk+Tj0/YLk9yT5BlJLk7ylSRnzTnX3XZ5mrX9UJKrNxn/6iSfnP7eFUnumPb/WJKvTn8+Z9p+zqn+963KJcm/TfJbST4xXXfs7tzaOnbnW9uvJTlvw77/nOSt0/Zbk7zL+s62tm9P8u82Gbvpz/90+UqSv5HknGnMC0/1v20VLlmcPeUXpu1zkpy7G4/dWZ+Z6oUnpqtnT5dO8vIsTjPz5ML9o2l7q9PQXJXkY939ne7+kySHszitzRnradZ2K1cl+fD09z6b5NyqOj/JzyS5vbsf6+5vJLk9yZU7OffdoqouTPIPk3xgul5x7M5i49puw7E7j/XH6MZj1/rujK1+/pc5LdsZp6qeneSnsvhGgHT3d7v7m9mFx+7s75mansq/O8kjWfyDvpLkm704zUzy1NPRbHUammVOYXPG2bi23X3HdNN/mp7yfHdVPWPat9UaWtut/XqSf5/kr6frz41jdy6/nqeu7ZMcu/PoJP+7qu6sxZkmkuR53f3wtP1nSZ43bVvf47PZ2ibJW6Zj96YnX4aKtT1eFyc5muQ3p7cAfKCqfjS78NidPaa6+3vdfWkW35R+eZK
/PfdjnKk2rm1V/WSS67JY47+XxVOcv3LqZrh7VdVrkjzS3Xee6rmcbp5mbR278/kH3X1ZklcleXNV/dT6G3vxWoiPbp+Yzdb2/Un+ZpJLkzyc5L+cuuntanuSXJbk/d394iT/N4uX9b5vtxy7O/Zpvumpus8keUkWT8U9+Z1W609H8/1T1dRTT0OzzClszljr1vbK7n54esrzO0l+M///JaWt1tDabu6lSV5bVV/L4in4lyf5r3HszuEH1raq/rtjdz7d/dD05yNJfi+Ltfzz6SWQTH8+Mg23vsdhs7Xt7j+f/uf2r5P8tzh2T9SRJEfWvcpyaxZxteuO3bk/zbdWVedO2z+S5JVJHsjiP/xXT8OuTfI/pu2tTkNzMMk1tfjE1MVJLknyuTnnuttssbZfWnfAVRavK39x+isHk7x++vTDFUken542/VSSn66q50xPTf/0tO+M1t3XdfeF3b0vizeUf7q7/1kcu8O2WNt/7tidR1X9aFU968ntLNbli3nqMbrx2LW+S9hqbZ88dif/OE89djf7+V/mtGxnnO7+syQPVtXfmna9Isn92YXH7lLfgH4czk9yc1WdlUWo3dLdn6iq+5N8rKr+Y5I/zvRms2xxGppenK7mliwW9ViSN3f392ae626z1dp+uqrWsvh0w91J/uU0/rYsPvlwOMm3k7whSbr7sar6D1n8cCfJO7v7sZP3z9h1fiWO3Z3yUcfuLJ6X5PcWTZo9SX6ru/9XVX0+yS1V9cYkf5rkn07jre/ytlrbj9Tiqzw6i0/7/Yvk6X/+a5PTsp3kf8uq+tdZ/C44J4tP4b0h03/jdtOx6xvQAQAG+AZ0AIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAG/D9myjn6CDJd1wAAAABJRU5ErkJggg==\n"
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "\n",
+    "pol = []\n",
+    "non_pol = []\n",
+    "for j in range(6):\n",
+    "    for i in range(3):\n",
+    "        pol.append(polarized[j][:,:, i].var())\n",
+    "        non_pol.append(non_polarized[j][:,:, i].var())\n",
+    "\n",
+    "fig, ax = plt.subplots(1,1, figsize = (10, 6))\n",
+    "ax.hist(pol, color = \"green\")\n",
+    "ax.hist(non_pol, color = \"blue\")\n",
+    "plt.show()"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[6039.948706249593, 5894.693889706704, 5506.752148360592, 5898.551808247116, 5818.329513453474, 5495.461078784447, 6014.040060886491, 5980.951874902513, 5653.274391287833, 5648.983016042914, 5548.381099502415, 5257.680483140387, 5353.952319166475, 5323.796165140175, 5016.745985959243, 5809.600562122754, 5851.178305706107, 5556.992416721962]\n",
+      "[4223.73249762943, 3863.3349727941827, 3142.630990175311, 4060.9031071145787, 3809.180706647902, 3374.0058052150257, 3872.650070489109, 3772.7925729848907, 3446.6755561809537, 4299.032363256617, 4143.829968257108, 3705.2661585803226, 4127.691935971295, 4014.4101585219423, 3577.012812597752, 4154.935083286517, 4009.442123252845, 3651.851893411681]\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(pol)\n",
+    "print(non_pol)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "markdown",
+   "source": [],
+   "metadata": {
+    "collapsed": false
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "outputs": [
+    {
+     "data": {
+      "text/plain": "<Figure size 720x432 with 1 Axes>",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkkAAAFlCAYAAAD/BnzkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAQ80lEQVR4nO3de6yteV3f8c/XOQOoUC7ODiEO48HW0BBTYdwSjIREjDjQRtqEP8akFS/JSawaSDQthETgj/7RJrWXlGhGRbBS0aJEYlSkZQwxKYPn4IAzDMhwMQ5B5yBy8w8o+O0f6zlxz+l3n732zF57rX14vZKV/axnPXvt3/rtZ+3zPs+6VXcHAICH+pptDwAAYBeJJACAgUgCABiIJACAgUgCABiIJACAwblNXOlNN93U58+f38RVAwCcqEuXLn2qu/euXr+RSDp//nwuXry4iasGADhRVfXn03oPtwEADEQSAMBAJAEADEQSAMBAJAEADEQSAMBAJAEADEQSAMBAJAEADEQSAMBgrUiqqidU1Vuq6oNVdV9VfeemBwYAsE3rfnbbf0ny+939kqp6VJKv2+CYAAC27shIqqrHJ3lekh9Kku7+UpIvbXZYAADbtc6RpKcluZzkl6vq25JcSvKy7v7bgxtV1YUkF5LklltuOelxXpfqtfWIvr9f3Sc0EoBZPbI/U/+f9meLM2Sd5ySdS3Jrkp/r7mcl+dskr7h6o+6+o7v3u3t/b2/vhIcJAHC61omkB5I80N13LeffklU0AQBct46MpO7+yyR/UVVPX1Z9T5IPbHRUAABbtu6r234yyZuWV7Z9NMkPb25IAADbt1YkdffdSfY3OxQAgN3hHbcBAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAYiCQBgIJIAAAbn1tmoqj6e5PNJvpLky929v8lBAQBs21qRtPju7v7UxkYCALBDPNwGADBYN5I6yR9U1aWqujBtUFUXqupiVV28fPnyyY0QAGAL1o2k53b3rUlemOTHq+p5V2/Q3Xd093537+/t7Z3oIAEATttakdTdn1i+PpjkrUmevclBAQBs25GRVFVfX1WPu7Kc5AVJ7tn0wAAAtmmdV7c9Oclbq+rK9v+ju39/o6MCANiyIyOpuz+a5NtOYSwAADvDWwAAAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAQCQBAAxEEgDAYO1IqqobqupPqup3NjkgAIBdcJwjSS9Lct+mBgIAsEvWiqSqujnJP03yi5sdDgDAblj3SNJ/TvJvkvzd5oYCALA7joykqvpnSR7s7ktHbHehqi5W1cXLly+f2AABALZhnSNJ35Xk+6vq40nenOT5VfWrV2/U3Xd093537+/t7Z3wMAEATteRkdTdr+zum7v7fJLbk7yzu//lxkcGALBF3icJAGBw7jgbd/cfJvnDjYwEAGCHOJIEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADA
QSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADA4MpKq6jFV9Z6qel9V3VtVrz2NgQEAbNO5Nbb5YpLnd/cXqurGJH9UVb/X3e/e8NgAALbmyEjq7k7yheXsjcupNzkoAIBtW+dIUqrqhiSXkvyjJK/r7ruGbS4kuZAkt9xyy0mOcR7Ta+sRfX+/+pF33iMdAyczh4/0d7kL+xJf3cqfkuvWSf9ue8f/3Fxvt3etJ25391e6+5lJbk7y7Kr61mGbO7p7v7v39/b2TniYAACn61ivbuvuzyS5M8ltGxkNAMCOWOfVbXtV9YRl+WuTfG+SD254XAAAW7XOc5KekuSNy/OSvibJb3T372x2WAAA27XOq9ven+RZpzAWAICd4R23AQAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGR0ZSVT21qu6sqg9U1b1V9bLTGBgAwDadW2ObLyf5qe5+b1U9LsmlqnpHd39gw2MDANiaI48kdfcnu/u9y/Lnk9yX5Bs3PTAAgG061nOSqup8kmcluWu47EJVXayqi5cvXz6h4QEAbMfakVRVj03ym0le3t2fu/ry7r6ju/e7e39vb+8kxwgAcOrWiqSqujGrQHpTd//WZocEALB967y6rZL8UpL7uvtnNz8kAIDtW+dI0ncl+VdJnl9Vdy+nF214XAAAW3XkWwB09x8lqVMYCwDAzvCO2wAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAA5EEADAQSQAAgyMjqapeX1UPVtU9pzEgAIBdsM6RpDckuW3D4wAA2ClHRlJ3vyvJp09hLAAAO+PEnpNUVReq6mJVXbx8+fJJXS0AwFacWCR19x3dvd/d+3t7eyd1tQAAW+HVbQAAA5EEADBY5y0Afi3J/0ny9Kp6oKp+dPPDAgDYrnNHbdDdP3AaAwEA2CUebgMAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAICBSAIAGIgkAIDBWpFUVbdV1Yeq6v6qesWmBwUAsG1HRlJV3ZDkdUlemOQZSX6gqp6x6YEBAGzTOkeSnp3k/u7+aHd/Kcmbk7x4s8MCANiudSLpG5P8xYHzDyzrAACuW+dO6oqq6kKSC8vZL1TVh07qujehXlMneXU3JfnUSV7hOk74NpymE52vbc/DKfz8rexfZ5w5O55Tm686s3+2HmJr+9cZnb+HPV+neHu/aVq5TiR9IslTD5y/eVn3EN19R5I7HtbQzriqutjd+9sex1lhvo7HfB2fOTse83U85ut4zvJ8rfNw2x8
n+ZaqelpVPSrJ7UnettlhAQBs15FHkrr7y1X1E0nenuSGJK/v7ns3PjIAgC1a6zlJ3f27SX53w2M5y74qH2Z8BMzX8Ziv4zNnx2O+jsd8Hc+Zna/q7m2PAQBg5/hYEgCAgUg6oKo+XlV/WlV3V9XFZd2TquodVfXh5esTl/VVVf91+aiW91fVrQeu56XL9h+uqpceWP/ty/Xfv3zvmXsxZ1W9vqoerKp7Dqzb+Bwd9jN23SHz9Zqq+sSyn91dVS86cNkrl9v+oar6vgPrx48GWl5Qcdey/teXF1ekqh69nL9/ufz8Kd3kh62qnlpVd1bVB6rq3qp62bLe/nWIa8yZfWxQVY+pqvdU1fuW+Xrtsv7Yt/Gk5nGXXWO+3lBVHzuwfz1zWX/93Se722k5Jfl4kpuuWvcfkrxiWX5Fkn+/LL8oye8lqSTPSXLXsv5JST66fH3isvzE5bL3LNvW8r0v3PZtfhhz9Lwktya55zTn6LCfseunQ+brNUl+etj2GUnel+TRSZ6W5CNZvVjihmX5m5M8atnmGcv3/EaS25fln0/yY8vyv07y88vy7Ul+fdtzscZcPSXJrcvy45L82TIn9q/jz5l9bJ6vSvLYZfnGJHct+8OxbuNJzuMun64xX29I8pJh++vuPulI0tFenOSNy/Ibk/zzA+t/pVfeneQJVfWUJN+X5B3d/enu/psk70hy23LZP+jud/fqt/4rB67rzOjudyX59FWrT2OODvsZO+2Q+TrMi5O8ubu/2N0fS3J/Vh8LNH400PI/rucnecvy/VfP/ZX5ekuS77nyP7Rd1d2f7O73LsufT3JfVu/ub/86xDXm7DBf7ftYd/cXlrM3LqfO8W/jSc7jzrrGfB3murtPiqSH6iR/UFWXavUO4kny5O7+5LL8l0mevCwf9nEt11r/wLD+enAac3TYzzirfmI5HP36A4eRjztf35DkM9395avWP+S6lss/u2x/JiwPazwrq/+52r/WcNWcJfaxUVXdUFV3J3kwq3+sP5Lj38aTnMeddvV8dfeV/evfLfvXf6qqRy/rrrv7pEh6qOd2961JXpjkx6vqeQcvXErXywGv4TTm6Dr4Pfxckn+Y5JlJPpnkP251NDumqh6b5DeTvLy7P3fwMvvXbJgz+9ghuvsr3f3MrD494tlJ/vF2R7Tbrp6vqvrWJK/Mat6+I6uH0P7thsewtfukSDqguz+xfH0wyVuzugP91XJIMMvXB5fND/u4lmutv3lYfz04jTk67GecOd39V8sfnr9L8gtZ7WfJ8efrr7M6nH3uqvUPua7l8scv2++0qroxq3/s39Tdv7Wstn9dwzRn9rGjdfdnktyZ5Dtz/Nt4kvN4JhyYr9uWh3m7u7+Y5Jfz8Pevnb9PiqRFVX19VT3uynKSFyS5J6uPYLnyTPyXJvntZfltSX5weTb/c5J8djk0+PYkL6iqJy6HuF+Q5O3LZZ+rqucsj0//4IHrOutOY44O+xlnzpU7/uJfZLWfJavbeHutXlHztCTfktWTGsePBlr+d3Vnkpcs33/13F+Zr5ckeeey/c5afue/lOS+7v7ZAxfZvw5x2JzZx2ZVtVdVT1iWvzbJ92b1PK7j3saTnMeddch8ffBAvFRWzxU6uH9dX/fJ3oFn0O/CKatXI7xvOd2b5FXL+m9I8r+TfDjJ/0rypP77Z/2/LqvHs/80yf6B6/qRrJ7Id3+SHz6wfj+rnekjSf5bljfzPEunJL+W1eH7/5vV48c/ehpzdNjP2PXTIfP135f5eH9WfwiecmD7Vy23/UM58OrHrF418mfLZa+6ar99zzKP/zPJo5f1j1nO379c/s3bnos15uq5WR1Sf3+Su5fTi+xfD2vO7GPzfP2TJH+yzMs9SX7m4d7Gk5rHXT5dY77euexf9yT51fz9K+Cuu/ukd9wGABh4uA0AYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAGIgkAYCCSAAAG/w8g/cpoYoLvXgAAAABJRU5ErkJ
ggg==\n"
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1317075\n",
+      "5015553\n"
+     ]
+    }
+   ],
+   "source": [
+    "pol_ = []\n",
+    "non_pol_ = []\n",
+    "for j in range(6):\n",
+    "    for i in range(3):\n",
+    "        pol_.append(np.sum(polarized[j]>240))\n",
+    "        non_pol_.append(np.sum(non_polarized[j] > 240))\n",
+    "\n",
+    "fig, ax = plt.subplots(1,1,figsize = (10,6))\n",
+    "ax.hist(pol_, color = \"green\")\n",
+    "ax.hist(non_pol_, color = \"blue\")\n",
+    "plt.show()\n",
+    "print(np.sum(pol_))\n",
+    "print(np.sum(non_pol_))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 39,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(1506, 1506, 3)\n",
+      "(1506, 1506, 3)\n",
+      "(1506, 1506, 3)\n",
+      "(1492, 1492, 3)\n",
+      "(1492, 1492, 3)\n",
+      "(4032, 3024, 3)\n",
+      "(4032, 3024, 3)\n",
+      "(4032, 3024, 3)\n",
+      "(4032, 3024, 3)\n",
+      "Prediction for variance method [False, False, False, False, True, True, True, True, True]\n",
+      "prediction for point method  [True, True, True, False, False, True, False, True, False]\n",
+      "['image (1).png', 'image (2).png', 'image (3).png', 'image (4).png', 'image (5).png', 'IMG_2062.jpeg', 'IMG_2063.jpeg', 'IMG_2057.jpeg', 'IMG_2059.jpeg']\n"
+     ]
+    }
+   ],
+   "source": [
+    "\n",
+    "\n",
+    "def decide_var(img):\n",
+    "    var = np.zeros((3, ))\n",
+    "    for i in range(img.shape[-1]):\n",
+    "        var[i] = (img[:, :, i].var())\n",
+    "\n",
+    "    if any(var > 3000):\n",
+    "        return True\n",
+    "    else:\n",
+    "        return False\n",
+    "\n",
+    "def decide_point(img):\n",
+    "\n",
+    "    if np.sum(img > 240) < 180000:\n",
+    "        return True\n",
+    "    else:\n",
+    "        return False\n",
+    "\n",
+    "\n",
+    "images = [os.path.join(pth, f\"image ({i}).png\") for i in range(1,6)] + [os.path.join(pth, p) for p in [\"IMG_2062.jpeg\",\"IMG_2063.jpeg\",\"IMG_2057.jpeg\", \"IMG_2059.jpeg\"]]\n",
+    "imgs = [np.asarray(Image.open(img)) for img in images]\n",
+    "\n",
+    "var_method = []\n",
+    "point_method = []\n",
+    "\n",
+    "for img in imgs:\n",
+    "    if img.shape[-1] > 3:\n",
+    "        img = img[:, :,:-1]\n",
+    "    print(img.shape)\n",
+    "    var_method.append(decide_var(img))\n",
+    "    point_method.append(decide_point(img))\n",
+    "\n",
+    "print(\"Prediction for variance method\", var_method)\n",
+    "print(\"prediction for point method \", point_method)\n",
+    "print([os.path.basename(imag) for imag in images])\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "outputs": [
+    {
+     "data": {
+      "text/plain": "<Figure size 720x432 with 2 Axes>",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkkAAAFlCAYAAAD/BnzkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAXXUlEQVR4nO3de6xsZ3kf4N+Lj7kUEAZ8RCxfOFBQIjeKLzlyjUCIgqCGIqyqRLLVAqFUR6KmMSpRhEHi9h+VShICwnGBcgnhUi6pa5kQGhwRpGJybHw3Dgfkynad2JhgQy5Qw9s/Zplub3/77Nnbe2afOX4eaWmvWevba95vzexv/faaNTPV3QEA4MEetdsFAAAciYQkAIABIQkAYEBIAgAYEJIAAAaEJACAgT2L2Ojxxx/f+/btW8SmgSPQVVdd9b3u3rvbdewE4xc88mw0hi0kJO3bty8HDx5cxKaBI1BV/e/drmGnGL/gkWejMczLbQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwMBcIamqjquqz1bVt6rq5qp6zqILAzicqnpsVX2jqq6tqhur6p2DNo+pqk9X1aGqurKq9u1CqcCKmvdM0u8m+ePu/qUkpyW5eXElAczlx0le2N2nJTk9yTlVdfa6Nq9L8jfd/awkv53k3cstEVhlm4akqnpSkucn+VCSdPdPuvsHC64L4LB65kfTzWOnqdc1OzfJR6f5zyZ5UVXVkkoEVtyeOdo8I8ndSf5rVZ2W5KokF3b3365tVFUHkhxIklNOOWXuAt750DPkO+bt/faFbXtV614k+2T5Hun7vKqOyWxMelaS93f3leuanJjktiTp7vur6t4kT03yvXXb2db4BTwci/x/Zf3/S9szz8tte5KcmeQD3X1Gkr9N8uaHlNN9SXfv7+79e/fu3ZHiAA6nu3/a3acnOSnJWVX1y9vcjvELeIh5QtLtSW5f8x/aZzMLTQBHhOkSgCuSnLNu1R1JTk6SqtqT5ElJ7llqccDK2jQkdfdfJbmtqn5xWvSiJDcttCqATVTV3qo6bpp/XJIXJ/nWumaXJnnNNP/KJF/p7p05Dw8c9ea5JilJ/kOST1TVo5N8N8lrF1cSwFxOSPLR6bqkRyX5THdfVlXvSnKwuy/N7A0nH6+qQ0m+n+S83SsXWDVzhaTuvibJ/sWWAjC/7r4uyRmD5W9bM/8PSX5tmXUBRw+fuA0AMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADc4Wkqrq1qq6vqmuq6uCiiwLYTFWdXFVXVNVNVXVjVV04aPOCqrp3Gruuqaq37UatwGras4W2/6y7v7ewSgC25v4kb+ruq6vqiUmuqqovd/dN69r9eXe/fBfqA1acl9uAldTdd3b31dP8D5PcnOTE3a0KOJrMG5I6yZ9U1VVVdWDUoKoOVNXBqjp4991371yFAJuoqn1Jzkhy5WD1c6rq2qr6YlX9kw1+3/gFPMS8Iel53X1mkpcmuaCqnr++QXdf0t37u3v/3r17d7RIgI1U1ROSfC7JG7v7vnWrr07y9O4+LcnvJfmj0TaMX8DIXCGpu++Yft6V5AtJzlpkUQDzqKpjMwtIn+juz69f3933dfePpvnLkxxbVccvuUxgRW0akqrq8dNFkamqxyd5SZIbFl0YwOFUVSX5UJKbu/s9G7T5haldquqszMa8e5ZXJbDK5nl329OSfGEaZ/Yk+cPu/uO
FVgWwuecmeVWS66vqmmnZW5KckiTdfXGSVyZ5fVXdn+Tvk5zX3b0LtQIraNOQ1N3fTXLaEmoBmFt3fy1JbdLmfUnet5yKgKONjwAAABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAaEJACAASEJAGBASAIAGBCSAAAGhCQAgAEhCQBgQEgCABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAaEJACAASEJAGBASAIAGBCSAAAGhCQAgAEhCQBgQEgCABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAaEJACAASEJAGBASAIAGBCSAAAGhCQAgAEhCQBgQEgCABgQkgAABoQkAIABIQkAYEBIAgAYEJIAAAbmDklVdUxVfbOqLltkQQDzqKqTq+qKqrqpqm6sqgsHbaqq3ltVh6rquqo6czdqBVbTVs4kXZjk5kUVArBF9yd5U3efmuTsJBdU1anr2rw0ybOn6UCSDyy3RGCVzRWSquqkJP8iyQcXWw7AfLr7zu6+epr/YWb/xJ24rtm5ST7WM19PclxVnbDkUoEVNe+ZpN9J8ltJfra4UgC2p6r2JTkjyZXrVp2Y5LY1t2/PQ4MUwNCmIamqXp7kru6+apN2B6rqYFUdvPvuu3esQIDDqaonJPlckjd2933b3MY2x69a8ATspnnOJD03ySuq6tYkn0rywqr6g/WNuvuS7t7f3fv37t27w2UCPFRVHZtZQPpEd39+0OSOJCevuX3StOxBjF/AyKYhqbsv6u6TuntfkvOSfKW7/83CKwM4jKqqJB9KcnN3v2eDZpcmefX0Lrezk9zb3XcurUhgpe3Z7QIAtum5SV6V5PqqumZa9pYkpyRJd1+c5PIkL0tyKMnfJXnt8ssEVtWWQlJ3/1mSP1tIJQBb0N1fyyYX7nR3J7lgORUBRxufuA0AMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADm4akqnpsVX2jqq6tqhur6p3LKAzgcKrqw1V1V1XdsMH6F1TVvVV1zTS9bdk1Aqttzxxtfpzkhd39o6o6NsnXquqL3f31BdcGcDgfSfK+JB87TJs/7+6XL6cc4Giz6ZmknvnRdPPYaeqFVgWwie7+apLv73YdwNFrnjNJqapjklyV5FlJ3t/dVw7aHEhyIElOOeWUnaxx297plUF4pHtOVV2b5P8k+c3uvnHU6Egcv4DdN9eF29390+4+PclJSc6qql8etLmku/d39/69e/fucJkAW3Z1kqd392lJfi/JH23U0PgFjGzp3W3d/YMkVyQ5ZyHVAOyQ7r7vgUsFuvvyJMdW1fG7XBawQuZ5d9veqjpumn9ckhcn+daC6wJ4WKrqF6qqpvmzMhvv7tndqoBVMs81SSck+eh0XdKjknymuy9bbFkAh1dVn0zygiTHV9XtSd6e2RtL0t0XJ3llktdX1f1J/j7Jed3tTSfA3DYNSd19XZIzllALwNy6+/xN1r8vs48IANgWn7gNADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA5uGpKo6uaquqKq
bqurGqrpwGYUBHE5Vfbiq7qqqGzZYX1X13qo6VFXXVdWZy64RWG3znEm6P8mbuvvUJGcnuaCqTl1sWQCb+kiScw6z/qVJnj1NB5J8YAk1AUeRTUNSd9/Z3VdP8z9McnOSExddGMDhdPdXk3z/ME3OTfKxnvl6kuOq6oTlVAccDfZspXFV7UtyRpIrB+sOZPbfWk455ZSdqA3g4TgxyW1rbt8+LbtzfcMjd/yq3S5gm3qB217kPllk3atqVZ+DO2PuC7er6glJPpfkjd193/r13X1Jd+/v7v179+7dyRoBFsr4BYzMFZKq6tjMAtInuvvziy0JYEfckeTkNbdPmpYBzGWed7dVkg8lubm737P4kgB2xKVJXj29y+3sJPd290NeagPYyDzXJD03yauSXF9V10zL3tLdly+sKoBNVNUnk7wgyfFVdXuStyc5Nkm6++Iklyd5WZJDSf4uyWt3p1JgVW0akrr7a3mkX7kFHHG6+/xN1neSC5ZUDnAU8onbAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMLBpSKqqD1fVXVV1wzIKAphXVZ1TVbdU1aGqevNg/a9X1d1Vdc00/bvdqBNYTfOcSfpIknMWXAfAllTVMUnen+SlSU5Ncn5VnTpo+unuPn2aPrjUIoGVtmlI6u6vJvn+EmoB2Iqzkhzq7u9290+SfCrJubtcE3AU2bFrkqrqQFUdrKqDd999905tFmAjJya5bc3t26dl6/2rqrquqj5bVSePNmT8AkZ2LCR19yXdvb+79+/du3enNgvwcPyPJPu6+1eSfDnJR0eNjF/AiHe3AavqjiRrzwydNC37ue6+p7t/PN38YJJfXVJtwFFASAJW1V8keXZVPaOqHp3kvCSXrm1QVSesufmKJDcvsT5gxc3zEQCfTPK/kvxiVd1eVa9bfFkAh9fd9yd5Q5IvZRZ+PtPdN1bVu6rqFVOz36iqG6vq2iS/keTXd6daYBXt2axBd5+/jEIAtqq7L09y+bplb1szf1GSi5ZdF3B08HIbAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADAhJAAADQhIAwICQBAAwICQBAAwISQAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAwV0iqqnOq6paqOlRVb150UQDz2GxsqqrHVNWnp/VXVtW+XSgTWFGbhqSqOibJ+5O8NMmpSc6vqlMXXRjA4cw5Nr0uyd9097OS/HaSdy+3SmCVzXMm6awkh7r7u939kySfSnLuYssC2NQ8Y9O5ST46zX82yYuqqpZYI7DC5glJJya5bc3t26dlALtpnrHp5226+/4k9yZ56lKqA1benp3aUFUdSHJguvmjqronyfd2avtHiOOzhD69o96x6LtYayl9eri2uE9Wok9btPQ+bXGfP31BZSzFYPy6ZRubWbXn3QLr3fGTdUvatztSt+fB4myh1i0/lsMxbJ6QdEeSk9fcPmla9iDdfUmSSx64XVUHu3v/Fos8ounTatCnR4x5xqYH2txeVXuSPCnJPes3tH782o5Ve4xWqV61Ls4q1bsbtc7zcttfJHl2VT2jqh6d5Lwkly62LIBNzTM2XZrkNdP8K5N
8pbt7iTUCK2zTM0ndfX9VvSHJl5Ick+TD3X3jwisDOIyNxqaqeleSg919aZIPJfl4VR1K8v3MghTAXOa6Jqm7L09y+Ra3/bBOXR+h9Gk16NMjxGhs6u63rZn/hyS/tqRyVu0xWqV61bo4q1Tv0mstZ54BAB7K15IAAAxsKSRV1Yer6q6qumHNsqdU1Zer6tvTzydPy6uq3jt9HcB1VXXmmt95zdT+21X1mtF9LUNVnVxVV1TVTVV1Y1VdeBT06bFV9Y2qunbq0zun5c+Yvpbh0PQ1DY+elm/4tQ1VddG0/Jaq+ue71KWfq6pjquqbVXXZdHul+1RVt1bV9VV1TVUdnJat7HPvaLHox6WqfnXa/qHpd7f0XuVa8Di8UX0b3cc2an1HVd0x7d9rqupla9YN/z5rg6+f2c4YcJhaF3482OF9u1G9R9z+rSUcl7bah7l199xTkucnOTPJDWuW/ackb57m35zk3dP8y5J8MbMPKzg7yZXT8qck+e7088nT/JO3UsdOTUlOSHLmNP/EJH+Z2dcbrHKfKskTpvljk1w51fqZJOdNyy9O8vpp/t8nuXiaPy/Jp6f5U5Ncm+QxSZ6R5DtJjtmNPq3p239M8odJLptur3Sfktya5Ph1y1b2uXe0TIt+XJJ8Y2pb0+++dIv1LXQc3qi+je5jG7W+I8lvDtoO/z6n6TtJnpnk0VObU6ff2dIYsEmtCz8e7PC+3ajeI27/ZsHHpe30Ye6/t20MIPvy4Cf8LUlOWPOg3TLN/36S89e3S3J+kt9fs/xB7XZzSvLfk7z4aOlTkn+U5Ook/zSzD+DaMy1/TpIvTfNfSvKcaX7P1K6SXJTkojXb+nm7XerLSUn+NMkLk1w21bjqfbo1Dz0YHxXPvVWeFvm4TOu+tWb5g9ptocZ9WcA4fLj6NrqPbdT6jowP4sO/z7V/22vbbWcM2OI+3tHjwSL27Qb1HtH7Nws4Lm2nD/NOO3FN0tO6+85p/q+SPG2a3+grA47IrzmZTuedkVnCXek+1exlqWuS3JXky5kl7B/07GsZkgfXt9HXNhxRfUryO0l+K8nPpttPzer3qZP8SVVdVbNPfE5W/Ll3lFjk43LiNL9++cO1jPo2uo/teMP0EtWH17y0tNVatzMGzGVBx4OF7dt19SZH4P5d8HFpO32Yy45euN2zqNY7uc1lqKonJPlckjd2931r161in7r7p919emZnX85K8ku7W9HDU1UvT3JXd1+127XssOd195mZfYv9BVX1/LUrV/G5d5RY6cdlGfU9zPv4QJJ/nOT0JHcm+c87VNaO2O3jwVbvY1DvEbl/V/W4tBMh6a+r6oQkmX7eNS3f6CsD5vqak2WpqmMze4J9ors/Py1e6T49oLt/kOSKzE4xHlezr2VIHlzfz2uvB39tw5HUp+cmeUVV3ZrZN72/MMnvZrX7lO6+Y/p5V5IvZDZwHBXPvVW24Mfljml+/fKHaxn1bXQfW9Ldfz0dMH+W5L9ktn+3U+s92foYcFgLPh7s+L4d1Xsk79+pvh9k549L2+nDXHYiJK392P/XZPa66APLX10zZye5dzqd+KUkL6mqJ0+nAV8yLVu6qqrMPpH35u5+z5pVq9ynvVV13DT/uMxeo745syflK6dm6/v0QF/Xfm3DpUnOm95l8Iwkz87sosOl6+6Luvuk7t6X2UV8X+nuf50V7lNVPb6qnvjAfGbPmRuyws+9o8GiH5dp3X1VdfY0/rx6zbYejmXUt9F9bMkDYWDyLzPbvw9sf/T3Ofz6melveqtjwOHqWujxYKf37Ub1Hon7dwnHpe30YT5buYApySczO333fzN7be91mb3m96dJvp3kfyZ5ytS2krw/s9cdr0+yf812/m2SQ9P02q3UsJNTkudldlrzuiTXTNPLVrxPv5Lkm1Ofbkjytmn5M6cn06Ek/y3JY6blj51uH5rWP3PNtt469fWWbPEdOAvs3wvy/9/dtrJ9mmq/dppuTPLWafnKPveOhmk
Zj0uS/dPf5neSvC9bv6B4oePwRvVtdB/bqPXjUy3XZXbQO2FN++HfZ2bj8l9O69667vHa0hhwmFoXfjzY4X27Ub1H3P7NEo5LW+3DvJNP3AYAGPCJ2wAAA0ISAMCAkAQAMCAkAQAMCEkAAANCEgDAgJAEADAgJAEADPw/y2eL/LADdJkAAAAASUVORK5CYII=\n"
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "fig, ax = plt.subplots(1,2, figsize = (10,6))\n",
+    "\n",
+    "vars = []\n",
+    "for img in imgs:\n",
+    "    for i in range(3):\n",
+    "        vars.append(img.var())\n",
+    "\n",
+    "ax[0].hist(vars, color = \"purple\")\n",
+    "ax[1].hist([np.sum(img[:,:,:3] > 240) for img in imgs], color = \"yellow\")\n",
+    "\n",
+    "plt.show()"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 48,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(1506, 1506, 4)\n",
+      "[1051, 1618, 142, 14, 31, 363, 3084, 126, 1906]\n",
+      "['C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (1).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (2).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (3).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (4).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\image (5).png', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2062.jpeg', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2063.jpeg', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2057.jpeg', 'C:\\\\Users\\\\ptrkm\\\\Downloads\\\\IMG_2059.jpeg']\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": "<Figure size 720x432 with 1 Axes>",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlMAAAFlCAYAAADPim3FAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAU5ElEQVR4nO3df6zldX3n8dd7Z8YfWYmo3LQEGEdXko1tLNAJi7FpjEaLaGA31QSTrejaTOJKqkk3G2gTWv2rbrK6cTUSVkjRNYpFtzu1GJddaax/iA50QH6UOnXdAGHLCAqSVrvjvveP86V7vdzLPXc+53LPDI9HcnK/5/v9cM7nfuZ7yTPnZ3V3AAA4Pv9opycAAHAiE1MAAAPEFADAADEFADBATAEADBBTAAADdu/UHZ922mm9b9++nbp7AIC53Xbbbd/v7pX1ju1YTO3bty+HDh3aqbsHAJhbVf2vjY55mg8AYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABgwd0xV1a6q+ouq+tI6x55bVTdU1ZGqurWq9i10lgAAS2orj0y9L8m9Gxx7d5IfdPcrknwkyYdGJwYAcCKYK6aq6swkb07yyQ2GXJLk+mn7xiSvr6oanx4AwHLbPee4/5Dk3yY5ZYPjZyS5P0m6+1hVPZbkJUm+v3pQVR1IciBJ9u7dexzT3bp9V/zpM3I/2+17f/DmnZ4CALCOTR+Zqqq3JHm4u28bvbPuvqa793f3/pWVldGbAwDYcfM8zfeaJBdX1feSfC7J66rqP68Z82CSs5KkqnYneWGSRxY4TwCApbRpTHX3ld19ZnfvS3Jpkq92979cM+xgksum7bdOY3qhMwUAWELzvmbqKarqg0kOdffBJNcm+XRVHUnyaGbRBQBw0ttSTHX3nyX5s2n7qlX7f5zkbYucGADAicAnoAMADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAAzaNqap6XlV9s6ruqKq7q+oD64x5Z1UdrarD0+U3t2e6AADLZfccY36S5HXd/URV7Uny9ar6cnd/Y824G7r78sVPEQBgeW0aU93dSZ6Yru6ZLr2dkwIAOFHM9ZqpqtpVVYeTPJzk5u6+dZ1hv15Vd1bVjVV11ga3c6CqDlXVoaNHjx7/rAEAlsRcMdXdP+3uc5KcmeT8qvrFNUP+JMm+7n5VkpuTXL/B7VzT3fu7e//KysrAtAEAlsOW3s3X3T9MckuSC9fsf6S7fzJd/WSSX17I7AAAltw87+ZbqapTp+3nJ3lDkr9cM+b0VVcvTnLvAucIALC05nk33+lJrq+qXZnF1+e7+0tV9cEkh7r7YJLfqqqLkxxL8miSd27XhAEAlsk87+a7M8m56+y/atX2lUmuXOzUAACWn09ABwAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGLBpTFXV86rqm1V1R1XdXVUfWGfMc6vqhqo6UlW3VtW+bZktAMCSmeeRqZ8keV13/1KSc5JcWFUXrBnz7iQ/6O5XJPlIkg8tdJYAAEtq05jqmSemq3umS68ZdkmS66ftG5O8vqpqYbMEAFhSc71mqqp2VdXhJA8nubm7b10z5Iwk9ydJdx9L8liSlyxwngAAS2mumOrun3b3OUnOTHJ+Vf3i8dxZVR2oqkNVdejo0aPHcxM
AAEtlS+/m6+4fJrklyYVrDj2Y5KwkqardSV6Y5JF1/vtrunt/d+9fWVk5rgkDACyTed7Nt1JVp07bz0/yhiR/uWbYwSSXTdtvTfLV7l77uioAgJPO7jnGnJ7k+qralVl8fb67v1RVH0xyqLsPJrk2yaer6kiSR5Ncum0zBgBYIpvGVHffmeTcdfZftWr7x0nettipAQAsP5+ADgAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAM2DSmquqsqrqlqu6pqrur6n3rjHltVT1WVYeny1XbM10AgOWye44xx5L8dnffXlWnJLmtqm7u7nvWjPvz7n7L4qcIALC8Nn1kqrsf6u7bp+0fJbk3yRnbPTEAgBPBll4zVVX7kpyb5NZ1Dr+6qu6oqi9X1S8sYnIAAMtunqf5kiRV9YIkX0jy/u5+fM3h25O8tLufqKqLkvxxkrPXuY0DSQ4kyd69e493zgAAS2OuR6aqak9mIfWZ7v7i2uPd/Xh3PzFt35RkT1Wdts64a7p7f3fvX1lZGZw6AMDOm+fdfJXk2iT3dveHNxjz89O4VNX50+0+ssiJAgAso3me5ntNkt9I8u2qOjzt+50ke5Oku69O8tYk76mqY0n+Lsml3d2Lny4AwHLZNKa6++tJapMxH0vysUVNCgDgROET0AEABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAZvGVFWdVVW3VNU9VXV3Vb1vnTFVVR+tqiNVdWdVnbc90wUAWC675xhzLMlvd/ftVXVKktuq6ubuvmfVmDclOXu6/LMkn5h+AgCc1DZ9ZKq7H+ru26ftHyW5N8kZa4ZdkuRTPfONJKdW1ekLny0AwJLZ0mumqmpfknOT3Lrm0BlJ7l91/YE8NbhSVQeq6lBVHTp69OgWpwoAsHzmjqmqekGSLyR5f3c/fjx31t3XdPf+7t6/srJyPDcBALBU5oqpqtqTWUh9pru/uM6QB5Octer6mdM+AICT2jzv5qsk1ya5t7s/vMGwg0neMb2r74Ikj3X3QwucJwDAUprn3XyvSfIbSb5dVYenfb+TZG+SdPfVSW5KclGSI0n+Nsm7Fj5TAIAltGlMdffXk9QmYzrJexc1KQCAE4VPQAcAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABmwaU1V1XVU9XFV3bXD8tVX1WFUdni5XLX6aAADLafccY/4wyceSfOppxvx5d79lITMCADiBbPrIVHd/Lcmjz8BcAABOOIt6zdSrq+qOqvpyVf3CRoOq6kBVHaqqQ0ePHl3QXQMA7JxFxNTtSV7a3b+U5D8m+eONBnb3Nd29v7v3r6ysLOCuAQB21nBMdffj3f3EtH1Tkj1VddrwzAAATgDDMVVVP19VNW2fP93mI6O3CwBwItj03XxV9dkkr01yWlU9kOT3kuxJku6+Oslbk7ynqo4l+bskl3Z3b9uMAQCWyKYx1d1v3+T4xzL76AQAgGcdn4AOADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAP
EFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAzYNKaq6rqqeriq7trgeFXVR6vqSFXdWVXnLX6aAADLaZ5Hpv4wyYVPc/xNSc6eLgeSfGJ8WgAAJ4ZNY6q7v5bk0acZckmST/XMN5KcWlWnL2qCAADLbPcCbuOMJPevuv7AtO+htQOr6kBmj15l7969C7jrZ499V/zpTk9hYb73B2/e6SkszMny7+LfhO12Mp1jJ4uT6W9lp8+vZ/QF6N19TXfv7+79Kysrz+RdAwBsi0XE1INJzlp1/cxpHwDASW8RMXUwyTumd/VdkOSx7n7KU3wAACejTV8zVVWfTfLaJKdV1QNJfi/JniTp7quT3JTkoiRHkvxtkndt12QBAJbNpjHV3W/f5Hgnee/CZgQAcALxCegAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwAAxBQAwQEwBAAwQUwAAA8QUAMAAMQUAMEBMAQAMEFMAAAPEFADAADEFADBATAEADBBTAAADxBQAwIC5YqqqLqyq+6rqSFVdsc7xd1bV0ao6PF1+c/FTBQBYPrs3G1BVu5J8PMkbkjyQ5FtVdbC771kz9Ibuvnwb5ggAsLTmeWTq/CRHuvu73f33ST6X5JLtnRYAwIlhnpg6I8n9q64/MO1b69er6s6qurGqzlrvhqrqQFUdqqpDR48ePY7pAgAsl0W9AP1Pkuzr7lcluTnJ9esN6u5runt/d+9fWVlZ0F0DAOyceWLqwSSrH2k6c9r3D7r7ke7+yXT1k0l+eTHTAwBYbvPE1LeSnF1VL6uq5yS5NMnB1QOq6vRVVy9Ocu/ipggAsLw2fTdfdx+rqsuTfCXJriTXdffdVfXBJIe6+2CS36qqi5McS/Jokndu45wBAJbGpjGVJN19U5Kb1uy7atX2lUmuXOzUAACWn09ABwAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGCCmAAAGiCkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAYIKYAAAaIKQCAAWIKAGCAmAIAGDBXTFXVhVV1X1Udqaor1jn+3Kq6YTp+a1XtW/hMAQCW0KYxVVW7knw8yZuSvDLJ26vqlWuGvTvJD7r7FUk+kuRDi54oAMAymueRqfOTHOnu73b33yf5XJJL1oy5JMn10/aNSV5fVbW4aQIALKd5YuqMJPevuv7AtG/dMd19LMljSV6yiAkCACyz3c/knVXVgSQHpqtPVNV923RXpyX5/jbd9rPNwteynp1PAi/1OXmC/Zss9VqeYJ6xtTzBzrGtck4uznGt5TN0fr10owPzxNSDSc5adf3Mad96Yx6oqt1JXpjkkbU31N3XJLlmjvscUlWHunv/dt/Ps4G1XAzruDjWcnGs5WJYx8U5Uddynqf5vpXk7Kp6WVU9J8mlSQ6uGXMwyWXT9luTfLW7e3HTBABYTps+MtXdx6rq8iRfSbIryXXdfXdVfTDJoe4+mOTaJJ+uqiNJHs0suAAATnpzvWaqu29KctOafVet2v5xkrctdmpDtv2pxGcRa7kY1nFxrOXiWMvFsI6Lc0KuZXk2DgDg+Pk6GQCAASddTG321Tf8rKr6XlV9u6oOV9Whad+Lq+rmqvrO9PNF0/6qqo9Oa3tnVZ23s7PfWVV1XVU9XFV3rdq35bWrqsum8d+pqsvWu6+T3QZr+ftV9eB0bh6uqotWHbtyWsv7qurXVu1/Vv/9V9VZVXVLVd1TVXdX1fum/c7LLXiadXROblFVPa+qvllVd0xr+YF
p/8umr587UrOvo3vOtH/Dr6fbaI2XQnefNJfMXiD/10lenuQ5Se5I8sqdntcyX5J8L8lpa/b9uyRXTNtXJPnQtH1Rki8nqSQXJLl1p+e/w2v3q0nOS3LX8a5dkhcn+e7080XT9ot2+ndbkrX8/ST/Zp2xr5z+tp+b5GXT3/wuf/+dJKcnOW/aPiXJX03r5bxczDo6J7e+lpXkBdP2niS3Tufa55NcOu2/Osl7pu1/neTqafvSJDc83Rrv9O/35OVke2Rqnq++YXOrvx7o+iT/fNX+T/XMN5KcWlWn78D8lkJ3fy2zd6+uttW1+7UkN3f3o939gyQ3J7lw2ye/ZDZYy41ckuRz3f2T7v6fSY5k9rf/rP/77+6Huvv2aftHSe7N7BsqnJdb8DTruBHn5Aamc+uJ6eqe6dJJXpfZ188lTz0n1/t6uo3WeCmcbDE1z1ff8LM6yX+rqttq9gn1SfJz3f3QtP2/k/zctG19N7fVtbOmT+/y6emn6558airWci7T0yPnZvZIgPPyOK1Zx8Q5uWVVtauqDid5OLMw/+skP+zZ188lP7suG3093VKv5ckWU2zdr3T3eUnelOS9VfWrqw/27PFVb/k8DtZu2CeS/JMk5yR5KMm/39HZnECq6gVJvpDk/d39+Opjzsv5rbOOzsnj0N0/7e5zMvsGlfOT/NOdndHinWwxNc9X37BKdz84/Xw4yX/J7ET/myefvpt+PjwNt76b2+raWdMNdPffTP8T/r9J/lP+/0P61vJpVNWezALgM939xWm383KL1ltH5+SY7v5hkluSvDqzp5Sf/KzL1evyD2tWP/v1dEu9lidbTM3z1TdMquofV9UpT24neWOSu/KzXw90WZL/Om0fTPKO6R1AFyR5bNVTB8xsde2+kuSNVfWi6SmDN077nvXWvB7vX2R2biaztbx0etfPy5KcneSb8fef6bUl1ya5t7s/vOqQ83ILNlpH5+TWVdVKVZ06bT8/yRsyew3aLZl9/Vzy1HNyva+n22iNl8NOvwJ+0ZfM3p3yV5k9J/u7Oz2fZb5k9g6TO6bL3U+uV2bPT/+PJN9J8t+TvHjaX0k+Pq3tt5Ps3+nfYYfX77OZPdT/fzJ7/v7dx7N2Sf5VZi+mPJLkXTv9ey3RWn56Wqs7M/sf6emrxv/utJb3JXnTqv3P6r//JL+S2VN4dyY5PF0ucl4ubB2dk1tfy1cl+Ytpze5KctW0/+WZxdCRJH+U5LnT/udN149Mx1++2Rovw8UnoAMADDjZnuYDAHhGiSkAgAFiCgBggJgCABggpgAABogpAIABYgoAYICYAgAY8P8AI4X8cRbJkc4AAAAASUVORK5CYII=\n"
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import skimage.measure as m\n",
+    "imgs_gray = [cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) for img in imgs]\n",
+    "imgs_gray = [img > 250 for img in imgs_gray]\n",
+    "\n",
+    "nums = []\n",
+    "for img in imgs_gray:\n",
+    "    labels, num = m.label(img, return_num=True)\n",
+    "    nums.append(num)\n",
+    "\n",
+    "fig, ax = plt.subplots(1,1, figsize = (10,6 ))\n",
+    "ax.hist(nums)\n",
+    "print(imgs[0].shape)\n",
+    "print(nums)\n",
+    "print(images)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
 }
\ No newline at end of file
diff --git a/embeddings_and_difficulty/losses/__pycache__/losses.cpython-38.pyc b/embeddings_and_difficulty/losses/__pycache__/losses.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e9e423ea73c264f543a02b8bfcd8d7c0e8b97ad
Binary files /dev/null and b/embeddings_and_difficulty/losses/__pycache__/losses.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/losses/__pycache__/losses_backbone.cpython-38.pyc b/embeddings_and_difficulty/losses/__pycache__/losses_backbone.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..be4474a7042f9b2c9f7ed9880a90fbd2c412a461
Binary files /dev/null and b/embeddings_and_difficulty/losses/__pycache__/losses_backbone.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/losses/__pycache__/losses_head.cpython-38.pyc b/embeddings_and_difficulty/losses/__pycache__/losses_head.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1871d882ae5e0c56235d934e86aeaf5a2d31d5c3
Binary files /dev/null and b/embeddings_and_difficulty/losses/__pycache__/losses_head.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/losses/losses.py b/embeddings_and_difficulty/losses/losses.py
index aedc4446a9c30047a402d968faf4c3028b65c448..7b37ec4f93a4d8ace1d4b5fa445b8d9a00348f3e 100644
--- a/embeddings_and_difficulty/losses/losses.py
+++ b/embeddings_and_difficulty/losses/losses.py
@@ -1,22 +1,22 @@
-
-
-
-def create_loss(losses, args):
-
-    alpha = args.ALPHA
-    loss_backbone = losses[0]
-    loss_head = losses[1]
-
-    def loss(embeddings, est_difficulties,labels, difficulties, score_keeper):
-
-        if score_keeper.is_training == 'backbone':
-            return loss_backbone(embeddings, labels)
-        if score_keeper.is_training == 'head':
-            return loss_head(est_difficulties, difficulties)
-        if score_keeper.is_training == 'combined':
-            return alpha * loss_backbone(embeddings, labels) + (1-alpha)*loss_head(est_difficulties, difficulties)
-    return loss
-
-
-
-
+
+
+
+def create_loss(losses, args):
+
+    alpha = args.SOLVER.ALPHA
+    loss_backbone = losses[0]
+    loss_head = losses[1]
+
+    def loss(embeddings, est_difficulties,labels, difficulties, score_keeper):
+        labels = labels.reshape(-1)
+        if score_keeper.is_training == 'backbone':
+            return loss_backbone(embeddings, labels)
+        if score_keeper.is_training == 'head':
+            return loss_head(est_difficulties, difficulties)
+        if score_keeper.is_training == 'combined':
+            return alpha * loss_backbone(embeddings, labels) + (1-alpha)*loss_head(est_difficulties, difficulties)
+    return loss
+
+
+
+
diff --git a/embeddings_and_difficulty/losses/losses_backbone.py b/embeddings_and_difficulty/losses/losses_backbone.py
index 8a662c3f001e07f110f10476fe1f4e783e752639..d1692b3f9a99694209b86c97c44a2af5585ffdec 100644
--- a/embeddings_and_difficulty/losses/losses_backbone.py
+++ b/embeddings_and_difficulty/losses/losses_backbone.py
@@ -1,35 +1,36 @@
-
-
-import numpy as np
-import torch
-from pytorch_metric_learning import losses
-
-
-"""
-All losses are added from pytorch_metric_learning - losses 
-https://kevinmusgrave.github.io/pytorch-metric-learning/losses/
-
-all functions should be in the form get_loss(args): return(losses.loss(args))
-args should point at yaml file in configs folder, if new loss is added, then there should also be added a yaml file
-with the same name e.g. contrastive.yaml this should correspond to the string put in "configs/general.yaml" under loss
-when added it should also be added to the dictionary in the bottom named all_losses
-"""
-def get_contrastive(args):
-    return losses.ContrastiveLoss(args.pos_margin, args.neg_margin, **args.kwargs)
-
-def get_triplet_margin(args):
-    return losses.TripletMarginLoss(margin=args.margin,
-                                    swap = args.swap,
-                                    smooth_loss=args.smooth_loss,
-                                    triplets_per_anchor=args.triplets_per_anchor,
-                                    **args.kwargs)
-
-
-
-all_losses = {
-    'contrastive': get_contrastive,
-    'triplet_marging': get_triplet_margin
-}
-
-def get_loss(loss, loss_args):
-    return all_losses[loss](loss_args)
+
+
+import numpy as np
+import torch
+from pytorch_metric_learning import losses
+
+
+"""
+All losses are added from pytorch_metric_learning - losses 
+https://kevinmusgrave.github.io/pytorch-metric-learning/losses/
+
+all functions should be in the form get_loss(args): return(losses.loss(args))
+args should point at yaml file in configs folder, if new loss is added, then there should also be added a yaml file
+with the same name e.g. contrastive.yaml this should correspond to the string put in "configs/general.yaml" under loss
+when added it should also be added to the dictionary in the bottom named all_losses
+"""
+def get_contrastive(args):
+
+    return losses.ContrastiveLoss(args.pos_margin, args.neg_margin)
+
+def get_triplet_margin(args):
+    return losses.TripletMarginLoss(margin=args.margin,
+                                    swap = args.swap,
+                                    smooth_loss=args.smooth_loss,
+                                    triplets_per_anchor=args.triplets_per_anchor,
+                                    **args.kwargs)
+
+
+
+all_losses = {
+    'Contrastive': get_contrastive,
+    'TripletMargin': get_triplet_margin
+}
+
+def get_loss(loss, loss_args):
+    return all_losses[loss](loss_args.__dict__[loss])
diff --git a/embeddings_and_difficulty/losses/losses_head.py b/embeddings_and_difficulty/losses/losses_head.py
index be3d0936b0c46f70cb11bca171f53aa1f46b5249..e654b0650eba08d3266c1b85c20833aaeeef36d9 100644
--- a/embeddings_and_difficulty/losses/losses_head.py
+++ b/embeddings_and_difficulty/losses/losses_head.py
@@ -1,29 +1,31 @@
-
-import numpy as np
-import torch
-from pytorch_metric_learning import losses
-import torch.nn as nn
-
-def get_least_squares(args):
-    return nn.MSELoss(reduction=args.reductions)
-
-def get_l1(args):
-    return nn.L1Loss(reduction=args.reductions)
-
-class KendallsTau(nn.modules.loss._Loss):
-
-    def __init__(self, args):
-        self.args = args
-
-    def forward(self, difficulty, values):
-
-        sgn_difficulty = torch.zeros()
-        tau = 2/(len(difficulty) * (len(difficulty)-1)) * torch.sum(torch)
-
-all_losses = {
-    'least_squares': get_least_squares,
-    'L1': get_l1,
-}
-
-def get_loss(loss, loss_args):
-    return all_losses[loss](loss_args)
\ No newline at end of file
+
+import numpy as np
+import torch
+from pytorch_metric_learning import losses
+import torch.nn as nn
+
+def get_least_squares(args):
+
+    return nn.MSELoss(reduction=args.reduction)
+
+def get_l1(args):
+    return nn.L1Loss(reduction=args.reduction)
+
+class KendallsTau(nn.modules.loss._Loss):
+
+    def __init__(self, args):
+        self.args = args
+
+    def forward(self, difficulty, values):
+
+        sgn_difficulty = torch.zeros()
+        tau = 2/(len(difficulty) * (len(difficulty)-1)) * torch.sum(torch)
+
+all_losses = {
+    'LeastSquares': get_least_squares,
+    'L1': get_l1,
+}
+
+def get_loss(loss, loss_args):
+
+    return all_losses[loss](loss_args.__dict__[loss])
\ No newline at end of file
diff --git a/embeddings_and_difficulty/misc/__pycache__/accuracy_calculator.cpython-38.pyc b/embeddings_and_difficulty/misc/__pycache__/accuracy_calculator.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c4767b1684528be5c55ddd9c01fb752752fbcb85
Binary files /dev/null and b/embeddings_and_difficulty/misc/__pycache__/accuracy_calculator.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/misc/__pycache__/read_configs.cpython-38.pyc b/embeddings_and_difficulty/misc/__pycache__/read_configs.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b8fa11432f92b54b9fb914287a4bb40aec592f05
Binary files /dev/null and b/embeddings_and_difficulty/misc/__pycache__/read_configs.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/misc/__pycache__/savers_and_loaders.cpython-38.pyc b/embeddings_and_difficulty/misc/__pycache__/savers_and_loaders.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9354e3272b6a528c46cf8e03bac9062b7721a80d
Binary files /dev/null and b/embeddings_and_difficulty/misc/__pycache__/savers_and_loaders.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/misc/accuracy_calculator.py b/embeddings_and_difficulty/misc/accuracy_calculator.py
index a33d2292e8c09ff0ff55feb19adde23e3a79c693..0d66391dca73af380e8e90f08010158941dbad43 100644
--- a/embeddings_and_difficulty/misc/accuracy_calculator.py
+++ b/embeddings_and_difficulty/misc/accuracy_calculator.py
@@ -1,62 +1,63 @@
-import numpy as np
-from sklearn.neighbors import KNeighborsClassifier
-from sklearn.metrics import accuracy_score
-from scipy.spatial.distance import cdist
-from scipy.stats import kendalltau
-
-
-def calculate_embedding_accuracy_knn(embeddings, labels):
-    embs = embeddings.detach().cpu().numpy()
-    labels = labels.detach().cpu().numpy()
-
-    knn_preds = KNeighborsClassifier(n_neighbors=10).fit(embs, labels).predict(embs)
-
-    return accuracy_score(labels, knn_preds)
-
-
-def calculate_embedding_accuracy_means(embeddings, labels):
-    embs = embeddings.detach().cpu().numpy()
-    labels = labels.detach().cpu().numpy()
-    means = np.array([embs[labels == i] for i in sorted(np.unique(labels))])
-    dist_mat = cdist(embs, means, metric='euclidian')
-
-    return accuracy_score(labels, np.argmin(dist_mat, -1))
-
-
-def calculate_difficulty_accuracy_kendall_tau(estimated_difficulty, difficulty):
-    estimated_difficulty = estimated_difficulty.detach().cpu().numpy()
-    difficulty = difficulty.detach().cpu().numpy()
-
-    corr, _ = kendalltau(estimated_difficulty, difficulty)
-    return corr
-
-
-def calculate_difficulty_accuracy_mean_squared_error(estimated_difficulty, difficulty):
-    estimated_difficulty = estimated_difficulty.detach().cpu().numpy()
-    difficulty = difficulty.detach().cpu().numpy()
-    return np.mean(np.linalg.norm(estimated_difficulty - difficulty))
-
-
-accuracy_methods_embeddings = {
-    'knn': calculate_embedding_accuracy_knn,
-    'means': calculate_embedding_accuracy_means,
-}
-
-accuracy_methods_difficulties = {
-    'kendall_tau': calculate_difficulty_accuracy_kendall_tau,
-    'MSE': calculate_difficulty_accuracy_mean_squared_error
-}
-
-
-def get_accuracy_methods(args):
-    if args.EVAL_METRICS.BACKBONE in accuracy_methods_embeddings:
-        backbone_func = accuracy_methods_embeddings[args.EVAL_METRICS.BACKBONE]
-    else:
-        raise NotImplementedError("Accuracy calculation method for backbone is not implemented yet")
-
-    if args.EVAL_METRICS.HEAD in accuracy_methods_difficulties:
-        head_func = accuracy_methods_difficulties[args.EVAL_METRICS.HEAD]
-    else:
-        raise NotImplementedError("Accuracy calculation method for head is not implemented yet")
-
-    return backbone_func, head_func
+import numpy as np
+from sklearn.neighbors import KNeighborsClassifier
+from sklearn.metrics import accuracy_score
+from scipy.spatial.distance import cdist
+from scipy.stats import kendalltau
+
+
+def calculate_embedding_accuracy_knn(embeddings, labels):
+    embs = embeddings.detach().cpu().numpy()
+    labels = labels.detach().cpu().numpy()
+
+    knn_preds = KNeighborsClassifier(n_neighbors=10).fit(embs, labels).predict(embs)
+
+    return accuracy_score(labels, knn_preds)
+
+
+def calculate_embedding_accuracy_means(embeddings, labels):
+    embs = embeddings.detach().cpu().numpy()
+    labels = labels.detach().cpu().numpy()
+    means = np.array([embs[labels == i].mean(0) for i in sorted(np.unique(labels))])
+    dist_mat = cdist(embs, means, metric='euclidean')
+
+    return accuracy_score(labels, np.argmin(dist_mat, -1))
+
+
+def calculate_difficulty_accuracy_kendall_tau(estimated_difficulty, difficulty):
+    estimated_difficulty = estimated_difficulty.detach().cpu().numpy()
+    difficulty = difficulty.detach().cpu().numpy()
+
+    corr, _ = kendalltau(estimated_difficulty, difficulty)
+    return corr
+
+
+def calculate_difficulty_accuracy_mean_squared_error(estimated_difficulty, difficulty):
+    estimated_difficulty = estimated_difficulty.detach().cpu().numpy()
+    difficulty = difficulty.detach().cpu().numpy()
+    return np.mean((estimated_difficulty - difficulty) ** 2)
+
+
+accuracy_methods_embeddings = {
+    'knn': calculate_embedding_accuracy_knn,
+    'means': calculate_embedding_accuracy_means,
+}
+
+accuracy_methods_difficulties = {
+    'kendall_tau': calculate_difficulty_accuracy_kendall_tau,
+    'MSE': calculate_difficulty_accuracy_mean_squared_error
+}
+
+
+def get_accuracy_methods(args):
+
+    if args.EVAL_METRICS.BACKBONE in accuracy_methods_embeddings:
+        backbone_func = accuracy_methods_embeddings[args.EVAL_METRICS.BACKBONE]
+    else:
+        raise NotImplementedError("Accuracy calculation method for backbone is not implemented yet")
+
+    if args.EVAL_METRICS.HEAD in accuracy_methods_difficulties:
+        head_func = accuracy_methods_difficulties[args.EVAL_METRICS.HEAD]
+    else:
+        raise NotImplementedError("Accuracy calculation method for head is not implemented yet")
+
+    return backbone_func, head_func
diff --git a/embeddings_and_difficulty/misc/init_stuff.ipynb b/embeddings_and_difficulty/misc/init_stuff.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..d00d13aefa9d6c838e5cc69243163db93ed89d35
--- /dev/null
+++ b/embeddings_and_difficulty/misc/init_stuff.ipynb
@@ -0,0 +1,328 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import pandas as pd\n",
+    "import numpy as np\n",
+    "import pickle\n",
+    "import os"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "outputs": [
+    {
+     "data": {
+      "text/plain": "                                           names  labels\n0       9a298f46-46d0-4a3c-a9df-ca828ea73d5d.jpg       1\n1       9d60b81b-fdc6-492a-a3ef-ecdb99b79cd3.jpg       1\n2       e5787ca3-2978-4ef5-8d14-7d2519e6b0f9.jpg       1\n3       b6cab130-7489-45a1-a0bf-484afc502c75.jpg       1\n4       7e1ff9a0-e2fd-4e03-bce3-beb9dcc9090e.jpg       4\n...                                          ...     ...\n208411  829651a4-43cd-43e1-b741-9885532ff9e8.jpg       0\n208412  829651a4-43cd-43e1-b741-9885532ff9e8.jpg       0\n208413  829651a4-43cd-43e1-b741-9885532ff9e8.jpg       0\n208414  829651a4-43cd-43e1-b741-9885532ff9e8.jpg       0\n208415  829651a4-43cd-43e1-b741-9885532ff9e8.jpg       0\n\n[208416 rows x 2 columns]",
+      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>names</th>\n      <th>labels</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>9a298f46-46d0-4a3c-a9df-ca828ea73d5d.jpg</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>9d60b81b-fdc6-492a-a3ef-ecdb99b79cd3.jpg</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>e5787ca3-2978-4ef5-8d14-7d2519e6b0f9.jpg</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>b6cab130-7489-45a1-a0bf-484afc502c75.jpg</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>7e1ff9a0-e2fd-4e03-bce3-beb9dcc9090e.jpg</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>...</th>\n      <td>...</td>\n      <td>...</td>\n    </tr>\n    <tr>\n      <th>208411</th>\n      <td>829651a4-43cd-43e1-b741-9885532ff9e8.jpg</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>208412</th>\n      <td>829651a4-43cd-43e1-b741-9885532ff9e8.jpg</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>208413</th>\n      <td>829651a4-43cd-43e1-b741-9885532ff9e8.jpg</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>208414</th>\n      <td>829651a4-43cd-43e1-b741-9885532ff9e8.jpg</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>208415</th>\n      <td>829651a4-43cd-43e1-b741-9885532ff9e8.jpg</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n<p>208416 rows × 2 columns</p>\n</div>"
+     },
+     "execution_count": 4,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "\n",
+    "pcl = pickle.load(open(r'C:\\Users\\ptrkm\\data_aisc\\difficulties.pkl','rb'))\n",
+    "labels = pd.read_csv(r'C:\\Users\\ptrkm\\data_aisc\\labels.csv')\n",
+    "\n",
+    "labels"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "                                           names  labels\n",
+      "17      ce0e901f-6071-40c8-a43e-142b242d05cb.jpg       0\n",
+      "32      48ba2642-31da-495e-81b6-9429e56f983b.jpg       0\n",
+      "54      2b0bf191-fca9-43c6-ab93-a69579d280bd.jpg       0\n",
+      "74      555065cc-43a2-40ef-a624-2b5212476972.jpg       0\n",
+      "75      80776db5-3814-4495-8ed9-e93ec9a98ecd.jpg       0\n",
+      "...                                          ...     ...\n",
+      "206834  c9851b01-4518-41de-899b-62d4bf7d1ba6.jpg       0\n",
+      "206952  9e6be42d-f880-48a4-b21f-575b905841cd.jpg       0\n",
+      "207218  be21630a-354a-4769-adbe-0990fb1c5198.jpg       0\n",
+      "207338  32291605-bb1a-463a-9ddb-8c7ba990f124.jpg       0\n",
+      "208303  829651a4-43cd-43e1-b741-9885532ff9e8.jpg       0\n",
+      "\n",
+      "[2404 rows x 2 columns]\n",
+      "40635\n"
+     ]
+    }
+   ],
+   "source": [
+    "labels = labels.drop_duplicates(['names'])\n",
+    "print(labels[labels['labels'] == 0])\n",
+    "print(len(labels))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "40967\n"
+     ]
+    }
+   ],
+   "source": [
+    "label_pcl = {name: lab for name, lab in zip(labels['names'], labels['labels'])}\n",
+    "print(len(pcl))"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[ 334. 1103.   74.   19.  285.  150.  140.   69.]\n"
+     ]
+    }
+   ],
+   "source": [
+    "dist = np.zeros((8, ))\n",
+    "for key, val in pcl.items():\n",
+    "    if val != -1:\n",
+    "        if key in label_pcl:\n",
+    "            dist[label_pcl[key]] += 1\n",
+    "\n",
+    "print(dist)\n",
+    "dist = dist/sum(dist)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "outputs": [],
+   "source": [
+    "pcl = {key: val for key, val in pcl.items() if key in label_pcl}"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 49,
+   "outputs": [],
+   "source": [
+    "lab_and_diff = np.array([[val, label_pcl[key], val != -1, idx] for idx, (key, val) in enumerate(pcl.items())])\n",
+    "names = np.array([[key, val, idx] for idx, (key, val) in enumerate(pcl.items())])"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "source": [],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "execution_count": 53,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[False False  True False False False False False False False]\n"
+     ]
+    }
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 63,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Done\n"
+     ]
+    }
+   ],
+   "source": [
+    "def split_class(arr, c):\n",
+    "    relevant_ones = arr[arr[:, 1] == c]\n",
+    "    sample = np.random.permutation(relevant_ones[:, 3].ravel())\n",
+    "\n",
+    "    return np.array_split(sample.astype(int), 5)\n",
+    "\n",
+    "\n",
+    "splits = {f'split_{i}': {'train': [], 'val': []} for i in range(5)}\n",
+    "\n",
+    "for c in range(8):\n",
+    "\n",
+    "    splitted_class = split_class(lab_and_diff, c)\n",
+    "    for idx in range(len(splitted_class)):\n",
+    "        train = []\n",
+    "        for i, s in enumerate(splitted_class):\n",
+    "            if i != idx:\n",
+    "                train += list(s)\n",
+    "\n",
+    "        val = list(splitted_class[idx])\n",
+    "\n",
+    "        splits[f'split_{idx}']['train'] += train\n",
+    "        splits[f'split_{idx}']['val'] += val\n",
+    "\n",
+    "\n",
+    "\n",
+    "print(\"Done\")\n",
+    "\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 65,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "len train =  32507\n",
+      "len val =  8128\n",
+      "len diff =  0\n",
+      "len train =  32507\n",
+      "len val =  8128\n",
+      "len diff =  0\n",
+      "len train =  32508\n",
+      "len val =  8127\n",
+      "len diff =  0\n",
+      "len train =  32508\n",
+      "len val =  8127\n",
+      "len diff =  0\n",
+      "len train =  32510\n",
+      "len val =  8125\n",
+      "len diff =  0\n"
+     ]
+    }
+   ],
+   "source": [
+    "for key, val in splits.items():\n",
+    "    print(\"len train = \", len(val['train']))\n",
+    "    print(\"len val = \", len(val['val']))\n",
+    "    print(\"len diff = \", len(set(val['train']).intersection(set(val['val']))))\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 66,
+   "outputs": [],
+   "source": [
+    "ac_splits = {f'split_{i}': {'train': [], 'val': []} for i in range(5)}\n",
+    "for key, val in splits.items():\n",
+    "    for idx in val['train']:\n",
+    "        ac_splits[key]['train'].append(names[idx, 0])\n",
+    "    for idx in val['val']:\n",
+    "        ac_splits[key]['val'].append(names[idx, 0])"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 68,
+   "outputs": [],
+   "source": [
+    "ac_splits\n",
+    "with open(r'C:\\Users\\ptrkm\\data_aisc\\splits.pkl', 'wb') as handle:\n",
+    "    pickle.dump(ac_splits, handle, protocol=pickle.HIGHEST_PROTOCOL)\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   }
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\ No newline at end of file
diff --git a/embeddings_and_difficulty/misc/read_configs.py b/embeddings_and_difficulty/misc/read_configs.py
index 94330d225af005881770b969351128a56cc8514d..5b4c25126dce574db333995bd4d82114cec85f0c 100644
--- a/embeddings_and_difficulty/misc/read_configs.py
+++ b/embeddings_and_difficulty/misc/read_configs.py
@@ -1,86 +1,97 @@
-import yaml
-from argparse import Namespace
-import os
-from Embeddings.New_Embeddings.models.DEMD import Demd
-from Embeddings.New_Embeddings.dataloaders.AISC import AISC
-import savers_and_loaders
-from Embeddings.New_Embeddings.optimizers import get_optimizer
-from Embeddings.New_Embeddings.losses import losses_head, losses_backbone
-
-def read_yaml(path, return_namespace=False):
-    """
-
-    :param path: (str) absolute path to the yaml file
-    :return: Namespace object of the yaml file
-    """
-
-    with open(path, 'r') as f:
-        file = yaml.safe_load(f)
-    if return_namespace:
-        file = Namespace(**file)
-    return file
-
-
-def get_dataloader_from_args(args, path):
-    augmentation_args = read_yaml(path)
-    augmentation_args = {'name': args.AUGMENTATION.NAME, 'vals': augmentation_args}
-    dataset_params = args.DATA
-    dataset_params.data_augmentation = Namespace(augmentation_args)
-    dataloader = AISC(dataset_params)
-    return dataloader
-
-def get_model_from_args(args):
-
-    network = Demd(args.NETWORK)
-    network, score_keeper = savers_and_loaders.find_latest_network(network, args)
-
-    return network, score_keeper
-
-def get_optimizer_from_args(model, args, path):
-
-    optimizer_args = read_yaml(path, return_namespace=True)
-    optimizer_args = optimizer_args.args.OPTIMIZER
-    optimizer_args.NAME = args.OPTIMIZER
-
-    return get_optimizer(optimizer_args, model)
-
-
-def get_losses_from_args(args, paths):
-
-    backbone_loss_args = read_yaml(paths[0], return_namespace=False)
-    header_loss_args = read_yaml(paths[1], return_namespace=False)
-
-    header_loss = losses_head.get_loss(args.TRAINING.HEAD.LOSS, header_loss_args)
-    backbone_loss = losses_backbone.get_loss(args.TRAINING.BACKBONE.LOSS, backbone_loss_args)
-
-    return (backbone_loss, header_loss)
-
-
-def read_main_config(path):
-
-    if os.path.isfile(path):
-        general_args = read_yaml(path, return_namespace=True)
-    else:
-        raise ValueError("The path entered for the general config file is not valid on this device")
-
-    config_paths = os.path.join(os.path.dirname(path), 'configs')
-    augmentation_arguments_path = os.path.join(config_paths, general_args.AUGMENTATION.CONFIG)
-    optimizer_arguments_path = os.path.join(os.path.join(config_paths, 'config_optimizers'), 'config_optim.yaml')
-    dataloader = get_dataloader_from_args(general_args, augmentation_arguments_path)
-    model, score_keeper = get_model_from_args(general_args)
-    optimizer = get_optimizer_from_args(model, general_args, optimizer_arguments_path)
-    loss_argument_paths = [
-        os.path.join(os.path.join(config_paths, 'config_losses'), file)
-        for file in ('backbone_losses.yaml', 'header_losses.yaml')
-    ]
-    losses = get_losses_from_args(general_args, loss_argument_paths)
-
-    return model, optimizer, dataloader, score_keeper, losses, general_args
-
-
-
-
-
-
-
-
+import yaml
+from argparse import Namespace
+import os
+from models.DEMD import Demd
+from dataloaders.AISC import AISC
+from misc import savers_and_loaders
+from optimizers import get_optimizer
+from losses import losses_head, losses_backbone
+from torch.utils.data import DataLoader
+
+def read_yaml(path, return_namespace=False):
+    """
+
+    :param path: (str) absolute path to the yaml file
+    :return: Namespace object of the yaml file
+    """
+
+    with open(path, 'r') as f:
+        file = yaml.safe_load(f)
+    if return_namespace:
+        for key, val in file.items():
+            if type(val) is dict:
+                file[key] = Namespace(**val)
+            elif isinstance(val, (tuple, list)):
+                file[key] = list(val)
+        file = Namespace(**file)
+    return file
+
+
+def get_dataloader_from_args(args, path):
+    augmentation_args = read_yaml(path)
+    augmentation_args = {'name': args.AUGMENTATION.NAME, 'vals': augmentation_args}
+    dataset_params = args.DATA
+    dataset_params.data_augmentation = Namespace(**augmentation_args)
+    dataloader = AISC(dataset_params)
+    dataloader = DataLoader(dataloader, batch_size=args.TRAIN.BATCH_SIZE, num_workers=args.DATA_LOADER.NUM_WORKERS,
+                            pin_memory=args.DATA_LOADER.PIN_MEMORY)
+    return dataloader
+
+def get_model_from_args(args):
+
+    args.NETWORK.BACKBONE = Namespace(**args.NETWORK.BACKBONE)
+    args.NETWORK.HEAD = Namespace(**args.NETWORK.HEAD)
+    network = Demd(args.NETWORK)
+    network, score_keeper = savers_and_loaders.find_latest_network(network, args)
+
+    return network, score_keeper
+
+def get_optimizer_from_args(model, args, path):
+
+    optimizer_args = read_yaml(path, return_namespace=True)
+    optimizer_args.NAME = args.SOLVER.OPTIMIZER
+
+    return get_optimizer(optimizer_args, model)
+
+
+def get_losses_from_args(args, paths):
+
+    backbone_loss_args = read_yaml(paths[0], return_namespace=True)
+    header_loss_args = read_yaml(paths[1], return_namespace=True)
+
+    header_loss = losses_head.get_loss(args.TRAINING.HEAD.LOSS, header_loss_args)
+    backbone_loss = losses_backbone.get_loss(args.TRAINING.BACKBONE.LOSS, backbone_loss_args)
+
+    return (backbone_loss, header_loss)
+
+
+def read_main_config(path):
+
+    if os.path.isfile(path):
+        general_args = read_yaml(path, return_namespace=True)
+    else:
+        raise ValueError("The path entered for the general config file is not valid on this device")
+
+    config_paths = os.path.dirname(path)
+    augmentation_arguments_path = os.path.join(config_paths, general_args.AUGMENTATION.CONFIG)
+
+    optimizer_arguments_path = os.path.join(os.path.join(config_paths, 'config_optimizers'), 'config_optim.yaml')
+    dataloader = get_dataloader_from_args(general_args, augmentation_arguments_path)
+
+    model, score_keeper = get_model_from_args(general_args)
+    optimizer = get_optimizer_from_args(model, general_args, optimizer_arguments_path)
+    loss_argument_paths = [
+        os.path.join(os.path.join(config_paths, 'config_losses'), file)
+        for file in ('backbone_losses.yaml', 'header_losses.yaml')
+    ]
+    losses = get_losses_from_args(general_args, loss_argument_paths)
+
+    return model, optimizer, dataloader, score_keeper, losses, general_args
+
+
+
+
+
+
+
+
diff --git a/embeddings_and_difficulty/misc/savers_and_loaders.py b/embeddings_and_difficulty/misc/savers_and_loaders.py
index e8caac79c6f59bb8fae18c926de172386d6dccf7..d05e5f0e14812cc4b4176c9baf06991be10f6cf2 100644
--- a/embeddings_and_difficulty/misc/savers_and_loaders.py
+++ b/embeddings_and_difficulty/misc/savers_and_loaders.py
@@ -1,187 +1,192 @@
-import os
-import numpy as np
-import torch
-import pickle
-from Embeddings.New_Embeddings.misc import accuracy_calculator
-
-def save_trained(network, score_keeper, args):
-    """
-
-    :param network: (torch network) to be saved as pt file
-    :param args: (args.Namespace) The general arguments for running the script, where args.OUTPUT_DIR exists
-    :return: (None) but saves the network onto the output folder, with the desired name
-    """
-
-    if network.device() != 'cpu':
-        network = network.cpu()
-
-    if os.path.exists(args.OUTPUT_DIR):
-
-        torch.save(network.state_dict(),
-                   os.path.join(args.OUTPUT_DIR, args.SAVE_NAME + ".pt"))
-        score_keeper_path = os.path.join(args.OUTPUT_DIR, 'score_keepers.pkl')
-        scp = pickle.load(open(score_keeper_path, 'rb')) if os.path.exists(score_keeper_path) else {}
-        scp[os.path.join(args.OUTPUT_DIR, args.SAVE_NAME)] = score_keeper
-        with open(score_keeper_path, 'wb') as handle:
-            pickle.dump(scp, handle, protocol=pickle.HIGHEST_PROTOCOL)
-
-    return None
-
-
-def load_network(network, args, direct_path=None):
-    """
-    Function to load already trained network. It will raise an error if there is not a network at the location
-    args.OUTPUT_DIR/args.SAVE_NAME+.pt
-    :param network: An instance of the same network class as the one being looked for
-    :param args: The general args in config "General"
-    :return: The loaded network
-    """
-
-    if direct_path is not None:
-        network.load_state_dict(torch.load(direct_path))
-    else:
-        if os.path.exists(path := os.path.join(args.OUTPUT_DIR, args.SAVE_NAME + ".pt")):
-            network.load_state_dict(torch.load(path))
-
-    return network
-
-
-def find_latest_network(network, args):
-    """
-    Function to find the latest network and restart training with it, it returns the network and the number of epochs
-    left for training
-    :param network: an instance of the model class
-    :param args: The general args in config "General"
-    :return: (nn.Module, int) (loaded_network, num_epochs_left)
-    """
-    early_stop_patience = (args.NETWORK.BACKONE.EARLY_STOP_PATIENCE,
-                           args.NETWORK.HEAD.EARLY_STOP_PATIENCE,
-                           args.NETWORK.COMBINED.EARLY_STOP_PATIENCE)
-
-    if ((file := args.NETWORK.PATH_TO_SAVED is not None) and
-            (scp := os.path.exists(os.path.join(os.path.dirname(file), 'score_keepers.pkl')))):
-            score_keeper = pickle.load(open(scp, 'rb'))
-            if args.NETWORK.PATH_TO_SAVED in score_keeper:
-                score_keeper = score_keeper[args.NETWORK.PATH_TO_SAVED]
-                network = load_network(network, args, direct_path=file)
-    else:
-        print("Did not find a previously trained network, moving on with untrained")
-        score_keeper = ScoreKeeper(early_stop_patience, is_training='backbone')
-
-    score_keeper.make_score_functions(args)
-    return network, score_keeper
-
-
-def save_difficulty_results(difficulties, score_keeper, files, args):
-    if isinstance(difficulties, torch.Tensor):
-        if difficulties.device.type != 'cpu':
-            difficulties = difficulties.cpu()
-
-    name = os.path.join(
-        args.OUTPUT_DIR,
-        f"difficulties_{score_keeper.number_of_epochs_trained}_and_score_{score_keeper.score}.pkl"
-    )
-
-    pcl = {'difficulties': difficulties, 'names': files}
-    with open(name, 'wb') as handle:
-        pickle.dump(pcl, handle, protocol=pickle.HIGHEST_PROTOCOL)
-
-
-def save_embedding_results(embeddings, score_keeper, files, args):
-    if isinstance(embeddings, torch.Tensor):
-        if embeddings.device.type != 'cpu':
-            embeddings = embeddings.cpu()
-
-    name = os.path.join(
-        args.OUTPUT_DIR,
-        f"embeddings_{score_keeper.number_of_epochs_trained}_and_score_{score_keeper.score}.pkl"
-    )
-
-    pcl = {'embeddings': embeddings, 'names': files}
-    with open(name, 'wb') as handle:
-        pickle.dump(pcl, handle, protocol=pickle.HIGHEST_PROTOCOL)
-
-def save_all_scores(score_keeper, embeddings, est_difficulties, difficulties, files, labels, args):
-    from datetime import datetime
-    now = datetime.now()
-    date_time = now.strftime("%H_%M_%d_%m_%Y")
-    results = {
-        'accuracy_metric': {
-            'backbone': score_keeper.score_tuple[0],
-            'head': score_keeper.score_tuple[1],
-            'combined': score_keeper.score
-        },
-        'embeddings': embeddings,
-        'est_difficulties': est_difficulties,
-        'names': [os.path.basename(file) for file in files],
-        'labels': labels,
-        'difficulties': difficulties
-    }
-
-    save_path = os.path.join(args.OUTPUT_DIR, f'results_at_{date_time}_for_{args.SAVE_NAME}.pcl')
-
-    with open(save_path, 'wb') as handle:
-        pickle.dump(results, handle, protocol=pickle.HIGHEST_PROTOCOL)
-
-
-class ScoreKeeper:
-    def __init__(self, early_stop_patience, is_training, backbone_score_func = None, head_score_func = None):
-        self.scores = {
-            'backbone': 0,
-            'head': 0,
-            'combined': 0
-        }
-        self.next_training = {
-            'backbone': 'head',
-            'head': 'mixed',
-            'combined': None
-        }
-        self.number_of_epochs_trained = {
-            'backbone': 0,
-            'head': 0,
-            'combined': 0
-        }
-
-        self.early_stop_patience = {
-            'backbone': early_stop_patience[0],
-            'head': early_stop_patience[1],
-            'combined': early_stop_patience[2]
-        }
-
-        self.is_training = is_training
-        self.memory_keeper = 0
-        self.breaker = False
-
-        self.backbone_score_func = backbone_score_func
-        self.head_score_func = head_score_func
-        self.score_tuple = (0,0)
-
-    def make_score_functions(self, args):
-        self.backbone_score_func, self.head_score_func = accuracy_calculator.get_accuracy_methods(args)
-
-    def calculate_score(self, embeddings, est_difficulties, labels, difficulties):
-
-        if self.is_training == 'backbone':
-            return self.backbone_score_func(embeddings, labels)
-        elif self.is_training == 'head':
-            return self.head_score_func(est_difficulties, difficulties)
-        else:
-            self.score_tuple = (
-                self.backbone_score_func(embeddings, labels),
-                self.head_score_func(est_difficulties, difficulties)
-            )
-            return np.sum(self.score_tuple)
-
-    def __call__(self, new_score):
-        if new_score > self.score:
-            self.score = new_score
-            self.memory_keeper = 0
-        else:
-            self.memory_keeper += 1
-
-        if self.memory_keeper >= self.early_stop_patience[self.is_training]:
-            self.is_training = self.next_training[self.is_training]
-            self.memory_keeper = 0
-            return True
-        else:
-            return False
+import os
+import numpy as np
+import torch
+import pickle
+from misc import accuracy_calculator
+from argparse import Namespace
+
def save_trained(network, score_keeper, args):
    """Save a trained network and its score keeper under ``args.OUTPUT_DIR``.

    The network is moved to CPU, its state dict is written to
    ``args.SAVE_NAME + ".pt"``, and ``score_keeper`` is recorded in the shared
    ``score_keepers.pkl`` registry keyed by the save path.

    :param network: (torch network) to be saved as pt file
    :param score_keeper: object stored in the score-keeper registry
    :param args: (args.Namespace) general arguments; must provide OUTPUT_DIR
        and SAVE_NAME
    :return: (None) silently does nothing if OUTPUT_DIR does not exist
    """
    # NOTE(review): nn.Module has no ``device()`` method — this assumes the
    # network class defines one (Demd in this package does not); confirm.
    if network.device() != 'cpu':
        network = network.cpu()

    if os.path.exists(args.OUTPUT_DIR):
        torch.save(network.state_dict(),
                   os.path.join(args.OUTPUT_DIR, args.SAVE_NAME + ".pt"))

        # Update the registry, closing the read handle that the previous
        # implementation (``pickle.load(open(...))``) leaked.
        score_keeper_path = os.path.join(args.OUTPUT_DIR, 'score_keepers.pkl')
        if os.path.exists(score_keeper_path):
            with open(score_keeper_path, 'rb') as handle:
                scp = pickle.load(handle)
        else:
            scp = {}
        scp[os.path.join(args.OUTPUT_DIR, args.SAVE_NAME)] = score_keeper
        with open(score_keeper_path, 'wb') as handle:
            pickle.dump(scp, handle, protocol=pickle.HIGHEST_PROTOCOL)

    return None
+
+
def load_network(network, args, direct_path=None):
    """Load previously trained weights into ``network``.

    If ``direct_path`` is given, weights are loaded from there unconditionally;
    otherwise ``args.OUTPUT_DIR/args.SAVE_NAME + ".pt"`` is tried and skipped
    silently when no such file exists.

    :param network: an instance of the same network class as the saved one
    :param args: the general args from config "General"
    :param direct_path: optional explicit checkpoint path
    :return: the network, with weights loaded when a checkpoint was found
    """
    if direct_path is None:
        candidate = os.path.join(args.OUTPUT_DIR, args.SAVE_NAME + ".pt")
        if os.path.exists(candidate):
            network.load_state_dict(torch.load(candidate))
    else:
        network.load_state_dict(torch.load(direct_path))

    return network
+
+
def find_latest_network(network, args):
    """Restore the latest saved network and its score keeper, if they exist.

    Falls back to the untrained ``network`` and a fresh ``ScoreKeeper`` when
    no checkpoint (or no matching registry entry) is found.

    :param network: an instance of the model class
    :param args: the general args from config "General"
    :return: (nn.Module, ScoreKeeper)
    """

    # Promote the raw TRAINING sub-dicts to namespaces so attribute access
    # works below.  NOTE(review): assumes each value is still a plain dict —
    # calling this twice on the same args would fail; confirm caller usage.
    for key, val in args.TRAINING.__dict__.items():
        args.TRAINING.__dict__[key] = Namespace(**val)

    early_stop_patience = (args.TRAINING.BACKBONE.EARLY_STOP_PATIENCE,
                           args.TRAINING.HEAD.EARLY_STOP_PATIENCE,
                           args.TRAINING.COMBINED.EARLY_STOP_PATIENCE)

    score_keeper = None
    file = args.NETWORK.PATH_TO_SAVED
    if file is not None:
        registry_path = os.path.join(os.path.dirname(file), 'score_keepers.pkl')
        if os.path.exists(registry_path):
            # Close the handle explicitly (previously leaked via open() inline).
            with open(registry_path, 'rb') as handle:
                registry = pickle.load(handle)
            if file in registry:
                score_keeper = registry[file]
                network = load_network(network, args, direct_path=file)

    # Bug fix: a registry without a matching entry previously left
    # ``score_keeper`` bound to the raw registry dict; every miss now falls
    # through to a fresh keeper.
    if score_keeper is None:
        print("Did not find a previously trained network, moving on with untrained")
        score_keeper = ScoreKeeper(early_stop_patience, is_training='backbone')

    score_keeper.make_score_functions(args)
    return network, score_keeper
+
+
def save_difficulty_results(difficulties, score_keeper, files, args):
    """Pickle estimated difficulties together with their file names.

    The output name encodes the epochs trained and current score taken from
    ``score_keeper``; tensors are moved to CPU before serialisation.
    """
    if isinstance(difficulties, torch.Tensor) and difficulties.device.type != 'cpu':
        difficulties = difficulties.cpu()

    out_path = os.path.join(
        args.OUTPUT_DIR,
        f"difficulties_{score_keeper.number_of_epochs_trained}_and_score_{score_keeper.score}.pkl"
    )

    payload = {'difficulties': difficulties, 'names': files}
    with open(out_path, 'wb') as handle:
        pickle.dump(payload, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+
def save_embedding_results(embeddings, score_keeper, files, args):
    """Pickle computed embeddings together with their file names.

    The output name encodes the epochs trained and current score taken from
    ``score_keeper``; tensors are moved to CPU before serialisation.
    """
    if isinstance(embeddings, torch.Tensor) and embeddings.device.type != 'cpu':
        embeddings = embeddings.cpu()

    out_path = os.path.join(
        args.OUTPUT_DIR,
        f"embeddings_{score_keeper.number_of_epochs_trained}_and_score_{score_keeper.score}.pkl"
    )

    payload = {'embeddings': embeddings, 'names': files}
    with open(out_path, 'wb') as handle:
        pickle.dump(payload, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
def save_all_scores(score_keeper, embeddings, est_difficulties, difficulties, files, labels, args):
    """Dump one timestamped pickle with every result of a finished run.

    Stores the per-part accuracy metrics from ``score_keeper`` alongside the
    embeddings, difficulty estimates/targets, labels, and file base names.
    """
    from datetime import datetime

    stamp = datetime.now().strftime("%H_%M_%d_%m_%Y")
    backbone_score, head_score = score_keeper.score_tuple
    results = {
        'accuracy_metric': {
            'backbone': backbone_score,
            'head': head_score,
            'combined': score_keeper.score
        },
        'embeddings': embeddings,
        'est_difficulties': est_difficulties,
        'names': [os.path.basename(file) for file in files],
        'labels': labels,
        'difficulties': difficulties
    }

    save_path = os.path.join(args.OUTPUT_DIR, f'results_at_{stamp}_for_{args.SAVE_NAME}.pcl')
    with open(save_path, 'wb') as handle:
        pickle.dump(results, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+
class ScoreKeeper:
    """Tracks best scores and early stopping across the three training phases.

    Phases advance ``backbone -> head -> mixed``. The final joint phase is
    keyed 'combined' in the per-phase dicts, with a 'mixed' alias because the
    trainer compares ``is_training`` against that name.
    """

    def __init__(self, early_stop_patience, is_training, backbone_score_func=None, head_score_func=None):
        """
        :param early_stop_patience: (backbone, head, combined) patience values
        :param is_training: phase to start in ('backbone', 'head' or 'mixed')
        :param backbone_score_func: callable(embeddings, labels) -> score
        :param head_score_func: callable(est_difficulties, difficulties) -> score
        """
        self.scores = {
            'backbone': 0,
            'head': 0,
            'combined': 0
        }
        # Phase transition table; 'mixed' is the last phase, after it training stops.
        self.next_training = {
            'backbone': 'head',
            'head': 'mixed',
            'mixed': None,
            'combined': None
        }
        self.number_of_epochs_trained = {
            'backbone': 0,
            'head': 0,
            'combined': 0
        }

        self.early_stop_patience = {
            'backbone': early_stop_patience[0],
            'head': early_stop_patience[1],
            'combined': early_stop_patience[2],
            # Bug fix: after 'head' the phase becomes 'mixed', which used to
            # raise KeyError here; it shares the combined-phase patience.
            'mixed': early_stop_patience[2]
        }

        self.is_training = is_training
        self.memory_keeper = 0   # epochs since the last improvement
        self.breaker = False

        self.backbone_score_func = backbone_score_func
        self.head_score_func = head_score_func
        self.score_tuple = (0, 0)
        # Bug fix: ``score`` is read by __call__ and by the save_* helpers but
        # was never initialised.
        self.score = 0

    def make_score_functions(self, args):
        """Instantiate the backbone/head accuracy functions from the config."""
        self.backbone_score_func, self.head_score_func = accuracy_calculator.get_accuracy_methods(args)

    def calculate_score(self, embeddings, est_difficulties, labels, difficulties):
        """Score the current phase: backbone accuracy, head accuracy, or
        (in the joint phase) the sum of both, cached in ``score_tuple``."""
        if self.is_training == 'backbone':
            return self.backbone_score_func(embeddings, labels)
        elif self.is_training == 'head':
            return self.head_score_func(est_difficulties, difficulties)
        else:
            self.score_tuple = (
                self.backbone_score_func(embeddings, labels),
                self.head_score_func(est_difficulties, difficulties)
            )
            return np.sum(self.score_tuple)

    def __call__(self, new_score):
        """Register a new validation score.

        :return: True when patience for the current phase is exhausted (and the
            keeper has advanced to the next phase), False otherwise
        """
        if new_score > self.score:
            self.score = new_score
            self.memory_keeper = 0
        else:
            self.memory_keeper += 1

        if self.memory_keeper >= self.early_stop_patience[self.is_training]:
            self.is_training = self.next_training[self.is_training]
            self.memory_keeper = 0
            return True
        else:
            return False
diff --git a/embeddings_and_difficulty/models/DEMD.py b/embeddings_and_difficulty/models/DEMD.py
index b7523550dfa6a9211c591ccd31f5ab29dffd71e9..71fc1d07934fde06cb9e281f3e0a0d4e6f5c5095 100644
--- a/embeddings_and_difficulty/models/DEMD.py
+++ b/embeddings_and_difficulty/models/DEMD.py
@@ -1,95 +1,96 @@
-
-import torch.nn as nn
-import numpy as np
-import torch
-import os
-import pretrained_models_getter as pmg
-
-
-activations = {'sigmoid': nn.Sigmoid()}
-class Demd(nn.Module):
-
-    def __init__(self, args):
-        """
-        The network subgroup of arguments from config general.yaml
-        :param args: (args.Namespace)
-        """
-
-        self.embedding_dimension = args.BACKBONE.OUTPUT_DIM
-        self.head_structure = args.HEAD.STRUCTURE
-        self.batch_norm_struct = args.HEAD.BATCH_NORM_STRUCTURE
-        self.freeze_affine_batchnorm = args.BACKBONE.FREEZE_AFFINE_BATCHNORM
-        self.freeze_full_batchnorm = args.BACKBONE.FREEZE_BATCHNORM
-        if self.head_structure[0] != self.embedding_dimension:
-            self.head_structure = [self.embedding_dimension] + self.head_structure
-
-        self.backbone = pmg.load_pretrained_model(args)
-        self.head = []
-
-        for inp, out, batch_norm in zip(self.head_structure[:-1], self.head_structure[1:], self.batch_norm_struct):
-            self.head.append(nn.Linear(inp, out))
-            self.head.append(nn.ReLU())
-            if batch_norm:
-                self.head.append(nn.BatchNorm1d(num_features=out))
-
-        self.head.pop()
-        if args.HEAD.ACTIVATION in activations:
-            self.head.append(activations[args.HEAD.ACTIVATION])
-        else:
-            raise NotImplementedError(f"{args.HEAD.ACTIVATION} is not implemented yet, only {activations.keys()} are")
-
-        self.head = nn.Sequential(*self.head)
-
-    def freeze(self, part):
-        """
-        Function to freeze specified "part" of network, this is used to ease training, such that we may first train
-        the backbone and subsequently the head and finally only for a few epochs the full network.
-        Batchnorm parameters can be freezed fully, such that they do not output anything new or can only be freezed so
-        they don't take a gradient. This is done to decrease overfitting
-        :param part: (str) defining which part of the network will be freezed, see below for the different possibilities
-        :return: None
-        Note, does not raise an error if called for (part) not in
-                                                    ['backbone', 'head', 'unfreeze', 'batchnorm','batchnorm_affine']
-        Instead it does nothing and gives an error.
-        """
-        if part == 'backbone':
-            for param in self.backbone.parameters():
-                param.requires_grad = False
-            for param in self.head.parameters():
-                param.requires_grad = True
-
-        elif part == 'head':
-            for param in self.backbone.parameters():
-                param.requires_grad = True
-            for param in self.head.parameters():
-                param.requires_grad = False
-
-        elif part == 'unfreeze':
-            for param in self.parameters():
-                param.requires_grad = True
-                if self.freeze_full_batchnorm:
-                    self.freeze(part='batchnorm')
-                elif not self.freeze_full_batchnorm and self.freeze_affine_batchnorm:
-                    self.freeze(part='batchnorm_affine')
-
-        elif part == 'batchnorm':
-            for name, child in self.backbone.named_children():
-                if isinstance(child, nn.BatchNorm2d):
-                    child.eval()
-
-        elif part == 'batchnorm_affine':
-            for name, child in self.backbone.named_children():
-                if isinstance(child, nn.BatchNorm2d):
-                    for param in child.parameters():
-                        param.requires_grad = False
-
-        else:
-            UserWarning(f"This model does not have a part called {part}")
-
-
-
-    def forward(self, x):
-        embedding = self.backbone(x)
-        prediction = self.head(embedding)
-        return embedding, prediction
-
+
+import torch.nn as nn
+import numpy as np
+import torch
+import os
+from models import pretrained_models_getter as pmg
+
+
activations = {'sigmoid': nn.Sigmoid()}


class Demd(nn.Module):
    """Embedding backbone plus an MLP head predicting sample difficulty.

    ``forward`` returns ``(embedding, difficulty_prediction)``.
    """

    def __init__(self, args):
        """
        :param args: (args.Namespace) the network subgroup of arguments from
            config general.yaml, providing BACKBONE.* and HEAD.* settings
        """
        super(Demd, self).__init__()

        self.embedding_dimension = args.BACKBONE.OUTPUT_DIM
        self.head_structure = args.HEAD.STRUCTURE
        self.batch_norm_struct = args.HEAD.BATCH_NORM_STRUCTURE
        # Bug fix: this attribute was commented out but is still read in
        # freeze(); default to False when the config does not define it.
        self.freeze_affine_batchnorm = getattr(args.BACKBONE, 'FREEZE_AFFINE_BATCHNORM', False)
        self.freeze_full_batchnorm = args.BACKBONE.FREEZE_BATCHNORM

        # Ensure the head's first layer matches the embedding dimension.
        if self.head_structure[0] != self.embedding_dimension:
            self.head_structure = [self.embedding_dimension] + self.head_structure

        self.backbone = pmg.load_pretrained_model(args)

        head_layers = []
        for inp, out, batch_norm in zip(self.head_structure[:-1], self.head_structure[1:], self.batch_norm_struct):
            head_layers.append(nn.Linear(inp, out))
            head_layers.append(nn.ReLU())
            if batch_norm:
                head_layers.append(nn.BatchNorm1d(num_features=out))

        # Drop the last appended layer before attaching the output activation.
        # NOTE(review): when the final layer uses batch norm this removes the
        # BatchNorm1d rather than the trailing ReLU — confirm that is intended.
        head_layers.pop()
        if args.HEAD.ACTIVATION in activations:
            head_layers.append(activations[args.HEAD.ACTIVATION])
        else:
            raise NotImplementedError(f"{args.HEAD.ACTIVATION} is not implemented yet, only {activations.keys()} are")

        self.head = nn.Sequential(*head_layers)

    def freeze(self, part):
        """
        Function to freeze specified "part" of network, this is used to ease training, such that we may first train
        the backbone and subsequently the head and finally only for a few epochs the full network.
        Batchnorm parameters can be freezed fully, such that they do not output anything new or can only be freezed so
        they don't take a gradient. This is done to decrease overfitting
        :param part: (str) defining which part of the network will be freezed, see below for the different possibilities
        :return: None
        Note, does not raise an error if called for (part) not in
                                                    ['backbone', 'head', 'unfreeze', 'batchnorm','batchnorm_affine']
        Instead it does nothing and emits a warning.
        """
        if part == 'backbone':
            for param in self.backbone.parameters():
                param.requires_grad = False
            for param in self.head.parameters():
                param.requires_grad = True

        elif part == 'head':
            for param in self.backbone.parameters():
                param.requires_grad = True
            for param in self.head.parameters():
                param.requires_grad = False

        elif part == 'unfreeze':
            for param in self.parameters():
                param.requires_grad = True
            # Bug fix: the batch-norm re-freeze used to run once per parameter
            # inside the loop above; apply it a single time afterwards.
            if self.freeze_full_batchnorm:
                self.freeze(part='batchnorm')
            elif self.freeze_affine_batchnorm:
                self.freeze(part='batchnorm_affine')

        elif part == 'batchnorm':
            # modules() (not named_children) so nested BatchNorm2d layers
            # inside backbone sub-blocks are reached as well.
            for child in self.backbone.modules():
                if isinstance(child, nn.BatchNorm2d):
                    child.eval()

        elif part == 'batchnorm_affine':
            for child in self.backbone.modules():
                if isinstance(child, nn.BatchNorm2d):
                    for param in child.parameters():
                        param.requires_grad = False

        else:
            # Bug fix: the bare ``UserWarning(...)`` expression did nothing.
            import warnings
            warnings.warn(f"This model does not have a part called {part}")

    def forward(self, x):
        """Return ``(embedding, difficulty_prediction)`` for a batch ``x``."""
        embedding = self.backbone(x)
        prediction = self.head(embedding)
        return embedding, prediction
+
diff --git a/embeddings_and_difficulty/models/__pycache__/DEMD.cpython-38.pyc b/embeddings_and_difficulty/models/__pycache__/DEMD.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f4107e290b956b514a530dde5899efb14ea01120
Binary files /dev/null and b/embeddings_and_difficulty/models/__pycache__/DEMD.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/models/__pycache__/pretrained_models_getter.cpython-38.pyc b/embeddings_and_difficulty/models/__pycache__/pretrained_models_getter.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eaa4d9a8fed397c5057d9e6d4ab2f10bff4119db
Binary files /dev/null and b/embeddings_and_difficulty/models/__pycache__/pretrained_models_getter.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/models/pretrained_models_getter.py b/embeddings_and_difficulty/models/pretrained_models_getter.py
index ca77a5179bbcf25495d4f465686e8e0b999b2643..78f501dfe08f79c263ed7bb5bee8933e4781b1af 100644
--- a/embeddings_and_difficulty/models/pretrained_models_getter.py
+++ b/embeddings_and_difficulty/models/pretrained_models_getter.py
@@ -1,41 +1,41 @@
-import pretrainedmodels
-import torch
-import os
-from efficientnet_pytorch import EfficientNet
-from torchvision import models
-import re
-import torch.nn as nn
-
-resnet_models = {'50': models.resnet50,
-                '101': models.resnet101,
-                '152': models.resnet152}
-
-def load_pretrained_model(args):
-    if args.NETWORK.NAME.split("-")[0] == "efficientnet":
-        model = EfficientNet.from_pretrained(args.NETWORK.NAME, num_classes=args.NETWORK.BACKBONE_OUTPUT_DIM)
-
-    elif re.search(r"[a-zA-Z]*", args.NETWORK.NAME).group(0) == 'ResNet':
-        layers = re.search(r"\d+", args.NETWORK.NAME)
-
-        if layers is not None:
-            layers = layers.group(0)
-            if layers in resnet_models:
-                model = resnet_models[layers](pretrained = True)
-                model.fc = nn.Linear(in_features=model.fc.in_features, out_features=args.NETWORK.BACKBONE_OUTPUT_DIM)
-                UserWarning("Loaded network, but last linear layer is untrained")
-            else:
-                raise NotImplementedError(
-                    "ResNet model of depth " + layers + " is not implemented yet, add to resnet_models")
-        else:
-            raise ValueError("You have chosen a ResNet model without specifying the depth")
-    else:
-        raise NotImplementedError(
-            "The loading function is not implemented for other models currently than ResNet or EfficientNet")
-
-    return model
-
-
-
-
-
-
+import pretrainedmodels
+import torch
+import os
+from efficientnet_pytorch import EfficientNet
+from torchvision import models
+import re
+import torch.nn as nn
+
resnet_models = {'50': models.resnet50,
                 '101': models.resnet101,
                 '152': models.resnet152}


def load_pretrained_model(args):
    """Build the pretrained backbone named in ``args.BACKBONE.NAME``.

    Supports ``efficientnet-*`` (via efficientnet_pytorch) and
    ``ResNet<depth>`` (via torchvision); the final classifier layer is resized
    to ``args.BACKBONE.OUTPUT_DIM`` and therefore starts untrained.

    :raises NotImplementedError: for unknown architectures or ResNet depths
    :raises ValueError: for a ResNet name without a depth
    """
    import warnings

    if args.BACKBONE.NAME.split("-")[0] == "efficientnet":
        model = EfficientNet.from_pretrained(args.BACKBONE.NAME, num_classes=args.BACKBONE.OUTPUT_DIM)

    elif re.search(r"[a-zA-Z]*", args.BACKBONE.NAME).group(0) == 'ResNet':
        depth_match = re.search(r"\d+", args.BACKBONE.NAME)
        if depth_match is None:
            raise ValueError("You have chosen a ResNet model without specifying the depth")

        depth = depth_match.group(0)
        if depth not in resnet_models:
            raise NotImplementedError(
                "ResNet model of depth " + depth + " is not implemented yet, add to resnet_models")

        model = resnet_models[depth](pretrained=True)
        model.fc = nn.Linear(in_features=model.fc.in_features, out_features=args.BACKBONE.OUTPUT_DIM)
        # Bug fix: the bare ``UserWarning(...)`` expression was a no-op; warn
        # the user for real.
        warnings.warn("Loaded network, but last linear layer is untrained")
    else:
        raise NotImplementedError(
            "The loading function is not implemented for other models currently than ResNet or EfficientNet")

    return model
+
+
+
+
+
+
diff --git a/embeddings_and_difficulty/optimizers.py b/embeddings_and_difficulty/optimizers.py
index df8a55469970da105872a9c2caeb8c2b17e68d05..b704713f68aefd9e0055dd8e6e1f7bb1a0219609 100644
--- a/embeddings_and_difficulty/optimizers.py
+++ b/embeddings_and_difficulty/optimizers.py
@@ -1,20 +1,21 @@
-
-import torch.optim as optim
-
-
-def stochastic_gradient_descent(model,params):
-    return optim.SGD(model.parameters(), lr = params.lr, momentum=params.momentum)
-
-def Adam(model, params):
-    return optim.Adam(
-        model.parameters(),lr = params.lr, betas = params.betas, eps = params.eps, weight_decay=params.weight_decay
-    )
-
-
-optimizers = {'adam': Adam,
-              'sgd': stochastic_gradient_descent}
-
-def get_optimizer(args, model):
-    return optimizers[args.OPTIMIZER.NAME](model, args.OPTIMIZER.PARAMS)
-
-
+
+import torch.optim as optim
+
+
def stochastic_gradient_descent(model, params):
    """Build a plain SGD optimizer from a ``lr``/``momentum`` param namespace."""
    sgd_kwargs = {'lr': params.lr, 'momentum': params.momentum}
    return optim.SGD(model.parameters(), **sgd_kwargs)
+
def Adam(model, params):
    """Build an Adam optimizer from a config namespace.

    YAML loaders frequently parse scientific-notation numbers (e.g. ``1e-8``)
    as strings, so every numeric hyper-parameter is coerced to float here —
    the original only coerced ``eps``, and a string ``lr`` crashes torch's
    ``lr < 0.0`` validation.
    """
    return optim.Adam(
        model.parameters(),
        lr=float(params.lr),
        betas=tuple(float(b) for b in params.betas),
        eps=float(params.eps),
        weight_decay=float(params.weight_decay),
    )
+
+
# Registry mapping config optimizer names to their builder functions.
optimizers = {'ADAM': Adam,
              'SGD': stochastic_gradient_descent}


def get_optimizer(args, model):
    """Dispatch on ``args.NAME`` and build that optimizer using its own
    hyper-parameter sub-namespace (``args.<NAME>``)."""
    builder = optimizers[args.NAME]
    return builder(model, getattr(args, args.NAME))
+
+
diff --git a/embeddings_and_difficulty/runner.py b/embeddings_and_difficulty/runner.py
index db04cddab06898ec7d9590c706f6bc11949114f4..189deb9af06ed41e5a73a6b82123e6ff7a51a364 100644
--- a/embeddings_and_difficulty/runner.py
+++ b/embeddings_and_difficulty/runner.py
@@ -1,28 +1,32 @@
-import os
-import argparse
-from argparse import Namespace
-from Embeddings.New_Embeddings.trainers import main_trainer as mt
-from Embeddings.New_Embeddings.misc import read_configs, savers_and_loaders, accuracy_calculator
-from Embeddings.New_Embeddings.losses import losses
-
-
-def main_runner(args):
-    model, optimizer, dataloader, score_keeper, loss_tuple, args = read_configs.read_main_config(args.path_to_config)
-    if args.TRAIN.ENABLE:
-        loss = losses.create_loss(loss_tuple, args)
-        model, score_keeper, embeddings, difficulties, labels = mt.train_model(
-            model, optimizer, dataloader, loss, score_keeper, args,
-        )
-
-    if args.TEST.ENABLE:
-        score, embeddings, difficulties, files, labels = mt.run_test_eval(model, dataloader, score_keeper)
-        savers_and_loaders.save_all_results(score, embeddings, difficulties, files, labels, args)
-
-    return None
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(allow_abbrev=False)
-    parser.add_argument("--path_to_config", type=str, default=None)
-    args, _ = parser.parse_known_args()
-    main_runner(args)
+import os
+import argparse
+from argparse import Namespace
+from trainers import main_trainer as mt
+from misc import read_configs, savers_and_loaders, accuracy_calculator
+from losses import losses
+import sys
+
+
def main_runner(args):
    """Entry point: build everything from the YAML config, then train and/or
    evaluate according to the TRAIN.ENABLE / TEST.ENABLE flags.

    :param args: namespace with ``path_to_config``; note that ``args`` is
        rebound to the full config namespace returned by ``read_main_config``.
    :return: None
    """
    model, optimizer, dataloader, score_keeper, loss_tuple, args = read_configs.read_main_config(args.path_to_config)
    if args.TRAIN.ENABLE:
        loss = losses.create_loss(loss_tuple, args)
        model, score_keeper, embeddings, difficulties, labels = mt.train_model(
            model, optimizer, dataloader, loss, score_keeper, args,
        )

    if args.TEST.ENABLE:
        score, embeddings, difficulties, files, labels = mt.run_test_eval(model, dataloader, score_keeper)
        # NOTE(review): savers_and_loaders defines ``save_all_scores`` with a
        # different signature; ``save_all_results`` does not appear to exist —
        # confirm before the TEST path is exercised.
        savers_and_loaders.save_all_results(
            score, embeddings, difficulties, files, labels, args
        )

    return None
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(allow_abbrev=False)
+    parser.add_argument("--path_to_config", type=str, default='configs/general.yaml')
+    args, _ = parser.parse_known_args()
+    os.chdir('embeddings_and_difficulty')
+    main_runner(args)
diff --git a/embeddings_and_difficulty/trainers/__pycache__/main_trainer.cpython-38.pyc b/embeddings_and_difficulty/trainers/__pycache__/main_trainer.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..76b1c23e899e472d8563cad49e607725b8796825
Binary files /dev/null and b/embeddings_and_difficulty/trainers/__pycache__/main_trainer.cpython-38.pyc differ
diff --git a/embeddings_and_difficulty/trainers/main_trainer.py b/embeddings_and_difficulty/trainers/main_trainer.py
index 600c188670d439f14ed375fcb62e4c538c1bb174..dc7bf90e3df4cfbe069c48067d68bee7d031f52d 100644
--- a/embeddings_and_difficulty/trainers/main_trainer.py
+++ b/embeddings_and_difficulty/trainers/main_trainer.py
@@ -1,93 +1,95 @@
-import torch.nn as nn
-import torch
-import numpy
-import os
-from tqdm import tqdm
-from Embeddings.New_Embeddings.misc import accuracy_calculator
-from Embeddings.New_Embeddings.misc import savers_and_loaders
-
-
-def train_model(model, optimizer, dataloader, losses, score_keeper, args):
-    """
-    Function to train a model
-    :type args: Namespace
-    :param model: (nn.Module) of neural network
-    :param optimizer: (torch.optim) with parameters of (model)
-    :param dataloader: dataloader function, yields (image, label, difficulty)
-    :param epochs: (int, int, int) of number of epochs left for training
-    :param score_keeper: object of class (ScoreKeeper), to keep track of early stopping
-    :return: A trained model
-    """
-
-    max_epochs = {'bacbkone': args.TRAINING.BACKBONE.MAX_EPOCH,
-                  'head': args.TRAINING.HEAD.MAX_EPOCH,
-                  'combined': args.TRAINING.COMBINED.MAX_EPOCH}
-    while score_keeper.is_training is not None:
-        if score_keeper.is_training == 'backbone':
-            model.freeze('head')
-        elif score_keeper.is_training == 'head':
-            model.freeze('backbone')
-        elif score_keeper.is_training == 'mixed':
-            model.freeze('unfreeze')
-        epochs = (max_epochs[score_keeper.is_training] -
-                  score_keeper.number_of_epochs_trained[score_keeper.is_training])
-
-        for epoch in range(epochs):
-            for idx, (image, label, difficulty) in enumerate(dataloader):
-                optimizer.zero_grad()
-                embedding, diff = model(image)
-                loss = losses(embedding, diff, score_keeper)
-
-                loss.backward()
-                optimizer.step()
-
-            if epoch % args.SAVE_POINT_PERIOD == 0:
-                savers_and_loaders.save_trained(model, score_keeper, args)
-            if epoch % args.EVAL_PERIOD == 0:
-                score, embeddings, difficulties, files, labels = run_validation_eval(model, dataloader)
-                breaker = score_keeper(score)
-                if breaker:
-                    break
-
-    return model, score_keeper, embeddings, difficulties, labels
-
-
-def run_validation_eval(model, dataloader, score_keeper):
-    """
-    Function to run validation procedure during training
-    :param model: (nn.Module)
-    :param dataloader: (torch.utils.data.DataLoader) dataloader.dataset.mode will be set equal to 'validation', changing
-    that attribute must therefore result in new data being loaded.
-    :return: (float, torch.Tensor, torch.Tensor, list, list) of validation scores, embeddings, difficulties, names of
-    files and their labels. This will result in files being returned as full paths.
-    """
-    dataloader.dataset.mode = 'validation'
-    model.eval()
-    embeddings, est_difficulties, files, labels, difficulties = eval_model(model, dataloader)
-    dataloader.dataset.mode = 'train'
-    score = score_keeper.calculate_score(embeddings, est_difficulties, labels, difficulties)
-    return score, embeddings, difficulties, files, labels
-
-def run_test_eval(model, dataloader, score_keeper):
-
-    dataloader.dataset.mode = 'test'
-    model.eval()
-    embeddings, est_difficulties, files, labels, difficulties = eval_model(model, dataloader)
-    score = score_keeper.calculate_score(embeddings, est_difficulties, labels, difficulties)
-
-    return score, embeddings, difficulties, files, labels
-
-def eval_model(model, dataloader):
-    embeddings = torch.zeros((len(dataloader, model.embedding_dimension)))
-    est_difficulties = torch.zeros((len(dataloader),))
-    files, labels, difficulties = list(), list(), list()
-
-    for idx, (image, label, difficulty, file) in enumerate(dataloader):
-        difficulties.append(difficulty)
-        embedding, difficulty = model(image)
-        embeddings[(idx * len(image)): (idx + 1) * len(image)] = embedding
-        est_difficulties[(idx * len(image)): (idx + 1) * len(image)] = difficulty
-        labels.append(label)
-        files += file
-
-    return embeddings, est_difficulties, files, label, difficulties
+import torch.nn as nn
+import torch
+import numpy
+import os
+from tqdm import tqdm
+from misc import accuracy_calculator, savers_and_loaders
+
+
def train_model(model, optimizer, dataloader, losses, score_keeper, args):
    """
    Train `model` phase-by-phase (backbone / head / mixed) under the control
    of `score_keeper`, which decides the current phase and early stopping.

    :param model: (nn.Module) network returning (embedding, difficulty) per batch
    :param optimizer: (torch.optim) optimizer over `model`'s parameters
    :param dataloader: iterable yielding (image, label, difficulty) batches
    :param losses: callable combining embedding/difficulty losses
    :param score_keeper: ScoreKeeper tracking phase, epochs trained, early stop
    :type args: Namespace
    :return: (model, score_keeper, embeddings, difficulties, labels) where the
        last three come from the most recent validation run (None if no
        validation ever executed).
    """
    max_epochs = {'backbone': args.TRAINING.BACKBONE.MAX_EPOCH,
                  'head': args.TRAINING.HEAD.MAX_EPOCH,
                  'combined': args.TRAINING.COMBINED.MAX_EPOCH}
    # Defaults so the return statement is safe even if no validation ran
    # (previously these could be unbound -> UnboundLocalError).
    embeddings, difficulties, labels = None, None, None

    while score_keeper.is_training is not None:
        if score_keeper.is_training == 'backbone':
            model.freeze('head')
        elif score_keeper.is_training == 'head':
            model.freeze('backbone')
        elif score_keeper.is_training == 'mixed':
            model.freeze('unfreeze')
        # NOTE(review): max_epochs has a 'combined' key but the phase above is
        # named 'mixed' — one of the two spellings looks wrong; a 'mixed'
        # phase would KeyError on the lookup below. Confirm against ScoreKeeper.

        epochs = (max_epochs[score_keeper.is_training] -
                  score_keeper.number_of_epochs_trained[score_keeper.is_training])

        for epoch in range(epochs):
            # Validation switches the model to eval mode; restore train mode
            # so dropout/batch-norm behave correctly during optimization.
            model.train()
            for idx, (image, label, difficulty) in enumerate(dataloader):
                optimizer.zero_grad()
                embedding, diff = model(image)
                # Debug-only `try: ... except: breakpoint()` removed: a bare
                # except swallowed every error and hung non-interactive runs.
                loss = losses(embedding, diff, label, difficulty, score_keeper)
                loss.backward()
                optimizer.step()

            if epoch % args.SAVE_POINT_PERIOD == 0:
                savers_and_loaders.save_trained(model, score_keeper, args)
            if epoch % args.EVAL_PERIOD == 0:
                # Bug fix: run_validation_eval requires score_keeper; the old
                # two-argument call raised TypeError on the first evaluation.
                score, embeddings, difficulties, files, labels = run_validation_eval(
                    model, dataloader, score_keeper)
                if score_keeper(score):
                    break

    return model, score_keeper, embeddings, difficulties, labels
+
+
def run_validation_eval(model, dataloader, score_keeper):
    """
    Run the validation procedure during training.

    :param model: (nn.Module)
    :param dataloader: (torch.utils.data.DataLoader) dataloader.dataset.mode is
        set to 'validation' for the pass and restored to 'train' afterwards;
        changing that attribute must therefore result in new data being loaded.
    :param score_keeper: ScoreKeeper whose ``calculate_score`` combines the
        embedding and difficulty results into a single validation score.
    :return: (float, torch.Tensor, torch.Tensor, list, list) of validation
        score, embeddings, ground-truth difficulties, file names (full paths)
        and labels.
    """
    dataloader.dataset.mode = 'validation'
    model.eval()
    embeddings, est_difficulties, files, labels, difficulties = eval_model(model, dataloader)
    dataloader.dataset.mode = 'train'
    # Bug fix: the dataset mode was restored but the model was left in eval
    # mode, so subsequent training epochs ran with dropout/BN disabled.
    model.train()
    score = score_keeper.calculate_score(embeddings, est_difficulties, labels, difficulties)
    return score, embeddings, difficulties, files, labels
+
def run_test_eval(model, dataloader, score_keeper):
    """
    Evaluate `model` on the test split and score the results.

    :param model: (nn.Module)
    :param dataloader: dataloader whose ``dataset.mode`` attribute selects the
        split; it is set to 'test' for this pass.
    :param score_keeper: ScoreKeeper providing ``calculate_score``.
    :return: (score, embeddings, difficulties, files, labels)
    """
    dataloader.dataset.mode = 'test'
    model.eval()
    eval_outputs = eval_model(model, dataloader)
    embeddings, est_difficulties, files, labels, difficulties = eval_outputs
    score = score_keeper.calculate_score(
        embeddings, est_difficulties, labels, difficulties)
    return score, embeddings, difficulties, files, labels
+
def eval_model(model, dataloader):
    """
    Run `model` over every batch of `dataloader` and collect the outputs.

    :param model: callable mapping a batch of images to
        (embeddings, estimated difficulties)
    :param dataloader: iterable yielding (image, label, difficulty, file) batches
    :return: (torch.Tensor, torch.Tensor, list, list, list) of concatenated
        embeddings, concatenated estimated difficulties, file names,
        per-batch labels and per-batch ground-truth difficulties.
    """
    # Bug fixes vs. the previous version:
    #  * torch.zeros((len(dataloader, model.embedding_dimension))) was a
    #    TypeError (len takes one argument), and len(dataloader) counts
    #    batches, not samples, so the preallocation was under-sized anyway.
    #    Accumulate per-batch and concatenate instead.
    #  * The ground-truth `difficulty` was shadowed by the model's estimate;
    #    distinct names keep both.
    #  * Returned `label` (last batch only) instead of the `labels` list.
    embedding_batches, est_difficulty_batches = [], []
    files, labels, difficulties = [], [], []

    # Pure inference: no gradients needed.
    with torch.no_grad():
        for image, label, difficulty, file in dataloader:
            difficulties.append(difficulty)
            embedding, est_difficulty = model(image)
            embedding_batches.append(embedding)
            est_difficulty_batches.append(est_difficulty)
            labels.append(label)
            files += file

    if embedding_batches:
        embeddings = torch.cat(embedding_batches)
        est_difficulties = torch.cat(est_difficulty_batches)
    else:
        # Empty dataloader: keep the documented tensor types/shapes.
        embeddings = torch.zeros((0, model.embedding_dimension))
        est_difficulties = torch.zeros((0,))

    return embeddings, est_difficulties, files, labels, difficulties