j-h-f committed
Commit cfee986 · verified · 1 Parent(s): 50718a1
Put the count_classes function into the WCv1LMDBReader class so the reader no longer depends on flumapping.utils and is standalone

Files changed (1)
  1. WCv1LMDBReader.py  +47 -2
WCv1LMDBReader.py CHANGED
@@ -8,7 +8,6 @@ import numpy as np
 from enum import Enum
 import safetensors.torch
 from torch.utils.data import Dataset
-from flumapping.utils.Utils import one_hot_encode, count_classes
 
 """
 tensors = {
@@ -179,6 +178,52 @@ class WCv1LMDBReader(Dataset):
         self.keys()
         return status
 
+    def count_classes(self, map: np.ndarray | torch.Tensor, output_type=None) -> np.ndarray | torch.Tensor:
+        """Count the class proportions of a segmentation map.
+
+        Args:
+            map (np.ndarray | torch.Tensor): input map whose class proportions are counted; either a single map (np.ndarray) or a batched tensor of multiple maps (torch.Tensor).
+
+        Returns:
+            np.ndarray | torch.Tensor: the class proportions of the input map.
+        """
+        if len(map.shape) == 3:
+            map = map.squeeze()
+
+        if type(map) == np.ndarray:
+            map = torch.tensor(map, dtype=torch.float32)
+
+        output = []
+        num_pixel = map.shape[0] * map.shape[1]
+
+        for i in range(10, 110, 10):
+            if len(map.shape) == 4:
+                percentage = torch.sum(torch.where(map == i, 1, 0), dim=(1, 2, 3)) / num_pixel
+            else:
+                percentage = torch.sum(torch.where(map == i, 1, 0)) / num_pixel
+            output.append(percentage)
+
+            if i == 90:
+                if len(map.shape) == 4:
+                    percentage = torch.sum(torch.where(map == i, 1, 0), dim=(1, 2, 3)) / num_pixel
+                else:
+                    percentage = torch.sum(torch.where(map == i, 1, 0)) / num_pixel
+                output.append(percentage)
+
+        if len(map.shape) == 4:
+            class_props = torch.stack(output, dim=1)
+            class_props.requires_grad = True
+        else:
+            class_props = torch.tensor(output)
+
+        if type(map) == np.ndarray:
+            return class_props.cpu().detach().numpy()
+        if type(map) == torch.Tensor:
+            if output_type is not None:
+                return class_props.type(dtype=output_type)
+            else:
+                return class_props
+
     def __len__(self):
         if self._keys is None:
             self.logger.info("keys are not loaded yet")
@@ -254,5 +299,5 @@ class WCv1LMDBReader(Dataset):
         if "classprops" in tensor_dict.keys():
            class_props = tensor_dict['classprops']
         else:
-            class_props = count_classes(tensor_dict['wcmap'])
+            class_props = self.count_classes(tensor_dict['wcmap'])
         return (image, label, class_props)
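
For context, a minimal usage sketch of the relocated method, separate from the diff above. The WCv1LMDBReader constructor is not shown in this commit, so the LMDB path passed below is purely an assumption; the synthetic map simply uses the class codes 10 through 100 that the counting loop checks for.

import numpy as np

from WCv1LMDBReader import WCv1LMDBReader

# Assumed constructor argument: __init__ is not part of this diff, so the
# path below is hypothetical.
reader = WCv1LMDBReader("path/to/dataset.lmdb")

# A single 2-D map filled with the class codes 10, 20, ..., 100 that
# count_classes looks for.
wcmap = np.random.choice(np.arange(10, 110, 10), size=(256, 256)).astype(np.float32)

# Per-class pixel proportions computed by the new method, now callable on the
# reader itself instead of the removed flumapping.utils helper.
props = reader.count_classes(wcmap)
print(props)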