data_loader.py
#!/usr/bin/python
from __future__ import print_function

import numpy as np
import torch.utils.data as data

import utils


class heter_data(data.Dataset):
    def __init__(self, filename, transform=None, target_transform=None, target="hot"):
        """
        Args:
            filename: a list of pickle files.
            transform: transform applied to the feature data.
            target_transform: transform applied to the label data.
            target: target label encoding approach.
        Notes:
            Input data with key values: "feature", "label" (already one-hot encoded), "user" (optional).
            Feature with shape [seq_len, 8*2, interval_len] ([12, 16, 7]).
            For a single file we have:
        """
        self.transform = transform
        self.target_transform = target_transform
        self.data = []
        self.targets = []
        for file in filename:
            file_data = utils.load_pickle(file)  # renamed from "data" to avoid shadowing the torch.utils.data import
            self.data.extend(file_data["feature"])
            if target == "hot":  # keep the one-hot encoded labels
                self.targets.extend(file_data["label"])
            else:  # train a classifier: store class indices rather than one-hot vectors
                self.targets.extend(np.argmax(file_data["label"], axis=1))
        self.targets = np.array(self.targets)

    def __getitem__(self, index):
        img, target = self.data[index], self.targets[index]
        if self.transform is not None:
            img = self.transform(img)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return img, target

    def __len__(self):
        return len(self.data)
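
# Usage sketch (not part of the original module): build a heter_data dataset
# from a list of pickle files and wrap it in a standard PyTorch DataLoader.
# The pickle files passed in are assumed to contain the "feature" and "label"
# keys described in the docstring above.
def _example_heter_data_loader(pickle_files, batch_size=32):
    """Return a DataLoader over heter_data with labels stored as class indices."""
    # Any target value other than "hot" makes heter_data store argmax class
    # indices instead of one-hot vectors, which suits losses such as
    # nn.CrossEntropyLoss.
    dataset = heter_data(pickle_files, target="index")
    return data.DataLoader(dataset, batch_size=batch_size, shuffle=True)
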
class heter_data2(data.Dataset):
    def __init__(self, filename, transform=None, target_transform=None, target="hot"):
        self.transform = transform
        self.target_transform = target_transform
        self.data = []
        self.targets = []
        self.targets_t = []
        for file in filename:
            data_tmp = utils.load_pickle(file)
            self.data.extend(data_tmp["feature"])
            if target == "hot":  # keep the one-hot encoded labels
                self.targets.extend(data_tmp["label"])
            else:  # train a classifier: store class indices rather than one-hot vectors
                self.targets.extend(np.argmax(data_tmp["label"], axis=1))
            self.targets_t.extend(data_tmp["label_t"])
        self.targets = np.array(self.targets)

    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (image, target, target_t)
            target is the index of the global target class.
            target_t is the index of the local target class.
        """
        img, target, target_t = self.data[index], self.targets[index], self.targets_t[index]
        if self.transform is not None:
            img = self.transform(img)
        if self.target_transform is not None:
            target = self.target_transform(target)
            target_t = self.target_transform(target_t)
        return img, target, target_t

    def __len__(self):
        return len(self.data)
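
# Usage sketch (not part of the original module): heter_data2 additionally
# expects a "label_t" entry in each pickle and yields (feature, target, target_t)
# triples, where target is the global label and target_t the local one.
def _example_heter_data2_loader(pickle_files, batch_size=32):
    """Return a DataLoader over heter_data2 and show how its batches unpack."""
    dataset = heter_data2(pickle_files, target="index")
    loader = data.DataLoader(dataset, batch_size=batch_size, shuffle=True)
    for features, targets, targets_t in loader:
        # features: [batch, 12, 16, 7]; targets: class indices;
        # targets_t: taken from "label_t" as-is.
        break
    return loader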