Complete Lab 4

This commit is contained in:
Jingfan Ke 2024-01-12 02:27:03 +08:00
parent 7fbb893223
commit feca24347a
15 changed files with 4588 additions and 7 deletions

2
.gitignore vendored
View File

@ -1,4 +1,4 @@
dataset/
raw/
.vscode/

View File

@@ -1121,9 +1121,9 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python [conda env:DeepLearningLab]",
+"display_name": "Python 3 (ipykernel)",
 "language": "python",
-"name": "conda-env-DeepLearningLab-py"
+"name": "python3"
 },
 "language_info": {
 "codemirror_mode": {

73
Lab4/code/1.1.py Normal file
View File

@ -0,0 +1,73 @@
from utils import *
import ipdb
class My_Conv2d(nn.Module):
def __init__(self, in_channels:int, out_channels:int, kernel_size:int, padding:int=0, bias=True):
super(My_Conv2d, self).__init__()
self.has_bias = bias
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = kernel_size
self.padding = padding
self.weight = nn.Parameter(torch.Tensor(out_channels, in_channels, kernel_size, kernel_size))
nn.init.xavier_uniform_(self.weight)
if self.has_bias:
self.bias = nn.Parameter(torch.zeros(out_channels, requires_grad=True, dtype=torch.float32))
    def forward(self, x):
        batch_size, _, input_height, input_width = x.shape
        if self.padding > 0:
            # zero-pad height and width symmetrically before unfolding
            x = F.pad(x, (self.padding, self.padding, self.padding, self.padding))
        # im2col: gather every kernel_size x kernel_size patch into a column
        x = F.unfold(x, kernel_size=self.kernel_size)
        x = x.permute(0, 2, 1).contiguous()
        # convolution as a single matrix multiply between patches and flattened kernels
        weight_unfold = self.weight.view(self.out_channels, -1).t()
        x = torch.matmul(x, weight_unfold)
        if self.has_bias:
            x += self.bias
        # reshape the patch dimension back into the output spatial grid (NHWC -> NCHW)
        output_height = input_height + 2 * self.padding - self.kernel_size + 1
        output_width = input_width + 2 * self.padding - self.kernel_size + 1
        x = x.view(batch_size, output_height, output_width, self.out_channels).permute(0, 3, 1, 2).contiguous()
        return x
class Model_Vehicle_CLS_1_1(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_1_1, self).__init__()
self.conv1 = My_Conv2d(in_channels=3, out_channels=128, kernel_size=3, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(128)
self.conv2 = My_Conv2d(in_channels=128, out_channels=512, kernel_size=3, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(512)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = F.relu(self.bn1(self.conv1(x)))
x = F.relu(self.bn2(self.conv2(x)))
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Haze_Removal_1_1(nn.Module):
def __init__(self):
super(Model_Haze_Removal_1_1, self).__init__()
self.conv1 = My_Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(16)
self.conv2 = My_Conv2d(in_channels=16, out_channels=48, kernel_size=5, padding=2, bias=False)
self.bn2 = nn.BatchNorm2d(48)
self.conv3 = My_Conv2d(in_channels=48, out_channels=3, kernel_size=3, padding=1)
def forward(self, x):
x = F.relu(self.bn1(self.conv1(x)))
x = F.relu(self.bn2(self.conv2(x)))
x = self.conv3(x)
return x
if __name__ == "__main__":
model = Model_Vehicle_CLS_1_1()
train_Vehicle_CLS(model=model, learning_rate=4e-4, batch_size=256)
model = Model_Haze_Removal_1_1()
train_Haze_Removal(model=model, learning_rate=5e-3, batch_size=16)
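A quick sanity check for the unfold-based layer above is to copy its parameters into a stock nn.Conv2d and compare outputs on random input. The snippet below is a hypothetical helper, not part of the lab code; it takes the My_Conv2d class defined above as an argument and assumes only torch.

import torch
from torch import nn

def check_my_conv2d(My_Conv2d, atol=1e-5):
    # hypothetical test helper: My_Conv2d is the class defined in 1.1.py above
    mine = My_Conv2d(in_channels=3, out_channels=8, kernel_size=3, padding=1, bias=True)
    ref = nn.Conv2d(3, 8, kernel_size=3, padding=1, bias=True)
    # share parameters so both layers compute the same function
    ref.weight.data.copy_(mine.weight.data)
    ref.bias.data.copy_(mine.bias.data)
    x = torch.randn(2, 3, 32, 32)
    # the im2col implementation should agree with the built-in convolution up to float error
    return torch.allclose(mine(x), ref(x), atol=atol)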

43
Lab4/code/1.2.py Normal file
View File

@ -0,0 +1,43 @@
from utils import *
import ipdb
class Model_Vehicle_CLS_1_2(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_1_2, self).__init__()
self.conv1 = nn.Conv2d(in_channels=3, out_channels=128, kernel_size=3, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(128)
self.conv2 = nn.Conv2d(in_channels=128, out_channels=512, kernel_size=3, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(512)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = F.relu(self.bn1(self.conv1(x)))
x = F.relu(self.bn2(self.conv2(x)))
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Haze_Removal_1_2(nn.Module):
def __init__(self):
super(Model_Haze_Removal_1_2, self).__init__()
self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(16)
self.conv2 = nn.Conv2d(in_channels=16, out_channels=48, kernel_size=5, padding=2, bias=False)
self.bn2 = nn.BatchNorm2d(48)
self.conv3 = nn.Conv2d(in_channels=48, out_channels=3, kernel_size=3, padding=1)
def forward(self, x):
x = F.relu(self.bn1(self.conv1(x)))
x = F.relu(self.bn2(self.conv2(x)))
x = self.conv3(x)
return x
if __name__ == "__main__":
model = Model_Vehicle_CLS_1_2()
train_Vehicle_CLS(model=model, learning_rate=4e-4, batch_size=64)
model = Model_Haze_Removal_1_2()
train_Haze_Removal(model=model, learning_rate=5e-3, batch_size=16)
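Both classifiers end with F.avg_pool2d(x, 32), so they implicitly assume the 32x32 inputs produced by the Resize transform in utils.py, while the dehazing model preserves spatial size through padding. A minimal shape-check sketch using the classes defined above (the batch sizes are arbitrary):

import torch

model = Model_Vehicle_CLS_1_2(num_classes=3)
dummy = torch.randn(4, 3, 32, 32)            # matches transforms.Resize((32, 32)) in utils.py
assert model(dummy).shape == (4, 3)          # one logit per class

dehaze = Model_Haze_Removal_1_2()
hazy = torch.randn(2, 3, 224, 224)           # matches transforms.Resize((224, 224)) in utils.py
assert dehaze(hazy).shape == hazy.shape      # same-size RGB output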

215
Lab4/code/1.3.py Normal file
View File

@ -0,0 +1,215 @@
from utils import *
class Model_Vehicle_CLS_1_3_1(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_1_3_1, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=512, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(512),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Vehicle_CLS_1_3_2(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_1_3_2, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=128, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(128),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=128, out_channels=512, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(512),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes, bias=False)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Vehicle_CLS_1_3_3(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_1_3_3, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=64, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(64),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=64, out_channels=256, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(256),
)
self.conv3 = nn.Sequential(
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(512),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Vehicle_CLS_1_3_4(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_1_3_4, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=64, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(64),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(128),
)
self.conv3 = nn.Sequential(
nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(256),
)
self.conv4 = nn.Sequential(
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(512),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Haze_Removal_1_3_1(nn.Module):
def __init__(self):
super(Model_Haze_Removal_1_3_1, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(16),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=16, out_channels=48, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(48),
)
self.conv3 = nn.Conv2d(in_channels=48, out_channels=3, kernel_size=3, padding=1)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = self.conv3(x)
return x
class Model_Haze_Removal_1_3_2(nn.Module):
def __init__(self):
super(Model_Haze_Removal_1_3_2, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5, padding=2, bias=False),
nn.BatchNorm2d(16),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=16, out_channels=48, kernel_size=5, padding=2, bias=False),
nn.BatchNorm2d(48),
)
self.conv3 = nn.Conv2d(in_channels=48, out_channels=3, kernel_size=5, padding=2)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = self.conv3(x)
return x
class Model_Haze_Removal_1_3_3(nn.Module):
def __init__(self):
super(Model_Haze_Removal_1_3_3, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=7, padding=3, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=16, out_channels=48, kernel_size=7, padding=3, bias=False),
nn.BatchNorm2d(48),
nn.ReLU(inplace=True),
)
self.conv3 = nn.Conv2d(in_channels=48, out_channels=3, kernel_size=7, padding=3)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = self.conv3(x)
return x
class Model_Haze_Removal_1_3_4(nn.Module):
def __init__(self):
super(Model_Haze_Removal_1_3_4, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=9, padding=4, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=16, out_channels=48, kernel_size=9, padding=4, bias=False),
nn.BatchNorm2d(48),
nn.ReLU(inplace=True),
)
self.conv3 = nn.Conv2d(in_channels=48, out_channels=3, kernel_size=9, padding=4)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = self.conv3(x)
return x
if __name__ == "__main__":
num_epochs = 61
learning_rate = 2e-4
batch_size = 256
models = [
Model_Vehicle_CLS_1_3_1,
Model_Vehicle_CLS_1_3_2,
Model_Vehicle_CLS_1_3_3,
Model_Vehicle_CLS_1_3_4,
]
for i in range(4):
model = models[i]()
print(f"卷积层层数={i + 1}")
train_loss, test_acc = train_Vehicle_CLS(model=model, learning_rate=learning_rate,
batch_size=batch_size, num_epochs=num_epochs)
print()
num_epochs = 61
learning_rate = 8e-3
batch_size = 64
models = [
Model_Haze_Removal_1_3_1,
Model_Haze_Removal_1_3_2,
Model_Haze_Removal_1_3_3,
Model_Haze_Removal_1_3_4,
]
for i in range(4):
model = models[i]()
print(f"卷积核大小={3 + 2 * i}")
train_loss, test_loss = train_Haze_Removal(model=model, learning_rate=learning_rate, batch_size=batch_size, num_epochs=num_epochs)
print()
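To relate the depth sweep (classification) and kernel-size sweep (dehazing) to model capacity, the trainable parameters of each variant can be counted. A minimal sketch, assuming the classes defined above in this file:

def count_params(model):
    return sum(p.numel() for p in model.parameters() if p.requires_grad)

for cls in [Model_Vehicle_CLS_1_3_1, Model_Vehicle_CLS_1_3_2,
            Model_Vehicle_CLS_1_3_3, Model_Vehicle_CLS_1_3_4,
            Model_Haze_Removal_1_3_1, Model_Haze_Removal_1_3_2,
            Model_Haze_Removal_1_3_3, Model_Haze_Removal_1_3_4]:
    print(f"{cls.__name__}: {count_params(cls()):,} trainable parameters")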

168
Lab4/code/2.py Normal file
View File

@ -0,0 +1,168 @@
from utils import *
class Model_Vehicle_CLS_2_1(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_2_1, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(512),
nn.ReLU(inplace=True),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Vehicle_CLS_2_2(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_2_2, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, padding=2, dilation=2, bias=False),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, padding=5, dilation=5, bias=False),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=2, dilation=2, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=5, dilation=5, bias=False),
nn.BatchNorm2d(512),
nn.ReLU(inplace=True),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Vehicle_CLS_2_3(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_2_3, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, padding=3, dilation=3, bias=False),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, padding=5, dilation=5, bias=False),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=3, dilation=3, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=5, dilation=5, bias=False),
nn.BatchNorm2d(512),
nn.ReLU(inplace=True),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class Model_Vehicle_CLS_2_4(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_2_4, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, padding=3, dilation=3, bias=False),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, padding=7, dilation=7, bias=False),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1, dilation=1, bias=False),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=3, dilation=3, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=7, dilation=7, bias=False),
nn.BatchNorm2d(512),
nn.ReLU(inplace=True),
)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = F.avg_pool2d(x, 32)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
if __name__ == "__main__":
num_epochs = 61
learning_rate = 1e-4
batch_size = 256
dilations = ["[[1, 1, 1], [1, 1, 1]]",
"[[1, 2, 5], [1, 2, 5]]",
"[[1, 3, 5], [1, 3, 5]]",
"[[1, 3, 7], [1, 3, 7]]"]
models = [
Model_Vehicle_CLS_2_1,
Model_Vehicle_CLS_2_2,
Model_Vehicle_CLS_2_3,
Model_Vehicle_CLS_2_4,
]
for i in range(4):
model = models[i]()
print("Dilation: " + dilations[i])
train_loss, test_acc = train_Vehicle_CLS(model=model, learning_rate=learning_rate,
batch_size=batch_size, num_epochs=num_epochs)
print()
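The padding of each layer above equals its dilation, which keeps the spatial size for a 3x3 kernel because the effective kernel size is d*(k-1)+1. Dilation therefore only changes the receptive field; a small sketch computing it for the four patterns listed in the dilations variable (stride 1 throughout, so each layer adds d*(k-1)):

def receptive_field(dilations, kernel_size=3):
    rf = 1
    for d in dilations:
        rf += d * (kernel_size - 1)   # each stride-1 layer widens the field by d*(k-1)
    return rf

for pattern in ([1, 1, 1, 1, 1, 1], [1, 2, 5, 1, 2, 5],
                [1, 3, 5, 1, 3, 5], [1, 3, 7, 1, 3, 7]):
    print(pattern, "->", receptive_field(pattern))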

63
Lab4/code/3.py Normal file
View File

@ -0,0 +1,63 @@
from utils import *
class BasicResidualBlock(nn.Module):
def __init__(self, in_channels, out_channels, stride=1):
super(BasicResidualBlock, self).__init__()
self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(out_channels)
self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(out_channels)
self.shortcut = nn.Sequential()
if stride != 1 or in_channels != out_channels:
self.shortcut = nn.Sequential(
nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(out_channels)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.bn2(self.conv2(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class Model_Vehicle_CLS_3(nn.Module):
def __init__(self, num_classes=3):
super(Model_Vehicle_CLS_3, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=64, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(64),
)
self.conv2 = BasicResidualBlock(in_channels=64, out_channels=64)
self.conv3 = BasicResidualBlock(in_channels=64, out_channels=64)
self.conv4 = BasicResidualBlock(in_channels=64, out_channels=128, stride=2)
self.conv5 = BasicResidualBlock(in_channels=128, out_channels=128)
self.conv6 = BasicResidualBlock(in_channels=128, out_channels=256, stride=2)
self.conv7 = BasicResidualBlock(in_channels=256, out_channels=256)
self.conv8 = BasicResidualBlock(in_channels=256, out_channels=512, stride=2)
self.conv9 = BasicResidualBlock(in_channels=512, out_channels=512)
self.fc = nn.Linear(in_features=512, out_features=num_classes)
def forward(self, x):
x = F.relu(self.conv1(x))
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
x = self.conv5(x)
x = self.conv6(x)
x = self.conv7(x)
x = self.conv8(x)
x = self.conv9(x)
x = F.avg_pool2d(x, 4)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
if __name__ == "__main__":
num_epochs = 61
learning_rate = 15e-5
batch_size = 512
model = Model_Vehicle_CLS_3(num_classes=3)
train_loss, test_acc = train_Vehicle_CLS(model=model, learning_rate=learning_rate, batch_size=batch_size, num_epochs=num_epochs)
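The 1x1 projection in BasicResidualBlock is what allows the residual addition when the stride or channel count changes; otherwise the identity shortcut is used. A minimal shape-check sketch, assuming the classes defined above:

import torch

block_same = BasicResidualBlock(in_channels=64, out_channels=64)
block_down = BasicResidualBlock(in_channels=64, out_channels=128, stride=2)
x = torch.randn(2, 64, 32, 32)
assert block_same(x).shape == (2, 64, 32, 32)    # identity shortcut, size preserved
assert block_down(x).shape == (2, 128, 16, 16)   # projection shortcut, spatial size halved

model = Model_Vehicle_CLS_3(num_classes=3)
assert model(torch.randn(2, 3, 32, 32)).shape == (2, 3)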

197
Lab4/code/utils.py Normal file
View File

@ -0,0 +1,197 @@
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader, Dataset
from torch import nn
from torchvision import transforms
from tqdm import tqdm
import os
import time
from PIL import Image
import pandas as pd
class Vehicle(Dataset):
def __init__(self, root:str="../dataset", train:bool=True, transform=None):
root = os.path.join(root, "Vehicles")
csv_file = os.path.join(root, "train.csv" if train else "test.csv")
self.data = pd.read_csv(csv_file).to_numpy().tolist()
self.root = root
self.transform = transform
def __len__(self):
return len(self.data)
def __getitem__(self, index):
img_name, label = self.data[index]
img_path = os.path.join(self.root, img_name)
image = Image.open(img_path)
label = int(label)
if self.transform:
image = self.transform(image)
return image, label
class Haze(Dataset):
def __init__(self, root:str="../dataset", train:bool=True, transform=None):
root = os.path.join(root, "Haze")
split_file = pd.read_csv(os.path.join(root, "split.csv")).to_numpy().tolist()
self.data = list()
for img, is_train in split_file:
if train and int(is_train) == 1:
self.data.append(img)
elif not train and int(is_train) == 0:
self.data.append(img)
self.root = root
self.transform = transform
def __len__(self):
return len(self.data)
def __getitem__(self, index):
img_name = self.data[index]
img_path = os.path.join(self.root, "raw/haze", img_name)
ground_truth_path = os.path.join(self.root, "raw/no_haze", img_name)
image = Image.open(img_path)
ground_truth = Image.open(ground_truth_path)
if self.transform:
image = self.transform(image)
ground_truth = self.transform(ground_truth)
return image, ground_truth
def train_Vehicle_CLS(model:nn.Module, learning_rate=1e-3, batch_size=64, num_epochs=51):
num_classes = 3
device = "cuda:0" if torch.cuda.is_available() else "cpu"
transform = transforms.Compose(
[
transforms.ToTensor(),
transforms.Resize((32, 32), antialias=True),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
]
)
train_dataset = Vehicle(root="../dataset", train=True, transform=transform)
test_dataset = Vehicle(root="../dataset", train=False, transform=transform)
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True, num_workers=14, pin_memory=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, num_workers=14, pin_memory=True)
model = model.to(device)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
train_loss = list()
test_acc = list()
for epoch in range(num_epochs):
model.train()
total_epoch_loss = 0
train_tik = time.time()
for index, (images, targets) in tqdm(enumerate(train_loader), total=len(train_loader)):
optimizer.zero_grad()
images = images.to(device)
targets = targets.to(device)
one_hot_targets = F.one_hot(targets, num_classes=num_classes).to(dtype=torch.float)
outputs = model(images)
loss = criterion(outputs, one_hot_targets)
total_epoch_loss += loss.item()
loss.backward()
optimizer.step()
train_tok = time.time()
model.eval()
with torch.no_grad():
total_epoch_acc = 0
test_tik = time.time()
for index, (image, targets) in tqdm(enumerate(test_loader), total=len(test_loader)):
image = image.to(device)
targets = targets.to(device)
outputs = model(image)
pred = F.softmax(outputs, dim=1)
total_epoch_acc += (pred.argmax(1) == targets).sum().item()
test_tok = time.time()
avg_epoch_acc = total_epoch_acc / len(test_dataset)
print(
f"Epoch [{epoch + 1}/{num_epochs}],",
f"Train Loss: {total_epoch_loss:.10f},",
f"Train Time: {1000 * (train_tok - train_tik):.2f}ms,",
f"Test Acc: {avg_epoch_acc * 100:.3f}%,",
f"Test Time: {1000 * (test_tok - test_tik):.2f}ms"
)
train_loss.append(total_epoch_loss)
test_acc.append(avg_epoch_acc)
print(f"最大显存使用量: {torch.cuda.max_memory_allocated() / (1024 * 1024):.2f}MiB")
torch.cuda.reset_peak_memory_stats()
return train_loss, test_acc
def train_Haze_Removal(model:nn.Module, learning_rate=1e-3, batch_size=64, num_epochs=51):
device = "cuda:0" if torch.cuda.is_available() else "cpu"
transform = transforms.Compose(
[
transforms.ToTensor(),
transforms.Resize((224, 224), antialias=True),
]
)
train_dataset = Haze(root="../dataset", train=True, transform=transform)
test_dataset = Haze(root="../dataset", train=False, transform=transform)
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True, num_workers=14, pin_memory=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, num_workers=14, pin_memory=True)
model = model.to(device)
criterion = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
train_loss = list()
test_loss = list()
for epoch in range(num_epochs):
model.train()
total_epoch_train_loss = 0
train_tik = time.time()
for index, (images, ground_truth) in tqdm(enumerate(train_loader), total=len(train_loader)):
optimizer.zero_grad()
images = images.to(device)
ground_truth = ground_truth.to(device)
outputs = model(images)
loss = criterion(outputs, ground_truth)
total_epoch_train_loss += loss.item()
loss.backward()
optimizer.step()
train_tok = time.time()
model.eval()
with torch.no_grad():
total_epoch_test_loss = 0
test_tik = time.time()
for index, (image, ground_truth) in tqdm(enumerate(test_loader), total=len(test_loader)):
image = image.to(device)
ground_truth = ground_truth.to(device)
outputs = model(image)
loss = criterion(outputs, ground_truth)
total_epoch_test_loss += loss.item()
test_tok = time.time()
print(
f"Epoch [{epoch + 1}/{num_epochs}],",
f"Train Loss: {total_epoch_train_loss:.10f},",
f"Train Time: {1000 * (train_tok - train_tik):.2f}ms,",
f"Test Loss: {total_epoch_test_loss:.10f},",
f"Test Time: {1000 * (test_tok - test_tik):.2f}ms"
)
train_loss.append(total_epoch_train_loss)
test_loss.append(total_epoch_test_loss)
print(f"最大显存使用量: {torch.cuda.max_memory_allocated() / (1024 * 1024):.2f}MiB")
torch.cuda.reset_peak_memory_stats()
return train_loss, test_loss
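Both training helpers return per-epoch lists, so the curves can be plotted directly with matplotlib (already listed in requirements.txt). A hedged usage sketch: the stand-in model below only exists to keep the snippet self-contained, and it assumes the ../dataset layout and CUDA device used by the functions above.

import matplotlib.pyplot as plt
from torch import nn
from utils import train_Vehicle_CLS

# tiny stand-in classifier just to illustrate the call; the lab passes the models from 1.x/2/3.py
model = nn.Sequential(nn.Conv2d(3, 16, kernel_size=3, padding=1), nn.ReLU(),
                      nn.AdaptiveAvgPool2d(1), nn.Flatten(), nn.Linear(16, 3))
train_loss, test_acc = train_Vehicle_CLS(model=model, learning_rate=1e-3, batch_size=64, num_epochs=5)

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot(train_loss)
ax1.set_xlabel("epoch")
ax1.set_ylabel("train loss")
ax2.plot(test_acc)
ax2.set_xlabel("epoch")
ax2.set_ylabel("test accuracy")
fig.tight_layout()
fig.savefig("curves.png")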

521
Lab4/dataset/Haze/split.csv Normal file
View File

@ -0,0 +1,521 @@
Image,Train
001.jpg,1
002.jpg,1
003.jpg,1
004.jpg,0
005.jpg,0
006.jpg,1
007.jpg,1
008.jpg,1
009.jpg,1
010.jpg,1
011.jpg,0
012.jpg,1
013.jpg,0
014.jpg,1
015.jpg,1
016.jpg,1
017.jpg,1
018.jpg,1
019.jpg,1
020.jpg,1
021.jpg,1
022.jpg,1
023.jpg,1
024.jpg,1
025.jpg,1
026.jpg,1
027.jpg,0
028.jpg,1
029.jpg,1
030.jpg,1
031.jpg,1
032.jpg,1
033.jpg,1
034.jpg,1
035.jpg,1
036.jpg,1
037.jpg,0
038.jpg,1
039.jpg,1
040.jpg,1
041.jpg,1
042.jpg,1
043.jpg,0
044.jpg,1
045.jpg,1
046.jpg,1
047.jpg,1
048.jpg,0
049.jpg,1
050.jpg,1
051.jpg,1
052.jpg,1
053.jpg,1
054.jpg,1
055.jpg,1
056.jpg,1
057.jpg,1
058.jpg,1
059.jpg,0
060.jpg,1
061.jpg,1
062.jpg,1
063.jpg,1
064.jpg,1
065.jpg,0
066.jpg,1
067.jpg,0
068.jpg,1
069.jpg,0
070.jpg,1
071.jpg,1
072.jpg,1
073.jpg,1
074.jpg,1
075.jpg,1
076.jpg,0
077.jpg,1
078.jpg,1
079.jpg,1
080.jpg,1
081.jpg,1
082.jpg,1
083.jpg,1
084.jpg,1
085.jpg,0
086.jpg,1
087.jpg,1
088.jpg,0
089.jpg,1
090.jpg,1
091.jpg,1
092.jpg,1
093.jpg,1
094.jpg,1
095.jpg,1
096.jpg,1
097.jpg,1
098.jpg,1
099.jpg,1
100.jpg,1
101.jpg,1
102.jpg,1
103.jpg,1
104.jpg,1
105.jpg,1
106.jpg,1
107.jpg,0
108.jpg,1
109.jpg,0
110.jpg,1
111.jpg,1
112.jpg,1
113.jpg,1
114.jpg,1
115.jpg,1
116.jpg,1
117.jpg,1
118.jpg,0
119.jpg,1
120.jpg,1
121.jpg,0
122.jpg,1
123.jpg,1
124.jpg,1
125.jpg,1
126.jpg,0
127.jpg,1
128.jpg,0
129.jpg,0
130.jpg,0
131.jpg,1
132.jpg,1
133.jpg,1
134.jpg,1
135.jpg,1
136.jpg,1
137.jpg,1
138.jpg,1
139.jpg,1
140.jpg,0
141.jpg,0
142.jpg,1
143.jpg,1
144.jpg,1
145.jpg,1
146.jpg,1
147.jpg,0
148.jpg,1
149.jpg,1
150.jpg,0
151.jpg,1
152.jpg,1
153.jpg,1
154.jpg,1
155.jpg,0
156.jpg,1
157.jpg,0
158.jpg,1
159.jpg,0
160.jpg,1
161.jpg,0
162.jpg,1
163.jpg,0
164.jpg,1
165.jpg,0
166.jpg,1
167.jpg,1
168.jpg,0
169.jpg,1
170.jpg,1
171.jpg,0
172.jpg,1
173.jpg,1
174.jpg,1
175.jpg,0
176.jpg,1
177.jpg,1
178.jpg,1
179.jpg,1
180.jpg,0
181.jpg,1
182.jpg,0
183.jpg,1
184.jpg,1
185.jpg,1
186.jpg,0
187.jpg,1
188.jpg,1
189.jpg,1
190.jpg,0
191.jpg,1
192.jpg,1
193.jpg,1
194.jpg,1
195.jpg,1
196.jpg,1
197.jpg,1
198.jpg,1
199.jpg,1
200.jpg,1
201.jpg,1
202.jpg,1
203.jpg,1
204.jpg,1
205.jpg,0
206.jpg,1
207.jpg,1
208.jpg,1
209.jpg,1
210.jpg,1
211.jpg,1
212.jpg,1
213.jpg,1
214.jpg,0
215.jpg,0
216.jpg,1
217.jpg,1
218.jpg,1
219.jpg,1
220.jpg,1
221.jpg,1
222.jpg,1
223.jpg,0
224.jpg,0
225.jpg,1
226.jpg,1
227.jpg,1
228.jpg,1
229.jpg,0
230.jpg,1
231.jpg,1
232.jpg,1
233.jpg,0
234.jpg,1
235.jpg,0
236.jpg,1
237.jpg,1
238.jpg,1
239.jpg,1
240.jpg,1
241.jpg,1
242.jpg,1
243.jpg,1
244.jpg,1
245.jpg,1
246.jpg,0
247.jpg,1
248.jpg,0
249.jpg,0
250.jpg,1
251.jpg,1
252.jpg,1
253.jpg,1
254.jpg,1
255.jpg,1
256.jpg,0
257.jpg,1
258.jpg,1
259.jpg,1
260.jpg,0
261.jpg,1
262.jpg,0
263.jpg,1
264.jpg,1
265.jpg,0
266.jpg,0
267.jpg,0
268.jpg,1
269.jpg,1
270.jpg,1
271.jpg,1
272.jpg,1
273.jpg,1
274.jpg,1
275.jpg,1
276.jpg,1
277.jpg,1
278.jpg,1
279.jpg,1
280.jpg,0
281.jpg,1
282.jpg,1
283.jpg,1
284.jpg,1
285.jpg,1
286.jpg,0
287.jpg,1
288.jpg,1
289.jpg,1
290.jpg,1
291.jpg,1
292.jpg,1
293.jpg,0
294.jpg,1
295.jpg,1
296.jpg,0
297.jpg,1
298.jpg,1
299.jpg,0
300.jpg,1
301.jpg,1
302.jpg,0
303.jpg,1
304.jpg,0
305.jpg,1
306.jpg,1
307.jpg,1
308.jpg,1
309.jpg,1
310.jpg,1
311.jpg,0
312.jpg,1
313.jpg,1
314.jpg,0
315.jpg,1
316.jpg,1
317.jpg,1
318.jpg,1
319.jpg,1
320.jpg,1
321.jpg,0
322.jpg,1
323.jpg,1
324.jpg,0
325.jpg,1
326.jpg,1
327.jpg,0
328.jpg,1
329.jpg,1
330.jpg,1
331.jpg,1
332.jpg,0
333.jpg,1
334.jpg,0
335.jpg,1
336.jpg,1
337.jpg,1
338.jpg,1
339.jpg,1
340.jpg,1
341.jpg,1
342.jpg,1
343.jpg,1
344.jpg,1
345.jpg,0
346.jpg,1
347.jpg,1
348.jpg,1
349.jpg,1
350.jpg,1
351.jpg,1
352.jpg,1
353.jpg,1
354.jpg,0
355.jpg,1
356.jpg,0
357.jpg,1
358.jpg,0
359.jpg,0
360.jpg,1
361.jpg,0
362.jpg,1
363.jpg,0
364.jpg,1
365.jpg,1
366.jpg,1
367.jpg,1
368.jpg,1
369.jpg,0
370.jpg,0
371.jpg,1
372.jpg,0
373.jpg,1
374.jpg,1
375.jpg,1
376.jpg,1
377.jpg,0
378.jpg,0
379.jpg,1
380.jpg,1
381.jpg,1
382.jpg,1
383.jpg,1
384.jpg,1
385.jpg,1
386.jpg,1
387.jpg,1
388.jpg,1
389.jpg,1
390.jpg,0
391.jpg,0
392.jpg,1
393.jpg,1
394.jpg,1
395.jpg,1
396.jpg,1
397.jpg,1
398.jpg,0
399.jpg,1
400.jpg,1
401.jpg,0
402.jpg,1
403.jpg,1
404.jpg,1
405.jpg,0
406.jpg,0
407.jpg,1
408.jpg,1
409.jpg,1
410.jpg,1
411.jpg,0
412.jpg,1
413.jpg,1
414.jpg,1
415.jpg,1
416.jpg,1
417.jpg,1
418.jpg,1
419.jpg,1
420.jpg,1
421.jpg,0
422.jpg,0
423.jpg,1
424.jpg,1
425.jpg,0
426.jpg,1
427.jpg,1
428.jpg,1
429.jpg,0
430.jpg,0
431.jpg,1
432.jpg,1
433.jpg,1
434.jpg,1
435.jpg,1
436.jpg,1
437.jpg,1
438.jpg,1
439.jpg,1
440.jpg,1
441.jpg,1
442.jpg,1
443.jpg,1
444.jpg,1
445.jpg,1
446.jpg,1
447.jpg,1
448.jpg,1
449.jpg,1
450.jpg,1
451.jpg,1
452.jpg,1
453.jpg,1
454.jpg,1
455.jpg,1
456.jpg,1
457.jpg,1
458.jpg,1
459.jpg,1
460.jpg,1
461.jpg,1
462.jpg,1
463.jpg,0
464.jpg,1
465.jpg,1
466.jpg,1
467.jpg,1
468.jpg,1
469.jpg,1
470.jpg,1
471.jpg,1
472.jpg,1
473.jpg,1
474.jpg,1
475.jpg,1
476.jpg,1
477.jpg,1
478.jpg,1
479.jpg,1
480.jpg,1
481.jpg,1
482.jpg,1
483.jpg,1
484.jpg,0
485.jpg,1
486.jpg,1
487.jpg,1
488.jpg,1
489.jpg,1
490.jpg,1
491.jpg,1
492.jpg,0
493.jpg,1
494.jpg,1
495.jpg,1
496.jpg,1
497.jpg,1
498.jpg,1
499.jpg,1
500.jpg,1
501.jpg,1
502.jpg,1
503.jpg,1
504.jpg,1
505.jpg,1
506.jpg,0
507.jpg,0
508.jpg,0
509.jpg,1
510.jpg,1
511.jpg,1
512.jpg,0
513.jpg,0
514.jpg,1
515.jpg,1
516.jpg,1
517.jpg,1
518.jpg,1
519.jpg,1
520.jpg,0

View File

@ -0,0 +1,16 @@
import os
import pandas as pd
import random
train_list = set()
img_list = [i for i in os.listdir("raw/haze") if i.endswith(".jpg")]
random.shuffle(img_list)
for img in img_list[ : int(len(img_list) * 0.8)]:
train_list.add(img)
img_list.sort()
data = list()
for img in img_list:
data.append([img, 1 if img in train_list else 0])
pd.DataFrame(data=data, columns=["Image", "Train"]).to_csv("./split.csv", index=False)

View File

@ -0,0 +1,26 @@
import os
import random
import pandas as pd
train_list = list()
test_list = list()
root_dir = "raw"
class_index = 0
for vehicle in os.listdir(root_dir):
img_list = [i for i in os.listdir(os.path.join(root_dir, vehicle)) if i.endswith(".jpg")]
random.shuffle(img_list)
split_num = int(len(img_list) * 0.8)
for img in img_list[0 : split_num]:
train_list.append([os.path.join(root_dir, vehicle, img), class_index])
for img in img_list[split_num : ]:
test_list.append([os.path.join(root_dir, vehicle, img), class_index])
class_index += 1
train_list.sort()
test_list.sort()
pd.DataFrame(data=train_list, columns=["Vehicle", "Label"]).to_csv("./train.csv", index=False)
pd.DataFrame(data=test_list, columns=["Vehicle", "Label"]).to_csv("./test.csv", index=False)
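A quick, hedged check that the split written above keeps roughly the intended 80/20 ratio per class; it only reads the train.csv and test.csv files just produced.

import pandas as pd

train_df = pd.read_csv("./train.csv")
test_df = pd.read_csv("./test.csv")
for label, n_train in train_df["Label"].value_counts().sort_index().items():
    n_test = int((test_df["Label"] == label).sum())
    print(f"class {label}: {n_train} train / {n_test} test "
          f"({n_train / (n_train + n_test):.0%} train)")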

View File

@ -0,0 +1,273 @@
Vehicle,Label
raw/bus/bus002.jpg,1
raw/bus/bus005.jpg,1
raw/bus/bus008.jpg,1
raw/bus/bus017.jpg,1
raw/bus/bus019.jpg,1
raw/bus/bus033.jpg,1
raw/bus/bus034.jpg,1
raw/bus/bus040.jpg,1
raw/bus/bus043.jpg,1
raw/bus/bus044.jpg,1
raw/bus/bus045.jpg,1
raw/bus/bus046.jpg,1
raw/bus/bus051.jpg,1
raw/bus/bus052.jpg,1
raw/bus/bus056.jpg,1
raw/bus/bus057.jpg,1
raw/bus/bus063.jpg,1
raw/bus/bus066.jpg,1
raw/bus/bus077.jpg,1
raw/bus/bus080.jpg,1
raw/bus/bus096.jpg,1
raw/bus/bus097.jpg,1
raw/bus/bus099.jpg,1
raw/bus/bus114.jpg,1
raw/bus/bus115.jpg,1
raw/bus/bus119.jpg,1
raw/bus/bus127.jpg,1
raw/bus/bus132.jpg,1
raw/bus/bus133.jpg,1
raw/bus/bus135.jpg,1
raw/bus/bus139.jpg,1
raw/bus/bus141.jpg,1
raw/bus/bus145.jpg,1
raw/bus/bus156.jpg,1
raw/bus/bus170.jpg,1
raw/bus/bus173.jpg,1
raw/bus/bus183.jpg,1
raw/bus/bus190.jpg,1
raw/bus/bus192.jpg,1
raw/bus/bus200.jpg,1
raw/bus/bus201.jpg,1
raw/bus/bus209.jpg,1
raw/bus/bus213.jpg,1
raw/bus/bus218.jpg,1
raw/car/car002.jpg,0
raw/car/car012.jpg,0
raw/car/car018.jpg,0
raw/car/car025.jpg,0
raw/car/car030.jpg,0
raw/car/car034.jpg,0
raw/car/car036.jpg,0
raw/car/car047.jpg,0
raw/car/car053.jpg,0
raw/car/car054.jpg,0
raw/car/car058.jpg,0
raw/car/car060.jpg,0
raw/car/car062.jpg,0
raw/car/car063.jpg,0
raw/car/car066.jpg,0
raw/car/car072.jpg,0
raw/car/car084.jpg,0
raw/car/car087.jpg,0
raw/car/car089.jpg,0
raw/car/car099.jpg,0
raw/car/car103.jpg,0
raw/car/car105.jpg,0
raw/car/car106.jpg,0
raw/car/car116.jpg,0
raw/car/car117.jpg,0
raw/car/car120.jpg,0
raw/car/car132.jpg,0
raw/car/car138.jpg,0
raw/car/car139.jpg,0
raw/car/car152.jpg,0
raw/car/car153.jpg,0
raw/car/car157.jpg,0
raw/car/car168.jpg,0
raw/car/car177.jpg,0
raw/car/car182.jpg,0
raw/car/car183.jpg,0
raw/car/car184.jpg,0
raw/car/car187.jpg,0
raw/car/car192.jpg,0
raw/car/car209.jpg,0
raw/car/car212.jpg,0
raw/car/car215.jpg,0
raw/car/car222.jpg,0
raw/car/car225.jpg,0
raw/car/car227.jpg,0
raw/car/car237.jpg,0
raw/car/car238.jpg,0
raw/car/car249.jpg,0
raw/car/car250.jpg,0
raw/car/car257.jpg,0
raw/car/car263.jpg,0
raw/car/car275.jpg,0
raw/car/car279.jpg,0
raw/car/car283.jpg,0
raw/car/car288.jpg,0
raw/car/car299.jpg,0
raw/car/car300.jpg,0
raw/car/car317.jpg,0
raw/car/car318.jpg,0
raw/car/car336.jpg,0
raw/car/car345.jpg,0
raw/car/car351.jpg,0
raw/car/car357.jpg,0
raw/car/car358.jpg,0
raw/car/car360.jpg,0
raw/car/car369.jpg,0
raw/car/car374.jpg,0
raw/car/car376.jpg,0
raw/car/car377.jpg,0
raw/car/car379.jpg,0
raw/car/car380.jpg,0
raw/car/car381.jpg,0
raw/car/car386.jpg,0
raw/car/car396.jpg,0
raw/car/car402.jpg,0
raw/car/car404.jpg,0
raw/car/car410.jpg,0
raw/car/car424.jpg,0
raw/car/car437.jpg,0
raw/car/car439.jpg,0
raw/car/car441.jpg,0
raw/car/car454.jpg,0
raw/car/car457.jpg,0
raw/car/car458.jpg,0
raw/car/car460.jpg,0
raw/car/car461.jpg,0
raw/car/car470.jpg,0
raw/car/car475.jpg,0
raw/car/car477.jpg,0
raw/car/car486.jpg,0
raw/car/car490.jpg,0
raw/car/car495.jpg,0
raw/car/car500.jpg,0
raw/car/car502.jpg,0
raw/car/car511.jpg,0
raw/car/car525.jpg,0
raw/car/car534.jpg,0
raw/car/car537.jpg,0
raw/car/car544.jpg,0
raw/car/car546.jpg,0
raw/car/car547.jpg,0
raw/car/car551.jpg,0
raw/car/car559.jpg,0
raw/car/car566.jpg,0
raw/car/car568.jpg,0
raw/car/car570.jpg,0
raw/car/car571.jpg,0
raw/car/car574.jpg,0
raw/car/car577.jpg,0
raw/car/car580.jpg,0
raw/car/car583.jpg,0
raw/car/car585.jpg,0
raw/car/car588.jpg,0
raw/car/car600.jpg,0
raw/car/car604.jpg,0
raw/car/car606.jpg,0
raw/car/car611.jpg,0
raw/car/car613.jpg,0
raw/car/car625.jpg,0
raw/car/car626.jpg,0
raw/car/car638.jpg,0
raw/car/car646.jpg,0
raw/car/car647.jpg,0
raw/car/car649.jpg,0
raw/car/car653.jpg,0
raw/car/car661.jpg,0
raw/car/car664.jpg,0
raw/car/car667.jpg,0
raw/car/car669.jpg,0
raw/car/car670.jpg,0
raw/car/car691.jpg,0
raw/car/car698.jpg,0
raw/car/car701.jpg,0
raw/car/car707.jpg,0
raw/car/car715.jpg,0
raw/car/car716.jpg,0
raw/car/car718.jpg,0
raw/car/car720.jpg,0
raw/car/car728.jpg,0
raw/car/car730.jpg,0
raw/car/car733.jpg,0
raw/car/car741.jpg,0
raw/car/car743.jpg,0
raw/car/car753.jpg,0
raw/car/car755.jpg,0
raw/car/car756.jpg,0
raw/car/car757.jpg,0
raw/car/car760.jpg,0
raw/car/car761.jpg,0
raw/car/car763.jpg,0
raw/car/car764.jpg,0
raw/car/car768.jpg,0
raw/car/car769.jpg,0
raw/car/car770.jpg,0
raw/car/car772.jpg,0
raw/car/car774.jpg,0
raw/truck/truck001.jpg,2
raw/truck/truck006.jpg,2
raw/truck/truck009.jpg,2
raw/truck/truck011.jpg,2
raw/truck/truck018.jpg,2
raw/truck/truck020.jpg,2
raw/truck/truck021.jpg,2
raw/truck/truck026.jpg,2
raw/truck/truck031.jpg,2
raw/truck/truck039.jpg,2
raw/truck/truck043.jpg,2
raw/truck/truck045.jpg,2
raw/truck/truck051.jpg,2
raw/truck/truck067.jpg,2
raw/truck/truck075.jpg,2
raw/truck/truck080.jpg,2
raw/truck/truck084.jpg,2
raw/truck/truck088.jpg,2
raw/truck/truck091.jpg,2
raw/truck/truck093.jpg,2
raw/truck/truck095.jpg,2
raw/truck/truck097.jpg,2
raw/truck/truck098.jpg,2
raw/truck/truck103.jpg,2
raw/truck/truck107.jpg,2
raw/truck/truck113.jpg,2
raw/truck/truck116.jpg,2
raw/truck/truck117.jpg,2
raw/truck/truck118.jpg,2
raw/truck/truck124.jpg,2
raw/truck/truck130.jpg,2
raw/truck/truck136.jpg,2
raw/truck/truck141.jpg,2
raw/truck/truck146.jpg,2
raw/truck/truck152.jpg,2
raw/truck/truck153.jpg,2
raw/truck/truck155.jpg,2
raw/truck/truck157.jpg,2
raw/truck/truck164.jpg,2
raw/truck/truck170.jpg,2
raw/truck/truck171.jpg,2
raw/truck/truck176.jpg,2
raw/truck/truck185.jpg,2
raw/truck/truck192.jpg,2
raw/truck/truck193.jpg,2
raw/truck/truck197.jpg,2
raw/truck/truck207.jpg,2
raw/truck/truck211.jpg,2
raw/truck/truck216.jpg,2
raw/truck/truck217.jpg,2
raw/truck/truck218.jpg,2
raw/truck/truck220.jpg,2
raw/truck/truck221.jpg,2
raw/truck/truck223.jpg,2
raw/truck/truck226.jpg,2
raw/truck/truck235.jpg,2
raw/truck/truck252.jpg,2
raw/truck/truck259.jpg,2
raw/truck/truck261.jpg,2
raw/truck/truck264.jpg,2
raw/truck/truck271.jpg,2
raw/truck/truck306.jpg,2
raw/truck/truck309.jpg,2
raw/truck/truck311.jpg,2
raw/truck/truck317.jpg,2
raw/truck/truck326.jpg,2
raw/truck/truck328.jpg,2
raw/truck/truck329.jpg,2
raw/truck/truck333.jpg,2
raw/truck/truck335.jpg,2
raw/truck/truck345.jpg,2
raw/truck/truck360.jpg,2

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -2,8 +2,9 @@ black
 ipdb
 jupyter
 numpy
-torch==2.1.0
-torchaudio==2.1.0
-torchvision==0.16.0
+torch
+torchaudio
+torchvision
 tqdm
-matplotlib
+matplotlib
+pandas