Skip to content

Commit e0d33a6

Browse files
alykhantejan authored and soumith committed
Scale -> Resize + RandomSizedCrop -> RandomResizedCrop
1 parent cf74c81 commit e0d33a6

File tree

4 files changed

+8
-8
lines changed

4 files changed

+8
-8
lines changed

dcgan/main.py

+3-3
Original file line number | Diff line number | Diff line change
@@ -58,23 +58,23 @@
5858
# folder dataset
5959
dataset = dset.ImageFolder(root=opt.dataroot,
6060
transform=transforms.Compose([
61-
transforms.Scale(opt.imageSize),
61+
transforms.Resize(opt.imageSize),
6262
transforms.CenterCrop(opt.imageSize),
6363
transforms.ToTensor(),
6464
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
6565
]))
6666
elif opt.dataset == 'lsun':
6767
dataset = dset.LSUN(db_path=opt.dataroot, classes=['bedroom_train'],
6868
transform=transforms.Compose([
69-
transforms.Scale(opt.imageSize),
69+
transforms.Resize(opt.imageSize),
7070
transforms.CenterCrop(opt.imageSize),
7171
transforms.ToTensor(),
7272
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
7373
]))
7474
elif opt.dataset == 'cifar10':
7575
dataset = dset.CIFAR10(root=opt.dataroot, download=True,
7676
transform=transforms.Compose([
77-
transforms.Scale(opt.imageSize),
77+
transforms.Resize(opt.imageSize),
7878
transforms.ToTensor(),
7979
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
8080
]))

fast_neural_style/neural_style/neural_style.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -35,7 +35,7 @@ def train(args):
3535
torch.cuda.manual_seed(args.seed)
3636

3737
transform = transforms.Compose([
38-
transforms.Scale(args.image_size),
38+
transforms.Resize(args.image_size),
3939
transforms.CenterCrop(args.image_size),
4040
transforms.ToTensor(),
4141
transforms.Lambda(lambda x: x.mul(255))

imagenet/main.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -119,7 +119,7 @@ def main():
119119
train_dataset = datasets.ImageFolder(
120120
traindir,
121121
transforms.Compose([
122-
transforms.RandomSizedCrop(224),
122+
transforms.RandomResizedCrop(224),
123123
transforms.RandomHorizontalFlip(),
124124
transforms.ToTensor(),
125125
normalize,
@@ -136,7 +136,7 @@ def main():
136136

137137
val_loader = torch.utils.data.DataLoader(
138138
datasets.ImageFolder(valdir, transforms.Compose([
139-
transforms.Scale(256),
139+
transforms.Resize(256),
140140
transforms.CenterCrop(224),
141141
transforms.ToTensor(),
142142
normalize,

super_resolution/data.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,7 @@
22
from os import makedirs, remove
33
from six.moves import urllib
44
import tarfile
5-
from torchvision.transforms import Compose, CenterCrop, ToTensor, Scale
5+
from torchvision.transforms import Compose, CenterCrop, ToTensor, Resize
66

77
from dataset import DatasetFromFolder
88

@@ -38,7 +38,7 @@ def calculate_valid_crop_size(crop_size, upscale_factor):
3838
def input_transform(crop_size, upscale_factor):
3939
return Compose([
4040
CenterCrop(crop_size),
41-
Scale(crop_size // upscale_factor),
41+
Resize(crop_size // upscale_factor),
4242
ToTensor(),
4343
])
4444

0 commit comments

Comments
 (0)