Skip to content

Commit 4927d34

Browse files
committed
remove dglib
1 parent 0282fff commit 4927d34

File tree

12 files changed

+126
-204
lines changed

12 files changed

+126
-204
lines changed

dglib/__init__.py

Lines changed: 0 additions & 3 deletions
This file was deleted.

dglib/modules/__init__.py

Whitespace-only changes.

dglib/modules/classifier.py

Lines changed: 0 additions & 46 deletions
This file was deleted.

dglib/modules/sampler.py

Lines changed: 0 additions & 113 deletions
This file was deleted.

examples/domain_generalization/image_classification/coral.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,6 @@
2020

2121
sys.path.append('../../..')
2222
from tllib.alignment.coral import CorrelationAlignmentLoss
23-
from dglib.modules.sampler import RandomDomainSampler
24-
from dglib.modules.classifier import ImageClassifier as Classifier
2523
from tllib.utils.data import ForeverDataIterator
2624
from tllib.utils.metric import accuracy
2725
from tllib.utils.meter import AverageMeter, ProgressMeter
@@ -60,7 +58,7 @@ def main(args: argparse.Namespace):
6058
train_dataset, num_classes = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources,
6159
split='train', download=True, transform=train_transform,
6260
seed=args.seed)
63-
sampler = RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=args.n_domains_per_batch)
61+
sampler = utils.RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=args.n_domains_per_batch)
6462
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.workers,
6563
sampler=sampler, drop_last=True)
6664
val_dataset, _ = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources, split='val',
@@ -78,8 +76,8 @@ def main(args: argparse.Namespace):
7876
print("=> using pre-trained model '{}'".format(args.arch))
7977
backbone = utils.get_model(args.arch)
8078
pool_layer = nn.Identity() if args.no_pool else None
81-
classifier = Classifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
82-
finetune=args.finetune, pool_layer=pool_layer).to(device)
79+
classifier = utils.ImageClassifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
80+
finetune=args.finetune, pool_layer=pool_layer).to(device)
8381

8482
# define optimizer and lr scheduler
8583
optimizer = SGD(classifier.get_parameters(base_lr=args.lr), args.lr, momentum=args.momentum, weight_decay=args.wd,
@@ -147,7 +145,7 @@ def main(args: argparse.Namespace):
147145
logger.close()
148146

149147

150-
def train(train_iter: ForeverDataIterator, model: Classifier, optimizer, lr_scheduler: CosineAnnealingLR,
148+
def train(train_iter: ForeverDataIterator, model, optimizer, lr_scheduler: CosineAnnealingLR,
151149
correlation_alignment_loss: CorrelationAlignmentLoss, n_domains_per_batch: int, epoch: int,
152150
args: argparse.Namespace):
153151
batch_time = AverageMeter('Time', ':4.2f')

examples/domain_generalization/image_classification/erm.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
import torch.nn.functional as F
2020

2121
sys.path.append('../../..')
22-
from dglib.modules.classifier import ImageClassifier as Classifier
2322
from tllib.utils.data import ForeverDataIterator
2423
from tllib.utils.metric import accuracy
2524
from tllib.utils.meter import AverageMeter, ProgressMeter
@@ -76,8 +75,8 @@ def main(args: argparse.Namespace):
7675
print("=> using pre-trained model '{}'".format(args.arch))
7776
backbone = utils.get_model(args.arch)
7877
pool_layer = nn.Identity() if args.no_pool else None
79-
classifier = Classifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
80-
finetune=args.finetune, pool_layer=pool_layer).to(device)
78+
classifier = utils.ImageClassifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
79+
finetune=args.finetune, pool_layer=pool_layer).to(device)
8180

8281
# define optimizer and lr scheduler
8382
optimizer = SGD(classifier.get_parameters(base_lr=args.lr), args.lr, momentum=args.momentum, weight_decay=args.wd,
@@ -140,8 +139,8 @@ def main(args: argparse.Namespace):
140139
logger.close()
141140

142141

143-
def train(train_iter: ForeverDataIterator, model: Classifier, optimizer,
144-
lr_scheduler: CosineAnnealingLR, epoch: int, args: argparse.Namespace):
142+
def train(train_iter: ForeverDataIterator, model, optimizer, lr_scheduler: CosineAnnealingLR, epoch: int,
143+
args: argparse.Namespace):
145144
batch_time = AverageMeter('Time', ':4.2f')
146145
data_time = AverageMeter('Data', ':3.1f')
147146
losses = AverageMeter('Loss', ':3.2f')

examples/domain_generalization/image_classification/groupdro.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,6 @@
2020
import torch.nn.functional as F
2121

2222
sys.path.append('../../..')
23-
from dglib.modules.sampler import RandomDomainSampler
24-
from dglib.modules.classifier import ImageClassifier as Classifier
2523
from tllib.reweight.groupdro import AutomaticUpdateDomainWeightModule
2624
from tllib.utils.data import ForeverDataIterator
2725
from tllib.utils.metric import accuracy
@@ -61,7 +59,7 @@ def main(args: argparse.Namespace):
6159
train_dataset, num_classes = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources,
6260
split='train', download=True, transform=train_transform,
6361
seed=args.seed)
64-
sampler = RandomDomainSampler(train_dataset, args.batch_size, args.n_domains_per_batch)
62+
sampler = utils.RandomDomainSampler(train_dataset, args.batch_size, args.n_domains_per_batch)
6563
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.workers,
6664
sampler=sampler, drop_last=True)
6765
val_dataset, _ = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources, split='val',
@@ -79,8 +77,8 @@ def main(args: argparse.Namespace):
7977
print("=> using pre-trained model '{}'".format(args.arch))
8078
backbone = utils.get_model(args.arch)
8179
pool_layer = nn.Identity() if args.no_pool else None
82-
classifier = Classifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
83-
finetune=args.finetune, pool_layer=pool_layer).to(device)
80+
classifier = utils.ImageClassifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
81+
finetune=args.finetune, pool_layer=pool_layer).to(device)
8482
num_all_domains = len(train_dataset.datasets)
8583

8684
# define optimizer and lr scheduler
@@ -146,7 +144,7 @@ def main(args: argparse.Namespace):
146144
logger.close()
147145

148146

149-
def train(train_iter: ForeverDataIterator, model: Classifier, optimizer, lr_scheduler: CosineAnnealingLR,
147+
def train(train_iter: ForeverDataIterator, model, optimizer, lr_scheduler: CosineAnnealingLR,
150148
domain_weight_module: AutomaticUpdateDomainWeightModule, n_domains_per_batch: int, epoch: int,
151149
args: argparse.Namespace):
152150
batch_time = AverageMeter('Time', ':4.2f')

examples/domain_generalization/image_classification/irm.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,6 @@
2121
import torch.autograd as autograd
2222

2323
sys.path.append('../../..')
24-
from dglib.modules.sampler import RandomDomainSampler
25-
from dglib.modules.classifier import ImageClassifier as Classifier
2624
from tllib.utils.data import ForeverDataIterator
2725
from tllib.utils.metric import accuracy
2826
from tllib.utils.meter import AverageMeter, ProgressMeter
@@ -90,7 +88,7 @@ def main(args: argparse.Namespace):
9088
train_dataset, num_classes = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources,
9189
split='train', download=True, transform=train_transform,
9290
seed=args.seed)
93-
sampler = RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=args.n_domains_per_batch)
91+
sampler = utils.RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=args.n_domains_per_batch)
9492
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.workers,
9593
sampler=sampler, drop_last=True)
9694
val_dataset, _ = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources, split='val',
@@ -108,8 +106,8 @@ def main(args: argparse.Namespace):
108106
print("=> using pre-trained model '{}'".format(args.arch))
109107
backbone = utils.get_model(args.arch)
110108
pool_layer = nn.Identity() if args.no_pool else None
111-
classifier = Classifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
112-
finetune=args.finetune, pool_layer=pool_layer).to(device)
109+
classifier = utils.ImageClassifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
110+
finetune=args.finetune, pool_layer=pool_layer).to(device)
113111

114112
# define optimizer and lr scheduler
115113
optimizer = SGD(classifier.get_parameters(base_lr=args.lr), args.lr, momentum=args.momentum, weight_decay=args.wd,
@@ -185,7 +183,7 @@ def main(args: argparse.Namespace):
185183
logger.close()
186184

187185

188-
def train(train_iter: ForeverDataIterator, model: Classifier, optimizer, lr_scheduler: CosineAnnealingLR,
186+
def train(train_iter: ForeverDataIterator, model, optimizer, lr_scheduler: CosineAnnealingLR,
189187
invariance_penalty_loss: InvariancePenaltyLoss, n_domains_per_batch: int, epoch: int,
190188
args: argparse.Namespace):
191189
batch_time = AverageMeter('Time', ':4.2f')

examples/domain_generalization/image_classification/mixstyle.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,6 @@
2020

2121
sys.path.append('../../..')
2222
import tllib.normalization.mixstyle.resnet as models
23-
from dglib.modules.sampler import RandomDomainSampler
24-
from dglib.modules.classifier import ImageClassifier as Classifier
2523
from tllib.utils.data import ForeverDataIterator
2624
from tllib.utils.metric import accuracy
2725
from tllib.utils.meter import AverageMeter, ProgressMeter
@@ -60,7 +58,7 @@ def main(args: argparse.Namespace):
6058
train_dataset, num_classes = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources,
6159
split='train', download=True, transform=train_transform,
6260
seed=args.seed)
63-
sampler = RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=2)
61+
sampler = utils.RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=2)
6462
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.workers,
6563
sampler=sampler, drop_last=True)
6664
val_dataset, _ = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources, split='val',
@@ -80,8 +78,8 @@ def main(args: argparse.Namespace):
8078
backbone = models.__dict__[args.arch](mix_layers=args.mix_layers, mix_p=args.mix_p, mix_alpha=args.mix_alpha,
8179
pretrained=True)
8280
pool_layer = nn.Identity() if args.no_pool else None
83-
classifier = Classifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
84-
finetune=args.finetune, pool_layer=pool_layer).to(device)
81+
classifier = utils.ImageClassifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
82+
finetune=args.finetune, pool_layer=pool_layer).to(device)
8583

8684
# define optimizer and lr scheduler
8785
optimizer = SGD(classifier.get_parameters(base_lr=args.lr), args.lr, momentum=args.momentum, weight_decay=args.wd,
@@ -144,7 +142,7 @@ def main(args: argparse.Namespace):
144142
logger.close()
145143

146144

147-
def train(train_iter: ForeverDataIterator, model: Classifier, optimizer,
145+
def train(train_iter: ForeverDataIterator, model, optimizer,
148146
lr_scheduler: CosineAnnealingLR, epoch: int, args: argparse.Namespace):
149147
batch_time = AverageMeter('Time', ':4.2f')
150148
data_time = AverageMeter('Data', ':3.1f')

examples/domain_generalization/image_classification/mldg.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,6 @@
2020
import higher
2121

2222
sys.path.append('../../..')
23-
from dglib.modules.sampler import RandomDomainSampler
24-
from dglib.modules.classifier import ImageClassifier as Classifier
2523
from tllib.utils.data import ForeverDataIterator
2624
from tllib.utils.metric import accuracy
2725
from tllib.utils.meter import AverageMeter, ProgressMeter
@@ -61,7 +59,7 @@ def main(args: argparse.Namespace):
6159
split='train', download=True, transform=train_transform,
6260
seed=args.seed)
6361
n_domains_per_batch = args.n_support_domains + args.n_query_domains
64-
sampler = RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=n_domains_per_batch)
62+
sampler = utils.RandomDomainSampler(train_dataset, args.batch_size, n_domains_per_batch=n_domains_per_batch)
6563
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.workers,
6664
sampler=sampler, drop_last=True)
6765
val_dataset, _ = utils.get_dataset(dataset_name=args.data, root=args.root, task_list=args.sources, split='val',
@@ -79,8 +77,8 @@ def main(args: argparse.Namespace):
7977
print("=> using pre-trained model '{}'".format(args.arch))
8078
backbone = utils.get_model(args.arch)
8179
pool_layer = nn.Identity() if args.no_pool else None
82-
classifier = Classifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
83-
finetune=args.finetune, pool_layer=pool_layer).to(device)
80+
classifier = utils.ImageClassifier(backbone, num_classes, freeze_bn=args.freeze_bn, dropout_p=args.dropout_p,
81+
finetune=args.finetune, pool_layer=pool_layer).to(device)
8482

8583
# define optimizer and lr scheduler
8684
optimizer = SGD(classifier.get_parameters(base_lr=args.lr), args.lr, momentum=args.momentum, weight_decay=args.wd,
@@ -155,7 +153,7 @@ def random_split(x_list, labels_list, n_domains_per_batch, n_support_domains):
155153
return support_domain_list, query_domain_list
156154

157155

158-
def train(train_iter: ForeverDataIterator, model: Classifier, optimizer, lr_scheduler: CosineAnnealingLR, epoch: int,
156+
def train(train_iter: ForeverDataIterator, model, optimizer, lr_scheduler: CosineAnnealingLR, epoch: int,
159157
n_domains_per_batch: int, args: argparse.Namespace):
160158
batch_time = AverageMeter('Time', ':4.2f')
161159
data_time = AverageMeter('Data', ':3.1f')

0 commit comments

Comments (0)