Force the input batch size to be equal to the number of GPUs during testing
This commit is contained in:
@@ -439,6 +439,9 @@ class BaseModel(MetaModel, nn.Module):
 def run_test(model):
     """Accept the instance object(model) here, and then run the test loop."""

+    if torch.distributed.get_world_size() != model.engine_cfg['sampler']['batch_size']:
+        raise ValueError("The batch size ({}) must be equal to the number of GPUs ({}) in testing mode!".format(
+            model.engine_cfg['sampler']['batch_size'], torch.distributed.get_world_size()))
     rank = torch.distributed.get_rank()
     with torch.no_grad():
         info_dict = model.inference(rank)
Reference in New Issue
Block a user