---
# Training parameters.
# limit: presumably caps the number of batches/samples per epoch — TODO confirm
# against the training loop that consumes this config.
epoch: 5
batch_size: 4
num_workers: 8
limit: 5

# Optimizer: dotted path resolved by the consumer (e.g. via importlib);
# extra keys (lr) are passed as constructor kwargs.
optimizer:
  name: torch.optim.Adam
  lr: 0.0001

# Criterion (loss function), same dotted-path convention.
loss_function:
  name: torch.nn.CrossEntropyLoss

# Learning-rate scheduler: multiply LR by gamma every step_size epochs.
scheduler:
  name: torch.optim.lr_scheduler.StepLR
  gamma: 0.15
  step_size: 3