{"d_model": 128, "num_layers": 2, "seq_len": 256, "batch_size": 128, "learning_rate": 0.000474, "weight_decay": 0.0381, "epochs": 8}