import torch.nn.functional as F

# Hyperparameters for the PLDRv51-104M training run.
hpdict = {
    'num_layers': 7,                  # number of decoder layers
    'd_model': 768,                   # embedding / model dimension
    'num_heads': 12,                  # attention heads per layer
    'dff': 2048,                      # feed-forward layer width
    'A_dff': 170,                     # feed-forward width in the residual A network
    'num_reslayerA': 8,               # residual layers in the A network
    'num_denseA': 2,                  # dense layers per residual A block
    'input_vocab_size': 32000,        # tokenizer vocabulary size
    'max_seq_len': 1024,              # maximum context length
    'epochs': 1,                      # training epochs
    'save_model_path': './PLDRv51-104M-checkpoint',  # checkpoint output path
    'warmup_steps': 8000,             # learning-rate warmup steps
    'lr_total_steps': 250000,         # total steps for the LR schedule
    'learning_rate': 0.0012,          # peak learning rate
    'lr_alpha': 0.1,                  # LR schedule decay parameter
    'adamw_decay': 0.1,               # AdamW weight decay
    'activation': F.silu,             # activation function
    'disable_amp': False,             # keep automatic mixed precision enabled
    'auto_size_minimum': None,
    'disable_fsdp_mixed_precision': False,
    'fsdp_cpu_offload': False,        # do not offload FSDP parameters to CPU
    'fsdp_sharding_strategy': 'HYBRID_SHARD',  # FSDP sharding strategy
    'backward_prefetch': 'PRE',       # FSDP backward prefetch setting
    'save_type': 'torch',             # checkpoint serialization format
}
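
# A minimal sketch (not the repository's trainer) of how the optimizer-related
# entries of hpdict could be consumed: AdamW driven by 'learning_rate' and
# 'adamw_decay', plus a linear-warmup / cosine-decay schedule built from
# 'warmup_steps', 'lr_total_steps', and 'lr_alpha'. Treating 'lr_alpha' as the
# final fraction of the peak LR is an assumption, and the Linear module is only
# a placeholder to keep the example self-contained; the actual PLDR-LLM training
# code may implement its schedule differently.
import math

import torch


def make_lr_lambda(warmup_steps, total_steps, alpha):
    """Return a LambdaLR scale factor: linear warmup, then cosine decay to alpha."""
    def lr_lambda(step):
        if step < warmup_steps:
            return step / max(1, warmup_steps)
        progress = (step - warmup_steps) / max(1, total_steps - warmup_steps)
        progress = min(progress, 1.0)
        cosine = 0.5 * (1.0 + math.cos(math.pi * progress))
        return alpha + (1.0 - alpha) * cosine  # decays from 1.0 down to alpha
    return lr_lambda


model = torch.nn.Linear(hpdict['d_model'], hpdict['input_vocab_size'])  # placeholder module
optimizer = torch.optim.AdamW(
    model.parameters(),
    lr=hpdict['learning_rate'],
    weight_decay=hpdict['adamw_decay'],
)
scheduler = torch.optim.lr_scheduler.LambdaLR(
    optimizer,
    lr_lambda=make_lr_lambda(
        hpdict['warmup_steps'],
        hpdict['lr_total_steps'],
        hpdict['lr_alpha'],
    ),
)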