xiaoshideta / mask_rgbd

MIT License

bash #1

Open xiaoshideta opened 1 month ago

xiaoshideta commented 1 month ago

python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.5 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.01 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.05 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=1.0 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
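The four runs above differ only in --distillation_beta. A minimal sketch of the same sweep as a loop, assuming the runs are launched one after another so the port can be reused:

```bash
# Sketch only (not part of the repo): replay the distillation_beta sweep above
# sequentially; all other flags are kept exactly as in the original commands.
for beta in 0.5 0.01 0.05 1.0; do
  python -m torch.distributed.launch --nproc_per_node=2 train.py \
    --port=29516 \
    --distillation_alpha=1.0 \
    --distillation_beta=${beta} \
    --distillation_flag=1 \
    --lambda_mask=0.75 \
    --losse loss1
done
```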

xiaoshideta commented 1 month ago

python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.75 --losse loss2
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.75 --losse loss3
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.75 --losse loss4
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=1.0 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.01 --distillation_flag=1 --lambda_mask=0.75 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=1.0 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.9 --losse loss1

xiaoshideta commented 1 month ago

python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=1.0 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=1.0 --losse loss4
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.01 --distillation_flag=1 --lambda_mask=1.0 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.01 --distillation_flag=1 --lambda_mask=1.0 --losse loss4
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.05 --distillation_flag=1 --lambda_mask=1.0 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.05 --distillation_flag=1 --lambda_mask=1.0 --losse loss4
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.9 --losse loss1
python -m torch.distributed.launch --nproc_per_node=2 train2.py --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 --distillation_flag=1 --lambda_mask=0.9 --losse loss4
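The train2.py runs above form a small grid: each --distillation_beta in {0.1, 0.01, 0.05} paired with --losse loss1 and loss4 at --lambda_mask=1.0, plus the two --lambda_mask=0.9 runs at beta=0.1. A sketch of the same grid as nested loops, again assuming sequential execution on one port:

```bash
# Sketch only (not part of the repo): the lambda_mask=1.0 grid over beta x loss,
# followed by the two lambda_mask=0.9 runs at beta=0.1, with all other flags
# unchanged from the original commands.
for beta in 0.1 0.01 0.05; do
  for loss in loss1 loss4; do
    python -m torch.distributed.launch --nproc_per_node=2 train2.py \
      --port=29516 --distillation_alpha=1.0 --distillation_beta=${beta} \
      --distillation_flag=1 --lambda_mask=1.0 --losse ${loss}
  done
done
for loss in loss1 loss4; do
  python -m torch.distributed.launch --nproc_per_node=2 train2.py \
    --port=29516 --distillation_alpha=1.0 --distillation_beta=0.1 \
    --distillation_flag=1 --lambda_mask=0.9 --losse ${loss}
done
```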