Indexing with a uint8 (byte) tensor behaves as a boolean mask, while indexing with an int64 tensor gathers elements by index:

import numpy as np
import torch

a = torch.tensor(np.arange(10))
# tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
b = a > 5
# tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, 1], dtype=torch.uint8)
a[b]  # uint8 tensor -> treated as a boolean mask
# tensor([6, 7, 8, 9])
b = torch.tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, 1])
# tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, 1])  <- default dtype here is int64, not uint8
a[b]  # int64 tensor -> treated as indices, so each 0/1 selects a[0]/a[1]
# tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, 1]) --> Note! The result differs from the previous one.
a[b.to(torch.uint8)]  # cast to uint8 to recover mask semantics
# tensor([6, 7, 8, 9]) --> same result as the mask
a[b.nonzero().squeeze()]  # or convert the mask positions to explicit indices
# tensor([6, 7, 8, 9]) --> same result
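On newer PyTorch (>= 1.2), a > 5 returns dtype=torch.bool rather than uint8, and bool tensors also index as a mask:

a[b.bool()]  # bool tensors index as a mask on PyTorch >= 1.2
# tensor([6, 7, 8, 9])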
Q: DataLoader: what does shuffle=True actually do? (It draws a new permutation of the sample order every epoch.)
Q: DataLoader: with num_workers > 0, does each worker copy everything? (Each worker is a separate process with its own copy of the dataset object and its own NumPy random state; see the repro sketch below.)
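A minimal repro sketch of the num_workers pitfall (RandDataset is an illustrative name; assumes the default fork start method on Linux): forked workers inherit the parent's NumPy RNG state, so unseeded workers can return identical "random" draws. The worker_init_fn below fixes this by giving each worker a distinct NumPy seed.

import numpy as np
from torch.utils.data import DataLoader, Dataset

class RandDataset(Dataset):
    """Illustrative dataset that returns one NumPy random draw per item."""
    def __len__(self):
        return 4
    def __getitem__(self, idx):
        # Each worker process draws from its own copy of NumPy's global RNG.
        return np.random.randint(0, 1_000_000)

if __name__ == "__main__":
    loader = DataLoader(RandDataset(), num_workers=2)
    # With fork, both workers start from the same inherited RNG state,
    # so "random" values can coincide across workers.
    print([int(x) for x in loader])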
import numpy as np
import torch

def worker_init_fn(worker_id):
    """Seed NumPy differently in each PyTorch DataLoader worker process.

    A base seed is drawn from the PyTorch random generator so that the
    per-worker NumPy seeds stay consistent with the torch seed.
    References:
        https://pytorch.org/docs/stable/notes/faq.html#dataloader-workers-random-seed
    """
    base_seed = torch.IntTensor(1).random_().item()
    # print(worker_id, base_seed)
    np.random.seed(base_seed + worker_id)
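A sketch of wiring it into a loader (MyDataset is a placeholder for any map-style dataset, not from the original):

loader = torch.utils.data.DataLoader(
    MyDataset(),                    # placeholder dataset
    batch_size=32,
    shuffle=True,                   # re-permutes the sample order every epoch
    num_workers=4,                  # each worker now gets a distinct NumPy seed
    worker_init_fn=worker_init_fn,
)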
Check gradient flow in network - PyTorch Forums https://discuss.pytorch.org/t/check-gradient-flow-in-network/15063/10
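The linked thread plots average per-layer gradient magnitudes after backward(); a minimal text-only variant in the same spirit (report_grad_flow is an illustrative name, not from the thread) could be:

def report_grad_flow(named_parameters):
    # Call after loss.backward(): prints the mean |grad| per parameter tensor,
    # making vanishing (near-zero) or exploding layers easy to spot.
    for name, p in named_parameters:
        if p.requires_grad and p.grad is not None:
            print(f"{name:40s} mean |grad| = {p.grad.abs().mean().item():.3e}")

# after loss.backward():
# report_grad_flow(model.named_parameters())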