```python
from __future__ import (
    division,
    absolute_import,
    with_statement,
    print_function,
    unicode_literals,
)

import torch
import torch.multiprocessing as multiprocessing
from torch.utils.data.sampler import SequentialSampler, RandomSampler, BatchSampler
import signal
import collections
import re
import sys
import threading
import traceback
from torch._six import string_classes

int_classes = int

if sys.version_info[0] == 2:
    import Queue as queue

    _mp_ctx = multiprocessing
else:
    import queue

    _mp_ctx = multiprocessing.get_context("forkserver")


class ExceptionWrapper(object):
    r"Wraps an exception plus traceback to communicate across threads"

    def __init__(self, exc_info):
        self.exc_type = exc_info[0]
        self.exc_msg = "".join(traceback.format_exception(*exc_info))


_use_shared_memory = False
"""Whether to use shared memory in default_collate"""
```
I have the above part of a script, and it fails with an error. How do I solve it? I'm running it on Windows 11.
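For reference, two things in this snippet are known to break on a current setup: `from torch._six import string_classes` fails on PyTorch 2.0+ (the private `torch._six` module was removed), and `multiprocessing.get_context("forkserver")` raises `ValueError` on Windows, where only the `"spawn"` start method is available. Below is a minimal compatibility sketch, assuming Python 3 and a recent PyTorch; the replacement value for `string_classes` is an assumption based on what `torch._six` used to define:

```python
import sys
import torch.multiprocessing as multiprocessing

# torch._six was removed in PyTorch 2.0; on Python 3 its string_classes
# was an alias for (str, bytes). Plain `str` also works for most
# isinstance() checks if bytes are not expected.
string_classes = (str, bytes)
int_classes = int

# Python 2's `Queue` module no longer exists; Python 3 always ships `queue`.
import queue

# Windows supports only the "spawn" start method; "forkserver" is
# Unix-only and raises ValueError here.
if sys.platform == "win32":
    _mp_ctx = multiprocessing.get_context("spawn")
else:
    _mp_ctx = multiprocessing.get_context("forkserver")
```

With those replacements in place, the `from __future__ import ...` block and the `sys.version_info[0] == 2` branch can also be dropped, since they exist only for Python 2 compatibility.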