Start analyzing https://www.copymanga.com/comic/xbdhsdx
Analyzing success!
Start download 小斑的黃色短靴
Start downloading 小斑的黃色短靴
total 1 episode.
Downloading ep 全一话
Downloading 全一话 page 1: https://mirror277.mangafuna.xyz:12001/comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 699, in urlopen
httplib_response = self._make_request(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 382, in _make_request
self._validate_conn(conn)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 1010, in _validate_conn
conn.connect()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connection.py", line 416, in connect
self.sock = ssl_wrap_socket(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\ssl_.py", line 449, in ssl_wrap_socket
ssl_sock = _ssl_wrap_socket_impl(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\ssl_.py", line 493, in _ssl_wrap_socket_impl
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 512, in wrap_socket
return self.sslsocket_class._create(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1070, in _create
self.do_handshake()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1341, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\adapters.py", line 439, in send
resp = conn.urlopen(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 755, in urlopen
retries = retries.increment(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\retry.py", line 574, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='mirror277.mangafuna.xyz', port=12001): Max retries exceeded with url: /comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)')))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 338, in error_loop
process()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 311, in download
crawler.download_image()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 80, in download_image
result = self.downloader.img(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\module_grabber.py", line 14, in img
return self.grab(grabimg, url, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\module_grabber.py", line 30, in grab
return grab_method(url, **new_kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 208, in grabimg
return ImgResult(grabber(*args, **kwargs))
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 105, in grabber
r = await_(do_request, s, url, proxies, retry, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 905, in wrapped
return f(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 927, in await_
return async_(callback, *args, **kwargs).get()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 682, in get
raise err
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 474, in wrap_worker
self.ret = self.worker(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 118, in do_request
r = s.request(kwargs.pop("method", "GET"), url, timeout=20,
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\adapters.py", line 514, in send
raise SSLError(e, request=request)
requests.exceptions.SSLError: HTTPSConnectionPool(host='mirror277.mangafuna.xyz', port=12001): Max retries exceeded with url: /comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)')))
Downloading 全一话 page 1: https://mirror277.mangafuna.xyz:12001/comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 699, in urlopen
httplib_response = self._make_request(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 382, in _make_request
self._validate_conn(conn)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 1010, in _validate_conn
conn.connect()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connection.py", line 416, in connect
self.sock = ssl_wrap_socket(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\ssl_.py", line 449, in ssl_wrap_socket
ssl_sock = _ssl_wrap_socket_impl(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\ssl_.py", line 493, in _ssl_wrap_socket_impl
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 512, in wrap_socket
return self.sslsocket_class._create(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1070, in _create
self.do_handshake()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1341, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\adapters.py", line 439, in send
resp = conn.urlopen(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 755, in urlopen
retries = retries.increment(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\retry.py", line 574, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='mirror277.mangafuna.xyz', port=12001): Max retries exceeded with url: /comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)')))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 338, in error_loop
process()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 311, in download
crawler.download_image()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 80, in download_image
result = self.downloader.img(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\module_grabber.py", line 14, in img
return self.grab(grabimg, url, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\module_grabber.py", line 30, in grab
return grab_method(url, **new_kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 208, in grabimg
return ImgResult(grabber(*args, **kwargs))
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 105, in grabber
r = await_(do_request, s, url, proxies, retry, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 905, in wrapped
return f(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 927, in await_
return async_(callback, *args, **kwargs).get()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 682, in get
raise err
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 474, in wrap_worker
self.ret = self.worker(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 118, in do_request
r = s.request(kwargs.pop("method", "GET"), url, timeout=20,
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\adapters.py", line 514, in send
raise SSLError(e, request=request)
requests.exceptions.SSLError: HTTPSConnectionPool(host='mirror277.mangafuna.xyz', port=12001): Max retries exceeded with url: /comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)')))
Downloading 全一话 page 1: https://mirror277.mangafuna.xyz:12001/comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 699, in urlopen
httplib_response = self._make_request(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 382, in _make_request
self._validate_conn(conn)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 1010, in _validate_conn
conn.connect()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connection.py", line 416, in connect
self.sock = ssl_wrap_socket(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\ssl_.py", line 449, in ssl_wrap_socket
ssl_sock = _ssl_wrap_socket_impl(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\ssl_.py", line 493, in _ssl_wrap_socket_impl
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 512, in wrap_socket
return self.sslsocket_class._create(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1070, in _create
self.do_handshake()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1341, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\adapters.py", line 439, in send
resp = conn.urlopen(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\connectionpool.py", line 755, in urlopen
retries = retries.increment(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\urllib3\util\retry.py", line 574, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='mirror277.mangafuna.xyz', port=12001): Max retries exceeded with url: /comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)')))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 338, in error_loop
process()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 311, in download
crawler.download_image()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 80, in download_image
result = self.downloader.img(
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\module_grabber.py", line 14, in img
return self.grab(grabimg, url, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\module_grabber.py", line 30, in grab
return grab_method(url, **new_kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 208, in grabimg
return ImgResult(grabber(*args, **kwargs))
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 105, in grabber
r = await_(do_request, s, url, proxies, retry, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 905, in wrapped
return f(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 927, in await_
return async_(callback, *args, **kwargs).get()
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 682, in get
raise err
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 474, in wrap_worker
self.ret = self.worker(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\grabber.py", line 118, in do_request
r = s.request(kwargs.pop("method", "GET"), url, timeout=20,
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\requests\adapters.py", line 514, in send
raise SSLError(e, request=request)
requests.exceptions.SSLError: HTTPSConnectionPool(host='mirror277.mangafuna.xyz', port=12001): Max retries exceeded with url: /comic/xbdhsdx/ad0c8/2e5e79e4-df23-11e8-beac-e43a6e0fbea3.jpg!kb_w_read_large (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)')))
Something bad happened, skip the episode.
Thread crashed: <function DownloadManager.start_download.<locals>.do_download at 0x0000025327327250>
Traceback (most recent call last):
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\worker\__init__.py", line 474, in wrap_worker
self.ret = self.worker(*args, **kwargs)
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\download_manager.py", line 138, in do_download
download(mission, profile(mission.module.config["savepath"]))
File "C:\Users\yaoli\AppData\Local\Programs\Python\Python310\lib\site-packages\comiccrawler\crawler.py", line 233, in download
raise Exception("Mission is not completed")
Exception: Mission is not completed
連續失敗 1 次,停止下載
升级到最新版本后,拷贝漫画(copymanga)分析获取章节没有问题,但是下载时会报错(SSL 证书验证失败,见上方日志)。
下载《小斑的黄色短靴》这部漫画时即出现上述报错。
尝试过带 cookie 和不带 cookie 两种方式下载,均出现同样的报错内容。