I dumped the code into a Python script:
from pprint import pprint
import numpy as np
import matplotlib.pyplot as plt
import torch
import pickle
import cv2
import pytorch_lightning as pl
from zephyr.models import getModel
from zephyr.models.pointnet2 import PointNet2SSG
from zephyr.datasets import getDataloader
from zephyr.options import getOptions, checkArgs
from zephyr.utils.bop_dataset import BopDataset, BopDatasetArgs
from zephyr.utils.metrics import add, adi
from zephyr.utils import K2meta, to_np, depth2cloud
DATASET_NAME = "lmo"
PREPROCESSED_DATA_FOLDER = "../python/zephyr/data/lmo/matches_data_test/" # The path to the preprocessed data folder
CKPT_PATH = "../python/zephyr/ckpts/final_lmo.ckpt" # The path to the checkpoint
USE_ICP = False  # Not using ICP for the LMO dataset, as it uses only PPF hypotheses, which have already been refined by ICP
# BOP_ROOT = "/home/qiaog/datasets/bop/" # Change this to the root folder of downloaded BOP dataset
BOP_ROOT = "/home/mona/zephyr/python/zephyr/data/bop"
PPF_RESULT_PATH = "../python/zephyr/data/lmo_list_bop_test_v1.txt" # A text file storing all PPF pose hypotheses
MODEL_DATA_TPATH = "../python/zephyr/data/lmo/matches_data_test/model_data/model_cloud_{:02d}.npz" # Path template for the sampled model point clouds
'''Set up the arguments for the model'''
parser = getOptions()
args = parser.parse_args([])
# Model-related
args.model_name = "pn2"
args.dataset = "HSVD_diff_uv_norm"
args.no_valid_proj = True
args.no_valid_depth = True
args.inconst_ratio_th = 100
args.icp = USE_ICP
# Dataset-related
args.dataset_root = [PREPROCESSED_DATA_FOLDER]
args.dataset_name = [DATASET_NAME]
args.resume_path = CKPT_PATH
args.test_dataset = True
'''Initialize pytorch dataloader and model'''
# dataloader is only needed for the getPointNetData() function
loader = getDataloader(args)[0]
model = PointNet2SSG(args.dim_point, args, num_class=1)
ckpt = torch.load(CKPT_PATH)
model.load_state_dict(ckpt['state_dict'])
model = model.to(0)
model = model.eval()
'''Initialize the BOP dataset'''
# Set up the options
bop_args = BopDatasetArgs(
    bop_root=BOP_ROOT,
    dataset_name="lmo",
    model_type=None,
    split_name="bop_test",  # Use the test set defined in the BOP challenge (different from the original test set)
    split="test",
    split_type=None,
    ppf_results_file=PPF_RESULT_PATH,
    skip=1,  # Iterate over all test samples, with no skipping
)
bop_dataset = BopDataset(bop_args)
print("Length of the test dataset:", len(bop_dataset))
'''Get one datapoint in the BOP dataset'''
data_raw = bop_dataset[150]
# Extract the data needed for test forwarding
obj_id, scene_id, im_id = data_raw['obj_id'], data_raw['scene_id'], data_raw['im_id']
img, depth, scene_camera = data_raw['img'], data_raw['depth'], data_raw['scene_camera']
scene_meta = data_raw['scene_meta']
mat_gt = data_raw['mat_gt']
cam_K = np.asarray(scene_camera['cam_K']).reshape((3, 3))
# Load the information of the model point cloud from the pre-processed dataset
model_data_path = MODEL_DATA_TPATH.format(obj_id)
model_data = np.load(model_data_path)
model_points, model_colors, model_normals = model_data['model_points'], model_data['model_colors'], model_data['model_normals']
# Get the proper error function according to whether the object is symmetric or not
is_sym = obj_id in bop_dataset.sym_obj_ids
err_func = adi if is_sym else add
# Load the pose hypotheses from PPF results
ppf_trans, ppf_scores = bop_dataset.getPPFHypos(obj_id, scene_id, im_id)
pose_hypos = ppf_trans
# Compute the ADD/ADI error for the pose hypotheses w.r.t. the ground truth pose
pp_err = np.asarray([err_func(mat[:3, :3], mat[:3, 3], mat_gt[:3, :3], mat_gt[:3, 3], model_points) for mat in pose_hypos])
print("The ground truth error for pose hypotheses estimated beforehand: ")
print(pp_err[:10])
RUN_HALCON = True # Change this to False if HALCON is not ready
if RUN_HALCON:
    from zephyr.utils.halcon_wrapper import PPFModel
    # Get the full mesh model provided by the LineMOD dataset
    full_model_path = bop_dataset.model_tpath.format(obj_id=obj_id)
    # Create the surface model (PPF training stage)
    ppf_model = PPFModel(full_model_path)
    scene_pc = depth2cloud(depth, depth > 0, cam_K)
    poses_ppf, scores_ppf, time_ppf = ppf_model.find_surface_model(scene_pc * 1000.0)  # The wrapper requires the input to be in millimeters
    poses_ppf[:, :3, 3] = poses_ppf[:, :3, 3] / 1000.0  # Convert from millimeters to meters
    pp_err = np.asarray([err_func(mat[:3, :3], mat[:3, 3], mat_gt[:3, :3], mat_gt[:3, 3], model_points) for mat in poses_ppf])
    print("PPF algorithm takes %.2f seconds" % time_ppf)
    print("The ground truth error for pose hypotheses estimated online: ")
    print(pp_err[:10])
    # Use the poses estimated online
    pose_hypos = poses_ppf
data = {
    "img": img, "depth": depth, "cam_K": cam_K,
    "model_colors": model_colors, "model_points": model_points, "model_normals": model_normals,
    "pose_hypos": pose_hypos,
}
for k, v in data.items():
    print(k, type(v), v.shape, v.dtype, v.max(), v.min())
def networkInference(model, dataloader, data):
    # Convert the data to the pytorch data format
    scoring_data = {}
    img_blur = cv2.GaussianBlur(data['img'], (5, 5), 0)
    scoring_data['img'] = torch.from_numpy(img_blur / 255.)
    scoring_data['depth'] = torch.from_numpy(data['depth'])
    scoring_data['transforms'] = torch.from_numpy(data['pose_hypos'])
    scoring_data['meta_data'] = K2meta(data['cam_K'])
    scoring_data['model_points'] = torch.from_numpy(data['model_points'])
    scoring_data['model_colors'] = torch.from_numpy(data['model_colors'])
    scoring_data['model_normals'] = torch.from_numpy(data['model_normals'])
    # If we have the GT error, store it. Otherwise, use a dummy one.
    if "pp_err" not in data:
        scoring_data['pp_err'] = torch.zeros(len(data['pose_hypos']))
    else:
        scoring_data['pp_err'] = data['pp_err']
    # Pre-process the data
    point_x, uv_original = dataloader.dataset.getPointNetData(scoring_data, return_uv_original=True)
    # Network inference
    pred_score = model({"point_x": point_x.to(model.device)})
    selected_poses = to_np(scoring_data['transforms'])
    selected_errs = scoring_data['pp_err']
    # Note that some hypotheses may be excluded beforehand if they violate the
    # free-space constraint too much, so pp_err may end up with a different size.
    return selected_poses, pred_score, selected_errs
# Use the ADD/ADI error
data['pp_err'] = pp_err
model = model.to(0)
pose_hypos, pred_score, err = networkInference(model, loader, data)
pred_score = to_np(pred_score).reshape(-1)
'''The observed image and depth map'''
plt.figure(figsize=(8, 3), dpi=200)
plt.subplot(121)
plt.imshow(img)
plt.axis('off')
plt.subplot(122)
plt.imshow(depth)
plt.axis('off')
plt.colorbar()
plt.show()
from zephyr.utils.renderer import Renderer, blend
from zephyr.utils.vis import plotImages
# Render the GT and predicted pose
model_path = bop_dataset.model_tpath.format(obj_id=obj_id)
# GT pose
renderer = Renderer(K2meta(cam_K))
renderer.addObject(obj_id, model_path, pose=mat_gt, mm2m=True)
gt_color, gt_depth = renderer.render()
gt_blend = blend(img, gt_color, gt_depth)
# Predicted pose
renderer = Renderer(K2meta(cam_K))
renderer.addObject(obj_id, model_path, pose=pose_hypos[pred_score.argmax()], mm2m=True)
pred_color, pred_depth = renderer.render()
pred_blend = blend(img, pred_color, pred_depth)
plotImages(
    [gt_blend, pred_blend],
    ["GT pose", "Estimated pose"]
)
plt.show()
print("obj_id:", obj_id, "scene_id:", scene_id, "im_id:", im_id)
plt.scatter(err, pred_score)
plt.xlabel("ADD/ADI error")
plt.ylabel("Predicted scores")
plt.show()
and here's the error:
(zephyr) mona@ard-gpu-01:~/zephyr/notebooks$ python TestExample.py
Initializing lmo dataset from ../python/zephyr/data/lmo/matches_data_test/
Using BOP dataset format. Total dataset: 1445
Using PointNet Dataset
Initializating test dataset ['u', 'v', 'H_diff', 'S_diff', 'V_diff', 'D_diff', 'norm_cos']
dim_agg: 0 dim_point: 7
No loss on the best hypotheses
PointNet2: extra_bottleneck_dim = 0
mask: [] xyz: [0, 1] points: [2, 3, 4, 5, 6]
Length of the test dataset: 1445
The ground truth error for pose hypotheses estimated beforehand:
[0.00846804 0.00845948 0.00845618 0.00845684 0.00846006 0.00848063
0.00846173 0.00846185 0.00846851 0.00846199]
PPF algorithm takes 3.44 seconds
The ground truth error for pose hypotheses estimated online:
[0.0084569 0.00846351 0.0084639 0.00846076 0.0084621 0.00845772
0.00845895 0.00846228 0.00848187 0.00846554]
img <class 'numpy.ndarray'> (480, 640, 3) uint8 255 0
depth <class 'numpy.ndarray'> (480, 640) float32 2.432 0.0
cam_K <class 'numpy.ndarray'> (3, 3) float64 573.57043 0.0
model_colors <class 'numpy.ndarray'> (570, 3) float64 0.6624836601307194 0.12000000000000002
model_points <class 'numpy.ndarray'> (570, 3) float64 0.045578560000000004 -0.045749572999999995
model_normals <class 'numpy.ndarray'> (570, 3) float64 0.99860799 -0.99988192
pose_hypos <class 'numpy.ndarray'> (100, 4, 4) float64 1.3008981509874424 -0.9986655614340839
python3: /usr/include/eigen3/Eigen/src/Core/util/XprHelper.h:133: Eigen::internal::variable_if_dynamic<T, Value>::variable_if_dynamic(T) [with T = long int; int Value = 3]: Assertion `v == T(Value)' failed.
Aborted (core dumped)
I am getting the exact same Eigen error as mentioned in https://github.com/r-pad/zephyr/issues/11. Could you please suggest a fix/patch?
You can also take a look at https://gitlab.com/libeigen/eigen/-/issues/2662 for more details.
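For reference, this assertion is Eigen's debug-mode check that a fixed-size object or view is constructed with a matching runtime dimension (here Value = 3). A minimal self-contained C++ illustration of this class of failure, assuming a plain Eigen install built without -DNDEBUG (this is my own sketch, not zephyr code):

#include <Eigen/Dense>

int main() {
    Eigen::VectorXd v(4);  // dynamic vector of length 4
    v.setOnes();
    // Constructing a fixed-size-3 view over a length-4 vector trips the same
    // variable_if_dynamic check as in the traceback (Value = 3, runtime v = 4)
    Eigen::Ref<Eigen::Vector3d> r(v);
    return 0;
}

Since the dimensions in the script above look consistent, a mismatch between the Eigen versions compiled into different libraries seems more likely here than a genuine size bug.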
Can you try running with RUN_HALCON = False?
I got it working with the following (I didn't use RUN_HALCON = False):
eigen 3.3.9
pcl 1.9.1
boost 1.68.0
metslib-trunk-2020-05-13
I installed these packages from source and checked each one individually for correctness with a minimal C++ script.
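For example, a minimal sketch of the kind of check I used for the Eigen install (the include path in the build command is an assumption about where the from-source install landed):

// check_eigen.cpp -- sanity check that the compiler picks up the expected Eigen.
// Build with, e.g.: g++ -I/usr/local/include/eigen3 check_eigen.cpp -o check_eigen
#include <iostream>
#include <Eigen/Dense>

int main() {
    // Confirm the headers found are the expected 3.3.9
    std::cout << "Eigen " << EIGEN_WORLD_VERSION << "."
              << EIGEN_MAJOR_VERSION << "." << EIGEN_MINOR_VERSION << std::endl;
    // Exercise a fixed-size block extraction, the pattern behind the assertion
    Eigen::Matrix4d pose = Eigen::Matrix4d::Identity();
    Eigen::Vector3d t = pose.block<3, 1>(0, 3);
    std::cout << "translation: " << t.transpose() << std::endl;
    return 0;
}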
The rest of the packages:
(zephyr) mona@ard-gpu-01:~/zephyr$ pip freeze
absl-py==1.4.0
addict==2.4.0
anyio==3.7.0
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
arrow==1.2.3
astropy==5.3
asttokens==2.2.1
attrs==23.1.0
autopep8==2.0.1
backcall==0.2.0
beautifulsoup4==4.12.2
bleach==6.0.0
bop-toolkit-lib==1.0
cachetools==5.3.1
certifi==2023.5.7
cffi==1.15.1
charset-normalizer==3.1.0
click==8.1.3
cloudpickle==2.2.1
comm==0.1.3
commonmark==0.9.1
ConfigArgParse==1.5.3
contourpy==1.0.7
cycler==0.11.0
Cython==0.29.35
dash==2.10.2
dash-core-components==2.0.0
dash-html-components==2.0.0
dash-table==5.0.0
dask==2023.5.1
debugpy==1.6.7
decorator==5.1.1
defusedxml==0.7.1
distro==1.8.0
einops==0.6.1
exceptiongroup==1.1.1
executing==1.2.0
fastjsonschema==2.17.1
ffmpeg-python==0.2.0
Flask==2.2.5
fonttools==4.39.4
fqdn==1.5.1
freetype-py==2.4.0
fsspec==2023.5.0
future==0.18.3
future-fstrings==1.2.0
fuzzyfinder==2.1.0
glumpy==1.2.0
google-auth==2.19.1
google-auth-oauthlib==1.0.0
grpcio==1.54.2
hsluv==5.0.3
idna==3.4
imageio==2.25.1
importlib-metadata==6.6.0
ipykernel==6.23.1
ipython==8.13.2
ipython-genutils==0.2.0
ipywidgets==8.0.6
isoduration==20.11.0
itsdangerous==2.1.2
jedi==0.18.2
Jinja2==3.1.2
joblib==1.2.0
jsonpointer==2.3
jsonschema==4.17.3
jupyter==1.0.0
jupyter-console==6.6.3
jupyter-events==0.6.3
jupyter_client==8.2.0
jupyter_core==5.3.0
jupyter_server==2.6.0
jupyter_server_terminals==0.4.4
jupyterlab-pygments==0.2.2
jupyterlab-widgets==3.0.7
kiwisolver==1.4.4
kornia==0.6.12
lazy_loader==0.2
locket==1.0.0
Markdown==3.4.3
MarkupSafe==2.1.2
matplotlib==3.7.1
matplotlib-inline==0.1.6
mistune==2.0.5
mpmath==1.3.0
multipledispatch==0.6.0
mvtec-halcon==23050.0.0
nbclassic==1.0.0
nbclient==0.8.0
nbconvert==7.4.0
nbformat==5.7.0
nest-asyncio==1.5.6
networkx==3.0
notebook==6.5.4
notebook_shim==0.2.3
numpy @ file:///home/conda/feedstock_root/build_artifacts/numpy_1651020388975/work
nvdu==1.0.0.0
nvidia-cublas-cu11==11.10.3.66
nvidia-cuda-nvrtc-cu11==11.7.99
nvidia-cuda-runtime-cu11==11.7.99
nvidia-cudnn-cu11==8.5.0.96
oauthlib==3.2.2
open3d==0.17.0
opencv-python==4.7.0.68
overrides==7.3.1
packaging==23.1
pandas==2.0.2
pandocfilters==1.5.0
parso==0.8.3
partd==1.4.0
pexpect==4.8.0
pickleshare==0.7.5
Pillow==9.5.0
platformdirs==3.5.1
plotly==5.14.1
pooch==1.7.0
prometheus-client==0.17.0
prompt-toolkit==3.0.38
protobuf==4.23.2
psutil==5.9.5
ptyprocess==0.7.0
pure-eval==0.2.2
py-cpuinfo==9.0.0
pyamg==5.0.0
pyasn1==0.5.0
pyasn1-modules==0.3.0
pybind11==2.10.4
pybind11-global @ file:///home/conda/feedstock_root/build_artifacts/pybind11-split_1648908988597/work
pycocotools @ git+https://github.com/MartinSmeyer/cocoapi.git@b114069f17162f704f20de15bbb15838305e960e#subdirectory=PythonAPI
pycodestyle==2.10.0
pycparser==2.21
pyerfa==2.0.0.3
pyglet==1.5.11
Pygments==2.15.1
PyOpenGL==3.1.7
pyparsing==3.0.9
pypng==0.20220715.0
pyquaternion==0.9.9
pyrr==0.10.3
pyrsistent==0.19.3
python-dateutil==2.8.2
python-json-logger==2.0.7
pytorch-lightning==0.7.6
pytz==2023.3
PyWavefront==0.2.0
PyWavelets==1.4.1
PyYAML==6.0
pyzmq==25.1.0
qtconsole==5.4.3
QtPy==2.3.1
requests==2.31.0
requests-oauthlib==1.3.1
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rich==12.6.0
rsa==4.9
scikit-image==0.20.0
scikit-learn==1.2.1
scipy==1.10.0
Send2Trash==1.8.2
SimpleITK==2.2.1
six==1.16.0
sniffio==1.3.0
soupsieve==2.4.1
stack-data==0.6.2
tenacity==8.2.2
tensorboard==2.13.0
tensorboard-data-server==0.7.0
terminado==0.17.1
textual==0.1.18
threadpoolctl==3.1.0
tifffile==2023.2.3
tinycss2==1.2.1
tiptop==0.2.8
tomli==2.0.1
toolz==0.12.0
torch==1.13.0+cu117
torchtext==0.14.0
torchvision==0.14.0+cu117
tornado==6.3.2
tqdm==4.64.1
traitlets==5.9.0
triangle==20220202
typing_extensions==4.4.0
tzdata==2023.3
uri-template==1.2.0
urllib3==1.26.16
vispy==0.13.0
wcwidth==0.2.6
webcolors==1.13
webencodings==0.5.1
websocket-client==1.5.2
Werkzeug==2.2.3
widgetsnbextension==4.0.7
yacs==0.1.8
zipp==3.15.0
and the conda environment:
(zephyr) mona@ard-gpu-01:~/zephyr$ conda list -n zephyr
# packages in environment at /home/mona/anaconda3/envs/zephyr:
#
# Name Version Build Channel
_libgcc_mutex 0.1 main
_openmp_mutex 5.1 1_gnu
absl-py 1.4.0 pypi_0 pypi
addict 2.4.0 pypi_0 pypi
anyio 3.7.0 pypi_0 pypi
argon2-cffi 21.3.0 pypi_0 pypi
argon2-cffi-bindings 21.2.0 pypi_0 pypi
arrow 1.2.3 pypi_0 pypi
astropy 5.3 pypi_0 pypi
asttokens 2.2.1 pypi_0 pypi
attrs 23.1.0 pypi_0 pypi
backcall 0.2.0 pypi_0 pypi
beautifulsoup4 4.12.2 pypi_0 pypi
bleach 6.0.0 pypi_0 pypi
bop-toolkit-lib 1.0 pypi_0 pypi
bzip2 1.0.8 h7b6447c_0
ca-certificates 2023.5.7 hbcca054_0 conda-forge
cachetools 5.3.1 pypi_0 pypi
certifi 2023.5.7 pypi_0 pypi
cffi 1.15.1 pypi_0 pypi
charset-normalizer 3.1.0 pypi_0 pypi
click 8.1.3 pypi_0 pypi
cloudpickle 2.2.1 pypi_0 pypi
comm 0.1.3 pypi_0 pypi
configargparse 1.5.3 pypi_0 pypi
contourpy 1.0.7 pypi_0 pypi
cycler 0.11.0 pypi_0 pypi
cython 0.29.35 pypi_0 pypi
dash 2.10.2 pypi_0 pypi
dash-core-components 2.0.0 pypi_0 pypi
dash-html-components 2.0.0 pypi_0 pypi
dash-table 5.0.0 pypi_0 pypi
dask 2023.5.1 pypi_0 pypi
debugpy 1.6.7 pypi_0 pypi
decorator 5.1.1 pypi_0 pypi
defusedxml 0.7.1 pypi_0 pypi
distro 1.8.0 pypi_0 pypi
exceptiongroup 1.1.1 pypi_0 pypi
executing 1.2.0 pypi_0 pypi
fastjsonschema 2.17.1 pypi_0 pypi
flask 2.2.5 pypi_0 pypi
fonttools 4.39.4 pypi_0 pypi
fqdn 1.5.1 pypi_0 pypi
freetype-py 2.4.0 pypi_0 pypi
fsspec 2023.5.0 pypi_0 pypi
future 0.18.3 pypi_0 pypi
glumpy 1.2.0 pypi_0 pypi
google-auth 2.19.1 pypi_0 pypi
google-auth-oauthlib 1.0.0 pypi_0 pypi
grpcio 1.54.2 pypi_0 pypi
hsluv 5.0.3 pypi_0 pypi
idna 3.4 pypi_0 pypi
importlib-metadata 6.6.0 pypi_0 pypi
ipykernel 6.23.1 pypi_0 pypi
ipython 8.13.2 pypi_0 pypi
ipython-genutils 0.2.0 pypi_0 pypi
ipywidgets 8.0.6 pypi_0 pypi
isoduration 20.11.0 pypi_0 pypi
itsdangerous 2.1.2 pypi_0 pypi
jedi 0.18.2 pypi_0 pypi
jinja2 3.1.2 pypi_0 pypi
jsonpointer 2.3 pypi_0 pypi
jsonschema 4.17.3 pypi_0 pypi
jupyter 1.0.0 pypi_0 pypi
jupyter-client 8.2.0 pypi_0 pypi
jupyter-console 6.6.3 pypi_0 pypi
jupyter-core 5.3.0 pypi_0 pypi
jupyter-events 0.6.3 pypi_0 pypi
jupyter-server 2.6.0 pypi_0 pypi
jupyter-server-terminals 0.4.4 pypi_0 pypi
jupyterlab-pygments 0.2.2 pypi_0 pypi
jupyterlab-widgets 3.0.7 pypi_0 pypi
kiwisolver 1.4.4 pypi_0 pypi
lazy-loader 0.2 pypi_0 pypi
ld_impl_linux-64 2.38 h1181459_1
libblas 3.9.0 16_linux64_openblas conda-forge
libcblas 3.9.0 16_linux64_openblas conda-forge
libffi 3.4.4 h6a678d5_0
libgcc-ng 11.2.0 h1234567_1
libgfortran-ng 12.2.0 h69a702a_19 conda-forge
libgfortran5 12.2.0 h337968e_19 conda-forge
libgomp 11.2.0 h1234567_1
liblapack 3.9.0 16_linux64_openblas conda-forge
libopenblas 0.3.21 h043d6bf_0
libstdcxx-ng 12.2.0 h46fd767_19 conda-forge
libuuid 1.41.5 h5eee18b_0
locket 1.0.0 pypi_0 pypi
markdown 3.4.3 pypi_0 pypi
markupsafe 2.1.2 pypi_0 pypi
matplotlib 3.7.1 pypi_0 pypi
matplotlib-inline 0.1.6 pypi_0 pypi
mistune 2.0.5 pypi_0 pypi
mpmath 1.3.0 pypi_0 pypi
mvtec-halcon 23050.0.0 pypi_0 pypi
nbclassic 1.0.0 pypi_0 pypi
nbclient 0.8.0 pypi_0 pypi
nbconvert 7.4.0 pypi_0 pypi
nbformat 5.7.0 pypi_0 pypi
ncurses 6.4 h6a678d5_0
nest-asyncio 1.5.6 pypi_0 pypi
notebook 6.5.4 pypi_0 pypi
notebook-shim 0.2.3 pypi_0 pypi
numpy 1.24.3 pypi_0 pypi
oauthlib 3.2.2 pypi_0 pypi
open3d 0.17.0 pypi_0 pypi
openssl 1.1.1t h7f8727e_0
overrides 7.3.1 pypi_0 pypi
packaging 23.1 pypi_0 pypi
pandas 2.0.2 pypi_0 pypi
pandocfilters 1.5.0 pypi_0 pypi
parso 0.8.3 pypi_0 pypi
partd 1.4.0 pypi_0 pypi
pexpect 4.8.0 pypi_0 pypi
pickleshare 0.7.5 pypi_0 pypi
pillow 9.5.0 pypi_0 pypi
pip 23.0.1 py310h06a4308_0
platformdirs 3.5.1 pypi_0 pypi
plotly 5.14.1 pypi_0 pypi
pooch 1.7.0 pypi_0 pypi
prometheus-client 0.17.0 pypi_0 pypi
prompt-toolkit 3.0.38 pypi_0 pypi
protobuf 4.23.2 pypi_0 pypi
psutil 5.9.5 pypi_0 pypi
ptyprocess 0.7.0 pypi_0 pypi
pure-eval 0.2.2 pypi_0 pypi
pyamg 5.0.0 pypi_0 pypi
pyasn1 0.5.0 pypi_0 pypi
pyasn1-modules 0.3.0 pypi_0 pypi
pybind11 2.10.4 pypi_0 pypi
pybind11-global 2.9.2 py310hbf28c38_1 conda-forge
pycocotools 2.0 pypi_0 pypi
pycparser 2.21 pypi_0 pypi
pyerfa 2.0.0.3 pypi_0 pypi
pygments 2.15.1 pypi_0 pypi
pyopengl 3.1.0 pypi_0 pypi
pyparsing 3.0.9 pypi_0 pypi
pypng 0.20220715.0 pypi_0 pypi
pyquaternion 0.9.9 pypi_0 pypi
pyrsistent 0.19.3 pypi_0 pypi
python 3.10.11 h7a1cb2a_2
python-dateutil 2.8.2 pypi_0 pypi
python-json-logger 2.0.7 pypi_0 pypi
python_abi 3.10 2_cp310 conda-forge
pytorch-lightning 0.7.6 pypi_0 pypi
pytz 2023.3 pypi_0 pypi
pyyaml 6.0 pypi_0 pypi
pyzmq 25.1.0 pypi_0 pypi
qtconsole 5.4.3 pypi_0 pypi
qtpy 2.3.1 pypi_0 pypi
readline 8.2 h5eee18b_0
requests 2.31.0 pypi_0 pypi
requests-oauthlib 1.3.1 pypi_0 pypi
rfc3339-validator 0.1.4 pypi_0 pypi
rfc3986-validator 0.1.1 pypi_0 pypi
rsa 4.9 pypi_0 pypi
scikit-image 0.20.0 pypi_0 pypi
send2trash 1.8.2 pypi_0 pypi
setuptools 67.8.0 py310h06a4308_0
simpleitk 2.2.1 pypi_0 pypi
six 1.16.0 pypi_0 pypi
sniffio 1.3.0 pypi_0 pypi
soupsieve 2.4.1 pypi_0 pypi
sqlite 3.41.2 h5eee18b_0
stack-data 0.6.2 pypi_0 pypi
tenacity 8.2.2 pypi_0 pypi
tensorboard 2.13.0 pypi_0 pypi
tensorboard-data-server 0.7.0 pypi_0 pypi
terminado 0.17.1 pypi_0 pypi
tinycss2 1.2.1 pypi_0 pypi
tk 8.6.12 h1ccaba5_0
toolz 0.12.0 pypi_0 pypi
torch 1.13.0+cu117 pypi_0 pypi
torchtext 0.14.0 pypi_0 pypi
torchvision 0.14.0+cu117 pypi_0 pypi
tornado 6.3.2 pypi_0 pypi
traitlets 5.9.0 pypi_0 pypi
triangle 20220202 pypi_0 pypi
tzdata 2023.3 pypi_0 pypi
uri-template 1.2.0 pypi_0 pypi
urllib3 1.26.16 pypi_0 pypi
vispy 0.13.0 pypi_0 pypi
wcwidth 0.2.6 pypi_0 pypi
webcolors 1.13 pypi_0 pypi
webencodings 0.5.1 pypi_0 pypi
websocket-client 1.5.2 pypi_0 pypi
werkzeug 2.2.3 pypi_0 pypi
wheel 0.38.4 py310h06a4308_0
widgetsnbextension 4.0.7 pypi_0 pypi
xtensor 0.24.2 h924138e_0 conda-forge
xtensor-python 0.26.1 py310hc4a4660_0 conda-forge
xtl 0.7.4 h4bd325d_0 conda-forge
xz 5.4.2 h5eee18b_0
zephyr 0.1.dev0 dev_0 <develop>
zipp 3.15.0 pypi_0 pypi
zlib 1.2.13 h5eee18b_0
When I run the following cell, the Jupyter notebook keeps restarting due to a dead kernel. Did you have the same problem? I am testing HALCON with an evaluation license I got from a sales representative.
All the cells above run with no problem.