Hi @JacksonVation, thanks for your interest in TbV! This info is now available at https://argoverse.github.io/user-guide/datasets/map_change_detection.html. We’ll update the hyperlinks here to reflect that.
Please find instructions on how to generate the semantic label maps here: https://github.com/johnwlambert/tbv#generating-seamseg-semantic-segmentation-label-maps (in the readme)
Fixed in https://github.com/johnwlambert/tbv/pull/13. @JacksonVation, closing this now since we haven't heard back from you, but feel free to re-open if you have additional questions.
Hi @awayfromdog1, thank you for your instructions! I'm sorry about my late response. Now I have another question. I found it very difficult to find the closest image paths for each lidar timestamp, and it is very slow (maybe limited by my device); it may take a few weeks on my machine. So I wonder if there is any file we can refer to that maps each lidar timestamp to the corresponding image paths.
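(Editor's aside: such a mapping can also be built once per log by scanning each camera directory and doing a nearest-timestamp search. The sketch below only illustrates that idea; the helper name `build_closest_image_table` and the tolerance-free matching are assumptions, and the directory layout is taken from the script later in this thread. It is not an existing file shipped with the dataset.)

```python
from pathlib import Path
from typing import Dict, List, Optional

import numpy as np


def build_closest_image_table(
    data_dir: Path, log_id: str, cam_name: str, lidar_timestamps_ns: List[int]
) -> Dict[int, Optional[Path]]:
    """Map each lidar timestamp to the image whose timestamp is nearest (no max-gap tolerance)."""
    # Directory layout taken from the script later in this thread:
    #   <data_dir>/<log_id>/sensors/cameras/<cam_name>/<timestamp_ns>.jpg
    cam_dir = data_dir / log_id / "sensors" / "cameras" / cam_name
    img_fpaths = sorted(cam_dir.glob("*.jpg"), key=lambda p: int(p.stem))
    cam_timestamps_ns = np.array([int(p.stem) for p in img_fpaths])

    table: Dict[int, Optional[Path]] = {}
    for lidar_ts in lidar_timestamps_ns:
        if cam_timestamps_ns.size == 0:
            table[lidar_ts] = None
            continue
        # Index of the camera timestamp closest to this lidar timestamp.
        idx = int(np.argmin(np.abs(cam_timestamps_ns - lidar_ts)))
        table[lidar_ts] = img_fpaths[idx]
    return table
```

The resulting dict could then be saved to disk (e.g. with `np.save`, as the script below already does), so the search only happens once per log.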
@JacksonVation No problem. Interesting, could you share which version of av2-api you are using?
```bash
python -c "import av2; print(av2.__version__)"
```
Could you also share the `get_closest_img_fpath()` code you are referring to, and how long a single call takes? For example:

```python
import time

start = time.time()
dataloader = AV2SensorDataLoader( ... )
img_fpath = dataloader.get_closest_img_fpath(log_id, camera_name, lidar_timestamp_ns)
end = time.time()
duration = end - start
print(f"Duration: {duration}")
```
@awayfromdog1 Sure.

1. My av2-api version is 0.1.0.
2. Here is the script I am running:
```python
from multiprocessing import Pool
import torch
import os
import numpy as np
from pathlib import Path
from av2.datasets.sensor.av2_sensor_dataloader import AV2SensorDataLoader
import logging
from av2.map.map_api import ArgoverseStaticMap
from joblib import Parallel, delayed
from av2.datasets.sensor.constants import RingCameras
from av2.utils.synchronization_database import SynchronizationDB
from typing import Any, Dict, List, Mapping, Tuple, Union, Optional
import random
from splits import TRAIN, VAL, TEST
from utils.multiprocessing_utils import send_list_to_workers_with_worker_id

ORDERED_RING_CAMERA_LIST = [cam_enum.value for cam_enum in RingCameras]
LOG_IDS_TO_RENDER = list(TRAIN) + list(VAL) + list(TEST)

data_dir = Path("/root/autodl-tmp/av2-tbv")
label_dir = Path("/root/autodl-tmp/av2-tbv")


def get_closest_img_fpath(data_dir: str, log_id: str, cam_name: str, lidar_timestamp_ns: int) -> Optional[Path]:
    sdb = SynchronizationDB(str(data_dir))
    cam_timestamp_ns = sdb.get_closest_cam_channel_timestamp(lidar_timestamp_ns, cam_name, log_id)
    if cam_timestamp_ns is None:
        return None
    img_fpath = data_dir / log_id / "sensors" / "cameras" / cam_name / f"{cam_timestamp_ns}.jpg"
    return img_fpath


def process_lidar_frame(log_id, lidar_fpath):
    lidar_timestamp_ns = int(Path(lidar_fpath).stem)
    images = []
    for cam_idx in [0, 1, 2, 3, 4, 5, 6]:
        cam_channel = ORDERED_RING_CAMERA_LIST[cam_idx]
        image_path = get_closest_img_fpath(data_dir, log_id, cam_channel, lidar_timestamp_ns)
        images.append(image_path)
    save_path = os.path.join(data_dir, "00_closest_images", f"{log_id}", f"{lidar_timestamp_ns}.npy")
    os.makedirs(os.path.dirname(save_path), exist_ok=True)
    np.save(save_path, images)


def process_log(log_id, loader):
    lidar_fpaths = loader.get_ordered_log_lidar_fpaths(log_id=log_id)
    lidar_fpaths.sort()
    for i, lidar_fpath in enumerate(lidar_fpaths):
        process_lidar_frame(log_id, lidar_fpath)


def render_dataset_all_logs(log_ids: List[str]) -> None:
    np.random.seed(0)
    random.seed(0)
    num_processes = 20
    dataloader = AV2SensorDataLoader(data_dir, label_dir)
    for i, loaded_log_id in enumerate(dataloader.get_log_ids()):
        if i % 20 == 0:
            logging.info(f"Dataloader loaded {i}th: {loaded_log_id}")
    if num_processes == 1:
        kwargs = {"local_dataset_dir": data_dir, "dataloader": dataloader}
        dataset_renderer_worker(log_ids=log_ids, start_idx=0, end_idx=len(log_ids), worker_id=0, kwargs=kwargs)
    else:
        send_list_to_workers_with_worker_id(
            num_processes=num_processes,
            list_to_split=log_ids,
            worker_func_ptr=dataset_renderer_worker,
            local_dataset_dir=data_dir,
            dataloader=dataloader,
        )


def dataset_renderer_worker(
    log_ids: List[str], start_idx: int, end_idx: int, worker_id: int, kwargs: Mapping[str, Any]
) -> None:
    logging.info(f"Worker {worker_id} started...")
    dataloader = kwargs["dataloader"]
    logging.info("Creating Argoverse dataloader...")
    chunk_sz = end_idx - start_idx
    for idx in range(start_idx, end_idx):
        if idx % 10 == 0:
            pct_completed = (idx - start_idx) / chunk_sz * 100
            logging.info(f"Completed {pct_completed:.2f}%")
        log_id = log_ids[idx]
        try:
            process_log(
                log_id=log_id,
                loader=dataloader,
            )
        except Exception as e:
            logging.exception(f"Extraction failed for {log_id}")


def main():
    render_dataset_all_logs(log_ids=LOG_IDS_TO_RENDER)
    print("Finished!")


if __name__ == "__main__":
    main()
```
3. I measured the time one call to `get_closest_img_fpath()` takes, like this:
```python
import time
from av2.datasets.sensor.av2_sensor_dataloader import AV2SensorDataLoader
from pathlib import Path
data_dir = Path("/root/autodl-tmp/av2-tbv")
label_dir = Path("/root/autodl-tmp/av2-tbv")
start = time.time()
dataloader = AV2SensorDataLoader(data_dir, label_dir)
img_fpaths = dataloader.get_closest_img_fpath("01bb304d7bd835f8bbef7086b688e35e__Summer_2019", "ring_front_left", 315968867959882000)
end = time.time()
duration = end - start
print(f"Duration: {duration}")
```

And the duration is: `Duration: 137.59998774528503`
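(Editor's aside, two hedged observations on these numbers, inferred only from the snippets above and not verified against av2-api internals: the 137 s figure includes constructing `AV2SensorDataLoader` inside the timed region, and the script in the previous comment rebuilds a `SynchronizationDB` over the whole dataset directory for every camera of every lidar sweep. A sketch that separates the one-off setup cost from the per-lookup cost, reusing only the calls that already appear in this thread, might look like this:)

```python
import time
from pathlib import Path

from av2.datasets.sensor.av2_sensor_dataloader import AV2SensorDataLoader
from av2.utils.synchronization_database import SynchronizationDB

data_dir = Path("/root/autodl-tmp/av2-tbv")
label_dir = Path("/root/autodl-tmp/av2-tbv")
log_id = "01bb304d7bd835f8bbef7086b688e35e__Summer_2019"
cam_name = "ring_front_left"
lidar_timestamp_ns = 315968867959882000

# One-off setup: build the dataloader and synchronization database once per run.
t0 = time.time()
dataloader = AV2SensorDataLoader(data_dir, label_dir)
sdb = SynchronizationDB(str(data_dir))
t1 = time.time()

# Per-lookup cost: nearest-image query via the dataloader, and via the pre-built
# SynchronizationDB (same calls as used in the snippets above).
img_fpath = dataloader.get_closest_img_fpath(log_id, cam_name, lidar_timestamp_ns)
t2 = time.time()
cam_timestamp_ns = sdb.get_closest_cam_channel_timestamp(lidar_timestamp_ns, cam_name, log_id)
t3 = time.time()

print(f"setup: {t1 - t0:.2f} s")
print(f"get_closest_img_fpath: {t2 - t1:.2f} s")
print(f"get_closest_cam_channel_timestamp: {t3 - t2:.2f} s")
```

If most of the time turns out to be in the setup step, building the dataloader and `SynchronizationDB` once per run, rather than inside `get_closest_img_fpath()` as in the script above, may already make the extraction tractable.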
Hi, thank you for your great work on map change detection!
I am currently doing some research on autonomous driving perception, and I found that the link to the instructions for downloading the TbV dataset is missing, so I can't access it.
Another question: when I read your code for rendering the dataset, I found that "Semantic label maps must be precomputed". However, the dataset I've currently downloaded doesn't have any such .png files, so how could I generate those label maps? Thanks.