CARLA version: 0.9.13
Platform/OS: Ubuntu
Problem you have experienced: Connecting 2 clients: one represents the LIDAR, the other a vehicle.
What you expected to happen: The LIDAR senses the vehicle just like other features such as trees.
Steps to reproduce: Simply check whether the LIDAR senses the vehicle.
What I am doing wrong?
LIDAR.py
#!/usr/bin/env python
# coding: utf-8
import os
import carla
import time
import random
import math
import numpy as np
class CARLAInfrastructureWorker:
    """Infrastructure-mounted LIDAR client.

    Connects to a running CARLA server, spawns a ray-cast LIDAR at a fixed
    roadside location, and folds every scan into a 40x40 occupancy grid
    (one cell per metre). Cells that ever received a return within the
    configured height band are marked 1.
    """

    GRID_SIZE = 40    # cells per side of the occupancy grid
    GRID_OFFSET = 20  # shifts sensor-local coords [-20, 20] into [0, 40)

    def __init__(self):
        # Occupancy grid: 1 marks a cell where a LIDAR return was seen.
        self.map = np.zeros([40, 40])

        ## Client
        client = carla.Client("localhost", 2000)
        client.set_timeout(20.0)

        ## World
        self.world = client.get_world()
        self.blueprint_library = self.world.get_blueprint_library()
        self.world.wait_for_tick()

        ## Setup LIDAR sensor
        self.lidar_bp = self.blueprint_library.find('sensor.lidar.ray_cast')
        self.lidar_bp.set_attribute('range', "20")
        self.lidar_bp.set_attribute('upper_fov', '30')
        self.lidar_bp.set_attribute('lower_fov', '-30')
        self.lidar_bp.set_attribute('rotation_frequency', '10.0')
        self.lidar_bp.set_attribute('points_per_second', '100000')
        self.lidar_bp.set_attribute('dropoff_general_rate', '0.00')
        self.lidar_bp.set_attribute('sensor_tick', '0.1')

        # Fixed mounting position of the sensor (world coordinates).
        self.x = 268
        self.y = 60
        self.z = 0.1
        lidar_transform = carla.Transform(carla.Location(z=self.z, x=self.x, y=self.y))
        self.lidar = self.world.spawn_actor(self.lidar_bp, lidar_transform)
        self.lidar.listen(self.lidar_callback)
        self.world.tick()

        ## Spectator camera
        self.spectator = self.world.get_spectator()
        self.world.tick()

    def lidar_callback(self, event):
        """Accumulate one LIDAR measurement into the occupancy grid.

        NOTE(review): CARLA LIDAR points are expressed in the *sensor-local*
        frame, so comparing ``data.point.z`` against the sensor's world
        height ``self.z`` keeps only points ~0.1-0.6 m above the sensor
        itself — confirm this height band is the one intended.
        """
        for data in event:
            if self.z < data.point.z < self.z + 0.5:
                # Shift local coords [-20, 20] into grid range and bounds-
                # check. The original code indexed with raw int(coord):
                # negative values wrapped around via numpy negative
                # indexing, aliasing opposite sides of the grid. (It also
                # had a stray non-ASCII character that made it a
                # SyntaxError, and a redundant round(int(...)).)
                ix = int(data.point.x) + self.GRID_OFFSET
                iy = int(data.point.y) + self.GRID_OFFSET
                if 0 <= ix < self.GRID_SIZE and 0 <= iy < self.GRID_SIZE:
                    self.map[ix, iy] = 1

    def consult(self):
        # Advance the simulation clock by one tick.
        self.world.tick()

    def getMap(self):
        """Return the occupancy grid (the live array, not a copy)."""
        return self.map
### main(): warm up for 1000 ticks, snapshot the static background, then
### flag any cell that becomes occupied relative to that background.
worker = CARLAInfrastructureWorker()
step = 0
ground_truth = None
while True:
    worker.consult()
    step += 1
    if step == 1000:
        # Snapshot the background. The .copy() is essential: getMap()
        # returns the live array, so without it the "ground truth" mutates
        # together with the scan and the diff below is always zero (which
        # is why nothing was ever detected). The original line also ended
        # with a stray comma, accidentally making this a 1-tuple.
        ground_truth = worker.getMap().copy()
        print("Start detecting")
    elif step > 1000:
        # Any cell occupied now but not in the background is an object.
        diff = worker.getMap() - ground_truth
        if np.max(diff) > 0:
            print("Object detected")
        time.sleep(1.0)
        # Visualize the monitored 40x40 area around the sensor.
        begin = carla.Location(x=worker.x, y=worker.y, z=worker.z)
        worker.world.debug.draw_box(
            box=carla.BoundingBox(begin, carla.Vector3D(-20, -20, 2)),
            rotation=carla.Rotation(0, 0, 0),
            thickness=0.8,
            color=carla.Color(255, 0, 0, 0),
            life_time=2.0,
        )
        worker.world.debug.draw_point(begin, size=0.2, life_time=2.0)
VEHICLE.py
#!/usr/bin/env python
# coding: utf-8
import carla
import random
import math
import numpy as np
import matplotlib.image as img
class CARLAAutomatedVehicle:
    """Keyboard-driven vehicle client.

    Connects to a CARLA server, loads Town01, spawns a vehicle at a fixed
    spawn point, and on each ``consult()`` reads one movement command from
    stdin ("x+", "x-", "y+", "y-", "z+", "z-" nudge the vehicle by 1 m on
    that axis; "m" prints the current location) and ticks the world.
    """

    # Command -> (location attribute, delta) dispatch table; replaces the
    # original chain of six independent `if`s with six duplicated
    # set_location calls.
    _MOVES = {
        "x+": ("x", 1), "x-": ("x", -1),
        "y+": ("y", 1), "y-": ("y", -1),
        "z+": ("z", 1), "z-": ("z", -1),
    }

    def __init__(self):
        ## Client
        client = carla.Client("localhost", 2000)
        client.set_timeout(40.0)

        ## World
        self.world = client.load_world("Town01")
        self.world.wait_for_tick()

        ## Vehicle blueprint
        self.blueprint_library = self.world.get_blueprint_library()
        _vehicles = self.blueprint_library.filter('vehicle')
        self.bp = _vehicles[7]
        self.world.tick()

        ## Set spawn point
        sp = self.world.get_map().get_spawn_points()
        transform = sp[4]

        ## Spawn vehicle. spawn_actor already places the actor at
        ## `transform`; the original follow-up set_transform/set_location
        ## calls were redundant and have been removed.
        self.vehicle = self.world.spawn_actor(self.bp, transform)
        #self.vehicle.set_autopilot(True)
        self.world.tick()

        ## Spectator camera
        self.spectator = self.world.get_spectator()
        self.world.tick()

    def consult(self):
        """Follow the vehicle with the spectator, apply one stdin
        command, then advance the simulation by one tick."""
        # Bird's-eye spectator 30 m above the vehicle, looking down.
        spectator_transform = self.vehicle.get_transform()
        self.spectator.set_transform(carla.Transform(
            spectator_transform.location + carla.Location(z=30),
            carla.Rotation(pitch=-90, yaw=-90)))

        lc = self.vehicle.get_transform().location
        print("Input")
        input1 = input()
        print("Given >", input1, "<")

        move = self._MOVES.get(input1)
        if move is not None:
            axis, delta = move
            setattr(lc, axis, getattr(lc, axis) + delta)
            self.vehicle.set_location(lc)
        elif input1 == "m":
            print(str(lc.x) + " " + str(lc.y) + " " + str(lc.z))

        # Make the world clock advance another tick.
        self.world.tick()
### main(): drive the interactive loop forever. The original also kept a
### `step` counter that was initialized but never used; removed.
worker = CARLAAutomatedVehicle()
while True:
    worker.consult()
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.
CARLA version: 0.9.13 Platform/OS: Ubuntu Problem you have experienced: Connecting 2 clients: one represents the LIDAR, the other a vehicle. What you expected to happen: The LIDAR senses the vehicle just like other features such as trees. Steps to reproduce: Simply check whether the LIDAR senses the vehicle.
What I am doing wrong?