• Driving a car manually in CARLA, adding a stereo camera and a lidar


    The overall workflow for using CARLA:

    First, create a client.

    Set a timeout so that the connection attempt is aborted if the server does not respond within 2 seconds.

    client = carla.Client("127.0.0.1", 2000)
    client.set_timeout(2.0)

    Get the world from the client:

    world = client.get_world()

    Get the settings, modify them, and apply them back to the world.

    Here we use asynchronous mode with a fixed time step.

    settings = world.get_settings()
    settings.synchronous_mode = False  # keep asynchronous mode
    delta = 0.05
    settings.fixed_delta_seconds = delta
    world.apply_settings(settings)

    Set up the traffic manager:

    traffic_manager = client.get_trafficmanager(8000)
    traffic_manager.set_synchronous_mode(False)
    traffic_manager.set_random_device_seed(0)
    random.seed(0)

    Create a spectator view.

    The spectator is the viewpoint we observe from. In the main loop below, we keep it following the vehicle so that it always shows a top-down view of the car.

    spectator = world.get_spectator()

    Get all valid spawn points from the map.

    spawn_points contains every valid spawn point; one of them has to be chosen later when spawning the vehicle.

    spawn_points = world.get_map().get_spawn_points()

    The logic for adding anything to CARLA (a vehicle, a camera, a lidar, etc.) is always the same:

    Taking the vehicle as an example: first get the blueprint library from the world, filter the vehicle blueprint out of that library, then combine the vehicle blueprint with a spawn point to spawn the vehicle.

    blueprint_library = world.get_blueprint_library()
    vehicle_bp = blueprint_library.filter("model3")[0]
    vehicle = world.try_spawn_actor(vehicle_bp, spawn_points[0])

    blueprint_library is the library of all blueprints, vehicle_bp is the blueprint of the car, and combining it with a spawn point produces the vehicle instance vehicle.
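
    Note that try_spawn_actor returns None when spawning fails (for example, when the spawn point is already occupied), so it is worth checking the result. A minimal sketch (the fallback to a second spawn point is only an illustration, not part of the original script):

    vehicle = world.try_spawn_actor(vehicle_bp, spawn_points[0])
    if vehicle is None:
        # The chosen spawn point was blocked; fall back to another one
        vehicle = world.try_spawn_actor(vehicle_bp, spawn_points[1])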

    The camera follows the same logic:

    camera_init_trans = carla.Transform(carla.Location(x=1, y=-1, z=2), carla.Rotation(pitch=0))
    camera_bp = blueprint_library.find('sensor.camera.rgb')
    camera_bp.set_attribute("image_size_x", str(1280))
    camera_bp.set_attribute("image_size_y", str(720))
    camera_bp.set_attribute("fov", str(105))
    camera = world.spawn_actor(camera_bp, camera_init_trans, attach_to=vehicle)

    The camera must be attached to the vehicle. carla.Location here is given in the vehicle-fixed coordinate frame that moves with the car: the origin lies in the plane where the wheels touch the ground, at the center of the car when seen from above; the x axis points toward the front of the car, the y axis toward the right side of the car, and the z axis toward the sky. Units are meters.

    Register the camera's listen callback:

    # Get camera dimensions
    image_w = camera_bp.get_attribute("image_size_x").as_int()
    image_h = camera_bp.get_attribute("image_size_y").as_int()
    # Instantiate objects for rendering and vehicle control
    renderObject = RenderObject(image_w, image_h)
    controlObject = ControlObject(vehicle)
    camera.listen(lambda image: pygame_callback1(image, renderObject))

    CARLA has no built-in stereo camera; the effect of a stereo camera is obtained by adding two RGB cameras yourself, as sketched below.
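
    A minimal sketch of the stereo pair (it reuses camera_bp from above; the ±1 m lateral offsets match the full code below and give a 2 m baseline, and the names left_cam/right_cam are only illustrative):

    # The two cameras differ only in their lateral offset y, which defines the stereo baseline
    left_trans = carla.Transform(carla.Location(x=1, y=-1, z=2))
    right_trans = carla.Transform(carla.Location(x=1, y=1, z=2))
    baseline = right_trans.location.y - left_trans.location.y  # 2.0 m here
    left_cam = world.spawn_actor(camera_bp, left_trans, attach_to=vehicle)
    right_cam = world.spawn_actor(camera_bp, right_trans, attach_to=vehicle)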

    How should the focal length of a CARLA camera be understood?

    When defining the camera there is no explicit focal-length parameter. You first set the image size, i.e. how many pixels the sensor has horizontally and vertically, and then the field of view fov in degrees. The focal length f is derived from these two values, and it is this f that is needed to recover depth from the stereo pair. Think of the camera's normalized plane, i.e. the plane at z = 1 m in front of the camera (this z is in the SLAM-style camera coordinate frame, not the CARLA vehicle frame): a horizontal bar of length 2·tan(fov/2) placed there exactly fills the sensor horizontally. Solving the similar triangles gives the SLAM-style intrinsic focal length f = image_width / (2·tan(fov/2)) in pixels, and the principal point c of the SLAM intrinsics is half of the image size.
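
    A minimal sketch of that computation (the values match the cameras above; depth_from_disparity and the 2 m baseline are only illustrative assumptions, not part of the original script):

    import numpy as np

    image_w, image_h, fov_deg = 1280, 720, 105
    # Focal length in pixels: half the image width must span half the horizontal fov
    fx = image_w / (2.0 * np.tan(np.radians(fov_deg) / 2.0))
    fy = fx  # square pixels in the pinhole model
    cx, cy = image_w / 2.0, image_h / 2.0
    K = np.array([[fx, 0.0, cx],
                  [0.0, fy, cy],
                  [0.0, 0.0, 1.0]])

    # For a rectified stereo pair, depth follows from the disparity d (in pixels):
    #   Z = fx * baseline / d
    def depth_from_disparity(disparity, baseline=2.0):
        return fx * baseline / disparity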

    Keyboard controller:

    See the ControlObject class in the full code below.

    In every iteration of the main loop:

    world.tick()  # advance the simulation / get the server result
    # update the spectator view
    transform = vehicle.get_transform()  # we get the transform of the vehicle
    spectator.set_transform(carla.Transform(transform.location + carla.Location(z=50),
                                            carla.Rotation(pitch=-90)))
    for event in pygame.event.get():  # get the keyboard events

    Full code:

    import carla
    import random
    import time
    import sys
    import pygame
    import datetime
    import numpy as np
    import open3d as o3d
    from matplotlib import cm

    VIDIDIS = np.array(cm.get_cmap("plasma").colors)
    VID_RANGE = np.linspace(0.0, 1.0, VIDIDIS.shape[0])

    # Render object to keep and pass the PyGame surface
    class RenderObject(object):
        def __init__(self, width, height):
            init_image = np.random.randint(0, 255, (height, width, 3), dtype='uint8')
            self.surface = pygame.surfarray.make_surface(init_image.swapaxes(0, 1))
    # Control object to manage vehicle controls
    class ControlObject(object):
        def __init__(self, veh):
            # Control parameters to store the control state
            self._vehicle = veh
            self._throttle = False
            self._brake = False
            self._steer = None
            self._steer_cache = 0
            # A carla.VehicleControl object is needed to alter the
            # vehicle's control state
            self._control = carla.VehicleControl()

        # Check for key press events in the PyGame window
        # and define the control state
        def parse_control(self, event):
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_RETURN:
                    self._vehicle.set_autopilot(True)
                if event.key == pygame.K_UP:
                    self._throttle = True
                if event.key == pygame.K_DOWN:
                    self._brake = True
                if event.key == pygame.K_RIGHT:
                    self._steer = 1
                if event.key == pygame.K_LEFT:
                    self._steer = -1
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_UP:
                    self._throttle = False
                if event.key == pygame.K_DOWN:
                    self._brake = False
                    self._control.reverse = False
                if event.key == pygame.K_RIGHT:
                    self._steer = None
                if event.key == pygame.K_LEFT:
                    self._steer = None

        # Process the current control state, change the control parameter
        # if the key remains pressed
        def process_control(self):
            if self._throttle:
                self._control.throttle = min(self._control.throttle + 0.01, 1)
                self._control.gear = 1
                self._control.brake = False
            elif not self._brake:
                self._control.throttle = 0.0
            if self._brake:
                # If the down arrow is held down when the car is stationary, switch to reverse
                if self._vehicle.get_velocity().length() < 0.01 and not self._control.reverse:
                    self._control.brake = 0.0
                    self._control.gear = 1
                    self._control.reverse = True
                    self._control.throttle = min(self._control.throttle + 0.1, 1)
                elif self._control.reverse:
                    self._control.throttle = min(self._control.throttle + 0.1, 1)
                else:
                    self._control.throttle = 0.0
                    self._control.brake = min(self._control.brake + 0.3, 1)
            else:
                self._control.brake = 0.0
            if self._steer is not None:
                if self._steer == 1:
                    self._steer_cache += 0.03
                if self._steer == -1:
                    self._steer_cache -= 0.03
                # Clamp the cached steering value before applying it
                self._steer_cache = min(0.7, max(-0.7, self._steer_cache))
                self._control.steer = round(self._steer_cache, 1)
            else:
                if self._steer_cache > 0.0:
                    self._steer_cache *= 0.2
                if self._steer_cache < 0.0:
                    self._steer_cache *= 0.2
                if 0.01 > self._steer_cache > -0.01:
                    self._steer_cache = 0.0
                self._control.steer = round(self._steer_cache, 1)
            # Apply the control parameters to the ego vehicle
            self._vehicle.apply_control(self._control)
    def generate_lidar_bp(blueprint_library, delta):
        """
        To get the lidar blueprint
        :param blueprint_library: the world blueprint_library
        :param delta: update rate (s)
        :return: lidar bp
        """
        lidar_bp = blueprint_library.find("sensor.lidar.ray_cast")
        lidar_bp.set_attribute("dropoff_general_rate", "0.0")
        lidar_bp.set_attribute("dropoff_intensity_limit", "1.0")
        lidar_bp.set_attribute("dropoff_zero_intensity", "0.0")
        lidar_bp.set_attribute("upper_fov", str(15.0))
        lidar_bp.set_attribute("lower_fov", str(-25.0))
        lidar_bp.set_attribute("channels", str(64.0))
        lidar_bp.set_attribute("range", str(100.0))
        lidar_bp.set_attribute("rotation_frequency", str(1.0 / delta))
        lidar_bp.set_attribute("points_per_second", str(500000))
        return lidar_bp
    def lidar_callback(point_cloud, point_list):
        # We need to convert the point cloud (CARLA format) into a numpy.ndarray
        data = np.copy(np.frombuffer(point_cloud.raw_data, dtype=np.dtype("f4")))
        data = np.reshape(data, (int(data.shape[0] / 4), 4))
        intensity = data[:, -1]
        intensity_col = 1.0 - np.log(intensity) / np.log(np.exp(-0.004 * 100))
        int_color = np.c_[
            np.interp(intensity_col, VID_RANGE, VIDIDIS[:, 0]),
            np.interp(intensity_col, VID_RANGE, VIDIDIS[:, 1]),
            np.interp(intensity_col, VID_RANGE, VIDIDIS[:, 2])]
        points = data[:, :-1]  # we only use the x, y, z coordinates
        points[:, 1] = -points[:, 1]  # this is different from the official script
        point_list.points = o3d.utility.Vector3dVector(points)
        point_list.colors = o3d.utility.Vector3dVector(int_color)
    # Camera sensor callback: reshape raw camera data into a 2D RGB image and apply it to the PyGame surface
    def pygame_callback1(data, obj):
        img = np.reshape(np.copy(data.raw_data), (data.height, data.width, 4))
        img = img[:, :, :3]
        img = img[:, :, ::-1]
        # msg = bridge.cv2_to_imgmsg(img, encoding="bgr8")
        obj.surface = pygame.surfarray.make_surface(img.swapaxes(0, 1))
        # pub.publish(msg)
        print("taking photos1")

    def pygame_callback2(data, obj):
        img = np.reshape(np.copy(data.raw_data), (data.height, data.width, 4))
        img = img[:, :, :3]
        img = img[:, :, ::-1]
        # msg = bridge.cv2_to_imgmsg(img, encoding="bgr8")
        # pub2.publish(msg)
        print("taking photos2")
    client = carla.Client("127.0.0.1", 2000)
    client.set_timeout(2.0)
    world = client.get_world()

    settings = world.get_settings()
    settings.synchronous_mode = False  # keep asynchronous mode
    delta = 0.05
    settings.fixed_delta_seconds = delta
    world.apply_settings(settings)

    traffic_manager = client.get_trafficmanager(8000)
    traffic_manager.set_synchronous_mode(False)
    traffic_manager.set_random_device_seed(0)
    random.seed(0)
    # We will set up the spectator so we can see what we do
    spectator = world.get_spectator()

    # Retrieve the map's spawn points
    spawn_points = world.get_map().get_spawn_points()

    blueprint_library = world.get_blueprint_library()
    vehicle_bp = blueprint_library.filter("model3")[0]
    vehicle = world.try_spawn_actor(vehicle_bp, spawn_points[0])
    vehicle.set_autopilot(False)

    # First (left) RGB camera of the stereo pair, attached to the vehicle
    camera_init_trans = carla.Transform(carla.Location(x=1, y=-1, z=2), carla.Rotation(pitch=0))
    camera_bp = blueprint_library.find('sensor.camera.rgb')
    camera_bp.set_attribute("image_size_x", str(1280))
    camera_bp.set_attribute("image_size_y", str(720))
    camera_bp.set_attribute("fov", str(105))
    camera = world.spawn_actor(camera_bp, camera_init_trans, attach_to=vehicle)

    # Second (right) RGB camera of the stereo pair
    camera2_init_trans = carla.Transform(carla.Location(x=1, y=1, z=2), carla.Rotation(pitch=0))
    camera2_bp = blueprint_library.find('sensor.camera.rgb')
    camera2_bp.set_attribute("image_size_x", str(1280))
    camera2_bp.set_attribute("image_size_y", str(720))
    camera2_bp.set_attribute("fov", str(105))
    camera2 = world.spawn_actor(camera2_bp, camera2_init_trans, attach_to=vehicle)

    # Get camera dimensions
    image_w = camera_bp.get_attribute("image_size_x").as_int()
    image_h = camera_bp.get_attribute("image_size_y").as_int()

    # Instantiate objects for rendering and vehicle control, and start the camera with its PyGame callback
    renderObject = RenderObject(image_w, image_h)
    controlObject = ControlObject(vehicle)
    camera.listen(lambda image: pygame_callback1(image, renderObject))

    # Lidar sensor mounted on the roof, visualized with Open3D
    lidar_bp = generate_lidar_bp(blueprint_library, delta)
    lidar_transform = carla.Transform(carla.Location(x=-0.5, z=1.8))
    lidar = world.spawn_actor(lidar_bp, lidar_transform, attach_to=vehicle)
    point_list = o3d.geometry.PointCloud()
    lidar.listen(lambda data: lidar_callback(data, point_list))
    vis = o3d.visualization.Visualizer()
    vis.create_window(
        window_name="Display Point Cloud",
        width=960,
        height=540,
        left=480,
        top=270)
    vis.get_render_option().background_color = [0.05, 0.05, 0.05]
    vis.get_render_option().point_size = 1
    vis.get_render_option().show_coordinate_frame = True
    frame = 0
    dt0 = datetime.datetime.now()

    # Initialise the display
    pygame.init()
    gameDisplay = pygame.display.set_mode((image_w, image_h), pygame.HWSURFACE | pygame.DOUBLEBUF)
    # Draw black to the display
    gameDisplay.fill((0, 0, 0))
    gameDisplay.blit(renderObject.surface, (0, 0))
    pygame.display.flip()
    crashed = False
    while not crashed:
        # Open3D visualization of the lidar point cloud; see the note on the bug below
        # if frame == 2:
        #     vis.add_geometry(point_list)
        # vis.update_geometry(point_list)
        # vis.poll_events()
        # vis.update_renderer()
        # time.sleep(0.005)
        world.tick()
        # Keep the spectator in a top-down view above the vehicle
        transform = vehicle.get_transform()  # we get the transform of the vehicle
        spectator.set_transform(carla.Transform(transform.location + carla.Location(z=50),
                                                carla.Rotation(pitch=-90)))
        # Update the display
        gameDisplay.blit(renderObject.surface, (0, 0))
        pygame.display.flip()
        controlObject.process_control()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                crashed = True
            controlObject.parse_control(event)
        process_time = datetime.datetime.now() - dt0
        sys.stdout.write("\r" + "FPS: " + str(1.0 / process_time.total_seconds()) + " Current Frame: " + str(frame))
        sys.stdout.flush()
        dt0 = datetime.datetime.now()
        frame += 1

    # Stop the cameras and quit PyGame after exiting the game loop
    camera.stop()
    camera2.stop()
    pygame.quit()

    There is a bug here: my script contains both the lidar point-cloud visualization and the camera image visualization, but the two cannot run at the same time. When the lidar point-cloud window updates, the camera image stops updating, and vice versa. I do not know why; if anyone knows, please leave a comment.

• Original post: https://blog.csdn.net/qq_41816368/article/details/134038385