diff --git a/carla_ros_bridge/CMakeLists.txt b/carla_ros_bridge/CMakeLists.txt
new file mode 100644
index 000000000..3cca17ec7
--- /dev/null
+++ b/carla_ros_bridge/CMakeLists.txt
@@ -0,0 +1,200 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(carla_ros_bridge)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ rospy
+ sensor_msgs
+ tf
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a run_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a run_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# sensor_msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a run_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES carla_ros_bridge
+# CATKIN_DEPENDS rospy sensor_msgs tf
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+ ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/carla_ros_bridge.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/carla_ros_bridge_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables and/or libraries for installation
+# install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+ install(FILES
+ client.launch
+ client_with_rviz.launch
+ test/ros_bridge_client.test
+ DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+ )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_carla_ros_bridge.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/carla_ros_bridge/Readme.md b/carla_ros_bridge/Readme.md
new file mode 100644
index 000000000..7a69406b3
--- /dev/null
+++ b/carla_ros_bridge/Readme.md
@@ -0,0 +1,162 @@
+
+# ROS bridge for the CARLA simulator
+
+This ROS package provides a simple ROS bridge for the CARLA simulator.
+
+![rviz setup](./assets/rviz_carla_default.png "rviz")
+![depthcloud](./assets/depth_cloud_and_lidar.png "depthcloud")
+
+[Short video](https://youtu.be/S_NoN2GBtdY)
+
+
+# Features
+
+- [x] Cameras (depth, segmentation, rgb) support
+- [x] Add camera matrix
+- [x] Lidar sensor support
+- [x] Transform publications
+- [x] Manual control using ackermann msg
+- [x] Autopilot mode using rosparam
+- [x] Rosbag recording inside the bridge (to avoid the small timing offsets of running rosbag record -a in a separate process)
+- [x] Handle ros dependencies
+- [x] Marker/bounding box messages for cars/pedestrians
+- [ ] Add traffic light support
+- [ ] Support dynamic change (restarting simulation using a topic/rosparam)
+
+
+# Setup
+
+## Create a catkin workspace and install carla_ros_bridge package
+
+### Create the catkin workspace:
+
+ mkdir -p ~/ros/catkin_ws_for_carla/src
+ cd ~/ros/catkin_ws_for_carla
+ source /opt/ros/kinetic/setup.bash
+ catkin_make
+ source ~/ros/catkin_ws_for_carla/devel/setup.bash
+
+For more information about configuring a ros environment see
+http://wiki.ros.org/ROS/Tutorials/InstallingandConfiguringROSEnvironment
+
+## Install carla python client in your workspace
+
+ cd carla/PythonClient
+ pip2 install -e . --user --upgrade
+
+Check that the installation was successful by trying to import carla from python:
+
+ python -c 'import carla;print("Success")'
+
+You should see the Success message without any errors.
+
+### Install recent protobuf version [optional]
+
+ sudo apt-get remove python-protobuf
+ sudo pip2 install --upgrade protobuf
+
+
+### Add the carla_ros_bridge in the catkin workspace
+
+Run the following commands after replacing [PATH_TO_CARLA] with the actual path to the carla directory on your machine:
+
+ ln -s [PATH_TO_CARLA]/carla_ros_bridge/ ~/ros/catkin_ws_for_carla/src/
+ source ~/ros/catkin_ws_for_carla/devel/setup.bash
+ rosdep update
+ rosdep install --from-paths ~/ros/catkin_ws_for_carla
+ cd ~/ros/catkin_ws_for_carla
+ catkin_make
+
+
+### Test your installation
+
+If you use the prebuilt binary (0.8.2):
+
+ ./CarlaUE4 -carla-server -windowed -ResX=320 -ResY=240
+
+
+Wait for the message:
+
+ Waiting for the client to connect...
+
+Then run the tests:
+
+ rostest carla_ros_bridge ros_bridge_client.test
+
+You should see:
+
+ [carla_ros_bridge.rosunit-testTopics/test_publish][passed]
+
+ SUMMARY
+ * RESULT: SUCCESS
+
+
+
+# Start the ros bridge
+
+First run the simulator (see carla documentation: http://carla.readthedocs.io/en/latest/)
+
+ ./CarlaUE4 -carla-server -windowed -ResX=320 -ResY=240
+
+
+Wait for the message:
+
+ Waiting for the client to connect...
+
+Then start the ros bridge:
+
+ source ~/ros/catkin_ws_for_carla/devel/setup.bash
+ roslaunch carla_ros_bridge client.launch
+
+To start the ros bridge with rviz use:
+
+ roslaunch carla_ros_bridge client_with_rviz.launch
+
+You can set up the desired cameras/sensors in config/settings.yaml.
+
+# Autopilot control
+
+To enable autopilot control, set the ros parameter carla_autopilot to True:
+
+ rosparam set carla_autopilot True
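+
+The autopilot flag is read by the bridge on every simulation step, so it can be toggled while the bridge is running. For reference, a minimal Python equivalent (assuming a running roscore) is:
+
+    import rospy
+    rospy.set_param('carla_autopilot', True)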
+
+# Manual control
+
+To enable manual control, set the ros parameter carla_autopilot to False:
+
+ rosparam set carla_autopilot False
+
+
+Then you can send commands to the car on the /ackermann_cmd topic.
+
+Example of forward movement (the speed is in meters per second):
+
+ rostopic pub /ackermann_cmd ackermann_msgs/AckermannDrive "{steering_angle: 0.0, steering_angle_velocity: 0.0, speed: 10, acceleration: 0.0,
+ jerk: 0.0}" -r 10
+
+
+Example of forward movement with steering:
+
+ rostopic pub /ackermann_cmd ackermann_msgs/AckermannDrive "{steering_angle: 5.41, steering_angle_velocity: 0.0, speed: 10, acceleration: 0.0,
+ jerk: 0.0}" -r 10
+
+ Warning: the steering_angle is the steering wheel angle (in radians), not the road wheel angle; for now the maximum steering wheel angle is set to 500 degrees.
+
+
+Example of backward movement:
+
+ rostopic pub /ackermann_cmd ackermann_msgs/AckermannDrive "{steering_angle: 0, steering_angle_velocity: 0.0, speed: -10, acceleration: 0.0,
+ jerk: 0.0}" -r 10
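+
+The same commands can also be sent from a Python node. The sketch below is only an illustration (the node name and the 10 m/s target speed are arbitrary), but it uses the same /ackermann_cmd topic and AckermannDrive message as the examples above:
+
+    import rospy
+    from ackermann_msgs.msg import AckermannDrive
+
+    rospy.init_node('ackermann_cmd_example')
+    pub = rospy.Publisher('/ackermann_cmd', AckermannDrive, queue_size=1)
+    rate = rospy.Rate(10)  # publish at 10 Hz, like the rostopic examples above
+    msg = AckermannDrive(steering_angle=0.0, speed=10.0)  # straight ahead at 10 m/s
+    while not rospy.is_shutdown():
+        pub.publish(msg)
+        rate.sleep()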
+
+
+# ROSBAG recording
+
+The carla_ros_bridge can also be used to record all published topics into a rosbag:
+
+ roslaunch carla_ros_bridge client_with_rviz.launch rosbag_fname:=/tmp/save_session.bag
+
+This command will create the rosbag /tmp/save_session.bag.
+
+You can of course also use *rosbag record* to do the same, but recording from inside the bridge guarantees that all messages are saved without the small desynchronization that can occur when running *rosbag record* in another process.
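+
+To sanity-check the recorded bag afterwards, you can read it back with the rosbag Python API. This is just a minimal sketch (the bag path matches the example above):
+
+    from __future__ import print_function
+    import rosbag
+
+    bag = rosbag.Bag('/tmp/save_session.bag')
+    for topic, msg, stamp in bag.read_messages():
+        print(topic, stamp.to_sec())
+    bag.close()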
+
+
diff --git a/carla_ros_bridge/assets/depth_cloud_and_lidar.png b/carla_ros_bridge/assets/depth_cloud_and_lidar.png
new file mode 100644
index 000000000..25cecc8a6
Binary files /dev/null and b/carla_ros_bridge/assets/depth_cloud_and_lidar.png differ
diff --git a/carla_ros_bridge/assets/rviz_carla_default.png b/carla_ros_bridge/assets/rviz_carla_default.png
new file mode 100644
index 000000000..64e54ae1a
Binary files /dev/null and b/carla_ros_bridge/assets/rviz_carla_default.png differ
diff --git a/carla_ros_bridge/client.launch b/carla_ros_bridge/client.launch
new file mode 100644
index 000000000..020ac907e
--- /dev/null
+++ b/carla_ros_bridge/client.launch
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/carla_ros_bridge/client_with_rviz.launch b/carla_ros_bridge/client_with_rviz.launch
new file mode 100644
index 000000000..a4e47815c
--- /dev/null
+++ b/carla_ros_bridge/client_with_rviz.launch
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/carla_ros_bridge/config/carla_default_rviz.cfg.rviz b/carla_ros_bridge/config/carla_default_rviz.cfg.rviz
new file mode 100644
index 000000000..bc76d9903
--- /dev/null
+++ b/carla_ros_bridge/config/carla_default_rviz.cfg.rviz
@@ -0,0 +1,255 @@
+Panels:
+ - Class: rviz/Displays
+ Help Height: 102
+ Name: Displays
+ Property Tree Widget:
+ Expanded:
+ - /Global Options1
+ - /Status1
+ - /DepthCloud1/Auto Size1
+ Splitter Ratio: 0.5
+ Tree Height: 334
+ - Class: rviz/Selection
+ Name: Selection
+ - Class: rviz/Tool Properties
+ Expanded:
+ - /2D Pose Estimate1
+ - /2D Nav Goal1
+ - /Publish Point1
+ Name: Tool Properties
+ Splitter Ratio: 0.5886790156364441
+ - Class: rviz/Views
+ Expanded:
+ - /Current View1
+ Name: Views
+ Splitter Ratio: 0.5
+ - Class: rviz/Time
+ Experimental: true
+ Name: Time
+ SyncMode: 0
+ SyncSource: Front_cam
+ - Class: rviz/Displays
+ Help Height: 0
+ Name: Displays
+ Property Tree Widget:
+ Expanded:
+ - /Global Options1
+ - /non_player_vehicles1
+ - /DepthCloud1/Auto Size1
+ Splitter Ratio: 0.4943566620349884
+ Tree Height: 170
+Visualization Manager:
+ Class: ""
+ Displays:
+ - Alpha: 0.10000000149011612
+ Cell Size: 1
+ Class: rviz/Grid
+ Color: 170; 164; 162
+ Enabled: true
+ Line Style:
+ Line Width: 0.029999999329447746
+ Value: Lines
+ Name: Grid
+ Normal Cell Count: 0
+ Offset:
+ X: 0
+ Y: 0
+ Z: 0
+ Plane: XY
+ Plane Cell Count: 1000
+ Reference Frame: map
+ Value: true
+ - Class: rviz/Image
+ Enabled: true
+ Image Topic: /camera_front/image_raw
+ Max Value: 1
+ Median window: 5
+ Min Value: 0
+ Name: Front_cam
+ Normalize Range: true
+ Queue Size: 2
+ Transport Hint: raw
+ Unreliable: false
+ Value: true
+ - Class: rviz/Image
+ Enabled: true
+ Image Topic: /camera_depth/image_raw
+ Max Value: 30
+ Median window: 5
+ Min Value: 0
+ Name: Depth
+ Normalize Range: false
+ Queue Size: 2
+ Transport Hint: raw
+ Unreliable: false
+ Value: true
+ - Class: rviz/MarkerArray
+ Enabled: true
+ Marker Topic: /vehicles
+ Name: non_player_vehicles
+ Namespaces:
+ {}
+ Queue Size: 100
+ Value: true
+ - Class: rviz/Marker
+ Enabled: true
+ Marker Topic: /player_vehicle
+ Name: Marker
+ Namespaces:
+ "": true
+ Queue Size: 100
+ Value: true
+ - Alpha: 1
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 10
+ Min Value: -10
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/PointCloud2
+ Color: 255; 255; 255
+ Color Transformer: Intensity
+ Decay Time: 0
+ Enabled: false
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Max Intensity: 4096
+ Min Color: 0; 0; 0
+ Min Intensity: 0
+ Name: PointCloud2
+ Position Transformer: XYZ
+ Queue Size: 10
+ Selectable: true
+ Size (Pixels): 3
+ Size (m): 0.009999999776482582
+ Style: Flat Squares
+ Topic: /lidar_0
+ Unreliable: false
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: false
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 10
+ Min Value: -10
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: /camera_front/image_raw
+ Color Transformer: RGB8
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /camera_depth/image_raw
+ Depth Map Transport Hint: raw
+ Enabled: true
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Max Intensity: 4096
+ Min Color: 0; 0; 0
+ Min Intensity: 0
+ Name: DepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: false
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: true
+ Enabled: true
+ Global Options:
+ Background Color: 48; 48; 48
+ Default Light: true
+ Fixed Frame: map
+ Frame Rate: 30
+ Name: root
+ Tools:
+ - Class: rviz/Interact
+ Hide Inactive Objects: true
+ - Class: rviz/MoveCamera
+ - Class: rviz/Select
+ - Class: rviz/FocusCamera
+ - Class: rviz/Measure
+ - Class: rviz/SetInitialPose
+ Topic: /initialpose
+ - Class: rviz/SetGoal
+ Topic: /move_base_simple/goal
+ - Class: rviz/PublishPoint
+ Single click: true
+ Topic: /clicked_point
+ Value: true
+ Views:
+ Current:
+ Class: rviz/ThirdPersonFollower
+ Distance: 31.861072540283203
+ Enable Stereo Rendering:
+ Stereo Eye Separation: 0.05999999865889549
+ Stereo Focal Distance: 1
+ Swap Stereo Eyes: false
+ Value: false
+ Focal Point:
+ X: 6.999748229980469
+ Y: 0.7988636493682861
+ Z: 2.896774276450742e-6
+ Focal Shape Fixed Size: false
+ Focal Shape Size: 0.05000000074505806
+ Invert Z Axis: false
+ Name: Current View
+ Near Clip Distance: 0.009999999776482582
+ Pitch: 0.18520352244377136
+ Target Frame: base_link
+ Value: ThirdPersonFollower (rviz)
+ Yaw: 3.5035617351531982
+ Saved:
+ - Class: rviz/FPS
+ Enable Stereo Rendering:
+ Stereo Eye Separation: 0.05999999865889549
+ Stereo Focal Distance: 1
+ Swap Stereo Eyes: false
+ Value: false
+ Invert Z Axis: false
+ Name: FPS
+ Near Clip Distance: 0.009999999776482582
+ Pitch: 0.984796941280365
+ Position:
+ X: 48.14033508300781
+ Y: -258.651123046875
+ Z: 284.4742431640625
+ Target Frame: base_link
+ Value: FPS (rviz)
+ Yaw: 1.521981120109558
+Window Geometry:
+ "&Displays":
+ collapsed: false
+ "&Tool Properties":
+ collapsed: false
+ "&Views":
+ collapsed: false
+ Depth:
+ collapsed: false
+ Displays:
+ collapsed: false
+ Front_&cam:
+ collapsed: false
+ Height: 994
+ Hide Left Dock: false
+ Hide Right Dock: false
+ QMainWindow State: 000000ff00000000fd00000004000000000000019a0000032bfc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000007601000003fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007300000000430000032d000000f601000003fb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb000000100044006900730070006c0061007900730100000043000000f6000000f601000003fb0000001200460072006f006e0074005f00630061006d010000013a0000011d0000002201000003fb0000000a00440065007000740068010000025800000116000000220100000300000001000001810000032bfc0200000009fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000043000001950000007601000003fb0000000a004400650070007400680000000043000001e80000000000000000fb00000018005300650067006d0065006e0074006100740069006f006e000000017f0000011f0000000000000000fb0000000a0049006d0061006700650000000043000000f60000000000000000fb0000000a0049006d0061006700650000000043000001630000000000000000fb0000001a00630061006d005f00660072006f006e0074005f00720067006200000001a7000001c70000000000000000fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a0056006900650077007301000001d900000195000000ce01000003fb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000690000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000006900000004dfc0100000002fb0000000800540069006d0065010000000000000690000002bb01000003fb0000000800540069006d00650100000000000004500000000000000000000003730000032b00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+ Selection:
+ collapsed: false
+ T&ime:
+ collapsed: false
+ Width: 1680
+ X: 0
+ Y: 30
diff --git a/carla_ros_bridge/config/setting_focus_on_player.yaml b/carla_ros_bridge/config/setting_focus_on_player.yaml
new file mode 100644
index 000000000..4321c2370
--- /dev/null
+++ b/carla_ros_bridge/config/setting_focus_on_player.yaml
@@ -0,0 +1,97 @@
+# carla's original variable names are used unchanged here (i.e. CamelCase, etc.)
+carla:
+ host: localhost
+ port: 2000
+ QualityLevel: Low # Low or Epic
+ SynchronousMode: True
+
+ sensors:
+ CameraRGB:
+ SensorType: CAMERA
+ carla_settings:
+ PostProcessing: SceneFinal
+ ImageSizeX: 800
+ ImageSizeY: 600
+ FOV: 90
+ PositionX: -5.00
+ PositionY: 0
+ PositionZ: 1.30
+ RotationPitch: 0
+ RotationRoll: 0
+ RotationYaw: 0
+
+ camera_depth:
+ SensorType: CAMERA
+ carla_settings:
+ PostProcessing: Depth
+ ImageSizeX: 800
+ ImageSizeY: 600
+ FOV: 90
+ PositionX: -5.00
+ PositionY: 0
+ PositionZ: 1.30
+ RotationPitch: 0
+ RotationRoll: 0
+ RotationYaw: 0
+
+ camera_depth_front:
+ SensorType: CAMERA
+ carla_settings:
+ PostProcessing: Depth
+ ImageSizeX: 800
+ ImageSizeY: 600
+ FOV: 90
+ PositionX: 7.00
+ PositionY: 0
+ PositionZ: 1.30
+ RotationPitch: 0
+ RotationRoll: 0
+ RotationYaw: 180
+
+ CameraRGB_front:
+ SensorType: CAMERA
+ carla_settings:
+ PostProcessing: SceneFinal
+ ImageSizeX: 800
+ ImageSizeY: 600
+ FOV: 90
+ PositionX: 7.00
+ PositionY: 0
+ PositionZ: 1.30
+ RotationPitch: 0
+ RotationRoll: 0
+ RotationYaw: 180
+
+
+
+
+ #camera_semantic_segmentation:
+ # SensorType: CAMERA
+ # carla_settings:
+ # PostProcessing: SemanticSegmentation
+ # ImageSizeX: 800
+ # ImageSizeY: 600
+ # FOV: 90
+ # PositionX: 0.00
+ # PositionY: 0
+ # PositionZ: 1.30
+ # RotationPitch: 0
+ # RotationRoll: 0
+ # RotationYaw: 0
+
+
+ lidar_0:
+ SensorType: LIDAR_RAY_CAST
+ carla_settings:
+ Channels: 4
+ Range: 120
+ PointsPerSecond: 200000
+ RotationFrequency: 25
+ UpperFovLimit: 1.6
+ LowerFovLimit: -1.6
+ PositionX: 0.0
+ PositionY: 0
+ PositionZ: 1.50
+ RotationPitch: 0
+ RotationYaw: 0
+ RotationRoll: 0
diff --git a/carla_ros_bridge/config/settings.yaml b/carla_ros_bridge/config/settings.yaml
new file mode 100644
index 000000000..1adf0a46f
--- /dev/null
+++ b/carla_ros_bridge/config/settings.yaml
@@ -0,0 +1,55 @@
+carla:
+ host: localhost
+ port: 2000
+ QualityLevel: Low # Low or Epic
+ SynchronousMode: True
+ SendNonPlayerAgentsInfo: True
+ NumberOfVehicles: 30
+ NumberOfPedestrians: 10
+ WeatherId: 1
+
+ sensors:
+ camera_front:
+ SensorType: CAMERA
+ carla_settings:
+ PostProcessing: SceneFinal
+ ImageSizeX: 320
+ ImageSizeY: 240
+ FOV: 90
+ PositionX: 1.80
+ PositionY: 0
+ PositionZ: 1.30
+ RotationPitch: 0
+ RotationRoll: 0
+ RotationYaw: 0
+
+ camera_depth:
+ SensorType: CAMERA
+ carla_settings:
+ PostProcessing: Depth
+ ImageSizeX: 320
+ ImageSizeY: 240
+ FOV: 90
+ PositionX: 1.80
+ PositionY: 0
+ PositionZ: 1.30
+ RotationPitch: 0
+ RotationRoll: 0
+ RotationYaw: 0
+
+# Uncomment the following lines if you want to enable lidar publication
+# lidar_0:
+# SensorType: LIDAR_RAY_CAST
+# carla_settings:
+# Channels: 4
+# Range: 120
+# PointsPerSecond: 200000
+# RotationFrequency: 25
+# UpperFovLimit: 1.6
+# LowerFovLimit: -1.6
+# PositionX: 0.0
+# PositionY: 0
+# PositionZ: 1.50
+# RotationPitch: 0
+# RotationYaw: 0
+# RotationRoll: 0
diff --git a/carla_ros_bridge/package.xml b/carla_ros_bridge/package.xml
new file mode 100644
index 000000000..89bc56a61
--- /dev/null
+++ b/carla_ros_bridge/package.xml
@@ -0,0 +1,75 @@
+
+
+ carla_ros_bridge
+ 0.0.1
+ The carla_ros_bridge package
+
+
+
+
+ lgeorge
+
+
+
+
+
+ MIT
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ catkin
+ rospy
+ sensor_msgs
+ tf
+ rospy
+ sensor_msgs
+ tf
+ rospy
+ sensor_msgs
+ tf
+ tf2
+ rviz
+ ackermann_msgs
+ cv_bridge
+ geometry_msgs
+ std_msgs
+ rosbag_storage
+
+
+
+
+
+
+
+
diff --git a/carla_ros_bridge/setup.py b/carla_ros_bridge/setup.py
new file mode 100644
index 000000000..a1e5f51fb
--- /dev/null
+++ b/carla_ros_bridge/setup.py
@@ -0,0 +1,10 @@
+from distutils.core import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+d = generate_distutils_setup(
+ packages=['carla_ros_bridge'],
+ package_dir={'': 'src'}
+)
+
+setup(**d)
+
diff --git a/carla_ros_bridge/src/carla_ros_bridge/__init__.py b/carla_ros_bridge/src/carla_ros_bridge/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/carla_ros_bridge/src/carla_ros_bridge/bridge.py b/carla_ros_bridge/src/carla_ros_bridge/bridge.py
new file mode 100644
index 000000000..82332fb83
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/bridge.py
@@ -0,0 +1,174 @@
+"""
+Rosbridge class:
+
+Class that handles the communication between CARLA and ROS
+"""
+import random
+
+from rosgraph_msgs.msg import Clock
+from tf2_msgs.msg import TFMessage
+import rospy
+
+from carla.settings import CarlaSettings
+from carla_ros_bridge.control import InputController
+from carla_ros_bridge.markers import PlayerAgentHandler, NonPlayerAgentsHandler
+from carla_ros_bridge.sensors import CameraHandler, LidarHandler
+
+
+class CarlaRosBridge(object):
+ """
+ Carla Ros bridge
+ """
+
+ def __init__(self, client, params):
+ """
+
+ :param params: dict of parameters, see settings.yaml
+ :param rate: rate to query data from carla in Hz
+ """
+ self.setup_carla_client(client=client, params=params)
+
+ self.tf_to_publish = []
+ self.msgs_to_publish = []
+ self.publishers = {}
+
+ # definitions useful for time
+ self.cur_time = rospy.Time.from_sec(
+ 0) # at the beginning of simulation
+ self.carla_game_stamp = 0
+ self.carla_platform_stamp = 0
+
+ # creating handler to handle vehicles messages
+ self.player_handler = PlayerAgentHandler(
+ "player_vehicle", process_msg_fun=self.process_msg)
+ self.non_players_handler = NonPlayerAgentsHandler(
+ "vehicles", process_msg_fun=self.process_msg)
+
+ # creating handler for sensors
+ self.sensors = {}
+ for name, _ in self.param_sensors.items():
+ self.add_sensor(name)
+
+ # creating input controller listener
+ self.input_controller = InputController()
+
+ def setup_carla_client(self, client, params):
+ self.client = client
+ self.param_sensors = params.get('sensors', {})
+ self.carla_settings = CarlaSettings()
+ self.carla_settings.set(
+ SendNonPlayerAgentsInfo=params.get('SendNonPlayerAgentsInfo', True),
+ NumberOfVehicles=params.get('NumberOfVehicles', 20),
+ NumberOfPedestrians=params.get('NumberOfPedestrians', 40),
+ WeatherId=params.get('WeatherId', random.choice([1, 3, 7, 8, 14])),
+ SynchronousMode=params.get('SynchronousMode', True),
+ QualityLevel=params.get('QualityLevel', 'Low')
+ )
+ self.carla_settings.randomize_seeds()
+
+ def add_sensor(self, name):
+ rospy.loginfo("Adding sensor {}".format(name))
+ sensor_type = self.param_sensors[name]['SensorType']
+ params = self.param_sensors[name]['carla_settings']
+ sensor_handler = None
+ if sensor_type == 'LIDAR_RAY_CAST':
+ sensor_handler = LidarHandler
+ if sensor_type == 'CAMERA':
+ sensor_handler = CameraHandler
+ if sensor_handler:
+ self.sensors[name] = sensor_handler(
+ name,
+ params,
+ carla_settings=self.carla_settings,
+ process_msg_fun=self.process_msg)
+ else:
+ rospy.logerr(
+ "Unable to handle sensor {name} of type {sensor_type}".format(
+ sensor_type=sensor_type, name=name))
+
+ def on_shutdown(self):
+ rospy.loginfo("Shutdown requested")
+
+ def process_msg(self, topic=None, msg=None):
+ """
+ Process a message before publication.
+
+ A publisher is created lazily the first time a topic is seen.
+ The message is stored in a list (together with its publisher) until the next call to send_msgs.
+
+ Messages for the /tf topic are handled differently, so that all transforms can be published in a single TFMessage.
+ :param topic: topic to publish the message on
+ :param msg: ros message
+ """
+ if topic not in self.publishers:
+ if topic == 'tf':
+ self.publishers[topic] = rospy.Publisher(
+ topic, TFMessage, queue_size=100)
+ else:
+ self.publishers[topic] = rospy.Publisher(
+ topic, type(msg), queue_size=10)
+
+ if topic == 'tf':
+ # transforms are merged into the same message
+ self.tf_to_publish.append(msg)
+ else:
+ self.msgs_to_publish.append((self.publishers[topic], msg))
+
+ def send_msgs(self):
+ for publisher, msg in self.msgs_to_publish:
+ publisher.publish(msg)
+ self.msgs_to_publish = []
+
+ tf_msg = TFMessage(self.tf_to_publish)
+ self.publishers['tf'].publish(tf_msg)
+ self.tf_to_publish = []
+
+ def compute_cur_time_msg(self):
+ self.process_msg('clock', Clock(self.cur_time))
+
+ def run(self):
+ self.publishers['clock'] = rospy.Publisher(
+ "clock", Clock, queue_size=10)
+
+ # load settings into the server
+ scene = self.client.load_settings(self.carla_settings)
+ # Choose one player start at random.
+ number_of_player_starts = len(scene.player_start_spots)
+ player_start = random.randint(0, max(0, number_of_player_starts - 1))
+
+ self.client.start_episode(player_start)
+ while not (rospy.is_shutdown()):
+ measurements, sensor_data = self.client.read_data()
+
+ # handle time
+ self.carla_game_stamp = measurements.game_timestamp
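+ # carla reports game_timestamp in milliseconds; convert it to seconds for the ros clock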
+ self.cur_time = rospy.Time.from_sec(self.carla_game_stamp * 1e-3)
+ self.compute_cur_time_msg()
+
+ # handle agents
+ self.player_handler.process_msg(
+ measurements.player_measurements, cur_time=self.cur_time)
+ self.non_players_handler.process_msg(
+ measurements.non_player_agents, cur_time=self.cur_time)
+
+ # handle sensors
+ for name, data in sensor_data.items():
+ self.sensors[name].process_sensor_data(data, self.cur_time)
+
+ # publish all messages
+ self.send_msgs()
+
+ # handle control
+ if rospy.get_param('carla_autopilot', True):
+ control = measurements.player_measurements.autopilot_control
+ self.client.send_control(control)
+ else:
+ control = self.input_controller.cur_control
+ self.client.send_control(**control)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ rospy.loginfo("Exiting Bridge")
+ return None
diff --git a/carla_ros_bridge/src/carla_ros_bridge/bridge_with_rosbag.py b/carla_ros_bridge/src/carla_ros_bridge/bridge_with_rosbag.py
new file mode 100644
index 000000000..82ca3db52
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/bridge_with_rosbag.py
@@ -0,0 +1,36 @@
+"""
+RosBridge class with rosbag support
+"""
+
+from tf2_msgs.msg import TFMessage
+import rosbag
+import rospy
+
+from carla_ros_bridge.bridge import CarlaRosBridge
+
+
+class CarlaRosBridgeWithBag(CarlaRosBridge):
+ def __init__(self, *args, **kwargs):
+ super(CarlaRosBridgeWithBag, self).__init__(*args, **kwargs)
+ rosbag_fname = rospy.get_param('rosbag_fname')
+ self.bag = rosbag.Bag(rosbag_fname, mode='w')
+
+ def send_msgs(self):
+ for publisher, msg in self.msgs_to_publish:
+ self.bag.write(publisher.name, msg, self.cur_time)
+
+ tf_msg = TFMessage(self.tf_to_publish)
+ self.bag.write('tf', tf_msg, self.cur_time)
+ super(CarlaRosBridgeWithBag, self).send_msgs()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ rospy.loginfo("Closing the bag file")
+ self.bag.close()
+ super(CarlaRosBridgeWithBag, self).__exit__(exc_type, exc_value,
+ traceback)
diff --git a/carla_ros_bridge/src/carla_ros_bridge/client.py b/carla_ros_bridge/src/carla_ros_bridge/client.py
new file mode 100755
index 000000000..6f9fdea9e
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/client.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+"""
+Ros Bridge node for carla simulator
+"""
+
+import rospy
+
+from carla.client import make_carla_client
+from carla_ros_bridge.bridge import CarlaRosBridge
+from carla_ros_bridge.bridge_with_rosbag import CarlaRosBridgeWithBag
+
+
+def main():
+ rospy.init_node("carla_client", anonymous=True)
+
+ params = rospy.get_param('carla')
+ host = params['host']
+ port = params['port']
+
+ rospy.loginfo("Trying to connect to {host}:{port}".format(
+ host=host, port=port))
+
+ with make_carla_client(host, port) as client:
+ rospy.loginfo("Connected")
+
+ bridge_cls = CarlaRosBridgeWithBag if rospy.get_param(
+ 'rosbag_fname', '') else CarlaRosBridge
+ with bridge_cls(client=client, params=params) as carla_ros_bridge:
+ rospy.on_shutdown(carla_ros_bridge.on_shutdown)
+ carla_ros_bridge.run()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/carla_ros_bridge/src/carla_ros_bridge/control.py b/carla_ros_bridge/src/carla_ros_bridge/control.py
new file mode 100644
index 000000000..5b7b0581f
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/control.py
@@ -0,0 +1,68 @@
+"""
+Class to handle control input
+"""
+
+import math
+import numpy as np
+import rospy
+
+from ackermann_msgs.msg import AckermannDrive
+
+
+class InputController(object):
+ """
+ Class to handle ros input command
+
+ Each time a ros control msg (an AckermannDrive msg) is received on the control topic,
+ a carla control command is computed.
+ """
+
+ def __init__(self, topic_name='/ackermann_cmd'):
+ # current control command
+ self.cur_control = {
+ 'steer': 0.0,
+ 'throttle': 0.0,
+ 'brake': 0.0,
+ 'hand_brake': False,
+ 'reverse': False
+ }
+
+ self.cmd_vel_subscriber = rospy.Subscriber(
+ topic_name, AckermannDrive, self.set_control_cmd_callback)
+
+ def set_control_cmd_callback(self, data):
+ """
+ Convert an AckermannDrive msg into a carla control command
+
+ Right now the control is really simple and does not enforce the acceleration and jerk commands, nor the steering angle velocity
+ :param data: AckermannDrive msg
+ :return:
+ """
+ steering_angle_ctrl = data.steering_angle
+ speed_ctrl = data.speed
+
+ max_steering_angle = math.radians(
+ 500
+ ) # 500 degrees is the max steering angle that I have on my car,
+ # would be nice if we could use the value provided by carla
+ max_speed = 27 # just a value for me, 27 m/sec seems to be a reasonable max speed for now
+
+ control = {}
+
+ if abs(steering_angle_ctrl) > max_steering_angle:
+ rospy.logerr("Max steering angle reached, clipping value")
+ steering_angle_ctrl = np.clip(
+ steering_angle_ctrl, -max_steering_angle, max_steering_angle)
+
+ if abs(speed_ctrl) > max_speed:
+ rospy.logerr("Max speed reached, clipping value")
+ speed_ctrl = np.clip(speed_ctrl, -max_speed, max_speed)
+
+ if speed_ctrl == 0:
+ control['brake'] = 1.0  # full brake when the commanded speed is zero
+
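+ # normalize to the ranges carla expects: steer in [-1, 1] and throttle in [0, 1];
+ # reverse driving is encoded separately in the 'reverse' flag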
+ control['steer'] = steering_angle_ctrl / max_steering_angle
+ control['throttle'] = abs(speed_ctrl / max_speed)
+ control['reverse'] = speed_ctrl < 0
+
+ self.cur_control = control
diff --git a/carla_ros_bridge/src/carla_ros_bridge/markers.py b/carla_ros_bridge/src/carla_ros_bridge/markers.py
new file mode 100644
index 000000000..324a09909
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/markers.py
@@ -0,0 +1,154 @@
+"""
+Classes to handle Agent object (player and non-player)
+"""
+
+from geometry_msgs.msg import TransformStamped
+from std_msgs.msg import Header
+from visualization_msgs.msg import MarkerArray, Marker
+
+from carla.sensor import Transform as carla_Transform
+from carla_ros_bridge.transforms import carla_transform_to_ros_transform, ros_transform_to_pose
+
+
+class AgentObjectHandler(object):
+ """
+ Generic class to convert carla agent information into ros messages
+ In ros, agents are represented as Marker messages (and are thus viewable in Rviz).
+ """
+
+ def __init__(self, name, process_msg_fun=None, world_link='map'):
+ self.name = name
+ self.world_link = world_link
+ self.process_msg_fun = process_msg_fun
+ self.lookup_table_marker_id = {}
+
+ def process_msg(self, data, cur_time):
+ """
+
+ :param data: carla agent data
+ :param cur_time: current ros simulation time
+ :return:
+ """
+ raise NotImplementedError
+
+ def get_marker_id(self, agent_id):
+ """
+ Return a unique marker id for each agent
+
+ ros marker ids should be int32, while carla/unrealengine seems to use int64
+ A lookup table is used to remap each agent_id to a small number between 0 and max_int32
+
+ :param agent_id:
+ :return:
+ """
+ if agent_id not in self.lookup_table_marker_id:
+ self.lookup_table_marker_id[agent_id] = len(
+ self.lookup_table_marker_id)
+ return self.lookup_table_marker_id[agent_id]
+
+
+class PlayerAgentHandler(AgentObjectHandler):
+ """
+ Convert player agent into ros message (as marker)
+ """
+
+ def __init__(self, name, **kwargs):
+ super(PlayerAgentHandler, self).__init__(name, **kwargs)
+
+ def process_msg(self, data, cur_time):
+ t = TransformStamped()
+ t.header.stamp = cur_time
+ t.header.frame_id = self.world_link
+ t.child_frame_id = "base_link"
+ t.transform = carla_transform_to_ros_transform(
+ carla_Transform(data.transform))
+ header = Header()
+ header.stamp = cur_time
+ header.frame_id = self.world_link
+ marker = get_vehicle_marker(
+ data, header=header, marker_id=0, is_player=True)
+ self.process_msg_fun(self.name, marker)
+ self.process_msg_fun('tf', t)
+
+
+class NonPlayerAgentsHandler(AgentObjectHandler):
+ """
+ Convert non-player agents into ros messages
+ """
+
+ def __init__(self, name, **kwargs):
+ super(NonPlayerAgentsHandler, self).__init__(name, **kwargs)
+
+ def process_msg(self, data, cur_time):
+ """
+ :param data: list of carla_server_pb2.Agent returned by the carla API,
+ with field 'id', 'vehicle', 'pedestrian', 'traffic_light', 'speed_limit_sign'
+
+ :return:
+ """
+ vehicles = [(self.get_marker_id(agent.id), agent.vehicle)
+ for agent in data if agent.HasField('vehicle')]
+
+ header = Header(stamp=cur_time, frame_id=self.world_link)
+ if not (vehicles):
+ return
+ markers = [
+ get_vehicle_marker(vehicle, header, agent_id)
+ for agent_id, vehicle in vehicles
+ ]
+ marker_array = MarkerArray(markers)
+ self.process_msg_fun('vehicles', marker_array)
+
+
+def get_vehicle_marker(object, header, marker_id=0, is_player=False):
+ """
+ Return a marker msg
+
+ :param object: carla agent object (pb2 object (vehicle, pedestrian or traffic light))
+ :param header: ros header (stamp/frame_id)
+ :param marker_id: a marker id (int32)
+ :param is_player: True if player else False (useful to change the marker color)
+ :return: a ros marker msg
+ """
+ marker = Marker(header=header)
+ marker.color.a = 0.3
+ if is_player:
+ marker.color.g = 1
+ marker.color.r = 0
+ marker.color.b = 0
+ else:
+ marker.color.r = 1
+ marker.color.g = 0
+ marker.color.b = 0
+
+ marker.id = marker_id
+ marker.text = "id = {}".format(marker_id)
+ update_marker_pose(object, marker)
+
+ if not is_player: # related to bug request #322
+ marker.scale.x = marker.scale.x / 100.0
+ marker.scale.y = marker.scale.y / 100.0
+ marker.scale.z = marker.scale.z / 100.0
+
+ # the box pose seems to require a little bump to be well aligned with camera depth
+ marker.pose.position.z += marker.scale.z / 2.0
+
+ return marker
+
+
+def update_marker_pose(object, base_marker):
+ """
+ Update pose of a marker based on carla information
+
+ :param object: pb2 carla object
+ :param base_marker: marker to update pose
+ """
+ ros_transform = carla_transform_to_ros_transform(
+ carla_Transform(object.transform))
+ base_marker.pose = ros_transform_to_pose(ros_transform)
+
+ base_marker.scale.x = object.bounding_box.extent.x * 2.0
+ base_marker.scale.y = object.bounding_box.extent.y * 2.0
+ base_marker.scale.z = object.bounding_box.extent.z * 2.0
+
+ base_marker.type = Marker.CUBE
diff --git a/carla_ros_bridge/src/carla_ros_bridge/sensors.py b/carla_ros_bridge/src/carla_ros_bridge/sensors.py
new file mode 100644
index 000000000..2fd37d759
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/sensors.py
@@ -0,0 +1,201 @@
+"""
+Classes to handle Carla sensors
+"""
+import math
+import numpy as np
+import tf
+
+from cv_bridge import CvBridge
+from geometry_msgs.msg import TransformStamped
+from sensor_msgs.msg import CameraInfo
+from sensor_msgs.point_cloud2 import create_cloud_xyz32
+from std_msgs.msg import Header
+
+from carla.sensor import Camera, Lidar, Sensor
+from carla_ros_bridge.transforms import carla_transform_to_ros_transform
+
+cv_bridge = CvBridge(
+) # global cv bridge to convert image between opencv and ros
+
+
+class SensorHandler(object):
+ """
+ Generic Sensor Handler
+
+ A sensor handler computes the ros message associated with a carla sensor data object, as well as the transform associated with the
+ sensor.
+ These messages are passed to a *process_msg_fun* which will take care of publishing them.
+ """
+
+ def __init__(self,
+ name,
+ params=None,
+ carla_sensor_class=Sensor,
+ carla_settings=None,
+ process_msg_fun=None):
+ """
+ :param name: sensor name
+ :param params: params used to set the sensor in carla
+ :param carla_sensor_class: type of sensor
+ :param carla_settings: carla_settings object
+ :param process_msg_fun: function to call on each new computed message
+ """
+ self.process_msg_fun = process_msg_fun
+ self.carla_object = carla_sensor_class(name, **params)
+ carla_settings.add_sensor(self.carla_object)
+ self.name = name
+
+ def process_sensor_data(self, data, cur_time):
+ """
+ process a carla sensor data object
+
+ Generate sensor message and transform message
+
+ :param data: carla sensor data
+ :param cur_time: current ros simulation time
+ :return:
+ """
+ self._compute_sensor_msg(data, cur_time)
+ self._compute_transform(data, cur_time)
+
+ def _compute_sensor_msg(self, data, cur_time):
+ """
+ Compute the ros msg associated to carla data
+ :param data: SensorData object
+ :param cur_time: current ros simulation time
+ """
+ raise NotImplementedError
+
+ def _compute_transform(self, data, cur_time):
+ """
+ Compute the tf msg associated to carla data
+
+ :param data: SensorData object
+ :param cur_time: current ros simulation time
+ """
+ raise NotImplementedError
+
+
+class LidarHandler(SensorHandler):
+ """
+ Class to handle Lidar sensors
+ """
+
+ def __init__(self, name, params, **kwargs):
+ super(LidarHandler, self).__init__(
+ name, carla_sensor_class=Lidar, params=params, **kwargs)
+
+ def _compute_sensor_msg(self, sensor_data, cur_time):
+ header = Header()
+ header.frame_id = self.name
+ header.stamp = cur_time
+ # we negate the y axis (lidar points are expressed in a left-handed coordinate system, while ros needs a right-handed one)
+ # we need a copy here, because the carla numpy array is read-only
+ new_sensor_data = sensor_data.data.copy()
+ new_sensor_data = -new_sensor_data
+ # we also need to permute x and y
+ new_sensor_data = new_sensor_data[..., [1, 0, 2]]
+ point_cloud_msg = create_cloud_xyz32(header, new_sensor_data)
+ topic = self.name
+ self.process_msg_fun(topic, point_cloud_msg)
+
+ def _compute_transform(self, sensor_data, cur_time):
+ parent_frame_id = "base_link"
+ child_frame_id = self.name
+
+ t = TransformStamped()
+ t.header.stamp = cur_time
+ t.header.frame_id = parent_frame_id
+ t.child_frame_id = child_frame_id
+ t.transform = carla_transform_to_ros_transform(
+ self.carla_object.get_transform())
+
+ self.process_msg_fun('tf', t)
+
+
+class CameraHandler(SensorHandler):
+ """
+ Class to handle Camera sensors
+ """
+
+ def __init__(self, name, params, **kwargs):
+ super(CameraHandler, self).__init__(
+ name, carla_sensor_class=Camera, params=params, **kwargs)
+
+ self.topic_image = '/'.join([name, 'image_raw'])
+ self.topic_cam_info = '/'.join([name, 'camera_info'])
+ self.build_camera_info()
+
+ def build_camera_info(self):
+ """
+ Compute the camera info
+
+ The camera info does not change over time
+ """
+ camera_info = CameraInfo()
+ camera_info.header.frame_id = self.name
+ camera_info.width = self.carla_object.ImageSizeX
+ camera_info.height = self.carla_object.ImageSizeY
+ camera_info.distortion_model = 'plumb_bob'
+ cx = self.carla_object.ImageSizeX / 2.0
+ cy = self.carla_object.ImageSizeY / 2.0
+ fx = self.carla_object.ImageSizeX / (
+ 2.0 * math.tan(self.carla_object.FOV * math.pi / 360.0))
+ fy = fx
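+ # e.g. with ImageSizeX = 320 and FOV = 90 (the values in settings.yaml): fx = fy = 320 / (2 * tan(45 deg)) = 160.0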
+ camera_info.K = [fx, 0, cx, 0, fy, cy, 0, 0, 1]
+ camera_info.D = [0, 0, 0, 0, 0]
+ camera_info.R = [1.0, 0, 0, 0, 1.0, 0, 0, 0, 1.0]
+ camera_info.P = [fx, 0, cx, 0, 0, fy, cy, 0, 0, 0, 1.0, 0]
+ self._camera_info = camera_info
+
+ def _compute_sensor_msg(self, sensor_data, cur_time):
+ if sensor_data.type == 'Depth':
+ # ROS REP 118: Depth images are published as sensor_msgs/Image encoded as 32-bit float.
+ # Each pixel is a depth (along the camera Z axis) in meters.
+ data = np.float32(sensor_data.data * 1000.0) # in carla 1.0 = 1km
+ encoding = 'passthrough'
+ elif sensor_data.type == 'SemanticSegmentation':
+ encoding = 'mono16' # for semantic segmentation we use mono16 in order to be able to limit range in rviz
+ data = np.uint16(sensor_data.data)
+ else:
+ encoding = 'rgb8'
+ data = sensor_data.data
+ img_msg = cv_bridge.cv2_to_imgmsg(data, encoding=encoding)
+ img_msg.header.frame_id = self.name
+ img_msg.header.stamp = cur_time
+
+ cam_info = self._camera_info
+ cam_info.header = img_msg.header
+
+ self.process_msg_fun(self.topic_cam_info, cam_info)
+ self.process_msg_fun(self.topic_image, img_msg)
+
+ def _compute_transform(self, sensor_data, cur_time):
+ parent_frame_id = "base_link"
+ child_frame_id = self.name
+
+ t = TransformStamped()
+ t.header.stamp = cur_time
+ t.header.frame_id = parent_frame_id
+ t.child_frame_id = child_frame_id
+
+ # for cameras we reorient the frame to match the opencv projection axes
+ # in order to easily build a depth cloud for the RGBD camera
+ t.transform = carla_transform_to_ros_transform(
+ self.carla_object.get_transform())
+
+ rotation = t.transform.rotation
+ quat = [rotation.x, rotation.y, rotation.z, rotation.w]
+ roll, pitch, yaw = tf.transformations.euler_from_quaternion(quat)
+
+ roll -= math.pi / 2.0
+ yaw -= math.pi / 2.0
+
+ quat = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
+
+ t.transform.rotation.x = quat[0]
+ t.transform.rotation.y = quat[1]
+ t.transform.rotation.z = quat[2]
+ t.transform.rotation.w = quat[3]
+
+ self.process_msg_fun('tf', t)
diff --git a/carla_ros_bridge/src/carla_ros_bridge/transforms.py b/carla_ros_bridge/src/carla_ros_bridge/transforms.py
new file mode 100644
index 000000000..a8087142b
--- /dev/null
+++ b/carla_ros_bridge/src/carla_ros_bridge/transforms.py
@@ -0,0 +1,79 @@
+"""
+Tool functions to convert transforms from carla to ros coordinate system
+"""
+from geometry_msgs.msg import Transform, Pose
+import tf
+
+
+def carla_transform_to_ros_transform(carla_transform):
+ """
+ Convert a carla transform to a ros transform
+ :param carla_transform:
+ :return: a ros transform
+ """
+ transform_matrix = carla_transform.matrix
+
+ x, y, z = tf.transformations.translation_from_matrix(transform_matrix)
+ quat = tf.transformations.quaternion_from_matrix(transform_matrix)
+
+ ros_transform = Transform()
+ # remember that we go from left-handed system (unreal) to right-handed system (ros)
+ ros_transform.translation.x = x
+ ros_transform.translation.y = -y
+ ros_transform.translation.z = z
+
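+ # mirroring the y axis flips the handedness: roll and yaw change sign while pitch is unchanged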
+ roll, pitch, yaw = tf.transformations.euler_from_quaternion(quat)
+ roll = -roll
+ pitch = pitch
+ yaw = -yaw
+
+ quat = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
+ ros_transform.rotation.x = quat[0]
+ ros_transform.rotation.y = quat[1]
+ ros_transform.rotation.z = quat[2]
+ ros_transform.rotation.w = quat[3]
+
+ return ros_transform
+
+
+def carla_transform_to_ros_pose(carla_transform):
+ """
+ Convert a carla transform to a ros Pose msg
+ :param carla_transform:
+ :return: a ros pose msg
+ """
+ transform_matrix = carla_transform.matrix
+
+ x, y, z = tf.transformations.translation_from_matrix(transform_matrix)
+ quat = tf.transformations.quaternion_from_matrix(transform_matrix)
+
+ pose = Pose()
+ pose.position.x = x
+ pose.position.y = y
+ pose.position.z = z
+
+ pose.orientation.x = quat[0]
+ pose.orientation.y = quat[1]
+ pose.orientation.z = quat[2]
+ pose.orientation.w = quat[3]
+
+ return pose
+
+
+def ros_transform_to_pose(ros_transform):
+ """
+ Util function to convert a ros transform into a ros pose
+
+ :param ros_transform:
+ :return: a ros pose msg
+ """
+ pose = Pose()
+ pose.position.x = ros_transform.translation.x
+ pose.position.y = ros_transform.translation.y
+ pose.position.z = ros_transform.translation.z
+
+ pose.orientation.x = ros_transform.rotation.x
+ pose.orientation.y = ros_transform.rotation.y
+ pose.orientation.z = ros_transform.rotation.z
+ pose.orientation.w = ros_transform.rotation.w
+ return pose
diff --git a/carla_ros_bridge/test/ros_bridge_client.py b/carla_ros_bridge/test/ros_bridge_client.py
new file mode 100644
index 000000000..0aa9647a9
--- /dev/null
+++ b/carla_ros_bridge/test/ros_bridge_client.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+import rostest
+import sys
+import unittest
+
+from carla_ros_bridge.client import main
+
+## A sample python unit test
+class TestPublishedTopics(unittest.TestCase):
+ def test_camera_topics(self):
+ self.assertEqual(1, 1, "1!=1")
+
+
+
+if __name__ == '__main__':
+ rostest.rosrun(__name__, 'test_published_topics', TestPublishedTopics)
diff --git a/carla_ros_bridge/test/ros_bridge_client.test b/carla_ros_bridge/test/ros_bridge_client.test
new file mode 100644
index 000000000..f8500b802
--- /dev/null
+++ b/carla_ros_bridge/test/ros_bridge_client.test
@@ -0,0 +1,38 @@
+
+
+
+
+
+
+
+
+
+ topics:
+ - name: /player_vehicle
+ timeout: 20
+ negative: False
+ - name: /tf
+ timeout: 20
+ negative: False
+ - name: /camera_front/image_raw
+ timeout: 20
+ negative: False
+ - name: /camera_front/camera_info
+ timeout: 20
+ negative: False
+ - name: /camera_depth/image_raw
+ timeout: 20
+ negative: False
+ - name: /camera_depth/camera_info
+ timeout: 20
+ negative: False
+ - name: /vehicles
+ timeout: 20
+ negative: False
+
+
+
+
+
+
+
\ No newline at end of file