-Snippet for carla.World.enable_environment_objects
-
-
-
-```py
-
-# This recipe turn visibility off and on for two specifc buildings on the map
-
-# Get the buildings in the world
-world = client.get_world()
-env_objs = world.get_environment_objects(carla.CityObjectLabel.Buildings)
-
-# Access individual building IDs and save in a set
-building_01 = env_objs[0]
-building_02 = env_objs[1]
-objects_to_toggle = {building_01.id, building_02.id}
-
-# Toggle buildings off
-world.enable_environment_objects(objects_to_toggle, False)
-# Toggle buildings on
-world.enable_environment_objects(objects_to_toggle, True)
-
-
-```
-
-
-
-
-
-
-Snippet for carla.DebugHelper.draw_string
-
-
-
-```py
-
-
-# This recipe is a modification of lane_explorer.py example.
-# It draws the path of an actor through the world, printing information at each waypoint.
-
-# ...
-current_w = map.get_waypoint(vehicle.get_location())
-while True:
-
- next_w = map.get_waypoint(vehicle.get_location(), lane_type=carla.LaneType.Driving | carla.LaneType.Shoulder | carla.LaneType.Sidewalk )
- # Check if the vehicle is moving
- if next_w.id != current_w.id:
- vector = vehicle.get_velocity()
- # Check if the vehicle is on a sidewalk
- if current_w.lane_type == carla.LaneType.Sidewalk:
- draw_waypoint_union(debug, current_w, next_w, cyan if current_w.is_junction else red, 60)
- else:
- draw_waypoint_union(debug, current_w, next_w, cyan if current_w.is_junction else green, 60)
- debug.draw_string(current_w.transform.location, str('%15.0f km/h' % (3.6 * math.sqrt(vector.x**2 + vector.y**2 + vector.z**2))), False, orange, 60)
- draw_transform(debug, current_w.transform, white, 60)
-
- # Update the current waypoint and sleep for some time
- current_w = next_w
- time.sleep(args.tick_time)
-# ...
-
-
-```
-
-
-
-
-
-
-Snippet for carla.World.unload_map_layer
-
-
-
-```py
-
-# This recipe toggles off several layers in our "_Opt" maps
-
-# Load town one with minimum layout (roads, sidewalks, traffic lights and traffic signs)
-# as well as buildings and parked vehicles
-world = client.load_world('Town01_Opt', carla.MapLayer.Buildings | carla.MapLayer.ParkedVehicles)
-
-# Toggle all buildings off
-world.unload_map_layer(carla.MapLayer.Buildings)
-
-# Toggle all parked vehicles off
-world.unload_map_layer(carla.MapLayer.ParkedVehicles)
-
-
-```
-
-
-
-
-
-
-Snippet for carla.Vehicle.set_wheel_steer_direction
-
-
-
-```py
-
-# Sets the appearance of the vehicles front wheels to 40°. Vehicle physics will not be affected.
-
-vehicle.set_wheel_steer_direction(carla.VehicleWheelLocation.FR_Wheel, 40.0)
-vehicle.set_wheel_steer_direction(carla.VehicleWheelLocation.FL_Wheel, 40.0)
-
-
-```
-
-
-
-
-
-
-Snippet for carla.Client.__init__
-
-
-
-```py
-
-
-# This recipe shows in every script provided in PythonAPI/Examples
-# and it is used to parse the client creation arguments when running the script.
-
- argparser = argparse.ArgumentParser(
- description=__doc__)
- argparser.add_argument(
- '--host',
- metavar='H',
- default='127.0.0.1',
- help='IP of the host server (default: 127.0.0.1)')
- argparser.add_argument(
- '-p', '--port',
- metavar='P',
- default=2000,
- type=int,
- help='TCP port to listen to (default: 2000)')
- argparser.add_argument(
- '-s', '--speed',
- metavar='FACTOR',
- default=1.0,
- type=float,
- help='rate at which the weather changes (default: 1.0)')
- args = argparser.parse_args()
-
- speed_factor = args.speed
- update_freq = 0.1 / speed_factor
-
- client = carla.Client(args.host, args.port)
-
-
-
-```
-
-
-
-
-
-
-Snippet for carla.Map.get_waypoint
-
-
-
-```py
-
-
-# This recipe shows the current traffic rules affecting the vehicle.
-# Shows the current lane type and if a lane change can be done in the actual lane or the surrounding ones.
-
-# ...
-waypoint = world.get_map().get_waypoint(vehicle.get_location(),project_to_road=True, lane_type=(carla.LaneType.Driving | carla.LaneType.Shoulder | carla.LaneType.Sidewalk))
-print("Current lane type: " + str(waypoint.lane_type))
-# Check current lane change allowed
-print("Current Lane change: " + str(waypoint.lane_change))
-# Left and Right lane markings
-print("L lane marking type: " + str(waypoint.left_lane_marking.type))
-print("L lane marking change: " + str(waypoint.left_lane_marking.lane_change))
-print("R lane marking type: " + str(waypoint.right_lane_marking.type))
-print("R lane marking change: " + str(waypoint.right_lane_marking.lane_change))
-# ...
-
-
-```
-
-
-
-
-
-
-
-
-
-Snippet for carla.World.spawn_actor
-
-
-
-```py
-
-
-# This recipe attaches different camera / sensors to a vehicle with different attachments.
-
-# ...
-camera = world.spawn_actor(rgb_camera_bp, transform, attach_to=vehicle, attachment_type=Attachment.Rigid)
-# Default attachment: Attachment.Rigid
-gnss_sensor = world.spawn_actor(sensor_gnss_bp, transform, attach_to=vehicle)
-collision_sensor = world.spawn_actor(sensor_collision_bp, transform, attach_to=vehicle)
-lane_invasion_sensor = world.spawn_actor(sensor_lane_invasion_bp, transform, attach_to=vehicle)
-# ...
-
-
-```
-
-
-
-
-
-
-Snippet for carla.WalkerAIController.stop
-
-
-
-```py
-
-
-#To destroy the pedestrians, stop them from the navigation, and then destroy the objects (actor and controller).
-
-# stop pedestrians (list is [controller, actor, controller, actor ...])
-for i in range(0, len(all_id), 2):
- all_actors[i].stop()
-
-# destroy pedestrian (actor and controller)
-client.apply_batch([carla.command.DestroyActor(x) for x in all_id])
-
-
-```
-
-
-
-
-
-
-Snippet for carla.DebugHelper.draw_box
-
-
-
-```py
-
-
-# This recipe shows how to draw traffic light actor bounding boxes from a world snapshot.
-
-# ....
-debug = world.debug
-world_snapshot = world.get_snapshot()
-
-for actor_snapshot in world_snapshot:
- actual_actor = world.get_actor(actor_snapshot.id)
- if actual_actor.type_id == 'traffic.traffic_light':
- debug.draw_box(carla.BoundingBox(actor_snapshot.get_transform().location,carla.Vector3D(0.5,0.5,2)),actor_snapshot.get_transform().rotation, 0.05, carla.Color(255,0,0,0),0)
-# ...
-
-
-
-```
-
-
-
-
-
-
-
-
-
-Snippet for carla.World.get_spectator
-
-
-
-```py
-
-
-# This recipe spawns an actor and the spectator camera at the actor's location.
-
-# ...
-world = client.get_world()
-spectator = world.get_spectator()
-
-vehicle_bp = random.choice(world.get_blueprint_library().filter('vehicle.bmw.*'))
-transform = random.choice(world.get_map().get_spawn_points())
-vehicle = world.try_spawn_actor(vehicle_bp, transform)
-
-# Wait for world to get the vehicle actor
-world.tick()
-
-world_snapshot = world.wait_for_tick()
-actor_snapshot = world_snapshot.find(vehicle.id)
-
-# Set spectator at given transform (vehicle transform)
-spectator.set_transform(actor_snapshot.get_transform())
-# ...
-
-
-```
-
-
-
-
-
-
-Snippet for carla.Sensor.listen
-
-
-
-```py
-
-
-# This recipe applies a color conversion to the image taken by a camera sensor,
-# so it is converted to a semantic segmentation image.
-
-# ...
-camera_bp = world.get_blueprint_library().filter('sensor.camera.semantic_segmentation')
-# ...
-cc = carla.ColorConverter.CityScapesPalette
-camera.listen(lambda image: image.save_to_disk('output/%06d.png' % image.frame, cc))
-# ...
-
-
-```
-
-
-
-
-
-
-Snippet for carla.TrafficLight.set_state
-
-
-
-```py
-
-
-# This recipe changes from red to green the traffic light that affects the vehicle.
-# This is done by detecting if the vehicle actor is at a traffic light.
-
-# ...
-world = client.get_world()
-spectator = world.get_spectator()
-
-vehicle_bp = random.choice(world.get_blueprint_library().filter('vehicle.bmw.*'))
-transform = random.choice(world.get_map().get_spawn_points())
-vehicle = world.try_spawn_actor(vehicle_bp, transform)
-
-# Wait for world to get the vehicle actor
-world.tick()
-
-world_snapshot = world.wait_for_tick()
-actor_snapshot = world_snapshot.find(vehicle.id)
-
-# Set spectator at given transform (vehicle transform)
-spectator.set_transform(actor_snapshot.get_transform())
-# ...# ...
-if vehicle_actor.is_at_traffic_light():
- traffic_light = vehicle_actor.get_traffic_light()
- if traffic_light.get_state() == carla.TrafficLightState.Red:
- # world.hud.notification("Traffic light changed! Good to go!")
- traffic_light.set_state(carla.TrafficLightState.Green)
-# ...
-
-
-
-```
-
-
-
-
-
-
-
Snippet for carla.ActorBlueprint.set_attribute
@@ -4462,6 +4144,333 @@ for i in range(0, len(all_actors), 2):
+
+
+Snippet for carla.Client.__init__
+
+
+
+```py
+
+
+# This recipe shows in every script provided in PythonAPI/Examples
+# and it is used to parse the client creation arguments when running the script.
+
+ argparser = argparse.ArgumentParser(
+ description=__doc__)
+ argparser.add_argument(
+ '--host',
+ metavar='H',
+ default='127.0.0.1',
+ help='IP of the host server (default: 127.0.0.1)')
+ argparser.add_argument(
+ '-p', '--port',
+ metavar='P',
+ default=2000,
+ type=int,
+ help='TCP port to listen to (default: 2000)')
+ argparser.add_argument(
+ '-s', '--speed',
+ metavar='FACTOR',
+ default=1.0,
+ type=float,
+ help='rate at which the weather changes (default: 1.0)')
+ args = argparser.parse_args()
+
+ speed_factor = args.speed
+ update_freq = 0.1 / speed_factor
+
+ client = carla.Client(args.host, args.port)
+
+
+
+```
+
+
+
+
+
+
+Snippet for carla.DebugHelper.draw_box
+
+
+
+```py
+
+
+# This recipe shows how to draw traffic light actor bounding boxes from a world snapshot.
+
+# ....
+debug = world.debug
+world_snapshot = world.get_snapshot()
+
+for actor_snapshot in world_snapshot:
+ actual_actor = world.get_actor(actor_snapshot.id)
+ if actual_actor.type_id == 'traffic.traffic_light':
+ debug.draw_box(carla.BoundingBox(actor_snapshot.get_transform().location,carla.Vector3D(0.5,0.5,2)),actor_snapshot.get_transform().rotation, 0.05, carla.Color(255,0,0,0),0)
+# ...
+
+
+
+```
+
+
+
+
+
+
+
+
+
+Snippet for carla.DebugHelper.draw_string
+
+
+
+```py
+
+
+# This recipe is a modification of lane_explorer.py example.
+# It draws the path of an actor through the world, printing information at each waypoint.
+
+# ...
+current_w = map.get_waypoint(vehicle.get_location())
+while True:
+
+ next_w = map.get_waypoint(vehicle.get_location(), lane_type=carla.LaneType.Driving | carla.LaneType.Shoulder | carla.LaneType.Sidewalk )
+ # Check if the vehicle is moving
+ if next_w.id != current_w.id:
+ vector = vehicle.get_velocity()
+ # Check if the vehicle is on a sidewalk
+ if current_w.lane_type == carla.LaneType.Sidewalk:
+ draw_waypoint_union(debug, current_w, next_w, cyan if current_w.is_junction else red, 60)
+ else:
+ draw_waypoint_union(debug, current_w, next_w, cyan if current_w.is_junction else green, 60)
+ debug.draw_string(current_w.transform.location, str('%15.0f km/h' % (3.6 * math.sqrt(vector.x**2 + vector.y**2 + vector.z**2))), False, orange, 60)
+ draw_transform(debug, current_w.transform, white, 60)
+
+ # Update the current waypoint and sleep for some time
+ current_w = next_w
+ time.sleep(args.tick_time)
+# ...
+
+
+```
+
+
+
+
+
+
+Snippet for carla.Map.get_waypoint
+
+
+
+```py
+
+
+# This recipe shows the current traffic rules affecting the vehicle.
+# Shows the current lane type and if a lane change can be done in the actual lane or the surrounding ones.
+
+# ...
+waypoint = world.get_map().get_waypoint(vehicle.get_location(),project_to_road=True, lane_type=(carla.LaneType.Driving | carla.LaneType.Shoulder | carla.LaneType.Sidewalk))
+print("Current lane type: " + str(waypoint.lane_type))
+# Check current lane change allowed
+print("Current Lane change: " + str(waypoint.lane_change))
+# Left and Right lane markings
+print("L lane marking type: " + str(waypoint.left_lane_marking.type))
+print("L lane marking change: " + str(waypoint.left_lane_marking.lane_change))
+print("R lane marking type: " + str(waypoint.right_lane_marking.type))
+print("R lane marking change: " + str(waypoint.right_lane_marking.lane_change))
+# ...
+
+
+```
+
+
+
+
+
+
+
+
+
+Snippet for carla.Sensor.listen
+
+
+
+```py
+
+
+# This recipe applies a color conversion to the image taken by a camera sensor,
+# so it is converted to a semantic segmentation image.
+
+# ...
+camera_bp = world.get_blueprint_library().filter('sensor.camera.semantic_segmentation')
+# ...
+cc = carla.ColorConverter.CityScapesPalette
+camera.listen(lambda image: image.save_to_disk('output/%06d.png' % image.frame, cc))
+# ...
+
+
+```
+
+
+
+
+
+
+Snippet for carla.TrafficLight.set_state
+
+
+
+```py
+
+
+# This recipe changes from red to green the traffic light that affects the vehicle.
+# This is done by detecting if the vehicle actor is at a traffic light.
+
+# ...
+world = client.get_world()
+spectator = world.get_spectator()
+
+vehicle_bp = random.choice(world.get_blueprint_library().filter('vehicle.bmw.*'))
+transform = random.choice(world.get_map().get_spawn_points())
+vehicle = world.try_spawn_actor(vehicle_bp, transform)
+
+# Wait for world to get the vehicle actor
+world.tick()
+
+world_snapshot = world.wait_for_tick()
+actor_snapshot = world_snapshot.find(vehicle.id)
+
+# Set spectator at given transform (vehicle transform)
+spectator.set_transform(actor_snapshot.get_transform())
+# ...
+if vehicle_actor.is_at_traffic_light():
+ traffic_light = vehicle_actor.get_traffic_light()
+ if traffic_light.get_state() == carla.TrafficLightState.Red:
+ # world.hud.notification("Traffic light changed! Good to go!")
+ traffic_light.set_state(carla.TrafficLightState.Green)
+# ...
+
+
+
+```
+
+
+
+
+
+
+
+
+
+Snippet for carla.Vehicle.set_wheel_steer_direction
+
+
+
+```py
+
+# Sets the appearance of the vehicle's front wheels to 40°. Vehicle physics will not be affected.
+
+vehicle.set_wheel_steer_direction(carla.VehicleWheelLocation.FR_Wheel, 40.0)
+vehicle.set_wheel_steer_direction(carla.VehicleWheelLocation.FL_Wheel, 40.0)
+
+
+```
+
+
+
+
+
+
+Snippet for carla.WalkerAIController.stop
+
+
+
+```py
+
+
+#To destroy the pedestrians, stop them from the navigation, and then destroy the objects (actor and controller).
+
+# stop pedestrians (list is [controller, actor, controller, actor ...])
+for i in range(0, len(all_id), 2):
+ all_actors[i].stop()
+
+# destroy pedestrian (actor and controller)
+client.apply_batch([carla.command.DestroyActor(x) for x in all_id])
+
+
+```
+
+
+
+
+
+
+Snippet for carla.World.enable_environment_objects
+
+
+
+```py
+
+# This recipe turns visibility off and on for two specific buildings on the map
+
+# Get the buildings in the world
+world = client.get_world()
+env_objs = world.get_environment_objects(carla.CityObjectLabel.Buildings)
+
+# Access individual building IDs and save in a set
+building_01 = env_objs[0]
+building_02 = env_objs[1]
+objects_to_toggle = {building_01.id, building_02.id}
+
+# Toggle buildings off
+world.enable_environment_objects(objects_to_toggle, False)
+# Toggle buildings on
+world.enable_environment_objects(objects_to_toggle, True)
+
+
+```
+
+
+
+
+
+
+Snippet for carla.World.get_spectator
+
+
+
+```py
+
+
+# This recipe spawns an actor and the spectator camera at the actor's location.
+
+# ...
+world = client.get_world()
+spectator = world.get_spectator()
+
+vehicle_bp = random.choice(world.get_blueprint_library().filter('vehicle.bmw.*'))
+transform = random.choice(world.get_map().get_spawn_points())
+vehicle = world.try_spawn_actor(vehicle_bp, transform)
+
+# Wait for world to get the vehicle actor
+world.tick()
+
+world_snapshot = world.wait_for_tick()
+actor_snapshot = world_snapshot.find(vehicle.id)
+
+# Set spectator at given transform (vehicle transform)
+spectator.set_transform(actor_snapshot.get_transform())
+# ...
+
+
+```
+
+
+
+
Snippet for carla.World.load_map_layer
@@ -4490,6 +4499,57 @@ world.load_map_layer(carla.MapLayer.ParkedVehicles)
+
+
+Snippet for carla.World.spawn_actor
+
+
+
+```py
+
+
+# This recipe attaches different camera / sensors to a vehicle with different attachments.
+
+# ...
+camera = world.spawn_actor(rgb_camera_bp, transform, attach_to=vehicle, attachment_type=Attachment.Rigid)
+# Default attachment: Attachment.Rigid
+gnss_sensor = world.spawn_actor(sensor_gnss_bp, transform, attach_to=vehicle)
+collision_sensor = world.spawn_actor(sensor_collision_bp, transform, attach_to=vehicle)
+lane_invasion_sensor = world.spawn_actor(sensor_lane_invasion_bp, transform, attach_to=vehicle)
+# ...
+
+
+```
+
+
+
+
+
+
+Snippet for carla.World.unload_map_layer
+
+
+
+```py
+
+# This recipe toggles off several layers in our "_Opt" maps
+
+# Load town one with minimum layout (roads, sidewalks, traffic lights and traffic signs)
+# as well as buildings and parked vehicles
+world = client.load_world('Town01_Opt', carla.MapLayer.Buildings | carla.MapLayer.ParkedVehicles)
+
+# Toggle all buildings off
+world.unload_map_layer(carla.MapLayer.Buildings)
+
+# Toggle all parked vehicles off
+world.unload_map_layer(carla.MapLayer.ParkedVehicles)
+
+
+```
+
+
+
+
diff --git a/LibCarla/source/carla/client/ServerSideSensor.cpp b/LibCarla/source/carla/client/ServerSideSensor.cpp
index 5671fd0db..b32906643 100644
--- a/LibCarla/source/carla/client/ServerSideSensor.cpp
+++ b/LibCarla/source/carla/client/ServerSideSensor.cpp
@@ -24,6 +24,10 @@ namespace client {
if (IsListening() && GetEpisode().IsValid()) {
try {
Stop();
+ for (uint32_t i = 1; i != 16; ++i) {
+ if (listening_mask.test(i))
+ StopGBuffer(i);
+ }
} catch (const std::exception &e) {
log_error("exception trying to stop sensor:", GetDisplayId(), ':', e.what());
}
@@ -34,26 +38,48 @@ namespace client {
log_debug("calling sensor Listen() ", GetDisplayId());
log_debug(GetDisplayId(), ": subscribing to stream");
GetEpisode().Lock()->SubscribeToSensor(*this, std::move(callback));
- _is_listening = true;
+ listening_mask.set(0);
}
void ServerSideSensor::Stop() {
log_debug("calling sensor Stop() ", GetDisplayId());
- if (!_is_listening) {
+ if (!IsListening()) {
log_warning(
"attempting to unsubscribe from stream but sensor wasn't listening:",
GetDisplayId());
return;
}
GetEpisode().Lock()->UnSubscribeFromSensor(*this);
- _is_listening = false;
+ listening_mask.reset(0);
+ }
+
+ void ServerSideSensor::ListenToGBuffer(uint32_t GBufferId, CallbackFunctionType callback) {
+ log_debug(GetDisplayId(), ": subscribing to gbuffer stream");
+ GetEpisode().Lock()->SubscribeToGBuffer(*this, GBufferId, std::move(callback));
+ listening_mask.set(0);
+ listening_mask.set(GBufferId + 1);
+ }
+
+ void ServerSideSensor::StopGBuffer(uint32_t GBufferId) {
+ log_debug(GetDisplayId(), ": unsubscribing from gbuffer stream");
+ GetEpisode().Lock()->UnSubscribeFromGBuffer(*this, GBufferId);
+ listening_mask.reset(GBufferId + 1);
+ if (listening_mask.count() == 1) {
+ listening_mask.reset(0);
+ }
}
bool ServerSideSensor::Destroy() {
log_debug("calling sensor Destroy() ", GetDisplayId());
if (IsListening()) {
+ for (uint32_t i = 1; i != 16; ++i) {
+ if (listening_mask.test(i)) {
+ StopGBuffer(i);
+ }
+ }
Stop();
}
+ listening_mask = {};
return Actor::Destroy();
}
diff --git a/LibCarla/source/carla/client/ServerSideSensor.h b/LibCarla/source/carla/client/ServerSideSensor.h
index 59592c284..d55a777b8 100644
--- a/LibCarla/source/carla/client/ServerSideSensor.h
+++ b/LibCarla/source/carla/client/ServerSideSensor.h
@@ -7,6 +7,7 @@
#pragma once
#include "carla/client/Sensor.h"
+#include <bitset>
namespace carla {
namespace client {
@@ -33,7 +34,17 @@ namespace client {
/// Return whether this Sensor instance is currently listening to the
/// associated sensor in the simulator.
bool IsListening() const override {
- return _is_listening;
+ return listening_mask.test(0);
+ }
+
+  /// Listen for a specific gbuffer stream.
+ void ListenToGBuffer(uint32_t GBufferId, CallbackFunctionType callback);
+
+ /// Stop listening for a specific gbuffer stream.
+ void StopGBuffer(uint32_t GBufferId);
+
+ inline bool IsListeningGBuffer(uint32_t id) const {
+ return listening_mask.test(id + 1);
}
/// @copydoc Actor::Destroy()
@@ -43,7 +54,8 @@ namespace client {
private:
- bool _is_listening = false;
+ std::bitset<32> listening_mask;
+
};
} // namespace client
diff --git a/LibCarla/source/carla/client/detail/Client.cpp b/LibCarla/source/carla/client/detail/Client.cpp
index 52fed6289..960f6e329 100644
--- a/LibCarla/source/carla/client/detail/Client.cpp
+++ b/LibCarla/source/carla/client/detail/Client.cpp
@@ -586,6 +586,27 @@ namespace detail {
_pimpl->streaming_client.UnSubscribe(token);
}
+ void Client::SubscribeToGBuffer(
+ rpc::ActorId ActorId,
+ uint32_t GBufferId,
+    std::function<void(Buffer)> callback)
+ {
+    std::vector<unsigned char> token_data = _pimpl->CallAndWait<std::vector<unsigned char>>("get_gbuffer_token", ActorId, GBufferId);
+ streaming::Token token;
+ std::memcpy(&token.data[0u], token_data.data(), token_data.size());
+ _pimpl->streaming_client.Subscribe(token, std::move(callback));
+ }
+
+ void Client::UnSubscribeFromGBuffer(
+ rpc::ActorId ActorId,
+ uint32_t GBufferId)
+ {
+    std::vector<unsigned char> token_data = _pimpl->CallAndWait<std::vector<unsigned char>>("get_gbuffer_token", ActorId, GBufferId);
+ streaming::Token token;
+ std::memcpy(&token.data[0u], token_data.data(), token_data.size());
+ _pimpl->streaming_client.UnSubscribe(token);
+ }
+
void Client::DrawDebugShape(const rpc::DebugShape &shape) {
_pimpl->AsyncCall("draw_debug_shape", shape);
}
diff --git a/LibCarla/source/carla/client/detail/Client.h b/LibCarla/source/carla/client/detail/Client.h
index 4e8c984c7..83da380ab 100644
--- a/LibCarla/source/carla/client/detail/Client.h
+++ b/LibCarla/source/carla/client/detail/Client.h
@@ -368,8 +368,17 @@ namespace detail {
const streaming::Token &token,
std::function callback);
+ void SubscribeToGBuffer(
+ rpc::ActorId ActorId,
+ uint32_t GBufferId,
+          std::function<void(Buffer)> callback);
+
void UnSubscribeFromStream(const streaming::Token &token);
+ void UnSubscribeFromGBuffer(
+ rpc::ActorId ActorId,
+ uint32_t GBufferId);
+
void DrawDebugShape(const rpc::DebugShape &shape);
void ApplyBatch(
diff --git a/LibCarla/source/carla/client/detail/Simulator.cpp b/LibCarla/source/carla/client/detail/Simulator.cpp
index 898624e57..ba424e67b 100644
--- a/LibCarla/source/carla/client/detail/Simulator.cpp
+++ b/LibCarla/source/carla/client/detail/Simulator.cpp
@@ -389,9 +389,26 @@ namespace detail {
cb(std::move(data));
});
}
-
- void Simulator::UnSubscribeFromSensor(const Sensor &sensor) {
+
+ void Simulator::UnSubscribeFromSensor(Actor &sensor) {
_client.UnSubscribeFromStream(sensor.GetActorDescription().GetStreamToken());
+ // If in the future we need to unsubscribe from each gbuffer individually, it should be done here.
+ }
+
+ void Simulator::SubscribeToGBuffer(
+ Actor &actor,
+ uint32_t gbuffer_id,
+      std::function<void(SharedPtr<sensor::SensorData>)> callback) {
+ _client.SubscribeToGBuffer(actor.GetId(), gbuffer_id,
+ [cb=std::move(callback), ep=WeakEpisodeProxy{shared_from_this()}](auto buffer) {
+ auto data = sensor::Deserializer::Deserialize(std::move(buffer));
+ data->_episode = ep.TryLock();
+ cb(std::move(data));
+ });
+ }
+
+ void Simulator::UnSubscribeFromGBuffer(Actor &actor, uint32_t gbuffer_id) {
+ _client.UnSubscribeFromGBuffer(actor.GetId(), gbuffer_id);
}
void Simulator::FreezeAllTrafficLights(bool frozen) {
diff --git a/LibCarla/source/carla/client/detail/Simulator.h b/LibCarla/source/carla/client/detail/Simulator.h
index 61eb724ee..e770c714d 100644
--- a/LibCarla/source/carla/client/detail/Simulator.h
+++ b/LibCarla/source/carla/client/detail/Simulator.h
@@ -590,7 +590,16 @@ namespace detail {
const Sensor &sensor,
std::function)> callback);
- void UnSubscribeFromSensor(const Sensor &sensor);
+ void UnSubscribeFromSensor(Actor &sensor);
+
+ void SubscribeToGBuffer(
+ Actor & sensor,
+ uint32_t gbuffer_id,
+        std::function<void(SharedPtr<sensor::SensorData>)> callback);
+
+ void UnSubscribeFromGBuffer(
+ Actor & sensor,
+ uint32_t gbuffer_id);
/// @}
// =========================================================================
diff --git a/LibCarla/source/carla/sensor/SensorRegistry.h b/LibCarla/source/carla/sensor/SensorRegistry.h
index 245c105f1..901a47dfd 100644
--- a/LibCarla/source/carla/sensor/SensorRegistry.h
+++ b/LibCarla/source/carla/sensor/SensorRegistry.h
@@ -27,6 +27,8 @@
#include "carla/sensor/s11n/ObstacleDetectionEventSerializer.h"
#include "carla/sensor/s11n/RadarSerializer.h"
#include "carla/sensor/s11n/SemanticLidarSerializer.h"
+#include "carla/sensor/s11n/GBufferUint8Serializer.h"
+#include "carla/sensor/s11n/GBufferFloatSerializer.h"
// 2. Add a forward-declaration of the sensor here.
class ACollisionSensor;
@@ -46,6 +48,8 @@ class ASemanticSegmentationCamera;
class AInstanceSegmentationCamera;
class ARssSensor;
class FWorldObserver;
+struct FCameraGBufferUint8;
+struct FCameraGBufferFloat;
namespace carla {
namespace sensor {
@@ -74,7 +78,9 @@ namespace sensor {
std::pair,
std::pair,
std::pair,
- std::pair
+ std::pair,
+    std::pair<FCameraGBufferUint8 *, s11n::GBufferUint8Serializer>,
+    std::pair<FCameraGBufferFloat *, s11n::GBufferFloatSerializer>
>;
} // namespace sensor
diff --git a/LibCarla/source/carla/sensor/data/Image.h b/LibCarla/source/carla/sensor/data/Image.h
index 531249867..5a7b05adc 100644
--- a/LibCarla/source/carla/sensor/data/Image.h
+++ b/LibCarla/source/carla/sensor/data/Image.h
@@ -15,6 +15,9 @@ namespace data {
/// An image of 32-bit BGRA colors (8-bit channels, 4 bytes)
using Image = ImageTmpl;
+
+ /// An image of float BGRA colors (32-bit channels)
+ using FloatImage = ImageTmpl;
/// An image of 64-bit BGRA colors (16-bit channels, 2 floats)
using OpticalFlowImage = ImageTmpl;
diff --git a/LibCarla/source/carla/sensor/data/ImageTmpl.h b/LibCarla/source/carla/sensor/data/ImageTmpl.h
index 7d76b28bc..e422c1964 100644
--- a/LibCarla/source/carla/sensor/data/ImageTmpl.h
+++ b/LibCarla/source/carla/sensor/data/ImageTmpl.h
@@ -10,6 +10,8 @@
#include "carla/sensor/data/Array.h"
#include "carla/sensor/s11n/ImageSerializer.h"
#include "carla/sensor/s11n/OpticalFlowImageSerializer.h"
+#include "carla/sensor/s11n/GBufferUint8Serializer.h"
+#include "carla/sensor/s11n/GBufferFloatSerializer.h"
#include "carla/sensor/s11n/NormalsImageSerializer.h"
namespace carla {
@@ -28,6 +30,8 @@ namespace data {
friend Serializer;
friend SerializerOpticalFlow;
+ friend s11n::GBufferUint8Serializer;
+ friend s11n::GBufferFloatSerializer;
friend SerializerNormals;
explicit ImageTmpl(RawData &&data)
diff --git a/LibCarla/source/carla/sensor/s11n/GBufferFloatSerializer.cpp b/LibCarla/source/carla/sensor/s11n/GBufferFloatSerializer.cpp
new file mode 100644
index 000000000..cefff1c79
--- /dev/null
+++ b/LibCarla/source/carla/sensor/s11n/GBufferFloatSerializer.cpp
@@ -0,0 +1,22 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see <https://opensource.org/licenses/MIT>.
+
+#include "carla/sensor/s11n/GBufferFloatSerializer.h"
+
+#include "carla/sensor/data/Image.h"
+
+namespace carla {
+namespace sensor {
+namespace s11n {
+
+  SharedPtr<SensorData> GBufferFloatSerializer::Deserialize(RawData &&data) {
+ auto image = SharedPtr(new data::FloatImage{std::move(data)});
+ return image;
+ }
+
+} // namespace s11n
+} // namespace sensor
+} // namespace carla
diff --git a/LibCarla/source/carla/sensor/s11n/GBufferFloatSerializer.h b/LibCarla/source/carla/sensor/s11n/GBufferFloatSerializer.h
new file mode 100644
index 000000000..86149b060
--- /dev/null
+++ b/LibCarla/source/carla/sensor/s11n/GBufferFloatSerializer.h
@@ -0,0 +1,62 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see <https://opensource.org/licenses/MIT>.
+
+#pragma once
+
+#include "carla/Memory.h"
+#include "carla/sensor/RawData.h"
+
+#include <cstdint>
+#include <cstring>
+
+namespace carla {
+namespace sensor {
+
+ class SensorData;
+
+namespace s11n {
+
+ /// Serializes image buffers generated by camera sensors.
+ class GBufferFloatSerializer {
+ public:
+
+#pragma pack(push, 1)
+ struct ImageHeader {
+ uint32_t width;
+ uint32_t height;
+ float fov_angle;
+ };
+#pragma pack(pop)
+
+ constexpr static auto header_offset = sizeof(ImageHeader);
+
+ static const ImageHeader &DeserializeHeader(const RawData &data) {
+    return *reinterpret_cast<const ImageHeader *>(data.begin());
+ }
+
+  template <typename Sensor>
+ static Buffer Serialize(const Sensor &sensor, Buffer &&bitmap,
+ uint32_t ImageWidth, uint32_t ImageHeight, float FovAngle);
+
+  static SharedPtr<SensorData> Deserialize(RawData &&data);
+ };
+
+  template <typename Sensor>
+ inline Buffer GBufferFloatSerializer::Serialize(const Sensor &/*sensor*/, Buffer &&bitmap,
+ uint32_t ImageWidth, uint32_t ImageHeight, float FovAngle) {
+ DEBUG_ASSERT(bitmap.size() > sizeof(ImageHeader));
+ ImageHeader header = {
+ ImageWidth,
+ ImageHeight,
+ FovAngle
+ };
+    std::memcpy(bitmap.data(), reinterpret_cast<const void *>(&header), sizeof(header));
+ return std::move(bitmap);
+ }
+
+} // namespace s11n
+} // namespace sensor
+} // namespace carla
diff --git a/LibCarla/source/carla/sensor/s11n/GBufferUint8Serializer.cpp b/LibCarla/source/carla/sensor/s11n/GBufferUint8Serializer.cpp
new file mode 100644
index 000000000..ba8473701
--- /dev/null
+++ b/LibCarla/source/carla/sensor/s11n/GBufferUint8Serializer.cpp
@@ -0,0 +1,22 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see <https://opensource.org/licenses/MIT>.
+
+#include "carla/sensor/s11n/GBufferUint8Serializer.h"
+
+#include "carla/sensor/data/Image.h"
+
+namespace carla {
+namespace sensor {
+namespace s11n {
+
+  SharedPtr<SensorData> GBufferUint8Serializer::Deserialize(RawData &&data) {
+ auto image = SharedPtr(new data::Image{std::move(data)});
+ return image;
+ }
+
+} // namespace s11n
+} // namespace sensor
+} // namespace carla
diff --git a/LibCarla/source/carla/sensor/s11n/GBufferUint8Serializer.h b/LibCarla/source/carla/sensor/s11n/GBufferUint8Serializer.h
new file mode 100644
index 000000000..ecf11f96b
--- /dev/null
+++ b/LibCarla/source/carla/sensor/s11n/GBufferUint8Serializer.h
@@ -0,0 +1,62 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see <https://opensource.org/licenses/MIT>.
+
+#pragma once
+
+#include "carla/Memory.h"
+#include "carla/sensor/RawData.h"
+
+#include <cstdint>
+#include <cstring>
+
+namespace carla {
+namespace sensor {
+
+ class SensorData;
+
+namespace s11n {
+
+ /// Serializes image buffers generated by camera sensors.
+ class GBufferUint8Serializer {
+ public:
+
+#pragma pack(push, 1)
+ struct ImageHeader {
+ uint32_t width;
+ uint32_t height;
+ float fov_angle;
+ };
+#pragma pack(pop)
+
+ constexpr static auto header_offset = sizeof(ImageHeader);
+
+ static const ImageHeader &DeserializeHeader(const RawData &data) {
+    return *reinterpret_cast<const ImageHeader *>(data.begin());
+ }
+
+  template <typename Sensor>
+ static Buffer Serialize(const Sensor &sensor, Buffer &&bitmap,
+ uint32_t ImageWidth, uint32_t ImageHeight, float FovAngle);
+
+  static SharedPtr<SensorData> Deserialize(RawData &&data);
+ };
+
+  template <typename Sensor>
+ inline Buffer GBufferUint8Serializer::Serialize(const Sensor &/*sensor*/, Buffer &&bitmap,
+ uint32_t ImageWidth, uint32_t ImageHeight, float FovAngle) {
+ DEBUG_ASSERT(bitmap.size() > sizeof(ImageHeader));
+ ImageHeader header = {
+ ImageWidth,
+ ImageHeight,
+ FovAngle
+ };
+    std::memcpy(bitmap.data(), reinterpret_cast<const void *>(&header), sizeof(header));
+ return std::move(bitmap);
+ }
+
+} // namespace s11n
+} // namespace sensor
+} // namespace carla
diff --git a/PythonAPI/carla/source/libcarla/Sensor.cpp b/PythonAPI/carla/source/libcarla/Sensor.cpp
index 45b5ab99d..5b66743a1 100644
--- a/PythonAPI/carla/source/libcarla/Sensor.cpp
+++ b/PythonAPI/carla/source/libcarla/Sensor.cpp
@@ -14,6 +14,13 @@ static void SubscribeToStream(carla::client::Sensor &self, boost::python::object
self.Listen(MakeCallback(std::move(callback)));
}
+static void SubscribeToGBuffer(
+ carla::client::ServerSideSensor &self,
+ uint32_t GBufferId,
+ boost::python::object callback) {
+ self.ListenToGBuffer(GBufferId, MakeCallback(std::move(callback)));
+}
+
void export_sensor() {
using namespace boost::python;
namespace cc = carla::client;
@@ -27,6 +34,8 @@ void export_sensor() {
class_, boost::noncopyable, boost::shared_ptr>
("ServerSideSensor", no_init)
+ .def("listen_to_gbuffer", &SubscribeToGBuffer, (arg("gbuffer_id"), arg("callback")))
+ .def("stop_gbuffer", &cc::ServerSideSensor::StopGBuffer, (arg("gbuffer_id")))
.def(self_ns::str(self_ns::self))
;
diff --git a/PythonAPI/carla/source/libcarla/SensorData.cpp b/PythonAPI/carla/source/libcarla/SensorData.cpp
index ca11eaa36..d235dd6b7 100644
--- a/PythonAPI/carla/source/libcarla/SensorData.cpp
+++ b/PythonAPI/carla/source/libcarla/SensorData.cpp
@@ -387,6 +387,24 @@ void export_sensor_data() {
.value("CityScapesPalette", EColorConverter::CityScapesPalette)
;
+ // The values here should match the ones in the enum EGBufferTextureID,
+ // from the CARLA fork of Unreal Engine (Renderer/Public/GBufferView.h).
+ enum_("GBufferTextureID")
+ .value("SceneColor", 0)
+ .value("SceneDepth", 1)
+ .value("SceneStencil", 2)
+ .value("GBufferA", 3)
+ .value("GBufferB", 4)
+ .value("GBufferC", 5)
+ .value("GBufferD", 6)
+ .value("GBufferE", 7)
+ .value("GBufferF", 8)
+ .value("Velocity", 9)
+ .value("SSAO", 10)
+ .value("CustomDepth", 11)
+ .value("CustomStencil", 12)
+ ;
+
class_, boost::noncopyable, boost::shared_ptr>("Image", no_init)
.add_property("width", &csd::Image::GetWidth)
.add_property("height", &csd::Image::GetHeight)
diff --git a/PythonAPI/docs/sensor.yml b/PythonAPI/docs/sensor.yml
index 2f57e0bd0..9a393e183 100644
--- a/PythonAPI/docs/sensor.yml
+++ b/PythonAPI/docs/sensor.yml
@@ -45,6 +45,29 @@
doc: >
Commands the sensor to stop listening for data.
# --------------------------------------
+ - def_name: listen_to_gbuffer
+ params:
+ - param_name: gbuffer_id
+ type: carla.GBufferTextureID
+ doc: >
+ The ID of the desired Unreal Engine GBuffer texture.
+ - param_name: callback
+ type: function
+ doc: >
+ The called function with one argument containing the received GBuffer texture.
+ doc: >
+ The function the sensor will be calling to every time the desired GBuffer texture is received.
      This function needs an argument containing a carla.SensorData object to work with.
+ # --------------------------------------
+ - def_name: stop_gbuffer
+ params:
+ - param_name: gbuffer_id
+ type: carla.GBufferTextureID
+ doc: >
+ The ID of the Unreal Engine GBuffer texture.
+ doc: >
+ Commands the sensor to stop listening for the specified GBuffer texture.
+ # --------------------------------------
- def_name: __str__
# --------------------------------------
diff --git a/PythonAPI/docs/sensor_data.yml b/PythonAPI/docs/sensor_data.yml
index 927a03954..17385fb74 100644
--- a/PythonAPI/docs/sensor_data.yml
+++ b/PythonAPI/docs/sensor_data.yml
@@ -863,4 +863,71 @@
# --------------------------------------
- def_name: __str__
# --------------------------------------
+
+ - class_name: GBufferTextureID
+ # - DESCRIPTION ------------------------
+ doc: >
    Defines the identifiers of each GBuffer texture (See the method `carla.ServerSideSensor.listen_to_gbuffer`).
+ # - PROPERTIES -------------------------
+ instance_variables:
+ - var_name: SceneColor
+ doc: >
+ The texture "SceneColor" contains the final color of the image.
+ - var_name: SceneDepth
+ doc: >
+ The texture "SceneDepth" contains the depth buffer - linear in world units.
+ - var_name: SceneStencil
+ doc: >
+ The texture "SceneStencil" contains the stencil buffer.
+ - var_name: GBufferA
+ doc: >
+ The texture "GBufferA" contains the world-space normal vectors in the RGB channels. The alpha channel contains "per-object data".
+ - var_name: GBufferB
+ doc: >
+ The texture "GBufferB" contains the metallic, specular and roughness in the RGB channels, respectively. The alpha channel contains a mask where the lower 4 bits indicate the shading model and the upper 4 bits contain the selective output mask.
+ - var_name: GBufferC
+ doc: >
+ The texture "GBufferC" contains the diffuse color in the RGB channels, with the indirect irradiance in the alpha channel.
      If static lighting is not allowed, the alpha channel will contain the ambient occlusion instead.
+ - var_name: GBufferD
+ doc: >
+ The contents of the "GBufferD" varies depending on the rendered object's material shading model (GBufferB):
+ - MSM_Subsurface (2), MSM_PreintegratedSkin (3), MSM_TwoSidedFoliage (6):
+ RGB: Subsurface color.
+ A: Opacity.
+ - MSM_ClearCoat (4):
+ R: Clear coat.
+ G: Roughness.
+ - MSM_SubsurfaceProfile (5):
+ RGB: Subsurface profile.
+ - MSM_Hair (7):
+ RG: World normal.
+ B: Backlit value.
+ - MSM_Cloth (8):
+ RGB: Subsurface color.
+ A: Cloth value.
+ - MSM_Eye (9):
+ RG: Eye tangent.
+ B: Iris mask.
+ A: Iris distance.
+ - var_name: GBufferE
+ doc: >
+ The texture "GBufferE" contains the precomputed shadow factors in the RGBA channels. This texture is unavailable if the selective output mask (GBufferB) does not have its 4th bit set.
+ - var_name: GBufferF
+ doc: >
+ The texture "GBufferF" contains the world-space tangent in the RGB channels and the anisotropy in the alpha channel. This texture is unavailable if the selective output mask (GBufferB) does not have its 5th bit set.
+ - var_name: Velocity
+ doc: >
+ The texture "Velocity" contains the screen-space velocity of the scene objects.
+ - var_name: SSAO
+ doc: >
+ The texture "SSAO" contains the screen-space ambient occlusion texture.
+ - var_name: CustomDepth
+ doc: >
+ The texture "CustomDepth" contains the Unreal Engine custom depth data.
+ - var_name: CustomStencil
+ doc: >
+ The texture "CustomStencil" contains the Unreal Engine custom stencil data.
+ # --------------------------------------
+
...
diff --git a/PythonAPI/examples/manual_control_gbuffer.py b/PythonAPI/examples/manual_control_gbuffer.py
new file mode 100644
index 000000000..7fc5037b2
--- /dev/null
+++ b/PythonAPI/examples/manual_control_gbuffer.py
@@ -0,0 +1,1422 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
+# Barcelona (UAB).
+#
+# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
+
+# Allows controlling a vehicle with a keyboard. For a simpler and more
+# documented example, please take a look at tutorial.py.
+
+"""
+Welcome to CARLA manual control.
+
+Use ARROWS or WASD keys for control.
+
+ W : throttle
+ S : brake
+ A/D : steer left/right
+ Q : toggle reverse
+ Space : hand-brake
+ P : toggle autopilot
+ M : toggle manual transmission
+ ,/. : gear up/down
+ CTRL + W : toggle constant velocity mode at 60 km/h
+
+ L : toggle next light type
+ SHIFT + L : toggle high beam
+ Z/X : toggle right/left blinker
+ I : toggle interior light
+
+ TAB : change sensor position
+ ` or N : next sensor
+ [1-9] : change to sensor [1-9]
+ G : toggle radar visualization
+ C : change weather (Shift+C reverse)
+ Backspace : change vehicle
+
+ U : next gbuffer
+
+ O : open/close all doors of vehicle
+ T : toggle vehicle's telemetry
+
+ V : Select next map layer (Shift+V reverse)
+ B : Load current selected map layer (Shift+B to unload)
+
+ R : toggle recording images to disk
+
+ CTRL + R : toggle recording of simulation (replacing any previous)
+ CTRL + P : start replaying last recorded simulation
+ CTRL + + : increments the start time of the replay by 1 second (+SHIFT = 10 seconds)
+ CTRL + - : decrements the start time of the replay by 1 second (+SHIFT = 10 seconds)
+
+ F1 : toggle HUD
+ H/? : toggle help
+ ESC : quit
+"""
+
+from __future__ import print_function
+
+
+# ==============================================================================
+# -- find carla module ---------------------------------------------------------
+# ==============================================================================
+
+
+import glob
+import os
+import sys
+
+try:
+ sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
+ sys.version_info.major,
+ sys.version_info.minor,
+ 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
+except IndexError:
+ pass
+
+
+# ==============================================================================
+# -- imports -------------------------------------------------------------------
+# ==============================================================================
+
+
+import carla
+
+from carla import ColorConverter as cc
+
+import argparse
+import collections
+import datetime
+import logging
+import math
+import random
+import re
+import weakref
+
+try:
+ import pygame
+ from pygame.locals import KMOD_CTRL
+ from pygame.locals import KMOD_SHIFT
+ from pygame.locals import K_0
+ from pygame.locals import K_9
+ from pygame.locals import K_BACKQUOTE
+ from pygame.locals import K_BACKSPACE
+ from pygame.locals import K_COMMA
+ from pygame.locals import K_DOWN
+ from pygame.locals import K_ESCAPE
+ from pygame.locals import K_F1
+ from pygame.locals import K_LEFT
+ from pygame.locals import K_PERIOD
+ from pygame.locals import K_RIGHT
+ from pygame.locals import K_SLASH
+ from pygame.locals import K_SPACE
+ from pygame.locals import K_TAB
+ from pygame.locals import K_UP
+ from pygame.locals import K_a
+ from pygame.locals import K_b
+ from pygame.locals import K_c
+ from pygame.locals import K_d
+ from pygame.locals import K_f
+ from pygame.locals import K_g
+ from pygame.locals import K_h
+ from pygame.locals import K_i
+ from pygame.locals import K_l
+ from pygame.locals import K_m
+ from pygame.locals import K_n
+ from pygame.locals import K_o
+ from pygame.locals import K_p
+ from pygame.locals import K_q
+ from pygame.locals import K_r
+ from pygame.locals import K_s
+ from pygame.locals import K_t
+ from pygame.locals import K_u
+ from pygame.locals import K_v
+ from pygame.locals import K_w
+ from pygame.locals import K_x
+ from pygame.locals import K_z
+ from pygame.locals import K_MINUS
+ from pygame.locals import K_EQUALS
+except ImportError:
+ raise RuntimeError('cannot import pygame, make sure pygame package is installed')
+
+try:
+ import numpy as np
+except ImportError:
+ raise RuntimeError('cannot import numpy, make sure numpy package is installed')
+
+
+
+
# Display names for the view cycled with the 'U' key (shown as a HUD
# notification). Index 0 is the regular camera stream; the remaining entries
# appear to follow the carla.GBufferTextureID order — verify against
# CameraManager.output_texture_id.
gbuffer_names = [
    'Final Color', # Image received using the regular `listen` method.
    'Scene Color',
    'Scene Depth',
    'Scene Stencil',
    'GBuffer A - WorldNormal + Object data',
    'GBuffer B - HDR + Shading model + selective output mask',
    'GBuffer C - Diffuse + indirect irradiance or ambient occlusion (depending on selective output mask)',
    'GBuffer D - Material shading model info',
    'GBuffer E - Precomputed shadow factor',
    'GBuffer F - World tangent + anisotropy',
    'Velocity',
    'Screen-Space Ambient Occlusion',
    'Custom Depth',
    'Custom Stencil'
]
+
+
+
+# ==============================================================================
+# -- Global functions ----------------------------------------------------------
+# ==============================================================================
+
+
def find_weather_presets():
    """Return a list of (carla.WeatherParameters, display name) pairs."""
    word_splitter = re.compile('.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)')

    def pretty(identifier):
        # Insert spaces at CamelCase boundaries, e.g. 'ClearNoon' -> 'Clear Noon'.
        return ' '.join(m.group(0) for m in word_splitter.finditer(identifier))

    preset_names = [x for x in dir(carla.WeatherParameters) if re.match('[A-Z].+', x)]
    return [(getattr(carla.WeatherParameters, x), pretty(x)) for x in preset_names]
+
+
def get_actor_display_name(actor, truncate=250):
    """Build a human-readable name from an actor's type_id.

    Drops the leading category (e.g. 'vehicle.'), title-cases the rest and
    truncates with an ellipsis when longer than *truncate* characters.
    """
    words = actor.type_id.replace('_', '.').title().split('.')[1:]
    name = ' '.join(words)
    if len(name) > truncate:
        return name[:truncate - 1] + u'\u2026'
    return name
+
def get_actor_blueprints(world, filter, generation):
    """Return the blueprints matching *filter*, restricted to *generation*.

    generation is "all" or a string integer (only 1 and 2 are valid).
    Invalid generations print a warning and yield an empty list.
    """
    bps = world.get_blueprint_library().filter(filter)

    if generation.lower() == "all":
        return bps

    # If the filter returns only one bp, we assume that this one is the one
    # needed and therefore we ignore the generation.
    if len(bps) == 1:
        return bps

    try:
        int_generation = int(generation)
        # Check if generation is in available generations
        if int_generation in [1, 2]:
            return [x for x in bps if int(x.get_attribute('generation')) == int_generation]
        print(" Warning! Actor Generation is not valid. No actor will be spawned.")
        return []
    except (ValueError, TypeError):
        # Non-numeric generation argument or blueprint attribute.
        print(" Warning! Actor Generation is not valid. No actor will be spawned.")
        return []
+
+
+# ==============================================================================
+# -- World ---------------------------------------------------------------------
+# ==============================================================================
+
+
class World(object):
    """Owns the simulator world, the ego actor ("player") and its sensors/HUD."""

    def __init__(self, carla_world, hud, args):
        """Cache world/map handles, read CLI options and spawn the initial player."""
        self.world = carla_world
        self.sync = args.sync
        self.actor_role_name = args.rolename
        try:
            self.map = self.world.get_map()
        except RuntimeError as error:
            print('RuntimeError: {}'.format(error))
            print(' The server could not send the OpenDRIVE (.xodr) file:')
            print(' Make sure it exists, has the same name of your town, and is correct.')
            sys.exit(1)
        self.hud = hud
        self.player = None
        self.collision_sensor = None
        self.lane_invasion_sensor = None
        self.gnss_sensor = None
        self.imu_sensor = None
        self.radar_sensor = None
        self.camera_manager = None
        self._weather_presets = find_weather_presets()
        self._weather_index = 0
        self._actor_filter = args.filter
        self._actor_generation = args.generation
        self._gamma = args.gamma
        self.restart()
        self.world.on_tick(hud.on_world_tick)
        self.recording_enabled = False
        self.recording_start = 0
        self.constant_velocity_enabled = False
        self.show_vehicle_telemetry = False
        self.doors_are_open = False
        self.current_map_layer = 0
        # 'V' cycles through this list; 'B' loads/unloads the current selection.
        self.map_layer_names = [
            carla.MapLayer.NONE,
            carla.MapLayer.Buildings,
            carla.MapLayer.Decals,
            carla.MapLayer.Foliage,
            carla.MapLayer.Ground,
            carla.MapLayer.ParkedVehicles,
            carla.MapLayer.Particles,
            carla.MapLayer.Props,
            carla.MapLayer.StreetLights,
            carla.MapLayer.Walls,
            carla.MapLayer.All
        ]

    def restart(self):
        """(Re)spawn the player with a random blueprint and rebuild all sensors."""
        self.player_max_speed = 1.589
        self.player_max_speed_fast = 3.713
        # Keep same camera config if the camera manager exists.
        cam_index = self.camera_manager.index if self.camera_manager is not None else 0
        cam_pos_index = self.camera_manager.transform_index if self.camera_manager is not None else 0
        # Get a random blueprint.
        blueprint = random.choice(get_actor_blueprints(self.world, self._actor_filter, self._actor_generation))
        blueprint.set_attribute('role_name', self.actor_role_name)
        if blueprint.has_attribute('color'):
            color = random.choice(blueprint.get_attribute('color').recommended_values)
            blueprint.set_attribute('color', color)
        if blueprint.has_attribute('driver_id'):
            driver_id = random.choice(blueprint.get_attribute('driver_id').recommended_values)
            blueprint.set_attribute('driver_id', driver_id)
        if blueprint.has_attribute('is_invincible'):
            blueprint.set_attribute('is_invincible', 'true')
        # set the max speed
        if blueprint.has_attribute('speed'):
            self.player_max_speed = float(blueprint.get_attribute('speed').recommended_values[1])
            self.player_max_speed_fast = float(blueprint.get_attribute('speed').recommended_values[2])

        # Spawn the player.
        if self.player is not None:
            # Respawn slightly above the old transform with a leveled rotation.
            spawn_point = self.player.get_transform()
            spawn_point.location.z += 2.0
            spawn_point.rotation.roll = 0.0
            spawn_point.rotation.pitch = 0.0
            self.destroy()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
            self.show_vehicle_telemetry = False
            self.modify_vehicle_physics(self.player)
        # try_spawn_actor may fail (e.g. spot occupied); keep trying random points.
        while self.player is None:
            if not self.map.get_spawn_points():
                print('There are no spawn points available in your map/town.')
                print('Please add some Vehicle Spawn Point to your UE4 scene.')
                sys.exit(1)
            spawn_points = self.map.get_spawn_points()
            spawn_point = random.choice(spawn_points) if spawn_points else carla.Transform()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
            self.show_vehicle_telemetry = False
            self.modify_vehicle_physics(self.player)
        # Set up the sensors.
        self.collision_sensor = CollisionSensor(self.player, self.hud)
        self.lane_invasion_sensor = LaneInvasionSensor(self.player, self.hud)
        self.gnss_sensor = GnssSensor(self.player)
        self.imu_sensor = IMUSensor(self.player)
        self.camera_manager = CameraManager(self.player, self.hud, self._gamma)
        self.camera_manager.transform_index = cam_pos_index
        self.camera_manager.set_sensor(cam_index, notify=False)
        actor_type = get_actor_display_name(self.player)
        self.hud.notification(actor_type)

        if self.sync:
            self.world.tick()
        else:
            self.world.wait_for_tick()

    def next_weather(self, reverse=False):
        """Cycle to the next (or previous) weather preset and apply it."""
        self._weather_index += -1 if reverse else 1
        self._weather_index %= len(self._weather_presets)
        preset = self._weather_presets[self._weather_index]
        self.hud.notification('Weather: %s' % preset[1])
        self.player.get_world().set_weather(preset[0])

    def next_map_layer(self, reverse=False):
        """Select the next (or previous) map layer without loading it."""
        self.current_map_layer += -1 if reverse else 1
        self.current_map_layer %= len(self.map_layer_names)
        selected = self.map_layer_names[self.current_map_layer]
        self.hud.notification('LayerMap selected: %s' % selected)

    def load_map_layer(self, unload=False):
        """Load (or unload, if requested) the currently selected map layer."""
        selected = self.map_layer_names[self.current_map_layer]
        if unload:
            self.hud.notification('Unloading map layer: %s' % selected)
            self.world.unload_map_layer(selected)
        else:
            self.hud.notification('Loading map layer: %s' % selected)
            self.world.load_map_layer(selected)

    def toggle_radar(self):
        """Spawn the radar sensor if absent, otherwise destroy it."""
        if self.radar_sensor is None:
            self.radar_sensor = RadarSensor(self.player)
        elif self.radar_sensor.sensor is not None:
            self.radar_sensor.sensor.destroy()
            self.radar_sensor = None

    def modify_vehicle_physics(self, actor):
        """Enable sweep-wheel collision on the actor's physics, when supported."""
        #If actor is not a vehicle, we cannot use the physics control
        try:
            physics_control = actor.get_physics_control()
            physics_control.use_sweep_wheel_collision = True
            actor.apply_physics_control(physics_control)
        except Exception:
            pass

    def tick(self, clock):
        """Forward the per-frame tick to the HUD."""
        self.hud.tick(self, clock)

    def render(self, display):
        """Draw the camera image first, then the HUD overlay on top."""
        self.camera_manager.render(display)
        self.hud.render(display)

    def destroy_sensors(self):
        """Destroy only the camera sensor (used when restarting a replay)."""
        self.camera_manager.sensor.destroy()
        self.camera_manager.sensor = None
        self.camera_manager.index = None

    def destroy(self):
        """Destroy the player actor and every sensor attached to it."""
        if self.radar_sensor is not None:
            self.toggle_radar()
        sensors = [
            self.camera_manager.sensor,
            self.collision_sensor.sensor,
            self.lane_invasion_sensor.sensor,
            self.gnss_sensor.sensor,
            self.imu_sensor.sensor]
        for sensor in sensors:
            if sensor is not None:
                sensor.stop()
                sensor.destroy()
        if self.player is not None:
            self.player.destroy()
+
+
+# ==============================================================================
+# -- KeyboardControl -----------------------------------------------------------
+# ==============================================================================
+
+
class KeyboardControl(object):
    """Class that handles keyboard input."""
    def __init__(self, world, start_in_autopilot):
        """Pick the control type matching the player actor and set initial state."""
        self._autopilot_enabled = start_in_autopilot
        self._ackermann_enabled = False
        self._ackermann_reverse = 1

        if isinstance(world.player, carla.Vehicle):
            self._control = carla.VehicleControl()
            self._ackermann_control = carla.VehicleAckermannControl()
            self._lights = carla.VehicleLightState.NONE
            world.player.set_autopilot(self._autopilot_enabled)
            world.player.set_light_state(self._lights)
        elif isinstance(world.player, carla.Walker):
            self._control = carla.WalkerControl()
            self._autopilot_enabled = False
            self._rotation = world.player.get_transform().rotation
        else:
            raise NotImplementedError("Actor type not supported")
        self._steer_cache = 0.0
        world.hud.notification("Press 'H' or '?' for help.", seconds=4.0)

    def parse_events(self, client, world, clock, sync_mode):
        """Process pygame events; return True when the application should quit."""
        if isinstance(self._control, carla.VehicleControl):
            current_lights = self._lights
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                return True
            elif event.type == pygame.KEYUP:
                if self._is_quit_shortcut(event.key):
                    return True
                elif event.key == K_BACKSPACE:
                    if self._autopilot_enabled:
                        world.player.set_autopilot(False)
                        world.restart()
                        world.player.set_autopilot(True)
                    else:
                        world.restart()
                elif event.key == K_F1:
                    world.hud.toggle_info()
                elif event.key == K_v and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_map_layer(reverse=True)
                elif event.key == K_v:
                    world.next_map_layer()
                elif event.key == K_b and pygame.key.get_mods() & KMOD_SHIFT:
                    world.load_map_layer(unload=True)
                elif event.key == K_b:
                    world.load_map_layer()
                elif event.key == K_h or (event.key == K_SLASH and pygame.key.get_mods() & KMOD_SHIFT):
                    world.hud.help.toggle()
                elif event.key == K_TAB:
                    world.camera_manager.toggle_camera()
                elif event.key == K_c and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_weather(reverse=True)
                elif event.key == K_c:
                    world.next_weather()
                elif event.key == K_g:
                    world.toggle_radar()
                elif event.key == K_BACKQUOTE:
                    world.camera_manager.next_sensor()
                elif event.key == K_n:
                    world.camera_manager.next_sensor()
                elif event.key == K_w and (pygame.key.get_mods() & KMOD_CTRL):
                    if world.constant_velocity_enabled:
                        world.player.disable_constant_velocity()
                        world.constant_velocity_enabled = False
                        world.hud.notification("Disabled Constant Velocity Mode")
                    else:
                        world.player.enable_constant_velocity(carla.Vector3D(17, 0, 0))
                        world.constant_velocity_enabled = True
                        world.hud.notification("Enabled Constant Velocity Mode at 60 km/h")
                elif event.key == K_o:
                    try:
                        if world.doors_are_open:
                            world.hud.notification("Closing Doors")
                            world.doors_are_open = False
                            world.player.close_door(carla.VehicleDoor.All)
                        else:
                            world.hud.notification("Opening doors")
                            world.doors_are_open = True
                            world.player.open_door(carla.VehicleDoor.All)
                    except Exception:
                        pass
                elif event.key == K_t:
                    if world.show_vehicle_telemetry:
                        world.player.show_debug_telemetry(False)
                        world.show_vehicle_telemetry = False
                        world.hud.notification("Disabled Vehicle Telemetry")
                    else:
                        try:
                            world.player.show_debug_telemetry(True)
                            world.show_vehicle_telemetry = True
                            world.hud.notification("Enabled Vehicle Telemetry")
                        except Exception:
                            pass
                elif event.key == K_u:
                    # Cycle the displayed GBuffer texture and show its name.
                    world.camera_manager.next_gbuffer()
                    world.hud.notification(gbuffer_names[world.camera_manager.output_texture_id])
                elif event.key > K_0 and event.key <= K_9:
                    index_ctrl = 0
                    if pygame.key.get_mods() & KMOD_CTRL:
                        index_ctrl = 9
                    world.camera_manager.set_sensor(event.key - 1 - K_0 + index_ctrl)
                elif event.key == K_r and not (pygame.key.get_mods() & KMOD_CTRL):
                    world.camera_manager.toggle_recording()
                elif event.key == K_r and (pygame.key.get_mods() & KMOD_CTRL):
                    if (world.recording_enabled):
                        client.stop_recorder()
                        world.recording_enabled = False
                        world.hud.notification("Recorder is OFF")
                    else:
                        client.start_recorder("manual_recording.rec")
                        world.recording_enabled = True
                        world.hud.notification("Recorder is ON")
                elif event.key == K_p and (pygame.key.get_mods() & KMOD_CTRL):
                    # stop recorder
                    client.stop_recorder()
                    world.recording_enabled = False
                    # work around to fix camera at start of replaying
                    current_index = world.camera_manager.index
                    world.destroy_sensors()
                    # disable autopilot
                    self._autopilot_enabled = False
                    world.player.set_autopilot(self._autopilot_enabled)
                    world.hud.notification("Replaying file 'manual_recording.rec'")
                    # replayer
                    client.replay_file("manual_recording.rec", world.recording_start, 0, 0)
                    world.camera_manager.set_sensor(current_index)
                elif event.key == K_MINUS and (pygame.key.get_mods() & KMOD_CTRL):
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start -= 10
                    else:
                        world.recording_start -= 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                elif event.key == K_EQUALS and (pygame.key.get_mods() & KMOD_CTRL):
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start += 10
                    else:
                        world.recording_start += 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                if isinstance(self._control, carla.VehicleControl):
                    if event.key == K_f:
                        # Toggle ackermann controller
                        self._ackermann_enabled = not self._ackermann_enabled
                        world.hud.show_ackermann_info(self._ackermann_enabled)
                        world.hud.notification("Ackermann Controller %s" %
                                               ("Enabled" if self._ackermann_enabled else "Disabled"))
                    if event.key == K_q:
                        if not self._ackermann_enabled:
                            self._control.gear = 1 if self._control.reverse else -1
                        else:
                            self._ackermann_reverse *= -1
                            # Reset ackermann control
                            self._ackermann_control = carla.VehicleAckermannControl()
                    elif event.key == K_m:
                        self._control.manual_gear_shift = not self._control.manual_gear_shift
                        self._control.gear = world.player.get_control().gear
                        world.hud.notification('%s Transmission' %
                                               ('Manual' if self._control.manual_gear_shift else 'Automatic'))
                    elif self._control.manual_gear_shift and event.key == K_COMMA:
                        self._control.gear = max(-1, self._control.gear - 1)
                    elif self._control.manual_gear_shift and event.key == K_PERIOD:
                        self._control.gear = self._control.gear + 1
                    elif event.key == K_p and not pygame.key.get_mods() & KMOD_CTRL:
                        if not self._autopilot_enabled and not sync_mode:
                            print("WARNING: You are currently in asynchronous mode and could "
                                  "experience some issues with the traffic simulation")
                        self._autopilot_enabled = not self._autopilot_enabled
                        world.player.set_autopilot(self._autopilot_enabled)
                        world.hud.notification(
                            'Autopilot %s' % ('On' if self._autopilot_enabled else 'Off'))
                    elif event.key == K_l and pygame.key.get_mods() & KMOD_CTRL:
                        current_lights ^= carla.VehicleLightState.Special1
                    elif event.key == K_l and pygame.key.get_mods() & KMOD_SHIFT:
                        current_lights ^= carla.VehicleLightState.HighBeam
                    elif event.key == K_l:
                        # Use 'L' key to switch between lights:
                        # closed -> position -> low beam -> fog
                        if not self._lights & carla.VehicleLightState.Position:
                            world.hud.notification("Position lights")
                            current_lights |= carla.VehicleLightState.Position
                        else:
                            world.hud.notification("Low beam lights")
                            current_lights |= carla.VehicleLightState.LowBeam
                        if self._lights & carla.VehicleLightState.LowBeam:
                            world.hud.notification("Fog lights")
                            current_lights |= carla.VehicleLightState.Fog
                        if self._lights & carla.VehicleLightState.Fog:
                            world.hud.notification("Lights off")
                            current_lights ^= carla.VehicleLightState.Position
                            current_lights ^= carla.VehicleLightState.LowBeam
                            current_lights ^= carla.VehicleLightState.Fog
                    elif event.key == K_i:
                        current_lights ^= carla.VehicleLightState.Interior
                    elif event.key == K_z:
                        current_lights ^= carla.VehicleLightState.LeftBlinker
                    elif event.key == K_x:
                        current_lights ^= carla.VehicleLightState.RightBlinker

        if not self._autopilot_enabled:
            if isinstance(self._control, carla.VehicleControl):
                self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time())
                self._control.reverse = self._control.gear < 0
                # Set automatic control-related vehicle lights
                if self._control.brake:
                    current_lights |= carla.VehicleLightState.Brake
                else: # Remove the Brake flag
                    current_lights &= ~carla.VehicleLightState.Brake
                if self._control.reverse:
                    current_lights |= carla.VehicleLightState.Reverse
                else: # Remove the Reverse flag
                    current_lights &= ~carla.VehicleLightState.Reverse
                if current_lights != self._lights: # Change the light state only if necessary
                    self._lights = current_lights
                    world.player.set_light_state(carla.VehicleLightState(self._lights))
                # Apply control
                if not self._ackermann_enabled:
                    world.player.apply_control(self._control)
                else:
                    world.player.apply_ackermann_control(self._ackermann_control)
                    # Update control to the last one applied by the ackermann controller.
                    self._control = world.player.get_control()
                    # Update hud with the newest ackermann control
                    world.hud.update_ackermann_control(self._ackermann_control)

            elif isinstance(self._control, carla.WalkerControl):
                self._parse_walker_keys(pygame.key.get_pressed(), clock.get_time(), world)
                world.player.apply_control(self._control)

    def _parse_vehicle_keys(self, keys, milliseconds):
        """Translate held-down keys into throttle/brake/steer values."""
        if keys[K_UP] or keys[K_w]:
            if not self._ackermann_enabled:
                self._control.throttle = min(self._control.throttle + 0.01, 1.00)
            else:
                self._ackermann_control.speed += round(milliseconds * 0.005, 2) * self._ackermann_reverse
        else:
            if not self._ackermann_enabled:
                self._control.throttle = 0.0

        if keys[K_DOWN] or keys[K_s]:
            if not self._ackermann_enabled:
                self._control.brake = min(self._control.brake + 0.2, 1)
            else:
                self._ackermann_control.speed -= min(abs(self._ackermann_control.speed), round(milliseconds * 0.005, 2)) * self._ackermann_reverse
                self._ackermann_control.speed = max(0, abs(self._ackermann_control.speed)) * self._ackermann_reverse
        else:
            if not self._ackermann_enabled:
                self._control.brake = 0

        steer_increment = 5e-4 * milliseconds
        if keys[K_LEFT] or keys[K_a]:
            if self._steer_cache > 0:
                self._steer_cache = 0
            else:
                self._steer_cache -= steer_increment
        elif keys[K_RIGHT] or keys[K_d]:
            if self._steer_cache < 0:
                self._steer_cache = 0
            else:
                self._steer_cache += steer_increment
        else:
            self._steer_cache = 0.0
        # Clamp the cached steering and round it to one decimal before applying.
        self._steer_cache = min(0.7, max(-0.7, self._steer_cache))
        if not self._ackermann_enabled:
            self._control.steer = round(self._steer_cache, 1)
            self._control.hand_brake = keys[K_SPACE]
        else:
            self._ackermann_control.steer = round(self._steer_cache, 1)

    def _parse_walker_keys(self, keys, milliseconds, world):
        """Translate held-down keys into walker speed, direction and jump."""
        self._control.speed = 0.0
        if keys[K_DOWN] or keys[K_s]:
            self._control.speed = 0.0
        if keys[K_LEFT] or keys[K_a]:
            self._control.speed = .01
            self._rotation.yaw -= 0.08 * milliseconds
        if keys[K_RIGHT] or keys[K_d]:
            self._control.speed = .01
            self._rotation.yaw += 0.08 * milliseconds
        if keys[K_UP] or keys[K_w]:
            self._control.speed = world.player_max_speed_fast if pygame.key.get_mods() & KMOD_SHIFT else world.player_max_speed
        self._control.jump = keys[K_SPACE]
        self._rotation.yaw = round(self._rotation.yaw, 1)
        self._control.direction = self._rotation.get_forward_vector()

    @staticmethod
    def _is_quit_shortcut(key):
        """Return True for ESC or Ctrl+Q."""
        return (key == K_ESCAPE) or (key == K_q and pygame.key.get_mods() & KMOD_CTRL)
+
+
+# ==============================================================================
+# -- HUD -----------------------------------------------------------------------
+# ==============================================================================
+
+
+class HUD(object):
+ def __init__(self, width, height):
+ self.dim = (width, height)
+ font = pygame.font.Font(pygame.font.get_default_font(), 20)
+ font_name = 'courier' if os.name == 'nt' else 'mono'
+ fonts = [x for x in pygame.font.get_fonts() if font_name in x]
+ default_font = 'ubuntumono'
+ mono = default_font if default_font in fonts else fonts[0]
+ mono = pygame.font.match_font(mono)
+ self._font_mono = pygame.font.Font(mono, 12 if os.name == 'nt' else 14)
+ self._notifications = FadingText(font, (width, 40), (0, height - 40))
+ self.help = HelpText(pygame.font.Font(mono, 16), width, height)
+ self.server_fps = 0
+ self.frame = 0
+ self.simulation_time = 0
+ self._show_info = True
+ self._info_text = []
+ self._server_clock = pygame.time.Clock()
+
+ self._show_ackermann_info = False
+ self._ackermann_control = carla.VehicleAckermannControl()
+
+ def on_world_tick(self, timestamp):
+ self._server_clock.tick()
+ self.server_fps = self._server_clock.get_fps()
+ self.frame = timestamp.frame
+ self.simulation_time = timestamp.elapsed_seconds
+
+ def tick(self, world, clock):
+ self._notifications.tick(world, clock)
+ if not self._show_info:
+ return
+ t = world.player.get_transform()
+ v = world.player.get_velocity()
+ c = world.player.get_control()
+ compass = world.imu_sensor.compass
+ heading = 'N' if compass > 270.5 or compass < 89.5 else ''
+ heading += 'S' if 90.5 < compass < 269.5 else ''
+ heading += 'E' if 0.5 < compass < 179.5 else ''
+ heading += 'W' if 180.5 < compass < 359.5 else ''
+ colhist = world.collision_sensor.get_collision_history()
+ collision = [colhist[x + self.frame - 200] for x in range(0, 200)]
+ max_col = max(1.0, max(collision))
+ collision = [x / max_col for x in collision]
+ vehicles = world.world.get_actors().filter('vehicle.*')
+ self._info_text = [
+ 'Server: % 16.0f FPS' % self.server_fps,
+ 'Client: % 16.0f FPS' % clock.get_fps(),
+ '',
+ 'Vehicle: % 20s' % get_actor_display_name(world.player, truncate=20),
+ 'Map: % 20s' % world.map.name.split('/')[-1],
+ 'Simulation time: % 12s' % datetime.timedelta(seconds=int(self.simulation_time)),
+ '',
+ 'Speed: % 15.0f km/h' % (3.6 * math.sqrt(v.x**2 + v.y**2 + v.z**2)),
+ u'Compass:% 17.0f\N{DEGREE SIGN} % 2s' % (compass, heading),
+ 'Accelero: (%5.1f,%5.1f,%5.1f)' % (world.imu_sensor.accelerometer),
+ 'Gyroscop: (%5.1f,%5.1f,%5.1f)' % (world.imu_sensor.gyroscope),
+ 'Location:% 20s' % ('(% 5.1f, % 5.1f)' % (t.location.x, t.location.y)),
+ 'GNSS:% 24s' % ('(% 2.6f, % 3.6f)' % (world.gnss_sensor.lat, world.gnss_sensor.lon)),
+ 'Height: % 18.0f m' % t.location.z,
+ '']
+ if isinstance(c, carla.VehicleControl):
+ self._info_text += [
+ ('Throttle:', c.throttle, 0.0, 1.0),
+ ('Steer:', c.steer, -1.0, 1.0),
+ ('Brake:', c.brake, 0.0, 1.0),
+ ('Reverse:', c.reverse),
+ ('Hand brake:', c.hand_brake),
+ ('Manual:', c.manual_gear_shift),
+ 'Gear: %s' % {-1: 'R', 0: 'N'}.get(c.gear, c.gear)]
+ if self._show_ackermann_info:
+ self._info_text += [
+ '',
+ 'Ackermann Controller:',
+ ' Target speed: % 8.0f km/h' % (3.6*self._ackermann_control.speed),
+ ]
+ elif isinstance(c, carla.WalkerControl):
+ self._info_text += [
+ ('Speed:', c.speed, 0.0, 5.556),
+ ('Jump:', c.jump)]
+ self._info_text += [
+ '',
+ 'Collision:',
+ collision,
+ '',
+ 'Number of vehicles: % 8d' % len(vehicles)]
+ if len(vehicles) > 1:
+ self._info_text += ['Nearby vehicles:']
+ distance = lambda l: math.sqrt((l.x - t.location.x)**2 + (l.y - t.location.y)**2 + (l.z - t.location.z)**2)
+ vehicles = [(distance(x.get_location()), x) for x in vehicles if x.id != world.player.id]
+ for d, vehicle in sorted(vehicles, key=lambda vehicles: vehicles[0]):
+ if d > 200.0:
+ break
+ vehicle_type = get_actor_display_name(vehicle, truncate=22)
+ self._info_text.append('% 4dm %s' % (d, vehicle_type))
+
+ def show_ackermann_info(self, enabled):
+ self._show_ackermann_info = enabled
+
+ def update_ackermann_control(self, ackermann_control):
+ self._ackermann_control = ackermann_control
+
+ def toggle_info(self):
+ self._show_info = not self._show_info
+
+ def notification(self, text, seconds=2.0):
+ self._notifications.set_text(text, seconds=seconds)
+
+ def error(self, text):
+ self._notifications.set_text('Error: %s' % text, (255, 0, 0))
+
+ def render(self, display):
+ if self._show_info:
+ info_surface = pygame.Surface((220, self.dim[1]))
+ info_surface.set_alpha(100)
+ display.blit(info_surface, (0, 0))
+ v_offset = 4
+ bar_h_offset = 100
+ bar_width = 106
+ for item in self._info_text:
+ if v_offset + 18 > self.dim[1]:
+ break
+ if isinstance(item, list):
+ if len(item) > 1:
+ points = [(x + 8, v_offset + 8 + (1.0 - y) * 30) for x, y in enumerate(item)]
+ pygame.draw.lines(display, (255, 136, 0), False, points, 2)
+ item = None
+ v_offset += 18
+ elif isinstance(item, tuple):
+ if isinstance(item[1], bool):
+ rect = pygame.Rect((bar_h_offset, v_offset + 8), (6, 6))
+ pygame.draw.rect(display, (255, 255, 255), rect, 0 if item[1] else 1)
+ else:
+ rect_border = pygame.Rect((bar_h_offset, v_offset + 8), (bar_width, 6))
+ pygame.draw.rect(display, (255, 255, 255), rect_border, 1)
+ f = (item[1] - item[2]) / (item[3] - item[2])
+ if item[2] < 0.0:
+ rect = pygame.Rect((bar_h_offset + f * (bar_width - 6), v_offset + 8), (6, 6))
+ else:
+ rect = pygame.Rect((bar_h_offset, v_offset + 8), (f * bar_width, 6))
+ pygame.draw.rect(display, (255, 255, 255), rect)
+ item = item[0]
+ if item: # At this point has to be a str.
+ surface = self._font_mono.render(item, True, (255, 255, 255))
+ display.blit(surface, (8, v_offset))
+ v_offset += 18
+ self._notifications.render(display)
+ self.help.render(display)
+
+
+# ==============================================================================
+# -- FadingText ----------------------------------------------------------------
+# ==============================================================================
+
+
+class FadingText(object):
+ def __init__(self, font, dim, pos):
+ self.font = font
+ self.dim = dim
+ self.pos = pos
+ self.seconds_left = 0
+ self.surface = pygame.Surface(self.dim)
+
+ def set_text(self, text, color=(255, 255, 255), seconds=2.0):
+ text_texture = self.font.render(text, True, color)
+ self.surface = pygame.Surface(self.dim)
+ self.seconds_left = seconds
+ self.surface.fill((0, 0, 0, 0))
+ self.surface.blit(text_texture, (10, 11))
+
+ def tick(self, _, clock):
+ delta_seconds = 1e-3 * clock.get_time()
+ self.seconds_left = max(0.0, self.seconds_left - delta_seconds)
+ self.surface.set_alpha(500.0 * self.seconds_left)
+
+ def render(self, display):
+ display.blit(self.surface, self.pos)
+
+
+# ==============================================================================
+# -- HelpText ------------------------------------------------------------------
+# ==============================================================================
+
+
+class HelpText(object):
+ """Helper class to handle text output using pygame"""
+ def __init__(self, font, width, height):
+ lines = __doc__.split('\n')
+ self.font = font
+ self.line_space = 18
+ self.dim = (780, len(lines) * self.line_space + 12)
+ self.pos = (0.5 * width - 0.5 * self.dim[0], 0.5 * height - 0.5 * self.dim[1])
+ self.seconds_left = 0
+ self.surface = pygame.Surface(self.dim)
+ self.surface.fill((0, 0, 0, 0))
+ for n, line in enumerate(lines):
+ text_texture = self.font.render(line, True, (255, 255, 255))
+ self.surface.blit(text_texture, (22, n * self.line_space))
+ self._render = False
+ self.surface.set_alpha(220)
+
+ def toggle(self):
+ self._render = not self._render
+
+ def render(self, display):
+ if self._render:
+ display.blit(self.surface, self.pos)
+
+
+# ==============================================================================
+# -- CollisionSensor -----------------------------------------------------------
+# ==============================================================================
+
+
+class CollisionSensor(object):
+ def __init__(self, parent_actor, hud):
+ self.sensor = None
+ self.history = []
+ self._parent = parent_actor
+ self.hud = hud
+ world = self._parent.get_world()
+ bp = world.get_blueprint_library().find('sensor.other.collision')
+ self.sensor = world.spawn_actor(bp, carla.Transform(), attach_to=self._parent)
+ # We need to pass the lambda a weak reference to self to avoid circular
+ # reference.
+ weak_self = weakref.ref(self)
+ self.sensor.listen(lambda event: CollisionSensor._on_collision(weak_self, event))
+
+ def get_collision_history(self):
+ history = collections.defaultdict(int)
+ for frame, intensity in self.history:
+ history[frame] += intensity
+ return history
+
+ @staticmethod
+ def _on_collision(weak_self, event):
+ self = weak_self()
+ if not self:
+ return
+ actor_type = get_actor_display_name(event.other_actor)
+ self.hud.notification('Collision with %r' % actor_type)
+ impulse = event.normal_impulse
+ intensity = math.sqrt(impulse.x**2 + impulse.y**2 + impulse.z**2)
+ self.history.append((event.frame, intensity))
+ if len(self.history) > 4000:
+ self.history.pop(0)
+
+
+# ==============================================================================
+# -- LaneInvasionSensor --------------------------------------------------------
+# ==============================================================================
+
+
+class LaneInvasionSensor(object):
+ def __init__(self, parent_actor, hud):
+ self.sensor = None
+
+ # If the spawn object is not a vehicle, we cannot use the Lane Invasion Sensor
+ if parent_actor.type_id.startswith("vehicle."):
+ self._parent = parent_actor
+ self.hud = hud
+ world = self._parent.get_world()
+ bp = world.get_blueprint_library().find('sensor.other.lane_invasion')
+ self.sensor = world.spawn_actor(bp, carla.Transform(), attach_to=self._parent)
+ # We need to pass the lambda a weak reference to self to avoid circular
+ # reference.
+ weak_self = weakref.ref(self)
+ self.sensor.listen(lambda event: LaneInvasionSensor._on_invasion(weak_self, event))
+
+ @staticmethod
+ def _on_invasion(weak_self, event):
+ self = weak_self()
+ if not self:
+ return
+ lane_types = set(x.type for x in event.crossed_lane_markings)
+ text = ['%r' % str(x).split()[-1] for x in lane_types]
+ self.hud.notification('Crossed line %s' % ' and '.join(text))
+
+
+# ==============================================================================
+# -- GnssSensor ----------------------------------------------------------------
+# ==============================================================================
+
+
+class GnssSensor(object):
+ def __init__(self, parent_actor):
+ self.sensor = None
+ self._parent = parent_actor
+ self.lat = 0.0
+ self.lon = 0.0
+ world = self._parent.get_world()
+ bp = world.get_blueprint_library().find('sensor.other.gnss')
+ self.sensor = world.spawn_actor(bp, carla.Transform(carla.Location(x=1.0, z=2.8)), attach_to=self._parent)
+ # We need to pass the lambda a weak reference to self to avoid circular
+ # reference.
+ weak_self = weakref.ref(self)
+ self.sensor.listen(lambda event: GnssSensor._on_gnss_event(weak_self, event))
+
+ @staticmethod
+ def _on_gnss_event(weak_self, event):
+ self = weak_self()
+ if not self:
+ return
+ self.lat = event.latitude
+ self.lon = event.longitude
+
+
+# ==============================================================================
+# -- IMUSensor -----------------------------------------------------------------
+# ==============================================================================
+
+
+class IMUSensor(object):
+ def __init__(self, parent_actor):
+ self.sensor = None
+ self._parent = parent_actor
+ self.accelerometer = (0.0, 0.0, 0.0)
+ self.gyroscope = (0.0, 0.0, 0.0)
+ self.compass = 0.0
+ world = self._parent.get_world()
+ bp = world.get_blueprint_library().find('sensor.other.imu')
+ self.sensor = world.spawn_actor(
+ bp, carla.Transform(), attach_to=self._parent)
+ # We need to pass the lambda a weak reference to self to avoid circular
+ # reference.
+ weak_self = weakref.ref(self)
+ self.sensor.listen(
+ lambda sensor_data: IMUSensor._IMU_callback(weak_self, sensor_data))
+
+ @staticmethod
+ def _IMU_callback(weak_self, sensor_data):
+ self = weak_self()
+ if not self:
+ return
+ limits = (-99.9, 99.9)
+ self.accelerometer = (
+ max(limits[0], min(limits[1], sensor_data.accelerometer.x)),
+ max(limits[0], min(limits[1], sensor_data.accelerometer.y)),
+ max(limits[0], min(limits[1], sensor_data.accelerometer.z)))
+ self.gyroscope = (
+ max(limits[0], min(limits[1], math.degrees(sensor_data.gyroscope.x))),
+ max(limits[0], min(limits[1], math.degrees(sensor_data.gyroscope.y))),
+ max(limits[0], min(limits[1], math.degrees(sensor_data.gyroscope.z))))
+ self.compass = math.degrees(sensor_data.compass)
+
+
+# ==============================================================================
+# -- RadarSensor ---------------------------------------------------------------
+# ==============================================================================
+
+
+class RadarSensor(object):
+ def __init__(self, parent_actor):
+ self.sensor = None
+ self._parent = parent_actor
+ bound_x = 0.5 + self._parent.bounding_box.extent.x
+ bound_y = 0.5 + self._parent.bounding_box.extent.y
+ bound_z = 0.5 + self._parent.bounding_box.extent.z
+
+ self.velocity_range = 7.5 # m/s
+ world = self._parent.get_world()
+ self.debug = world.debug
+ bp = world.get_blueprint_library().find('sensor.other.radar')
+ bp.set_attribute('horizontal_fov', str(35))
+ bp.set_attribute('vertical_fov', str(20))
+ self.sensor = world.spawn_actor(
+ bp,
+ carla.Transform(
+ carla.Location(x=bound_x + 0.05, z=bound_z+0.05),
+ carla.Rotation(pitch=5)),
+ attach_to=self._parent)
+ # We need a weak reference to self to avoid circular reference.
+ weak_self = weakref.ref(self)
+ self.sensor.listen(
+ lambda radar_data: RadarSensor._Radar_callback(weak_self, radar_data))
+
+ @staticmethod
+ def _Radar_callback(weak_self, radar_data):
+ self = weak_self()
+ if not self:
+ return
+ # To get a numpy [[vel, altitude, azimuth, depth],...[,,,]]:
+ # points = np.frombuffer(radar_data.raw_data, dtype=np.dtype('f4'))
+ # points = np.reshape(points, (len(radar_data), 4))
+
+ current_rot = radar_data.transform.rotation
+ for detect in radar_data:
+ azi = math.degrees(detect.azimuth)
+ alt = math.degrees(detect.altitude)
+ # The 0.25 adjusts a bit the distance so the dots can
+ # be properly seen
+ fw_vec = carla.Vector3D(x=detect.depth - 0.25)
+ carla.Transform(
+ carla.Location(),
+ carla.Rotation(
+ pitch=current_rot.pitch + alt,
+ yaw=current_rot.yaw + azi,
+ roll=current_rot.roll)).transform(fw_vec)
+
+ def clamp(min_v, max_v, value):
+ return max(min_v, min(value, max_v))
+
+ norm_velocity = detect.velocity / self.velocity_range # range [-1, 1]
+ r = int(clamp(0.0, 1.0, 1.0 - norm_velocity) * 255.0)
+ g = int(clamp(0.0, 1.0, 1.0 - abs(norm_velocity)) * 255.0)
+ b = int(abs(clamp(- 1.0, 0.0, - 1.0 - norm_velocity)) * 255.0)
+ self.debug.draw_point(
+ radar_data.transform.location + fw_vec,
+ size=0.075,
+ life_time=0.06,
+ persistent_lines=False,
+ color=carla.Color(r, g, b))
+
+# ==============================================================================
+# -- CameraManager -------------------------------------------------------------
+# ==============================================================================
+
+
+class CameraManager(object):
+ def __init__(self, parent_actor, hud, gamma_correction):
+ self.sensor = None
+ self.surface = None
+ self._parent = parent_actor
+ self.hud = hud
+ self.recording = False
+ self.output_texture_id = 0
+ bound_x = 0.5 + self._parent.bounding_box.extent.x
+ bound_y = 0.5 + self._parent.bounding_box.extent.y
+ bound_z = 0.5 + self._parent.bounding_box.extent.z
+ Attachment = carla.AttachmentType
+
+ if not self._parent.type_id.startswith("walker.pedestrian"):
+ self._camera_transforms = [
+ (carla.Transform(carla.Location(x=-2.0*bound_x, y=+0.0*bound_y, z=2.0*bound_z), carla.Rotation(pitch=8.0)), Attachment.SpringArm),
+ (carla.Transform(carla.Location(x=+0.8*bound_x, y=+0.0*bound_y, z=1.3*bound_z)), Attachment.Rigid),
+ (carla.Transform(carla.Location(x=+1.9*bound_x, y=+1.0*bound_y, z=1.2*bound_z)), Attachment.SpringArm),
+ (carla.Transform(carla.Location(x=-2.8*bound_x, y=+0.0*bound_y, z=4.6*bound_z), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
+ (carla.Transform(carla.Location(x=-1.0, y=-1.0*bound_y, z=0.4*bound_z)), Attachment.Rigid)]
+ else:
+ self._camera_transforms = [
+ (carla.Transform(carla.Location(x=-2.5, z=0.0), carla.Rotation(pitch=-8.0)), Attachment.SpringArm),
+ (carla.Transform(carla.Location(x=1.6, z=1.7)), Attachment.Rigid),
+ (carla.Transform(carla.Location(x=2.5, y=0.5, z=0.0), carla.Rotation(pitch=-8.0)), Attachment.SpringArm),
+ (carla.Transform(carla.Location(x=-4.0, z=2.0), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
+ (carla.Transform(carla.Location(x=0, y=-2.5, z=-0.0), carla.Rotation(yaw=90.0)), Attachment.Rigid)]
+
+ self.transform_index = 1
+ self.sensors = [
+ ['sensor.camera.rgb', cc.Raw, 'Camera RGB', {}],
+ ['sensor.camera.depth', cc.Raw, 'Camera Depth (Raw)', {}],
+ ['sensor.camera.depth', cc.Depth, 'Camera Depth (Gray Scale)', {}],
+ ['sensor.camera.depth', cc.LogarithmicDepth, 'Camera Depth (Logarithmic Gray Scale)', {}],
+ ['sensor.camera.semantic_segmentation', cc.Raw, 'Camera Semantic Segmentation (Raw)', {}],
+ ['sensor.camera.semantic_segmentation', cc.CityScapesPalette, 'Camera Semantic Segmentation (CityScapes Palette)', {}],
+ ['sensor.camera.instance_segmentation', cc.CityScapesPalette, 'Camera Instance Segmentation (CityScapes Palette)', {}],
+ ['sensor.camera.instance_segmentation', cc.Raw, 'Camera Instance Segmentation (Raw)', {}],
+ ['sensor.lidar.ray_cast', None, 'Lidar (Ray-Cast)', {'range': '50'}],
+ ['sensor.camera.dvs', cc.Raw, 'Dynamic Vision Sensor', {}],
+ ['sensor.camera.rgb', cc.Raw, 'Camera RGB Distorted',
+ {'lens_circle_multiplier': '3.0',
+ 'lens_circle_falloff': '3.0',
+ 'chromatic_aberration_intensity': '0.5',
+ 'chromatic_aberration_offset': '0'}],
+ ['sensor.camera.optical_flow', cc.Raw, 'Optical Flow', {}],
+ ['sensor.camera.normals', cc.Raw, 'Camera Normals', {}],
+ ]
+ world = self._parent.get_world()
+ bp_library = world.get_blueprint_library()
+ for item in self.sensors:
+ bp = bp_library.find(item[0])
+ if item[0].startswith('sensor.camera'):
+ bp.set_attribute('image_size_x', str(hud.dim[0]))
+ bp.set_attribute('image_size_y', str(hud.dim[1]))
+ if bp.has_attribute('gamma'):
+ bp.set_attribute('gamma', str(gamma_correction))
+ for attr_name, attr_value in item[3].items():
+ bp.set_attribute(attr_name, attr_value)
+ elif item[0].startswith('sensor.lidar'):
+ self.lidar_range = 50
+
+ for attr_name, attr_value in item[3].items():
+ bp.set_attribute(attr_name, attr_value)
+ if attr_name == 'range':
+ self.lidar_range = float(attr_value)
+
+ item.append(bp)
+ self.index = None
+
+ def toggle_camera(self):
+ self.transform_index = (self.transform_index + 1) % len(self._camera_transforms)
+ self.set_sensor(self.index, notify=False, force_respawn=True)
+
+ def set_sensor(self, index, notify=True, force_respawn=False):
+ index = index % len(self.sensors)
+ needs_respawn = True if self.index is None else \
+ (force_respawn or (self.sensors[index][2] != self.sensors[self.index][2]))
+ if needs_respawn:
+ if self.sensor is not None:
+ self.sensor.destroy()
+ self.surface = None
+ self.sensor = self._parent.get_world().spawn_actor(
+ self.sensors[index][-1],
+ self._camera_transforms[self.transform_index][0],
+ attach_to=self._parent,
+ attachment_type=self._camera_transforms[self.transform_index][1])
+ # We need to pass the lambda a weak reference to self to avoid
+ # circular reference.
+ weak_self = weakref.ref(self)
+ self.sensor.listen(
+ lambda image:
+ CameraManager._parse_image(weak_self, image))
+ if notify:
+ self.hud.notification(self.sensors[index][2])
+ self.index = index
+
+ def next_sensor(self):
+ self.set_sensor(self.index + 1)
+
+ def next_gbuffer(self):
+ weak_self = weakref.ref(self)
+ if self.output_texture_id != 0:
+ self.sensor.stop_gbuffer(self.output_texture_id - 1)
+ self.output_texture_id = (self.output_texture_id + 1) % len(gbuffer_names)
+ if self.output_texture_id != 0:
+ self.sensor.listen_to_gbuffer(
+ self.output_texture_id - 1,
+ lambda image, index = self.output_texture_id: # Need to capture the output_texture_id by value.
+ CameraManager._parse_image(weak_self, image, index))
+
+ def toggle_recording(self):
+ self.recording = not self.recording
+ self.hud.notification('Recording %s' % ('On' if self.recording else 'Off'))
+
+ def render(self, display):
+ if self.surface is not None:
+ display.blit(self.surface, (0, 0))
+
+ @staticmethod
+ def _parse_image(weak_self, image, output_texture_id = 0):
+ self = weak_self()
+ if self.output_texture_id != output_texture_id:
+ return
+ if not self:
+ return
+ if self.sensors[self.index][0].startswith('sensor.lidar'):
+ points = np.frombuffer(image.raw_data, dtype=np.dtype('f4'))
+ points = np.reshape(points, (int(points.shape[0] / 4), 4))
+ lidar_data = np.array(points[:, :2])
+ lidar_data *= min(self.hud.dim) / (2.0 * self.lidar_range)
+ lidar_data += (0.5 * self.hud.dim[0], 0.5 * self.hud.dim[1])
+ lidar_data = np.fabs(lidar_data) # pylint: disable=E1111
+ lidar_data = lidar_data.astype(np.int32)
+ lidar_data = np.reshape(lidar_data, (-1, 2))
+ lidar_img_size = (self.hud.dim[0], self.hud.dim[1], 3)
+ lidar_img = np.zeros((lidar_img_size), dtype=np.uint8)
+ lidar_img[tuple(lidar_data.T)] = (255, 255, 255)
+ self.surface = pygame.surfarray.make_surface(lidar_img)
+ elif self.sensors[self.index][0].startswith('sensor.camera.dvs'):
+ # Example of converting the raw_data from a carla.DVSEventArray
+ # sensor into a NumPy array and using it as an image
+ dvs_events = np.frombuffer(image.raw_data, dtype=np.dtype([
+ ('x', np.uint16), ('y', np.uint16), ('t', np.int64), ('pol', np.bool)]))
+ dvs_img = np.zeros((image.height, image.width, 3), dtype=np.uint8)
+ # Blue is positive, red is negative
+ dvs_img[dvs_events[:]['y'], dvs_events[:]['x'], dvs_events[:]['pol'] * 2] = 255
+ self.surface = pygame.surfarray.make_surface(dvs_img.swapaxes(0, 1))
+ elif self.sensors[self.index][0].startswith('sensor.camera.optical_flow'):
+ image = image.get_color_coded_flow()
+ array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
+ array = np.reshape(array, (image.height, image.width, 4))
+ array = array[:, :, :3]
+ array = array[:, :, ::-1]
+ self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
+ else:
+ image.convert(self.sensors[self.index][1])
+ array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
+ array = np.reshape(array, (image.height, image.width, 4))
+ array = array[:, :, :3]
+ array = array[:, :, ::-1]
+ self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
+ if self.recording:
+ image.save_to_disk('_out/%08d' % image.frame)
+
+
+# ==============================================================================
+# -- game_loop() ---------------------------------------------------------------
+# ==============================================================================
+
+
+def game_loop(args):
+ pygame.init()
+ pygame.font.init()
+ world = None
+ original_settings = None
+
+ try:
+ client = carla.Client(args.host, args.port)
+ client.set_timeout(20.0)
+
+ sim_world = client.get_world()
+ if args.sync:
+ original_settings = sim_world.get_settings()
+ settings = sim_world.get_settings()
+ if not settings.synchronous_mode:
+ settings.synchronous_mode = True
+ settings.fixed_delta_seconds = 0.05
+ sim_world.apply_settings(settings)
+
+ traffic_manager = client.get_trafficmanager()
+ traffic_manager.set_synchronous_mode(True)
+
+ if args.autopilot and not sim_world.get_settings().synchronous_mode:
+ print("WARNING: You are currently in asynchronous mode and could "
+ "experience some issues with the traffic simulation")
+
+ display = pygame.display.set_mode(
+ (args.width, args.height),
+ pygame.HWSURFACE | pygame.DOUBLEBUF)
+ display.fill((0,0,0))
+ pygame.display.flip()
+
+ hud = HUD(args.width, args.height)
+ world = World(sim_world, hud, args)
+ controller = KeyboardControl(world, args.autopilot)
+
+ if args.sync:
+ sim_world.tick()
+ else:
+ sim_world.wait_for_tick()
+
+ clock = pygame.time.Clock()
+ while True:
+ if args.sync:
+ sim_world.tick()
+ clock.tick_busy_loop(60)
+ if controller.parse_events(client, world, clock, args.sync):
+ return
+ world.tick(clock)
+ world.render(display)
+ pygame.display.flip()
+
+ finally:
+
+ if original_settings:
+ sim_world.apply_settings(original_settings)
+
+ if (world and world.recording_enabled):
+ client.stop_recorder()
+
+ if world is not None:
+ world.destroy()
+
+ pygame.quit()
+
+
+# ==============================================================================
+# -- main() --------------------------------------------------------------------
+# ==============================================================================
+
+
+def main():
+ argparser = argparse.ArgumentParser(
+ description='CARLA Manual Control Client')
+ argparser.add_argument(
+ '-v', '--verbose',
+ action='store_true',
+ dest='debug',
+ help='print debug information')
+ argparser.add_argument(
+ '--host',
+ metavar='H',
+ default='127.0.0.1',
+ help='IP of the host server (default: 127.0.0.1)')
+ argparser.add_argument(
+ '-p', '--port',
+ metavar='P',
+ default=2000,
+ type=int,
+ help='TCP port to listen to (default: 2000)')
+ argparser.add_argument(
+ '-a', '--autopilot',
+ action='store_true',
+ help='enable autopilot')
+ argparser.add_argument(
+ '--res',
+ metavar='WIDTHxHEIGHT',
+ default='1280x720',
+ help='window resolution (default: 1280x720)')
+ argparser.add_argument(
+ '--filter',
+ metavar='PATTERN',
+ default='vehicle.*',
+ help='actor filter (default: "vehicle.*")')
+ argparser.add_argument(
+ '--generation',
+ metavar='G',
+ default='2',
+ help='restrict to certain actor generation (values: "1","2","All" - default: "2")')
+ argparser.add_argument(
+ '--rolename',
+ metavar='NAME',
+ default='hero',
+ help='actor role name (default: "hero")')
+ argparser.add_argument(
+ '--gamma',
+ default=2.2,
+ type=float,
+ help='Gamma correction of the camera (default: 2.2)')
+ argparser.add_argument(
+ '--sync',
+ action='store_true',
+ help='Activate synchronous mode execution')
+ args = argparser.parse_args()
+
+ args.width, args.height = [int(x) for x in args.res.split('x')]
+
+ log_level = logging.DEBUG if args.debug else logging.INFO
+ logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
+
+ logging.info('listening to server %s:%s', args.host, args.port)
+
+ print(__doc__)
+
+ try:
+
+ game_loop(args)
+
+ except KeyboardInterrupt:
+ print('\nCancelled by user. Bye!')
+
+
+if __name__ == '__main__':
+
+ main()
diff --git a/PythonAPI/examples/tutorial_gbuffer.py b/PythonAPI/examples/tutorial_gbuffer.py
new file mode 100644
index 000000000..6373f7f2a
--- /dev/null
+++ b/PythonAPI/examples/tutorial_gbuffer.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
+# Barcelona (UAB).
+#
+# This work is licensed under the terms of the MIT license.
+# For a copy, see <https://opensource.org/licenses/MIT>.
+
+import glob
+import os
+import sys
+
+try:
+ sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
+ sys.version_info.major,
+ sys.version_info.minor,
+ 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
+except IndexError:
+ pass
+
+import carla
+
+import random
+import time
+
+
+
+def main():
+ actor_list = []
+
+ # In this tutorial script, we are going to add a vehicle to the simulation
+ # and let it drive in autopilot. We will also create a camera attached to
+ # that vehicle, and save all the images generated by the camera to disk.
+ # Additionally, we will save all of the gbuffer textures for each frame.
+
+ try:
+ # First of all, we need to create the client that will send the requests
+ # to the simulator. Here we'll assume the simulator is accepting
+ # requests in the localhost at port 2000.
+ client = carla.Client('127.0.0.1', 2000)
+ client.set_timeout(2.0)
+
+ # Once we have a client we can retrieve the world that is currently
+ # running.
+ world = client.get_world()
+
+ # The world contains the list blueprints that we can use for adding new
+ # actors into the simulation.
+ blueprint_library = world.get_blueprint_library()
+
+ # Now let's filter all the blueprints of type 'vehicle' and choose one
+ # at random.
+ bp = random.choice(blueprint_library.filter('vehicle'))
+
+ # A blueprint contains the list of attributes that define a vehicle's
+ # instance, we can read them and modify some of them. For instance,
+ # let's randomize its color.
+ if bp.has_attribute('color'):
+ color = random.choice(bp.get_attribute('color').recommended_values)
+ bp.set_attribute('color', color)
+
+ # Now we need to give an initial transform to the vehicle. We choose a
+ # random transform from the list of recommended spawn points of the map.
+ transform = world.get_map().get_spawn_points()[0]
+
+ # So let's tell the world to spawn the vehicle.
+ vehicle = world.spawn_actor(bp, transform)
+
+ # It is important to note that the actors we create won't be destroyed
+ # unless we call their "destroy" function. If we fail to call "destroy"
+ # they will stay in the simulation even after we quit the Python script.
+ # For that reason, we are storing all the actors we create so we can
+ # destroy them afterwards.
+ actor_list.append(vehicle)
+ print('created %s' % vehicle.type_id)
+
+ # Let's put the vehicle to drive around.
+ vehicle.set_autopilot(True)
+
+ # Let's add now a "rgb" camera attached to the vehicle. Note that the
+ # transform we give here is now relative to the vehicle.
+ camera_bp = blueprint_library.find('sensor.camera.rgb')
+ camera_bp.set_attribute('image_size_x', '1920')
+ camera_bp.set_attribute('image_size_y', '1080')
+ camera_transform = carla.Transform(carla.Location(x=1.5, z=2.4))
+ camera = world.spawn_actor(camera_bp, camera_transform, attach_to=vehicle)
+ actor_list.append(camera)
+ print('created %s' % camera.type_id)
+
+ # Register a callback for whenever a new frame is available. This step is
+ # currently required to correctly receive the gbuffer textures, as it is
+ # used to determine whether the sensor is active.
+ camera.listen(lambda image: image.save_to_disk('_out/FinalColor-%06d.png' % image.frame))
+
+ # Here we will register the callbacks for each gbuffer texture.
+ # The function "listen_to_gbuffer" behaves like the regular listen function,
+ # but you must first pass it the ID of the desired gbuffer texture.
+ camera.listen_to_gbuffer(carla.GBufferTextureID.SceneColor, lambda image: image.save_to_disk('_out/SceneColor-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.SceneDepth, lambda image: image.save_to_disk('_out/SceneDepth-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.SceneStencil, lambda image: image.save_to_disk('_out/SceneStencil-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.GBufferA, lambda image: image.save_to_disk('_out/GBufferA-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.GBufferB, lambda image: image.save_to_disk('_out/GBufferB-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.GBufferC, lambda image: image.save_to_disk('_out/GBufferC-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.GBufferD, lambda image: image.save_to_disk('_out/GBufferD-%06d.png' % image.frame))
+ # Note that some gbuffer textures may not be available for a particular scene.
+ # For example, the textures E and F are likely unavailable in this example,
+ # which will result in them being sent as black images.
+ camera.listen_to_gbuffer(carla.GBufferTextureID.GBufferE, lambda image: image.save_to_disk('_out/GBufferE-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.GBufferF, lambda image: image.save_to_disk('_out/GBufferF-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.Velocity, lambda image: image.save_to_disk('_out/Velocity-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.SSAO, lambda image: image.save_to_disk('_out/SSAO-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.CustomDepth, lambda image: image.save_to_disk('_out/CustomDepth-%06d.png' % image.frame))
+ camera.listen_to_gbuffer(carla.GBufferTextureID.CustomStencil, lambda image: image.save_to_disk('_out/CustomStencil-%06d.png' % image.frame))
+
+ time.sleep(10)
+
+ finally:
+
+ print('destroying actors')
+ camera.destroy()
+ client.apply_batch([carla.command.DestroyActor(x) for x in actor_list])
+ print('done.')
+
+
+if __name__ == '__main__':
+
+ main()
diff --git a/Unreal/CarlaUE4/Config/.gitignore b/Unreal/CarlaUE4/Config/.gitignore
new file mode 100644
index 000000000..9c5b38c51
--- /dev/null
+++ b/Unreal/CarlaUE4/Config/.gitignore
@@ -0,0 +1 @@
+OptionalModules.ini
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Carla.Build.cs b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Carla.Build.cs
index 9f66588f5..68c09e087 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Carla.Build.cs
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Carla.Build.cs
@@ -71,6 +71,7 @@ public class Carla : ModuleRules
"Core",
"RenderCore",
"RHI",
+ "Renderer",
"ProceduralMeshComponent"
// ... add other public dependencies that you statically link with here ...
}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/ImageUtil.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/ImageUtil.cpp
new file mode 100644
index 000000000..21e66b4e5
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/ImageUtil.cpp
@@ -0,0 +1,146 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#include "Carla/Sensor/ImageUtil.h"
+#include "Runtime/RHI/Public/RHISurfaceDataConversion.h"
+
+
+
+namespace ImageUtil
+{
+ void DecodePixelsByFormat(
+ void* PixelData,
+ int32 SourcePitch,
+ FIntPoint SourceExtent,
+ FIntPoint DestinationExtent,
+ EPixelFormat Format,
+ FReadSurfaceDataFlags Flags,
+ TArrayView Out)
+ {
+ SourcePitch *= GPixelFormats[Format].BlockBytes;
+ auto OutPixelCount = DestinationExtent.X * DestinationExtent.Y;
+ switch (Format)
+ {
+ case PF_G16:
+ case PF_R16_UINT:
+ case PF_R16_SINT:
+ // Shadow maps
+ ConvertRawR16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_R8G8B8A8:
+ ConvertRawR8G8B8A8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_B8G8R8A8:
+ ConvertRawB8G8R8A8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_A2B10G10R10:
+ ConvertRawA2B10G10R10DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_FloatRGBA:
+ case PF_R16G16B16A16_UNORM:
+ case PF_R16G16B16A16_SNORM:
+ ConvertRawR16G16B16A16FDataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_FloatR11G11B10:
+ ConvertRawRR11G11B10DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_A32B32G32R32F:
+ ConvertRawR32G32B32A32DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_A16B16G16R16:
+ ConvertRawR16G16B16A16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_G16R16:
+ ConvertRawR16G16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_DepthStencil: // Depth / Stencil
+ ConvertRawD32S8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_X24_G8: // Depth Stencil
+ ConvertRawR24G8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_R32_FLOAT: // Depth Stencil
+ ConvertRawR32DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_R16G16B16A16_UINT:
+ case PF_R16G16B16A16_SINT:
+ ConvertRawR16G16B16A16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ default:
+ UE_LOG(LogCarla, Warning, TEXT("Unsupported format %llu"), (unsigned long long)Format);
+ check(false);
+ break;
+ }
+ }
+
+ void DecodePixelsByFormat(
+ void* PixelData,
+ int32 SourcePitch,
+ FIntPoint SourceExtent,
+ FIntPoint DestinationExtent,
+ EPixelFormat Format,
+ FReadSurfaceDataFlags Flags,
+ TArrayView Out)
+ {
+ SourcePitch *= GPixelFormats[Format].BlockBytes;
+ auto OutPixelCount = DestinationExtent.X * DestinationExtent.Y;
+ switch (Format)
+ {
+ case PF_G16:
+ case PF_R16_UINT:
+ case PF_R16_SINT:
+ // Shadow maps
+ ConvertRawR16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_R8G8B8A8:
+ ConvertRawR8G8B8A8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_B8G8R8A8:
+ ConvertRawB8G8R8A8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_A2B10G10R10:
+ ConvertRawR10G10B10A2DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_FloatRGBA:
+ case PF_R16G16B16A16_UNORM:
+ case PF_R16G16B16A16_SNORM:
+ ConvertRawR16G16B16A16FDataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), false);
+ break;
+ case PF_FloatR11G11B10:
+ ConvertRawR11G11B10DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), false);
+ break;
+ case PF_A32B32G32R32F:
+ ConvertRawR32G32B32A32DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), false);
+ break;
+ case PF_A16B16G16R16:
+ ConvertRawR16G16B16A16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_G16R16:
+ ConvertRawR16G16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_DepthStencil: // Depth / Stencil
+ ConvertRawD32S8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_X24_G8: // Depth / Stencil
+ ConvertRawR24G8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_R32_FLOAT: // Depth
+ ConvertRawR32DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
+ break;
+ case PF_R16G16B16A16_UINT:
+ case PF_R16G16B16A16_SINT:
+ ConvertRawR16G16B16A16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ case PF_G8:
+ ConvertRawR8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
+ break;
+ default:
+ UE_LOG(LogCarla, Warning, TEXT("Unsupported format %llu"), (unsigned long long)Format);
+ check(false);
+ break;
+ }
+ }
+}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/ImageUtil.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/ImageUtil.h
new file mode 100644
index 000000000..1afa6b649
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/ImageUtil.h
@@ -0,0 +1,33 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#pragma once
+#include "CoreMinimal.h"
+
+
+
+class FRHIGPUTextureReadback;
+
+namespace ImageUtil
+{
+ void DecodePixelsByFormat(
+ void* PixelData,
+ int32 SourcePitch,
+ FIntPoint SourceExtent,
+ FIntPoint DestinationExtent,
+ EPixelFormat Format,
+ FReadSurfaceDataFlags Flags,
+ TArrayView Out);
+
+ void DecodePixelsByFormat(
+ void* PixelData,
+ int32 SourcePitch,
+ FIntPoint SourceExtent,
+ FIntPoint DestinationExtent,
+ EPixelFormat Format,
+ FReadSurfaceDataFlags Flags,
+ TArrayView Out);
+}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/PixelReader.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/PixelReader.h
index 39feafdaa..ca6578393 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/PixelReader.h
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/PixelReader.h
@@ -69,8 +69,6 @@ public:
template
static void SendPixelsInRenderThread(TSensor &Sensor, bool use16BitFormat = false, std::function(void *, uint32)> Conversor = {});
-private:
-
/// Copy the pixels in @a RenderTarget into @a Buffer.
///
/// @pre To be called from render-thread.
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.cpp
index 71de739fb..d6120bb60 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.cpp
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.cpp
@@ -13,17 +13,17 @@
FActorDefinition ASceneCaptureCamera::GetSensorDefinition()
{
- constexpr bool bEnableModifyingPostProcessEffects = true;
- return UActorBlueprintFunctionLibrary::MakeCameraDefinition(
- TEXT("rgb"),
- bEnableModifyingPostProcessEffects);
+ constexpr bool bEnableModifyingPostProcessEffects = true;
+ return UActorBlueprintFunctionLibrary::MakeCameraDefinition(
+ TEXT("rgb"),
+ bEnableModifyingPostProcessEffects);
}
-ASceneCaptureCamera::ASceneCaptureCamera(const FObjectInitializer &ObjectInitializer)
- : Super(ObjectInitializer)
+ASceneCaptureCamera::ASceneCaptureCamera(const FObjectInitializer& ObjectInitializer)
+ : Super(ObjectInitializer)
{
- AddPostProcessingMaterial(
- TEXT("Material'/Carla/PostProcessingMaterials/PhysicLensDistortion.PhysicLensDistortion'"));
+ AddPostProcessingMaterial(
+ TEXT("Material'/Carla/PostProcessingMaterials/PhysicLensDistortion.PhysicLensDistortion'"));
}
void ASceneCaptureCamera::BeginPlay()
@@ -62,3 +62,8 @@ void ASceneCaptureCamera::PostPhysTick(UWorld *World, ELevelTick TickType, float
);
FPixelReader::SendPixelsInRenderThread(*this);
}
+
+void ASceneCaptureCamera::SendGBufferTextures(FGBufferRequest& GBuffer)
+{
+ SendGBufferTexturesInternal(*this, GBuffer);
+}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.h
index 9c80fd480..55fbce005 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.h
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureCamera.h
@@ -26,6 +26,8 @@ public:
ASceneCaptureCamera(const FObjectInitializer &ObjectInitializer);
protected:
+
+ virtual void SendGBufferTextures(FGBufferRequest& GBuffer) override;
void BeginPlay() override;
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.cpp
index 80bab5838..595dd0f26 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.cpp
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.cpp
@@ -6,20 +6,11 @@
#include "Carla.h"
#include "Carla/Sensor/SceneCaptureSensor.h"
-
#include "Carla/Game/CarlaStatics.h"
-#include "Async/Async.h"
-#include "Components/DrawFrustumComponent.h"
-#include "Components/SceneCaptureComponent2D.h"
-#include "Components/StaticMeshComponent.h"
-#include "ContentStreaming.h"
-#include "Engine/Classes/Engine/Scene.h"
-#include "Engine/TextureRenderTarget2D.h"
-#include "HAL/UnrealMemory.h"
-#include "HighResScreenshot.h"
-#include "Misc/CoreDelegates.h"
-#include "RHICommandList.h"
+#include
+#include
+#include
static auto SCENE_CAPTURE_COUNTER = 0u;
@@ -62,11 +53,12 @@ ASceneCaptureSensor::ASceneCaptureSensor(const FObjectInitializer &ObjectInitial
CaptureRenderTarget->AddressX = TextureAddress::TA_Clamp;
CaptureRenderTarget->AddressY = TextureAddress::TA_Clamp;
- CaptureComponent2D = CreateDefaultSubobject(
- FName(*FString::Printf(TEXT("SceneCaptureComponent2D_%d"), SCENE_CAPTURE_COUNTER)));
+ CaptureComponent2D = CreateDefaultSubobject(
+ FName(*FString::Printf(TEXT("USceneCaptureComponent2D_CARLA_%d"), SCENE_CAPTURE_COUNTER)));
+ check(CaptureComponent2D != nullptr);
+ CaptureComponent2D->ViewActor = this;
CaptureComponent2D->SetupAttachment(RootComponent);
CaptureComponent2D->PrimitiveRenderMode = ESceneCapturePrimitiveRenderMode::PRM_RenderScenePrimitives;
-
CaptureComponent2D->bCaptureOnMovement = false;
CaptureComponent2D->bCaptureEveryFrame = false;
CaptureComponent2D->bAlwaysPersistRenderingState = true;
@@ -456,8 +448,96 @@ float ASceneCaptureSensor::GetChromAberrOffset() const
void ASceneCaptureSensor::EnqueueRenderSceneImmediate() {
TRACE_CPUPROFILER_EVENT_SCOPE(ASceneCaptureSensor::EnqueueRenderSceneImmediate);
- // Creates an snapshot of the scene, requieres bCaptureEveryFrame = false.
- GetCaptureComponent2D()->CaptureScene();
+ // Equivalent to "CaptureComponent2D->CaptureScene" + (optional) GBuffer extraction.
+ CaptureSceneExtended();
+}
+
+constexpr const TCHAR* GBufferNames[] =
+{
+ TEXT("SceneColor"),
+ TEXT("SceneDepth"),
+ TEXT("SceneStencil"),
+ TEXT("GBufferA"),
+ TEXT("GBufferB"),
+ TEXT("GBufferC"),
+ TEXT("GBufferD"),
+ TEXT("GBufferE"),
+ TEXT("GBufferF"),
+ TEXT("Velocity"),
+ TEXT("SSAO"),
+ TEXT("CustomDepth"),
+ TEXT("CustomStencil"),
+};
+
+template
+static void CheckGBufferStream(T& GBufferStream, FGBufferRequest& GBuffer)
+{
+ GBufferStream.bIsUsed = GBufferStream.Stream.AreClientsListening();
+ if (GBufferStream.bIsUsed)
+ GBuffer.MarkAsRequested(ID);
+}
+
+static uint64 Prior = 0;
+
+void ASceneCaptureSensor::CaptureSceneExtended()
+{
+ auto GBufferPtr = MakeUnique();
+ auto& GBuffer = *GBufferPtr;
+
+ CheckGBufferStream(CameraGBuffers.SceneColor, GBuffer);
+ CheckGBufferStream(CameraGBuffers.SceneDepth, GBuffer);
+ CheckGBufferStream(CameraGBuffers.SceneStencil, GBuffer);
+ CheckGBufferStream(CameraGBuffers.GBufferA, GBuffer);
+ CheckGBufferStream(CameraGBuffers.GBufferB, GBuffer);
+ CheckGBufferStream(CameraGBuffers.GBufferC, GBuffer);
+ CheckGBufferStream(CameraGBuffers.GBufferD, GBuffer);
+ CheckGBufferStream(CameraGBuffers.GBufferE, GBuffer);
+ CheckGBufferStream(CameraGBuffers.GBufferF, GBuffer);
+ CheckGBufferStream(CameraGBuffers.Velocity, GBuffer);
+ CheckGBufferStream(CameraGBuffers.SSAO, GBuffer);
+ CheckGBufferStream(CameraGBuffers.CustomDepth, GBuffer);
+ CheckGBufferStream(CameraGBuffers.CustomStencil, GBuffer);
+
+ if (GBufferPtr->DesiredTexturesMask == 0)
+ {
+ // Creates a snapshot of the scene, requires bCaptureEveryFrame = false.
+ CaptureComponent2D->CaptureScene();
+ return;
+ }
+
+ if (Prior != GBufferPtr->DesiredTexturesMask)
+ UE_LOG(LogCarla, Verbose, TEXT("GBuffer selection changed (%llu)."), GBufferPtr->DesiredTexturesMask);
+
+ Prior = GBufferPtr->DesiredTexturesMask;
+ GBufferPtr->OwningActor = CaptureComponent2D->GetViewOwner();
+
+#define CARLA_GBUFFER_DISABLE_TAA // Temporarily disable TAA to avoid jitter.
+
+#ifdef CARLA_GBUFFER_DISABLE_TAA
+ bool bTAA = CaptureComponent2D->ShowFlags.TemporalAA;
+ if (bTAA) {
+ CaptureComponent2D->ShowFlags.TemporalAA = false;
+ }
+#endif
+
+ CaptureComponent2D->CaptureSceneWithGBuffer(GBuffer);
+
+#ifdef CARLA_GBUFFER_DISABLE_TAA
+ if (bTAA) {
+ CaptureComponent2D->ShowFlags.TemporalAA = true;
+ }
+#undef CARLA_GBUFFER_DISABLE_TAA
+#endif
+
+ AsyncTask(ENamedThreads::AnyHiPriThreadNormalTask, [this, GBuffer = MoveTemp(GBufferPtr)]() mutable
+ {
+ SendGBufferTextures(*GBuffer);
+ });
+}
+
+void ASceneCaptureSensor::SendGBufferTextures(FGBufferRequest& GBuffer)
+{
+ SendGBufferTexturesInternal(*this, GBuffer);
}
void ASceneCaptureSensor::BeginPlay()
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.h
index 1de3ca798..c263310b2 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.h
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SceneCaptureSensor.h
@@ -8,15 +8,112 @@
#include "Carla/Sensor/PixelReader.h"
#include "Carla/Sensor/Sensor.h"
+#include "Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.h"
+#include "Carla/Sensor/ImageUtil.h"
+
+#include "Async/Async.h"
+#include "Renderer/Public/GBufferView.h"
+
+#include
-#include "Runtime/RenderCore/Public/RenderCommandFence.h"
#include "SceneCaptureSensor.generated.h"
+
+
class UDrawFrustumComponent;
-class USceneCaptureComponent2D;
class UStaticMeshComponent;
class UTextureRenderTarget2D;
+
+
+struct FCameraGBufferUint8
+{
+ /// Prevent this sensor to be spawned by users.
+ using not_spawnable = void;
+
+ void SetDataStream(FDataStream InStream)
+ {
+ Stream = std::move(InStream);
+ }
+
+ /// Replace the Stream associated with this sensor.
+ void SetStream(FDataMultiStream InStream)
+ {
+ Stream = std::move(InStream);
+ }
+ /// Return the token that allows subscribing to this sensor's stream.
+ auto GetToken() const
+ {
+ bIsUsed = true;
+ return Stream.GetToken();
+ }
+
+ /// Dummy. Required for compatibility with other sensors only.
+ FTransform GetActorTransform() const
+ {
+ return {};
+ }
+ /// Return the FDataStream associated with this sensor.
+ ///
+ /// You need to provide a reference to self, this is necessary for template
+ /// deduction.
+ template
+ FAsyncDataStream GetDataStream(const SensorT &Self)
+ {
+ return Stream.MakeAsyncDataStream(Self, Self.GetEpisode().GetElapsedGameTime());
+ }
+
+ mutable bool bIsUsed = false;
+ FDataStream Stream;
+};
+
+
+
+struct FCameraGBufferFloat
+{
+ /// Prevent this sensor to be spawned by users.
+ using not_spawnable = void;
+
+ void SetDataStream(FDataStream InStream)
+ {
+ Stream = std::move(InStream);
+ }
+
+ /// Replace the Stream associated with this sensor.
+ void SetStream(FDataMultiStream InStream)
+ {
+ Stream = std::move(InStream);
+ }
+ /// Return the token that allows subscribing to this sensor's stream.
+ auto GetToken() const
+ {
+ bIsUsed = true;
+ return Stream.GetToken();
+ }
+ /// Dummy. Required for compatibility with other sensors only.
+ FTransform GetActorTransform() const
+ {
+ return {};
+ }
+ /// Return the FDataStream associated with this sensor.
+ ///
+ /// You need to provide a reference to self, this is necessary for template
+ /// deduction.
+ template
+ FAsyncDataStream GetDataStream(const SensorT &Self)
+ {
+ return Stream.MakeAsyncDataStream(Self, Self.GetEpisode().GetElapsedGameTime());
+ }
+
+ mutable bool bIsUsed = false;
+ FDataStream Stream;
+};
+
+
+
+
+
+
/// Base class for sensors using a USceneCaptureComponent2D for rendering the
/// scene. This class does not capture data, use
/// `FPixelReader::SendPixelsInRenderThread(*this)` in derived classes.
@@ -32,6 +129,7 @@ class CARLA_API ASceneCaptureSensor : public ASensor
friend class ACarlaGameModeBase;
friend class FPixelReader;
+ friend class FPixelReader2;
public:
@@ -304,7 +402,28 @@ public:
// FlushRenderingCommands();
}
+ struct
+ {
+ FCameraGBufferUint8 SceneColor;
+ FCameraGBufferUint8 SceneDepth;
+ FCameraGBufferUint8 SceneStencil;
+ FCameraGBufferUint8 GBufferA;
+ FCameraGBufferUint8 GBufferB;
+ FCameraGBufferUint8 GBufferC;
+ FCameraGBufferUint8 GBufferD;
+ FCameraGBufferUint8 GBufferE;
+ FCameraGBufferUint8 GBufferF;
+ FCameraGBufferUint8 Velocity;
+ FCameraGBufferUint8 SSAO;
+ FCameraGBufferUint8 CustomDepth;
+ FCameraGBufferUint8 CustomStencil;
+ } CameraGBuffers;
+
protected:
+
+ void CaptureSceneExtended();
+
+ virtual void SendGBufferTextures(FGBufferRequest& GBuffer);
virtual void BeginPlay() override;
@@ -321,7 +440,7 @@ protected:
/// Scene capture component.
UPROPERTY(EditAnywhere)
- USceneCaptureComponent2D *CaptureComponent2D = nullptr;
+ USceneCaptureComponent2D_CARLA *CaptureComponent2D = nullptr;
UPROPERTY(EditAnywhere)
float TargetGamma = 2.4f;
@@ -342,6 +461,109 @@ protected:
UPROPERTY(EditAnywhere)
bool bEnable16BitFormat = false;
- FRenderCommandFence RenderFence;
+private:
+
+ template <
+ typename SensorT,
+ typename CameraGBufferT>
+ static void SendGBuffer(
+ SensorT& Self,
+ CameraGBufferT& CameraGBuffer,
+ FGBufferRequest& GBufferData,
+ EGBufferTextureID TextureID)
+ {
+ using PixelType = typename std::conditional<
+ std::is_same, FCameraGBufferUint8>::value,
+ FColor,
+ FLinearColor>::type;
+ FIntPoint ViewSize = {};
+ TArray Pixels;
+ if (GBufferData.WaitForTextureTransfer(TextureID))
+ {
+ TRACE_CPUPROFILER_EVENT_SCOPE_STR("GBuffer Decode");
+ void* PixelData;
+ int32 SourcePitch;
+ FIntPoint SourceExtent;
+ GBufferData.MapTextureData(TextureID, PixelData, SourcePitch, SourceExtent);
+ ViewSize = GBufferData.ViewRect.Size();
+ auto Format = GBufferData.Readbacks[(size_t)TextureID]->GetFormat();
+ Pixels.AddUninitialized(ViewSize.X * ViewSize.Y);
+ FReadSurfaceDataFlags Flags = {};
+ Flags.SetLinearToGamma(true);
+ ImageUtil::DecodePixelsByFormat(PixelData, SourcePitch, SourceExtent, ViewSize, Format, Flags, Pixels);
+ GBufferData.UnmapTextureData(TextureID);
+ }
+ auto GBufferStream = CameraGBuffer.GetDataStream(Self);
+ auto Buffer = GBufferStream.PopBufferFromPool();
+ Buffer.copy_from(carla::sensor::SensorRegistry::get::type::header_offset, Pixels);
+ if (Buffer.empty()) {
+ return;
+ }
+ SCOPE_CYCLE_COUNTER(STAT_CarlaSensorStreamSend);
+ TRACE_CPUPROFILER_EVENT_SCOPE_STR("Stream Send");
+ GBufferStream.Send(
+ CameraGBuffer, std::move(Buffer),
+ ViewSize.X, ViewSize.Y,
+ Self.GetFOVAngle());
+ }
+
+protected:
+
+ template
+ void SendGBufferTexturesInternal(T& Self, FGBufferRequest& GBufferData)
+ {
+ for (size_t i = 0; i != FGBufferRequest::TextureCount; ++i)
+ {
+ if ((GBufferData.DesiredTexturesMask & (UINT64_C(1) << i)) == 0) {
+ continue;
+ }
+ auto& C = CameraGBuffers;
+ EGBufferTextureID ID = (EGBufferTextureID)i;
+ switch (ID)
+ {
+ case EGBufferTextureID::SceneColor:
+ SendGBuffer(Self, C.SceneColor, GBufferData, ID);
+ break;
+ case EGBufferTextureID::SceneDepth:
+ SendGBuffer(Self, C.SceneDepth, GBufferData, ID);
+ break;
+ case EGBufferTextureID::SceneStencil:
+ SendGBuffer(Self, C.SceneStencil, GBufferData, ID);
+ break;
+ case EGBufferTextureID::GBufferA:
+ SendGBuffer(Self, C.GBufferA, GBufferData, ID);
+ break;
+ case EGBufferTextureID::GBufferB:
+ SendGBuffer(Self, C.GBufferB, GBufferData, ID);
+ break;
+ case EGBufferTextureID::GBufferC:
+ SendGBuffer(Self, C.GBufferC, GBufferData, ID);
+ break;
+ case EGBufferTextureID::GBufferD:
+ SendGBuffer(Self, C.GBufferD, GBufferData, ID);
+ break;
+ case EGBufferTextureID::GBufferE:
+ SendGBuffer(Self, C.GBufferE, GBufferData, ID);
+ break;
+ case EGBufferTextureID::GBufferF:
+ SendGBuffer(Self, C.GBufferF, GBufferData, ID);
+ break;
+ case EGBufferTextureID::Velocity:
+ SendGBuffer(Self, C.Velocity, GBufferData, ID);
+ break;
+ case EGBufferTextureID::SSAO:
+ SendGBuffer(Self, C.SSAO, GBufferData, ID);
+ break;
+ case EGBufferTextureID::CustomDepth:
+ SendGBuffer(Self, C.CustomDepth, GBufferData, ID);
+ break;
+ case EGBufferTextureID::CustomStencil:
+ SendGBuffer(Self, C.CustomStencil, GBufferData, ID);
+ break;
+ default:
+ abort();
+ }
+ }
+ }
};
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/Sensor.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/Sensor.h
index 02b686459..b2e72bc77 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/Sensor.h
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/Sensor.h
@@ -81,18 +81,18 @@ public:
UFUNCTION(BlueprintCallable)
void SetSeed(int32 InSeed);
-protected:
-
- void PostActorCreated() override;
-
- void EndPlay(EEndPlayReason::Type EndPlayReason) override;
-
const UCarlaEpisode &GetEpisode() const
{
check(Episode != nullptr);
return *Episode;
}
+protected:
+
+ void PostActorCreated() override;
+
+ void EndPlay(EEndPlayReason::Type EndPlayReason) override;
+
/// Return the FDataStream associated with this sensor.
///
/// You need to provide a reference to self, this is necessary for template
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SensorFactory.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SensorFactory.cpp
index a191cd15b..8c6ecf5c6 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SensorFactory.cpp
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/SensorFactory.cpp
@@ -10,6 +10,7 @@
#include "Carla/Game/CarlaGameInstance.h"
#include "Carla/Game/CarlaStatics.h"
#include "Carla/Sensor/Sensor.h"
+#include "Carla/Sensor/SceneCaptureSensor.h"
#include
#include
@@ -137,6 +138,23 @@ FActorSpawnResult ASensorFactory::SpawnActor(
Sensor->SetEpisode(*Episode);
Sensor->Set(Description);
Sensor->SetDataStream(GameInstance->GetServer().OpenStream());
+ ASceneCaptureSensor * SceneCaptureSensor = Cast(Sensor);
+ if(SceneCaptureSensor)
+ {
+ SceneCaptureSensor->CameraGBuffers.SceneColor.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.SceneDepth.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.SceneStencil.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.GBufferA.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.GBufferB.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.GBufferC.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.GBufferD.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.GBufferE.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.GBufferF.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.Velocity.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.SSAO.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.CustomDepth.SetDataStream(GameInstance->GetServer().OpenStream());
+ SceneCaptureSensor->CameraGBuffers.CustomStencil.SetDataStream(GameInstance->GetServer().OpenStream());
+ }
}
UGameplayStatics::FinishSpawningActor(Sensor, Transform);
return FActorSpawnResult{Sensor};
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.cpp
new file mode 100644
index 000000000..6adf16b8f
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.cpp
@@ -0,0 +1,19 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#include "SceneCaptureComponent2D_CARLA.h"
+
+
+
+USceneCaptureComponent2D_CARLA::USceneCaptureComponent2D_CARLA(const FObjectInitializer& ObjectInitializer) :
+ Super(ObjectInitializer)
+{
+}
+
+const AActor* USceneCaptureComponent2D_CARLA::GetViewOwner() const
+{
+ return ViewActor;
+}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.h
new file mode 100644
index 000000000..40517b4d0
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent2D_CARLA.h
@@ -0,0 +1,26 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#pragma once
+#include "CoreMinimal.h"
+#include "Components/SceneCaptureComponent2D.h"
+
+#include "SceneCaptureComponent2D_CARLA.generated.h"
+
+
+
+UCLASS(hidecategories=(Collision, Object, Physics, SceneComponent, Mobility))
+class CARLA_API USceneCaptureComponent2D_CARLA : public USceneCaptureComponent2D
+{
+ GENERATED_BODY()
+public:
+
+ USceneCaptureComponent2D_CARLA(const FObjectInitializer& = FObjectInitializer::Get());
+
+ const AActor* ViewActor;
+
+ virtual const AActor* GetViewOwner() const override;
+};
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponentCube_CARLA.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponentCube_CARLA.cpp
new file mode 100644
index 000000000..105f4dc25
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponentCube_CARLA.cpp
@@ -0,0 +1,19 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#include "SceneCaptureComponentCube_CARLA.h"
+
+
+
+USceneCaptureComponentCube_CARLA::USceneCaptureComponentCube_CARLA(const FObjectInitializer& ObjectInitializer) :
+ Super(ObjectInitializer)
+{
+}
+
+const AActor* USceneCaptureComponentCube_CARLA::GetViewOwner() const
+{
+ return ViewActor;
+}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponentCube_CARLA.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponentCube_CARLA.h
new file mode 100644
index 000000000..f6fdfa8df
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponentCube_CARLA.h
@@ -0,0 +1,26 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#pragma once
+#include "CoreMinimal.h"
+#include "Components/SceneCaptureComponentCube.h"
+
+#include "SceneCaptureComponentCube_CARLA.generated.h"
+
+
+
+UCLASS(hidecategories=(Collision, Object, Physics, SceneComponent, Mobility))
+class CARLA_API USceneCaptureComponentCube_CARLA : public USceneCaptureComponentCube
+{
+ GENERATED_BODY()
+public:
+
+ USceneCaptureComponentCube_CARLA(const FObjectInitializer& = FObjectInitializer::Get());
+
+ const AActor* ViewActor;
+
+ virtual const AActor* GetViewOwner() const override;
+};
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent_CARLA.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent_CARLA.cpp
new file mode 100644
index 000000000..8346fb30a
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent_CARLA.cpp
@@ -0,0 +1,19 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#include "SceneCaptureComponent_CARLA.h"
+
+
+
+USceneCaptureComponent_CARLA::USceneCaptureComponent_CARLA(const FObjectInitializer& ObjectInitializer) :
+ Super(ObjectInitializer)
+{
+}
+
+const AActor* USceneCaptureComponent_CARLA::GetViewOwner() const
+{
+ return ViewActor;
+}
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent_CARLA.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent_CARLA.h
new file mode 100644
index 000000000..af6dca95f
--- /dev/null
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/UE4_Overridden/SceneCaptureComponent_CARLA.h
@@ -0,0 +1,26 @@
+// Copyright (c) 2022 Computer Vision Center (CVC) at the Universitat Autonoma
+// de Barcelona (UAB).
+//
+// This work is licensed under the terms of the MIT license.
+// For a copy, see .
+
+#pragma once
+#include "CoreMinimal.h"
+#include "Components/SceneCaptureComponent.h"
+
+#include "SceneCaptureComponent_CARLA.generated.h"
+
+
+
+UCLASS(hidecategories=(Collision, Object, Physics, SceneComponent, Mobility))
+class CARLA_API USceneCaptureComponent_CARLA : public USceneCaptureComponent
+{
+ GENERATED_BODY()
+public:
+
+ USceneCaptureComponent_CARLA(const FObjectInitializer& = FObjectInitializer::Get());
+
+ const AActor* ViewActor;
+
+ virtual const AActor* GetViewOwner() const override;
+};
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Server/CarlaServer.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Server/CarlaServer.cpp
index 5d2d96e7a..017331649 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Server/CarlaServer.cpp
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Server/CarlaServer.cpp
@@ -2001,7 +2001,7 @@ void FCarlaServer::FPimpl::BindActions()
{
REQUIRE_CARLA_EPISODE();
FCarlaActor* CarlaActor = Episode->FindCarlaActor(ActorId);
- if (CarlaActor)
+ if (!CarlaActor)
{
return RespondError(
"get_light_boxes",
@@ -2034,6 +2034,107 @@ void FCarlaServer::FPimpl::BindActions()
}
};
+ // ~~ GBuffer tokens ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ BIND_SYNC(get_gbuffer_token) << [this](const cr::ActorId ActorId, uint32_t GBufferId) -> R<std::vector<unsigned char>>
+ {
+ REQUIRE_CARLA_EPISODE();
+ FCarlaActor* CarlaActor = Episode->FindCarlaActor(ActorId);
+ if(!CarlaActor)
+ {
+ return RespondError(
+ "get_gbuffer_token",
+ ECarlaServerResponse::ActorNotFound,
+ " Actor Id: " + FString::FromInt(ActorId));
+ }
+ if (CarlaActor->IsDormant())
+ {
+ return RespondError(
+ "get_gbuffer_token",
+ ECarlaServerResponse::FunctionNotAvailiableWhenDormant,
+ " Actor Id: " + FString::FromInt(ActorId));
+ }
+ ASceneCaptureSensor* Sensor = Cast<ASceneCaptureSensor>(CarlaActor->GetActor());
+ if (!Sensor)
+ {
+ return RespondError(
+ "get_gbuffer_token",
+ ECarlaServerResponse::ActorTypeMismatch,
+ " Actor Id: " + FString::FromInt(ActorId));
+ }
+
+ switch (GBufferId)
+ {
+ case 0:
+ {
+ const auto &Token = Sensor->CameraGBuffers.SceneColor.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 1:
+ {
+ const auto &Token = Sensor->CameraGBuffers.SceneDepth.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 2:
+ {
+ const auto& Token = Sensor->CameraGBuffers.SceneStencil.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 3:
+ {
+ const auto &Token = Sensor->CameraGBuffers.GBufferA.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 4:
+ {
+ const auto &Token = Sensor->CameraGBuffers.GBufferB.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 5:
+ {
+ const auto &Token = Sensor->CameraGBuffers.GBufferC.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 6:
+ {
+ const auto &Token = Sensor->CameraGBuffers.GBufferD.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 7:
+ {
+ const auto &Token = Sensor->CameraGBuffers.GBufferE.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 8:
+ {
+ const auto &Token = Sensor->CameraGBuffers.GBufferF.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 9:
+ {
+ const auto &Token = Sensor->CameraGBuffers.Velocity.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 10:
+ {
+ const auto &Token = Sensor->CameraGBuffers.SSAO.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 11:
+ {
+ const auto& Token = Sensor->CameraGBuffers.CustomDepth.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ case 12:
+ {
+ const auto& Token = Sensor->CameraGBuffers.CustomStencil.GetToken();
+ return std::vector<unsigned char>(std::begin(Token.data), std::end(Token.data));
+ }
+ default:
+ UE_LOG(LogCarla, Error, TEXT("Requested invalid GBuffer ID %u"), GBufferId);
+ return {};
+ }
+ };
+
// ~~ Logging and playback ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 BIND_SYNC(start_recorder) << [this](std::string name, bool AdditionalData) -> R<std::string>
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.cpp b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.cpp
index 4cf8ca481..1c9cd105b 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.cpp
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.cpp
@@ -10,6 +10,7 @@
#include "YieldSignComponent.h"
#include "SpeedLimitComponent.h"
#include "Components/BoxComponent.h"
+#include "Runtime/CoreUObject/Public/UObject/ConstructorHelpers.h"
#include "UObject/ConstructorHelpers.h"
diff --git a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.h b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.h
index 8920c0798..542133725 100644
--- a/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.h
+++ b/Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Traffic/TrafficLightManager.h
@@ -10,6 +10,7 @@
#include "TrafficLightGroup.h"
#include "TrafficSignBase.h"
#include "Carla/OpenDrive/OpenDrive.h"
+
#include "TrafficLightManager.generated.h"
/// Class In charge of creating and assigning traffic