DVS camera sensor implementation

Javier Hidalgo-Carrió 2020-03-13 19:19:24 +01:00 committed by Marc Garcia Puig
parent fd88d61aa7
commit c45614c983
9 changed files with 737 additions and 0 deletions
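
For orientation, here is a minimal client-side sketch of how the new sensor is meant to be used once this commit is applied. The blueprint id 'sensor.camera.dvs' and the attribute names are taken from the files below; the host address, spawn transform, attached `vehicle` actor and callback body are illustrative assumptions, not part of the commit.

# Sketch only: assumes a running simulator on localhost:2000 and an already
# spawned vehicle actor named `vehicle`; attribute names match GetSensorDefinition().
import carla

client = carla.Client('localhost', 2000)
world = client.get_world()

dvs_bp = world.get_blueprint_library().find('sensor.camera.dvs')
dvs_bp.set_attribute('positive_threshold', '0.3')
dvs_bp.set_attribute('negative_threshold', '0.3')
dvs_bp.set_attribute('refractory_period_ns', '0')
dvs_bp.set_attribute('use_log', 'True')

transform = carla.Transform(carla.Location(x=1.6, z=1.7))  # illustrative mounting pose
dvs_sensor = world.spawn_actor(dvs_bp, transform, attach_to=vehicle)
dvs_sensor.listen(lambda events: print('frame %d: %d events' % (events.frame, len(events))))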

LibCarla/source/carla/sensor/SensorRegistry.h

@@ -23,10 +23,12 @@
#include "carla/sensor/s11n/NoopSerializer.h"
#include "carla/sensor/s11n/ObstacleDetectionEventSerializer.h"
#include "carla/sensor/s11n/RadarSerializer.h"
#include "carla/sensor/s11n/DVSEventArraySerializer.h"
// 2. Add a forward-declaration of the sensor here.
class ACollisionSensor;
class ADepthCamera;
class ADVSCamera;
class AGnssSensor;
class AInertialMeasurementUnit;
class ALaneInvasionSensor;
@@ -51,6 +53,7 @@ namespace sensor {
using SensorRegistry = CompositeSerializer<
std::pair<ACollisionSensor *, s11n::CollisionEventSerializer>,
std::pair<ADepthCamera *, s11n::ImageSerializer>,
std::pair<ADVSCamera *, s11n::DVSEventArraySerializer>,
std::pair<AGnssSensor *, s11n::GnssSerializer>,
std::pair<AInertialMeasurementUnit *, s11n::IMUSerializer>,
std::pair<ALaneInvasionSensor *, s11n::NoopSerializer>,
@@ -73,6 +76,7 @@ namespace sensor {
// 4. Include the sensor here.
#include "Carla/Sensor/CollisionSensor.h"
#include "Carla/Sensor/DepthCamera.h"
#include "Carla/Sensor/DVSCamera.h"
#include "Carla/Sensor/GnssSensor.h"
#include "Carla/Sensor/InertialMeasurementUnit.h"
#include "Carla/Sensor/LaneInvasionSensor.h"

LibCarla/source/carla/sensor/data/DVSEvent.h

@@ -0,0 +1,63 @@
// Copyright (c) 2020 Robotics and Perception Group (GPR)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#pragma once
#include <cstdint>
namespace carla {
namespace sensor {
namespace data {
#pragma pack(push, 1)
struct DVSEvent {
/** Default constructor **/
DVSEvent() = default;
/** Copy Constructor **/
DVSEvent(const DVSEvent &arg)
:x(arg.x), y(arg.y), t(arg.t), pol(arg.pol){}
/** Move constructor **/
DVSEvent(DVSEvent &&arg)
: x(std::move(arg.x)), y(std::move(arg.y)), t(std::move(arg.t)), pol(std::move(arg.pol)) {}
/** Constructor **/
DVSEvent(std::uint16_t x, std::uint16_t y, std::int64_t t, bool pol)
: x(x), y(y), t(t), pol(pol) {}
/** Assignment operator **/
DVSEvent &operator=(const DVSEvent &other)
{
x = other.x; y = other.y; t = other.t; pol = other.pol;
return *this;
}
/** Move assignment operator **/
DVSEvent &operator=(DVSEvent &&other)
{
x = std::move(other.x); y = std::move(other.y); t = std::move(other.t); pol = std::move(other.pol);
return *this;
}
bool operator==(const DVSEvent &rhs) const {
return (x == rhs.x) && (y == rhs.y) && (t == rhs.t) && (pol == rhs.pol);
}
bool operator!=(const DVSEvent &rhs) const {
return !(*this == rhs);
}
std::uint16_t x;
std::uint16_t y;
std::int64_t t;
bool pol;
};
#pragma pack(pop)
} // namespace data
} // namespace sensor
} // namespace carla

LibCarla/source/carla/sensor/data/DVSEventArray.h

@@ -0,0 +1,58 @@
// Copyright (c) 2020 Robotics and Perception Group (GPR)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#pragma once
#include "carla/Debug.h"
#include "carla/sensor/data/Array.h"
#include "carla/sensor/data/DVSEvent.h"
#include "carla/sensor/s11n/DVSEventArraySerializer.h"
namespace carla {
namespace sensor {
namespace data {
/// An array of DVS events laid out over an HxW image frame.
class DVSEventArray : public Array<DVSEvent> {
using Super = Array<DVSEvent>;
protected:
using Serializer = s11n::DVSEventArraySerializer;
friend Serializer;
explicit DVSEventArray(RawData data)
: Super(Serializer::header_offset, std::move(data)) {
}
private:
const auto &GetHeader() const {
return Serializer::DeserializeHeader(Super::GetRawData());
}
public:
using event_type = DVSEvent;
/// Get image width in pixels.
auto GetWidth() const {
return GetHeader().width;
}
/// Get image height in pixels.
auto GetHeight() const {
return GetHeader().height;
}
/// Get horizontal field of view of the image in degrees.
auto GetFOVAngle() const {
return GetHeader().fov_angle;
}
};
} // namespace data
} // namespace sensor
} // namespace carla

LibCarla/source/carla/sensor/s11n/DVSEventArraySerializer.cpp

@@ -0,0 +1,24 @@
// Copyright (c) 2020 Robotics and Perception Group (GPR)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "carla/sensor/s11n/DVSEventArraySerializer.h"
#include "carla/sensor/data/DVSEventArray.h"
namespace carla {
namespace sensor {
namespace s11n {
SharedPtr<SensorData> DVSEventArraySerializer::Deserialize(RawData &&data) {
auto events_array = SharedPtr<data::DVSEventArray>(new data::DVSEventArray{std::move(data)});
return events_array;
}
} // namespace s11n
} // namespace sensor
} // namespace carla

LibCarla/source/carla/sensor/s11n/DVSEventArraySerializer.h

@@ -0,0 +1,78 @@
// Copyright (c) 2020 Robotics and Perception Group (GPR)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#pragma once
#include "carla/Memory.h"
#include "carla/sensor/RawData.h"
#include "carla/sensor/data/DVSEvent.h"
#include <cstdint>
#include <cstring>
namespace carla {
namespace sensor {
class SensorData;
namespace s11n {
/// Serializes the array of events generated by DVS camera sensors.
class DVSEventArraySerializer {
public:
#pragma pack(push, 1)
struct DVSHeader {
uint32_t width;
uint32_t height;
float fov_angle;
};
#pragma pack(pop)
constexpr static auto header_offset = sizeof(DVSHeader);
using DVSEventArray = std::vector<data::DVSEvent>;
static const DVSHeader &DeserializeHeader(const RawData &data) {
return *reinterpret_cast<const DVSHeader *>(data.begin());
}
template <typename Sensor>
static Buffer Serialize(const Sensor &sensor, const DVSEventArray &events, Buffer &&output);
static SharedPtr<SensorData> Deserialize(RawData &&data);
};
template <typename Sensor>
inline Buffer DVSEventArraySerializer::Serialize(const Sensor &sensor, const DVSEventArray &events, Buffer &&output) {
DEBUG_ASSERT(!events.empty());
DVSHeader header = {
sensor.GetImageWidth(),
sensor.GetImageHeight(),
sensor.GetFOVAngle(),
};
/** Reset the output buffer **/
output.reset(sizeof(DVSHeader) + (events.size() * sizeof(data::DVSEvent)));
/** Pointer to data in buffer **/
unsigned char *it = output.data();
/** Copy the header into the output buffer **/
std::memcpy(it, reinterpret_cast<const void *>(&header), sizeof(header));
it += sizeof(DVSHeader);
/** Copy the events into the output buffer **/
for (const auto &e : events)
{
std::memcpy(it, reinterpret_cast<const void *>(&e), sizeof(data::DVSEvent));
it += sizeof(data::DVSEvent);
}
return std::move(output);
}
} // namespace s11n
} // namespace sensor
} // namespace carla
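
The serialized measurement is simply the packed 12-byte DVSHeader (width, height, fov_angle) followed by one packed 13-byte DVSEvent per event. As an illustration of that layout (not code from this commit), a small Python sketch that decodes such a blob with the standard struct module; the little-endian byte order is an assumption about the producing host.

# Sketch: decode a buffer laid out as DVSHeader followed by N packed DVSEvents.
import struct

HEADER_FMT = '<IIf'   # uint32 width, uint32 height, float fov_angle -> 12 bytes
EVENT_FMT = '<HHq?'   # uint16 x, uint16 y, int64 t, bool pol        -> 13 bytes

def decode_dvs_buffer(blob):
    width, height, fov = struct.unpack_from(HEADER_FMT, blob, 0)
    events = list(struct.iter_unpack(EVENT_FMT, blob[struct.calcsize(HEADER_FMT):]))
    return (width, height, fov), events  # each event is an (x, y, t, pol) tuple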

PythonAPI/carla/source/libcarla/SensorData.cpp

@@ -18,6 +18,7 @@
#include <carla/sensor/data/LidarMeasurement.h>
#include <carla/sensor/data/GnssMeasurement.h>
#include <carla/sensor/data/RadarMeasurement.h>
#include <carla/sensor/data/DVSEventArray.h>
#include <carla/sensor/s11n/RadarData.h>
@@ -97,6 +98,23 @@ namespace data {
return out;
}
std::ostream &operator<<(std::ostream &out, const DVSEvent &event) {
out << "Event(" << event.x
<< ',' << event.y
<< ',' << event.t
<< ',' << event.pol << ')';
return out;
}
std::ostream &operator<<(std::ostream &out, const DVSEventArray &events) {
out << "EventArray(frame=" << events.GetFrame()
<< ", timestamp=" << events.GetTimestamp()
<< ", dimensions=" << events.GetWidth() << 'x' << events.GetHeight()
<< ", number_of_events=" << events.size()
<< ')';
return out;
}
} // namespace data
namespace s11n {
@@ -300,4 +318,124 @@ void export_sensor_data() {
.def_readwrite("depth", &css::RadarDetection::depth)
.def(self_ns::str(self_ns::self))
;
class_<std::vector<csd::Color> >("ColorVector")
.add_property("raw_data", &GetRawDataAsBuffer< std::vector<csd::Color> >)
.def("__len__", &std::vector<csd::Color>::size)
.def(vector_indexing_suite< std::vector<csd::Color> >())
;
class_<std::vector<std::int64_t>>("IntVector")
.add_property("raw_data", &GetRawDataAsBuffer< std::vector<std::int64_t> >)
.def("__len__", &std::vector<std::int64_t>::size)
.def(vector_indexing_suite< std::vector<std::int64_t> >())
;
class_<std::vector<short>>("ShortVector")
.add_property("raw_data", &GetRawDataAsBuffer< std::vector<short> >)
.def("__len__", &std::vector<short>::size)
.def(vector_indexing_suite< std::vector<short> >())
;
class_<std::vector<std::vector<std::int64_t>> >("IntMatrix")
.add_property("raw_data", &GetRawDataAsBuffer< std::vector<std::vector<std::int64_t>> >)
.def("__len__", &std::vector<std::vector<std::int64_t>>::size)
.def(vector_indexing_suite< std::vector<std::vector<std::int64_t>> >())
;
class_<csd::DVSEvent>("DVSEvent")
.add_property("x", &csd::DVSEvent::x)
.add_property("y", &csd::DVSEvent::y)
.add_property("t", &csd::DVSEvent::t)
.add_property("pol", &csd::DVSEvent::pol)
.def(self_ns::str(self_ns::self))
;
class_<csd::DVSEventArray, bases<cs::SensorData>, boost::noncopyable, boost::shared_ptr<csd::DVSEventArray>>("DVSEventArray", no_init)
.add_property("width", &csd::DVSEventArray::GetWidth)
.add_property("height", &csd::DVSEventArray::GetHeight)
.add_property("fov", &csd::DVSEventArray::GetFOVAngle)
.add_property("raw_data", &GetRawDataAsBuffer<csd::DVSEventArray>)
.def("__len__", &csd::DVSEventArray::size)
.def("__iter__", iterator<csd::DVSEventArray>())
.def("__getitem__", +[](const csd::DVSEventArray &self, size_t pos) -> csd::DVSEvent {
return self.at(pos);
})
.def("__setitem__", +[](csd::DVSEventArray &self, size_t pos, csd::DVSEvent event) {
self.at(pos) = event;
})
.def("to_image", +[](const csd::DVSEventArray &self) -> std::vector<csd::Color> {
std::vector<csd::Color> img (self.GetHeight() * self.GetWidth());
for (size_t i=0; i<self.size(); ++i)
{
const csd::DVSEvent &event = self[i];
size_t index = (self.GetWidth() * event.y) + event.x;
if (event.pol == true)
{
/** Blue is positive **/
img[index].b = 255u;
}
else
{
/** Red is negative **/
img[index].r = 255u;
}
}
return img;
})
.def("to_array", +[](const csd::DVSEventArray &self) -> std::vector<std::vector<std::int64_t>> {
std::vector<std::vector<std::int64_t>> array (self.size());
for (size_t i=0; i<self.size(); ++i)
{
const csd::DVSEvent &event = self[i];
std::vector<std::int64_t> element = {static_cast<std::int64_t>(event.x), static_cast<std::int64_t>(event.y), static_cast<std::int64_t>(event.t), static_cast<std::int64_t>(event.pol)};
array[i] = element;
}
return array;
})
.def("to_array_x", +[](const csd::DVSEventArray &self) -> std::vector<std::int64_t> {
std::vector<std::int64_t> array;
for (size_t i=0; i<self.size(); ++i)
{
const csd::DVSEvent &event = self[i];
array.push_back(event.x);
}
return array;
})
.def("to_array_y", +[](const csd::DVSEventArray &self) -> std::vector<std::int64_t> {
std::vector<std::int64_t> array;
for (size_t i=0; i<self.size(); ++i)
{
const csd::DVSEvent &event = self[i];
array.push_back(event.y);
}
return array;
})
.def("to_array_t", +[](const csd::DVSEventArray &self) -> std::vector<std::int64_t> {
std::vector<std::int64_t> array;
for (size_t i=0; i<self.size(); ++i)
{
const csd::DVSEvent &event = self[i];
array.push_back(event.t);
}
return array;
})
.def("to_array_pol", +[](const csd::DVSEventArray &self) -> std::vector<short> {
std::vector<short> array;
for (size_t i=0; i<self.size(); ++i)
{
const csd::DVSEvent &event = self[i];
if (event.pol == true)
array.push_back(1);
else
array.push_back(-1);
}
return array;
})
.def(self_ns::str(self_ns::self))
;
}
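
A short, hedged sketch of how the bindings exported above can be consumed from a sensor callback. The methods used (len, indexing, to_array_t, to_array_pol, to_image, width, height, raw_data) are the ones defined in this file; the callback wiring and the NumPy post-processing are illustrative.

# Sketch of a client-side callback for a carla.DVSEventArray measurement.
import numpy as np

def on_dvs_events(events):
    if len(events) == 0:
        return None
    # Per-field access through the new bindings.
    t = np.asarray(events.to_array_t())      # event timestamps in nanoseconds
    pol = np.asarray(events.to_array_pol())  # +1 for positive, -1 for negative events
    first = events[0]                        # a DVSEvent exposing .x, .y, .t, .pol
    print('%d events over %d ns, first at pixel (%d, %d), %d positive'
          % (len(events), t[-1] - t[0], first.x, first.y, int((pol > 0).sum())))
    # Event-frame visualization (blue = positive, red = negative, as in to_image()).
    img = np.frombuffer(events.to_image().raw_data, dtype=np.uint8)
    return img.reshape((events.height, events.width, 4))[:, :, :3]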

PythonAPI/examples/manual_control.py

@@ -900,6 +900,7 @@ class CameraManager(object):
        self.transform_index = 1
        self.sensors = [
            ['sensor.camera.rgb', cc.Raw, 'Camera RGB', {}],
            ['sensor.camera.dvs', cc.Raw, 'Dynamic Vision Sensor', {}],
            ['sensor.camera.depth', cc.Raw, 'Camera Depth (Raw)', {}],
            ['sensor.camera.depth', cc.Depth, 'Camera Depth (Gray Scale)', {}],
            ['sensor.camera.depth', cc.LogarithmicDepth, 'Camera Depth (Logarithmic Gray Scale)', {}],
@@ -982,6 +983,12 @@
            lidar_img = np.zeros((lidar_img_size), dtype = int)
            lidar_img[tuple(lidar_data.T)] = (255, 255, 255)
            self.surface = pygame.surfarray.make_surface(lidar_img)
        elif self.sensors[self.index][0].startswith('sensor.camera.dvs'):
            array = np.frombuffer(image.to_image().raw_data, dtype=np.dtype("uint8"))
            array = np.reshape(array, (image.height, image.width, 4))
            array = array[:, :, :3]
            array = array[:, :, ::-1]
            self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
        else:
            image.convert(self.sensors[self.index][1])
            array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))

Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/DVSCamera.cpp

@@ -0,0 +1,288 @@
// Copyright (c) 2020 Robotics and Perception Group (GPR)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include <random>
#include <cmath>
#include <algorithm>
#include "Carla.h"
#include "Carla/Sensor/DVSCamera.h"
ADVSCamera::ADVSCamera(const FObjectInitializer &ObjectInitializer)
: Super(ObjectInitializer)
{
EnablePostProcessingEffects(true);
AddPostProcessingMaterial(
TEXT("Material'/Carla/PostProcessingMaterials/PhysicLensDistortion.PhysicLensDistortion'"));
}
FActorDefinition ADVSCamera::GetSensorDefinition()
{
constexpr bool bEnableModifyingPostProcessEffects = true;
auto Definition = UActorBlueprintFunctionLibrary::MakeCameraDefinition(TEXT("dvs"), bEnableModifyingPostProcessEffects);
FActorVariation Cp;
Cp.Id = TEXT("positive_threshold");
Cp.Type = EActorAttributeType::Float;
Cp.RecommendedValues = { TEXT("0.5") };
Cp.bRestrictToRecommended = false;
FActorVariation Cm;
Cm.Id = TEXT("negative_threshold");
Cm.Type = EActorAttributeType::Float;
Cm.RecommendedValues = { TEXT("0.5") };
Cm.bRestrictToRecommended = false;
FActorVariation Sigma_Cp;
Sigma_Cp.Id = TEXT("sigma_positive_threshold");
Sigma_Cp.Type = EActorAttributeType::Float;
Sigma_Cp.RecommendedValues = { TEXT("0.0") };
Sigma_Cp.bRestrictToRecommended = false;
FActorVariation Sigma_Cm;
Sigma_Cm.Id = TEXT("sigma_negative_threshold");
Sigma_Cm.Type = EActorAttributeType::Float;
Sigma_Cm.RecommendedValues = { TEXT("0.0") };
Sigma_Cm.bRestrictToRecommended = false;
FActorVariation Refractory_Period;
Refractory_Period.Id = TEXT("refractory_period_ns");
Refractory_Period.Type = EActorAttributeType::Int;
Refractory_Period.RecommendedValues = { TEXT("0") };
Refractory_Period.bRestrictToRecommended = false;
FActorVariation Use_Log;
Use_Log.Id = TEXT("use_log");
Use_Log.Type = EActorAttributeType::Bool;
Use_Log.RecommendedValues = { TEXT("True") };
Use_Log.bRestrictToRecommended = false;
FActorVariation Log_EPS;
Log_EPS.Id = TEXT("log_eps");
Log_EPS.Type = EActorAttributeType::Float;
Log_EPS.RecommendedValues = { TEXT("0.001") };
Log_EPS.bRestrictToRecommended = false;
Definition.Variations.Append({ Cp, Cm, Sigma_Cp, Sigma_Cm, Refractory_Period, Use_Log, Log_EPS });
return Definition;
}
void ADVSCamera::Set(const FActorDescription &Description)
{
Super::Set(Description);
this->config.Cp = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
"positive_threshold",
Description.Variations,
0.5f);
this->config.Cm = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
"negative_threshold",
Description.Variations,
0.5f);
this->config.sigma_Cp = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
"sigma_positive_threshold",
Description.Variations,
0.0f);
this->config.sigma_Cm = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
"sigma_negative_threshold",
Description.Variations,
0.0f);
this->config.refractory_period_ns = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToInt(
"refractory_period_ns",
Description.Variations,
0);
this->config.use_log = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToBool(
"use_log",
Description.Variations,
true);
this->config.log_eps = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
"log_eps",
Description.Variations,
1e-03);
}
void ADVSCamera::Tick(float DeltaTime)
{
// Super (ASceneCaptureSensor) captures the scene into the UTextureRenderTarget2D (CaptureRenderTarget) through its CaptureComponent2D
Super::Tick(DeltaTime);
/** Read the image **/
TArray<FColor> image;
this->ReadPixels(image);
/** Convert image to gray scale **/
if (this->config.use_log)
{
this->imageToLogGray(image, this->last_image);
}
else
{
this->imageToGray(image, this->last_image);
}
/** DVS Simulator **/
ADVSCamera::DVSEventArray events = this->simulation(DeltaTime);
if (events.size() > 0)
{
/** Send the events **/
auto Stream = GetDataStream(*this);
auto Buffer = Stream.PopBufferFromPool();
Stream.Send(*this, events, std::move(Buffer));
}
}
void ADVSCamera::imageToGray(const TArray<FColor> &image, TArray<float> &gray)
{
/** Sanity check **/
if (image.Num() != (this->GetImageHeight() * this->GetImageWidth()))
return;
/** Reserve HxW elements **/
gray.SetNumUninitialized(image.Num());
/** Convert image to gray raw image values **/
for (size_t i = 0; i<image.Num(); ++i)
{
gray[i] = 0.2989*image[i].R + 0.5870*image[i].G + 0.1140*image[i].B;
}
}
void ADVSCamera::imageToLogGray(const TArray<FColor> &image, TArray<float> &gray)
{
/** Sanity check **/
if (image.Num() != (this->GetImageHeight() * this->GetImageWidth()))
return;
/** Reserve HxW elements **/
gray.SetNumUninitialized(image.Num());
/** Convert image to gray raw image values **/
for (size_t i = 0; i<image.Num(); ++i)
{
gray[i] = std::log(this->config.log_eps + ((0.2989*image[i].R + 0.5870*image[i].G + 0.1140*image[i].B)/255.0));
}
}
ADVSCamera::DVSEventArray ADVSCamera::simulation (float DeltaTime)
{
/** Array of events **/
ADVSCamera::DVSEventArray events;
/** Sanity check **/
if (this->last_image.Num() != (this->GetImageHeight() * this->GetImageWidth()))
return events;
/** Check initialization **/
if(this->prev_image.Num() == 0)
{
/** Set the first rendered image **/
this->ref_values = this->last_image;
this->prev_image = this->last_image;
/** Resizes array to given number of elements. New elements will be zeroed.**/
this->last_event_timestamp.SetNumZeroed(this->last_image.Num());
/** Reset current time **/
this->current_time = dvs::secToNanosec(this->GetEpisode().GetElapsedGameTime());
return events;
}
static constexpr float tolerance = 1e-6;
/** delta time in nanoseconds **/
std::uint64_t delta_t_ns = dvs::secToNanosec(this->GetEpisode().GetElapsedGameTime()) - this->current_time;
/** Loop along the image size **/
for (int y = 0; y < this->GetImageHeight(); ++y)
{
for (int x = 0; x < this->GetImageWidth(); ++x)
{
int i = (this->GetImageWidth() * y) + x;
float itdt = this->last_image[i];
float it = this->prev_image[i];
float prev_cross = this->ref_values[i];
if (std::fabs (it - itdt) > tolerance)
{
float pol = (itdt >= it) ? +1.0 : -1.0;
float C = (pol > 0) ? this->config.Cp : this->config.Cm;
float sigma_C = (pol > 0) ? this->config.sigma_Cp : this->config.sigma_Cm;
if(sigma_C > 0)
{
C += this->sampleNormalDistribution<float>(false, 0, sigma_C);
constexpr float minimum_contrast_threshold = 0.01;
C = std::max(minimum_contrast_threshold, C);
}
float curr_cross = prev_cross;
bool all_crossings = false;
do
{
curr_cross += pol * C;
if ((pol > 0 && curr_cross > it && curr_cross <= itdt)
|| (pol < 0 && curr_cross < it && curr_cross >= itdt))
{
std::uint64_t edt = (curr_cross - it) * delta_t_ns / (itdt - it);
std::int64_t t = this->current_time + edt;
// check that pixel (x,y) is not currently in a "refractory" state
// i.e. |t - last_event_timestamp(x, y)| >= refractory_period
const std::int64_t last_stamp_at_xy = dvs::secToNanosec(this->last_event_timestamp[i]);
if (t >= last_stamp_at_xy)
{
const std::uint64_t dt = t - last_stamp_at_xy;
if(this->last_event_timestamp[i] == 0 || dt >= this->config.refractory_period_ns)
{
events.push_back(::carla::sensor::data::DVSEvent(x, y, t, pol > 0));
this->last_event_timestamp[i] = dvs::nanosecToSecTrunc(t);
}
else
{
/** Dropping event because time since last event < refractory_period_ns **/
}
this->ref_values[i] = curr_cross;
}
}
else
{
all_crossings = true;
}
} while (!all_crossings);
} // end tolerance
}//end for each pixel
}
/** Update current time **/
this->current_time = dvs::secToNanosec(this->GetEpisode().GetElapsedGameTime());
this->prev_image = this->last_image;
// Sort the events by increasing timestamps, since this is what
// most event processing algorithms expect
std::sort(events.begin(), events.end(), [](const ::carla::sensor::data::DVSEvent& it1, const ::carla::sensor::data::DVSEvent& it2){return it1.t < it2.t;});
return events;
}
template<typename T> T ADVSCamera::sampleNormalDistribution(bool deterministic, T mean, T sigma)
{
static std::mt19937 gen_nondeterministic(std::random_device{}());
static std::mt19937 gen_deterministic(0);
auto dist = std::normal_distribution<T>(mean, sigma);
return deterministic ? dist(gen_deterministic) : dist(gen_nondeterministic);
}
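
To make the core of ADVSCamera::simulation easier to follow, below is a simplified NumPy-style sketch of the same contrast-threshold model: an event is emitted each time a pixel's (log-)intensity crosses the reference level by the contrast threshold C, with the timestamp linearly interpolated inside the frame interval. This is a reduced illustration only; it resets the reference to the previous frame's value and drops the threshold noise and refractory-period handling that the implementation above includes.

# Simplified per-pixel DVS model (illustrative; mirrors the threshold-crossing
# loop in ADVSCamera::simulation but drops noise and the refractory period).
import numpy as np

def dvs_events(prev_img, curr_img, t0_ns, dt_ns, C=0.5):
    events = []  # (x, y, t, pol)
    height, width = curr_img.shape
    for y in range(height):
        for x in range(width):
            it, itdt = prev_img[y, x], curr_img[y, x]
            if abs(itdt - it) < 1e-6:
                continue
            pol = 1.0 if itdt >= it else -1.0
            cross = it  # reference value; the real sensor keeps this across frames
            while True:
                cross += pol * C
                inside = (it < cross <= itdt) if pol > 0 else (itdt <= cross < it)
                if not inside:
                    break
                t = t0_ns + int((cross - it) * dt_ns / (itdt - it))
                events.append((x, y, t, pol > 0))
    events.sort(key=lambda e: e[2])  # sort by timestamp, as the sensor does
    return events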

Unreal/CarlaUE4/Plugins/Carla/Source/Carla/Sensor/DVSCamera.h

@@ -0,0 +1,77 @@
// Copyright (c) 2020 Robotics and Perception Group (GPR)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#pragma once
#include "Carla/Sensor/SceneCaptureSensor.h"
#include <carla/sensor/data/DVSEvent.h>
#include "DVSCamera.generated.h"
namespace dvs
{
/** DVS Configuration structure **/
struct Config
{
float Cp;
float Cm;
float sigma_Cp;
float sigma_Cm;
std::uint64_t refractory_period_ns;
bool use_log;
float log_eps;
};
inline constexpr std::int64_t secToNanosec(double seconds)
{
return static_cast<std::int64_t>(seconds * 1e9);
}
inline constexpr double nanosecToSecTrunc(std::int64_t nanoseconds)
{
return static_cast<double>(nanoseconds) / 1e9;
}
}//namespace dvs
/// Sensor that produces Dynamic Vision Events
UCLASS()
class CARLA_API ADVSCamera : public AShaderBasedSensor
{
GENERATED_BODY()
using DVSEventArray = std::vector<::carla::sensor::data::DVSEvent>;
public:
ADVSCamera(const FObjectInitializer &ObjectInitializer);
static FActorDefinition GetSensorDefinition();
void Set(const FActorDescription &ActorDescription) override;
protected:
void Tick(float DeltaTime) override;
void imageToGray(const TArray<FColor> &image, TArray<float> &gray);
void imageToLogGray(const TArray<FColor> &image, TArray<float> &gray);
ADVSCamera::DVSEventArray simulation (float DeltaTime);
private:
//! @return Sample from normal distribution (real-valued).
template<typename T> T sampleNormalDistribution( bool deterministic, T mean, T sigma);
private:
/** Images containing last (current) and previous image **/
TArray<float> last_image, prev_image;
/** Image containing the last reference value used to trigger an event **/
TArray<float> ref_values;
/** Image containing time of last event in seconds **/
TArray<double> last_event_timestamp;
/** Current time in nanoseconds **/
std::int64_t current_time;
/** DVS simulation configuration **/
dvs::Config config;
};