Fix OpticalFlowSensor (#7797)

* Switch OpticalFlowSensor from FPixelReader to ImageUtil's methods.

* Fix OpticalFlowCamera, add extra client presence checks.

---------

Co-authored-by: Marcel Pi <25649656+MarcelPiNacy@users.noreply.github.com>
Co-authored-by: Marcel Pi <marcelpi97@gmail.com>
This commit is contained in:
MarcelPiNacy-CVC 2024-06-18 11:22:56 +02:00 committed by GitHub
parent e650f9950a
commit 898ad811da
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 378 additions and 156 deletions

View File

@ -33,12 +33,15 @@ void ADepthCamera::PostPhysTick(UWorld *World, ELevelTick TickType, float DeltaS
TRACE_CPUPROFILER_EVENT_SCOPE(ADepthCamera::PostPhysTick);
Super::PostPhysTick(World, TickType, DeltaSeconds);
if (!AreClientsListening())
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadSensorImageDataAsyncFColor(*this, [this, FrameIndex](
TArrayView<const FColor> Pixels,
FIntPoint Size) -> bool
{
SendImageDataToClient(*this, Pixels, FrameIndex);
SendDataToClient(*this, Pixels, FrameIndex);
return true;
});
}

View File

@ -14,62 +14,74 @@
template <typename F>
class ScopedCallback
{
F fn;
public:
constexpr ScopedCallback(F&& fn) : fn(fn) { }
~ScopedCallback() { fn(); }
};
namespace ImageUtil
{
bool DecodePixelsByFormat(
const void* PixelData,
int32 SourcePitch,
FIntPoint SourceExtent,
FIntPoint DestinationExtent,
FIntPoint Extent,
EPixelFormat Format,
FReadSurfaceDataFlags Flags,
TArrayView<FLinearColor> Out)
{
SourcePitch *= GPixelFormats[Format].BlockBytes;
auto OutPixelCount = DestinationExtent.X * DestinationExtent.Y;
auto OutPixelCount = Extent.X * Extent.Y;
switch (Format)
{
case PF_G16:
case PF_R16_UINT:
case PF_R16_SINT:
// Shadow maps
ConvertRawR16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_R8G8B8A8:
ConvertRawR8G8B8A8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR8G8B8A8DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_B8G8R8A8:
ConvertRawB8G8R8A8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawB8G8R8A8DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_A2B10G10R10:
ConvertRawA2B10G10R10DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawA2B10G10R10DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_FloatRGBA:
case PF_R16G16B16A16_UNORM:
case PF_R16G16B16A16_SNORM:
ConvertRawR16G16B16A16FDataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawR16G16B16A16FDataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_FloatR11G11B10:
ConvertRawRR11G11B10DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawRR11G11B10DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_A32B32G32R32F:
ConvertRawR32G32B32A32DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawR32G32B32A32DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_A16B16G16R16:
ConvertRawR16G16B16A16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16G16B16A16DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_G16R16:
ConvertRawR16G16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16G16DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_X24_G8: // Depth Stencil
ConvertRawR24G8DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawR24G8DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_R32_FLOAT: // Depth Stencil
ConvertRawR32DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawR32DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_R16G16B16A16_UINT:
case PF_R16G16B16A16_SINT:
ConvertRawR16G16B16A16DataToFLinearColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16G16B16A16DataToFLinearColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
default:
UE_LOG(LogCarla, Warning, TEXT("Unsupported format %llu"), (unsigned long long)Format);
@ -78,66 +90,67 @@ namespace ImageUtil
return true;
}
bool DecodePixelsByFormat(
const void* PixelData,
int32 SourcePitch,
FIntPoint SourceExtent,
FIntPoint DestinationExtent,
FIntPoint Extent,
EPixelFormat Format,
FReadSurfaceDataFlags Flags,
TArrayView<FColor> Out)
{
SourcePitch *= GPixelFormats[Format].BlockBytes;
auto OutPixelCount = DestinationExtent.X * DestinationExtent.Y;
auto OutPixelCount = Extent.X * Extent.Y;
switch (Format)
{
case PF_G16:
case PF_R16_UINT:
case PF_R16_SINT:
// Shadow maps
ConvertRawR16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_R8G8B8A8:
ConvertRawR8G8B8A8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR8G8B8A8DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_B8G8R8A8:
ConvertRawB8G8R8A8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawB8G8R8A8DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_A2B10G10R10:
ConvertRawR10G10B10A2DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR10G10B10A2DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_FloatRGBA:
case PF_R16G16B16A16_UNORM:
case PF_R16G16B16A16_SNORM:
ConvertRawR16G16B16A16FDataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags.GetLinearToGamma());
ConvertRawR16G16B16A16FDataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags.GetLinearToGamma());
break;
case PF_FloatR11G11B10:
ConvertRawR11G11B10DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags.GetLinearToGamma());
ConvertRawR11G11B10DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags.GetLinearToGamma());
break;
case PF_A32B32G32R32F:
ConvertRawR32G32B32A32DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags.GetLinearToGamma());
ConvertRawR32G32B32A32DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags.GetLinearToGamma());
break;
case PF_A16B16G16R16:
ConvertRawR16G16B16A16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16G16B16A16DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_G16R16:
ConvertRawR16G16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16G16DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_DepthStencil: // Depth / Stencil
ConvertRawD32S8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawD32S8DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_X24_G8: // Depth / Stencil
ConvertRawR24G8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawR24G8DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_R32_FLOAT: // Depth
ConvertRawR32DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
ConvertRawR32DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData(), Flags);
break;
case PF_R16G16B16A16_UINT:
case PF_R16G16B16A16_SINT:
ConvertRawR16G16B16A16DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR16G16B16A16DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
case PF_G8:
ConvertRawR8DataToFColor(DestinationExtent.X, DestinationExtent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
ConvertRawR8DataToFColor(Extent.X, Extent.Y, (uint8*)PixelData, SourcePitch, Out.GetData());
break;
default:
UE_LOG(LogCarla, Warning, TEXT("Unsupported format %llu"), (unsigned long long)Format);
@ -146,6 +159,8 @@ namespace ImageUtil
return true;
}
bool ReadImageData(
UTextureRenderTarget2D& RenderTarget,
TArray<FColor>& Out)
@ -157,6 +172,8 @@ namespace ImageUtil
return Resource->ReadPixels(Out, ReadFlags);
}
bool ReadImageData(
UTextureRenderTarget2D& RenderTarget,
TArray64<FColor>& Out)
@ -168,6 +185,8 @@ namespace ImageUtil
return Resource->ReadPixelsPtr(Out.GetData(), ReadFlags);
}
TUniquePtr<TImagePixelData<FColor>> ReadImageData(
UTextureRenderTarget2D& RenderTarget)
{
@ -179,6 +198,8 @@ namespace ImageUtil
return TUniquePtr<TImagePixelData<FColor>>();
}
TFuture<bool> SaveImageData(
UTextureRenderTarget2D& RenderTarget,
const FStringView& Path)
@ -186,6 +207,8 @@ namespace ImageUtil
return SaveImageData(ReadImageData(RenderTarget), Path);
}
TFuture<bool> SaveImageData(
TUniquePtr<TImagePixelData<FColor>> Data,
const FStringView& Path)
@ -201,11 +224,14 @@ namespace ImageUtil
return HighResScreenshotConfig.ImageWriteQueue->Enqueue(MoveTemp(ImageTask));
}
static void ReadImageDataAsyncCommand(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallback&& Callback)
{
static thread_local auto RenderQueryPool = RHICreateRenderQueryPool(RQT_AbsoluteTime);
static thread_local auto RenderQueryPool =
RHICreateRenderQueryPool(RQT_AbsoluteTime);
auto& CmdList = FRHICommandListImmediate::Get();
auto Resource = static_cast<FTextureRenderTarget2DResource*>(
@ -213,7 +239,8 @@ namespace ImageUtil
auto Texture = Resource->GetRenderTargetTexture();
if (Texture == nullptr)
return;
auto Readback = MakeUnique<FRHIGPUTextureReadback>(TEXT("ReadImageData-Readback"));
auto Readback = MakeUnique<FRHIGPUTextureReadback>(
TEXT("ReadImageData-Readback"));
auto Size = Texture->GetSizeXY();
auto Format = Texture->GetFormat();
auto ResolveRect = FResolveRect();
@ -226,21 +253,27 @@ namespace ImageUtil
RHIGetRenderQueryResult(Query.GetQuery(), DeltaTime, true);
Query.ReleaseQuery();
AsyncTask(ENamedThreads::HighTaskPriority, [
AsyncTask(
ENamedThreads::HighTaskPriority, [
Readback = MoveTemp(Readback),
Callback = std::move(Callback),
Size, Format]
Size,
Format]
{
while (!Readback->IsReady())
std::this_thread::yield();
int32 RowPitch, BufferHeight;
auto Mapping = Readback->Lock(RowPitch, &BufferHeight);
if (Mapping != nullptr)
Callback(Mapping, RowPitch, BufferHeight, Format, Size);
Readback->Unlock();
auto MappedPtr = Readback->Lock(RowPitch, &BufferHeight);
if (MappedPtr != nullptr)
{
ScopedCallback Unlock = [&] { Readback->Unlock(); };
Callback(MappedPtr, RowPitch, BufferHeight, Format, Size);
}
});
}
bool ReadImageDataAsync(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallback&& Callback)
@ -266,6 +299,8 @@ namespace ImageUtil
return true;
}
bool ReadSensorImageDataAsync(
AShaderBasedSensor& Sensor,
ReadImageDataAsyncCallback&& Callback)
@ -280,6 +315,7 @@ namespace ImageUtil
}
bool ReadImageDataAsyncFColor(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallbackFColor&& Callback)
@ -294,12 +330,14 @@ namespace ImageUtil
FReadSurfaceDataFlags Flags;
TArray<FColor> Pixels;
Pixels.SetNum(Size.X * Size.Y);
if (!DecodePixelsByFormat(Mapping, RowPitch, Size, Size, Format, Flags, Pixels))
if (!DecodePixelsByFormat(Mapping, RowPitch, Size, Format, Flags, Pixels))
return false;
return Callback(Pixels, Size);
});
}
bool ReadSensorImageDataAsyncFColor(
AShaderBasedSensor& Sensor,
ReadImageDataAsyncCallbackFColor&& Callback)
@ -310,6 +348,8 @@ namespace ImageUtil
return ReadImageDataAsyncFColor(*RenderTarget, std::move(Callback));
}
bool ReadImageDataAsyncFLinearColor(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallbackFLinearColor&& Callback)
@ -324,12 +364,14 @@ namespace ImageUtil
FReadSurfaceDataFlags Flags;
TArray<FLinearColor> Pixels;
Pixels.SetNum(Size.X * Size.Y);
if (!DecodePixelsByFormat(Mapping, RowPitch, Size, Size, Format, Flags, Pixels))
if (!DecodePixelsByFormat(Mapping, RowPitch, Size, Format, Flags, Pixels))
return false;
return Callback(Pixels, Size);
});
}
bool ReadSensorImageDataAsyncFLinearColor(
AShaderBasedSensor& Sensor,
ReadImageDataAsyncCallbackFLinearColor&& Callback)

View File

@ -18,88 +18,161 @@ class UTextureRenderTarget2D;
namespace ImageUtil
{
bool DecodePixelsByFormat(
const void* PixelData,
int32 SourcePitch,
FIntPoint SourceExtent,
FIntPoint DestinationExtent,
EPixelFormat Format,
FReadSurfaceDataFlags Flags,
TArrayView<FColor> Out);
bool DecodePixelsByFormat(
const void* PixelData,
int32 SourcePitch,
FIntPoint SourceExtent,
FIntPoint DestinationExtent,
EPixelFormat Format,
FReadSurfaceDataFlags Flags,
TArrayView<FLinearColor> Out);
bool ReadImageData(
UTextureRenderTarget2D& RenderTarget,
TArray<FColor>& Out);
bool ReadImageData(
UTextureRenderTarget2D& RenderTarget,
TArray64<FColor>& Out);
TUniquePtr<TImagePixelData<FColor>> ReadImageData(
UTextureRenderTarget2D& RenderTarget);
TFuture<bool> SaveImageData(
UTextureRenderTarget2D& RenderTarget,
const FStringView& Path);
TFuture<bool> SaveImageData(
TUniquePtr<TImagePixelData<FColor>> Data,
const FStringView& Path);
// Callback for the untyped async image reading functions below.
using ReadImageDataAsyncCallback = std::function<
bool(
const void*, // MappedData
size_t, // RowPitch
size_t, // BufferHeight
EPixelFormat, // Format
FIntPoint // Extent
const void*, // Source image data.
size_t, // Number of pixels (NOT BYTES) per image row.
size_t, // Number of rows.
EPixelFormat, // Image pixel format.
FIntPoint // Image extent.
)
>;
// Callback for the FColor async image reading functions below.
using ReadImageDataAsyncCallbackFColor = std::function<
bool(
TArrayView<const FColor>, // Data
FIntPoint // Extent
TArrayView<const FColor>, // Source image data as FColor array.
FIntPoint // Image extent.
)
>;
// Callback for the FLinearColor async image reading functions below.
using ReadImageDataAsyncCallbackFLinearColor = std::function<
bool(
TArrayView<const FLinearColor>, // Data
FIntPoint // Extent
TArrayView<const FLinearColor>, // Source image data as FLinearColor array.
FIntPoint // Image extent.
)
>;
// Reads pixels in the specified format from a region of memory
// into an FColor array.
bool DecodePixelsByFormat(
const void* PixelData, // Image data to read from.
int32 SourcePitch, // Number of bytes per image row.
FIntPoint Extent, // Image extent.
EPixelFormat Format, // Image pixel format.
FReadSurfaceDataFlags Flags, // Read image flags.
TArrayView<FColor> Out // Output array view.
);
// Reads pixels in the specified format from a region of memory
// into an FLinearColor array.
bool DecodePixelsByFormat(
const void* PixelData, // Image data to read from.
int32 SourcePitch, // Number of bytes per image row.
FIntPoint Extent, // Image extent.
EPixelFormat Format, // Image pixel format.
FReadSurfaceDataFlags Flags, // Read image flags.
TArrayView<FLinearColor> Out // Output array view.
);
// Reads image data from a UTextureRenderTarget2D using its ReadPixels method.
// This function is mainly for testing purposes, as we provide faster alternatives.
bool ReadImageData(
UTextureRenderTarget2D& RenderTarget, // Render target to read from.
TArray<FColor>& Out // Output array.
);
// Reads image data from a UTextureRenderTarget2D using its ReadPixels method.
// This function is mainly for testing purposes, as we provide faster alternatives.
bool ReadImageData(
UTextureRenderTarget2D& RenderTarget, // Render target to read from.
TArray64<FColor>& Out // Output array.
);
// Reads image data from a UTextureRenderTarget2D using its ReadPixels method.
// This function is mainly for testing purposes, as we provide faster alternatives.
TUniquePtr<TImagePixelData<FColor>> ReadImageData(
UTextureRenderTarget2D& RenderTarget // Render target to read from.
);
// Reads and outputs the contents of a render target to a PNG file.
// Currently uses ReadImageData under the hood.
TFuture<bool> SaveImageData(
UTextureRenderTarget2D& RenderTarget, // Render target to read from.
const FStringView& Path // Output image file path.
);
// Given a TImagePixelData<FColor>, saves it into a PNG file.
TFuture<bool> SaveImageData(
TUniquePtr<TImagePixelData<FColor>> Data, // Pixel data to save.
const FStringView& Path // Output image file path.
);
// Asynchronously reads the contents of the specified RenderTarget
// and calls the provided Callback.
bool ReadImageDataAsync(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallback&& Callback);
UTextureRenderTarget2D& RenderTarget, // Render target to read from.
ReadImageDataAsyncCallback&& Callback // Callback to invoke when the image is available.
);
// Asynchronously reads the contents of the specified AShaderBasedSensor
// and calls the provided Callback. Uses ReadImageDataAsync underneath.
bool ReadSensorImageDataAsync(
AShaderBasedSensor& Sensor,
ReadImageDataAsyncCallback&& Callback);
AShaderBasedSensor& Sensor, // ShaderBasedSensor to read from.
ReadImageDataAsyncCallback&& Callback // Callback to invoke when the image is available.
);
// Asynchronously reads the contents of the specified RenderTarget
// and calls the provided Callback. Uses ReadImageDataAsync underneath.
// This variant converts the raw pixel data to an FColor array before invoking Callback.
bool ReadImageDataAsyncFColor(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallbackFColor&& Callback);
UTextureRenderTarget2D& RenderTarget, // Render target to read from.
ReadImageDataAsyncCallbackFColor&& Callback // Callback to invoke when the image is available.
);
// Asynchronously reads the contents of the specified AShaderBasedSensor
// and calls the provided Callback. Uses ReadImageDataAsync underneath.
// This variant converts the raw pixel data to an FColor array before invoking Callback.
bool ReadSensorImageDataAsyncFColor(
AShaderBasedSensor& Sensor,
ReadImageDataAsyncCallbackFColor&& Callback);
AShaderBasedSensor& Sensor, // ShaderBasedSensor to read from.
ReadImageDataAsyncCallbackFColor&& Callback // Callback to invoke when the image is available.
);
// Asynchronously reads the contents of the specified RenderTarget
// and calls the provided Callback. Uses ReadImageDataAsync underneath.
// This variant converts the raw pixel data to an FLinearColor array before invoking Callback.
bool ReadImageDataAsyncFLinearColor(
UTextureRenderTarget2D& RenderTarget,
ReadImageDataAsyncCallbackFLinearColor&& Callback);
UTextureRenderTarget2D& RenderTarget, // Render target to read from.
ReadImageDataAsyncCallbackFLinearColor&& Callback // Callback to invoke when the image is available.
);
// Asynchronously reads the contents of the specified AShaderBasedSensor
// and calls the provided Callback. Uses ReadImageDataAsync underneath.
// This variant converts the raw pixel data to an FLinearColor array before invoking Callback.
bool ReadSensorImageDataAsyncFLinearColor(
AShaderBasedSensor& Sensor,
ReadImageDataAsyncCallbackFLinearColor&& Callback);
AShaderBasedSensor& Sensor, // ShaderBasedSensor to read from.
ReadImageDataAsyncCallbackFLinearColor&& Callback // Callback to invoke when the image is available.
);
}

View File

@ -62,12 +62,15 @@ void AInstanceSegmentationCamera::PostPhysTick(UWorld *World, ELevelTick TickTyp
SceneCapture->ShowOnlyComponents.Emplace(Component);
}
if (!AreClientsListening())
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadSensorImageDataAsyncFColor(*this, [this, FrameIndex](
TArrayView<const FColor> Pixels,
FIntPoint Size) -> bool
{
SendImageDataToClient(*this, Pixels, FrameIndex);
SendDataToClient(*this, Pixels, FrameIndex);
return true;
});
}

View File

@ -28,12 +28,15 @@ void ANormalsCamera::PostPhysTick(UWorld *World, ELevelTick TickType, float Delt
TRACE_CPUPROFILER_EVENT_SCOPE(ANormalsCamera::PostPhysTick);
Super::PostPhysTick(World, TickType, DeltaSeconds);
if (!AreClientsListening())
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadSensorImageDataAsyncFColor(*this, [this, FrameIndex](
TArrayView<const FColor> Pixels,
FIntPoint Size) -> bool
{
SendImageDataToClient(*this, Pixels, FrameIndex);
SendDataToClient(*this, Pixels, FrameIndex);
return true;
});
}

View File

@ -30,44 +30,44 @@ AOpticalFlowCamera::AOpticalFlowCamera(const FObjectInitializer &ObjectInitializ
void AOpticalFlowCamera::PostPhysTick(UWorld *World, ELevelTick TickType, float DeltaSeconds)
{
TRACE_CPUPROFILER_EVENT_SCOPE(AOpticalFlowCamera::PostPhysTick);
auto CVarForceOutputsVelocity = IConsoleManager::Get().FindConsoleVariable(TEXT("r.BasePassForceOutputsVelocity"));
int32 OldValue = 0;
if (CVarForceOutputsVelocity)
{
OldValue = CVarForceOutputsVelocity->GetInt();
CVarForceOutputsVelocity->Set(1);
}
std::function<TArray<float>(void *, uint32)> Conversor = [](void *Data, uint32 Size)
{
TArray<float> IntermediateBuffer;
int32 Count = Size / sizeof(FFloat16Color);
DEBUG_ASSERT(Count * sizeof(FFloat16Color) == Size);
FFloat16Color *Buf = reinterpret_cast<FFloat16Color *>(Data);
IntermediateBuffer.Reserve(Count * 2);
for (int i=0; i<Count; ++i)
{
float x = (Buf->R.GetFloat() - 0.5f) * 4.f;
float y = (Buf->G.GetFloat() - 0.5f) * 4.f;
IntermediateBuffer.Add(x);
IntermediateBuffer.Add(y);
++Buf;
}
return IntermediateBuffer;
};
Super::PostPhysTick(World, TickType, DeltaSeconds);
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadSensorImageDataAsyncFColor(*this, [this, FrameIndex](
TArrayView<const FColor> Pixels,
FIntPoint Size) -> bool
ImageUtil::ReadImageDataAsync(
*GetCaptureRenderTarget(),
[this, FrameIndex](
const void* MappedPtr,
size_t RowPitch,
size_t BufferHeight,
EPixelFormat Format,
FIntPoint Extent)
{
SendImageDataToClient(*this, Pixels, FrameIndex);
return true;
check(sizeof(FVector2f) == sizeof(float) * 2);
check(RowPitch >= Extent.X);
check(BufferHeight >= Extent.Y);
// UE_LOG(LogCarla, Log, TEXT("Format=%u"), (unsigned)Format);
TArray<FVector2f> ImageData;
ImageData.Reserve(Extent.X * Extent.Y);
auto BasePtr = reinterpret_cast<const FFloat16Color*>(MappedPtr);
for (uint32 i = 0; i != Extent.Y; ++i)
{
auto Ptr = BasePtr;
for (uint32 j = 0; j != Extent.X; ++j)
{
FVector2f Out(
Ptr->R.GetFloat(),
Ptr->G.GetFloat());
Out -= FVector2f(0.5F, 0.5F);
Out *= 4.0F;
ImageData.Add(Out);
++Ptr;
}
BasePtr += RowPitch;
}
SendDataToClient(
*this,
TArrayView<FVector2f>(ImageData),
FrameIndex);
return true;
});
FPixelReader::SendPixelsInRenderThread<AOpticalFlowCamera, float>(*this, true, Conversor);
if (CVarForceOutputsVelocity)
{
CVarForceOutputsVelocity->Set(OldValue);
}
}

View File

@ -65,12 +65,15 @@ void ASceneCaptureCamera::PostPhysTick(UWorld *World, ELevelTick TickType, float
}
);
if (!AreClientsListening())
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadSensorImageDataAsyncFColor(*this, [this, FrameIndex](
TArrayView<const FColor> Pixels,
FIntPoint Size) -> bool
{
SendImageDataToClient(*this, Pixels, FrameIndex);
SendDataToClient(*this, Pixels, FrameIndex);
return true;
});
}

View File

@ -28,12 +28,15 @@ void ASemanticSegmentationCamera::PostPhysTick(UWorld *World, ELevelTick TickTyp
TRACE_CPUPROFILER_EVENT_SCOPE(ASemanticSegmentationCamera::PostPhysTick);
Super::PostPhysTick(World, TickType, DeltaSeconds);
if (!AreClientsListening())
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadSensorImageDataAsyncFColor(*this, [this, FrameIndex](
TArrayView<const FColor> Pixels,
FIntPoint Size) -> bool
{
SendImageDataToClient(*this, Pixels, FrameIndex);
SendDataToClient(*this, Pixels, FrameIndex);
return true;
});
}

View File

@ -25,6 +25,45 @@
struct FActorDescription;
/* @CARLA_UE5
The FPixelReader class has been deprecated, as its functionality
is now split between ImageUtil::ReadImageDataAsync (see Sensor/ImageUtil.h)
and ASensor::SendDataToClient.
Here's a brief example of how to use both:
if (!AreClientsListening()) // Ideally, check whether there are any clients.
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadImageDataAsync(
*GetCaptureRenderTarget(),
[this](
const void* MappedPtr,
size_t RowPitch,
size_t BufferHeight,
EPixelFormat Format,
FIntPoint Extent)
{
TArray<FColor> ImageData;
// Parse the raw data into ImageData...
SendDataToClient(
*this,
ImageData,
FrameIndex);
return true;
});
Alternatively, if you just want to retrieve the pixels as
FColor/FLinearColor, you can just use ReadImageDataAsyncFColor
or ReadImageDataAsyncFLinearColor.
*/
/// Base class for sensors.
UCLASS(Abstract, hidecategories = (Collision, Attachment, Actor))
class CARLA_API ASensor : public AActor
@ -128,37 +167,46 @@ protected:
GetEpisode().GetElapsedGameTime());
}
// Send sensor data to the client.
template <
typename SensorType,
typename PixelType>
static void SendImageDataToClient(
SensorType&& Sensor,
TArrayView<PixelType> Pixels,
uint32 FrameIndex)
typename ElementType>
static void SendDataToClient(
SensorType&& Sensor, // The data's owning sensor.
TArrayView<ElementType> SensorData, // Data to send to the client.
uint64_t FrameIndex // Current frame index.
)
{
using carla::sensor::SensorRegistry;
using SensorT = std::remove_const_t<std::remove_reference_t<SensorType>>;
constexpr size_t HeaderOffset = SensorRegistry::get<SensorT*>::type::header_offset;
if (!Sensor.AreClientsListening())
return;
auto Stream = Sensor.GetDataStream(Sensor);
Stream.SetFrameNumber(FrameIndex);
auto Buffer = Stream.PopBufferFromPool();
Buffer.copy_from(
carla::sensor::SensorRegistry::get<SensorT*>::type::header_offset,
HeaderOffset,
boost::asio::buffer(
Pixels.GetData(),
Pixels.Num() * sizeof(FColor)));
SensorData.GetData(),
SensorData.Num() * sizeof(ElementType)));
if (!Buffer.data())
return;
auto Serialized = SensorRegistry::Serialize(
Sensor,
std::move(Buffer));
auto Serialized = SensorRegistry::Serialize(Sensor, std::move(Buffer));
auto SerializedBuffer = carla::Buffer(std::move(Serialized));
auto BufferView = carla::BufferView::CreateFrom(std::move(SerializedBuffer));
#if defined(WITH_ROS2)
auto ROS2 = carla::ros2::ROS2::GetInstance();
if (ROS2->IsEnabled())
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("ROS2 SendImageDataToClient");
TRACE_CPUPROFILER_EVENT_SCOPE_STR("ROS2 SendDataToClient");
auto StreamId = carla::streaming::detail::token_type(Sensor.GetToken()).get_stream_id();
auto Res = std::async(std::launch::async, [&Sensor, ROS2, &Stream, StreamId, BufferView]()
{
@ -191,7 +239,8 @@ protected:
});
}
#endif
if (Sensor.AreClientsListening())
if (Sensor.AreClientsListening())
Stream.Send(Sensor, BufferView);
}

View File

@ -10,6 +10,45 @@
#include "ShaderBasedSensor.generated.h"
/* @CARLA_UE5
The FPixelReader class has been deprecated, as its functionality
is now split between ImageUtil::ReadImageDataAsync (see Sensor/ImageUtil.h)
and ASensor::SendDataToClient.
Here's a brief example of how to use both:
if (!AreClientsListening()) // Ideally, check whether there are any clients.
return;
auto FrameIndex = FCarlaEngine::GetFrameCounter();
ImageUtil::ReadImageDataAsync(
*GetCaptureRenderTarget(),
[this](
const void* MappedPtr,
size_t RowPitch,
size_t BufferHeight,
EPixelFormat Format,
FIntPoint Extent)
{
TArray<FColor> ImageData;
// Parse the raw data into ImageData...
SendDataToClient(
*this,
ImageData,
FrameIndex);
return true;
});
Alternatively, if you just want to retrieve the pixels as
FColor/FLinearColor, you can just use ReadImageDataAsyncFColor
or ReadImageDataAsyncFLinearColor.
*/
/// A shader parameter value to change when the material
/// instance is available.
USTRUCT(BlueprintType)
@ -27,6 +66,8 @@ struct CARLA_API FShaderFloatParameterValue
float Value = 0.0f;
};
/// A shader in AShaderBasedSensor.
USTRUCT(BlueprintType)
struct CARLA_API FSensorShader
@ -40,6 +81,8 @@ struct CARLA_API FSensorShader
float Weight = 1.0f;
};
/// A sensor that produces data by applying post-process materials (shaders) to
/// a scene capture image.
///