Restructure PythonAPI folder layout + update copyright.

Marcel Pi 2024-02-26 17:09:02 +01:00
parent 04e7f3ebdb
commit 74d19eece2
71 changed files with 321 additions and 275 deletions

View File

@ -68,7 +68,12 @@ set (PNG_SHARED OFF)
set (PNG_TOOLS OFF)
set (PNG_BUILD_ZLIB ON)
set (ZLIB_INCLUDE_DIR ${zlib_SOURCE_DIR})
set (ZLIB_LIBRARY ${zlib_BINARY_DIR}/zlib.lib)
if (WIN32)
set (ZLIB_LIBRARY ${zlib_BINARY_DIR}/zlibstatic.lib)
else ()
set (ZLIB_LIBRARY ${zlib_BINARY_DIR}/libz.a)
endif ()
carla_dependency_add (
libpng
https://github.com/glennrp/libpng.git
@ -89,6 +94,9 @@ carla_dependency_add (
${CARLA_BOOST_TAG}
)
set (EIGEN_BUILD_PKGCONFIG OFF)
set (BUILD_TESTING OFF)
set (EIGEN_BUILD_DOC OFF)
carla_dependency_add (
eigen
https://gitlab.com/libeigen/eigen.git
@ -107,17 +115,24 @@ carla_dependency_add (
${CARLA_RECAST_TAG}
)
# carla_dependency_add (
# proj
# https://github.com/OSGeo/PROJ.git
# ${CARLA_PROJ_TAG}
# )
if (ENABLE_OSM2ODR)
set (BUILD_TESTING OFF)
set (ENABLE_TIFF OFF)
set (ENABLE_CURL OFF)
carla_dependency_add (
proj
https://github.com/OSGeo/PROJ.git
${CARLA_PROJ_TAG}
)
endif ()
carla_dependency_add (
xercesc
https://github.com/apache/xerces-c.git
${CARLA_XERCESC_TAG}
)
if (ENABLE_OSM2ODR)
carla_dependency_add (
xercesc
https://github.com/apache/xerces-c.git
${CARLA_XERCESC_TAG}
)
endif ()
if (BUILD_OSM_WORLD_RENDERER)
carla_dependency_add (

View File

@ -40,7 +40,7 @@ option (
option (
ENABLE_OSM2ODR
"Enable OSM2ODR."
ON
OFF
)
option (

View File

@ -41,10 +41,10 @@ set (
include (CheckCCompilerFlag)
include (CheckCXXCompilerFlag)
include (FetchContent)
include (${CARLA_WORKSPACE_PATH}/CMake/CarlaOptions.cmake)
macro (carla_two_step_configure_file DESTINATION SOURCE)
message ("Configuring file \"${DESTINATION}\"")
# Configure-time step; evaluate variables:
configure_file (${SOURCE} ${DESTINATION})
# Generate-time step; evaluate generator expressions:

View File

@ -121,7 +121,7 @@ if (BUILD_CARLA_SERVER)
)
target_link_libraries (
carla-server PRIVATE
carla-server PUBLIC
Boost::asio
Boost::geometry
Boost::algorithm

View File

@ -17,16 +17,11 @@ namespace carla {
void FileSystem::ValidateFilePath(std::string &filepath, const std::string &ext) {
fs::path path(filepath);
if (path.extension().empty() && !ext.empty()) {
if (ext[0] != '.') {
path += '.';
}
path += ext;
}
if (path.extension() != ext)
path.replace_extension(ext);
auto parent = path.parent_path();
if (!parent.empty()) {
if (!fs::exists(parent))
fs::create_directories(parent);
}
filepath = path.string();
}
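For reference, the refactored check now also rewrites a mismatching extension instead of only appending a missing one. A minimal standalone sketch of that behavior, assuming std::filesystem-compatible semantics for the fs alias used above (the helper name is illustrative, not CARLA source):

// Illustrative sketch only: exercises the replace_extension-based logic.
#include <cassert>
#include <filesystem>
#include <string>

namespace fs = std::filesystem;

static std::string NormalizeExtension(std::string filepath, const std::string &ext) {
  fs::path path(filepath);
  if (path.extension() != ext)
    path.replace_extension(ext); // adds a missing extension or replaces a different one
  return path.string();
}

int main() {
  assert(NormalizeExtension("image", ".png") == "image.png");     // extension added
  assert(NormalizeExtension("image.raw", ".png") == "image.png"); // extension replaced
  assert(NormalizeExtension("image.png", ".png") == "image.png"); // left untouched
  return 0;
}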

View File

@ -40,7 +40,7 @@
# pragma clang diagnostic ignored "-Wunused-parameter"
#endif
#if LIBCARLA_IMAGE_WITH_PNG_SUPPORT == true
#if LIBCARLA_IMAGE_WITH_PNG_SUPPORT
# ifndef png_infopp_NULL
# define png_infopp_NULL (png_infopp)NULL
# endif // png_infopp_NULL
@ -58,11 +58,11 @@
# endif
#endif
#if LIBCARLA_IMAGE_WITH_JPEG_SUPPORT == true
#if LIBCARLA_IMAGE_WITH_JPEG_SUPPORT
# include <boost/gil/extension/io/jpeg.hpp>
#endif
#if LIBCARLA_IMAGE_WITH_TIFF_SUPPORT == true
#if LIBCARLA_IMAGE_WITH_TIFF_SUPPORT
# include <boost/gil/extension/io/tiff.hpp>
#endif

View File

@ -36,9 +36,6 @@
/* ====== INCLUSIONS ====== */
#include <stdio.h>
// Edit to original file-----
#define _USE_MATH_DEFINES // Enable windows compatibility
// --------------------------
#include <math.h>
/* ====== LOCAL VARIABLES ====== */

View File

@ -15,38 +15,38 @@ find_package (
set (PYTHON_API_PATH ${CARLA_WORKSPACE_PATH}/PythonAPI)
set (PYTHON_API_CARLA_PATH ${PYTHON_API_PATH}/carla)
set (PYTHON_API_SOURCE_PATH ${PYTHON_API_CARLA_PATH}/source/libcarla)
file (
GENERATE
OUTPUT ${PYTHON_API_PATH}/carla/pyproject.toml
INPUT ${PYTHON_API_PATH}/carla/pyproject.toml.in
carla_two_step_configure_file (
${PYTHON_API_PATH}/carla/pyproject.toml
${PYTHON_API_PATH}/carla/pyproject.toml.in
)
set (
PYTHON_API_SOURCES
${PYTHON_API_SOURCE_PATH}/PythonAPI.cpp
${PYTHON_API_SOURCE_PATH}/PythonAPI.h
${PYTHON_API_SOURCE_PATH}/Actor.cpp
${PYTHON_API_SOURCE_PATH}/Blueprint.cpp
${PYTHON_API_SOURCE_PATH}/Client.cpp
${PYTHON_API_SOURCE_PATH}/Commands.cpp
${PYTHON_API_SOURCE_PATH}/Control.cpp
${PYTHON_API_SOURCE_PATH}/Exception.cpp
${PYTHON_API_SOURCE_PATH}/Geom.cpp
${PYTHON_API_SOURCE_PATH}/LightManager.cpp
${PYTHON_API_SOURCE_PATH}/Map.cpp
${PYTHON_API_SOURCE_PATH}/OSM2ODR.cpp
${PYTHON_API_SOURCE_PATH}/Sensor.cpp
${PYTHON_API_SOURCE_PATH}/SensorData.cpp
${PYTHON_API_SOURCE_PATH}/Snapshot.cpp
${PYTHON_API_SOURCE_PATH}/TrafficManager.cpp
${PYTHON_API_SOURCE_PATH}/Weather.cpp
${PYTHON_API_SOURCE_PATH}/World.cpp
${PYTHON_API_CARLA_PATH}/src/PythonAPI.cpp
${PYTHON_API_CARLA_PATH}/src/Actor.cpp
${PYTHON_API_CARLA_PATH}/src/Blueprint.cpp
${PYTHON_API_CARLA_PATH}/src/Client.cpp
${PYTHON_API_CARLA_PATH}/src/Commands.cpp
${PYTHON_API_CARLA_PATH}/src/Control.cpp
${PYTHON_API_CARLA_PATH}/src/Exception.cpp
${PYTHON_API_CARLA_PATH}/src/Geometry.cpp
${PYTHON_API_CARLA_PATH}/src/LightManager.cpp
${PYTHON_API_CARLA_PATH}/src/Map.cpp
${PYTHON_API_CARLA_PATH}/src/Sensor.cpp
${PYTHON_API_CARLA_PATH}/src/SensorData.cpp
${PYTHON_API_CARLA_PATH}/src/Snapshot.cpp
${PYTHON_API_CARLA_PATH}/src/TrafficManager.cpp
${PYTHON_API_CARLA_PATH}/src/Weather.cpp
${PYTHON_API_CARLA_PATH}/src/World.cpp
)
if (ENABLE_OSM2ODR)
list (APPEND PYTHON_API_SOURCES ${PYTHON_API_CARLA_PATH}/src/OSM2ODR.cpp)
endif ()
if (ENABLE_RSS)
list (APPEND PYTHON_API_SOURCES ${PYTHON_API_SOURCE_PATH}/AdRss.cpp)
list (APPEND PYTHON_API_SOURCES ${PYTHON_API_CARLA_PATH}/src/AdRss.cpp)
endif ()
Python_add_library (
@ -54,12 +54,26 @@ Python_add_library (
${PYTHON_API_SOURCES}
)
target_include_directories (
carla-python-api PRIVATE
${PYTHON_API_CARLA_PATH}/include
)
target_link_libraries (
carla-python-api PRIVATE
carla-client
Boost::asio
Boost::algorithm
Boost::geometry
Boost::python
Boost::assert
Boost::gil
RecastNavigation::Recast
RecastNavigation::Detour
RecastNavigation::DetourCrowd
png_static
zlibstatic
rpc
)
target_compile_definitions (
@ -67,12 +81,31 @@ target_compile_definitions (
${CARLA_COMMON_DEFINITIONS}
${CARLA_RTTI_DEFINITIONS}
BOOST_ALL_NO_LIB
BOOST_PYTHON_STATIC_LIB
BOOST_PYTHON_STATIC_LINK
LIBCARLA_WITH_PYTHON_SUPPORT
)
target_include_directories (
carla-python-api PRIVATE
${PYTHON_API_SOURCE_PATH}
${PYTHON_API_CARLA_PATH}/src
${LIBCARLA_SOURCE_PATH}
)
if (WIN32)
set (PYD_EXT .pyd)
else ()
set (PYD_EXT .so)
endif ()
# If this command fails on Windows, enable developer mode (required to create symlinks).
add_custom_target (
carla-python-api-symlink
ALL
COMMAND
${CMAKE_COMMAND} -E create_symlink $<TARGET_FILE:carla-python-api> ${PYTHON_API_PATH}/examples/carla${PYD_EXT}
)
add_dependencies (
carla-python-api-symlink
carla-python-api
)

View File

@ -96,8 +96,10 @@
#endif
template <typename OptionalT>
boost::python::object OptionalToPythonObject(OptionalT &optional) {
return optional.has_value() ? boost::python::object(*optional) : boost::python::object();
static auto OptionalToPyObject(OptionalT &optional) {
return optional.has_value() ?
boost::python::object(*optional) :
boost::python::object();
}
// Convenient for requests without arguments.
@ -156,12 +158,11 @@ boost::python::object OptionalToPythonObject(OptionalT &optional) {
}
template<typename T>
std::vector<T> PythonLitstToVector(boost::python::list &input) {
std::vector<T> PyListToVector(boost::python::list &input) {
std::vector<T> result;
boost::python::ssize_t list_size = boost::python::len(input);
for (boost::python::ssize_t i = 0; i < list_size; ++i) {
auto list_size = boost::python::len(input);
for (decltype(list_size) i = 0; i < list_size; ++i)
result.emplace_back(boost::python::extract<T>(input[i]));
}
return result;
}
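For context, a self-contained sketch of how the renamed conversion helper is consumed from a Boost.Python binding. The PyListToVector template mirrors the definition above; the example module and sum_list function are illustrative assumptions, not part of this diff:

// Illustrative sketch only; build as a Python extension against Boost.Python.
#include <boost/python.hpp>
#include <vector>

template <typename T>
static std::vector<T> PyListToVector(boost::python::list &input) { // as defined above
  std::vector<T> result;
  auto list_size = boost::python::len(input);
  for (decltype(list_size) i = 0; i < list_size; ++i)
    result.emplace_back(boost::python::extract<T>(input[i]));
  return result;
}

static double SumList(boost::python::list values) {
  double total = 0.0;
  for (double v : PyListToVector<double>(values)) // e.g. sum_list([1.0, 2.5, 3.5]) == 7.0
    total += v;
  return total;
}

BOOST_PYTHON_MODULE(example_module) {
  boost::python::def("sum_list", &SumList);
}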
@ -203,22 +204,22 @@ std::vector<T> PythonLitstToVector(boost::python::list &input) {
#define CALL_RETURNING_OPTIONAL(cls, fn) +[](const cls &self) { \
auto optional = self.fn(); \
return OptionalToPythonObject(optional); \
return OptionalToPyObject(optional); \
}
#define CALL_RETURNING_OPTIONAL_1(cls, fn, T1_) +[](const cls &self, T1_ t1) { \
auto optional = self.fn(std::forward<T1_>(t1)); \
return OptionalToPythonObject(optional); \
return OptionalToPyObject(optional); \
}
#define CALL_RETURNING_OPTIONAL_2(cls, fn, T1_, T2_) +[](const cls &self, T1_ t1, T2_ t2) { \
auto optional = self.fn(std::forward<T1_>(t1), std::forward<T2_>(t2)); \
return OptionalToPythonObject(optional); \
return OptionalToPyObject(optional); \
}
#define CALL_RETURNING_OPTIONAL_3(cls, fn, T1_, T2_, T3_) +[](const cls &self, T1_ t1, T2_ t2, T3_ t3) { \
auto optional = self.fn(std::forward<T1_>(t1), std::forward<T2_>(t2), std::forward<T3_>(t3)); \
return OptionalToPythonObject(optional); \
return OptionalToPyObject(optional); \
}
#define CALL_RETURNING_OPTIONAL_WITHOUT_GIL(cls, fn) +[](const cls &self) { \
@ -228,12 +229,12 @@ std::vector<T> PythonLitstToVector(boost::python::list &input) {
}
template <typename T>
void PrintListItem_(std::ostream &out, const T &item) {
void PrintListItem(std::ostream &out, const T &item) {
out << item;
}
template <typename T>
void PrintListItem_(std::ostream &out, const carla::SharedPtr<T> &item) {
void PrintListItem(std::ostream &out, const carla::SharedPtr<T> &item) {
if (item == nullptr) {
out << "nullptr";
} else {
@ -242,14 +243,14 @@ void PrintListItem_(std::ostream &out, const carla::SharedPtr<T> &item) {
}
template <typename Iterable>
inline std::ostream &PrintList(std::ostream &out, const Iterable &list) {
std::ostream &PrintList(std::ostream &out, const Iterable &list) {
out << '[';
if (!list.empty()) {
auto it = list.begin();
PrintListItem_(out, *it);
PrintListItem(out, *it);
for (++it; it != list.end(); ++it) {
out << ", ";
PrintListItem_(out, *it);
PrintListItem(out, *it);
}
}
out << ']';
@ -257,18 +258,16 @@ inline std::ostream &PrintList(std::ostream &out, const Iterable &list) {
}
namespace std {
template <typename T>
inline std::ostream &operator<<(std::ostream &out, const std::vector<T> &vector_of_stuff) {
inline std::ostream& operator<<(std::ostream& out, const std::vector<T>& vector_of_stuff) {
return PrintList(out, vector_of_stuff);
}
template <typename T, typename H>
inline std::ostream &operator<<(std::ostream &out, const std::pair<T,H> &data) {
inline std::ostream& operator<<(std::ostream& out, const std::pair<T, H>& data) {
out << "(" << data.first << "," << data.second << ")";
return out;
}
} // namespace std
namespace carla {
@ -332,9 +331,6 @@ namespace geom {
}
} // namespace geom
} // namespace carla
namespace carla {
namespace sensor {
namespace data {
@ -398,9 +394,7 @@ namespace client {
}
} // namespace client
} // namespace carla
namespace carla {
namespace client {
inline std::ostream &operator<<(std::ostream &out, const Actor &actor) {
@ -409,9 +403,7 @@ namespace client {
}
} // namespace client
} // namespace carla
namespace carla {
namespace rpc {
inline auto boolalpha(bool b) {
@ -523,9 +515,7 @@ namespace rpc {
}
} // namespace rpc
} // namespace carla
namespace carla {
namespace client {
inline std::ostream &operator<<(std::ostream &out, const Map &map) {
@ -567,4 +557,4 @@ inline auto MakeCallback(boost::python::object callback) {
PyErr_Print();
}
};
}
}

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,8 +0,0 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
# pylint: disable=W0401
from .libcarla import *

View File

@ -1,8 +0,0 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
# pylint: disable=W0401
from .libcarla.command import *

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <thread>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#define TM_DEFAULT_PORT 8000

View File

@ -5,7 +5,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
void translator(const rpc::rpc_error &e) {
std::stringstream ss;

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/implicit.hpp>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include "boost/python/suite/indexing/vector_indexing_suite.hpp"

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
static void SaveOpenDriveToDisk(const carla::client::Map &self, std::string path) {
carla::PythonUtil::ReleaseGIL unlock;

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#ifdef CARLA_PYTHON_API_HAS_OSM2ODR
@ -22,10 +22,10 @@ namespace osm2odr {
return out;
}
void SetOsmWayTypes(OSM2ODRSettings& self, boost::python::list input) {
self.osm_highways_types = PythonLitstToVector<std::string>(input);
self.osm_highways_types = PyListToVector<std::string>(input);
}
void SetTLExcludedWayTypes(OSM2ODRSettings& self, boost::python::list input) {
self.tl_excluded_highways_types = PythonLitstToVector<std::string>(input);
self.tl_excluded_highways_types = PyListToVector<std::string>(input);
}
}
@ -58,4 +58,4 @@ void export_osm2odr() {
#pragma message("WARNING: Building CARLA PythonAPI without OSM2ODR support.")
#endif
#endif

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
extern void export_geom();
extern void export_control();

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
static void SubscribeToStream(carla::client::Sensor &self, boost::python::object callback) {
self.Listen(MakeCallback(std::move(callback)));

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>
namespace carla {
@ -300,29 +300,49 @@ static FakeImage ColorCodedFlow (
template <typename T>
static std::string SaveImageToDisk(T &self, std::string path, EColorConverter cc) {
std::string r;
carla::PythonUtil::ReleaseGIL unlock;
using namespace carla::image;
puts("ImageView::MakeView.");
auto view = ImageView::MakeView(self);
puts("ImageIO::WriteView.");
try
{
switch (cc) {
case EColorConverter::Raw:
return ImageIO::WriteView(
r = ImageIO::WriteView(
std::move(path),
view);
break;
case EColorConverter::Depth:
return ImageIO::WriteView(
r = ImageIO::WriteView(
std::move(path),
ImageView::MakeColorConvertedView(view, ColorConverter::Depth()));
break;
case EColorConverter::LogarithmicDepth:
return ImageIO::WriteView(
r = ImageIO::WriteView(
std::move(path),
ImageView::MakeColorConvertedView(view, ColorConverter::LogarithmicDepth()));
break;
case EColorConverter::CityScapesPalette:
return ImageIO::WriteView(
r = ImageIO::WriteView(
std::move(path),
ImageView::MakeColorConvertedView(view, ColorConverter::CityScapesPalette()));
break;
default:
throw std::invalid_argument("invalid color converter!");
}
}
catch (std::exception& e)
{
puts(e.what());
}
catch (...)
{
puts("Exception thrown");
}
puts("Done.");
return r;
}
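SaveImageToDisk releases the GIL (carla::PythonUtil::ReleaseGIL) before writing, so other Python threads keep running during the file I/O. As an illustrative assumption of how such a scope guard can be built on the CPython C API (this is not the actual CARLA implementation):

// Hypothetical sketch of a GIL-release scope guard.
#include <Python.h>

class ScopedGILRelease {
public:
  ScopedGILRelease() : _state(PyEval_SaveThread()) {}   // release the GIL
  ~ScopedGILRelease() { PyEval_RestoreThread(_state); } // re-acquire on scope exit
  ScopedGILRelease(const ScopedGILRelease &) = delete;
  ScopedGILRelease &operator=(const ScopedGILRelease &) = delete;
private:
  PyThreadState *_state;
};

// Usage inside a binding: hold the guard for the duration of blocking work, e.g.
// { ScopedGILRelease unlock; /* ImageIO::WriteView(path, view); */ }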
template <typename T>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include "boost/python/suite/indexing/vector_indexing_suite.hpp"
using ActorPtr = carla::SharedPtr<carla::client::Actor>;
@ -45,7 +45,7 @@ std::vector<uint8_t> RoadOptionToUint(boost::python::list input) {
}
void InterSetCustomPath(carla::traffic_manager::TrafficManager& self, const ActorPtr &actor, boost::python::list input, bool empty_buffer) {
self.SetCustomPath(actor, PythonLitstToVector<carla::geom::Location>(input), empty_buffer);
self.SetCustomPath(actor, PyListToVector<carla::geom::Location>(input), empty_buffer);
}
void InterSetImportedRoute(carla::traffic_manager::TrafficManager& self, const ActorPtr &actor, boost::python::list input, bool empty_buffer) {

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
namespace carla {
namespace rpc {

View File

@ -4,7 +4,7 @@
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.
#include "PythonAPI.h"
#include <PythonAPI.h>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>
@ -169,7 +169,7 @@ void export_world() {
.def_readwrite("deterministic_ragdolls", &cr::EpisodeSettings::deterministic_ragdolls)
.add_property("fixed_delta_seconds",
+[](const cr::EpisodeSettings &self) {
return OptionalToPythonObject(self.fixed_delta_seconds);
return OptionalToPyObject(self.fixed_delta_seconds);
},
+[](cr::EpisodeSettings &self, object value) {
double fds = (value == object{} ? 0.0 : extract<double>(value));
@ -343,13 +343,13 @@ void export_world() {
.def("apply_float_color_texture_to_object", &cc::World::ApplyFloatColorTextureToObject, (arg("object_name"), arg("material_parameter"), arg("texture")))
.def("apply_textures_to_object", &cc::World::ApplyTexturesToObject, (arg("object_name"), arg("diffuse_texture"), arg("emissive_texture"), arg("normal_texture"), arg("ao_roughness_metallic_emissive_texture")))
.def("apply_color_texture_to_objects", +[](cc::World &self, boost::python::list &list, const cr::MaterialParameter& parameter, const cr::TextureColor& Texture) {
self.ApplyColorTextureToObjects(PythonLitstToVector<std::string>(list), parameter, Texture);
self.ApplyColorTextureToObjects(PyListToVector<std::string>(list), parameter, Texture);
}, (arg("objects_name_list"), arg("material_parameter"), arg("texture")))
.def("apply_float_color_texture_to_objects", +[](cc::World &self, boost::python::list &list, const cr::MaterialParameter& parameter, const cr::TextureFloatColor& Texture) {
self.ApplyFloatColorTextureToObjects(PythonLitstToVector<std::string>(list), parameter, Texture);
self.ApplyFloatColorTextureToObjects(PyListToVector<std::string>(list), parameter, Texture);
}, (arg("objects_name_list"), arg("material_parameter"), arg("texture")))
.def("apply_textures_to_objects", +[](cc::World &self, boost::python::list &list, const cr::TextureColor& diffuse_texture, const cr::TextureFloatColor& emissive_texture, const cr::TextureFloatColor& normal_texture, const cr::TextureFloatColor& ao_roughness_metallic_emissive_texture) {
self.ApplyTexturesToObjects(PythonLitstToVector<std::string>(list), diffuse_texture, emissive_texture, normal_texture, ao_roughness_metallic_emissive_texture);
self.ApplyTexturesToObjects(PyListToVector<std::string>(list), diffuse_texture, emissive_texture, normal_texture, ao_roughness_metallic_emissive_texture);
}, (arg("objects_name_list"), arg("diffuse_texture"), arg("emissive_texture"), arg("normal_texture"), arg("ao_roughness_metallic_emissive_texture")))
.def(self_ns::str(self_ns::self))
;

View File

@ -39,7 +39,13 @@ except ImportError:
# ==============================================================================
# -- Find CARLA module ---------------------------------------------------------
# ==============================================================================
import util.find_carla
try:
sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
# ==============================================================================
# -- Add PythonAPI for release mode --------------------------------------------

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
# Copyright (c) 2019-2020 Intel Corporation
#

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,4 +1,4 @@
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -157,7 +157,9 @@
"SupportedTargetPlatforms": [
"IOS",
"Win64",
"Mac"
"Mac",
"Linux",
"Android"
]
},
{

View File

@ -18,7 +18,7 @@
// =============================================================================
void FPixelReader::WritePixelsToBuffer(
const UTextureRenderTarget2D &RenderTarget,
UTextureRenderTarget2D &RenderTarget,
uint32 Offset,
FRHICommandListImmediate &RHICmdList,
FPixelReader::Payload FuncForSending)

View File

@ -39,34 +39,34 @@ class FPixelReader
{
public:
using Payload = std::function<void(void *, uint32, uint32, uint32)>;
using Payload = std::function<void(void*, uint32, uint32, uint32)>;
/// Copy the pixels in @a RenderTarget into @a BitMap.
///
/// @pre To be called from game-thread.
static bool WritePixelsToArray(
UTextureRenderTarget2D &RenderTarget,
TArray<FColor> &BitMap);
UTextureRenderTarget2D& RenderTarget,
TArray<FColor>& BitMap);
/// Dump the pixels in @a RenderTarget.
///
/// @pre To be called from game-thread.
static TUniquePtr<TImagePixelData<FColor>> DumpPixels(
UTextureRenderTarget2D &RenderTarget);
UTextureRenderTarget2D& RenderTarget);
/// Asynchronously save the pixels in @a RenderTarget to disk.
///
/// @pre To be called from game-thread.
static TFuture<bool> SavePixelsToDisk(
UTextureRenderTarget2D &RenderTarget,
const FString &FilePath);
UTextureRenderTarget2D& RenderTarget,
const FString& FilePath);
/// Asynchronously save the pixels in @a PixelData to disk.
///
/// @pre To be called from game-thread.
static TFuture<bool> SavePixelsToDisk(
TUniquePtr<TImagePixelData<FColor>> PixelData,
const FString &FilePath);
TUniquePtr<TImagePixelData<FColor>> PixelData,
const FString& FilePath);
/// Convenience function to enqueue a render command that sends the pixels
/// down the @a Sensor's data stream. It expects a sensor derived from
@ -77,16 +77,16 @@ public:
///
/// @pre To be called from game-thread.
template <typename TSensor, typename TPixel>
static void SendPixelsInRenderThread(TSensor &Sensor, bool use16BitFormat = false, std::function<TArray<TPixel>(void *, uint32)> Conversor = {});
static void SendPixelsInRenderThread(TSensor& Sensor, bool use16BitFormat = false, std::function<TArray<TPixel>(void*, uint32)> Conversor = {});
/// Copy the pixels in @a RenderTarget into @a Buffer.
///
/// @pre To be called from render-thread.
static void WritePixelsToBuffer(
const UTextureRenderTarget2D &RenderTarget,
uint32 Offset,
FRHICommandListImmediate &InRHICmdList,
FPixelReader::Payload FuncForSending);
UTextureRenderTarget2D& RenderTarget,
uint32 Offset,
FRHICommandListImmediate& InRHICmdList,
FPixelReader::Payload FuncForSending);
};
@ -95,7 +95,7 @@ public:
// =============================================================================
template <typename TSensor, typename TPixel>
void FPixelReader::SendPixelsInRenderThread(TSensor &Sensor, bool use16BitFormat, std::function<TArray<TPixel>(void *, uint32)> Conversor)
void FPixelReader::SendPixelsInRenderThread(TSensor& Sensor, bool use16BitFormat, std::function<TArray<TPixel>(void*, uint32)> Conversor)
{
TRACE_CPUPROFILER_EVENT_SCOPE(FPixelReader::SendPixelsInRenderThread);
check(Sensor.CaptureRenderTarget != nullptr);
@ -107,133 +107,137 @@ void FPixelReader::SendPixelsInRenderThread(TSensor &Sensor, bool use16BitFormat
/// Blocks until the render thread has finished all its tasks.
Sensor.EnqueueRenderSceneImmediate();
FlushRenderingCommands();
SavePixelsToDisk(
*Sensor.CaptureRenderTarget,
TEXT("F:\\Carla\\PythonAPI\\examples\\_out\\image.png"));
// Enqueue a command in the render-thread that will write the image buffer to
// the data stream. The stream is created in the capture thus executed in the
// game-thread.
ENQUEUE_RENDER_COMMAND(FWritePixels_SendPixelsInRenderThread)
(
[&Sensor, use16BitFormat, Conversor = std::move(Conversor)](auto &InRHICmdList) mutable
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("FWritePixels_SendPixelsInRenderThread");
/// @todo Can we make sure the sensor is not going to be destroyed?
if (!Sensor.IsPendingKill())
(
[&Sensor, use16BitFormat, Conversor = std::move(Conversor)](auto& InRHICmdList) mutable
{
FPixelReader::Payload FuncForSending =
[&Sensor, Frame = FCarlaEngine::GetFrameCounter(), Conversor = std::move(Conversor)](void *LockedData, uint32 Size, uint32 Offset, uint32 ExpectedRowBytes)
{
if (Sensor.IsPendingKill()) return;
TRACE_CPUPROFILER_EVENT_SCOPE_STR("FWritePixels_SendPixelsInRenderThread");
TArray<TPixel> Converted;
// optional conversion of data
if (Conversor)
/// @todo Can we make sure the sensor is not going to be destroyed?
if (!Sensor.IsPendingKill())
{
FPixelReader::Payload FuncForSending =
[&Sensor, Frame = FCarlaEngine::GetFrameCounter(), Conversor = std::move(Conversor)](void* LockedData, uint32 Size, uint32 Offset, uint32 ExpectedRowBytes)
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Data conversion");
Converted = Conversor(LockedData, Size);
LockedData = reinterpret_cast<void *>(Converted.GetData());
Size = Converted.Num() * Converted.GetTypeSize();
}
if (Sensor.IsPendingKill()) return;
auto Stream = Sensor.GetDataStream(Sensor);
Stream.SetFrameNumber(Frame);
auto Buffer = Stream.PopBufferFromPool();
TArray<TPixel> Converted;
uint32 CurrentRowBytes = ExpectedRowBytes;
// optional conversion of data
if (Conversor)
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Data conversion");
Converted = Conversor(LockedData, Size);
LockedData = reinterpret_cast<void*>(Converted.GetData());
Size = Converted.Num() * Converted.GetTypeSize();
}
auto Stream = Sensor.GetDataStream(Sensor);
Stream.SetFrameNumber(Frame);
auto Buffer = Stream.PopBufferFromPool();
uint32 CurrentRowBytes = ExpectedRowBytes;
#ifdef _WIN32
// DirectX uses additional bytes to align each row to a 256-byte boundary,
// so we need to remove that extra data
if (IsD3DPlatform(GMaxRHIShaderPlatform))
{
CurrentRowBytes = Align(ExpectedRowBytes, D3D12_TEXTURE_DATA_PITCH_ALIGNMENT);
if (ExpectedRowBytes != CurrentRowBytes)
// DirectX uses additional bytes to align each row to a 256-byte boundary,
// so we need to remove that extra data
if (IsD3DPlatform(GMaxRHIShaderPlatform))
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Buffer Copy (windows, row by row)");
Buffer.reset(Offset + Size);
auto DstRow = Buffer.begin() + Offset;
const uint8 *SrcRow = reinterpret_cast<uint8 *>(LockedData);
uint32 i = 0;
while (i < Size)
CurrentRowBytes = Align(ExpectedRowBytes, D3D12_TEXTURE_DATA_PITCH_ALIGNMENT);
if (ExpectedRowBytes != CurrentRowBytes)
{
FMemory::Memcpy(DstRow, SrcRow, ExpectedRowBytes);
DstRow += ExpectedRowBytes;
SrcRow += CurrentRowBytes;
i += ExpectedRowBytes;
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Buffer Copy (windows, row by row)");
Buffer.reset(Offset + Size);
auto DstRow = Buffer.begin() + Offset;
const uint8* SrcRow = reinterpret_cast<uint8*>(LockedData);
uint32 i = 0;
while (i < Size)
{
FMemory::Memcpy(DstRow, SrcRow, ExpectedRowBytes);
DstRow += ExpectedRowBytes;
SrcRow += CurrentRowBytes;
i += ExpectedRowBytes;
}
}
}
}
#endif // _WIN32
if (ExpectedRowBytes == CurrentRowBytes)
{
check(ExpectedRowBytes == CurrentRowBytes);
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Buffer Copy");
Buffer.copy_from(Offset, boost::asio::buffer(LockedData, Size));
}
{
// send
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Sending buffer");
if(Buffer.data())
if (ExpectedRowBytes == CurrentRowBytes)
{
// serialize data
carla::Buffer BufferReady(std::move(carla::sensor::SensorRegistry::Serialize(Sensor, std::move(Buffer))));
carla::SharedBufferView BufView = carla::BufferView::CreateFrom(std::move(BufferReady));
// ROS2
#if defined(WITH_ROS2)
auto ROS2 = carla::ros2::ROS2::GetInstance();
if (ROS2->IsEnabled())
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("ROS2 Send PixelReader");
auto StreamId = carla::streaming::detail::token_type(Sensor.GetToken()).get_stream_id();
auto Res = std::async(std::launch::async, [&Sensor, ROS2, &Stream, StreamId, BufView]()
{
// get resolution of camera
int W = -1, H = -1;
float Fov = -1.0f;
auto WidthOpt = Sensor.GetAttribute("image_size_x");
if (WidthOpt.has_value())
W = FCString::Atoi(*WidthOpt->Value);
auto HeightOpt = Sensor.GetAttribute("image_size_y");
if (HeightOpt.has_value())
H = FCString::Atoi(*HeightOpt->Value);
auto FovOpt = Sensor.GetAttribute("fov");
if (FovOpt.has_value())
Fov = FCString::Atof(*FovOpt->Value);
// send data to ROS2
AActor* ParentActor = Sensor.GetAttachParentActor();
if (ParentActor)
{
FTransform LocalTransformRelativeToParent = Sensor.GetActorTransform().GetRelativeTransform(ParentActor->GetActorTransform());
ROS2->ProcessDataFromCamera(Stream.GetSensorType(), StreamId, LocalTransformRelativeToParent, W, H, Fov, BufView, &Sensor);
}
else
{
ROS2->ProcessDataFromCamera(Stream.GetSensorType(), StreamId, Stream.GetSensorTransform(), W, H, Fov, BufView, &Sensor);
}
});
}
#endif
// network
SCOPE_CYCLE_COUNTER(STAT_CarlaSensorStreamSend);
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Stream Send");
Stream.Send(Sensor, BufView);
check(ExpectedRowBytes == CurrentRowBytes);
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Buffer Copy");
Buffer.copy_from(Offset, boost::asio::buffer(LockedData, Size));
}
}
};
{
// send
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Sending buffer");
if (Buffer.data())
{
// serialize data
carla::Buffer BufferReady(std::move(carla::sensor::SensorRegistry::Serialize(Sensor, std::move(Buffer))));
carla::SharedBufferView BufView = carla::BufferView::CreateFrom(std::move(BufferReady));
// ROS2
#if defined(WITH_ROS2)
auto ROS2 = carla::ros2::ROS2::GetInstance();
if (ROS2->IsEnabled())
{
TRACE_CPUPROFILER_EVENT_SCOPE_STR("ROS2 Send PixelReader");
auto StreamId = carla::streaming::detail::token_type(Sensor.GetToken()).get_stream_id();
auto Res = std::async(std::launch::async, [&Sensor, ROS2, &Stream, StreamId, BufView]()
{
// get resolution of camera
int W = -1, H = -1;
float Fov = -1.0f;
auto WidthOpt = Sensor.GetAttribute("image_size_x");
if (WidthOpt.has_value())
W = FCString::Atoi(*WidthOpt->Value);
auto HeightOpt = Sensor.GetAttribute("image_size_y");
if (HeightOpt.has_value())
H = FCString::Atoi(*HeightOpt->Value);
auto FovOpt = Sensor.GetAttribute("fov");
if (FovOpt.has_value())
Fov = FCString::Atof(*FovOpt->Value);
// send data to ROS2
AActor* ParentActor = Sensor.GetAttachParentActor();
if (ParentActor)
{
FTransform LocalTransformRelativeToParent = Sensor.GetActorTransform().GetRelativeTransform(ParentActor->GetActorTransform());
ROS2->ProcessDataFromCamera(Stream.GetSensorType(), StreamId, LocalTransformRelativeToParent, W, H, Fov, BufView, &Sensor);
}
else
{
ROS2->ProcessDataFromCamera(Stream.GetSensorType(), StreamId, Stream.GetSensorTransform(), W, H, Fov, BufView, &Sensor);
}
});
}
#endif
// network
SCOPE_CYCLE_COUNTER(STAT_CarlaSensorStreamSend);
TRACE_CPUPROFILER_EVENT_SCOPE_STR("Stream Send");
Stream.Send(Sensor, BufView);
}
}
};
WritePixelsToBuffer(
*Sensor.CaptureRenderTarget,
carla::sensor::SensorRegistry::get<TSensor *>::type::header_offset,
InRHICmdList,
std::move(FuncForSending));
*Sensor.CaptureRenderTarget,
carla::sensor::SensorRegistry::get<TSensor*>::type::header_offset,
InRHICmdList,
std::move(FuncForSending));
}
}
);
);
// Blocks until the render thread has finished all its tasks
Sensor.WaitForRenderThreadToFinish();
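The Windows-only branch above strips the extra bytes D3D12 adds to pad each texture row up to a 256-byte pitch before the image is streamed. A self-contained sketch of that row-by-row compaction, using plain std::memcpy and std::vector instead of Unreal's FMemory and carla::Buffer (the function name is illustrative):

#include <cstdint>
#include <cstring>
#include <vector>

// Copy `rows` rows of `row_bytes` useful bytes each out of a source buffer whose
// rows are padded to `pitch_bytes` (e.g. D3D12_TEXTURE_DATA_PITCH_ALIGNMENT).
static std::vector<std::uint8_t> RemoveRowPadding(
    const std::uint8_t *src, std::uint32_t rows,
    std::uint32_t row_bytes, std::uint32_t pitch_bytes) {
  std::vector<std::uint8_t> dst(static_cast<std::size_t>(rows) * row_bytes);
  std::uint8_t *dst_row = dst.data();
  for (std::uint32_t r = 0; r < rows; ++r) {
    std::memcpy(dst_row, src, row_bytes); // keep only the useful part of each row
    dst_row += row_bytes;
    src += pitch_bytes;                   // skip over the alignment padding
  }
  return dst;
}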

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma
# de Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma
# de Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Copyright (c) 2024 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.