#!/usr/bin/env python

# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""Import Assets to Carla"""

from __future__ import print_function

import errno
import fnmatch
import json
import os
import shutil
import subprocess
import argparse

# Global variables
IMPORT_SETTING_FILENAME = "importsetting.json"
SCRIPT_NAME = os.path.basename(__file__)
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
# Go two directories above the current script
CARLA_ROOT_PATH = os.path.normpath(SCRIPT_DIR + '/../..')
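# Note: the exact repository layout is an assumption here; with this script kept two
# levels below the repository root (as the relative path above implies), CARLA_ROOT_PATH
# resolves to the CARLA checkout that contains the "Unreal" and "Import" folders used below.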


def get_packages_json_list(folder):
    """Returns a list with the paths of each package's json
    files that have been found recursively in the input folder.
    """
    json_files = []

    for root, _, filenames in os.walk(folder):
        for filename in fnmatch.filter(filenames, "*.json"):
            if filename != "roadpainter_decals.json":
                json_files.append([root, filename])

    return json_files
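
# Illustrative sketch (not part of the original script): for an Import folder that
# contains MyPackage/MyPackage.json, the list returned above looks like
#   [["<carla-root>/Import/MyPackage", "MyPackage.json"]]
# i.e. one [directory, filename] pair per package definition found.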


def get_decals_json_file(folder):
    """Returns the name of the roadpainter_decals.json file if one is found
    recursively in the input folder, or an empty string otherwise.
    """
    for root, _, filenames in os.walk(folder):
        for filename in fnmatch.filter(filenames, "roadpainter_decals.json"):
            return filename

    return ""


def generate_json_package(folder, package_name, use_carla_materials):
    """Generate a .json file with all the maps it finds in the folder
    and its subfolders. A map is a .fbx and a .xodr with the same name.
    """
    json_files = []

    # search for all .fbx and .xodr file pairs
    maps = []
    for root, _, filenames in os.walk(folder):
        files = fnmatch.filter(filenames, "*.xodr")
        for file_name in files:
            xodr = file_name[:-5]
            # check if the matching .fbx file exists
            if os.path.exists("%s/%s.fbx" % (root, xodr)):
                maps.append([os.path.relpath(root, folder), xodr, ["%s.fbx" % xodr]])
            else:
                # check if the map is split into tiles
                tiles = fnmatch.filter(filenames, "*_Tile_*.fbx")
                if len(tiles) > 0:
                    maps.append([os.path.relpath(root, folder), xodr, tiles])

    # write the json
    if len(maps) > 0:
        # build all the maps in .json format
        json_maps = []
        for map_name in maps:
            path = map_name[0].replace('\\', '/')
            name = map_name[1]
            tiles = map_name[2]
            tiles = ["%s/%s" % (path, x) for x in tiles]
            map_dict = {
                'name': name,
                'xodr': '%s/%s.xodr' % (path, name),
                'use_carla_materials': use_carla_materials
            }
            # use a single 'source' entry, or list the maps in 'tiles'
            if len(tiles) == 1:
                map_dict['source'] = tiles[0]
            else:
                map_dict['tile_size'] = 2000
                map_dict['tiles'] = tiles

            json_maps.append(map_dict)

        # build and write the .json
        with open("%s/%s.json" % (folder, package_name), "w") as f:
            my_json = {'maps': json_maps, 'props': []}
            serialized = json.dumps(my_json, sort_keys=False, indent=3)
            f.write(serialized)

        # register the generated .json in the returned list
        json_files.append([folder, "%s.json" % package_name])

    return json_files
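
# For illustration only (names are hypothetical): a package .json built above for a
# single-FBX map roughly looks like
#   {
#      "maps": [
#         {"name": "MyMap", "xodr": "MyMap/MyMap.xodr",
#          "use_carla_materials": true, "source": "MyMap/MyMap.fbx"}
#      ],
#      "props": []
#   }
# Tiled maps get "tiles" and "tile_size" entries instead of "source".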


def generate_decals_file(folder):
    """Generate a roadpainter_decals.json file with a default decal
    configuration for every map found in the folder and its subfolders.
    """
    # search for all .fbx and .xodr file pairs
    maps = []
    for root, _, filenames in os.walk(folder):
        files = fnmatch.filter(filenames, "*.xodr")
        for file_name in files:
            xodr = file_name[:-5]
            # check if the matching .fbx file exists
            if os.path.exists("%s/%s.fbx" % (root, xodr)):
                maps.append([os.path.relpath(root, folder), xodr, ["%s.fbx" % xodr]])
            else:
                # check if the map is split into tiles
                tiles = fnmatch.filter(filenames, "*_Tile_*.fbx")
                if len(tiles) > 0:
                    maps.append([os.path.relpath(root, folder), xodr, tiles])

    if len(maps) > 0:
        # build all the maps in .json format
        json_decals = []
        for map_name in maps:
            name = map_name[1]

            # create the default decals config entry for this map
            json_decals.append({
                'map_name': name,
                'drip1': '10',
                'drip3': '10',
                'dirt1': '10',
                'dirt3': '10',
                'dirt4': '10',
                'dirt5': '10',
                'roadline1': '20',
                'roadline5': '20',
                'tiremark1': '20',
                'tiremark3': '20',
                'tarsnake1': '10',
                'tarsnake3': '20',
                'tarsnake4': '10',
                'tarsnake5': '20',
                'tarsnake11': '20',
                'cracksbig1': '10',
                'cracksbig3': '10',
                'cracksbig5': '10',
                'cracksbig8': '10',
                'mud1': '10',
                'mud5': '10',
                'oilsplat1': '20',
                'oilsplat2': '20',
                'oilsplat3': '20',
                'oilsplat4': '20',
                'oilsplat5': '20',
                'gum': '30',
                'crack1': '10',
                'crack3': '10',
                'crack4': '10',
                'crack5': '10',
                'crack8': '10',
                'decal_scale': {
                    'x_axis': '1.0',
                    'y_axis': '1.0',
                    'z_axis': '1.0'},
                'fixed_decal_offset': {
                    'x_axis': '15.0',
                    'y_axis': '15.0',
                    'z_axis': '0.0'},
                'decal_min_scale': '1.0',
                'decal_max_scale': '1.5',
                'decal_random_yaw': '360.0',
                'random_offset': '50.0'
            })

        # build and write the .json
        with open("%s/%s.json" % (folder, 'roadpainter_decals'), "w") as f:
            my_json = {'decals': json_decals}
            serialized = json.dumps(my_json, sort_keys=False, indent=3)
            f.write(serialized)
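
# Rough sketch of the output (illustrative values): roadpainter_decals.json holds one
# entry per map, e.g.
#   {"decals": [{"map_name": "MyMap", "drip1": "10", ..., "random_offset": "50.0"}]}
# where each decal name maps to the default amount written above.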


def invoke_commandlet(name, arguments):
    """Generic function for running a commandlet with its arguments."""
    ue4_path = os.environ["UE4_ROOT"]
    uproject_path = os.path.join(CARLA_ROOT_PATH, "Unreal", "CarlaUE4", "CarlaUE4.uproject")
    run = "-run=%s" % (name)

    if os.name == "nt":
        sys_name = "Win64"
        editor_path = "%s/Engine/Binaries/%s/UE4Editor" % (ue4_path, sys_name)
        command = [editor_path, uproject_path, run]
        command.extend(arguments)
        print("Commandlet:", command)
        subprocess.check_call(command, shell=True)
    elif os.name == "posix":
        sys_name = "Linux"
        editor_path = "%s/Engine/Binaries/%s/UE4Editor" % (ue4_path, sys_name)
        full_command = "%s %s %s %s" % (editor_path, uproject_path, run, " ".join(arguments))
        print("Commandlet:", full_command)
        subprocess.call([full_command], shell=True)
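
# For reference, an invocation assembled above looks roughly like this on Linux
# (paths shortened, illustrative only):
#   $UE4_ROOT/Engine/Binaries/Linux/UE4Editor .../CarlaUE4.uproject -run=ImportAssets \
#       -importSettings="/path/to/importsetting.json" -nosourcecontrol -replaceexisting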


def generate_import_setting_file(package_name, json_dirname, props, maps, do_tiles, tile_size):
    """Creates the PROPS and MAPS import_setting.json file needed
    as an argument for using the ImportAssets commandlet
    """
    importfile = os.path.join(os.getcwd(), IMPORT_SETTING_FILENAME)
    if os.path.exists(importfile):
        os.remove(importfile)

    with open(importfile, "w+") as fh:
        import_groups = []
        file_names = []
        import_settings = {
            "bImportMesh": 1,
            "bConvertSceneUnit": 1,
            "bConvertScene": 1,
            "bCombineMeshes": 1,
            "bImportTextures": 1,
            "bImportMaterials": 1,
            "bRemoveDegenerates": 1,
            "AnimSequenceImportData": {},
            "SkeletalMeshImportData": {},
            "TextureImportData": {},
            "StaticMeshImportData": {
                "bRemoveDegenerates": 1,
                "bAutoGenerateCollision": 0,
                "bCombineMeshes": 0,
                "bConvertSceneUnit": 1,
                "bForceVerticesRelativeToTile": do_tiles,
                "TileSize": tile_size
            }
        }

        for prop in props:
            props_dest = "/" + "/".join(["Game", package_name, "Static", prop["tag"], prop["name"]])

            file_names = [os.path.join(json_dirname, prop["source"])]
            import_groups.append({
                "ImportSettings": import_settings,
                "FactoryName": "FbxFactory",
                "DestinationPath": props_dest,
                "bReplaceExisting": "true",
                "FileNames": file_names
            })

        for umap in maps:
            maps_dest = "/" + "/".join(["Game", package_name, "Maps", umap["name"]])

            if "source" in umap:
                tiles = [os.path.join(json_dirname, umap["source"])]
            else:
                tiles = ["%s" % (os.path.join(json_dirname, x)) for x in umap["tiles"]]
            import_groups.append({
                "ImportSettings": import_settings,
                "FactoryName": "FbxFactory",
                "DestinationPath": maps_dest,
                "bReplaceExisting": "true",
                "FileNames": tiles
            })

        fh.write(json.dumps({"ImportGroups": import_groups}))

    return importfile
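
# Illustrative shape of the file written above (one group per prop and per map):
#   {"ImportGroups": [{"ImportSettings": {...}, "FactoryName": "FbxFactory",
#                      "DestinationPath": "/Game/<package>/Maps/<map>",
#                      "bReplaceExisting": "true", "FileNames": ["<path>.fbx"]}]}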


def generate_package_file(package_name, props, maps):
    """Creates the PackageName.Package.json file for the package."""
    output_json = {}

    output_json["props"] = []
    for prop in props:
        name = prop["name"]
        size = prop["size"]
        source_name = os.path.basename(prop["source"]).split('.')
        if len(source_name) < 2:
            print("[Warning] File name '" + prop["source"] + "' does not include an extension ('.')")

        source_name = '.'.join([source_name[0], source_name[0]])

        path = "/" + "/".join(["Game", package_name, "Static", prop["tag"], prop["name"], source_name])

        output_json["props"].append({
            "name": name,
            "path": path,
            "size": size,
        })

    output_json["maps"] = []
    for umap in maps:
        path = "/" + "/".join(["Game", package_name, "Maps", umap["name"]])
        use_carla_materials = umap["use_carla_materials"] if "use_carla_materials" in umap else False
        output_json["maps"].append({
            "name": umap["name"],
            "path": path,
            "use_carla_materials": use_carla_materials
        })

    package_config_path = os.path.join(CARLA_ROOT_PATH, "Unreal", "CarlaUE4", "Content", package_name, "Config")
    if not os.path.exists(package_config_path):
        try:
            os.makedirs(package_config_path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise

    with open(os.path.join(package_config_path, package_name + ".Package.json"), "w+") as fh:
        json.dump(output_json, fh, indent=4)
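
# For reference, the file written above ends up at
# Unreal/CarlaUE4/Content/<package>/Config/<package>.Package.json with this rough shape
# (values illustrative):
#   {"props": [{"name": "...", "path": "/Game/<package>/Static/<tag>/<prop>/<file>",
#               "size": "..."}],
#    "maps": [{"name": "...", "path": "/Game/<package>/Maps/<map>",
#              "use_carla_materials": true}]}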


def copy_roadpainter_config_files(package_name):
    """Copies roadpainter configuration files into Unreal content folder"""
    two_directories_up = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    final_path = os.path.join(two_directories_up, "Import", "roadpainter_decals.json")
    package_config_path = os.path.join(CARLA_ROOT_PATH, "Unreal", "CarlaUE4", "Content", package_name, "Config")
    if not os.path.exists(package_config_path):
        try:
            os.makedirs(package_config_path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise
    shutil.copy(final_path, package_config_path)


def import_assets(package_name, json_dirname, props, maps, do_tiles, tile_size):
    """Same commandlet is used for importing assets and also maps."""
    commandlet_name = "ImportAssets"

    # Import Props
    import_setting_file = generate_import_setting_file(package_name, json_dirname, props, maps, do_tiles, tile_size)
    commandlet_arguments = ["-importSettings=\"%s\"" % import_setting_file, "-nosourcecontrol", "-replaceexisting"]
    invoke_commandlet(commandlet_name, commandlet_arguments)
    os.remove(import_setting_file)

    # Move maps XODR files if any
    for umap in maps:
        # Make sure XODR info is full and the file exists
        if "xodr" in umap and umap["xodr"] and os.path.isfile(os.path.join(json_dirname, umap["xodr"])):
            # Make sure the `.xodr` file has the same name as the `.umap`
            xodr_path = os.path.abspath(os.path.join(json_dirname, umap["xodr"]))
            umap_name = umap["name"]
            xodr_name = '.'.join([umap_name, "xodr"])

            xodr_folder_destin = os.path.join(
                CARLA_ROOT_PATH,
                "Unreal",
                "CarlaUE4",
                "Content",
                package_name,
                "Maps",
                umap_name,
                "OpenDrive")

            if not os.path.exists(xodr_folder_destin):
                os.makedirs(xodr_folder_destin)

            xodr_path_destin = os.path.join(
                xodr_folder_destin,
                xodr_name)

            print('Copying "' + xodr_path + '" to "' + xodr_path_destin + '"')
            shutil.copy2(xodr_path, xodr_path_destin)

    # Create package file
    generate_package_file(package_name, props, maps)


def import_assets_from_json_list(json_list):
    maps = []
    package_name = ""
    for dirname, filename in json_list:
        # Read json file
        with open(os.path.join(dirname, filename)) as json_file:
            data = json.load(json_file)
            # Take all the fbx registered in the provided json files
            # and place them inside Unreal at the path provided by the json file
            maps = []
            props = []
            if "maps" in data:
                maps = data["maps"]
            if "props" in data:
                props = data["props"]

            # guard against props-only packages that define no maps
            if len(maps) > 0 and "tile_size" in maps[0]:
                tile_size = maps[0]["tile_size"]
            else:
                tile_size = 2000

            package_name = filename.replace(".json", "")

            if len(maps) > 0 and "tiles" in maps[0]:
                import_assets(package_name, dirname, props, maps, 1, tile_size)
            else:
                import_assets(package_name, dirname, props, maps, 0, 0)

            if not package_name:
                print("No Packages JSONs found, nothing to import. Skipping package.")
                continue

            # First we only move the meshes to the tagged folders for semantic
            # segmentation
            move_assets_commandlet(package_name, maps)

            # We need to build the binary file for pedestrian navigation
            build_binary_for_navigation(package_name, dirname, maps)

            # We prepare only the maps for cooking after moving them. Props cooking will be done from the Package.sh script.
            if len(maps) > 0:
                prepare_maps_commandlet_for_cooking(package_name, only_prepare_maps=True)

            # We apply the carla materials to the imported maps
            load_asset_materials_commandlet(package_name)


def load_asset_materials_commandlet(package_name):
    """Runs the LoadAssetMaterials commandlet, which applies the Carla materials to the imported maps."""
    commandlet_name = "LoadAssetMaterials"
    commandlet_arguments = ["-PackageName=%s" % package_name]
    invoke_commandlet(commandlet_name, commandlet_arguments)


def prepare_maps_commandlet_for_cooking(package_name, only_prepare_maps):
    commandlet_name = "PrepareAssetsForCooking"
    commandlet_arguments = ["-PackageName=%s" % package_name]
    commandlet_arguments.append("-OnlyPrepareMaps=%d" % only_prepare_maps)
    invoke_commandlet(commandlet_name, commandlet_arguments)


def move_assets_commandlet(package_name, maps):
    commandlet_name = "MoveAssets"
    commandlet_arguments = ["-PackageName=%s" % package_name]

    umap_names = ""
    for umap in maps:
        umap_names += umap["name"] + " "
    commandlet_arguments.append("-Maps=%s" % umap_names)

    invoke_commandlet(commandlet_name, commandlet_arguments)


# build the binary file for navigation of pedestrians for that map
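# Rough flow, as implemented below: the map's .xodr and each source .fbx are copied into
# Util/DockerUtils/dist, the platform build script (build.bat / build.sh) is run there to
# produce a <tile>.bin navigation file, and that file is moved into
# Unreal/CarlaUE4/Content/<package>/Maps/<map>/Nav before the temporary copies are removed.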
def build_binary_for_navigation(package_name, dirname, maps):
    folder = os.path.join(CARLA_ROOT_PATH, "Util", "DockerUtils", "dist")

    # process each map
    for umap in maps:

        # get the sources for the map (single or tiles)
        if "source" in umap:
            tiles = [umap["source"]]
        elif "tiles" in umap:
            tiles = umap["tiles"]
        else:
            continue

        # get the target name
        target_name = umap["name"]
        xodr_filename = os.path.basename(umap["xodr"])

        # copy the XODR file into the docker utils folder
        if "xodr" in umap and umap["xodr"] and os.path.isfile(os.path.join(dirname, umap["xodr"])):
            # Make sure the `.xodr` file has the same name as the `.umap`
            xodr_path_source = os.path.abspath(os.path.join(dirname, umap["xodr"]))
            xodr_path_target = os.path.join(folder, xodr_filename)
            # copy
            print('Copying "' + xodr_path_source + '" to "' + xodr_path_target + '"')
            shutil.copy2(xodr_path_source, xodr_path_target)

        for tile in tiles:

            fbx_filename = os.path.basename(tile)
            fbx_name_no_ext = os.path.splitext(fbx_filename)[0]

            # copy the FBX file into the docker utils folder
            if os.path.isfile(os.path.join(dirname, tile)):
                # Make sure the `.fbx` file has the same name as the `.umap`
                fbx_path_source = os.path.abspath(os.path.join(dirname, tile))
                fbx_path_target = os.path.join(folder, fbx_filename)
                # copy
                print('Copying "' + fbx_path_source + '" to "' + fbx_path_target + '"')
                shutil.copy2(fbx_path_source, fbx_path_target)

                # rename the xodr to match the name of the source/tile
                os.rename(os.path.join(folder, xodr_filename), os.path.join(folder, "%s.xodr" % fbx_name_no_ext))

                # make the conversion
                if os.name == "nt":
                    subprocess.call(["build.bat", fbx_name_no_ext], cwd=folder, shell=True)
                else:
                    subprocess.call(["chmod +x build.sh"], cwd=folder, shell=True)
                    subprocess.call("./build.sh %s" % fbx_name_no_ext, cwd=folder, shell=True)

                # rename the xodr back to its original name
                os.rename(os.path.join(folder, "%s.xodr" % fbx_name_no_ext), os.path.join(folder, xodr_filename))

                # copy the binary file
                nav_path_source = os.path.join(folder, "%s.bin" % fbx_name_no_ext)
                nav_folder_target = os.path.join(CARLA_ROOT_PATH, "Unreal", "CarlaUE4", "Content", package_name, "Maps", target_name, "Nav")
                if os.path.exists(nav_path_source):
                    if not os.path.exists(nav_folder_target):
                        os.makedirs(nav_folder_target)
                    nav_path_target = os.path.join(nav_folder_target, "%s.bin" % fbx_name_no_ext)
                    print('Copying "' + nav_path_source + '" to "' + nav_path_target + '"')
                    shutil.copy2(nav_path_source, nav_path_target)

                # remove temporary files
                if os.path.exists(nav_path_source):
                    os.remove(nav_path_source)

                if os.path.exists(fbx_path_target):
                    os.remove(fbx_path_target)

        os.remove(xodr_path_target)


def main():
    argparser = argparse.ArgumentParser(description=__doc__)
    argparser.add_argument(
        '--package',
        metavar='P',
        default='map_package',
        help='Name of the imported package')
    argparser.add_argument(
        '--no-carla-materials',
        action='store_false',
        help='Do not use Carla materials')
    argparser.add_argument(
        '--json-only',
        action='store_true',
        help='Create JSON files only')
    args = argparser.parse_known_args()[0]

    import_folder = os.path.join(CARLA_ROOT_PATH, "Import")
    json_list = get_packages_json_list(import_folder)
    decals_json = get_decals_json_file(import_folder)

    if len(json_list) < 1:
        json_list = generate_json_package(import_folder, args.package, args.no_carla_materials)

    if len(decals_json) == 0:
        generate_decals_file(import_folder)

    if not args.json_only:
        copy_roadpainter_config_files(args.package)
        import_assets_from_json_list(json_list)
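
# Example invocation (illustrative; assumes this script is saved as Import.py inside the
# CARLA checkout and that UE4_ROOT is set, as invoke_commandlet requires):
#   python Import.py --package MyPackage --no-carla-materials
#   python Import.py --json-only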


if __name__ == '__main__':
    main()