Use multiple threads to import.

- For now everything imports multithreaded by default (should work I guess, let's test).
- Controllable per importer.

Early test benchmark: 64 large textures (importing as lossless, _not_ as VRAM) on a mobile i7 with 12 threads. Importing goes down from 46 to 7 seconds.

For VRAM I will change the logic to use a compressing thread in a subsequent PR, as well as implementing Betsy.

This commit is contained in:
parent 29775a1714
commit 2b730cad90
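Editorial note, not part of the commit: threaded import is gated globally by the new project setting editor/import/use_multiple_threads (registered in EditorFileSystem's constructor below) and per importer by the new ResourceImporter::can_import_threaded() virtual. A minimal sketch of turning the setting off from engine code; the helper is hypothetical and the include path is assumed from the 4.0 source layout:

#include "core/config/project_settings.h" // include path assumed for the 4.0 layout

// Hypothetical helper: force the editor back onto the single-threaded import path.
static void disable_threaded_import() {
	ProjectSettings::get_singleton()->set_setting("editor/import/use_multiple_threads", false);
}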
@@ -192,6 +192,34 @@ bool ResourceFormatImporter::recognize_path(const String &p_path, const String &
 	return FileAccess::exists(p_path + ".import");
 }
 
+Error ResourceFormatImporter::get_import_order_threads_and_importer(const String &p_path, int &r_order, bool &r_can_threads, String &r_importer) const {
+	r_order = 0;
+	r_importer = "";
+
+	r_can_threads = false;
+	Ref<ResourceImporter> importer;
+
+	if (FileAccess::exists(p_path + ".import")) {
+		PathAndType pat;
+		Error err = _get_path_and_type(p_path, pat);
+
+		if (err == OK) {
+			importer = get_importer_by_name(pat.importer);
+		}
+	} else {
+		importer = get_importer_by_extension(p_path.get_extension().to_lower());
+	}
+
+	if (importer.is_valid()) {
+		r_order = importer->get_import_order();
+		r_importer = importer->get_importer_name();
+		r_can_threads = importer->can_import_threaded();
+		return OK;
+	} else {
+		return ERR_INVALID_PARAMETER;
+	}
+}
+
 int ResourceFormatImporter::get_import_order(const String &p_path) const {
 	Ref<ResourceImporter> importer;
 
@@ -72,6 +72,8 @@ public:
 
 	virtual int get_import_order(const String &p_path) const;
 
+	Error get_import_order_threads_and_importer(const String &p_path, int &r_order, bool &r_can_threads, String &r_importer) const;
+
 	String get_internal_resource_path(const String &p_path) const;
 	void get_internal_resource_path_list(const String &p_path, List<String> *r_paths);
 
@@ -126,6 +128,9 @@ public:
 	virtual String get_option_group_file() const { return String(); }
 
 	virtual Error import(const String &p_source_file, const String &p_save_path, const Map<StringName, Variant> &p_options, List<String> *r_platform_variants, List<String> *r_gen_files = nullptr, Variant *r_metadata = nullptr) = 0;
+	virtual bool can_import_threaded() const { return true; }
+	virtual void import_threaded_begin() {}
+	virtual void import_threaded_end() {}
 
 	virtual Error import_group_file(const String &p_group_file, const Map<String, Map<StringName, Variant>> &p_source_file_options, const Map<String, String> &p_base_paths) { return ERR_UNAVAILABLE; }
 	virtual bool are_import_settings_valid(const String &p_path) const { return true; }
 
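Editorial note, not part of the commit: the hunk above is where importers get per-importer control over threading. A minimal sketch of an importer opting out; the class name is hypothetical and the remaining ResourceImporter pure virtuals (get_importer_name(), import(), etc.) are omitted for brevity:

class ResourceImporterExample : public ResourceImporter {
	GDCLASS(ResourceImporterExample, ResourceImporter);

public:
	// Returning false keeps this importer on the single-file, single-threaded
	// path in EditorFileSystem::reimport_files().
	virtual bool can_import_threaded() const override { return false; }

	// If threading stays enabled, these hooks bracket each batch of files
	// handed to ThreadWorkPool, e.g. to set up and tear down shared state.
	virtual void import_threaded_begin() override {}
	virtual void import_threaded_end() override {}

	// Remaining ResourceImporter virtuals omitted for brevity.
};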
@@ -83,7 +83,7 @@ public:
 		ERR_FAIL_COND(!threads); //never initialized
 		ERR_FAIL_COND(current_work != nullptr);
 
-		index.store(0);
+		index.store(0, std::memory_order_release);
 
 		Work<C, M, U> *w = memnew((Work<C, M, U>));
 		w->instance = p_instance;
 
@@ -104,8 +104,15 @@ public:
 		return current_work != nullptr;
 	}
 
+	bool is_done_dispatching() const {
+		ERR_FAIL_COND_V(current_work == nullptr, false);
+		return index.load(std::memory_order_acquire) >= current_work->max_elements;
+	}
+
 	uint32_t get_work_index() const {
-		return index;
+		ERR_FAIL_COND_V(current_work == nullptr, 0);
+		uint32_t idx = index.load(std::memory_order_acquire);
+		return MIN(idx, current_work->max_elements);
 	}
 
 	void end_work() {
 
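Editorial note, not part of the commit: the two hunks above give ThreadWorkPool a non-blocking completion check and a bounds-safe work index. The caller-side pattern then looks roughly like this sketch (MyObject, MyData, process_item() and the element count are placeholder names); it is the shape EditorFileSystem::reimport_files() and GPUParticlesCollisionSDF::_compute_sdf() follow in the hunks below:

// Sketch only; not engine code.
void MyObject::bake_all(MyData *p_data, uint32_t p_item_count) {
	ThreadWorkPool pool;
	pool.init();
	pool.begin_work(p_item_count, this, &MyObject::process_item, p_data);
	while (!pool.is_done_dispatching()) {
		// Indices are still being handed out to workers; poll and report progress.
		OS::get_singleton()->delay_usec(1000);
	}
	pool.end_work(); // Waits for the items already dispatched to finish.
	pool.finish();
}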
@@ -1922,6 +1922,11 @@ void EditorFileSystem::reimport_file_with_custom_parameters(const String &p_file
 	_reimport_file(p_file, &p_custom_params, p_importer);
 }
 
+void EditorFileSystem::_reimport_thread(uint32_t p_index, ImportThreadData *p_import_data) {
+	p_import_data->max_index = MAX(p_import_data->reimport_from + int(p_index), p_import_data->max_index);
+	_reimport_file(p_import_data->reimport_files[p_import_data->reimport_from + p_index].path);
+}
+
 void EditorFileSystem::reimport_files(const Vector<String> &p_files) {
 	{
 		// Ensure that ProjectSettings::IMPORTED_FILES_PATH exists.
 
@@ -1939,7 +1944,8 @@ void EditorFileSystem::reimport_files(const Vector<String> &p_files) {
 	importing = true;
 	EditorProgress pr("reimport", TTR("(Re)Importing Assets"), p_files.size());
 
-	Vector<ImportFile> files;
+	Vector<ImportFile> reimport_files;
+
 	Set<String> groups_to_reimport;
 
 	for (int i = 0; i < p_files.size(); i++) {
 
@@ -1957,8 +1963,8 @@ void EditorFileSystem::reimport_files(const Vector<String> &p_files) {
 			//it's a regular file
 			ImportFile ifile;
 			ifile.path = p_files[i];
-			ifile.order = ResourceFormatImporter::get_singleton()->get_import_order(p_files[i]);
-			files.push_back(ifile);
+			ResourceFormatImporter::get_singleton()->get_import_order_threads_and_importer(p_files[i], ifile.order, ifile.threaded, ifile.importer);
+			reimport_files.push_back(ifile);
 		}
 
 		//group may have changed, so also update group reference
 
@@ -1969,11 +1975,51 @@ void EditorFileSystem::reimport_files(const Vector<String> &p_files) {
 		}
 	}
 
-	files.sort();
+	reimport_files.sort();
 
-	for (int i = 0; i < files.size(); i++) {
-		pr.step(files[i].path.get_file(), i);
-		_reimport_file(files[i].path);
+	bool use_threads = GLOBAL_GET("editor/import/use_multiple_threads");
+
+	int from = 0;
+	for (int i = 0; i < reimport_files.size(); i++) {
+		if (use_threads && reimport_files[i].threaded) {
+			if (i + 1 == reimport_files.size() || reimport_files[i + 1].importer != reimport_files[from].importer) {
+				if (from - i == 0) {
+					//single file, do not use threads
+					pr.step(reimport_files[i].path.get_file(), i);
+					_reimport_file(reimport_files[i].path);
+				} else {
+					Ref<ResourceImporter> importer = ResourceFormatImporter::get_singleton()->get_importer_by_name(reimport_files[from].importer);
+					ERR_CONTINUE(!importer.is_valid());
+
+					importer->import_threaded_begin();
+
+					ImportThreadData data;
+					data.max_index = from;
+					data.reimport_from = from;
+					data.reimport_files = reimport_files.ptr();
+
+					import_threads.begin_work(i - from + 1, this, &EditorFileSystem::_reimport_thread, &data);
+					int current_index = from - 1;
+					do {
+						if (current_index < data.max_index) {
+							current_index = data.max_index;
+							pr.step(reimport_files[current_index].path.get_file(), current_index);
+						}
+						OS::get_singleton()->delay_usec(1);
+					} while (!import_threads.is_done_dispatching());
+
+					import_threads.end_work();
+
+					importer->import_threaded_end();
+				}
+
+				from = i + 1;
+			}
+
+		} else {
+			pr.step(reimport_files[i].path.get_file(), i);
+			_reimport_file(reimport_files[i].path);
+		}
 	}
 
 	//reimport groups
 
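Editorial note, not part of the commit: in the hunk above, files whose importer allows threading are accumulated while consecutive entries share the same importer; when the importer changes (or the list ends), the run from `from` to `i` is dispatched as a single ThreadWorkPool batch bracketed by import_threaded_begin()/import_threaded_end(). The main thread spins on is_done_dispatching() and uses ImportThreadData::max_index (advanced by the workers in _reimport_thread()) only to move the progress dialog forward; a run of length one falls back to the plain single-threaded _reimport_file() call.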
@@ -2111,7 +2157,7 @@ void EditorFileSystem::_update_extensions() {
 EditorFileSystem::EditorFileSystem() {
 	ResourceLoader::import = _resource_import;
 	reimport_on_missing_imported_files = GLOBAL_DEF("editor/import/reimport_missing_imported_files", true);
+
+	GLOBAL_DEF("editor/import/use_multiple_threads", true);
 	singleton = this;
 	filesystem = memnew(EditorFileSystemDirectory); //like, empty
 	filesystem->parent = nullptr;
 
@@ -2138,7 +2184,9 @@ EditorFileSystem::EditorFileSystem() {
 	first_scan = true;
 	scan_changes_pending = false;
 	revalidate_import_files = false;
+	import_threads.init();
 }
 
 EditorFileSystem::~EditorFileSystem() {
+	import_threads.finish();
 }
 
@@ -36,7 +36,9 @@
 #include "core/os/thread_safe.h"
 #include "core/templates/safe_refcount.h"
+#include "core/templates/set.h"
+#include "core/templates/thread_work_pool.h"
 #include "scene/main/node.h"
 
 class FileAccess;
 
 struct EditorProgressBG;
 
@@ -214,9 +216,11 @@ class EditorFileSystem : public Node {
 
 	struct ImportFile {
 		String path;
+		String importer;
+		bool threaded = false;
 		int order = 0;
 		bool operator<(const ImportFile &p_if) const {
-			return order < p_if.order;
+			return order == p_if.order ? (importer < p_if.importer) : (order < p_if.order);
 		}
 	};
 
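Editorial note, not part of the commit: extending operator< to compare the importer name when orders tie is what makes the sort in reimport_files() place files of the same importer in contiguous runs, which the batching loop above relies on.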
@@ -236,6 +240,16 @@ class EditorFileSystem : public Node {
 
 	Set<String> group_file_cache;
 
+	ThreadWorkPool import_threads;
+
+	struct ImportThreadData {
+		const ImportFile *reimport_files;
+		int reimport_from;
+		int max_index = 0;
+	};
+
+	void _reimport_thread(uint32_t p_index, ImportThreadData *p_import_data);
+
 protected:
 	void _notification(int p_what);
 	static void _bind_methods();
 
@@ -173,6 +173,8 @@ public:
 	virtual bool has_advanced_options() const override;
 	virtual void show_advanced_options(const String &p_path) override;
 
+	virtual bool can_import_threaded() const override { return false; }
+
 	ResourceImporterScene();
 };
 
@@ -346,7 +346,7 @@ void GPUParticlesCollisionSDF::_compute_sdf(ComputeSDFParams *params) {
 	ThreadWorkPool work_pool;
 	work_pool.init();
 	work_pool.begin_work(params->size.z, this, &GPUParticlesCollisionSDF::_compute_sdf_z, params);
-	while (work_pool.get_work_index() < (uint32_t)params->size.z) {
+	while (!work_pool.is_done_dispatching()) {
 		OS::get_singleton()->delay_usec(10000);
 		bake_step_function(work_pool.get_work_index() * 100 / params->size.z, "Baking SDF");
 	}
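Editorial note, not part of the commit: this caller is migrated to the reworked ThreadWorkPool API. is_done_dispatching() replaces the manual get_work_index() comparison, and get_work_index(), still used for the progress percentage, is now clamped to max_elements, so the reported bake progress can no longer overshoot 100%.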