From e4e044098e5c4d825537cc3166c3d730b04c1e50 Mon Sep 17 00:00:00 2001 From: Sylvain Martel Date: Thu, 16 Jun 2022 09:26:40 -0400 Subject: [PATCH 1/2] Add gltf-compiler for every internal types --- .cargo/config.toml | 11 + .vscode/legionlabs.natvis | 3 +- Cargo.lock | 38 +-- crates/lgn-data-build/src/databuild.rs | 154 ++++++---- crates/lgn-data-compiler/src/compiler_api.rs | 30 +- .../src/compiler_node/compiler_registry.rs | 8 +- .../src/compiler_node/inproc_stub.rs | 4 +- crates/lgn-data-compiler/tests/compile.rs | 2 +- crates/lgn-data-runtime/src/asset_registry.rs | 18 +- crates/lgn-data-runtime/src/lib.rs | 8 +- crates/lgn-data-runtime/src/resource.rs | 9 +- crates/lgn-data-runtime/src/resourcepathid.rs | 14 + crates/lgn-data-transaction/Cargo.toml | 1 + .../lgn-data-transaction/src/lock_context.rs | 14 +- .../lgn-data-transaction/src/transaction.rs | 10 +- .../src/transaction_manager.rs | 61 ++-- crates/lgn-editor-srv/src/grpc.rs | 1 + crates/lgn-editor-srv/src/plugin.rs | 31 +- .../src/property_inspector_plugin.rs | 3 +- .../Cargo.toml | 10 +- crates/lgn-gltf-compiler/src/lib.rs | 164 +++++++++++ crates/lgn-gltf-compiler/src/main.rs | 15 + crates/lgn-gltf2mat-compiler/Cargo.toml | 27 -- crates/lgn-gltf2mat-compiler/src/lib.rs | 112 -------- crates/lgn-gltf2mat-compiler/src/main.rs | 9 - crates/lgn-gltf2model-compiler/Cargo.toml | 28 -- crates/lgn-gltf2model-compiler/src/lib.rs | 100 ------- crates/lgn-gltf2model-compiler/src/main.rs | 9 - crates/lgn-gltf2tex-compiler/src/lib.rs | 101 ------- crates/lgn-gltf2tex-compiler/src/main.rs | 9 - .../src/offline/gltf_utils.rs | 266 ++++++++++-------- crates/lgn-resource-registry/src/lib.rs | 7 +- crates/lgn-scene-plugin/src/lib.rs | 19 +- crates/lgn-scene-plugin/src/scene_instance.rs | 107 ++----- crates/lgn-scene-plugin/src/scene_manager.rs | 58 +++- crates/lgn-ubercompiler/Cargo.toml | 4 +- crates/lgn-ubercompiler/src/lib.rs | 4 +- tests/sample-data-compiler/Cargo.toml | 1 + .../src/raw_loader/mod.rs | 186 ++++++++---- .../raw/world/sample_1/helmet/helmet_root.ent | 16 +- 40 files changed, 824 insertions(+), 848 deletions(-) rename crates/{lgn-gltf2tex-compiler => lgn-gltf-compiler}/Cargo.toml (74%) create mode 100644 crates/lgn-gltf-compiler/src/lib.rs create mode 100644 crates/lgn-gltf-compiler/src/main.rs delete mode 100644 crates/lgn-gltf2mat-compiler/Cargo.toml delete mode 100644 crates/lgn-gltf2mat-compiler/src/lib.rs delete mode 100644 crates/lgn-gltf2mat-compiler/src/main.rs delete mode 100644 crates/lgn-gltf2model-compiler/Cargo.toml delete mode 100644 crates/lgn-gltf2model-compiler/src/lib.rs delete mode 100644 crates/lgn-gltf2model-compiler/src/main.rs delete mode 100644 crates/lgn-gltf2tex-compiler/src/lib.rs delete mode 100644 crates/lgn-gltf2tex-compiler/src/main.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index f62914b032..7bce1fe226 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -135,5 +135,16 @@ opt-level = 3 debug = true debug-assertions = false +[profile.dev.package.image] +opt-level = 3 +debug = true +debug-assertions = false + +[profile.dev.package.gltf] +opt-level = 3 +debug = true +debug-assertions = false + + [env] LGN_TELEMETRY_GRPC_API_KEY = "296bb99c-6921-4233-a353-a29c1045b5f2" diff --git a/.vscode/legionlabs.natvis b/.vscode/legionlabs.natvis index cb22fedde1..b8621b4b22 100644 --- a/.vscode/legionlabs.natvis +++ b/.vscode/legionlabs.natvis @@ -29,7 +29,8 @@ offline_model({__0,x}) runtime_model({__0,x}) gltf({__0,x}) - + export_gltf({__0,x}) + {__0,x} diff --git a/Cargo.lock 
b/Cargo.lock index d7ec9abac8..be30d1e35b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4104,35 +4104,7 @@ dependencies = [ ] [[package]] -name = "lgn-compiler-gltf2mat" -version = "0.1.0" -dependencies = [ - "async-trait", - "gltf", - "lgn-content-store", - "lgn-data-compiler", - "lgn-data-offline", - "lgn-data-runtime", - "lgn-graphics-data", - "tokio", -] - -[[package]] -name = "lgn-compiler-gltf2model" -version = "0.1.0" -dependencies = [ - "async-trait", - "gltf", - "lgn-content-store", - "lgn-data-compiler", - "lgn-data-offline", - "lgn-data-runtime", - "lgn-graphics-data", - "tokio", -] - -[[package]] -name = "lgn-compiler-gltf2tex" +name = "lgn-compiler-gltf" version = "0.1.0" dependencies = [ "async-trait", @@ -4141,6 +4113,8 @@ dependencies = [ "lgn-data-offline", "lgn-data-runtime", "lgn-graphics-data", + "lgn-telemetry-sink", + "lgn-tracing", "tokio", ] @@ -4652,6 +4626,7 @@ name = "lgn-data-transaction" version = "0.1.0" dependencies = [ "async-trait", + "crossbeam-channel", "erased-serde", "futures-lite", "generic-data", @@ -5518,9 +5493,7 @@ name = "lgn-ubercompiler" version = "0.1.0" dependencies = [ "async-trait", - "lgn-compiler-gltf2mat", - "lgn-compiler-gltf2model", - "lgn-compiler-gltf2tex", + "lgn-compiler-gltf", "lgn-compiler-material", "lgn-compiler-png2tex", "lgn-compiler-psd2tex", @@ -7816,6 +7789,7 @@ dependencies = [ "bincode", "clap 3.1.18", "generic-data", + "gltf", "lgn-config", "lgn-content-store", "lgn-data-build", diff --git a/crates/lgn-data-build/src/databuild.rs b/crates/lgn-data-build/src/databuild.rs index 4eb4a1df86..fbcb836746 100644 --- a/crates/lgn-data-build/src/databuild.rs +++ b/crates/lgn-data-build/src/databuild.rs @@ -1,5 +1,5 @@ use std::{ - collections::{HashMap, HashSet}, + collections::{hash_map::Entry, HashMap, HashSet}, env, hash::{Hash, Hasher}, io, @@ -451,18 +451,34 @@ impl DataBuild { }; let mut compiler_details = HashMap::new(); - for t in unique_transforms { - let (transform, res_path_id) = t; - let (compiler, transform) = self - .compilers - .compilers() - .find_compiler(transform) - .ok_or(Error::CompilerNotFound(transform, res_path_id))?; - let compiler_hash = compiler - .compiler_hash(transform, env) - .await - .map_err(|e| Error::Io(e.into()))?; - compiler_details.insert(transform, (compiler, compiler_hash)); + for (transform, res_path_id) in unique_transforms { + // Insert a identity compiler if one exists + let identity_transform = transform.as_identity(); + if let Some((identity_compiler, _transform)) = + self.compilers.compilers().find_compiler(identity_transform) + { + if let Entry::Vacant(e) = compiler_details.entry(identity_transform) { + let compiler_hash = identity_compiler + .compiler_hash(identity_transform, env) + .await + .map_err(|e| Error::Io(e.into()))?; + e.insert((identity_compiler, compiler_hash)); + } + } else if let Entry::Vacant(e) = compiler_details.entry(transform) { + let (compiler, _transform) = self + .compilers + .compilers() + .find_compiler(transform) + .ok_or_else(|| { + lgn_tracing::error!("Compiler not found {}", transform); + Error::CompilerNotFound(transform, res_path_id) + })?; + let compiler_hash = compiler + .compiler_hash(transform, env) + .await + .map_err(|e| Error::Io(e.into()))?; + e.insert((compiler, compiler_hash)); + } } compiler_details }; @@ -477,7 +493,7 @@ impl DataBuild { // in the future this should be improved. 
// let mut accumulated_dependencies = vec![]; - let mut node_hash = HashMap::<_, (AssetHash, AssetHash)>::new(); + let mut node_hash = HashMap::<_, (AssetHash, AssetHash, ResourcePathId)>::new(); let mut compiled_at_node = HashMap::::new(); let mut compiled = HashSet::::new(); @@ -579,36 +595,54 @@ impl DataBuild { let mut new_work = vec![]; let num_ready = ready.len(); for compile_node_index in ready { - let compile_node = build_graph.node_weight(compile_node_index).unwrap(); + let source_node = build_graph.node_weight(compile_node_index).unwrap().clone(); info!( "Progress({:?}): {:?} is ready", - compile_node_index, compile_node + compile_node_index, source_node ); - // compile non-source dependencies. - if let Some(direct_dependency) = compile_node.direct_dependency() { - let mut n = - build_graph.neighbors_directed(compile_node_index, petgraph::Incoming); - let direct_dependency_index = n.next().unwrap(); - - // only one direct dependency supported now. it's ok for the path - // but it needs to be revisited for source (if this ever applies to source). - assert!(n.next().is_none()); - - assert_eq!( - &direct_dependency, - build_graph.node_weight(direct_dependency_index).unwrap() - ); - let transform = compile_node.last_transform().unwrap(); + let compile_node = if let Some(transform) = source_node.last_transform() { + // If there's no compiler for this operation but there's an Identity transform on the source, use it instead + if compiler_details.get(&transform).is_none() + && compiler_details.get(&transform.as_identity()).is_some() + { + ResourcePathId::from(source_node.source_resource()) + .push(source_node.source_resource().kind) + } else { + source_node.clone() + } + } else { + // If it's a source node but there's an Identity compiler, replace with Identity transform + let identity_transform = ResourcePathId::from(source_node.source_resource()) + .push(source_node.source_resource().kind); + if let Some((_compiler, _compiler_hash)) = + compiler_details.get(&identity_transform.last_transform().unwrap()) + { + identity_transform + } else { + source_node.clone() + } + }; - // 'name' is dropped as we always compile input as a whole. - let expected_name = compile_node.name(); + // compile non-source dependencies. + if let Some(direct_dependency) = compile_node.direct_dependency() { let compile_node = compile_node.to_unnamed(); // check if the unnamed ResourcePathId has been already compiled and early out. if let Some(node_index) = compiled_at_node.get(&compile_node) { - node_hash.insert(compile_node_index, *node_hash.get(node_index).unwrap()); + node_hash.insert( + compile_node_index, + node_hash.get(node_index).unwrap().clone(), + ); + + let unnamed = source_node.to_unnamed(); + info!( + "Source({:?}) Completed '{}' (reusing result from {:?})", + compile_node_index, source_node, node_index + ); compiled.insert(compile_node_index); + compiling_unnamed.remove(&unnamed); + compiled_unnamed.insert(unnamed); continue; } @@ -629,13 +663,6 @@ impl DataBuild { .find_dependencies(&direct_dependency) .unwrap_or_default(); - let (compiler, compiler_hash) = *compiler_details.get(&transform).unwrap(); - - // todo: not sure if transform is the right thing here. resource_path_id better? - // transform is already defined by the compiler_hash so it seems redundant. 
- let context_hash = - compute_context_hash(transform, compiler_hash, Self::version()); - let source_hash = { if direct_dependency.is_source() { // @@ -654,19 +681,24 @@ impl DataBuild { // resource should not read any other resources - but right now // `accumulated_dependencies` allows to read much more. // - let (dep_context_hash, dep_source_hash) = + let mut n = build_graph + .neighbors_directed(compile_node_index, petgraph::Incoming); + let direct_dependency_index = n.next().unwrap(); + let (dep_context_hash, dep_source_hash, dep_path) = node_hash.get(&direct_dependency_index).unwrap(); // we can assume there are results of compilation of the `direct_dependency` let compiled = self .output_index - .find_compiled( - &direct_dependency.to_unnamed(), - *dep_context_hash, - *dep_source_hash, - ) + .find_compiled(dep_path, *dep_context_hash, *dep_source_hash) .await - .unwrap() + .ok_or_else(|| { + lgn_tracing::error!( + "Output not present for {}", + &direct_dependency + ); + Error::OutputNotPresent(direct_dependency.clone(), "".into()) + })? .0; // can we assume there is a result of a requested name? // probably no, this should return a compile error. @@ -687,7 +719,22 @@ impl DataBuild { } }; - node_hash.insert(compile_node_index, (context_hash, source_hash)); + // Find the compiler and compiler_hash from the last transform + let transform = compile_node.last_transform().unwrap(); + let (compiler, context_hash) = + if let Some((compiler, compiler_hash)) = compiler_details.get(&transform) { + ( + *compiler, + compute_context_hash(transform, *compiler_hash, Self::version()), + ) + } else { + return Err(Error::CompilerNotFound(transform, compile_node.clone())); + }; + + node_hash.insert( + compile_node_index, + (context_hash, source_hash, compile_node.clone()), + ); let output_index = &self.output_index; let data_content_provider = Arc::clone(&self.data_content_provider); @@ -703,7 +750,9 @@ impl DataBuild { > = async move { info!( "Compiling({:?}) {} ({:?}) ...", - compile_node_index, compile_node, expected_name + compile_node_index, + compile_node, + compile_node.name() ); let start = std::time::Instant::now(); @@ -723,7 +772,10 @@ impl DataBuild { resources.clone(), ) .await - .map_err(|e| (compile_node_index, e))?; + .map_err(|e| { + lgn_tracing::error!("Failed to compile: {}", e); + (compile_node_index, e) + })?; info!( "Compiled({:?}) {:?} ended in {:?}.", diff --git a/crates/lgn-data-compiler/src/compiler_api.rs b/crates/lgn-data-compiler/src/compiler_api.rs index b4a845fe86..97f7c31329 100644 --- a/crates/lgn-data-compiler/src/compiler_api.rs +++ b/crates/lgn-data-compiler/src/compiler_api.rs @@ -396,14 +396,15 @@ async fn get_transform_hash( env: &CompilationEnv, transform: Transform, ) -> Result { - let (compiler, transform) = compilers - .find_compiler(transform) - .ok_or(CompilerError::CompilerNotFound(transform))?; + let (compiler, transform) = compilers.find_compiler(transform).ok_or_else(|| { + lgn_tracing::error!("Not found"); + CompilerError::CompilerNotFound(transform) + })?; - let compiler_hash = compiler - .compiler_hash(transform, env) - .await - .map_err(|_e| CompilerError::CompilerNotFound(transform))?; + let compiler_hash = compiler.compiler_hash(transform, env).await.map_err(|e| { + lgn_tracing::error!("{}", e); + CompilerError::CompilerNotFound(transform) + })?; Ok(compiler_hash) } @@ -508,9 +509,10 @@ async fn run(command: Commands, compilers: CompilerRegistry) -> Result<(), Compi }; let registry = { - let (compiler, _) = compilers - .find_compiler(transform) - 
.ok_or(CompilerError::CompilerNotFound(transform))?; + let (compiler, _) = compilers.find_compiler(transform).ok_or_else(|| { + lgn_tracing::error!("Compiler not found"); + CompilerError::CompilerNotFound(transform) + })?; let registry = AssetRegistryOptions::new() .add_device_cas(Arc::clone(&data_provider), runtime_manifest_id.clone()) @@ -524,10 +526,10 @@ async fn run(command: Commands, compilers: CompilerRegistry) -> Result<(), Compi let shell = CompilerNode::new(compilers, registry); - let (compiler, _) = shell - .compilers() - .find_compiler(transform) - .ok_or(CompilerError::CompilerNotFound(transform))?; + let (compiler, _) = shell.compilers().find_compiler(transform).ok_or_else(|| { + lgn_tracing::error!("Compiler not found"); + CompilerError::CompilerNotFound(transform) + })?; let compilation_output = compiler .compile( diff --git a/crates/lgn-data-compiler/src/compiler_node/compiler_registry.rs b/crates/lgn-data-compiler/src/compiler_node/compiler_registry.rs index 78ec88b6e7..69a0559d75 100644 --- a/crates/lgn-data-compiler/src/compiler_node/compiler_registry.rs +++ b/crates/lgn-data-compiler/src/compiler_node/compiler_registry.rs @@ -161,11 +161,9 @@ impl fmt::Debug for CompilerRegistry { impl CompilerRegistry { /// Returns a reference to the compiler pub fn find_compiler(&self, transform: Transform) -> Option<(&dyn CompilerStub, Transform)> { - if let Some(compiler_index) = self - .infos - .iter() - .position(|info| info.transform == transform) - { + if let Some(compiler_index) = self.infos.iter().position(|info| { + info.transform == transform || info.transform.is_wildcard_for(&transform) + }) { let stub_index = self.indices[compiler_index]; return Some((self.compilers[stub_index].as_ref(), transform)); } diff --git a/crates/lgn-data-compiler/src/compiler_node/inproc_stub.rs b/crates/lgn-data-compiler/src/compiler_node/inproc_stub.rs index 1fe5acdbe8..e62f905923 100644 --- a/crates/lgn-data-compiler/src/compiler_node/inproc_stub.rs +++ b/crates/lgn-data-compiler/src/compiler_node/inproc_stub.rs @@ -33,7 +33,9 @@ impl CompilerStub for InProcessCompilerStub { transform: Transform, env: &CompilationEnv, ) -> io::Result { - if transform != *self.descriptor.transform { + if transform != *self.descriptor.transform + && !self.descriptor.transform.is_wildcard_for(&transform) + { return Err(io::Error::new(io::ErrorKind::Other, "Transform mismatch")); } let hash = self diff --git a/crates/lgn-data-compiler/tests/compile.rs b/crates/lgn-data-compiler/tests/compile.rs index 59506c8c74..9cc8c65b84 100644 --- a/crates/lgn-data-compiler/tests/compile.rs +++ b/crates/lgn-data-compiler/tests/compile.rs @@ -103,7 +103,7 @@ async fn compile_intermediate() { kind: TextResource::TYPE, id: ResourceId::new(), }; - let mut resource = TextResource::new_named("test_resource"); + let mut resource = TextResource::new_with_id("test_resource", source); resource.content = source_magic_value.clone(); let source_manifest_id = diff --git a/crates/lgn-data-runtime/src/asset_registry.rs b/crates/lgn-data-runtime/src/asset_registry.rs index 9b80358844..d307cada96 100644 --- a/crates/lgn-data-runtime/src/asset_registry.rs +++ b/crates/lgn-data-runtime/src/asset_registry.rs @@ -255,6 +255,12 @@ pub enum AssetRegistryScheduling { AssetRegistryCreated, } +/// Message to notifying `AssetRegistry` Operation +pub enum AssetRegistryMessage { + /// Sent when resources changed + ChangedResources(Vec), +} + /// Async reader type for `AssetRegistry`/`AssetLoader` pub type AssetRegistryReader = Pin>; @@ -264,11 +270,14 @@ 
impl AssetRegistry { } /// Trigger a reload of a given primary resource. - pub async fn reload(&self, resource_id: ResourceTypeAndId) { + /// # Errors + /// Return `AssetRegistryError` on failure + pub async fn reload( + &self, + resource_id: ResourceTypeAndId, + ) -> Result { let future = self.new_load_request(resource_id); - if let Err(err) = future.await { - lgn_tracing::error!("Reload failed: {}", err); - } + future.await } pub(crate) fn mark_for_cleanup(&self, key: AssetRegistryHandleKey) { @@ -291,6 +300,7 @@ impl AssetRegistry { Some(self.lookup_untyped(resource_id)?.into()) } + // TODO: replace with Arc fn arc_self(&self) -> Arc { let registry = unsafe { Arc::from_raw(self as *const Self) }; let result = registry.clone(); diff --git a/crates/lgn-data-runtime/src/lib.rs b/crates/lgn-data-runtime/src/lib.rs index 166a4dcd45..935382adc5 100644 --- a/crates/lgn-data-runtime/src/lib.rs +++ b/crates/lgn-data-runtime/src/lib.rs @@ -99,9 +99,9 @@ pub use resource_installer::*; pub mod prelude { #[doc(hidden)] pub use crate::{ - AssetRegistry, AssetRegistryError, AssetRegistryOptions, AssetRegistryReader, Component, - ComponentInstaller, EditHandle, EditHandleUntyped, Handle, HandleUntyped, LoadRequest, - Resource, ResourceDescriptor, ResourceId, ResourceInstaller, ResourcePathId, ResourceType, - ResourceTypeAndId, Transform, + AssetRegistry, AssetRegistryError, AssetRegistryMessage, AssetRegistryOptions, + AssetRegistryReader, Component, ComponentInstaller, EditHandle, EditHandleUntyped, Handle, + HandleUntyped, LoadRequest, Resource, ResourceDescriptor, ResourceId, ResourceInstaller, + ResourcePathId, ResourceType, ResourceTypeAndId, Transform, }; } diff --git a/crates/lgn-data-runtime/src/resource.rs b/crates/lgn-data-runtime/src/resource.rs index 4441590c06..6aeabe7db8 100644 --- a/crates/lgn-data-runtime/src/resource.rs +++ b/crates/lgn-data-runtime/src/resource.rs @@ -41,8 +41,8 @@ pub async fn from_binary_reader<'de, T: Resource + Default + serde::Deserialize< /// Write a Resource to a binary stream /// # Errors /// Return `AssetRegistryError` on failure -pub fn to_binary_writer( - resource: &dyn Resource, +pub fn to_binary_writer( + resource: &T, writer: &mut dyn std::io::Write, ) -> Result<(), AssetRegistryError> { let mut bincode_ser = bincode::Serializer::new( @@ -51,8 +51,9 @@ pub fn to_binary_writer( .allow_trailing_bytes() .with_fixint_encoding(), ); - let mut serializer = ::erase(&mut bincode_ser); - lgn_data_model::utils::serialize_property_by_name(resource.as_reflect(), "", &mut serializer)?; + resource + .serialize(&mut bincode_ser) + .map_err(|_err| AssetRegistryError::Generic("bincode serialize error".into()))?; Ok(()) } diff --git a/crates/lgn-data-runtime/src/resourcepathid.rs b/crates/lgn-data-runtime/src/resourcepathid.rs index 3e503198e9..4f1e2fff17 100644 --- a/crates/lgn-data-runtime/src/resourcepathid.rs +++ b/crates/lgn-data-runtime/src/resourcepathid.rs @@ -16,6 +16,20 @@ impl Transform { pub const fn new(from: ResourceType, to: ResourceType) -> Self { Self { from, to } } + + /// Wildcard + pub fn is_wildcard_for(&self, transform: &Transform) -> bool { + self.from == self.to && self.from == transform.from + } + + /// To Identity transform + #[must_use] + pub fn as_identity(&self) -> Self { + Self { + from: self.from, + to: self.from, + } + } } impl fmt::Display for Transform { diff --git a/crates/lgn-data-transaction/Cargo.toml b/crates/lgn-data-transaction/Cargo.toml index aae1bfbf4e..b94f13f912 100644 --- a/crates/lgn-data-transaction/Cargo.toml +++ 
b/crates/lgn-data-transaction/Cargo.toml @@ -26,6 +26,7 @@ thiserror = "1.0" tokio = { version = "1.13", features = ["full", "tracing"] } async-trait = "0.1" futures-lite = "1.12" +crossbeam-channel = "0.5.1" [dev-dependencies] lgn-compiler-testentity = { path = "../../tests/testentity-compiler" } diff --git a/crates/lgn-data-transaction/src/lock_context.rs b/crates/lgn-data-transaction/src/lock_context.rs index fa96bf3a0e..4d084abc61 100644 --- a/crates/lgn-data-transaction/src/lock_context.rs +++ b/crates/lgn-data-transaction/src/lock_context.rs @@ -69,20 +69,24 @@ impl<'a> LockContext<'a> { changed.get_or_insert(Vec::new()).push(id); } - /*for resource_id in &self.changed_resources { + for resource_id in changed.iter().flatten() { match self .build .build_all_derived(*resource_id, &self.project) .await { - Ok((runtime_path_id, _built_resources)) => { - self.asset_registry.reload(runtime_path_id.resource_id()); + Ok((runtime_path_id, built_resources)) => { + lgn_tracing::info!( + "Rebuilt {} ({} deps)", + runtime_path_id, + built_resources.len() + ); } Err(e) => { - error!("Error building resource derivations {:?}", e); + lgn_tracing::error!("Error building resource derivations {:?}", e); } } - }*/ + } Ok(changed) } diff --git a/crates/lgn-data-transaction/src/transaction.rs b/crates/lgn-data-transaction/src/transaction.rs index 9b85988392..134c0bbb04 100644 --- a/crates/lgn-data-transaction/src/transaction.rs +++ b/crates/lgn-data-transaction/src/transaction.rs @@ -56,9 +56,10 @@ impl Transaction { // All the ops complete, the the resources let changed = context.save_changed_resources().await?; let mut log = format!( - "Transaction Applied: {} / {}ops", + "Transaction Applied: {} / {}ops ({} changed)", &self.id, - self.operations.len() + self.operations.len(), + changed.as_ref().map_or(0, Vec::len) ); if lgn_config::get_or("data_transaction.log_operation", false).unwrap() { @@ -99,9 +100,10 @@ impl Transaction { // All the ops complete, the the resources let changed = context.save_changed_resources().await?; info!( - "Transaction Rollbacked: {} / {}ops", + "Transaction Rollbacked: {} / {}ops ({} changed)", &self.id, - self.operations.len() + self.operations.len(), + changed.as_ref().map_or(0, Vec::len) ); Ok(changed) } diff --git a/crates/lgn-data-transaction/src/transaction_manager.rs b/crates/lgn-data-transaction/src/transaction_manager.rs index e447b314bd..c397f3b784 100644 --- a/crates/lgn-data-transaction/src/transaction_manager.rs +++ b/crates/lgn-data-transaction/src/transaction_manager.rs @@ -2,7 +2,9 @@ use std::{collections::HashSet, path::PathBuf, sync::Arc}; use lgn_content_store::indexing::SharedTreeIdentifier; use lgn_data_offline::{Project, ResourcePathName}; -use lgn_data_runtime::{AssetRegistryError, ResourcePathId, ResourceType, ResourceTypeAndId}; +use lgn_data_runtime::{ + AssetRegistryError, AssetRegistryMessage, ResourcePathId, ResourceType, ResourceTypeAndId, +}; use thiserror::Error; use tokio::sync::Mutex; @@ -78,6 +80,8 @@ pub enum Error { pub struct TransactionManager { commited_transactions: Vec, rollbacked_transactions: Vec, + notification_tx: crossbeam_channel::Sender, + notification_rx: crossbeam_channel::Receiver, pub(crate) project: Arc>, pub(crate) build_manager: Arc>, @@ -92,9 +96,14 @@ impl TransactionManager { build_manager: BuildManager, selection_manager: Arc, ) -> Self { + let (notification_tx, notification_rx) = + crossbeam_channel::unbounded::(); + Self { commited_transactions: Vec::new(), rollbacked_transactions: Vec::new(), + notification_tx, 
+ notification_rx, project, build_manager: Arc::new(Mutex::new(build_manager)), selection_manager, @@ -102,6 +111,11 @@ impl TransactionManager { } } + /// Return a Notification receiver + pub fn get_notification_receiver(&self) -> crossbeam_channel::Receiver { + self.notification_rx.clone() + } + /// Add a scene and build it pub async fn add_scene( &mut self, @@ -130,6 +144,22 @@ impl TransactionManager { self.active_scenes.iter().copied().collect() } + fn notify_changed_resources(&self, changed: Option>) { + if let Some(changed) = changed { + let runtime_changed = changed + .iter() + .map(|r| BuildManager::get_derived_id(*r).resource_id()) + .collect::>(); + + if let Err(err) = self + .notification_tx + .send(AssetRegistryMessage::ChangedResources(runtime_changed)) + { + lgn_tracing::warn!("Failed to TransactionMessage::ChangedResources: {}", err); + } + } + } + /// Build a resource by id pub async fn build_by_id( &self, @@ -143,53 +173,42 @@ impl TransactionManager { .await .map_err(|err| Error::Databuild(resource_id, err))?; - /*// Reload runtime asset (just entity for now) - for asset_id in changed_assets { - // Try to reload, if it doesn't exist, load normally - if asset_id.kind.as_pretty().starts_with("runtime_") - && !ctx.asset_registry.reload(asset_id) - { - ctx.asset_registry.load_untyped(asset_id); - } - }*/ Ok(runtime_path_id) } /// Commit the current pending `Transaction` - pub async fn commit_transaction( - &mut self, - mut transaction: Transaction, - ) -> Result>, Error> { + pub async fn commit_transaction(&mut self, mut transaction: Transaction) -> Result<(), Error> { let changed = transaction .apply_transaction(LockContext::new(self).await) .await?; self.commited_transactions.push(transaction); self.rollbacked_transactions.clear(); - Ok(changed) + self.notify_changed_resources(changed); + Ok(()) } /// Undo the last committed transaction - pub async fn undo_transaction(&mut self) -> Result>, Error> { + pub async fn undo_transaction(&mut self) -> Result<(), Error> { if let Some(mut transaction) = self.commited_transactions.pop() { let changed = transaction .rollback_transaction(LockContext::new(self).await) .await?; self.rollbacked_transactions.push(transaction); - return Ok(changed); + self.notify_changed_resources(changed); } - Ok(None) + Ok(()) } /// Reapply a rollbacked transaction - pub async fn redo_transaction(&mut self) -> Result>, Error> { + pub async fn redo_transaction(&mut self) -> Result<(), Error> { if let Some(mut transaction) = self.rollbacked_transactions.pop() { let changed = transaction .apply_transaction(LockContext::new(self).await) .await?; self.commited_transactions.push(transaction); - return Ok(changed); + self.notify_changed_resources(changed); } - Ok(None) + Ok(()) } /// Retrieve the identifier for the current runtime manifest diff --git a/crates/lgn-editor-srv/src/grpc.rs b/crates/lgn-editor-srv/src/grpc.rs index e566192d63..35cd970778 100644 --- a/crates/lgn-editor-srv/src/grpc.rs +++ b/crates/lgn-editor-srv/src/grpc.rs @@ -16,6 +16,7 @@ use tonic::{Request, Response, Status}; #[derive(Debug, Clone)] pub(crate) enum EditorEvent { SelectionChanged(Vec), + #[allow(dead_code)] // TODO ResourceChanged(Vec), } diff --git a/crates/lgn-editor-srv/src/plugin.rs b/crates/lgn-editor-srv/src/plugin.rs index a34c0bd627..0fe7026031 100644 --- a/crates/lgn-editor-srv/src/plugin.rs +++ b/crates/lgn-editor-srv/src/plugin.rs @@ -103,36 +103,21 @@ impl EditorPlugin { entities: Query<'_, '_, (Entity, &ResourceMetaInfo)>, mut event_reader: EventReader<'_, '_, 
PickingEvent>, keys: Res<'_, Input>, - event_sender: Res<'_, broadcast::Sender>, ) { if keys.pressed(KeyCode::LControl) && keys.just_pressed(KeyCode::Z) { let transaction_manager = transaction_manager.clone(); - let event_sender = event_sender.clone(); tokio_runtime.start_detached(async move { let mut transaction_manager = transaction_manager.lock().await; - match transaction_manager.undo_transaction().await { - Ok(Some(changed)) => { - if let Err(err) = event_sender.send(EditorEvent::ResourceChanged(changed)) { - warn!("Failed to send EditorEvent: {}", err); - } - } - Err(err) => error!("Undo transaction failed: {}", err), - Ok(_) => {} + if let Err(err) = transaction_manager.undo_transaction().await { + error!("Undo transaction failed: {}", err); } }); } else if keys.pressed(KeyCode::LControl) && keys.just_pressed(KeyCode::Y) { let transaction_manager = transaction_manager.clone(); - let event_sender = event_sender.clone(); tokio_runtime.start_detached(async move { let mut transaction_manager = transaction_manager.lock().await; - match transaction_manager.redo_transaction().await { - Ok(Some(changed)) => { - if let Err(err) = event_sender.send(EditorEvent::ResourceChanged(changed)) { - warn!("Failed to send EditorEvent: {}", err); - } - } - Err(err) => error!("Redo transaction failed: {}", err), - Ok(_) => {} + if let Err(err) = transaction_manager.redo_transaction().await { + error!("Redo transaction failed: {}", err); } }); } @@ -176,7 +161,6 @@ impl EditorPlugin { let scale_value = serde_json::json!(transform.scale).to_string(); let transaction_manager = transaction_manager.clone(); - let event_sender = event_sender.clone(); tokio_runtime.start_detached(async move { let mut transaction_manager = transaction_manager.lock().await; @@ -196,13 +180,6 @@ impl EditorPlugin { ], )); match transaction_manager.commit_transaction(transaction).await { - Ok(Some(changed)) => { - if let Err(err) = - event_sender.send(EditorEvent::ResourceChanged(changed)) - { - warn!("Failed to send EditorEvent: {}", err); - } - } Ok(_) => {} Err(err) => { error!("ApplyTransform transaction failed: {}", err); diff --git a/crates/lgn-editor-srv/src/property_inspector_plugin.rs b/crates/lgn-editor-srv/src/property_inspector_plugin.rs index 896482f164..bf815d4589 100644 --- a/crates/lgn-editor-srv/src/property_inspector_plugin.rs +++ b/crates/lgn-editor-srv/src/property_inspector_plugin.rs @@ -330,8 +330,7 @@ impl PropertyInspector for PropertyInspectorRPC { for (model, name) in &models { gltf_loader.models.push( ResourcePathId::from(gltf_resource_id) - .push_named(lgn_graphics_data::offline::Model::TYPE, name) - .push(lgn_graphics_data::runtime::Model::TYPE), + .push_named(lgn_graphics_data::runtime::Model::TYPE, name) ); gltf_loader .materials diff --git a/crates/lgn-gltf2tex-compiler/Cargo.toml b/crates/lgn-gltf-compiler/Cargo.toml similarity index 74% rename from crates/lgn-gltf2tex-compiler/Cargo.toml rename to crates/lgn-gltf-compiler/Cargo.toml index f1cc1a63a6..d2f9b33531 100644 --- a/crates/lgn-gltf2tex-compiler/Cargo.toml +++ b/crates/lgn-gltf-compiler/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "lgn-compiler-gltf2tex" +name = "lgn-compiler-gltf" version = "0.1.0" authors = ["Legion Labs "] edition = "2021" @@ -9,19 +9,21 @@ license = "MIT OR Apache-2.0" doc = false [[bin]] -name = "compiler-gltf2tex" +name = "compiler-gltf" path = "src/main.rs" doc = false [dependencies] +lgn-tracing = { path = "../lgn-tracing", version = "0.1.0" } +lgn-telemetry-sink = { path = "../lgn-telemetry-sink", version = "0.1.0" 
} lgn-content-store = { path = "../lgn-content-store", version = "0.1.0" } +lgn-data-compiler = { path = "../lgn-data-compiler", version = "0.1.0" } lgn-data-offline = { path = "../lgn-data-offline", version = "0.1.0" } lgn-data-runtime = { path = "../lgn-data-runtime", version = "0.1.0" } lgn-graphics-data = { path = "../lgn-graphics-data", version = "0.1.0", features = [ "runtime", "offline", ] } -lgn-data-compiler = { path = "../lgn-data-compiler", version = "0.1.0" } async-trait = "0.1" -tokio = { version = "1", features = ["full", "tracing"] } +tokio = { version = "1" } diff --git a/crates/lgn-gltf-compiler/src/lib.rs b/crates/lgn-gltf-compiler/src/lib.rs new file mode 100644 index 0000000000..245283c061 --- /dev/null +++ b/crates/lgn-gltf-compiler/src/lib.rs @@ -0,0 +1,164 @@ +use async_trait::async_trait; +use lgn_graphics_data::gltf_utils::GltfFile; +use std::{env, str::FromStr}; + +use lgn_data_compiler::{ + compiler_api::{ + CompilationEnv, CompilationOutput, Compiler, CompilerContext, CompilerDescriptor, + CompilerError, CompilerHash, DATA_BUILD_VERSION, + }, + compiler_utils::hash_code_and_data, +}; +use lgn_data_runtime::{AssetRegistryOptions, Resource, ResourceDescriptor, Transform}; + +pub static COMPILER_INFO: CompilerDescriptor = CompilerDescriptor { + name: env!("CARGO_CRATE_NAME"), + build_version: DATA_BUILD_VERSION, + code_version: "1", + data_version: "1", + transform: &Transform::new( + lgn_graphics_data::offline::Gltf::TYPE, + lgn_graphics_data::offline::Gltf::TYPE, + ), + compiler_creator: || Box::new(GltfCompiler {}), +}; + +struct GltfCompiler(); + +#[async_trait] +impl Compiler for GltfCompiler { + async fn init(&self, mut registry: AssetRegistryOptions) -> AssetRegistryOptions { + lgn_graphics_data::register_types(&mut registry); + registry + } + + async fn hash( + &self, + code: &'static str, + data: &'static str, + env: &CompilationEnv, + ) -> CompilerHash { + hash_code_and_data(code, data, env) + } + + async fn compile( + &self, + mut context: CompilerContext<'_>, + ) -> Result { + lgn_tracing::async_span_scope!("compiler_gltf"); + + let resources = context.registry(); + + let resource = { + lgn_tracing::async_span_scope!("load-resource-def"); + resources + .load_async::(context.source.resource_id()) + .await? + }; + + // minimize lock + let content_id = { + let gltf = resource.get().unwrap(); + gltf.content_id.clone() + }; + + let raw_data = { + lgn_tracing::async_span_scope!("content store read"); + let identifier = lgn_content_store::Identifier::from_str(&content_id) + .map_err(|err| CompilerError::CompilationError(err.to_string()))?; + + // TODO: aganea - should we read from a Device directly? + context.persistent_provider.read(&identifier).await? + }; + + let outputs = { + let source = context.source.clone(); + let target_unnamed = + lgn_data_runtime::ResourcePathId::from(context.target_unnamed.source_resource()); + + CompilerContext::execute_workload(move || { + let gltf = { + lgn_tracing::span_scope!("GltfFile::from_bytes"); + GltfFile::from_bytes(&raw_data)? 
+ }; + + let mut compiled_resources = vec![]; + let mut resource_references = vec![]; + + { + // Extract Textures + lgn_tracing::span_scope!("Extract Textires"); + let textures = { + lgn_tracing::span_scope!("gather_texture"); + gltf.gather_textures() + }; + for (texture, texture_name) in textures { + lgn_tracing::span_scope!("RawTexture write"); + let mut compiled_asset = vec![]; + lgn_data_runtime::to_binary_writer(&texture, &mut compiled_asset).map_err( + |err| { + CompilerError::CompilationError(format!( + "Writing to file '{}' failed: {}", + source.resource_id(), + err + )) + }, + )?; + compiled_resources.push(( + target_unnamed.push_named(texture.get_resource_type(), &texture_name), + compiled_asset, + )); + } + } + + { + // Extract Materials + lgn_tracing::span_scope!("Extract Material"); + let (materials, texture_references) = + gltf.gather_materials(source.resource_id()); + resource_references.extend(texture_references); + + for (material, name) in materials { + let mut compiled_asset = vec![]; + lgn_data_runtime::to_binary_writer(&material, &mut compiled_asset)?; + let material_rpid = + target_unnamed.push_named(material.get_resource_type(), &name); + compiled_resources.push((material_rpid.clone(), compiled_asset)); + } + } + + { + // Extract Models + lgn_tracing::span_scope!("Extract Models"); + let (models, model_references) = gltf.gather_models(source.resource_id()); + resource_references.extend(model_references); + for (model, name) in models { + lgn_tracing::span_scope!("Model Write"); + let mut compiled_asset = vec![]; + lgn_data_runtime::to_binary_writer(&model, &mut compiled_asset)?; + let model_rpid = + target_unnamed.push_named(model.get_resource_type(), &name); + compiled_resources.push((model_rpid.clone(), compiled_asset)); + } + } + + Ok(compiled_resources) + }) + .await? 
+ }; + + let compiled_resources = { + lgn_tracing::async_span_scope!("Store Write"); + let mut compiled_resources = vec![]; + for (id, content) in outputs { + compiled_resources.push(context.store_volatile(&content, id).await?); + } + compiled_resources + }; + + Ok(CompilationOutput { + compiled_resources, + resource_references: vec![], + }) + } +} diff --git a/crates/lgn-gltf-compiler/src/main.rs b/crates/lgn-gltf-compiler/src/main.rs new file mode 100644 index 0000000000..0efcea8e25 --- /dev/null +++ b/crates/lgn-gltf-compiler/src/main.rs @@ -0,0 +1,15 @@ +use std::env; + +use lgn_compiler_gltf::COMPILER_INFO; +use lgn_data_compiler::compiler_api::{compiler_main, CompilerError}; +use lgn_telemetry_sink::TelemetryGuardBuilder; + +#[tokio::main] +async fn main() -> Result<(), CompilerError> { + let _telemetry_guard = TelemetryGuardBuilder::default() + .with_ctrlc_handling() + .with_local_sink_enabled(false) + .build(); + lgn_tracing::span_scope!("compiler-gltf::main"); + compiler_main(&env::args(), &COMPILER_INFO).await +} diff --git a/crates/lgn-gltf2mat-compiler/Cargo.toml b/crates/lgn-gltf2mat-compiler/Cargo.toml deleted file mode 100644 index 364b0a3c4c..0000000000 --- a/crates/lgn-gltf2mat-compiler/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -[package] -name = "lgn-compiler-gltf2mat" -version = "0.1.0" -authors = ["Legion Labs "] -edition = "2021" -license = "MIT OR Apache-2.0" - -[lib] -doc = false - -[[bin]] -name = "compiler-gltf2mat" -path = "src/main.rs" -doc = false - -[dependencies] -lgn-content-store = { path = "../lgn-content-store", version = "0.1.0" } -lgn-data-offline = { path = "../lgn-data-offline", version = "0.1.0" } -lgn-data-runtime = { path = "../lgn-data-runtime", version = "0.1.0" } -lgn-graphics-data = { path = "../lgn-graphics-data", version = "0.1.0", features = [ - "runtime", - "offline", -] } -lgn-data-compiler = { path = "../lgn-data-compiler", version = "0.1.0" } -async-trait = "0.1" -tokio = { version = "1", features = ["full", "tracing"] } -gltf = { version = "1.0.0", optional = true } diff --git a/crates/lgn-gltf2mat-compiler/src/lib.rs b/crates/lgn-gltf2mat-compiler/src/lib.rs deleted file mode 100644 index b46c451ff0..0000000000 --- a/crates/lgn-gltf2mat-compiler/src/lib.rs +++ /dev/null @@ -1,112 +0,0 @@ -use async_trait::async_trait; -use std::{env, str::FromStr}; - -use lgn_data_compiler::{ - compiler_api::{ - CompilationEnv, CompilationOutput, Compiler, CompilerContext, CompilerDescriptor, - CompilerError, CompilerHash, DATA_BUILD_VERSION, - }, - compiler_utils::hash_code_and_data, -}; -use lgn_data_runtime::prelude::*; - -use lgn_graphics_data::gltf_utils::GltfFile; - -pub static COMPILER_INFO: CompilerDescriptor = CompilerDescriptor { - name: env!("CARGO_CRATE_NAME"), - build_version: DATA_BUILD_VERSION, - code_version: "1", - data_version: "1", - transform: &Transform::new( - lgn_graphics_data::offline::Gltf::TYPE, - lgn_graphics_data::offline::Material::TYPE, - ), - compiler_creator: || Box::new(Gltf2MatCompiler {}), -}; - -struct Gltf2MatCompiler(); - -#[async_trait] -impl Compiler for Gltf2MatCompiler { - async fn init(&self, mut registry: AssetRegistryOptions) -> AssetRegistryOptions { - lgn_graphics_data::register_types(&mut registry); - registry - } - - async fn hash( - &self, - code: &'static str, - data: &'static str, - env: &CompilationEnv, - ) -> CompilerHash { - hash_code_and_data(code, data, env) - } - - async fn compile( - &self, - mut context: CompilerContext<'_>, - ) -> Result { - let resources = context.registry(); - - let 
gltf_resource = resources - .load_async::(context.source.resource_id()) - .await?; - - let content_id = { - let gltf = gltf_resource.get().unwrap(); - gltf.content_id.clone() - }; - let identifier = lgn_content_store::Identifier::from_str(&content_id) - .map_err(|err| CompilerError::CompilationError(err.to_string()))?; - - // TODO: aganea - should we read from a Device directly? - let bytes = context.persistent_provider.read(&identifier).await?; - - let (outputs, resource_references) = { - let source = context.source.clone(); - let target_unnamed = context.target_unnamed.clone(); - - CompilerContext::execute_workload(move || { - let gltf = GltfFile::from_bytes(&bytes)?; - - let mut compiled_resources: Vec<(ResourcePathId, Vec)> = vec![]; - let mut resource_references = vec![]; - let materials = gltf.gather_materials(source.resource_id()); - for (material, name) in materials { - let mut compiled_asset = vec![]; - lgn_data_offline::to_json_writer(&material, &mut compiled_asset)?; - let material_rpid = target_unnamed.new_named(&name); - - compiled_resources.push((material_rpid.clone(), compiled_asset)); - if let Some(albedo) = material.albedo { - resource_references.push((material_rpid.clone(), albedo)); - } - - if let Some(normal) = material.normal { - resource_references.push((material_rpid.clone(), normal)); - } - - if let Some(roughness) = material.roughness { - resource_references.push((material_rpid.clone(), roughness)); - } - - if let Some(metalness) = material.metalness { - resource_references.push((material_rpid.clone(), metalness)); - } - } - Ok((compiled_resources, resource_references)) - }) - .await? - }; - - let mut compiled_resources = vec![]; - for (id, content) in outputs { - compiled_resources.push(context.store_volatile(&content, id.clone()).await?); - } - - Ok(CompilationOutput { - compiled_resources, - resource_references, - }) - } -} diff --git a/crates/lgn-gltf2mat-compiler/src/main.rs b/crates/lgn-gltf2mat-compiler/src/main.rs deleted file mode 100644 index a2205dec1c..0000000000 --- a/crates/lgn-gltf2mat-compiler/src/main.rs +++ /dev/null @@ -1,9 +0,0 @@ -use std::env; - -use lgn_compiler_gltf2mat::COMPILER_INFO; -use lgn_data_compiler::compiler_api::{compiler_main, CompilerError}; - -#[tokio::main] -async fn main() -> Result<(), CompilerError> { - compiler_main(&env::args(), &COMPILER_INFO).await -} diff --git a/crates/lgn-gltf2model-compiler/Cargo.toml b/crates/lgn-gltf2model-compiler/Cargo.toml deleted file mode 100644 index 4c4f27258a..0000000000 --- a/crates/lgn-gltf2model-compiler/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "lgn-compiler-gltf2model" -version = "0.1.0" -authors = ["Legion Labs "] -edition = "2021" -license = "MIT OR Apache-2.0" - -[lib] -doc = false - -[[bin]] -name = "compiler-gltf2model" -path = "src/main.rs" -doc = false - -[dependencies] -lgn-content-store = { path = "../lgn-content-store", version = "0.1.0" } -lgn-data-offline = { path = "../lgn-data-offline", version = "0.1.0" } -lgn-data-runtime = { path = "../lgn-data-runtime", version = "0.1.0" } -lgn-graphics-data = { path = "../lgn-graphics-data", version = "0.1.0", features = [ - "runtime", - "offline", -] } -lgn-data-compiler = { path = "../lgn-data-compiler", version = "0.1.0" } - -async-trait = "0.1" -tokio = { version = "1", features = ["full", "tracing"] } -gltf = { version = "1.0.0", optional = true } diff --git a/crates/lgn-gltf2model-compiler/src/lib.rs b/crates/lgn-gltf2model-compiler/src/lib.rs deleted file mode 100644 index c5ae156659..0000000000 --- 
a/crates/lgn-gltf2model-compiler/src/lib.rs +++ /dev/null @@ -1,100 +0,0 @@ -use async_trait::async_trait; -use lgn_graphics_data::gltf_utils::GltfFile; -use std::{env, str::FromStr}; - -use lgn_data_compiler::{ - compiler_api::{ - CompilationEnv, CompilationOutput, Compiler, CompilerContext, CompilerDescriptor, - CompilerError, CompilerHash, DATA_BUILD_VERSION, - }, - compiler_utils::hash_code_and_data, -}; -use lgn_data_runtime::prelude::*; - -pub static COMPILER_INFO: CompilerDescriptor = CompilerDescriptor { - name: env!("CARGO_CRATE_NAME"), - build_version: DATA_BUILD_VERSION, - code_version: "1", - data_version: "1", - transform: &Transform::new( - lgn_graphics_data::offline::Gltf::TYPE, - lgn_graphics_data::offline::Model::TYPE, - ), - compiler_creator: || Box::new(Gltf2ModelCompiler {}), -}; - -struct Gltf2ModelCompiler(); - -#[async_trait] -impl Compiler for Gltf2ModelCompiler { - async fn init(&self, mut registry: AssetRegistryOptions) -> AssetRegistryOptions { - lgn_graphics_data::register_types(&mut registry); - registry - } - - async fn hash( - &self, - code: &'static str, - data: &'static str, - env: &CompilationEnv, - ) -> CompilerHash { - hash_code_and_data(code, data, env) - } - - async fn compile( - &self, - mut context: CompilerContext<'_>, - ) -> Result { - let resources = context.registry(); - - let gltf_resource = resources - .load_async::(context.source.resource_id()) - .await?; - - let content_id = { - let gltf = gltf_resource.get().unwrap(); - gltf.content_id.clone() - }; - let identifier = lgn_content_store::Identifier::from_str(&content_id) - .map_err(|err| CompilerError::CompilationError(err.to_string()))?; - - // TODO: aganea - should we read from a Device directly? - let bytes = context.persistent_provider.read(&identifier).await?; - - let (outputs, resource_references) = { - let source = context.source.clone(); - let target_unnamed = context.target_unnamed.clone(); - - CompilerContext::execute_workload(move || { - let gltf = GltfFile::from_bytes(&bytes)?; - let mut compiled_resources = vec![]; - let mut resource_references = Vec::new(); - - let models = gltf.gather_models(source.resource_id()); - for (model, name) in models { - let mut compiled_asset = vec![]; - lgn_data_offline::to_json_writer(&model, &mut compiled_asset)?; - let model_rpid = target_unnamed.new_named(&name); - compiled_resources.push((model_rpid.clone(), compiled_asset)); - for mesh in model.meshes { - if let Some(material_rpid) = mesh.material { - resource_references.push((model_rpid.clone(), material_rpid)); - } - } - } - Ok((compiled_resources, resource_references)) - }) - .await? 
- }; - - let mut compiled_resources = vec![]; - for (id, content) in outputs { - compiled_resources.push(context.store_volatile(&content, id).await?); - } - - Ok(CompilationOutput { - compiled_resources, - resource_references, - }) - } -} diff --git a/crates/lgn-gltf2model-compiler/src/main.rs b/crates/lgn-gltf2model-compiler/src/main.rs deleted file mode 100644 index eeebc90376..0000000000 --- a/crates/lgn-gltf2model-compiler/src/main.rs +++ /dev/null @@ -1,9 +0,0 @@ -use std::env; - -use lgn_compiler_gltf2model::COMPILER_INFO; -use lgn_data_compiler::compiler_api::{compiler_main, CompilerError}; - -#[tokio::main] -async fn main() -> Result<(), CompilerError> { - compiler_main(&env::args(), &COMPILER_INFO).await -} diff --git a/crates/lgn-gltf2tex-compiler/src/lib.rs b/crates/lgn-gltf2tex-compiler/src/lib.rs deleted file mode 100644 index 380f9305ee..0000000000 --- a/crates/lgn-gltf2tex-compiler/src/lib.rs +++ /dev/null @@ -1,101 +0,0 @@ -use async_trait::async_trait; -use lgn_graphics_data::gltf_utils::GltfFile; -use std::{env, str::FromStr}; - -use lgn_data_compiler::{ - compiler_api::{ - CompilationEnv, CompilationOutput, Compiler, CompilerContext, CompilerDescriptor, - CompilerError, CompilerHash, DATA_BUILD_VERSION, - }, - compiler_utils::hash_code_and_data, -}; -use lgn_data_runtime::{AssetRegistryOptions, ResourceDescriptor, Transform}; - -pub static COMPILER_INFO: CompilerDescriptor = CompilerDescriptor { - name: env!("CARGO_CRATE_NAME"), - build_version: DATA_BUILD_VERSION, - code_version: "1", - data_version: "1", - transform: &Transform::new( - lgn_graphics_data::offline::Gltf::TYPE, - lgn_graphics_data::runtime::RawTexture::TYPE, - ), - compiler_creator: || Box::new(Gltf2TexCompiler {}), -}; - -struct Gltf2TexCompiler(); - -#[async_trait] -impl Compiler for Gltf2TexCompiler { - async fn init(&self, mut registry: AssetRegistryOptions) -> AssetRegistryOptions { - lgn_graphics_data::register_types(&mut registry); - registry - } - - async fn hash( - &self, - code: &'static str, - data: &'static str, - env: &CompilationEnv, - ) -> CompilerHash { - hash_code_and_data(code, data, env) - } - - async fn compile( - &self, - mut context: CompilerContext<'_>, - ) -> Result { - let resources = context.registry(); - - let resource = resources - .load_async::(context.source.resource_id()) - .await?; - - // minimize lock - let content_id = { - let gltf = resource.get().unwrap(); - gltf.content_id.clone() - }; - let identifier = lgn_content_store::Identifier::from_str(&content_id) - .map_err(|err| CompilerError::CompilationError(err.to_string()))?; - - // TODO: aganea - should we read from a Device directly? - let bytes = context.persistent_provider.read(&identifier).await?; - - let outputs = { - let source = context.source.clone(); - let target_unnamed = context.target_unnamed.clone(); - - CompilerContext::execute_workload(move || { - let gltf_file = GltfFile::from_bytes(&bytes)?; - let mut compiled_resources = vec![]; - let textures = gltf_file.gather_textures(); - for texture in textures { - let mut compiled_asset = vec![]; - lgn_data_runtime::to_binary_writer(&texture.0, &mut compiled_asset).map_err( - |err| { - CompilerError::CompilationError(format!( - "Writing to file '{}' failed: {}", - source.resource_id(), - err - )) - }, - )?; - compiled_resources.push((target_unnamed.new_named(&texture.1), compiled_asset)); - } - Ok(compiled_resources) - }) - .await? 
- }; - - let mut compiled_resources = vec![]; - for (id, content) in outputs { - compiled_resources.push(context.store_volatile(&content, id).await?); - } - - Ok(CompilationOutput { - compiled_resources, - resource_references: vec![], - }) - } -} diff --git a/crates/lgn-gltf2tex-compiler/src/main.rs b/crates/lgn-gltf2tex-compiler/src/main.rs deleted file mode 100644 index 8ea7f93060..0000000000 --- a/crates/lgn-gltf2tex-compiler/src/main.rs +++ /dev/null @@ -1,9 +0,0 @@ -use std::env; - -use lgn_compiler_gltf2tex::COMPILER_INFO; -use lgn_data_compiler::compiler_api::{compiler_main, CompilerError}; - -#[tokio::main] -async fn main() -> Result<(), CompilerError> { - compiler_main(&env::args(), &COMPILER_INFO).await -} diff --git a/crates/lgn-graphics-data/src/offline/gltf_utils.rs b/crates/lgn-graphics-data/src/offline/gltf_utils.rs index 7ea6bd3a9f..0bc751d23a 100644 --- a/crates/lgn-graphics-data/src/offline/gltf_utils.rs +++ b/crates/lgn-graphics-data/src/offline/gltf_utils.rs @@ -1,8 +1,7 @@ use std::{cell::RefCell, str::FromStr}; use crate::{ - offline::{Material, Mesh, Model, SamplerData}, - runtime::RawTexture, + runtime::{Material, Mesh, Model, RawTexture, SamplerData}, Color, Filter, TextureType, WrappingMode, }; use gltf::{ @@ -17,6 +16,116 @@ use lgn_math::{Vec2, Vec3, Vec4}; use lgn_data_runtime::prelude::*; use lgn_tracing::warn; +pub fn extract_materials_from_document( + document: &Document, + resource_id: ResourceTypeAndId, +) -> (Vec<(Material, String)>, Vec) { + let mut references = Vec::::new(); + let mut materials = Vec::new(); + for material in document.materials() { + let material_name = material.name().unwrap(); + let base_albedo = material.pbr_metallic_roughness().base_color_factor(); + let base_albedo = Color::from(( + (base_albedo[0] * 255.0) as u8, + (base_albedo[1] * 255.0) as u8, + (base_albedo[2] * 255.0) as u8, + (base_albedo[3] * 255.0) as u8, + )); + let material_sampler = RefCell::new(None); + let albedo = material + .pbr_metallic_roughness() + .base_color_texture() + .map(|info| { + *material_sampler.borrow_mut() = Some(info.texture().sampler()); + ResourcePathId::from(resource_id) + .push_named( + crate::runtime::RawTexture::TYPE, + texture_name(&info.texture()).unwrap().as_str(), + ) + .push_named(crate::runtime::BinTexture::TYPE, "Albedo") + }); + + let normal = material.normal_texture().map(|info| { + let normal_sampler = info.texture().sampler(); + if let Some(sampler) = &*material_sampler.borrow() { + if !samplers_equal(sampler, &normal_sampler) { + warn!("Material {} uses more than one sampler", material_name); + } + } else { + *material_sampler.borrow_mut() = Some(normal_sampler); + } + + ResourcePathId::from(resource_id) + .push_named( + crate::runtime::RawTexture::TYPE, + normal_texture_name(&info).unwrap().as_str(), + ) + .push_named(crate::runtime::BinTexture::TYPE, "Normal") + }); + let base_roughness = material.pbr_metallic_roughness().roughness_factor(); + let base_metalness = material.pbr_metallic_roughness().metallic_factor(); + let roughness = material + .pbr_metallic_roughness() + .metallic_roughness_texture() + .map(|info| { + let roughness_sampler = info.texture().sampler(); + if let Some(sampler) = &*material_sampler.borrow() { + if !samplers_equal(sampler, &roughness_sampler) { + warn!("Material {} uses more than one sampler", material_name); + } + } else { + *material_sampler.borrow_mut() = Some(roughness_sampler); + } + ResourcePathId::from(resource_id) + .push_named( + crate::runtime::RawTexture::TYPE, + format!("{}_Roughness", 
texture_name(&info.texture()).unwrap()).as_str(), + ) + .push_named(crate::runtime::BinTexture::TYPE, "Roughness") + }); + let metalness = material + .pbr_metallic_roughness() + .metallic_roughness_texture() + .map(|info| { + let metalness_sampler = info.texture().sampler(); + if let Some(sampler) = &*material_sampler.borrow() { + if !samplers_equal(sampler, &metalness_sampler) { + warn!("Material {} uses more than one sampler", material_name); + } + } else { + *material_sampler.borrow_mut() = Some(metalness_sampler); + } + ResourcePathId::from(resource_id) + .push_named( + crate::runtime::RawTexture::TYPE, + format!("{}_Metalness", texture_name(&info.texture()).unwrap()).as_str(), + ) + .push_named(crate::runtime::BinTexture::TYPE, "Metalness") + }); + + references.extend(albedo.iter().cloned()); + references.extend(normal.iter().cloned()); + references.extend(roughness.iter().cloned()); + references.extend(metalness.iter().cloned()); + + materials.push(( + Material { + albedo: albedo.map(|p| p.resource_id().into()), + normal: normal.map(|p| p.resource_id().into()), + roughness: roughness.map(|p| p.resource_id().into()), + metalness: metalness.map(|p| p.resource_id().into()), + base_albedo, + base_metalness, + base_roughness, + sampler: material_sampler.borrow().as_ref().map(build_sampler), + ..Material::default() + }, + String::from(material_name), + )); + } + (materials, references) +} + pub struct GltfFile { document: Document, buffers: Vec, @@ -37,8 +146,12 @@ impl GltfFile { }) } - pub fn gather_models(&self, resource_id: ResourceTypeAndId) -> Vec<(Model, String)> { + pub fn gather_models( + &self, + resource_id: ResourceTypeAndId, + ) -> (Vec<(Model, String)>, Vec) { let mut models = Vec::new(); + let mut references = Vec::::new(); for mesh in self.document.meshes() { let mut meshes = Vec::new(); for primitive in mesh.primitives() { @@ -52,6 +165,7 @@ impl GltfFile { let reader = primitive.reader(|buffer| Some(&self.buffers[buffer.index()])); if let Some(iter) = reader.read_positions() { + positions.reserve(iter.size_hint().0); for position in iter { // GLTF uses RH Y-up coordinate system, Legion Engine uses RH Z-up. By importing -Z -> Y and Y -> Z we // rotate the imported model 90 degrees. This is done to compensate rotation caused by Blender exporting @@ -61,11 +175,13 @@ impl GltfFile { } } if let Some(iter) = reader.read_normals() { + normals.reserve(iter.size_hint().0); for normal in iter { normals.push(Vec3::new(normal[0], -normal[2], normal[1])); } } if let Some(iter) = reader.read_tangents() { + tangents.reserve(iter.size_hint().0); for tangent in iter { // Same rule as above applies to the tangents. W coordinate of the tangent contains the handedness // of the tangent space. -1 handedness corresponds to a LH tangent basis in a RH coordinate system. 
@@ -76,6 +192,7 @@ impl GltfFile { if let Some(tex_coords_option) = reader.read_tex_coords(0) { match tex_coords_option { ReadTexCoords::F32(iter) => { + tex_coords.reserve(iter.size_hint().0); for tex_coord in iter { tex_coords.push(Vec2::new(tex_coord[0], tex_coord[1])); } @@ -86,16 +203,19 @@ impl GltfFile { if let Some(indices_option) = reader.read_indices() { match indices_option { ReadIndices::U8(iter) => { + indices.reserve(iter.size_hint().0); for idx in iter { indices.push(u16::from(idx)); } } ReadIndices::U16(iter) => { + indices.reserve(iter.size_hint().0); for idx in iter { indices.push(idx); } } ReadIndices::U32(iter) => { + indices.reserve(iter.size_hint().0); for idx in iter { // TODO - will panic if does not fit in 16bits indices.push(idx as u16); @@ -130,6 +250,9 @@ impl GltfFile { ) }, ); + + references.extend(material.iter().cloned()); + meshes.push(Mesh { positions, normals, @@ -137,122 +260,21 @@ impl GltfFile { tex_coords, indices, colors: Vec::new(), - material, + material: material.map(|p| p.resource_id().into()), }); } - models.push(( - Model { - meshes, - ..Model::default() - }, - String::from(mesh.name().unwrap()), - )); - } - models - } - pub fn gather_materials(&self, resource_id: ResourceTypeAndId) -> Vec<(Material, String)> { - let mut materials = Vec::new(); - for material in self.document.materials() { - let material_name = material.name().unwrap(); - let base_albedo = material.pbr_metallic_roughness().base_color_factor(); - let base_albedo = Color::from(( - (base_albedo[0] * 255.0) as u8, - (base_albedo[1] * 255.0) as u8, - (base_albedo[2] * 255.0) as u8, - (base_albedo[3] * 255.0) as u8, - )); - let material_sampler = RefCell::new(None); - let albedo = material - .pbr_metallic_roughness() - .base_color_texture() - .map(|info| { - *material_sampler.borrow_mut() = Some(info.texture().sampler()); - ResourcePathId::from(resource_id) - .push_named( - crate::runtime::RawTexture::TYPE, - texture_name(&info.texture()).unwrap().as_str(), - ) - .push_named(crate::runtime::BinTexture::TYPE, "Albedo") - }); - - let normal = material.normal_texture().map(|info| { - let normal_sampler = info.texture().sampler(); - if let Some(sampler) = &*material_sampler.borrow() { - if !samplers_equal(sampler, &normal_sampler) { - warn!("Material {} uses more than one sampler", material_name); - } - } else { - *material_sampler.borrow_mut() = Some(normal_sampler); - } - - ResourcePathId::from(resource_id) - .push_named( - crate::runtime::RawTexture::TYPE, - normal_texture_name(&info).unwrap().as_str(), - ) - .push_named(crate::runtime::BinTexture::TYPE, "Normal") - }); - let base_roughness = material.pbr_metallic_roughness().roughness_factor(); - let base_metalness = material.pbr_metallic_roughness().metallic_factor(); - let roughness = material - .pbr_metallic_roughness() - .metallic_roughness_texture() - .map(|info| { - let roughness_sampler = info.texture().sampler(); - if let Some(sampler) = &*material_sampler.borrow() { - if !samplers_equal(sampler, &roughness_sampler) { - warn!("Material {} uses more than one sampler", material_name); - } - } else { - *material_sampler.borrow_mut() = Some(roughness_sampler); - } - ResourcePathId::from(resource_id) - .push_named( - crate::runtime::RawTexture::TYPE, - format!("{}_Roughness", texture_name(&info.texture()).unwrap()) - .as_str(), - ) - .push_named(crate::runtime::BinTexture::TYPE, "Roughness") - }); - let metalness = material - .pbr_metallic_roughness() - .metallic_roughness_texture() - .map(|info| { - let metalness_sampler = 
info.texture().sampler(); - if let Some(sampler) = &*material_sampler.borrow() { - if !samplers_equal(sampler, &metalness_sampler) { - warn!("Material {} uses more than one sampler", material_name); - } - } else { - *material_sampler.borrow_mut() = Some(metalness_sampler); - } - ResourcePathId::from(resource_id) - .push_named( - crate::runtime::RawTexture::TYPE, - format!("{}_Metalness", texture_name(&info.texture()).unwrap()) - .as_str(), - ) - .push_named(crate::runtime::BinTexture::TYPE, "Metalness") - }); - materials.push(( - Material { - albedo, - normal, - roughness, - metalness, - base_albedo, - base_metalness, - base_roughness, - sampler: material_sampler.borrow().as_ref().map(build_sampler), - ..Material::default() - }, - String::from(material_name), - )); + models.push((Model { meshes }, String::from(mesh.name().unwrap()))); } - materials + (models, references) } + pub fn gather_materials( + &self, + resource_id: ResourceTypeAndId, + ) -> (Vec<(Material, String)>, Vec) { + extract_materials_from_document(&self.document, resource_id) + } pub fn gather_textures(&self) -> Vec<(RawTexture, String)> { let mut metallic_roughness_textures = Vec::new(); for material in self.document.materials() { @@ -267,10 +289,11 @@ impl GltfFile { for texture in self.document.textures() { let name = texture_name(&texture).unwrap(); let image = &self.images[texture.source().index()]; + let capacity = (image.width * image.height) as usize; if metallic_roughness_textures.contains(&name) { - let mut roughness = Vec::new(); - let mut metalness = Vec::new(); - for i in 0..(image.width * image.height) as usize { + let mut roughness = Vec::with_capacity(capacity); + let mut metalness = Vec::with_capacity(capacity); + for i in 0..capacity { roughness.push(image.pixels[i * 3 + 1]); metalness.push(image.pixels[i * 3 + 2]); } @@ -302,11 +325,12 @@ impl GltfFile { //Format::R8 => image.pixels.clone().iter().flat_map(|v| vec![*v, 0, 0, 0]).collect(), Format::R8G8B8A8 => serde_bytes::ByteBuf::from(image.pixels.clone()), Format::R8G8B8 => { - let mut rgba = Vec::new(); - for i in 0..(image.width * image.height) as usize { - rgba.push(image.pixels[i * 3]); - rgba.push(image.pixels[i * 3 + 1]); - rgba.push(image.pixels[i * 3 + 2]); + let mut rgba = Vec::with_capacity(capacity); + let source = image.pixels.chunks(3); + for pixels in source { + rgba.push(pixels[0]); + rgba.push(pixels[1]); + rgba.push(pixels[2]); rgba.push(255); } serde_bytes::ByteBuf::from(rgba) diff --git a/crates/lgn-resource-registry/src/lib.rs b/crates/lgn-resource-registry/src/lib.rs index bfd10910e0..fd94e0add5 100644 --- a/crates/lgn-resource-registry/src/lib.rs +++ b/crates/lgn-resource-registry/src/lib.rs @@ -102,13 +102,14 @@ impl ResourceRegistryPlugin { .await .expect("the editor requires valid build manager"); - Arc::new(Mutex::new(TransactionManager::new( + TransactionManager::new( Arc::new(Mutex::new(project)), build_manager, selection_manager.clone(), - ))) + ) }); - world.insert_resource(transaction_manager); + world.insert_resource(transaction_manager.get_notification_receiver()); + world.insert_resource(Arc::new(Mutex::new(transaction_manager))); } } diff --git a/crates/lgn-scene-plugin/src/lib.rs b/crates/lgn-scene-plugin/src/lib.rs index e217a46a53..c069abdb1e 100644 --- a/crates/lgn-scene-plugin/src/lib.rs +++ b/crates/lgn-scene-plugin/src/lib.rs @@ -60,7 +60,24 @@ impl ScenePlugin { scene_manager: Res<'_, Arc>, tokio_runtime: ResMut<'_, TokioAsyncRuntime>, mut commands: Commands<'_, '_>, + asset_registry_events: ResMut<'_, 
crossbeam_channel::Receiver>, ) { + for event in asset_registry_events.try_iter() { + match event { + AssetRegistryMessage::ChangedResources(changed_resources) => { + let asset_registry = asset_registry.clone(); + let scene_manager = scene_manager.clone(); + let changed_resources = changed_resources.clone(); + + tokio_runtime.start_detached(async move { + scene_manager + .notify_changed_resources(&changed_resources, &asset_registry) + .await; + }); + } + } + } + for event in scene_events.iter() { match event { SceneMessage::OpenScene(resource_id) => { @@ -70,7 +87,7 @@ impl ScenePlugin { tokio_runtime.start_detached(async move { match asset_registry.load_async::(resource_id).await { Ok(handle) => { - scene_manager.add_pending(handle); + scene_manager.add_pending_scene(handle); println!("ok"); } Err(err) => lgn_tracing::error!( diff --git a/crates/lgn-scene-plugin/src/scene_instance.rs b/crates/lgn-scene-plugin/src/scene_instance.rs index 3ae318afc4..d5fc204510 100644 --- a/crates/lgn-scene-plugin/src/scene_instance.rs +++ b/crates/lgn-scene-plugin/src/scene_instance.rs @@ -1,9 +1,11 @@ -use std::collections::HashMap; +use std::{collections::HashMap, sync::Arc}; -use lgn_data_runtime::{AssetRegistry, Handle, ResourceDescriptor, ResourceTypeAndId}; +use futures::FutureExt; +use lgn_data_runtime::prelude::*; use lgn_ecs::prelude::Commands; use lgn_hierarchy::prelude::{BuildChildren, Parent}; use lgn_tracing::warn; +use tokio::task::JoinHandle; use crate::ResourceMetaInfo; @@ -33,6 +35,26 @@ impl SceneInstance { self.id_to_entity_map.get(resource_id) } + pub(crate) fn notify_changed_resources( + &mut self, + changed: &[ResourceTypeAndId], + asset_registry: &Arc, + ) -> Vec<( + ResourceTypeAndId, + JoinHandle>, + )> { + let mut results = Vec::new(); + for resource_id in changed.iter().copied() { + if self.id_to_entity_map.contains_key(&resource_id) { + let asset_registry = asset_registry.clone(); + let handle: JoinHandle> = + tokio::spawn(async move { asset_registry.reload(resource_id).await }.boxed()); + results.push((resource_id, handle)); + } + } + results + } + pub(crate) fn unspawn_all(&mut self, commands: &mut Commands<'_, '_>) { for (_id, entity) in self.id_to_entity_map.drain() { commands.entity(entity).despawn(); @@ -167,85 +189,18 @@ impl SceneInstance { entity_command.insert(local_transform.unwrap_or_default()); entity_command.insert(lgn_transform::prelude::GlobalTransform::identity()); - /*for component in &runtime_entity.components { - if let Some(visual) = component.downcast_ref::() { - entity.insert(VisualComponent::new( - visual - .renderable_geometry - .as_ref() - .map(ModelReferenceType::id), - visual.color, - visual.color_blend, - )); - } else if let Some(gi) = - component.downcast_ref::() - { - entity.insert(gi.clone()); - } else if let Some(nav_mesh) = component.downcast_ref::() { - entity.insert(nav_mesh.clone()); - } else if let Some(view) = component.downcast_ref::() { - entity.insert(view.clone()); - } else if let Some(_gltf_loader) = - component.downcast_ref::() - { - // nothing to do - } else if let Some(physics) = - component.downcast_ref::() - { - entity.insert(physics.clone()); - } else if let Some(physics) = - component.downcast_ref::() - { - entity.insert(physics.clone()); - } else if let Some(physics) = - component.downcast_ref::() - { - entity.insert(physics.clone()); - } else if let Some(physics) = - component.downcast_ref::() - { - entity.insert(physics.clone()); - } else if let Some(physics) = - component.downcast_ref::() - { - 
entity.insert(physics.clone()); - } else if let Some(physics) = - component.downcast_ref::() - { - entity.insert(physics.clone()); - } else if let Some(physics) = - component.downcast_ref::() - { - entity.insert(physics.clone()); - } else if let Some(physics_settings) = - component.downcast_ref::() - { - entity.insert(physics_settings.clone()); - } else if let Some(camera_setup) = - component.downcast_ref::() - { - entity.insert(camera_setup.clone()); - } else if let Some(animation_data) = - component.downcast_ref::() - { - let runtime_animation_data = RuntimeAnimationClip::new(animation_data); - entity.insert(runtime_animation_data); - } else { - error!( - "Unhandle component type {} in entity {}", - component.get_type().get_type_name(), - resource_id, - ); - } - } - - }*/ - if let Some(parent) = runtime_entity.parent.as_ref() { if let Some(parent_ecs_entity) = self.id_to_entity_map.get(&parent.id()) { entity_command.insert(Parent(*parent_ecs_entity)); } } + + lgn_tracing::info!( + "Spawned Entity: {} -> ECS id: {:?}| {}", + resource_id.id, + entity_id, + name, + ); } } } diff --git a/crates/lgn-scene-plugin/src/scene_manager.rs b/crates/lgn-scene-plugin/src/scene_manager.rs index b4c66946d5..030a50dd1d 100644 --- a/crates/lgn-scene-plugin/src/scene_manager.rs +++ b/crates/lgn-scene-plugin/src/scene_manager.rs @@ -10,7 +10,8 @@ use lgn_ecs::prelude::Commands; struct Inner { active_scenes: HashMap, - pending: Vec>, + pending_scene: Vec>, + pending_reload: Vec>, } pub struct SceneManager { @@ -22,17 +23,22 @@ impl SceneManager { Arc::new(Self { inner: RwLock::new(Inner { active_scenes: HashMap::new(), - pending: Vec::new(), + pending_scene: Vec::new(), + pending_reload: Vec::new(), }), }) } pub(crate) fn update(&self, asset_registry: &AssetRegistry, commands: &mut Commands<'_, '_>) { - let pending = std::mem::take(&mut self.inner.write().unwrap().pending); + let (pending_scene, pending_reload) = { + let mut guard = self.inner.write().unwrap(); + let pending_scene = std::mem::take(&mut guard.pending_scene); + let pending_reload = std::mem::take(&mut guard.pending_reload); + (pending_scene, pending_reload) + }; - for handle in pending { + for handle in pending_scene { let root_id = handle.id(); - let mut guard = self.inner.write().unwrap(); let scene = guard .active_scenes @@ -40,6 +46,15 @@ impl SceneManager { .or_insert_with(|| SceneInstance::new(root_id, handle.clone())); scene.spawn_entity_hierarchy(handle, asset_registry, commands); } + + for handle in pending_reload { + let resource_id = handle.id(); + for scene_instance in self.inner.write().unwrap().active_scenes.values_mut() { + if let Some(_entity) = scene_instance.find_entity(&resource_id) { + scene_instance.spawn_entity_hierarchy(handle.clone(), asset_registry, commands); + } + } + } } pub(crate) fn close_scene(&self, root_id: &ResourceTypeAndId, commands: &mut Commands<'_, '_>) { @@ -62,7 +77,36 @@ impl SceneManager { result } - pub(crate) fn add_pending(&self, entity: Handle) { - self.inner.write().unwrap().pending.push(entity); + pub async fn notify_changed_resources( + &self, + changed: &[ResourceTypeAndId], + asset_registry: &Arc, + ) { + let mut reloads = Vec::new(); + for scene_instance in self.inner.write().unwrap().active_scenes.values_mut() { + reloads.extend(scene_instance.notify_changed_resources(changed, asset_registry)); + } + + for (resource_id, job_result) in reloads { + match job_result.await { + Ok(load_result) => match load_result { + Ok(handle) => { + self.inner + .write() + .unwrap() + .pending_reload + 
.push(handle.into()); + } + Err(load_err) => { + lgn_tracing::error!("Failed to reload {} {:?}", resource_id, load_err); + } + }, + Err(job_error) => lgn_tracing::error!("{}", job_error), + } + } + } + + pub(crate) fn add_pending_scene(&self, entity: Handle) { + self.inner.write().unwrap().pending_scene.push(entity); } } diff --git a/crates/lgn-ubercompiler/Cargo.toml b/crates/lgn-ubercompiler/Cargo.toml index 045c3a7e88..b3e91b6d14 100644 --- a/crates/lgn-ubercompiler/Cargo.toml +++ b/crates/lgn-ubercompiler/Cargo.toml @@ -23,9 +23,7 @@ lgn-data-compiler = { path = "../lgn-data-compiler" } lgn-compiler-material = { path = "../lgn-material-compiler" } lgn-compiler-png2tex = { path = "../lgn-png2tex-compiler" } lgn-compiler-psd2tex = { path = "../lgn-psd2tex-compiler" } -lgn-compiler-gltf2model = { path = "../lgn-gltf2model-compiler" } -lgn-compiler-gltf2mat = { path = "../lgn-gltf2mat-compiler" } -lgn-compiler-gltf2tex = { path = "../lgn-gltf2tex-compiler" } +lgn-compiler-gltf = { path = "../lgn-gltf-compiler" } lgn-compiler-runtime-entity = { path = "../lgn-runtime-entity-compiler" } lgn-compiler-runtime-instance = { path = "../lgn-runtime-instance-compiler" } lgn-compiler-runtime-model = { path = "../lgn-runtime-model-compiler" } diff --git a/crates/lgn-ubercompiler/src/lib.rs b/crates/lgn-ubercompiler/src/lib.rs index 07a6216ea8..1d1fb890cc 100644 --- a/crates/lgn-ubercompiler/src/lib.rs +++ b/crates/lgn-ubercompiler/src/lib.rs @@ -16,8 +16,6 @@ pub fn create() -> compiler_node::CompilerRegistryOptions { .add_compiler(&lgn_compiler_test_split::COMPILER_INFO) .add_compiler(&lgn_compiler_testentity::COMPILER_INFO) .add_compiler(&lgn_compiler_tex2bin::COMPILER_INFO) - .add_compiler(&lgn_compiler_gltf2model::COMPILER_INFO) - .add_compiler(&lgn_compiler_gltf2mat::COMPILER_INFO) - .add_compiler(&lgn_compiler_gltf2tex::COMPILER_INFO) + .add_compiler(&lgn_compiler_gltf::COMPILER_INFO) .add_compiler(&lgn_compiler_scripting::COMPILER_INFO) } diff --git a/tests/sample-data-compiler/Cargo.toml b/tests/sample-data-compiler/Cargo.toml index d06cd3cb58..5e8dd1a12c 100644 --- a/tests/sample-data-compiler/Cargo.toml +++ b/tests/sample-data-compiler/Cargo.toml @@ -43,6 +43,7 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" typetag = "0.1.8" png = "0.17" +gltf = { version = "1.0.0" } [[bin]] name = "sample-data-compiler" diff --git a/tests/sample-data-compiler/src/raw_loader/mod.rs b/tests/sample-data-compiler/src/raw_loader/mod.rs index a3e6b8adb5..518334e488 100644 --- a/tests/sample-data-compiler/src/raw_loader/mod.rs +++ b/tests/sample-data-compiler/src/raw_loader/mod.rs @@ -12,14 +12,15 @@ use std::{ sync::Arc, }; -use generic_data::offline::{TestComponent, TestEntity}; use lgn_content_store::Provider; use lgn_data_offline::{Project, ResourcePathName, SourceResource}; -use lgn_data_runtime::{ - AssetRegistry, AssetRegistryOptions, Resource, ResourceDescriptor, ResourceId, ResourceType, - ResourceTypeAndId, +use lgn_data_runtime::prelude::*; +use lgn_graphics_data::{ + offline::Gltf, + offline::Psd, + offline::{Png, Visual}, }; -use lgn_graphics_data::offline::{Gltf, Png, Psd}; +use lgn_math::Vec3; use lgn_source_control::{BranchName, RepositoryIndex, RepositoryName}; use lgn_tracing::{error, info}; use lgn_utils::DefaultHasher; @@ -293,7 +294,6 @@ async fn create_or_find_default( ) -> HashMap { let mut ids = HashMap::::default(); build_resource_from_raw(file_paths, in_resources, project, &mut ids).await; - build_test_entity(project, &mut ids).await; ids } @@ -336,50 +336,6 @@ 
async fn build_resource_from_raw( } } -async fn build_test_entity( - project: &mut Project, - ids: &mut HashMap, -) { - // Create TestEntity Generic DataContainer - let name: ResourcePathName = "/entity/TEST_ENTITY_NAME.dc".into(); - let id = { - if let Ok(id) = project.find_resource(&name).await { - id - } else { - let id = ResourceTypeAndId { - kind: TestEntity::TYPE, - id: ResourceId::from_str("D8FE06A0-1317-46F5-902B-266B0EAE6FA8").unwrap(), - }; - let mut test_entity = TestEntity::new_with_id(name.as_str(), id); - test_entity.test_string = "Editable String Value".into(); - test_entity.test_float32 = 1.0; - test_entity.test_float64 = 2.0; - test_entity.test_int = 1337; - test_entity.test_position = lgn_math::Vec3::new(0.0, 100.0, 0.0); - - (0..3).for_each(|i| { - test_entity - .test_sub_type - .test_components - .push(Box::new(TestComponent { test_i32: i })); - }); - test_entity.test_option_set = Some(generic_data::offline::TestSubType2::default()); - test_entity.test_option_primitive_set = Some(lgn_math::Vec3::default()); - - if project.exists(id).await { - project.delete_resource(id).await.unwrap(); - } - - project - .add_resource_with_id(id, &test_entity) - .await - .unwrap(); - id - } - }; - ids.insert(name, id); -} - fn path_to_resource_name(path: &Path) -> ResourcePathName { let mut found = false; let name = path @@ -502,7 +458,9 @@ async fn load_gltf_resource( project: &mut Project, source_control_content_provider: &Arc, ) -> Option { + lgn_tracing::info!("Loading Gltf {}", name); let raw_data = fs::read(file).ok()?; + lgn_tracing::info!("Uploading raw gltf to content store"); let content_id = source_control_content_provider .write(&raw_data) .await @@ -510,7 +468,133 @@ async fn load_gltf_resource( let mut resource = Gltf::new_with_id(name.as_str(), resource_id); resource.content_id = content_id.to_string(); - project.save_resource(resource_id, &resource).await.unwrap(); + + lgn_tracing::info!("Parsing Gltf"); + let gltf = gltf::Gltf::from_slice_without_validation(&raw_data).unwrap(); + lgn_tracing::info!("Creating hierarchy"); + { + let root_name = String::from("Root"); + let root_id = ResourceTypeAndId { + kind: sample_data::offline::Entity::TYPE, + id: { + let mut hasher = DefaultHasher::new(); + resource_id.hash(&mut hasher); + root_name.hash(&mut hasher); + let id = hasher.finish(); + ResourceId::from_raw(u128::from(id) | (u128::from(id) << 64)) + }, + }; + + let mut root_entity = sample_data::offline::Entity::new_with_id( + &format!("/!{}/{}", resource_id, root_name), + root_id, + ); + root_entity.components.push(Box::new(offline_data::Name { + name: root_name.clone(), + })); + root_entity + .components + .push(Box::new(sample_data::offline::Transform::default())); + + if !project.exists(root_id).await { + project + .add_resource_with_id(root_id, &root_entity) + .await + .unwrap(); + } + + for (idx, node) in gltf.document.nodes().enumerate() { + let child_name = node.name().map_or(idx.to_string(), Into::into); + let child_id = ResourceTypeAndId { + kind: sample_data::offline::Entity::TYPE, + id: { + let mut hasher = DefaultHasher::new(); + resource_id.hash(&mut hasher); + child_name.hash(&mut hasher); + let id = hasher.finish(); + ResourceId::from_raw(u128::from(id) | (u128::from(id) << 64)) + }, + }; + + root_entity + .children + .push(ResourcePathId::from(child_id).push(sample_data::runtime::Entity::TYPE)); + + let mut child = sample_data::offline::Entity::new_with_id( + &format!("/!{}/{}", root_id, child_name), + child_id, + ); + child.parent = + 
Some(ResourcePathId::from(root_id).push(sample_data::runtime::Entity::TYPE)); + + child + .components + .push(Box::new(sample_data::offline::Name { name: child_name })); + + let (position, rotation, scale) = node.transform().decomposed(); + child + .components + .push(Box::new(sample_data::offline::Transform { + position: Vec3::new(position[0], position[1], -position[2]), + rotation: lgn_math::Quat::from_xyzw( + rotation[0], + rotation[1], + -rotation[2], + -rotation[3], + ), + scale: scale.into(), + })); + if let Some(mesh) = node.mesh() { + let visual = Box::new(lgn_graphics_data::offline::Visual { + renderable_geometry: Some(ResourcePathId::from(resource_id).push_named( + lgn_graphics_data::runtime::Model::TYPE, + mesh.name().unwrap(), + )), + color_blend: 0.0, + ..Visual::default() + }); + child.components.push(visual); + } + + if project.exists(child_id).await { + project.save_resource(child_id, &child).await.unwrap(); + } else { + project + .add_resource_with_id(child_id, &child) + .await + .unwrap(); + } + } + + let mut gltf_loader = offline_data::GltfLoader::default(); + for mesh in gltf.document.meshes() { + gltf_loader + .models + .push(ResourcePathId::from(resource_id).push_named( + lgn_graphics_data::runtime::Model::TYPE, + mesh.name().unwrap(), + )); + } + + let (materials, texture_references) = + lgn_graphics_data::gltf_utils::extract_materials_from_document( + &gltf.document, + resource_id, + ); + + gltf_loader.textures.extend(texture_references); + + for (_material, material_name) in &materials { + gltf_loader.materials.push( + ResourcePathId::from(resource_id) + .push_named(lgn_graphics_data::runtime::Material::TYPE, material_name), //.push(lgn_graphics_data::runtime::Material::TYPE), + ); + } + root_entity.components.push(Box::new(gltf_loader)); + + project.save_resource(root_id, &root_entity).await.unwrap(); + } + Some(resource_id) } diff --git a/tests/sample-data/raw/world/sample_1/helmet/helmet_root.ent b/tests/sample-data/raw/world/sample_1/helmet/helmet_root.ent index e8540d1b74..218daeba14 100644 --- a/tests/sample-data/raw/world/sample_1/helmet/helmet_root.ent +++ b/tests/sample-data/raw/world/sample_1/helmet/helmet_root.ent @@ -4,7 +4,7 @@ parent: Some("world/sample_1.ent"), components: [ Visual(( - renderable_geometry: Some("runtime_model(offline_model(models/helmet/FlightHelmet.gltf, 'FlightHelmet'))"), + renderable_geometry: Some("runtime_model(models/helmet/FlightHelmet.gltf, 'FlightHelmet')"), color: (255, 0, 0), color_blend: 0.0, shadow_receiver: true, @@ -19,15 +19,15 @@ )), GltfLoader(( models: [ - "runtime_model(offline_model(models/helmet/FlightHelmet.gltf, 'FlightHelmet'))", + "runtime_model(models/helmet/FlightHelmet.gltf, 'FlightHelmet')", ], materials: [ - "runtime_material(offline_material(models/helmet/FlightHelmet.gltf, 'HoseMat'))", - "runtime_material(offline_material(models/helmet/FlightHelmet.gltf, 'RubberWoodMat'))", - "runtime_material(offline_material(models/helmet/FlightHelmet.gltf, 'GlassPlasticMat'))", - "runtime_material(offline_material(models/helmet/FlightHelmet.gltf, 'MetalPartsMat'))", - "runtime_material(offline_material(models/helmet/FlightHelmet.gltf, 'LensesMat'))", - "runtime_material(offline_material(models/helmet/FlightHelmet.gltf, 'LeatherPartsMat'))", + "runtime_material(models/helmet/FlightHelmet.gltf, 'HoseMat')", + "runtime_material(models/helmet/FlightHelmet.gltf, 'RubberWoodMat')", + "runtime_material(models/helmet/FlightHelmet.gltf, 'GlassPlasticMat')", + "runtime_material(models/helmet/FlightHelmet.gltf, 
'MetalPartsMat')", + "runtime_material(models/helmet/FlightHelmet.gltf, 'LensesMat')", + "runtime_material(models/helmet/FlightHelmet.gltf, 'LeatherPartsMat')", ], textures: [ "bintexture(rawtexture(models/helmet/FlightHelmet.gltf, '0'), 'Normal')", From fbbe2654fee00d3a720a4db778cc34a2269d92cc Mon Sep 17 00:00:00 2001 From: Alexandre Ganea Date: Mon, 27 Jun 2022 15:02:00 -0400 Subject: [PATCH 2/2] Fix tests. --- ...codegen__typescript__tests__ts_api_generation.snap | 11 ++++++++--- ...degen__typescript__tests__ts_index_generation.snap | 2 ++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_api_generation.snap b/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_api_generation.snap index e91bb7375f..32bc9fe161 100644 --- a/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_api_generation.snap +++ b/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_api_generation.snap @@ -1,13 +1,18 @@ --- source: crates/lgn-api-codegen/src/typescript/mod.rs +assertion_line: 212 expression: content --- // Auto-generated file + import { // @ts-ignore - ApiClient, // @ts-ignore - InternalError, // @ts-ignore - defaultFetch, // @ts-ignore + ApiClient, + // @ts-ignore + InternalError, + // @ts-ignore + defaultFetch, + // @ts-ignore stringifyQueryObject, } from "./index"; // @ts-ignore diff --git a/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_index_generation.snap b/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_index_generation.snap index f1e7429421..c1ce99eaef 100644 --- a/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_index_generation.snap +++ b/crates/lgn-api-codegen/src/typescript/snapshots/lgn_api_codegen__typescript__tests__ts_index_generation.snap @@ -1,8 +1,10 @@ --- source: crates/lgn-api-codegen/src/typescript/mod.rs +assertion_line: 193 expression: content --- // Auto-generated file + import qs from "qs"; declare global {