From 48f2c693cb0f7e1dd951a6c853cc58088fc88c08 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:08:57 +0200 Subject: [PATCH 01/16] ignore _all_ virtual envs --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 9b321b2cd062..47d369e1ebff 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,7 @@ **/target_wasm # Python virtual environment: -/venv +**/venv # Python build artifacts: __pycache__ From f07f4697385a18080d24d6aee5bb0d5fa8630b10 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:09:22 +0200 Subject: [PATCH 02/16] more build tools --- Cargo.lock | 3 + crates/re_build_tools/Cargo.toml | 3 + crates/re_build_tools/src/hashing.rs | 155 ++++++++++++++++++ crates/re_build_tools/src/lib.rs | 9 +- crates/re_build_tools/src/rebuild_detector.rs | 2 +- 5 files changed, 170 insertions(+), 2 deletions(-) create mode 100644 crates/re_build_tools/src/hashing.rs diff --git a/Cargo.lock b/Cargo.lock index fd59e5760083..e9470f714961 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3917,7 +3917,10 @@ dependencies = [ "anyhow", "cargo_metadata", "glob", + "sha2", "time", + "unindent", + "walkdir", ] [[package]] diff --git a/crates/re_build_tools/Cargo.toml b/crates/re_build_tools/Cargo.toml index bf439458fb7f..73211452eeb1 100644 --- a/crates/re_build_tools/Cargo.toml +++ b/crates/re_build_tools/Cargo.toml @@ -20,4 +20,7 @@ all-features = true anyhow.workspace = true cargo_metadata = "0.15" glob = "0.3" +sha2 = "0.10" time = { workspace = true, features = ["formatting"] } +unindent = "0.1" +walkdir = "2.0" diff --git a/crates/re_build_tools/src/hashing.rs b/crates/re_build_tools/src/hashing.rs new file mode 100644 index 000000000000..c8a37035ce03 --- /dev/null +++ b/crates/re_build_tools/src/hashing.rs @@ -0,0 +1,155 @@ +use std::fmt::Write; +use std::path::{Path, PathBuf}; +use std::{fs, io}; + +use anyhow::Context as _; +use sha2::{Digest, Sha256}; + +// --- + +fn encode_hex(bytes: &[u8]) 
-> String { + let mut s = String::with_capacity(bytes.len() * 2); + for &b in bytes { + write!(&mut s, "{b:02x}").unwrap(); + } + s +} + +/// Walks the directory at `path` in filename order. +/// +/// If `extensions` is specified, only files with the right extensions will be hashed. +/// Specified extensions should include the dot, e.g. `.fbs`. +pub fn iter_dir<'a>( + path: impl AsRef, + extensions: Option<&'a [&'a str]>, +) -> impl Iterator + 'a { + fn filter(entry: &walkdir::DirEntry, extensions: Option<&[&str]>) -> bool { + let is_dir = entry.file_type().is_dir(); + let is_interesting = extensions.map_or(true, |extensions| { + extensions.iter().any(|ext| { + entry + .file_name() + .to_str() + .map_or(false, |s| s.ends_with(ext)) + }) + }); + is_dir || is_interesting + } + + let path = path.as_ref(); + walkdir::WalkDir::new(path) + .sort_by_file_name() + .into_iter() + .filter_entry(move |entry| filter(entry, extensions)) + .filter_map(|entry| entry.ok()) + .filter_map(|entry| entry.file_type().is_file().then(|| entry.into_path())) +} + +/// Given a file path, computes the sha256 hash of its contents and returns an hexadecimal string +/// for it. +/// +/// Panics if the file doesn't exist. +pub fn compute_file_hash(path: impl AsRef) -> String { + let mut hasher = Sha256::new(); + + let path = path.as_ref(); + let mut file = fs::File::open(path) + .with_context(|| format!("couldn't open {path:?}")) + .unwrap(); + io::copy(&mut file, &mut hasher) + .with_context(|| format!("couldn't copy from {path:?}")) + .unwrap(); + + encode_hex(hasher.finalize().as_slice()) +} + +/// Given a directory path, computes the sha256 hash of its contents (ordered by filename) and +/// returns an hexadecimal string for it. +/// +/// If `extensions` is specified, only files with the right extensions will be hashed. +/// Specified extensions should include the dot, e.g. `.fbs`. 
+pub fn compute_dir_hash<'a>(path: impl AsRef, extensions: Option<&'a [&'a str]>) -> String { + let mut hasher = Sha256::new(); + + let path = path.as_ref(); + for filepath in iter_dir(path, extensions) { + let mut file = fs::File::open(&filepath) + .with_context(|| format!("couldn't open {filepath:?}")) + .unwrap(); + io::copy(&mut file, &mut hasher) + .with_context(|| format!("couldn't copy from {filepath:?}")) + .unwrap(); + } + + encode_hex(hasher.finalize().as_slice()) +} + +/// Given a crate name, computes the sha256 hash of its source (ordered by filename) and +/// returns an hexadecimal string for it. +pub fn compute_crate_hash(pkg_name: impl AsRef) -> String { + use cargo_metadata::{CargoOpt, MetadataCommand}; + let metadata = MetadataCommand::new() + .features(CargoOpt::AllFeatures) + .exec() + .unwrap(); + + let pkg_name = pkg_name.as_ref(); + let mut files = Default::default(); + + let pkgs = crate::Packages::from_metadata(&metadata); + pkgs.track_implicit_dep(pkg_name, &mut files); + + let mut files = files.into_iter().collect::>(); + files.sort(); + + let hashes = files.into_iter().map(compute_file_hash).collect::>(); + let hashes = hashes.iter().map(|s| s.as_str()).collect::>(); + + compute_strings_hash(&hashes) +} + +/// Given a bunch of strings, computes the sha256 hash of their contents (in the order they +/// were passed in) and returns an hexadecimal string for it. +pub fn compute_strings_hash(strs: &[&str]) -> String { + let mut hasher = Sha256::new(); + + for s in strs { + hasher.update(s); + } + + encode_hex(hasher.finalize().as_slice()) +} + +/// Writes the given `hash` at the specified `path`. +/// +/// Panics on I/O errors. +/// +/// Use [`read_versioning_hash`] to read it back. 
+pub fn write_versioning_hash(path: impl AsRef, hash: impl AsRef) { + let path = path.as_ref(); + let hash = hash.as_ref(); + + let contents = unindent::unindent(&format!( + " + # This is a sha256 hash for all direct and indirect dependencies of this crate's build script. + # It can be safely removed at anytime to force the build script to run again. + # Check out build.rs to see how it's computed. + {hash} + " + )); + std::fs::write(path, contents) + .with_context(|| format!("couldn't write to {path:?}")) + .unwrap(); +} + +/// Reads back a versioning hash that was written with [`write_versioning_hash`]. +/// +/// Returns `None` on error. +pub fn read_versioning_hash(path: impl AsRef) -> Option { + let path = path.as_ref(); + std::fs::read_to_string(path).ok().and_then(|contents| { + contents + .lines() + .find_map(|line| (!line.trim().starts_with('#')).then(|| line.trim().to_owned())) + }) +} diff --git a/crates/re_build_tools/src/lib.rs b/crates/re_build_tools/src/lib.rs index 487ea8ae9adb..9f36c8f4e80a 100644 --- a/crates/re_build_tools/src/lib.rs +++ b/crates/re_build_tools/src/lib.rs @@ -8,9 +8,16 @@ use anyhow::Context as _; use std::process::Command; +mod hashing; mod rebuild_detector; -pub use rebuild_detector::{ +pub(crate) use self::rebuild_detector::Packages; + +pub use self::hashing::{ + compute_crate_hash, compute_dir_hash, compute_file_hash, compute_strings_hash, iter_dir, + read_versioning_hash, write_versioning_hash, +}; +pub use self::rebuild_detector::{ get_and_track_env_var, is_tracked_env_var_set, rebuild_if_crate_changed, rerun_if_changed, rerun_if_changed_glob, rerun_if_changed_or_doesnt_exist, write_file_if_necessary, }; diff --git a/crates/re_build_tools/src/rebuild_detector.rs b/crates/re_build_tools/src/rebuild_detector.rs index 102b09ced200..caa9605a34c9 100644 --- a/crates/re_build_tools/src/rebuild_detector.rs +++ b/crates/re_build_tools/src/rebuild_detector.rs @@ -105,7 +105,7 @@ pub fn write_file_if_necessary( // --- -struct 
Packages<'a> { +pub struct Packages<'a> { pkgs: HashMap<&'a str, &'a Package>, } From 98f538840c112d233a197e08c0a7fa729d42696e Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:22:43 +0200 Subject: [PATCH 03/16] new codegen/idl tag --- .github/workflows/labels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index aed2272b2b4b..2c039b36df3a 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -29,4 +29,4 @@ jobs: with: mode: minimum count: 1 - labels: "πŸ“Š analytics, πŸͺ³ bug, πŸ§‘β€πŸ’» dev experience, dependencies, πŸ“– documentation, πŸ’¬ discussion, examples, πŸ“‰ performance, 🐍 python API, ⛃ re_datastore, πŸ“Ί re_viewer, πŸ”Ί re_renderer, 🚜 refactor, β›΄ release, πŸ¦€ rust SDK, πŸ”¨ testing, ui, πŸ•ΈοΈ web" + labels: "πŸ“Š analytics, πŸͺ³ bug, codegen/idl, πŸ§‘β€πŸ’» dev experience, dependencies, πŸ“– documentation, πŸ’¬ discussion, examples, πŸ“‰ performance, 🐍 python API, ⛃ re_datastore, πŸ“Ί re_viewer, πŸ”Ί re_renderer, 🚜 refactor, β›΄ release, πŸ¦€ rust SDK, πŸ”¨ testing, ui, πŸ•ΈοΈ web" From 08862d5730c03e60c31e93e0638875cc7c389481 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:31:40 +0200 Subject: [PATCH 04/16] self-review --- crates/re_build_tools/src/hashing.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/crates/re_build_tools/src/hashing.rs b/crates/re_build_tools/src/hashing.rs index c8a37035ce03..1c9f8fd5aed3 100644 --- a/crates/re_build_tools/src/hashing.rs +++ b/crates/re_build_tools/src/hashing.rs @@ -17,7 +17,7 @@ fn encode_hex(bytes: &[u8]) -> String { /// Walks the directory at `path` in filename order. /// -/// If `extensions` is specified, only files with the right extensions will be hashed. +/// If `extensions` is specified, only files with the right extensions will be iterated. /// Specified extensions should include the dot, e.g. `.fbs`. 
pub fn iter_dir<'a>( path: impl AsRef, @@ -63,8 +63,10 @@ pub fn compute_file_hash(path: impl AsRef) -> String { encode_hex(hasher.finalize().as_slice()) } -/// Given a directory path, computes the sha256 hash of its contents (ordered by filename) and -/// returns an hexadecimal string for it. +/// Given a directory path, computes the sha256 hash of the accumulated contents of all of its +/// files (ordered by filename), and returns an hexadecimal string for it. +/// +/// This includes files in sub-directories (i.e. it's recursive). /// /// If `extensions` is specified, only files with the right extensions will be hashed. /// Specified extensions should include the dot, e.g. `.fbs`. @@ -84,8 +86,10 @@ pub fn compute_dir_hash<'a>(path: impl AsRef, extensions: Option<&'a [&'a encode_hex(hasher.finalize().as_slice()) } -/// Given a crate name, computes the sha256 hash of its source (ordered by filename) and +/// Given a crate name, computes the sha256 hash of its source code (ordered by filename) and /// returns an hexadecimal string for it. +/// +/// This includes the source code of all its direct and indirect dependencies. 
pub fn compute_crate_hash(pkg_name: impl AsRef) -> String { use cargo_metadata::{CargoOpt, MetadataCommand}; let metadata = MetadataCommand::new() @@ -137,7 +141,7 @@ pub fn write_versioning_hash(path: impl AsRef, hash: impl AsRef) { {hash} " )); - std::fs::write(path, contents) + std::fs::write(path, contents.trim()) .with_context(|| format!("couldn't write to {path:?}")) .unwrap(); } From 00b5d786a3ac3e68a63b55182749c2873a2c381d Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Tue, 13 Jun 2023 08:57:06 +0200 Subject: [PATCH 05/16] addressing PR comments --- crates/re_build_tools/src/hashing.rs | 34 +++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/crates/re_build_tools/src/hashing.rs b/crates/re_build_tools/src/hashing.rs index 1c9f8fd5aed3..f734651b8fa4 100644 --- a/crates/re_build_tools/src/hashing.rs +++ b/crates/re_build_tools/src/hashing.rs @@ -5,6 +5,8 @@ use std::{fs, io}; use anyhow::Context as _; use sha2::{Digest, Sha256}; +use crate::{rerun_if_changed, rerun_if_changed_or_doesnt_exist}; + // --- fn encode_hex(bytes: &[u8]) -> String { @@ -18,7 +20,7 @@ fn encode_hex(bytes: &[u8]) -> String { /// Walks the directory at `path` in filename order. /// /// If `extensions` is specified, only files with the right extensions will be iterated. -/// Specified extensions should include the dot, e.g. `.fbs`. +/// Specified extensions should _not_ include the leading dot, e.g. `fbs` rather than `.fbs`. 
pub fn iter_dir<'a>( path: impl AsRef, extensions: Option<&'a [&'a str]>, @@ -28,9 +30,9 @@ pub fn iter_dir<'a>( let is_interesting = extensions.map_or(true, |extensions| { extensions.iter().any(|ext| { entry - .file_name() - .to_str() - .map_or(false, |s| s.ends_with(ext)) + .path() + .extension() + .map_or(false, |ext2| *ext == ext2.to_string_lossy()) }) }); is_dir || is_interesting @@ -48,6 +50,8 @@ pub fn iter_dir<'a>( /// Given a file path, computes the sha256 hash of its contents and returns an hexadecimal string /// for it. /// +/// This will automatically emit a `rerun-if-changed` clause for the specified file. +/// /// Panics if the file doesn't exist. pub fn compute_file_hash(path: impl AsRef) -> String { let mut hasher = Sha256::new(); @@ -60,6 +64,8 @@ pub fn compute_file_hash(path: impl AsRef) -> String { .with_context(|| format!("couldn't copy from {path:?}")) .unwrap(); + rerun_if_changed(path); + encode_hex(hasher.finalize().as_slice()) } @@ -68,8 +74,10 @@ pub fn compute_file_hash(path: impl AsRef) -> String { /// /// This includes files in sub-directories (i.e. it's recursive). /// -/// If `extensions` is specified, only files with the right extensions will be hashed. -/// Specified extensions should include the dot, e.g. `.fbs`. +/// This will automatically emit a `rerun-if-changed` clause for all the files that were hashed. +/// +/// If `extensions` is specified, only files with the right extensions will be iterated. +/// Specified extensions should _not_ include the leading dot, e.g. `fbs` rather than `.fbs`. 
pub fn compute_dir_hash<'a>(path: impl AsRef, extensions: Option<&'a [&'a str]>) -> String { let mut hasher = Sha256::new(); @@ -81,6 +89,8 @@ pub fn compute_dir_hash<'a>(path: impl AsRef, extensions: Option<&'a [&'a io::copy(&mut file, &mut hasher) .with_context(|| format!("couldn't copy from {filepath:?}")) .unwrap(); + + rerun_if_changed(path); } encode_hex(hasher.finalize().as_slice()) @@ -90,6 +100,8 @@ pub fn compute_dir_hash<'a>(path: impl AsRef, extensions: Option<&'a [&'a /// returns an hexadecimal string for it. /// /// This includes the source code of all its direct and indirect dependencies. +/// +/// This will automatically emit a `rerun-if-changed` clause for all the files that were hashed. pub fn compute_crate_hash(pkg_name: impl AsRef) -> String { use cargo_metadata::{CargoOpt, MetadataCommand}; let metadata = MetadataCommand::new() @@ -126,6 +138,9 @@ pub fn compute_strings_hash(strs: &[&str]) -> String { /// Writes the given `hash` at the specified `path`. /// +/// `hash` should have been computed using of the methods in this module: [`compute_file_hash`], +/// [`compute_dir_hash`], [`compute_crate_hash`]. +/// /// Panics on I/O errors. /// /// Use [`read_versioning_hash`] to read it back. @@ -148,9 +163,16 @@ pub fn write_versioning_hash(path: impl AsRef, hash: impl AsRef) { /// Reads back a versioning hash that was written with [`write_versioning_hash`]. /// +/// This will automatically emit a `rerun-if-changed` clause for the specified filepath. +/// /// Returns `None` on error. pub fn read_versioning_hash(path: impl AsRef) -> Option { let path = path.as_ref(); + + // NOTE: It's important we trigger if the file doesn't exist, as this means the user explicitly + // deleted the versioning file, i.e. they're trying to force a rebuild. 
+ rerun_if_changed_or_doesnt_exist(path); + std::fs::read_to_string(path).ok().and_then(|contents| { contents .lines() From 19baaa9fa39a5c1d42bdbaed1618933c999f3b41 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:44:22 +0200 Subject: [PATCH 06/16] introduce re_types_builder --- Cargo.lock | 60 +- Cargo.toml | 1 + crates/re_types_builder/Cargo.toml | 38 + crates/re_types_builder/README.md | 13 + crates/re_types_builder/build.rs | 70 ++ .../definitions/reflection.fbs | 157 +++ crates/re_types_builder/src/arrow_registry.rs | 338 ++++++ crates/re_types_builder/src/codegen/common.rs | 41 + crates/re_types_builder/src/codegen/mod.rs | 32 + crates/re_types_builder/src/codegen/python.rs | 970 ++++++++++++++++++ crates/re_types_builder/src/codegen/rust.rs | 791 ++++++++++++++ crates/re_types_builder/src/lib.rs | 247 +++++ crates/re_types_builder/src/objects.rs | 814 +++++++++++++++ 13 files changed, 3569 insertions(+), 3 deletions(-) create mode 100644 crates/re_types_builder/Cargo.toml create mode 100644 crates/re_types_builder/README.md create mode 100644 crates/re_types_builder/build.rs create mode 100644 crates/re_types_builder/definitions/reflection.fbs create mode 100644 crates/re_types_builder/src/arrow_registry.rs create mode 100644 crates/re_types_builder/src/codegen/common.rs create mode 100644 crates/re_types_builder/src/codegen/mod.rs create mode 100644 crates/re_types_builder/src/codegen/python.rs create mode 100644 crates/re_types_builder/src/codegen/rust.rs create mode 100644 crates/re_types_builder/src/lib.rs create mode 100644 crates/re_types_builder/src/objects.rs diff --git a/Cargo.lock b/Cargo.lock index e9470f714961..c846ddcb0499 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -987,6 +987,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" 
+dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation" version = "0.9.3" @@ -1738,6 +1747,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "flatbuffers" +version = "23.5.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dac53e22462d78c16d64a1cd22371b54cc3fe94aa15e7886a2fa6e5d1ab8640" +dependencies = [ + "bitflags 1.3.2", + "rustc_version", +] + [[package]] name = "flate2" version = "1.0.26" @@ -3710,7 +3729,7 @@ dependencies = [ "pyo3-build-config", "pyo3-ffi", "pyo3-macros", - "unindent", + "unindent 0.1.11", ] [[package]] @@ -3919,7 +3938,7 @@ dependencies = [ "glob", "sha2", "time", - "unindent", + "unindent 0.1.11", "walkdir", ] @@ -4208,7 +4227,7 @@ dependencies = [ "thiserror", "tobj", "type-map", - "unindent", + "unindent 0.2.1", "walkdir", "wasm-bindgen-futures", "web-sys", @@ -4478,6 +4497,20 @@ dependencies = [ "web-time", ] +[[package]] +name = "re_types_builder" +version = "0.7.0-alpha.0" +dependencies = [ + "anyhow", + "arrow2", + "convert_case", + "flatbuffers", + "indent", + "re_build_tools", + "unindent 0.2.1", + "xshell", +] + [[package]] name = "re_ui" version = "0.7.0-alpha.0" @@ -5768,6 +5801,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" +[[package]] +name = "unindent" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa30f5ea51ff7edfc797c6d3f9ec8cbd8cfedef5371766b7181d33977f4814f" + [[package]] name = "untrusted" version = "0.7.1" @@ -6617,6 +6656,21 @@ version = "0.8.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d8f380ae16a37b30e6a2cf67040608071384b1450c189e61bea3ff57cde922d" +[[package]] +name = "xshell" +version = "0.2.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "962c039b3a7b16cf4e9a4248397c6585c07547412e7d6a6e035389a802dcfe90" +dependencies = [ + "xshell-macros", +] + +[[package]] +name = "xshell-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dbabb1cbd15a1d6d12d9ed6b35cc6777d4af87ab3ba155ea37215f20beab80c" + [[package]] name = "xxhash-rust" version = "0.8.6" diff --git a/Cargo.toml b/Cargo.toml index fbb18c31b6d8..425286b47697 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,6 +56,7 @@ re_tensor_ops = { path = "crates/re_tensor_ops", version = "0.7.0-alpha.0", defa re_time_panel = { path = "crates/re_time_panel", version = "=0.7.0-alpha.0", default-features = false } re_tracing = { path = "crates/re_tracing", version = "0.7.0-alpha.0", default-features = false } re_tuid = { path = "crates/re_tuid", version = "0.7.0-alpha.0", default-features = false } +re_types_builder = { path = "crates/re_types_builder", version = "=0.7.0-alpha.0", default-features = false } re_ui = { path = "crates/re_ui", version = "0.7.0-alpha.0", default-features = false } re_viewer = { path = "crates/re_viewer", version = "0.7.0-alpha.0", default-features = false } re_viewer_context = { path = "crates/re_viewer_context", version = "0.7.0-alpha.0", default-features = false } diff --git a/crates/re_types_builder/Cargo.toml b/crates/re_types_builder/Cargo.toml new file mode 100644 index 000000000000..01b3cc8efc4a --- /dev/null +++ b/crates/re_types_builder/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "re_types_builder" +authors.workspace = true +description = "Generates code for Rerun's SDKs from flatbuffers definitions." 
+edition.workspace = true +homepage.workspace = true +include.workspace = true +license.workspace = true +publish = true +readme = "README.md" +repository.workspace = true +rust-version.workspace = true +version.workspace = true + + +[package.metadata.docs.rs] +all-features = true + + +[dependencies] + +# External +anyhow.workspace = true +arrow2.workspace = true +convert_case = "0.6" +flatbuffers = "23.0" +indent = "0.1" +unindent = "0.2" +xshell = "0.2" + + +[build-dependencies] + +# Rerun +re_build_tools.workspace = true + +# External +xshell = "0.2" diff --git a/crates/re_types_builder/README.md b/crates/re_types_builder/README.md new file mode 100644 index 000000000000..80181b101dfb --- /dev/null +++ b/crates/re_types_builder/README.md @@ -0,0 +1,13 @@ +# re_types_builder + +Part of the [`rerun`](https://github.com/rerun-io/rerun) family of crates. + +[![Latest version](https://img.shields.io/crates/v/re_types_builder.svg)](https://crates.io/crates/re_types_builder) +[![Documentation](https://docs.rs/re_types_builder/badge.svg)](https://docs.rs/re_types_builder) +![MIT](https://img.shields.io/badge/license-MIT-blue.svg) +![Apache](https://img.shields.io/badge/license-Apache-blue.svg) + +This crate implements Rerun's code generation tools. + +These tools translate language-agnostic IDL definitions (flatbuffers) into code. +They are invoked from `re_types`' build script (`build.rs`). diff --git a/crates/re_types_builder/build.rs b/crates/re_types_builder/build.rs new file mode 100644 index 000000000000..199ee8e2a8bb --- /dev/null +++ b/crates/re_types_builder/build.rs @@ -0,0 +1,70 @@ +//! Generates flatbuffers reflection code from `reflection.fbs`. + +use xshell::{cmd, Shell}; + +use re_build_tools::{ + compute_file_hash, is_tracked_env_var_set, read_versioning_hash, rerun_if_changed, + rerun_if_changed_or_doesnt_exist, write_versioning_hash, +}; + +// --- + +// NOTE: Don't need to add extra context to xshell invocations, it does so on its own. 
+ +const SOURCE_HASH_PATH: &str = "./source_hash.txt"; +const FBS_REFLECTION_DEFINITION_PATH: &str = "./definitions/reflection.fbs"; + +fn main() { + if std::env::var("CI").is_ok() { + // Don't run on CI! + // + // The code we're generating here is actual source code that gets committed into the + // repository. + return; + } + + if !is_tracked_env_var_set("IS_IN_RERUN_WORKSPACE") { + // Only run if we are in the rerun workspace, not on users machines. + return; + } + if is_tracked_env_var_set("RERUN_IS_PUBLISHING") { + // We don't need to rebuild - we should have done so beforehand! + // See `RELEASES.md` + return; + } + + rerun_if_changed_or_doesnt_exist(SOURCE_HASH_PATH); + rerun_if_changed(FBS_REFLECTION_DEFINITION_PATH); + + let cur_hash = read_versioning_hash(SOURCE_HASH_PATH); + let new_hash = compute_file_hash(FBS_REFLECTION_DEFINITION_PATH); + + // Leave these be please, very useful when debugging. + eprintln!("cur_hash: {cur_hash:?}"); + eprintln!("new_hash: {new_hash:?}"); + + if let Some(cur_hash) = cur_hash { + if cur_hash == new_hash { + // Source definition hasn't changed, no need to do anything. + return; + } + } + + let sh = Shell::new().unwrap(); + cmd!( + sh, + "flatc -o src/ --rust --gen-onefile --filename-suffix '' {FBS_REFLECTION_DEFINITION_PATH}" + ) + .run() + .unwrap(); + + // NOTE: We're purposefully ignoring the error here. + // + // In the very unlikely chance that the user doesn't have `rustfmt` in their $PATH, there's + // still no good reason to fail the build. + // + // The CI will catch the unformatted file at PR time and complain appropriately anyhow. 
+ cmd!(sh, "cargo fmt").run().ok(); + + write_versioning_hash(SOURCE_HASH_PATH, new_hash); +} diff --git a/crates/re_types_builder/definitions/reflection.fbs b/crates/re_types_builder/definitions/reflection.fbs new file mode 100644 index 000000000000..513311f1b9c3 --- /dev/null +++ b/crates/re_types_builder/definitions/reflection.fbs @@ -0,0 +1,157 @@ +// This schema defines objects that represent a parsed schema, like +// the binary version of a .fbs file. +// This could be used to operate on unknown FlatBuffers at runtime. +// It can even ... represent itself (!) + +namespace reflection; + +// These must correspond to the enum in idl.h. +enum BaseType : byte { + None, + UType, + Bool, + Byte, + UByte, + Short, + UShort, + Int, + UInt, + Long, + ULong, + Float, + Double, + String, + Vector, + Obj, // Used for tables & structs. + Union, + Array, + Vector64, + + // Add any new type above this value. + MaxBaseType +} + +table Type { + base_type:BaseType; + element:BaseType = None; // Only if base_type == Vector + // or base_type == Array. + index:int = -1; // If base_type == Object, index into "objects" below. + // If base_type == Union, UnionType, or integral derived + // from an enum, index into "enums" below. + // If base_type == Vector && element == Union or UnionType. + fixed_length:uint16 = 0; // Only if base_type == Array. + /// The size (octets) of the `base_type` field. + base_size:uint = 4; // 4 Is a common size due to offsets being that size. + /// The size (octets) of the `element` field, if present. + element_size:uint = 0; +} + +table KeyValue { + key:string (required, key); + value:string; +} + +table EnumVal { + name:string (required); + value:long (key); + object:Object (deprecated); + union_type:Type; + documentation:[string]; + attributes:[KeyValue]; +} + +table Enum { + name:string (required, key); + values:[EnumVal] (required); // In order of their values. 
+ is_union:bool = false; + underlying_type:Type (required); + attributes:[KeyValue]; + documentation:[string]; + /// File that this Enum is declared in. + declaration_file: string; +} + +table Field { + name:string (required, key); + type:Type (required); + id:ushort; + offset:ushort; // Offset into the vtable for tables, or into the struct. + default_integer:long = 0; + default_real:double = 0.0; + deprecated:bool = false; + required:bool = false; + key:bool = false; + attributes:[KeyValue]; + documentation:[string]; + optional:bool = false; + /// Number of padding octets to always add after this field. Structs only. + padding:uint16 = 0; + /// If the field uses 64-bit offsets. + offset64:bool = false; +} + +table Object { // Used for both tables and structs. + name:string (required, key); + fields:[Field] (required); // Sorted. + is_struct:bool = false; + minalign:int; + bytesize:int; // For structs. + attributes:[KeyValue]; + documentation:[string]; + /// File that this Object is declared in. + declaration_file: string; +} + +table RPCCall { + name:string (required, key); + request:Object (required); // must be a table (not a struct) + response:Object (required); // must be a table (not a struct) + attributes:[KeyValue]; + documentation:[string]; +} + +table Service { + name:string (required, key); + calls:[RPCCall]; + attributes:[KeyValue]; + documentation:[string]; + /// File that this Service is declared in. + declaration_file: string; +} + +/// New schema language features that are not supported by old code generators. +enum AdvancedFeatures : ulong (bit_flags) { + AdvancedArrayFeatures, + AdvancedUnionFeatures, + OptionalScalars, + DefaultVectorsAndStrings, +} + +/// File specific information. +/// Symbols declared within a file may be recovered by iterating over all +/// symbols and examining the `declaration_file` field. +table SchemaFile { + /// Filename, relative to project root. 
+ filename:string (required, key); + /// Names of included files, relative to project root. + included_filenames:[string]; +} + +table Schema { + objects:[Object] (required); // Sorted. + enums:[Enum] (required); // Sorted. + file_ident:string; + file_ext:string; + root_table:Object; + services:[Service]; // Sorted. + advanced_features:AdvancedFeatures; + /// All the files used in this compilation. Files are relative to where + /// flatc was invoked. + fbs_files:[SchemaFile]; // Sorted. +} + +root_type Schema; + +file_identifier "BFBS"; +file_extension "bfbs"; + diff --git a/crates/re_types_builder/src/arrow_registry.rs b/crates/re_types_builder/src/arrow_registry.rs new file mode 100644 index 000000000000..dd07e7fba4d1 --- /dev/null +++ b/crates/re_types_builder/src/arrow_registry.rs @@ -0,0 +1,338 @@ +//! The Arrow registry keeps track of all type definitions and maps them to Arrow datatypes. + +use anyhow::Context as _; +use arrow2::datatypes::{DataType, Field, UnionMode}; +use std::collections::{BTreeMap, HashMap}; + +use crate::{ElementType, Object, Type}; + +// --- + +// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions +pub const ARROW_ATTR_TRANSPARENT: &str = "arrow.attr.transparent"; +pub const ARROW_ATTR_SPARSE_UNION: &str = "arrow.attr.sparse_union"; + +// --- Registry --- + +/// Computes and maintains a registry of [`arrow2::datatypes::DataType`]s for specified flatbuffers +/// definitions. +#[derive(Debug, Default)] +pub struct ArrowRegistry { + registry: HashMap, +} + +impl ArrowRegistry { + /// Computes the Arrow datatype for the specified object and stores it in the registry, to be + /// resolved later on. + pub fn register(&mut self, obj: &Object) { + let (fqname, datatype) = (obj.fqname.clone(), self.arrow_datatype_from_object(obj)); + self.registry.insert(fqname, datatype); + } + + /// Retrieves the [`arrow2::datatypes::DataType`] associated with the given fully-qualified + /// name, if any. 
+ /// + /// This does type resolution just-in-time. + pub fn try_get(&self, fqname: impl AsRef) -> Option { + self.registry + .get(fqname.as_ref()) + .map(|dt| dt.resolve(self)) + } + + /// Retrieves the [`arrow2::datatypes::DataType`] associated with the given fully-qualified + /// name. + /// + /// Panics if missing. + /// + /// This does type resolution just-in-time. + pub fn get(&self, fqname: impl AsRef) -> DataType { + let fqname = fqname.as_ref(); + self.try_get(fqname) + .with_context(|| format!("{fqname:?} not found in Arrow registry")) + .unwrap() + } + + // --- + + fn arrow_datatype_from_object(&self, obj: &Object) -> LazyDatatype { + let is_struct = obj.is_struct(); + + let is_transparent = obj.try_get_attr::(ARROW_ATTR_TRANSPARENT).is_some(); + let num_fields = obj.fields.len(); + + assert!( + !(is_transparent && (!is_struct || num_fields != 1)), + "cannot have a transparent arrow object with any number of fields but 1: {:?} has {num_fields}", + obj.fqname, + ); + + if is_transparent { + self.arrow_datatype_from_type(&obj.fields[0].typ) + } else if is_struct { + LazyDatatype::Extension( + obj.fqname.clone(), + Box::new(LazyDatatype::Struct( + obj.fields + .iter() + .map(|field| LazyField { + name: field.name.clone(), + datatype: self.arrow_datatype_from_type(&field.typ), + is_nullable: field.required, + metadata: Default::default(), + }) + .collect(), + )), + None, + ) + } else { + let is_sparse = obj + .try_get_attr::(ARROW_ATTR_SPARSE_UNION) + .is_some(); + LazyDatatype::Extension( + obj.fqname.clone(), + Box::new(LazyDatatype::Union( + obj.fields + .iter() + .map(|field| LazyField { + name: field.name.clone(), + datatype: self.arrow_datatype_from_type(&field.typ), + is_nullable: false, + metadata: Default::default(), + }) + .collect(), + None, + if is_sparse { + arrow2::datatypes::UnionMode::Sparse + } else { + arrow2::datatypes::UnionMode::Dense + }, + )), + None, + ) + } + } + + fn arrow_datatype_from_type(&self, typ: &Type) -> LazyDatatype { + 
match typ { + Type::UInt8 => LazyDatatype::UInt8, + Type::UInt16 => LazyDatatype::UInt16, + Type::UInt32 => LazyDatatype::UInt32, + Type::UInt64 => LazyDatatype::UInt64, + Type::Int8 => LazyDatatype::Int8, + Type::Int16 => LazyDatatype::Int16, + Type::Int32 => LazyDatatype::Int32, + Type::Int64 => LazyDatatype::Int64, + Type::Bool => LazyDatatype::Boolean, + Type::Float16 => LazyDatatype::Float16, + Type::Float32 => LazyDatatype::Float32, + Type::Float64 => LazyDatatype::Float64, + Type::String => LazyDatatype::Utf8, + Type::Array { elem_type, length } => LazyDatatype::FixedSizeList( + Box::new(LazyField { + name: "item".into(), + datatype: self.arrow_datatype_from_element_type(elem_type), + is_nullable: false, + metadata: Default::default(), + }), + *length, + ), + Type::Vector { elem_type } => LazyDatatype::List(Box::new(LazyField { + name: "item".into(), + datatype: self.arrow_datatype_from_element_type(elem_type), + is_nullable: false, + metadata: Default::default(), + })), + Type::Object(fqname) => LazyDatatype::Unresolved(fqname.clone()), + } + } + + fn arrow_datatype_from_element_type(&self, typ: &ElementType) -> LazyDatatype { + _ = self; + match typ { + ElementType::UInt8 => LazyDatatype::UInt8, + ElementType::UInt16 => LazyDatatype::UInt16, + ElementType::UInt32 => LazyDatatype::UInt32, + ElementType::UInt64 => LazyDatatype::UInt64, + ElementType::Int8 => LazyDatatype::Int8, + ElementType::Int16 => LazyDatatype::Int16, + ElementType::Int32 => LazyDatatype::Int32, + ElementType::Int64 => LazyDatatype::Int64, + ElementType::Bool => LazyDatatype::Boolean, + ElementType::Float16 => LazyDatatype::Float16, + ElementType::Float32 => LazyDatatype::Float32, + ElementType::Float64 => LazyDatatype::Float64, + ElementType::String => LazyDatatype::Utf8, + ElementType::Object(fqname) => LazyDatatype::Unresolved(fqname.clone()), + } + } +} + +// --- Field --- + +/// A yet-to-be-resolved [`arrow2::datatypes::Field`]. 
+#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct LazyField { + /// Its name + pub name: String, + + /// Its logical [`DataType`] + pub datatype: LazyDatatype, + + /// Its nullability + pub is_nullable: bool, + + /// Additional custom (opaque) metadata. + pub metadata: BTreeMap, +} + +impl From for LazyField { + fn from(field: Field) -> Self { + let Field { + name, + data_type, + is_nullable, + metadata, + } = field; + + Self { + name, + datatype: data_type.into(), + is_nullable, + metadata, + } + } +} + +impl LazyField { + /// Recursively resolves the field using the specified `registry`. + fn resolve(&self, registry: &ArrowRegistry) -> Field { + Field { + name: self.name.clone(), + data_type: self.datatype.resolve(registry), + is_nullable: self.is_nullable, + metadata: self.metadata.clone(), + } + } +} + +// --- Datatype --- + +/// A yet-to-be-resolved [`arrow2::datatypes::DataType`]. +/// +/// Type resolution is a two-pass process as we first need to register all existing types before we +/// can denormalize their definitions into their parents. 
+#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub enum LazyDatatype { + Null, + Boolean, + Int8, + Int16, + Int32, + Int64, + UInt8, + UInt16, + UInt32, + UInt64, + Float16, + Float32, + Float64, + Binary, + FixedSizeBinary(usize), + LargeBinary, + Utf8, + LargeUtf8, + List(Box), + FixedSizeList(Box, usize), + LargeList(Box), + Struct(Vec), + Union(Vec, Option>, UnionMode), + Extension(String, Box, Option), + Unresolved(String), // fqname +} + +impl From for LazyDatatype { + fn from(datatype: DataType) -> Self { + match datatype { + DataType::Null => LazyDatatype::Null, + DataType::Boolean => LazyDatatype::Boolean, + DataType::Int8 => LazyDatatype::Int8, + DataType::Int16 => LazyDatatype::Int16, + DataType::Int32 => LazyDatatype::Int32, + DataType::Int64 => LazyDatatype::Int64, + DataType::UInt8 => LazyDatatype::UInt8, + DataType::UInt16 => LazyDatatype::UInt16, + DataType::UInt32 => LazyDatatype::UInt32, + DataType::UInt64 => LazyDatatype::UInt64, + DataType::Float16 => LazyDatatype::Float16, + DataType::Float32 => LazyDatatype::Float32, + DataType::Float64 => LazyDatatype::Float64, + DataType::Binary => LazyDatatype::Binary, + DataType::FixedSizeBinary(length) => LazyDatatype::FixedSizeBinary(length), + DataType::LargeBinary => LazyDatatype::LargeBinary, + DataType::Utf8 => LazyDatatype::Utf8, + DataType::LargeUtf8 => LazyDatatype::LargeUtf8, + DataType::List(field) => LazyDatatype::List(Box::new((*field).into())), + DataType::FixedSizeList(field, length) => { + LazyDatatype::FixedSizeList(Box::new((*field).into()), length) + } + DataType::LargeList(field) => LazyDatatype::LargeList(Box::new((*field).into())), + DataType::Struct(fields) => { + LazyDatatype::Struct(fields.into_iter().map(Into::into).collect()) + } + DataType::Union(fields, x, mode) => { + LazyDatatype::Union(fields.into_iter().map(Into::into).collect(), x, mode) + } + DataType::Extension(name, datatype, metadata) => { + LazyDatatype::Extension(name, Box::new((*datatype).into()), metadata) + } + _ 
=> unimplemented!("{datatype:#?}"), // NOLINT + } + } +} + +impl LazyDatatype { + /// Recursively resolves the datatype using the specified `registry`. + fn resolve(&self, registry: &ArrowRegistry) -> DataType { + match self { + LazyDatatype::Null => DataType::Null, + LazyDatatype::Boolean => DataType::Boolean, + LazyDatatype::Int8 => DataType::Int8, + LazyDatatype::Int16 => DataType::Int16, + LazyDatatype::Int32 => DataType::Int32, + LazyDatatype::Int64 => DataType::Int64, + LazyDatatype::UInt8 => DataType::UInt8, + LazyDatatype::UInt16 => DataType::UInt16, + LazyDatatype::UInt32 => DataType::UInt32, + LazyDatatype::UInt64 => DataType::UInt64, + LazyDatatype::Float16 => DataType::Float16, + LazyDatatype::Float32 => DataType::Float32, + LazyDatatype::Float64 => DataType::Float64, + LazyDatatype::Binary => DataType::Binary, + LazyDatatype::FixedSizeBinary(length) => DataType::FixedSizeBinary(*length), + LazyDatatype::LargeBinary => DataType::LargeBinary, + LazyDatatype::Utf8 => DataType::Utf8, + LazyDatatype::LargeUtf8 => DataType::LargeUtf8, + LazyDatatype::List(field) => DataType::List(Box::new(field.resolve(registry))), + LazyDatatype::FixedSizeList(field, length) => { + DataType::FixedSizeList(Box::new(field.resolve(registry)), *length) + } + LazyDatatype::LargeList(field) => { + DataType::LargeList(Box::new(field.resolve(registry))) + } + LazyDatatype::Struct(fields) => { + DataType::Struct(fields.iter().map(|field| field.resolve(registry)).collect()) + } + LazyDatatype::Union(fields, x, mode) => DataType::Union( + fields.iter().map(|field| field.resolve(registry)).collect(), + x.clone(), + *mode, + ), + LazyDatatype::Extension(name, datatype, metadata) => DataType::Extension( + name.clone(), + Box::new(datatype.resolve(registry)), + metadata.clone(), + ), + LazyDatatype::Unresolved(fqname) => registry.get(fqname), + } + } +} diff --git a/crates/re_types_builder/src/codegen/common.rs b/crates/re_types_builder/src/codegen/common.rs new file mode 100644 index 
000000000000..4317b76c481c --- /dev/null +++ b/crates/re_types_builder/src/codegen/common.rs @@ -0,0 +1,41 @@ +//! Helpers common to all codegen passes. + +use crate::Docs; + +pub fn quote_doc_from_docs(docs: &Docs, tags: &[&str]) -> Vec { + fn trim_mono_start_whitespace_if_needed(line: &str) -> &str { + if line.chars().next().map_or(false, |c| c.is_whitespace()) { + // NOTE: don't trim! only that very specific space should go away + &line[1..] + } else { + line + } + } + + let mut lines = Vec::new(); + + for line in &docs.doc { + lines.push(trim_mono_start_whitespace_if_needed(line).to_owned()); + } + + let empty = Vec::new(); + for tag in tags { + for line in docs.tagged_docs.get(*tag).unwrap_or(&empty) { + lines.push(trim_mono_start_whitespace_if_needed(line).to_owned()); + } + } + + // NOTE: remove duplicated blank lines. + lines.dedup(); + + // NOTE: remove trailing blank lines. + while let Some(line) = lines.last() { + if line.is_empty() { + lines.pop(); + } else { + break; + } + } + + lines +} diff --git a/crates/re_types_builder/src/codegen/mod.rs b/crates/re_types_builder/src/codegen/mod.rs new file mode 100644 index 000000000000..49bdc9d9278f --- /dev/null +++ b/crates/re_types_builder/src/codegen/mod.rs @@ -0,0 +1,32 @@ +/// Implements the codegen pass. +pub trait CodeGenerator { + /// Generates user-facing code from [`crate::Objects`]. + /// + /// Returns the paths of all generated files. 
+ fn quote( + &mut self, + objs: &crate::Objects, + arrow_registry: &crate::ArrowRegistry, + ) -> Vec; +} + +// --- + +pub const AUTOGEN_WARNING: &str = + "NOTE: This file was autogenerated by re_types_builder; DO NOT EDIT."; + +// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions +pub const RERUN_ATTR_COMPONENT_REQUIRED: &str = "rerun.attr.component_required"; +pub const RERUN_ATTR_COMPONENT_RECOMMENDED: &str = "rerun.attr.component_recommended"; +pub const RERUN_ATTR_COMPONENT_OPTIONAL: &str = "rerun.attr.component_optional"; + +// --- + +mod common; +use self::common::quote_doc_from_docs; + +mod python; +mod rust; + +pub use self::python::PythonCodeGenerator; +pub use self::rust::RustCodeGenerator; diff --git a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs new file mode 100644 index 000000000000..9b6f85970405 --- /dev/null +++ b/crates/re_types_builder/src/codegen/python.rs @@ -0,0 +1,970 @@ +//! Implements the Python codegen pass. + +use anyhow::Context as _; +use std::{ + collections::{BTreeMap, HashMap}, + io::Write, + path::{Path, PathBuf}, +}; + +use crate::{ + codegen::AUTOGEN_WARNING, ArrowRegistry, CodeGenerator, Docs, ElementType, Object, ObjectField, + ObjectKind, Objects, Type, +}; + +// --- + +// NOTE: `rerun2` while we figure out how to integrate back into the main SDK. 
+const MODULE_NAME: &str = "rerun2"; + +// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions +pub const ATTR_TRANSPARENT: &str = "python.attr.transparent"; +pub const ATTR_ALIASES: &str = "python.attr.aliases"; +pub const ATTR_ARRAY_ALIASES: &str = "python.attr.array_aliases"; + +pub struct PythonCodeGenerator { + pkg_path: PathBuf, +} + +impl PythonCodeGenerator { + pub fn new(pkg_path: impl Into) -> Self { + Self { + pkg_path: pkg_path.into(), + } + } +} + +impl CodeGenerator for PythonCodeGenerator { + fn quote(&mut self, objs: &Objects, arrow_registry: &ArrowRegistry) -> Vec { + let mut filepaths = Vec::new(); + + let datatypes_path = self.pkg_path.join("datatypes"); + std::fs::create_dir_all(&datatypes_path) + .with_context(|| format!("{datatypes_path:?}")) + .unwrap(); + filepaths.extend( + quote_objects( + datatypes_path, + arrow_registry, + objs, + &objs.ordered_datatypes(), + ) + .0, + ); + + let components_path = self.pkg_path.join("components"); + std::fs::create_dir_all(&components_path) + .with_context(|| format!("{components_path:?}")) + .unwrap(); + filepaths.extend( + quote_objects( + components_path, + arrow_registry, + objs, + &objs.ordered_components(), + ) + .0, + ); + + let archetypes_path = self.pkg_path.join("archetypes"); + std::fs::create_dir_all(&archetypes_path) + .with_context(|| format!("{archetypes_path:?}")) + .unwrap(); + let (paths, archetype_names) = quote_objects( + archetypes_path, + arrow_registry, + objs, + &objs.ordered_archetypes(), + ); + filepaths.extend(paths); + + filepaths.push(quote_lib(&self.pkg_path, &archetype_names)); + + filepaths + } +} + +// --- File management --- + +fn quote_lib(out_path: impl AsRef, archetype_names: &[String]) -> PathBuf { + let out_path = out_path.as_ref(); + + std::fs::create_dir_all(out_path) + .with_context(|| format!("{out_path:?}")) + .unwrap(); + + let path = out_path.join("__init__.py"); + let archetype_names = archetype_names.join(", "); + + let 
mut code = String::new(); + + // NOTE: noqa F401 (unused import) because while unnecessary these listings are + // very useful to look at. + code += &unindent::unindent(&format!( + r#" + # {AUTOGEN_WARNING} + + from .archetypes import {archetype_names} # noqa: F401 + "# + )); + + std::fs::write(&path, code) + .with_context(|| format!("{path:?}")) + .unwrap(); + + path +} + +/// Returns all filepaths + all object names. +fn quote_objects( + out_path: impl AsRef, + arrow_registry: &ArrowRegistry, + all_objects: &Objects, + objs: &[&Object], +) -> (Vec, Vec) { + let out_path = out_path.as_ref(); + + let mut filepaths = Vec::new(); + let mut all_names = Vec::new(); + + let mut files = HashMap::>::new(); + for obj in objs { + all_names.push(obj.name.clone()); + + let obj = if obj.is_struct() { + QuotedObject::from_struct(arrow_registry, all_objects, obj) + } else { + QuotedObject::from_union(arrow_registry, all_objects, obj) + }; + let filepath = out_path.join(obj.filepath.file_name().unwrap()); + + match files.entry(filepath.clone()) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().push(obj); + } + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(vec![obj]); + } + }; + } + + // (module_name, [object_name]) + let mut mods = HashMap::>::new(); + + // rerun/{datatypes|components|archetypes}/{xxx}.py + for (filepath, objs) in files { + let names = objs + .iter() + .map(|obj| match obj.kind { + ObjectKind::Datatype | ObjectKind::Component => { + let name = &obj.name; + format!("{name}, {name}Like, {name}Array, {name}ArrayLike, {name}Type") + } + ObjectKind::Archetype => obj.name.clone(), + }) + .collect::>(); + + // NOTE: Isolating the file stem only works because we're handling datatypes, components + // and archetypes separately (and even then it's a bit shady, eh). 
+ match mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().extend(names); + } + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(names); + } + }; + + filepaths.push(filepath.clone()); + let mut file = std::fs::File::create(&filepath) + .with_context(|| format!("{filepath:?}")) + .unwrap(); + + let mut code = String::new(); + code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); + + for obj in objs { + code.push_str(&obj.code); + code.push('\n'); + } + file.write_all(code.as_bytes()) + .with_context(|| format!("{filepath:?}")) + .unwrap(); + } + + // rerun/{datatypes|components|archetypes}/__init__.py + { + let path = out_path.join("__init__.py"); + + let mut code = String::new(); + + code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); + code.push_str(&unindent::unindent( + " + # NOTE: + # - we use fully qualified paths to prevent lazy circular imports + # - `noqa F401` (unused import) everywhere because, while not strictly necessary, + # these imports are very nice for end users. 
+ + ", + )); + + for (module, names) in &mods { + let names = names.join(", "); + code.push_str(&format!("from .{module} import {names} # noqa: F401\n")); + } + + filepaths.push(path.clone()); + std::fs::write(&path, code) + .with_context(|| format!("{path:?}")) + .unwrap(); + } + + (filepaths, all_names) +} + +// --- Codegen core loop --- + +#[derive(Debug, Clone)] +struct QuotedObject { + filepath: PathBuf, + name: String, + kind: ObjectKind, + code: String, +} + +impl QuotedObject { + fn from_struct(arrow_registry: &ArrowRegistry, objects: &Objects, obj: &Object) -> Self { + assert!(obj.is_struct()); + + let Object { + filepath, + fqname: _, + pkg_name: _, + name, + docs, + kind, + attrs: _, + fields, + specifics: _, + } = obj; + + let mut code = String::new(); + + code.push_str("e_module_prelude()); + + for clause in obj + .fields + .iter() + .filter_map(quote_import_clauses_from_field) + { + code.push_str(&clause); + code.push('\n'); + } + + code.push_str(&unindent::unindent(&format!( + r#" + + @dataclass + class {name}: + "# + ))); + + code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + + for field in fields { + let ObjectField { + filepath: _, + fqname: _, + pkg_name: _, + name, + docs, + typ: _, + attrs: _, + required: _, + deprecated: _, + } = field; + + let (typ, _) = quote_field_type_from_field(objects, field, false); + let typ = if *kind == ObjectKind::Archetype { + let (typ_unwrapped, _) = quote_field_type_from_field(objects, field, true); + format!("{typ_unwrapped}Array") + } else { + typ + }; + let typ = if field.required { + typ + } else { + format!("Optional[{typ}] = None") + }; + + code.push_str(&indent::indent_all_by(4, format!("{name}: {typ}\n"))); + + code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + } + + code.push_str(&indent::indent_all_by(4, quote_str_repr_from_obj(obj))); + code.push('\n'); + + code.push_str(&indent::indent_all_by( + 4, + quote_array_method_from_obj(objects, obj), + )); + 
code.push('\n'); + + code.push_str(&indent::indent_all_by( + 4, + quote_str_method_from_obj(objects, obj), + )); + code.push('\n'); + + if obj.kind == ObjectKind::Archetype { + code.push_str(&indent::indent_all_by( + 4, + quote_builder_from_obj(objects, obj), + )); + code.push('\n'); + } else { + code.push_str("e_aliases_from_object(obj)); + code.push('\n'); + } + + code.push_str("e_arrow_support_from_obj(arrow_registry, obj)); + code.push('\n'); + + let mut filepath = PathBuf::from(filepath); + filepath.set_extension("py"); + + Self { + filepath, + name: obj.name.clone(), + kind: obj.kind, + code, + } + } + + fn from_union(arrow_registry: &ArrowRegistry, objects: &Objects, obj: &Object) -> Self { + assert!(!obj.is_struct()); + + let Object { + filepath, + fqname: _, + pkg_name: _, + name, + docs, + kind: _, + attrs: _, + fields, + specifics: _, + } = obj; + + let mut code = String::new(); + + code.push_str("e_module_prelude()); + + for clause in obj + .fields + .iter() + .filter_map(quote_import_clauses_from_field) + { + code.push_str(&clause); + code.push('\n'); + } + + code.push_str(&unindent::unindent(&format!( + r#" + + @dataclass + class {name}: + "# + ))); + + code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + + for field in fields { + let ObjectField { + filepath: _, + fqname: _, + pkg_name: _, + name, + docs, + typ: _, + attrs: _, + required: _, + deprecated: _, + } = field; + + let (typ, _) = quote_field_type_from_field(objects, field, false); + // NOTE: It's always optional since only one of the fields can be set at a time. 
+ code.push_str(&indent::indent_all_by( + 4, + format!("{name}: Optional[{typ}] = None\n"), + )); + + code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + } + + code.push_str(&indent::indent_all_by(4, quote_str_repr_from_obj(obj))); + code.push('\n'); + + code.push_str(&indent::indent_all_by( + 4, + quote_array_method_from_obj(objects, obj), + )); + code.push('\n'); + + code.push_str(&indent::indent_all_by( + 4, + quote_str_method_from_obj(objects, obj), + )); + code.push('\n'); + + code.push_str("e_aliases_from_object(obj)); + code.push('\n'); + + code.push_str("e_arrow_support_from_obj(arrow_registry, obj)); + code.push('\n'); + + let mut filepath = PathBuf::from(filepath); + filepath.set_extension("py"); + + Self { + filepath, + name: obj.name.clone(), + kind: obj.kind, + code, + } + } +} + +// --- Code generators --- + +fn quote_module_prelude() -> String { + // NOTE: All the extraneous stull will be cleaned up courtesy of `ruff`. + unindent::unindent( + r#" + from __future__ import annotations + + import numpy as np + import numpy.typing as npt + import pyarrow as pa + + from dataclasses import dataclass + from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union + + "#, + ) +} + +fn quote_doc_from_docs(docs: &Docs) -> String { + let lines = crate::codegen::quote_doc_from_docs(docs, &["py", "python"]); + + if lines.is_empty() { + return String::new(); + } + + let doc = lines.join("\n"); + format!("\"\"\"\n{doc}\n\"\"\"\n\n") +} + +/// Generates generic `__str__` and `__repr__` methods for archetypes. +// +// TODO(cmc): this could alternatively import a statically defined mixin from "somewhere". 
+fn quote_str_repr_from_obj(obj: &Object) -> String { + if obj.kind != ObjectKind::Archetype { + return String::new(); + } + + unindent::unindent( + r#" + def __str__(self): + s = f"rr.{type(self).__name__}(\n" + + from dataclasses import fields + for field in fields(self): + data = getattr(self, field.name) + datatype = getattr(data, "type", None) + if datatype: + name = datatype.extension_name + typ = datatype.storage_type + s += f" {name}<{typ}>(\n {data.to_pylist()}\n )\n" + + s += ")" + + return s + + def __repr__(self): + return str(self) + + "#, + ) +} + +/// Automatically implement `__array__` if the object is a single +/// `npt.ArrayLike`/integer/floating-point field. +/// +/// Only applies to datatypes and components. +fn quote_array_method_from_obj(objects: &Objects, obj: &Object) -> String { + // TODO(cmc): should be using native type, but need transparency + let typ = quote_field_type_from_field(objects, &obj.fields[0], false).0; + if + // cannot be an archetype + obj.kind == ObjectKind::Archetype + // has to have a single field + || obj.fields.len() != 1 + // that single field must be `npt.ArrayLike`/integer/floating-point + || !["npt.ArrayLike", "float", "int"].contains(&typ.as_str()) + { + return String::new(); + } + + let field_name = &obj.fields[0].name; + unindent::unindent(&format!( + " + def __array__(self): + return np.asarray(self.{field_name}) + ", + )) +} + +/// Automatically implement `__str__` if the object is a single `str` field. +/// +/// Only applies to datatypes and components. 
+fn quote_str_method_from_obj(objects: &Objects, obj: &Object) -> String { + if + // cannot be an archetype + obj.kind == ObjectKind::Archetype + // has to have a single field + || obj.fields.len() != 1 + // that single field must be `str` + // TODO(cmc): should be using native type, but need transparency + || quote_field_type_from_field(objects, &obj.fields[0], false).0 != "str" + { + return String::new(); + } + + let field_name = &obj.fields[0].name; + unindent::unindent(&format!( + " + def __str__(self): + return self.{field_name} + ", + )) +} + +/// Only applies to datatypes and components. +fn quote_aliases_from_object(obj: &Object) -> String { + assert!(obj.kind != ObjectKind::Archetype); + + let aliases = obj.try_get_attr::(ATTR_ALIASES); + let array_aliases = obj + .try_get_attr::(ATTR_ARRAY_ALIASES) + .unwrap_or_default(); + + let name = &obj.name; + + let mut code = String::new(); + + code.push_str(&if let Some(aliases) = aliases { + unindent::unindent(&format!( + r#" + {name}Like = Union[ + {name}, + {aliases} + ] + + "#, + )) + } else { + format!("{name}Like = {name}\n") + }); + + code.push_str(&unindent::unindent(&format!( + r#" + {name}ArrayLike = Union[ + {name}Like, + Sequence[{name}Like], + {array_aliases} + ] + "#, + ))); + + code +} + +fn quote_import_clauses_from_field(field: &ObjectField) -> Option { + let fqname = match &field.typ { + Type::Array { + elem_type, + length: _, + } + | Type::Vector { elem_type } => match elem_type { + ElementType::Object(fqname) => Some(fqname), + _ => None, + }, + Type::Object(fqname) => Some(fqname), + _ => None, + }; + + // NOTE: The distinction between `from .` vs. `from rerun.datatypes` has been shown to fix some + // nasty lazy circular dependencies in weird edge cases... + // In any case it will be normalized by `ruff` if it turns out to be unnecessary. 
+ fqname.map(|fqname| { + let (from, class) = fqname.rsplit_once('.').unwrap_or(("", fqname.as_str())); + if from.starts_with("rerun.datatypes") { + format!("from {MODULE_NAME} import datatypes") + } else if from.starts_with("rerun.components") { + format!("from {MODULE_NAME} import components") + } else if from.starts_with("rerun.archetypes") { + // NOTE: This is assuming importing other archetypes is legal... which whether it is or + // isn't for this code generator to say. + format!("from {MODULE_NAME} import archetypes") + } else if from.is_empty() { + format!("from . import {class}") + } else { + format!("from {from} import {class}") + } + }) +} + +/// Returns type name as string and whether it was force unwrapped. +fn quote_field_type_from_field( + objects: &Objects, + field: &ObjectField, + unwrap: bool, +) -> (String, bool) { + let mut unwrapped = false; + let typ = match &field.typ { + Type::UInt8 + | Type::UInt16 + | Type::UInt32 + | Type::UInt64 + | Type::Int8 + | Type::Int16 + | Type::Int32 + | Type::Int64 => "int".to_owned(), + Type::Bool => "bool".to_owned(), + Type::Float16 | Type::Float32 | Type::Float64 => "float".to_owned(), + Type::String => "str".to_owned(), + Type::Array { + elem_type, + length: _, + } + | Type::Vector { elem_type } => { + let array_like = matches!( + elem_type, + ElementType::UInt8 + | ElementType::UInt16 + | ElementType::UInt32 + | ElementType::UInt64 + | ElementType::Int8 + | ElementType::Int16 + | ElementType::Int32 + | ElementType::Int64 + | ElementType::Bool + | ElementType::Float16 + | ElementType::Float32 + | ElementType::Float64 + | ElementType::String + ); + + if array_like { + "npt.ArrayLike".to_owned() + } else { + let typ = quote_type_from_element_type(elem_type); + if unwrap { + unwrapped = true; + typ + } else { + format!("List[{typ}]") + } + } + } + Type::Object(fqname) => { + // TODO(cmc): it is a bit weird to be doing the transparency logic (which is language + // agnostic) in a python specific quoting 
function... a static helper at the very least + // would be nice. + let is_transparent = field.try_get_attr::(ATTR_TRANSPARENT).is_some(); + if is_transparent { + let target = objects.get(fqname); + assert!( + target.fields.len() == 1, + "transparent field must point to an object with exactly 1 field, but {:?} has {}", + fqname, target.fields.len(), + ); + // NOTE: unwrap call is safe due to assertion just above + return quote_field_type_from_field( + objects, + target.fields.first().unwrap(), + unwrap, + ); + } + quote_type_from_element_type(&ElementType::Object(fqname.clone())) + } + }; + + (typ, unwrapped) +} + +fn quote_type_from_element_type(typ: &ElementType) -> String { + match typ { + ElementType::UInt8 + | ElementType::UInt16 + | ElementType::UInt32 + | ElementType::UInt64 + | ElementType::Int8 + | ElementType::Int16 + | ElementType::Int32 + | ElementType::Int64 => "int".to_owned(), + ElementType::Bool => "bool".to_owned(), + ElementType::Float16 | ElementType::Float32 | ElementType::Float64 => "float".to_owned(), + ElementType::String => "str".to_owned(), + ElementType::Object(fqname) => { + let (from, class) = fqname.rsplit_once('.').unwrap_or(("", fqname.as_str())); + if from.starts_with("rerun.datatypes") { + // NOTE: Only need the class name, pre-generated import clause takes care of the rest. + format!("datatypes.{class}") + } else if from.starts_with("rerun.components") { + format!("components.{class}") + } else if from.starts_with("rerun.archetypes") { + // NOTE: This is assuming importing other archetypes is legal... which whether it is or + // isn't for this code generator to say. + format!("archetypes.{class}") + } else if from.is_empty() { + format!("from . import {class}") + } else { + format!("from {from} import {class}") + } + } + } +} + +fn quote_arrow_support_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> String { + let Object { + fqname, name, kind, .. 
+ } = obj; + + match kind { + ObjectKind::Datatype | ObjectKind::Component => { + let datatype = quote_arrow_datatype(&arrow_registry.get(fqname)); + + let mono = name.clone(); + let mono_aliases = format!("{name}Like"); + let many = format!("{name}Array"); + let many_aliases = format!("{name}ArrayLike"); + let arrow = format!("{name}Type"); + + use convert_case::{Boundary, Case, Casing}; + let pkg = name + .from_case(Case::Camel) + .without_boundaries(&[ + Boundary::DigitLower, + Boundary::DigitUpper, + Boundary::LowerDigit, + Boundary::UpperDigit, + ]) + .to_case(Case::Snake); + + unindent::unindent(&format!( + r#" + + # --- Arrow support --- + + from .{pkg}_ext import {many}Ext # noqa: E402 + + class {arrow}(pa.ExtensionType): + def __init__(self: type[pa.ExtensionType]) -> None: + pa.ExtensionType.__init__( + self, {datatype}, "{fqname}" + ) + + def __arrow_ext_serialize__(self: type[pa.ExtensionType]) -> bytes: + # since we don't have a parameterized type, we don't need extra metadata to be deserialized + return b"" + + @classmethod + def __arrow_ext_deserialize__( + cls: type[pa.ExtensionType], storage_type: Any, serialized: Any + ) -> type[pa.ExtensionType]: + # return an instance of this subclass given the serialized metadata. + return {arrow}() + + def __arrow_ext_class__(self: type[pa.ExtensionType]) -> type[pa.ExtensionArray]: + return {many} + + pa.register_extension_type({arrow}()) + + class {many}(pa.ExtensionArray, {many}Ext): # type: ignore[misc] + @staticmethod + def from_similar(data: Optional[{many_aliases}]): + if data is None: + return {arrow}().wrap_array(pa.array([], type={arrow}().storage_type)) + else: + return {many}Ext.from_similar( + data, + mono={mono}, + mono_aliases={mono_aliases}, + many={many}, + many_aliases={many_aliases}, + arrow={arrow}, + ) + "# + )) + } + ObjectKind::Archetype => String::new(), + } +} + +/// Only makes sense for archetypes. 
+fn quote_builder_from_obj(objects: &Objects, obj: &Object) -> String { + assert_eq!(ObjectKind::Archetype, obj.kind); + + let required = obj + .fields + .iter() + .filter(|field| field.required) + .collect::>(); + let optional = obj + .fields + .iter() + .filter(|field| !field.required) + .collect::>(); + + let mut code = String::new(); + + let required_args = required + .iter() + .map(|field| { + let (typ, unwrapped) = quote_field_type_from_field(objects, field, true); + if unwrapped { + // This was originally a vec/array! + format!("{}: {typ}ArrayLike", field.name) + } else { + format!("{}: {typ}Like", field.name) + } + }) + .collect::>() + .join(", "); + let optional_args = optional + .iter() + .map(|field| { + let (typ, unwrapped) = quote_field_type_from_field(objects, field, true); + if unwrapped { + // This was originally a vec/array! + format!("{}: Optional[{typ}ArrayLike] = None", field.name) + } else { + format!("{}: Optional[{typ}Like] = None", field.name) + } + }) + .collect::>() + .join(", "); + + code.push_str(&format!( + "def __init__(self, {required_args}, *, {optional_args}) -> None:\n" + )); + + code.push_str(&indent::indent_all_by(4, "# Required components\n")); + for field in required { + let name = &field.name; + let (typ, _) = quote_field_type_from_field(objects, field, true); + code.push_str(&indent::indent_all_by( + 4, + format!("self.{name} = {typ}Array.from_similar({name})\n"), + )); + } + + code.push('\n'); + + code.push_str(&indent::indent_all_by(4, "# Optional components\n")); + for field in optional { + let name = &field.name; + let (typ, _) = quote_field_type_from_field(objects, field, true); + code.push_str(&indent::indent_all_by( + 4, + format!("self.{name} = {typ}Array.from_similar({name})\n"), + )); + } + + code +} + +// --- Arrow registry code generators --- + +use arrow2::datatypes::{DataType, Field, UnionMode}; + +fn quote_arrow_datatype(datatype: &DataType) -> String { + match datatype { + DataType::Null => 
"pa.null()".to_owned(), + DataType::Boolean => "pa.bool_()".to_owned(), + DataType::Int8 => "pa.int8()".to_owned(), + DataType::Int16 => "pa.int16()".to_owned(), + DataType::Int32 => "pa.int32()".to_owned(), + DataType::Int64 => "pa.int64()".to_owned(), + DataType::UInt8 => "pa.uint8()".to_owned(), + DataType::UInt16 => "pa.uint16()".to_owned(), + DataType::UInt32 => "pa.uint32()".to_owned(), + DataType::UInt64 => "pa.uint64()".to_owned(), + DataType::Float16 => "pa.float16()".to_owned(), + DataType::Float32 => "pa.float32()".to_owned(), + DataType::Float64 => "pa.float64()".to_owned(), + DataType::Date32 => "pa.date32()".to_owned(), + DataType::Date64 => "pa.date64()".to_owned(), + DataType::Binary => "pa.binary()".to_owned(), + DataType::LargeBinary => "pa.large_binary()".to_owned(), + DataType::Utf8 => "pa.utf8()".to_owned(), + DataType::LargeUtf8 => "pa.large_utf8()".to_owned(), + DataType::FixedSizeList(field, length) => { + let field = quote_arrow_field(field); + format!("pa.list_({field}, {length})") + } + DataType::Union(fields, _, mode) => { + let fields = fields + .iter() + .map(quote_arrow_field) + .collect::>() + .join(", "); + match mode { + UnionMode::Dense => format!(r#"pa.dense_union([{fields}])"#), + UnionMode::Sparse => format!(r#"pa.sparse_union([{fields}])"#), + } + } + DataType::Struct(fields) => { + let fields = fields + .iter() + .map(quote_arrow_field) + .collect::>() + .join(", "); + format!("pa.struct([{fields}])") + } + DataType::Extension(_, datatype, _) => { + // TODO(cmc): not sure we need all that for the python backend since we already + // do the wrapping trick...? 
+ quote_arrow_datatype(datatype) + } + _ => unimplemented!("{datatype:#?}"), // NOLINT + } +} + +fn quote_arrow_field(field: &Field) -> String { + let Field { + name, + data_type, + is_nullable, + metadata, + } = field; + + let datatype = quote_arrow_datatype(data_type); + let is_nullable = is_nullable.then_some("True").unwrap_or("False"); + let metadata = quote_metadata_map(metadata); + + format!(r#"pa.field("{name}", {datatype}, {is_nullable}, {metadata})"#) +} + +fn quote_metadata_map(metadata: &BTreeMap) -> String { + let kvs = metadata + .iter() + .map(|(k, v)| format!("{k:?}, {v:?}")) + .collect::>() + .join(", "); + format!("{{{kvs}}}") +} diff --git a/crates/re_types_builder/src/codegen/rust.rs b/crates/re_types_builder/src/codegen/rust.rs new file mode 100644 index 000000000000..588c5ebc7663 --- /dev/null +++ b/crates/re_types_builder/src/codegen/rust.rs @@ -0,0 +1,791 @@ +//! Implements the Rust codegen pass. + +use anyhow::Context as _; +use std::{ + collections::{BTreeMap, HashMap}, + io::Write, + path::{Path, PathBuf}, +}; + +use crate::{ + codegen::{ + AUTOGEN_WARNING, RERUN_ATTR_COMPONENT_OPTIONAL, RERUN_ATTR_COMPONENT_RECOMMENDED, + RERUN_ATTR_COMPONENT_REQUIRED, + }, + ArrowRegistry, CodeGenerator, Docs, ElementType, Object, ObjectField, ObjectKind, Objects, + Type, +}; + +// --- + +// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions +pub const ATTR_DERIVE: &str = "rust.attr.derive"; +pub const ATTR_REPR: &str = "rust.attr.repr"; +pub const ATTR_TUPLE_STRUCT: &str = "rust.attr.tuple_struct"; + +pub struct RustCodeGenerator { + crate_path: PathBuf, +} + +impl RustCodeGenerator { + pub fn new(crate_path: impl Into) -> Self { + Self { + crate_path: crate_path.into(), + } + } +} + +impl CodeGenerator for RustCodeGenerator { + fn quote(&mut self, objects: &Objects, arrow_registry: &ArrowRegistry) -> Vec { + let mut filepaths = Vec::new(); + + let datatypes_path = self.crate_path.join("src/datatypes"); + 
std::fs::create_dir_all(&datatypes_path) + .with_context(|| format!("{datatypes_path:?}")) + .unwrap(); + filepaths.extend(quote_objects( + datatypes_path, + arrow_registry, + &objects.ordered_datatypes(), + )); + + let components_path = self.crate_path.join("src/components"); + std::fs::create_dir_all(&components_path) + .with_context(|| format!("{components_path:?}")) + .unwrap(); + filepaths.extend(quote_objects( + components_path, + arrow_registry, + &objects.ordered_components(), + )); + + let archetypes_path = self.crate_path.join("src/archetypes"); + std::fs::create_dir_all(&archetypes_path) + .with_context(|| format!("{archetypes_path:?}")) + .unwrap(); + filepaths.extend(quote_objects( + archetypes_path, + arrow_registry, + &objects.ordered_archetypes(), + )); + + filepaths + } +} + +// --- File management --- + +fn quote_objects( + out_path: impl AsRef, + arrow_registry: &ArrowRegistry, + objs: &[&Object], +) -> Vec { + let out_path = out_path.as_ref(); + + let mut filepaths = Vec::new(); + + let mut files = HashMap::>::new(); + for obj in objs { + let obj = if obj.is_struct() { + QuotedObject::from_struct(arrow_registry, obj) + } else { + QuotedObject::from_union(arrow_registry, obj) + }; + + let filepath = out_path.join(obj.filepath.file_name().unwrap()); + + match files.entry(filepath.clone()) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().push(obj); + } + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(vec![obj]); + } + }; + } + + // (module_name, [object_name]) + let mut mods = HashMap::>::new(); + + // src/{datatypes|components|archetypes}/{xxx}.rs + for (filepath, objs) in files { + // NOTE: Isolating the file stem only works because we're handling datatypes, components + // and archetypes separately (and even then it's a bit shady, eh). 
+ let names = objs.iter().map(|obj| obj.name.clone()).collect::>(); + match mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().extend(names); + } + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(names); + } + }; + + filepaths.push(filepath.clone()); + let mut file = std::fs::File::create(&filepath) + .with_context(|| format!("{filepath:?}")) + .unwrap(); + + let mut code = String::new(); + code.push_str(&format!("// {AUTOGEN_WARNING}\n\n")); + + for obj in objs { + code.push_str(&obj.code); + code.push('\n'); + } + file.write_all(code.as_bytes()) + .with_context(|| format!("{filepath:?}")) + .unwrap(); + } + + // src/{datatypes|components|archetypes}/mod.rs + { + let path = out_path.join("mod.rs"); + + let mut code = String::new(); + + code.push_str(&format!("// {AUTOGEN_WARNING}\n\n")); + + for module in mods.keys() { + code.push_str(&format!("mod {module};\n")); + + // NOTE: detect if someone manually created an extension file, and automatically + // import it if so. 
+ let mut ext_path = out_path.join(format!("{module}_ext")); + ext_path.set_extension("rs"); + if ext_path.exists() { + code.push_str(&format!("mod {module}_ext;\n")); + } + } + + code += "\n\n"; + + for (module, names) in &mods { + let names = names.join(", "); + code.push_str(&format!("pub use self::{module}::{{{names}}};\n")); + } + + filepaths.push(path.clone()); + std::fs::write(&path, code) + .with_context(|| format!("{path:?}")) + .unwrap(); + } + + filepaths +} + +// --- Codegen core loop --- + +#[derive(Debug, Clone)] +struct QuotedObject { + filepath: PathBuf, + name: String, + code: String, +} + +impl QuotedObject { + fn from_struct(arrow_registry: &ArrowRegistry, obj: &Object) -> Self { + assert!(obj.is_struct()); + + let Object { + filepath, + fqname: _, + pkg_name: _, + name, + docs, + kind, + attrs: _, + fields, + specifics: _, + } = obj; + + let mut code = String::new(); + + code.push_str("e_doc_from_docs(docs)); + + if let Some(clause) = quote_derive_clause_from_obj(obj) { + code.push_str(&clause); + code.push('\n'); + } + if let Some(clause) = quote_repr_clause_from_obj(obj) { + code.push_str(&clause); + code.push('\n'); + } + + let is_tuple_struct = is_tuple_struct_from_obj(obj); + + if is_tuple_struct { + code.push_str(&format!("pub struct {name}(")); + } else { + code.push_str(&format!("pub struct {name} {{\n")); + } + + for field in fields { + let ObjectField { + filepath: _, + pkg_name: _, + fqname: _, + name, + docs, + typ: _, + attrs: _, + required, + // TODO(cmc): support for deprecation notices + deprecated: _, + } = field; + + code.push_str("e_doc_from_docs(docs)); + + let (typ, _) = quote_field_type_from_field(field, false); + let typ = if *required { + typ + } else { + format!("Option<{typ}>") + }; + + if is_tuple_struct { + code.push_str(&format!("pub {typ}")); + } else { + code.push_str(&format!("pub {name}: {typ},\n\n")); + } + } + + if is_tuple_struct { + code += ");\n\n"; + } else { + code += "}\n\n"; + } + + 
code.push_str("e_trait_impls_from_obj(arrow_registry, obj)); + code.push('\n'); + + if kind == &ObjectKind::Archetype { + code.push_str("e_builder_from_obj(obj)); + } + + let mut filepath = PathBuf::from(filepath); + filepath.set_extension("rs"); + + Self { + filepath, + name: obj.name.clone(), + code, + } + } + + fn from_union(arrow_registry: &ArrowRegistry, obj: &Object) -> Self { + assert!(!obj.is_struct()); + + let Object { + filepath, + fqname: _, + pkg_name: _, + name, + docs, + kind: _, + attrs: _, + fields, + specifics: _, + } = obj; + + let mut code = String::new(); + + code.push_str("e_doc_from_docs(docs)); + + if let Some(clause) = quote_derive_clause_from_obj(obj) { + code.push_str(&clause); + code.push('\n'); + } + if let Some(clause) = quote_repr_clause_from_obj(obj) { + code.push_str(&clause); + code.push('\n'); + } + + code.push_str(&format!("pub enum {name} {{\n")); + + for field in fields { + let ObjectField { + filepath: _, + fqname: _, + pkg_name: _, + name, + docs, + typ: _, + attrs: _, + required: _, + deprecated: _, + } = field; + + code.push_str("e_doc_from_docs(docs)); + + let (typ, _) = quote_field_type_from_field(field, false); + + code.push_str(&format!("{name}({typ}),\n\n")); + } + + code += "}\n\n"; + + code.push_str("e_trait_impls_from_obj(arrow_registry, obj)); + code.push('\n'); + + let mut filepath = PathBuf::from(filepath); + filepath.set_extension("rs"); + + Self { + filepath, + name: obj.name.clone(), + code, + } + } +} + +// --- Code generators --- + +fn quote_doc_from_docs(docs: &Docs) -> String { + let lines = crate::codegen::quote_doc_from_docs(docs, &["rs", "rust"]); + let lines = lines + .into_iter() + .map(|line| format!("/// {line}")) + .collect::>(); + + let mut doc = lines.join("\n"); + doc.push('\n'); + doc +} + +/// Returns type name as string and whether it was force unwrapped. 
+fn quote_field_type_from_field(field: &ObjectField, unwrap: bool) -> (String, bool) { + let mut unwrapped = false; + let typ = match &field.typ { + Type::UInt8 => "u8".to_owned(), + Type::UInt16 => "u16".to_owned(), + Type::UInt32 => "u32".to_owned(), + Type::UInt64 => "u64".to_owned(), + Type::Int8 => "i8".to_owned(), + Type::Int16 => "i16".to_owned(), + Type::Int32 => "i32".to_owned(), + Type::Int64 => "i64".to_owned(), + Type::Bool => "bool".to_owned(), + Type::Float16 => unimplemented!("ResolvedType::Float16"), // NOLINT + Type::Float32 => "f32".to_owned(), + Type::Float64 => "f64".to_owned(), + // TODO(cmc): ref for deserialization? + Type::String => "String".to_owned(), + Type::Array { elem_type, length } => { + let typ = quote_type_from_element_type(elem_type); + if unwrap { + unwrapped = true; + typ + } else { + format!("[{typ}; {length}]") + } + } + Type::Vector { elem_type } => { + let typ = quote_type_from_element_type(elem_type); + if unwrap { + unwrapped = true; + typ + } else { + format!("Vec<{typ}>") + } + } + Type::Object(fqname) => fqname.replace('.', "::").replace("rerun", "crate"), + }; + + (typ, unwrapped) +} + +fn quote_type_from_element_type(typ: &ElementType) -> String { + match typ { + ElementType::UInt8 => "u8".to_owned(), + ElementType::UInt16 => "u16".to_owned(), + ElementType::UInt32 => "u32".to_owned(), + ElementType::UInt64 => "u64".to_owned(), + ElementType::Int8 => "i8".to_owned(), + ElementType::Int16 => "i16".to_owned(), + ElementType::Int32 => "i32".to_owned(), + ElementType::Int64 => "i64".to_owned(), + ElementType::Bool => "bool".to_owned(), + ElementType::Float16 => unimplemented!("ResolvedType::Float16"), // NOLINT + ElementType::Float32 => "f32".to_owned(), + ElementType::Float64 => "f64".to_owned(), + // TODO(cmc): ref for deserialization? 
+ ElementType::String => "String".to_owned(), + ElementType::Object(fqname) => fqname.replace('.', "::").replace("rerun", "crate"), + } +} + +fn quote_derive_clause_from_obj(obj: &Object) -> Option { + obj.try_get_attr::(ATTR_DERIVE) + .map(|what| format!("#[derive({what})]")) +} + +fn quote_repr_clause_from_obj(obj: &Object) -> Option { + obj.try_get_attr::(ATTR_REPR) + .map(|what| format!("#[repr({what})]")) +} + +fn is_tuple_struct_from_obj(obj: &Object) -> bool { + obj.is_struct() + && obj.fields.len() == 1 + && obj.try_get_attr::(ATTR_TUPLE_STRUCT).is_some() +} + +fn quote_trait_impls_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> String { + let Object { + filepath: _, + fqname, + pkg_name: _, + name, + docs: _, + kind, + attrs: _, + fields: _, + specifics: _, + } = obj; + + match kind { + ObjectKind::Datatype => { + let datatype = quote_arrow_datatype(&arrow_registry.get(fqname)); + format!( + r#" + impl crate::Datatype for {name} {{ + fn name() -> ::std::borrow::Cow<'static, str> {{ + ::std::borrow::Cow::Borrowed({fqname:?}) + }} + + #[allow(clippy::wildcard_imports)] + fn to_arrow_datatype() -> arrow2::datatypes::DataType {{ + use ::arrow2::datatypes::*; + {datatype} + }} + }} + "# + ) + } + ObjectKind::Component => { + let datatype = quote_arrow_datatype(&arrow_registry.get(fqname)); + format!( + r#" + impl crate::Component for {name} {{ + fn name() -> ::std::borrow::Cow<'static, str> {{ + ::std::borrow::Cow::Borrowed({fqname:?}) + }} + + #[allow(clippy::wildcard_imports)] + fn to_arrow_datatype() -> arrow2::datatypes::DataType {{ + use ::arrow2::datatypes::*; + {datatype} + }} + }} + "# + ) + } + ObjectKind::Archetype => { + fn compute_components(obj: &Object, attr: &'static str) -> (usize, String) { + let components = iter_archetype_components(obj, attr).collect::>(); + + let num_components = components.len(); + let components = components + .into_iter() + .map(|fqname| format!("::std::borrow::Cow::Borrowed({fqname:?})")) + .collect::>() + 
.join(", "); + + (num_components, components) + } + + let (num_required, required) = compute_components(obj, RERUN_ATTR_COMPONENT_REQUIRED); + let (num_recommended, recommended) = + compute_components(obj, RERUN_ATTR_COMPONENT_RECOMMENDED); + let (num_optional, optional) = compute_components(obj, RERUN_ATTR_COMPONENT_OPTIONAL); + + let num_all = num_required + num_recommended + num_optional; + let all = [required.as_str(), recommended.as_str(), optional.as_str()] + .as_slice() + .join(", "); + + format!( + r#" + impl {name} {{ + pub const REQUIRED_COMPONENTS: [::std::borrow::Cow<'static, str>; {num_required}] = [{required}]; + + pub const RECOMMENDED_COMPONENTS: [::std::borrow::Cow<'static, str>; {num_recommended}] = [{recommended}]; + + pub const OPTIONAL_COMPONENTS: [::std::borrow::Cow<'static, str>; {num_optional}] = [{optional}]; + + pub const ALL_COMPONENTS: [::std::borrow::Cow<'static, str>; {num_all}] = [{all}]; + }} + + impl crate::Archetype for {name} {{ + fn name() -> ::std::borrow::Cow<'static, str> {{ + ::std::borrow::Cow::Borrowed({fqname:?}) + }} + + fn required_components() -> Vec<::std::borrow::Cow<'static, str>> {{ + Self::REQUIRED_COMPONENTS.to_vec() + }} + + fn recommended_components() -> Vec<::std::borrow::Cow<'static, str>> {{ + Self::RECOMMENDED_COMPONENTS.to_vec() + }} + + fn optional_components() -> Vec<::std::borrow::Cow<'static, str>> {{ + Self::OPTIONAL_COMPONENTS.to_vec() + }} + + #[allow(clippy::unimplemented)] + fn to_arrow_datatypes() -> Vec {{ + unimplemented!("query the registry for all fqnames"); // NOLINT + }} + }} + "# + ) + } + } +} + +/// Only makes sense for archetypes. 
+fn quote_builder_from_obj(obj: &Object) -> String { + assert_eq!(ObjectKind::Archetype, obj.kind); + + let Object { + filepath: _, + fqname: _, + pkg_name: _, + name, + docs: _, + kind: _, + attrs: _, + fields, + specifics: _, + } = obj; + + let required = fields + .iter() + .filter(|field| field.required) + .collect::>(); + let optional = fields + .iter() + .filter(|field| !field.required) + .collect::>(); + + let mut code = String::new(); + + code.push_str(&format!("impl {name} {{\n")); + { + // --- impl new() --- + + let new_params = required + .iter() + .map(|field| { + let (typ, unwrapped) = quote_field_type_from_field(field, true); + if unwrapped { + // This was originally a vec/array! + format!( + "{}: impl IntoIterator>", + field.name, typ + ) + } else { + format!("{}: impl Into<{}>", field.name, typ) + } + }) + .collect::>() + .join(", "); + code.push_str(&format!("pub fn new({new_params}) -> Self {{\n")); + { + code += "Self {\n"; + { + for field in &required { + let (_, unwrapped) = quote_field_type_from_field(field, true); + if unwrapped { + // This was originally a vec/array! + code.push_str(&format!( + "{}: {}.into_iter().map(Into::into).collect(),\n", + field.name, field.name + )); + } else { + code.push_str(&format!("{}: {}.into(),\n", field.name, field.name)); + } + } + for field in &optional { + code.push_str(&format!("{}: None,\n", field.name)); + } + } + code += "}\n"; + } + code += "}\n\n"; + + // --- impl with_*() --- + + for field in &optional { + let name = &field.name; + let (typ, unwrapped) = quote_field_type_from_field(field, true); + + if unwrapped { + // This was originally a vec/array! 
+ code.push_str(&format!( + "pub fn with_{name}(mut self, {name}: impl IntoIterator>) -> Self {{\n", + )); + { + code.push_str(&format!( + "self.{name} = Some({name}.into_iter().map(Into::into).collect());\n" + )); + code += "self\n"; + } + } else { + code.push_str(&format!( + "pub fn with_{name}(mut self, {name}: impl Into<{typ}>) -> Self {{\n", + )); + { + code.push_str(&format!("self.{name} = Some({name}.into());\n")); + code += "self\n"; + } + } + + code += "}\n\n"; + } + } + code += "}\n\n"; + + code +} + +// --- Arrow registry code generators --- + +use arrow2::datatypes::{DataType, Field}; + +fn quote_arrow_datatype(datatype: &DataType) -> String { + match datatype { + DataType::Null => "DataType::Null".to_owned(), + DataType::Boolean => "DataType::Boolean".to_owned(), + DataType::Int8 => "DataType::Int8".to_owned(), + DataType::Int16 => "DataType::Int16".to_owned(), + DataType::Int32 => "DataType::Int32".to_owned(), + DataType::Int64 => "DataType::Int64".to_owned(), + DataType::UInt8 => "DataType::UInt8".to_owned(), + DataType::UInt16 => "DataType::UInt16".to_owned(), + DataType::UInt32 => "DataType::UInt32".to_owned(), + DataType::UInt64 => "DataType::UInt64".to_owned(), + DataType::Float16 => "DataType::Float16".to_owned(), + DataType::Float32 => "DataType::Float32".to_owned(), + DataType::Float64 => "DataType::Float64".to_owned(), + DataType::Date32 => "DataType::Date32".to_owned(), + DataType::Date64 => "DataType::Date64".to_owned(), + DataType::Binary => "DataType::Binary".to_owned(), + DataType::LargeBinary => "DataType::LargeBinary".to_owned(), + DataType::Utf8 => "DataType::Utf8".to_owned(), + DataType::LargeUtf8 => "DataType::LargeUtf8".to_owned(), + DataType::FixedSizeList(field, length) => { + let field = quote_arrow_field(field); + format!("DataType::FixedSizeList(Box::new({field}), {length})") + } + DataType::Union(fields, _, mode) => { + let fields = fields + .iter() + .map(quote_arrow_field) + .collect::>() + .join(", "); + + // NOTE: 
unindenting to work around a rustfmt bug + unindent::unindent(&format!( + r#" + DataType::Union( + vec![{fields}], + None, + UnionMode::{mode:?}, + ) + "# + )) + } + DataType::Struct(fields) => { + let fields = fields + .iter() + .map(quote_arrow_field) + .collect::>() + .join(", "); + + format!("DataType::Struct(vec![{fields}])") + } + DataType::Extension(name, datatype, metadata) => { + let datatype = quote_arrow_datatype(datatype); + let metadata = quote_optional_string(metadata.as_deref()); + + // NOTE: unindenting to work around a rustfmt bug + unindent::unindent(&format!( + r#" + DataType::Extension( + "{name}".to_owned(), + Box::new({datatype}), + {metadata}, + ) + "# + )) + } + _ => unimplemented!("{datatype:#?}"), // NOLINT + } +} + +fn quote_arrow_field(field: &Field) -> String { + let Field { + name, + data_type, + is_nullable, + metadata, + } = field; + + let datatype = quote_arrow_datatype(data_type); + let metadata = quote_metadata_map(metadata); + + // NOTE: unindenting to work around a rustfmt bug + unindent::unindent(&format!( + r#" + Field {{ + name: "{name}".to_owned(), + data_type: {datatype}, + is_nullable: {is_nullable}, + metadata: {metadata}, + }} + "# + )) +} + +fn quote_optional_string(s: Option<&str>) -> String { + if let Some(s) = s { + format!("Some({s:?})") + } else { + "None".into() + } +} + +fn quote_metadata_map(metadata: &BTreeMap) -> String { + let kvs = metadata + .iter() + .map(|(k, v)| format!("({k:?}, {v:?})")) + .collect::>() + .join(", "); + format!("[{kvs}].into()") +} + +// --- Helpers --- + +fn iter_archetype_components<'a>( + obj: &'a Object, + requirement_attr_value: &'static str, +) -> impl Iterator + 'a { + assert_eq!(ObjectKind::Archetype, obj.kind); + obj.fields.iter().filter_map(move |field| { + field + .try_get_attr::(requirement_attr_value) + .map(|_| match &field.typ { + Type::Object(fqname) => fqname.clone(), + Type::Vector { elem_type } => match elem_type { + ElementType::Object(fqname) => fqname.clone(), + _ 
=> {
+ panic!("archetype field must be an object/union or an array/vector of such")
+ }
+ },
+ _ => panic!("archetype field must be an object/union or an array/vector of such"),
+ })
+ })
+}
diff --git a/crates/re_types_builder/src/lib.rs b/crates/re_types_builder/src/lib.rs
new file mode 100644
index 000000000000..b15e3704e1b0
--- /dev/null
+++ b/crates/re_types_builder/src/lib.rs
@@ -0,0 +1,247 @@
+//! This crate implements Rerun's code generation tools.
+//!
+//! These tools translate language-agnostic IDL definitions (flatbuffers) into code.
+//! They are invoked by `re_types`'s build script (`build.rs`).
+//!
+//!
+//! ### Organization
+//!
+//! The code generation process happens in 4 phases.
+//!
+//! #### 1. Generate binary reflection data from flatbuffers definitions.
+//!
+//! All this does is invoke the flatbuffers compiler (`flatc`) with the right flags in order to
+//! generate the binary dumps.
+//!
+//! Look for `compile_binary_schemas` in the code.
+//!
+//! #### 2. Run the semantic pass.
+//!
+//! The semantic pass transforms the low-level raw reflection data generated by the first phase
+//! into higher level objects that are much easier to inspect/manipulate and overall friendlier
+//! to work with.
+//!
+//! Look for `objects.rs`.
+//!
+//! #### 3. Fill the Arrow registry.
+//!
+//! The Arrow registry keeps track of all type definitions and maps them to Arrow datatypes.
+//!
+//! Look for `arrow_registry.rs`.
+//!
+//! #### 4. Run the actual codegen pass for a given language.
+//!
+//! We currently have two different codegen passes implemented at the moment: Python & Rust.
+//!
+//! Codegen passes use the semantic objects from phase two and the registry from phase three
+//! in order to generate user-facing code for Rerun's SDKs.
+//!
+//! These passes are intentionally implemented using a very low-tech no-frills approach (stitch
+//! strings together, make liberal use of `unimplemented`, etc) that keep them flexible in the
face of ever changing needs in the generated code. +//! +//! Look for `codegen/python.rs` and `codegen/rust.rs`. +//! +//! +//! ### Error handling +//! +//! Keep in mind: this is all _build-time_ code that will never see the light of runtime. +//! There is therefore no need for fancy error handling in this crate: all errors are fatal to the +//! build anyway. +//! +//! Make sure to crash as soon as possible when something goes wrong and to attach all the +//! appropriate/available context using `anyhow`'s `with_context` (e.g. always include the +//! fully-qualified name of the faulty type/field) and you're good to go. +//! +//! +//! ### Testing +//! +//! Same comment as with error handling: this code becomes irrelevant at runtime, and so testing it +//! brings very little value. +//! +//! Make sure to test the behavior of its output though: `re_types`! + +// --- + +// NOTE: Official generated code from flatbuffers; ignore _everything_. +#[allow( + warnings, + unused, + unsafe_code, + unsafe_op_in_unsafe_fn, + dead_code, + unused_imports, + explicit_outlives_requirements, + clippy::all +)] +mod reflection; + +pub use self::reflection::reflection::{ + root_as_schema, BaseType as FbsBaseType, Enum as FbsEnum, EnumVal as FbsEnumVal, + Field as FbsField, KeyValue as FbsKeyValue, Object as FbsObject, Schema as FbsSchema, + Type as FbsType, +}; + +// NOTE: This crate isn't only okay with `unimplemented`, it actively encourages it. + +#[allow(clippy::unimplemented)] +mod arrow_registry; +#[allow(clippy::unimplemented)] +mod codegen; +#[allow(clippy::unimplemented)] +mod objects; + +pub use self::arrow_registry::ArrowRegistry; +pub use self::codegen::{CodeGenerator, PythonCodeGenerator, RustCodeGenerator}; +pub use self::objects::{ + Attributes, Docs, ElementType, Object, ObjectField, ObjectKind, Objects, Type, +}; + +// --- Entrypoints --- + +use std::path::{Path, PathBuf}; + +/// Compiles binary reflection dumps from flatbuffers definitions. +/// +/// Panics on error. 
+/// +/// - `include_dir_path`: path to the root directory of the fbs definition tree. +/// - `output_dir_path`: output directory, where the binary schemas will be stored. +/// - `entrypoint_path`: path to the root file of the fbs definition tree. +/// +/// E.g.: +/// ```no_run +/// re_types_builder::compile_binary_schemas( +/// "definitions/", +/// "out/", +/// "definitions/rerun/archetypes.fbs", +/// ); +/// ``` +pub fn compile_binary_schemas( + include_dir_path: impl AsRef, + output_dir_path: impl AsRef, + entrypoint_path: impl AsRef, +) { + let include_dir_path = include_dir_path.as_ref().to_str().unwrap(); + let output_dir_path = output_dir_path.as_ref().to_str().unwrap(); + let entrypoint_path = entrypoint_path.as_ref().to_str().unwrap(); + + use xshell::{cmd, Shell}; + let sh = Shell::new().unwrap(); + cmd!( + sh, + "flatc -I {include_dir_path} + -o {output_dir_path} + -b --bfbs-comments --schema + {entrypoint_path}" + ) + .run() + .unwrap(); +} + +/// Generates Rust code from a set of flatbuffers definitions. +/// +/// Panics on error. +/// +/// - `include_dir_path`: path to the root directory of the fbs definition tree. +/// - `output_crate_path`: path to the root of the output crate. +/// - `entrypoint_path`: path to the root file of the fbs definition tree. +/// - `source_hash`: optional sha256 hash of the source definition files. 
+/// +/// E.g.: +/// ```no_run +/// re_types_builder::generate_rust_code( +/// "./definitions", +/// ".", +/// "./definitions/rerun/archetypes.fbs", +/// ); +/// ``` +pub fn generate_rust_code( + include_dir_path: impl AsRef, + output_crate_path: impl AsRef, + entrypoint_path: impl AsRef, +) { + use xshell::Shell; + + let sh = Shell::new().unwrap(); + let tmp = sh.create_temp_dir().unwrap(); + + let entrypoint_path = entrypoint_path.as_ref(); + let entrypoint_filename = entrypoint_path.file_name().unwrap(); + + // generate bfbs definitions + compile_binary_schemas(include_dir_path, tmp.path(), entrypoint_path); + + let mut binary_entrypoint_path = PathBuf::from(entrypoint_filename); + binary_entrypoint_path.set_extension("bfbs"); + + // semantic pass: high level objects from low-level reflection data + let objects = Objects::from_buf( + sh.read_binary_file(tmp.path().join(binary_entrypoint_path)) + .unwrap() + .as_slice(), + ); + + // create and fill out arrow registry + let mut arrow_registry = ArrowRegistry::default(); + for obj in objects.ordered_objects(None) { + arrow_registry.register(obj); + } + + // generate rust code + let mut gen = RustCodeGenerator::new(output_crate_path.as_ref()); + let _filepaths = gen.quote(&objects, &arrow_registry); +} + +/// Generates Python code from a set of flatbuffers definitions. +/// +/// Panics on error. +/// +/// - `include_dir_path`: path to the root directory of the fbs definition tree. +/// - `output_pkg_path`: path to the root of the output package. +/// - `entrypoint_path`: path to the root file of the fbs definition tree. 
+///
+/// E.g.:
+/// ```no_run
+/// re_types_builder::generate_python_code(
+/// "./definitions",
+/// "./rerun_py",
+/// "./definitions/rerun/archetypes.fbs",
+/// );
+/// ```
+pub fn generate_python_code(
+ include_dir_path: impl AsRef,
+ output_pkg_path: impl AsRef,
+ entrypoint_path: impl AsRef,
+) {
+ use xshell::Shell;
+
+ let sh = Shell::new().unwrap();
+ let tmp = sh.create_temp_dir().unwrap();
+
+ let entrypoint_path = entrypoint_path.as_ref();
+ let entrypoint_filename = entrypoint_path.file_name().unwrap();
+
+ // generate bfbs definitions
+ compile_binary_schemas(include_dir_path, tmp.path(), entrypoint_path);
+
+ let mut binary_entrypoint_path = PathBuf::from(entrypoint_filename);
+ binary_entrypoint_path.set_extension("bfbs");
+
+ // semantic pass: high level objects from low-level reflection data
+ let objects = Objects::from_buf(
+ sh.read_binary_file(tmp.path().join(binary_entrypoint_path))
+ .unwrap()
+ .as_slice(),
+ );
+
+ // create and fill out arrow registry
+ let mut arrow_registry = ArrowRegistry::default();
+ for obj in objects.ordered_objects(None) {
+ arrow_registry.register(obj);
+ }
+
+ // generate python code
+ let mut gen = PythonCodeGenerator::new(output_pkg_path.as_ref());
+ let _filepaths = gen.quote(&objects, &arrow_registry);
+}
diff --git a/crates/re_types_builder/src/objects.rs b/crates/re_types_builder/src/objects.rs
new file mode 100644
index 000000000000..4f6168c9667d
--- /dev/null
+++ b/crates/re_types_builder/src/objects.rs
@@ -0,0 +1,814 @@
+//! This package implements the semantic pass of the codegen process.
+//!
+//! The semantic pass transforms the low-level raw reflection data into higher level types that
+//! are much easier to inspect and manipulate / friendlier to work with.
+ +use anyhow::Context as _; +use std::collections::{HashMap, HashSet}; + +use crate::{ + root_as_schema, FbsBaseType, FbsEnum, FbsEnumVal, FbsField, FbsKeyValue, FbsObject, FbsSchema, + FbsType, +}; + +// --- + +/// The result of the semantic pass: an intermediate representation of all available object +/// types; including structs, enums and unions. +#[derive(Debug)] +pub struct Objects { + /// Maps fully-qualified type names to their resolved object definitions. + pub objects: HashMap, +} + +impl Objects { + /// Runs the semantic pass on a serialized flatbuffers schema. + /// + /// The buffer must be a serialized [`FbsSchema`] (i.e. `.bfbs` data). + pub fn from_buf(buf: &[u8]) -> Self { + let schema = root_as_schema(buf).unwrap(); + Self::from_raw_schema(&schema) + } + + /// Runs the semantic pass on a deserialized flatbuffers [`FbsSchema`]. + pub fn from_raw_schema(schema: &FbsSchema<'_>) -> Self { + let mut resolved_objs = HashMap::new(); + let mut resolved_enums = HashMap::new(); + + let enums = schema.enums().iter().collect::>(); + let objs = schema.objects().iter().collect::>(); + + // resolve enums + for enm in schema.enums() { + let resolved_enum = Object::from_raw_enum(&enums, &objs, &enm); + resolved_enums.insert(resolved_enum.fqname.clone(), resolved_enum); + } + + // resolve objects + for obj in schema + .objects() + .iter() + // NOTE: Wrapped scalar types used by unions, not actual objects: ignore. + .filter(|obj| !obj.name().starts_with("fbs.scalars.")) + { + let resolved_obj = Object::from_raw_object(&enums, &objs, &obj); + resolved_objs.insert(resolved_obj.fqname.clone(), resolved_obj); + } + + Self { + objects: resolved_enums.into_iter().chain(resolved_objs).collect(), + } + } +} + +impl Objects { + /// Returns a resolved object using its fully-qualified name. + /// + /// Panics if missing. 
+ /// + /// E.g.: + /// ```ignore + /// resolved.get("rerun.datatypes.Vec3D"); + /// resolved.get("rerun.datatypes.Angle"); + /// resolved.get("rerun.components.Label"); + /// resolved.get("rerun.archetypes.Point2D"); + /// ``` + pub fn get(&self, fqname: impl AsRef) -> &Object { + let fqname = fqname.as_ref(); + self.objects + .get(fqname) + .with_context(|| format!("unknown object: {fqname:?}")) + .unwrap() + } + + /// Returns all available datatypes, pre-sorted in ascending order based on their `order` + /// attribute. + pub fn ordered_datatypes(&self) -> Vec<&Object> { + self.ordered_objects(ObjectKind::Datatype.into()) + } + + /// Returns all available components, pre-sorted in ascending order based on their `order` + /// attribute. + pub fn ordered_components(&self) -> Vec<&Object> { + self.ordered_objects(ObjectKind::Component.into()) + } + + /// Returns all available archetypes, pre-sorted in ascending order based on their `order` + /// attribute. + pub fn ordered_archetypes(&self) -> Vec<&Object> { + self.ordered_objects(ObjectKind::Archetype.into()) + } + + /// Returns all available objects, pre-sorted in ascending order based on their `order` + /// attribute. + pub fn ordered_objects(&self, kind: Option) -> Vec<&Object> { + let objs = self + .objects + .values() + .filter(|obj| kind.map_or(true, |kind| obj.kind == kind)); + + let mut objs = objs.collect::>(); + objs.sort_by_key(|anyobj| anyobj.order()); + + objs + } +} + +// --- + +/// The kind of the object, as determined by its package root (e.g. `rerun.components`). +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ObjectKind { + Datatype, + Component, + Archetype, +} + +/// A high-level representation of a flatbuffers object's documentation. +#[derive(Debug, Clone)] +pub struct Docs { + /// General documentation for the object. + /// + /// Each entry in the vector is a raw line, extracted as-is from the fbs definition. + /// Trim it yourself if needed! 
+ /// + /// See also [`Docs::tagged_docs`]. + pub doc: Vec, + + /// Tagged documentation for the object. + /// + /// Each entry maps a tag value to a bunch of lines. + /// Each entry in the vector is a raw line, extracted as-is from the fbs definition. + /// Trim it yourself if needed! + /// + /// E.g. the following will be associated with the `py` tag: + /// ```flatbuffers + /// /// \py Something something about how this fields behave in python. + /// my_field: uint32, + /// ``` + /// + /// See also [`Docs::doc`]. + pub tagged_docs: HashMap>, +} + +impl Docs { + fn from_raw_docs( + docs: Option>>, + ) -> Self { + // language-agnostic docs + let doc = docs + .into_iter() + .flat_map(|doc| doc.into_iter()) + // NOTE: discard tagged lines! + .filter(|line| !line.trim().starts_with('\\')) + .map(ToOwned::to_owned) + .collect::>(); + + // tagged docs, e.g. `\py this only applies to python!` + let tagged_docs = { + let tagged_lines = docs + .into_iter() + .flat_map(|doc| doc.into_iter()) + // NOTE: discard _un_tagged lines! + .filter_map(|line| { + let trimmed = line.trim(); + trimmed.starts_with('\\').then(|| { + let tag = trimmed.split_whitespace().next().unwrap(); + let line = &trimmed[tag.len()..]; + (tag[1..].to_owned(), line.to_owned()) + }) + }) + .collect::>(); + + let all_tags: HashSet<_> = tagged_lines.iter().map(|(tag, _)| tag).collect(); + let mut tagged_docs = HashMap::new(); + + for cur_tag in all_tags { + tagged_docs.insert( + cur_tag.clone(), + tagged_lines + .iter() + .filter_map(|(tag, line)| (cur_tag == tag).then(|| line.clone())) + .collect(), + ); + } + + tagged_docs + }; + + Self { doc, tagged_docs } + } +} + +/// A high-level representation of a flatbuffers object, which can be either a struct, a union or +/// an enum. +#[derive(Debug, Clone)] +pub struct Object { + /// File path of the associated fbs definition, e.g. `//rerun/components/point2d.fbs`. + pub filepath: String, + + /// Fully-qualified name of the object, e.g. 
`rerun.components.Point2D`. + pub fqname: String, + + /// Fully-qualified package name of the object, e.g. `rerun.components`. + pub pkg_name: String, + + /// Name of the object, e.g. `Point2D`. + pub name: String, + + /// The object's multiple layers of documentation. + pub docs: Docs, + + /// The object's kind: datatype, component or archetype. + pub kind: ObjectKind, + + /// The object's attributes. + pub attrs: Attributes, + + /// The object's inner fields, which can be either struct members or union values. + /// + /// These are pre-sorted, in ascending order, using their `order` attribute. + pub fields: Vec, + + /// Properties that only apply to either structs or unions. + pub specifics: ObjectSpecifics, +} + +impl Object { + /// Resolves a raw [`crate::Object`] into a higher-level representation that can be easily + /// interpreted and manipulated. + pub fn from_raw_object( + enums: &[FbsEnum<'_>], + objs: &[FbsObject<'_>], + obj: &FbsObject<'_>, + ) -> Self { + let fqname = obj.name().to_owned(); + let (pkg_name, name) = fqname + .rsplit_once('.') + .map_or((String::new(), fqname.clone()), |(pkg_name, name)| { + (pkg_name.to_owned(), name.to_owned()) + }); + + let filepath = obj + .declaration_file() + .map(ToOwned::to_owned) + .with_context(|| format!("no declaration_file found for {fqname}")) + .unwrap(); + + let docs = Docs::from_raw_docs(obj.documentation()); + + let kind = if pkg_name.starts_with("rerun.datatypes") { + ObjectKind::Datatype + } else if pkg_name.starts_with("rerun.components") { + ObjectKind::Component + } else if pkg_name.starts_with("rerun.archetypes") { + ObjectKind::Archetype + } else { + // TODO(cmc): support IDL definitions from outside the repo + panic!("unknown package {pkg_name:?}"); + }; + + let attrs = Attributes::from_raw_attrs(obj.attributes()); + + let fields = { + let mut fields: Vec<_> = obj + .fields() + .iter() + // NOTE: These are intermediate fields used by flatbuffers internals, we don't care. 
+ .filter(|field| field.type_().base_type() != FbsBaseType::UType) + .map(|field| ObjectField::from_raw_object_field(enums, objs, obj, &field)) + .collect(); + fields.sort_by_key(|field| field.order()); + fields + }; + + Self { + filepath, + fqname, + pkg_name, + name, + docs, + kind, + attrs, + fields, + specifics: ObjectSpecifics::Struct {}, + } + } + + /// Resolves a raw [`FbsEnum`] into a higher-level representation that can be easily + /// interpreted and manipulated. + pub fn from_raw_enum(enums: &[FbsEnum<'_>], objs: &[FbsObject<'_>], enm: &FbsEnum<'_>) -> Self { + let fqname = enm.name().to_owned(); + let (pkg_name, name) = fqname + .rsplit_once('.') + .map_or((String::new(), fqname.clone()), |(pkg_name, name)| { + (pkg_name.to_owned(), name.to_owned()) + }); + + let filepath = enm + .declaration_file() + .map(ToOwned::to_owned) + .with_context(|| format!("no declaration_file found for {fqname}")) + .unwrap(); + + let docs = Docs::from_raw_docs(enm.documentation()); + + let kind = if pkg_name.starts_with("rerun.datatypes") { + ObjectKind::Datatype + } else if pkg_name.starts_with("rerun.components") { + ObjectKind::Component + } else if pkg_name.starts_with("rerun.archetypes") { + ObjectKind::Archetype + } else { + // TODO(cmc): support IDL definitions from outside the repo + panic!("unknown package {pkg_name:?}"); + }; + + let utype = { + if enm.underlying_type().base_type() == FbsBaseType::UType { + // This is a union. + None + } else { + Some(ElementType::from_raw_base_type( + enums, + objs, + enm.underlying_type(), + enm.underlying_type().base_type(), + )) + } + }; + let attrs = Attributes::from_raw_attrs(enm.attributes()); + + let fields = enm + .values() + .iter() + // NOTE: `BaseType::None` is only used by internal flatbuffers fields, we don't care. 
+ .filter(|val| { + val.union_type() + .filter(|utype| utype.base_type() != FbsBaseType::None) + .is_some() + }) + .map(|val| ObjectField::from_raw_enum_value(enums, objs, enm, &val)) + .collect(); + + Self { + filepath, + fqname, + pkg_name, + name, + docs, + kind, + attrs, + fields, + specifics: ObjectSpecifics::Union { utype }, + } + } + + pub fn get_attr(&self, name: impl AsRef) -> T + where + T: std::str::FromStr, + T::Err: std::error::Error + Send + Sync + 'static, + { + self.attrs.get(self.fqname.as_str(), name) + } + + pub fn try_get_attr(&self, name: impl AsRef) -> Option + where + T: std::str::FromStr, + T::Err: std::error::Error + Send + Sync + 'static, + { + self.attrs.try_get(self.fqname.as_str(), name) + } + + /// Returns the mandatory `order` attribute of this object. + /// + /// Panics if no order has been set. + pub fn order(&self) -> u32 { + self.attrs.get::(&self.fqname, "order") + } + + pub fn is_struct(&self) -> bool { + match &self.specifics { + ObjectSpecifics::Struct {} => true, + ObjectSpecifics::Union { utype: _ } => false, + } + } + + pub fn is_enum(&self) -> bool { + match &self.specifics { + ObjectSpecifics::Struct {} => false, + ObjectSpecifics::Union { utype } => utype.is_some(), + } + } + + pub fn is_union(&self) -> bool { + match &self.specifics { + ObjectSpecifics::Struct {} => false, + ObjectSpecifics::Union { utype } => utype.is_none(), + } + } +} + +/// Properties specific to either structs or unions, but not both. +#[derive(Debug, Clone)] +pub enum ObjectSpecifics { + Struct {}, + Union { + /// The underlying type of the union. + /// + /// `None` if this is a union, some value if this is an enum. + utype: Option, + }, +} + +/// A high-level representation of a flatbuffers field, which can be either a struct member or a +/// union value. +#[derive(Debug, Clone)] +pub struct ObjectField { + /// File path of the associated fbs definition, e.g. `//rerun/components/point2d.fbs`. 
+ pub filepath: String, + + /// Fully-qualified name of the field, e.g. `rerun.components.Point2D#position`. + pub fqname: String, + + /// Fully-qualified package name of the field, e.g. `rerun.components`. + pub pkg_name: String, + + /// Name of the object, e.g. `Point2D`. + pub name: String, + + /// The field's multiple layers of documentation. + pub docs: Docs, + + /// The field's type. + pub typ: Type, + + /// The field's attributes. + pub attrs: Attributes, + + /// Whether the field is required. + /// + /// Always true for `struct` types. + pub required: bool, + + /// Whether the field is deprecated. + // + // TODO(cmc): implement custom attr to specify deprecation reason + pub deprecated: bool, +} + +impl ObjectField { + pub fn from_raw_object_field( + enums: &[FbsEnum<'_>], + objs: &[FbsObject<'_>], + obj: &FbsObject<'_>, + field: &FbsField<'_>, + ) -> Self { + let fqname = format!("{}.{}", obj.name(), field.name()); + let (pkg_name, name) = fqname + .rsplit_once('.') + .map_or((String::new(), fqname.clone()), |(pkg_name, name)| { + (pkg_name.to_owned(), name.to_owned()) + }); + + let filepath = obj + .declaration_file() + .map(ToOwned::to_owned) + .with_context(|| format!("no declaration_file found for {fqname}")) + .unwrap(); + + let docs = Docs::from_raw_docs(field.documentation()); + + let typ = Type::from_raw_type(enums, objs, field.type_()); + let attrs = Attributes::from_raw_attrs(field.attributes()); + + let required = field.required() || obj.is_struct(); + let deprecated = field.deprecated(); + + Self { + filepath, + fqname, + pkg_name, + name, + docs, + typ, + attrs, + required, + deprecated, + } + } + + pub fn from_raw_enum_value( + enums: &[FbsEnum<'_>], + objs: &[FbsObject<'_>], + enm: &FbsEnum<'_>, + val: &FbsEnumVal<'_>, + ) -> Self { + let fqname = format!("{}.{}", enm.name(), val.name()); + let (pkg_name, name) = fqname + .rsplit_once('.') + .map_or((String::new(), fqname.clone()), |(pkg_name, name)| { + (pkg_name.to_owned(), 
name.to_owned()) + }); + + let filepath = enm + .declaration_file() + .map(ToOwned::to_owned) + .with_context(|| format!("no declaration_file found for {fqname}")) + .unwrap(); + + let docs = Docs::from_raw_docs(val.documentation()); + + let typ = Type::from_raw_type( + enums, + objs, + // NOTE: Unwrapping is safe, we never resolve enums without union types. + val.union_type().unwrap(), + ); + + let attrs = Attributes::from_raw_attrs(val.attributes()); + + // TODO(cmc): not sure about this, but fbs unions are a bit weird that way + let required = true; + let deprecated = false; + + Self { + filepath, + fqname, + pkg_name, + name, + docs, + typ, + attrs, + required, + deprecated, + } + } + + /// Returns the mandatory `order` attribute of this field. + /// + /// Panics if no order has been set. + #[inline] + pub fn order(&self) -> u32 { + self.attrs.get::(&self.fqname, "order") + } + + pub fn get_attr(&self, name: impl AsRef) -> T + where + T: std::str::FromStr, + T::Err: std::error::Error + Send + Sync + 'static, + { + self.attrs.get(self.fqname.as_str(), name) + } + + pub fn try_get_attr(&self, name: impl AsRef) -> Option + where + T: std::str::FromStr, + T::Err: std::error::Error + Send + Sync + 'static, + { + self.attrs.try_get(self.fqname.as_str(), name) + } +} + +/// The underlying type of a `ResolvedObjectField`. 
+#[derive(Debug, Clone)] +pub enum Type { + UInt8, + UInt16, + UInt32, + UInt64, + Int8, + Int16, + Int32, + Int64, + Bool, + Float16, + Float32, + Float64, + String, + Array { + elem_type: ElementType, + length: usize, + }, + Vector { + elem_type: ElementType, + }, + Object(String), // fqname +} + +impl Type { + pub fn from_raw_type( + enums: &[FbsEnum<'_>], + objs: &[FbsObject<'_>], + field_type: FbsType<'_>, + ) -> Self { + fn flatten_scalar_wrappers(obj: &FbsObject<'_>) -> Type { + if obj.name().starts_with("fbs.scalars.") { + match obj.name() { + "fbs.scalars.Float32" => Type::Float32, + _ => unimplemented!(), // NOLINT + } + } else { + Type::Object(obj.name().to_owned()) + } + } + + match field_type.base_type() { + FbsBaseType::Bool => Self::Bool, + FbsBaseType::Byte => Self::Int8, + FbsBaseType::UByte => Self::UInt8, + FbsBaseType::Short => Self::Int16, + FbsBaseType::UShort => Self::UInt16, + FbsBaseType::Int => Self::Int32, + FbsBaseType::UInt => Self::UInt32, + FbsBaseType::Long => Self::Int64, + FbsBaseType::ULong => Self::UInt64, + // TODO(cmc): half support + FbsBaseType::Float => Self::Float32, + FbsBaseType::Double => Self::Float64, + FbsBaseType::String => Self::String, + FbsBaseType::Obj => { + let obj = &objs[field_type.index() as usize]; + flatten_scalar_wrappers(obj) + } + FbsBaseType::Union => { + let union = &enums[field_type.index() as usize]; + Self::Object(union.name().to_owned()) + } + // NOTE: flatbuffers doesn't support directly nesting multiple layers of arrays, they + // always have to be wrapped into intermediate layers of structs or tables. 
+ FbsBaseType::Array => Self::Array { + elem_type: ElementType::from_raw_base_type( + enums, + objs, + field_type, + field_type.element(), + ), + length: field_type.fixed_length() as usize, + }, + FbsBaseType::Vector => Self::Vector { + elem_type: ElementType::from_raw_base_type( + enums, + objs, + field_type, + field_type.element(), + ), + }, + FbsBaseType::None => unimplemented!(), // NOLINT + FbsBaseType::UType => unimplemented!(), // NOLINT + FbsBaseType::Vector64 => unimplemented!(), // NOLINT + _ => unreachable!(), + } + } +} + +/// The underlying element type for arrays/vectors/maps. +/// +/// Flatbuffers doesn't support directly nesting multiple layers of arrays, they +/// always have to be wrapped into intermediate layers of structs or tables! +#[derive(Debug, Clone)] +pub enum ElementType { + UInt8, + UInt16, + UInt32, + UInt64, + Int8, + Int16, + Int32, + Int64, + Bool, + Float16, + Float32, + Float64, + String, + Object(String), // fqname +} + +impl ElementType { + pub fn from_raw_base_type( + _enums: &[FbsEnum<'_>], + objs: &[FbsObject<'_>], + outer_type: FbsType<'_>, + inner_type: FbsBaseType, + ) -> Self { + /// Helper to turn wrapped scalars into actual scalars. 
+ fn flatten_scalar_wrappers(obj: &FbsObject<'_>) -> ElementType { + if obj.name().starts_with("fbs.scalars.") { + match obj.name() { + "fbs.scalars.Float32" => ElementType::Float32, + _ => unimplemented!(), // NOLINT + } + } else { + ElementType::Object(obj.name().to_owned()) + } + } + + match inner_type { + FbsBaseType::Bool => Self::Bool, + FbsBaseType::Byte => Self::Int8, + FbsBaseType::UByte => Self::UInt8, + FbsBaseType::Short => Self::Int16, + FbsBaseType::UShort => Self::UInt16, + FbsBaseType::Int => Self::Int32, + FbsBaseType::UInt => Self::UInt32, + FbsBaseType::Long => Self::Int64, + FbsBaseType::ULong => Self::UInt64, + // TODO(cmc): half support + FbsBaseType::Float => Self::Float32, + FbsBaseType::Double => Self::Float64, + FbsBaseType::String => Self::String, + FbsBaseType::Obj => { + let obj = &objs[outer_type.index() as usize]; + flatten_scalar_wrappers(obj) + } + FbsBaseType::Union => unimplemented!(), // NOLINT + // NOTE: flatbuffers doesn't support directly nesting multiple layers of arrays, they + // always have to be wrapped into intermediate layers of structs or tables. + FbsBaseType::Array => unimplemented!(), // NOLINT + FbsBaseType::None => unimplemented!(), // NOLINT + FbsBaseType::UType => unimplemented!(), // NOLINT + FbsBaseType::Vector => unimplemented!(), // NOLINT + FbsBaseType::Vector64 => unimplemented!(), // NOLINT + _ => unreachable!(), + } + } +} + +// --- Common --- + +/// A collection of arbitrary attributes. 
+#[derive(Debug, Default, Clone)] +pub struct Attributes(HashMap>); + +impl Attributes { + fn from_raw_attrs( + attrs: Option>>>, + ) -> Self { + Self( + attrs + .map(|attrs| { + attrs + .into_iter() + .map(|kv| (kv.key().to_owned(), kv.value().map(ToOwned::to_owned))) + .collect::>() + }) + .unwrap_or_default(), + ) + } +} + +impl Attributes { + pub fn get(&self, owner_fqname: impl AsRef, name: impl AsRef) -> T + where + T: std::str::FromStr, + T::Err: std::error::Error + Send + Sync + 'static, + { + let owner_fqname = owner_fqname.as_ref(); + let name = name.as_ref(); + + let value_str = self + .0 + .get(name) + .cloned() // cannot flatten it otherwise + .flatten() + .with_context(|| format!("no `{name}` attribute was specified for `{owner_fqname}`")) + .unwrap(); + + value_str + .parse() + .with_context(|| { + format!( + "invalid `{name}` attribute for `{owner_fqname}`: \ + expected unsigned integer, got `{value_str}` instead" + ) + }) + .unwrap() + } + + pub fn try_get(&self, owner_fqname: impl AsRef, name: impl AsRef) -> Option + where + T: std::str::FromStr, + T::Err: std::error::Error + Send + Sync + 'static, + { + let owner_fqname = owner_fqname.as_ref(); + let name = name.as_ref(); + + let value_str = self + .0 + .get(name) + .cloned() // cannot flatten it otherwise + .flatten()?; + + Some( + value_str + .parse() + .with_context(|| { + format!( + "invalid `{name}` attribute for `{owner_fqname}`: \ + expected unsigned integer, got `{value_str}` instead" + ) + }) + .unwrap(), + ) + } +} From 98d85f11659402fcf4ea6ae4df65f974ccef2e7c Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:45:02 +0200 Subject: [PATCH 07/16] generate reflection code --- crates/re_types_builder/source_hash.txt | 4 + crates/re_types_builder/src/reflection.rs | 2988 +++++++++++++++++++++ 2 files changed, 2992 insertions(+) create mode 100644 crates/re_types_builder/source_hash.txt create mode 100644 crates/re_types_builder/src/reflection.rs diff --git 
a/crates/re_types_builder/source_hash.txt b/crates/re_types_builder/source_hash.txt new file mode 100644 index 000000000000..f1766f06bb26 --- /dev/null +++ b/crates/re_types_builder/source_hash.txt @@ -0,0 +1,4 @@ +# This is a sha256 hash for all direct and indirect dependencies of this crate's build script. +# It can be safely removed at anytime to force the build script to run again. +# Check out build.rs to see how it's computed. +72d936d50287d6c16d0c1b91f86bd74120642c8fc08e885f08dd2b92bb52e8a4 \ No newline at end of file diff --git a/crates/re_types_builder/src/reflection.rs b/crates/re_types_builder/src/reflection.rs new file mode 100644 index 000000000000..adf5c858348e --- /dev/null +++ b/crates/re_types_builder/src/reflection.rs @@ -0,0 +1,2988 @@ +// automatically generated by the FlatBuffers compiler, do not modify + +// @generated + +use core::cmp::Ordering; +use core::mem; + +extern crate flatbuffers; +use self::flatbuffers::{EndianScalar, Follow}; + +#[allow(unused_imports, dead_code)] +pub mod reflection { + + use core::cmp::Ordering; + use core::mem; + + extern crate flatbuffers; + use self::flatbuffers::{EndianScalar, Follow}; + + #[deprecated( + since = "2.0.0", + note = "Use associated constants instead. This will no longer be generated in 2021." + )] + pub const ENUM_MIN_BASE_TYPE: i8 = 0; + #[deprecated( + since = "2.0.0", + note = "Use associated constants instead. This will no longer be generated in 2021." + )] + pub const ENUM_MAX_BASE_TYPE: i8 = 19; + #[deprecated( + since = "2.0.0", + note = "Use associated constants instead. This will no longer be generated in 2021." 
+ )] + #[allow(non_camel_case_types)] + pub const ENUM_VALUES_BASE_TYPE: [BaseType; 20] = [ + BaseType::None, + BaseType::UType, + BaseType::Bool, + BaseType::Byte, + BaseType::UByte, + BaseType::Short, + BaseType::UShort, + BaseType::Int, + BaseType::UInt, + BaseType::Long, + BaseType::ULong, + BaseType::Float, + BaseType::Double, + BaseType::String, + BaseType::Vector, + BaseType::Obj, + BaseType::Union, + BaseType::Array, + BaseType::Vector64, + BaseType::MaxBaseType, + ]; + + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] + #[repr(transparent)] + pub struct BaseType(pub i8); + #[allow(non_upper_case_globals)] + impl BaseType { + pub const None: Self = Self(0); + pub const UType: Self = Self(1); + pub const Bool: Self = Self(2); + pub const Byte: Self = Self(3); + pub const UByte: Self = Self(4); + pub const Short: Self = Self(5); + pub const UShort: Self = Self(6); + pub const Int: Self = Self(7); + pub const UInt: Self = Self(8); + pub const Long: Self = Self(9); + pub const ULong: Self = Self(10); + pub const Float: Self = Self(11); + pub const Double: Self = Self(12); + pub const String: Self = Self(13); + pub const Vector: Self = Self(14); + pub const Obj: Self = Self(15); + pub const Union: Self = Self(16); + pub const Array: Self = Self(17); + pub const Vector64: Self = Self(18); + pub const MaxBaseType: Self = Self(19); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 19; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::None, + Self::UType, + Self::Bool, + Self::Byte, + Self::UByte, + Self::Short, + Self::UShort, + Self::Int, + Self::UInt, + Self::Long, + Self::ULong, + Self::Float, + Self::Double, + Self::String, + Self::Vector, + Self::Obj, + Self::Union, + Self::Array, + Self::Vector64, + Self::MaxBaseType, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::None => Some("None"), + Self::UType => Some("UType"), + Self::Bool => Some("Bool"), + Self::Byte => Some("Byte"), + Self::UByte => Some("UByte"), + Self::Short => Some("Short"), + Self::UShort => Some("UShort"), + Self::Int => Some("Int"), + Self::UInt => Some("UInt"), + Self::Long => Some("Long"), + Self::ULong => Some("ULong"), + Self::Float => Some("Float"), + Self::Double => Some("Double"), + Self::String => Some("String"), + Self::Vector => Some("Vector"), + Self::Obj => Some("Obj"), + Self::Union => Some("Union"), + Self::Array => Some("Array"), + Self::Vector64 => Some("Vector64"), + Self::MaxBaseType => Some("MaxBaseType"), + _ => None, + } + } + } + impl core::fmt::Debug for BaseType { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } + } + impl<'a> flatbuffers::Follow<'a> for BaseType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = flatbuffers::read_scalar_at::(buf, loc); + Self(b) + } + } + + impl flatbuffers::Push for BaseType { + type Output = BaseType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + flatbuffers::emplace_scalar::(dst, self.0); + } + } + + impl flatbuffers::EndianScalar for BaseType { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } + } + + impl<'a> flatbuffers::Verifiable for BaseType { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + i8::run_verifier(v, pos) + } + } + + impl flatbuffers::SimpleToVerifyInSlice for BaseType {} + 
#[allow(non_upper_case_globals)] + mod bitflags_advanced_features { + flatbuffers::bitflags::bitflags! { + /// New schema language features that are not supported by old code generators. + #[derive(Default)] + pub struct AdvancedFeatures: u64 { + const AdvancedArrayFeatures = 1; + const AdvancedUnionFeatures = 2; + const OptionalScalars = 4; + const DefaultVectorsAndStrings = 8; + } + } + } + pub use self::bitflags_advanced_features::AdvancedFeatures; + + impl<'a> flatbuffers::Follow<'a> for AdvancedFeatures { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = flatbuffers::read_scalar_at::(buf, loc); + // Safety: + // This is safe because we know bitflags is implemented with a repr transparent uint of the correct size. + // from_bits_unchecked will be replaced by an equivalent but safe from_bits_retain in bitflags 2.0 + // https://github.com/bitflags/bitflags/issues/262 + Self::from_bits_unchecked(b) + } + } + + impl flatbuffers::Push for AdvancedFeatures { + type Output = AdvancedFeatures; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + flatbuffers::emplace_scalar::(dst, self.bits()); + } + } + + impl flatbuffers::EndianScalar for AdvancedFeatures { + type Scalar = u64; + #[inline] + fn to_little_endian(self) -> u64 { + self.bits().to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u64) -> Self { + let b = u64::from_le(v); + // Safety: + // This is safe because we know bitflags is implemented with a repr transparent uint of the correct size. 
+ // from_bits_unchecked will be replaced by an equivalent but safe from_bits_retain in bitflags 2.0 + // https://github.com/bitflags/bitflags/issues/262 + unsafe { Self::from_bits_unchecked(b) } + } + } + + impl<'a> flatbuffers::Verifiable for AdvancedFeatures { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + u64::run_verifier(v, pos) + } + } + + impl flatbuffers::SimpleToVerifyInSlice for AdvancedFeatures {} + pub enum TypeOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct Type<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for Type<'a> { + type Inner = Type<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> Type<'a> { + pub const VT_BASE_TYPE: flatbuffers::VOffsetT = 4; + pub const VT_ELEMENT: flatbuffers::VOffsetT = 6; + pub const VT_INDEX: flatbuffers::VOffsetT = 8; + pub const VT_FIXED_LENGTH: flatbuffers::VOffsetT = 10; + pub const VT_BASE_SIZE: flatbuffers::VOffsetT = 12; + pub const VT_ELEMENT_SIZE: flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Type { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args TypeArgs, + ) -> flatbuffers::WIPOffset> { + let mut builder = TypeBuilder::new(_fbb); + builder.add_element_size(args.element_size); + builder.add_base_size(args.base_size); + builder.add_index(args.index); + builder.add_fixed_length(args.fixed_length); + builder.add_element(args.element); + builder.add_base_type(args.base_type); + builder.finish() + } + + #[inline] + pub fn base_type(&self) -> BaseType { + // Safety: + // Created from valid Table for this object + // which contains a valid value 
in this slot + unsafe { + self._tab + .get::(Type::VT_BASE_TYPE, Some(BaseType::None)) + .unwrap() + } + } + #[inline] + pub fn element(&self) -> BaseType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Type::VT_ELEMENT, Some(BaseType::None)) + .unwrap() + } + } + #[inline] + pub fn index(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Type::VT_INDEX, Some(-1)).unwrap() } + } + #[inline] + pub fn fixed_length(&self) -> u16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Type::VT_FIXED_LENGTH, Some(0)) + .unwrap() + } + } + /// The size (octets) of the `base_type` field. + #[inline] + pub fn base_size(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Type::VT_BASE_SIZE, Some(4)).unwrap() } + } + /// The size (octets) of the `element` field, if present. + #[inline] + pub fn element_size(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Type::VT_ELEMENT_SIZE, Some(0)) + .unwrap() + } + } + } + + impl flatbuffers::Verifiable for Type<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::("base_type", Self::VT_BASE_TYPE, false)? + .visit_field::("element", Self::VT_ELEMENT, false)? + .visit_field::("index", Self::VT_INDEX, false)? + .visit_field::("fixed_length", Self::VT_FIXED_LENGTH, false)? + .visit_field::("base_size", Self::VT_BASE_SIZE, false)? + .visit_field::("element_size", Self::VT_ELEMENT_SIZE, false)? 
+ .finish(); + Ok(()) + } + } + pub struct TypeArgs { + pub base_type: BaseType, + pub element: BaseType, + pub index: i32, + pub fixed_length: u16, + pub base_size: u32, + pub element_size: u32, + } + impl<'a> Default for TypeArgs { + #[inline] + fn default() -> Self { + TypeArgs { + base_type: BaseType::None, + element: BaseType::None, + index: -1, + fixed_length: 0, + base_size: 4, + element_size: 0, + } + } + } + + pub struct TypeBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> TypeBuilder<'a, 'b> { + #[inline] + pub fn add_base_type(&mut self, base_type: BaseType) { + self.fbb_ + .push_slot::(Type::VT_BASE_TYPE, base_type, BaseType::None); + } + #[inline] + pub fn add_element(&mut self, element: BaseType) { + self.fbb_ + .push_slot::(Type::VT_ELEMENT, element, BaseType::None); + } + #[inline] + pub fn add_index(&mut self, index: i32) { + self.fbb_.push_slot::(Type::VT_INDEX, index, -1); + } + #[inline] + pub fn add_fixed_length(&mut self, fixed_length: u16) { + self.fbb_ + .push_slot::(Type::VT_FIXED_LENGTH, fixed_length, 0); + } + #[inline] + pub fn add_base_size(&mut self, base_size: u32) { + self.fbb_.push_slot::(Type::VT_BASE_SIZE, base_size, 4); + } + #[inline] + pub fn add_element_size(&mut self, element_size: u32) { + self.fbb_ + .push_slot::(Type::VT_ELEMENT_SIZE, element_size, 0); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TypeBuilder<'a, 'b> { + let start = _fbb.start_table(); + TypeBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for Type<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("Type"); + ds.field("base_type", &self.base_type()); + ds.field("element", &self.element()); + 
ds.field("index", &self.index()); + ds.field("fixed_length", &self.fixed_length()); + ds.field("base_size", &self.base_size()); + ds.field("element_size", &self.element_size()); + ds.finish() + } + } + pub enum KeyValueOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct KeyValue<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for KeyValue<'a> { + type Inner = KeyValue<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> KeyValue<'a> { + pub const VT_KEY: flatbuffers::VOffsetT = 4; + pub const VT_VALUE: flatbuffers::VOffsetT = 6; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + KeyValue { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args KeyValueArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = KeyValueBuilder::new(_fbb); + if let Some(x) = args.value { + builder.add_value(x); + } + if let Some(x) = args.key { + builder.add_key(x); + } + builder.finish() + } + + #[inline] + pub fn key(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(KeyValue::VT_KEY, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &KeyValue) -> bool { + self.key() < o.key() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.key(); + key.cmp(val) + } + #[inline] + pub fn value(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(KeyValue::VT_VALUE, None) + } + } + } + + impl flatbuffers::Verifiable for KeyValue<'_> { + #[inline] + fn run_verifier( + v: &mut 
flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("key", Self::VT_KEY, true)? + .visit_field::>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } + } + pub struct KeyValueArgs<'a> { + pub key: Option>, + pub value: Option>, + } + impl<'a> Default for KeyValueArgs<'a> { + #[inline] + fn default() -> Self { + KeyValueArgs { + key: None, // required field + value: None, + } + } + } + + pub struct KeyValueBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> KeyValueBuilder<'a, 'b> { + #[inline] + pub fn add_key(&mut self, key: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(KeyValue::VT_KEY, key); + } + #[inline] + pub fn add_value(&mut self, value: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(KeyValue::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> KeyValueBuilder<'a, 'b> { + let start = _fbb.start_table(); + KeyValueBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, KeyValue::VT_KEY, "key"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for KeyValue<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("KeyValue"); + ds.field("key", &self.key()); + ds.field("value", &self.value()); + ds.finish() + } + } + pub enum EnumValOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct EnumVal<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for EnumVal<'a> { + type Inner = EnumVal<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + 
impl<'a> EnumVal<'a> { + pub const VT_NAME: flatbuffers::VOffsetT = 4; + pub const VT_VALUE: flatbuffers::VOffsetT = 6; + pub const VT_UNION_TYPE: flatbuffers::VOffsetT = 10; + pub const VT_DOCUMENTATION: flatbuffers::VOffsetT = 12; + pub const VT_ATTRIBUTES: flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + EnumVal { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args EnumValArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = EnumValBuilder::new(_fbb); + builder.add_value(args.value); + if let Some(x) = args.attributes { + builder.add_attributes(x); + } + if let Some(x) = args.documentation { + builder.add_documentation(x); + } + if let Some(x) = args.union_type { + builder.add_union_type(x); + } + if let Some(x) = args.name { + builder.add_name(x); + } + builder.finish() + } + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(EnumVal::VT_NAME, None) + .unwrap() + } + } + #[inline] + pub fn value(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EnumVal::VT_VALUE, Some(0)).unwrap() } + } + #[inline] + pub fn key_compare_less_than(&self, o: &EnumVal) -> bool { + self.value() < o.value() + } + + #[inline] + pub fn key_compare_with_value(&self, val: i64) -> ::core::cmp::Ordering { + let key = self.value(); + key.cmp(&val) + } + #[inline] + pub fn union_type(&self) -> Option> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(EnumVal::VT_UNION_TYPE, None) + } + } + #[inline] + pub fn documentation( + &self, + ) -> Option>> { + // Safety: + // 
Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(EnumVal::VT_DOCUMENTATION, None) + } + } + #[inline] + pub fn attributes( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(EnumVal::VT_ATTRIBUTES, None) + } + } + } + + impl flatbuffers::Verifiable for EnumVal<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("name", Self::VT_NAME, true)? + .visit_field::("value", Self::VT_VALUE, false)? + .visit_field::>( + "union_type", + Self::VT_UNION_TYPE, + false, + )? + .visit_field::>, + >>("documentation", Self::VT_DOCUMENTATION, false)? + .visit_field::>, + >>("attributes", Self::VT_ATTRIBUTES, false)? + .finish(); + Ok(()) + } + } + pub struct EnumValArgs<'a> { + pub name: Option>, + pub value: i64, + pub union_type: Option>>, + pub documentation: Option< + flatbuffers::WIPOffset>>, + >, + pub attributes: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + } + impl<'a> Default for EnumValArgs<'a> { + #[inline] + fn default() -> Self { + EnumValArgs { + name: None, // required field + value: 0, + union_type: None, + documentation: None, + attributes: None, + } + } + } + + pub struct EnumValBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> EnumValBuilder<'a, 'b> { + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(EnumVal::VT_NAME, name); + } + #[inline] + pub fn add_value(&mut self, value: i64) { + self.fbb_.push_slot::(EnumVal::VT_VALUE, value, 0); + } + #[inline] + pub fn add_union_type(&mut self, union_type: 
flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>( + EnumVal::VT_UNION_TYPE, + union_type, + ); + } + #[inline] + pub fn add_documentation( + &mut self, + documentation: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b str>>, + >, + ) { + self.fbb_.push_slot_always::>( + EnumVal::VT_DOCUMENTATION, + documentation, + ); + } + #[inline] + pub fn add_attributes( + &mut self, + attributes: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(EnumVal::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> EnumValBuilder<'a, 'b> { + let start = _fbb.start_table(); + EnumValBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, EnumVal::VT_NAME, "name"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for EnumVal<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("EnumVal"); + ds.field("name", &self.name()); + ds.field("value", &self.value()); + ds.field("union_type", &self.union_type()); + ds.field("documentation", &self.documentation()); + ds.field("attributes", &self.attributes()); + ds.finish() + } + } + pub enum EnumOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct Enum<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for Enum<'a> { + type Inner = Enum<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> Enum<'a> { + pub const VT_NAME: flatbuffers::VOffsetT = 4; + pub const VT_VALUES: flatbuffers::VOffsetT = 6; + pub const VT_IS_UNION: flatbuffers::VOffsetT = 8; + pub const VT_UNDERLYING_TYPE: flatbuffers::VOffsetT = 10; + pub const 
VT_ATTRIBUTES: flatbuffers::VOffsetT = 12; + pub const VT_DOCUMENTATION: flatbuffers::VOffsetT = 14; + pub const VT_DECLARATION_FILE: flatbuffers::VOffsetT = 16; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Enum { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args EnumArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = EnumBuilder::new(_fbb); + if let Some(x) = args.declaration_file { + builder.add_declaration_file(x); + } + if let Some(x) = args.documentation { + builder.add_documentation(x); + } + if let Some(x) = args.attributes { + builder.add_attributes(x); + } + if let Some(x) = args.underlying_type { + builder.add_underlying_type(x); + } + if let Some(x) = args.values { + builder.add_values(x); + } + if let Some(x) = args.name { + builder.add_name(x); + } + builder.add_is_union(args.is_union); + builder.finish() + } + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Enum::VT_NAME, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &Enum) -> bool { + self.name() < o.name() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.name(); + key.cmp(val) + } + #[inline] + pub fn values(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>, + >>(Enum::VT_VALUES, None) + .unwrap() + } + } + #[inline] + pub fn is_union(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Enum::VT_IS_UNION, Some(false)) + .unwrap() + } + 
} + #[inline] + pub fn underlying_type(&self) -> Type<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Enum::VT_UNDERLYING_TYPE, None) + .unwrap() + } + } + #[inline] + pub fn attributes( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Enum::VT_ATTRIBUTES, None) + } + } + #[inline] + pub fn documentation( + &self, + ) -> Option>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Enum::VT_DOCUMENTATION, None) + } + } + /// File that this Enum is declared in. + #[inline] + pub fn declaration_file(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Enum::VT_DECLARATION_FILE, None) + } + } + } + + impl flatbuffers::Verifiable for Enum<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("name", Self::VT_NAME, true)? + .visit_field::>, + >>("values", Self::VT_VALUES, true)? + .visit_field::("is_union", Self::VT_IS_UNION, false)? + .visit_field::>( + "underlying_type", + Self::VT_UNDERLYING_TYPE, + true, + )? + .visit_field::>, + >>("attributes", Self::VT_ATTRIBUTES, false)? + .visit_field::>, + >>("documentation", Self::VT_DOCUMENTATION, false)? + .visit_field::>( + "declaration_file", + Self::VT_DECLARATION_FILE, + false, + )? 
+ .finish(); + Ok(()) + } + } + pub struct EnumArgs<'a> { + pub name: Option>, + pub values: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub is_union: bool, + pub underlying_type: Option>>, + pub attributes: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub documentation: Option< + flatbuffers::WIPOffset>>, + >, + pub declaration_file: Option>, + } + impl<'a> Default for EnumArgs<'a> { + #[inline] + fn default() -> Self { + EnumArgs { + name: None, // required field + values: None, // required field + is_union: false, + underlying_type: None, // required field + attributes: None, + documentation: None, + declaration_file: None, + } + } + } + + pub struct EnumBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> EnumBuilder<'a, 'b> { + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(Enum::VT_NAME, name); + } + #[inline] + pub fn add_values( + &mut self, + values: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Enum::VT_VALUES, values); + } + #[inline] + pub fn add_is_union(&mut self, is_union: bool) { + self.fbb_ + .push_slot::(Enum::VT_IS_UNION, is_union, false); + } + #[inline] + pub fn add_underlying_type(&mut self, underlying_type: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>( + Enum::VT_UNDERLYING_TYPE, + underlying_type, + ); + } + #[inline] + pub fn add_attributes( + &mut self, + attributes: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Enum::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn add_documentation( + &mut self, + documentation: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b 
str>>, + >, + ) { + self.fbb_.push_slot_always::>( + Enum::VT_DOCUMENTATION, + documentation, + ); + } + #[inline] + pub fn add_declaration_file(&mut self, declaration_file: flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::>( + Enum::VT_DECLARATION_FILE, + declaration_file, + ); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> EnumBuilder<'a, 'b> { + let start = _fbb.start_table(); + EnumBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Enum::VT_NAME, "name"); + self.fbb_.required(o, Enum::VT_VALUES, "values"); + self.fbb_ + .required(o, Enum::VT_UNDERLYING_TYPE, "underlying_type"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for Enum<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("Enum"); + ds.field("name", &self.name()); + ds.field("values", &self.values()); + ds.field("is_union", &self.is_union()); + ds.field("underlying_type", &self.underlying_type()); + ds.field("attributes", &self.attributes()); + ds.field("documentation", &self.documentation()); + ds.field("declaration_file", &self.declaration_file()); + ds.finish() + } + } + pub enum FieldOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct Field<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for Field<'a> { + type Inner = Field<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> Field<'a> { + pub const VT_NAME: flatbuffers::VOffsetT = 4; + pub const VT_TYPE_: flatbuffers::VOffsetT = 6; + pub const VT_ID: flatbuffers::VOffsetT = 8; + pub const VT_OFFSET: flatbuffers::VOffsetT = 10; + pub const VT_DEFAULT_INTEGER: flatbuffers::VOffsetT = 12; + pub const VT_DEFAULT_REAL: flatbuffers::VOffsetT = 
14; + pub const VT_DEPRECATED: flatbuffers::VOffsetT = 16; + pub const VT_REQUIRED: flatbuffers::VOffsetT = 18; + pub const VT_KEY: flatbuffers::VOffsetT = 20; + pub const VT_ATTRIBUTES: flatbuffers::VOffsetT = 22; + pub const VT_DOCUMENTATION: flatbuffers::VOffsetT = 24; + pub const VT_OPTIONAL: flatbuffers::VOffsetT = 26; + pub const VT_PADDING: flatbuffers::VOffsetT = 28; + pub const VT_OFFSET64: flatbuffers::VOffsetT = 30; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Field { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args FieldArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = FieldBuilder::new(_fbb); + builder.add_default_real(args.default_real); + builder.add_default_integer(args.default_integer); + if let Some(x) = args.documentation { + builder.add_documentation(x); + } + if let Some(x) = args.attributes { + builder.add_attributes(x); + } + if let Some(x) = args.type_ { + builder.add_type_(x); + } + if let Some(x) = args.name { + builder.add_name(x); + } + builder.add_padding(args.padding); + builder.add_offset(args.offset); + builder.add_id(args.id); + builder.add_offset64(args.offset64); + builder.add_optional(args.optional); + builder.add_key(args.key); + builder.add_required(args.required); + builder.add_deprecated(args.deprecated); + builder.finish() + } + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Field::VT_NAME, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &Field) -> bool { + self.name() < o.name() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.name(); + key.cmp(val) + } + #[inline] + pub fn type_(&self) -> Type<'a> { + // 
Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Field::VT_TYPE_, None) + .unwrap() + } + } + #[inline] + pub fn id(&self) -> u16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Field::VT_ID, Some(0)).unwrap() } + } + #[inline] + pub fn offset(&self) -> u16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Field::VT_OFFSET, Some(0)).unwrap() } + } + #[inline] + pub fn default_integer(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Field::VT_DEFAULT_INTEGER, Some(0)) + .unwrap() + } + } + #[inline] + pub fn default_real(&self) -> f64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Field::VT_DEFAULT_REAL, Some(0.0)) + .unwrap() + } + } + #[inline] + pub fn deprecated(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Field::VT_DEPRECATED, Some(false)) + .unwrap() + } + } + #[inline] + pub fn required(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Field::VT_REQUIRED, Some(false)) + .unwrap() + } + } + #[inline] + pub fn key(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Field::VT_KEY, Some(false)).unwrap() } + } + #[inline] + pub fn attributes( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + 
>>(Field::VT_ATTRIBUTES, None) + } + } + #[inline] + pub fn documentation( + &self, + ) -> Option>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Field::VT_DOCUMENTATION, None) + } + } + #[inline] + pub fn optional(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Field::VT_OPTIONAL, Some(false)) + .unwrap() + } + } + /// Number of padding octets to always add after this field. Structs only. + #[inline] + pub fn padding(&self) -> u16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Field::VT_PADDING, Some(0)).unwrap() } + } + /// If the field uses 64-bit offsets. + #[inline] + pub fn offset64(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Field::VT_OFFSET64, Some(false)) + .unwrap() + } + } + } + + impl flatbuffers::Verifiable for Field<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("name", Self::VT_NAME, true)? + .visit_field::>("type_", Self::VT_TYPE_, true)? + .visit_field::("id", Self::VT_ID, false)? + .visit_field::("offset", Self::VT_OFFSET, false)? + .visit_field::("default_integer", Self::VT_DEFAULT_INTEGER, false)? + .visit_field::("default_real", Self::VT_DEFAULT_REAL, false)? + .visit_field::("deprecated", Self::VT_DEPRECATED, false)? + .visit_field::("required", Self::VT_REQUIRED, false)? + .visit_field::("key", Self::VT_KEY, false)? + .visit_field::>, + >>("attributes", Self::VT_ATTRIBUTES, false)? + .visit_field::>, + >>("documentation", Self::VT_DOCUMENTATION, false)? 
+ .visit_field::("optional", Self::VT_OPTIONAL, false)? + .visit_field::("padding", Self::VT_PADDING, false)? + .visit_field::("offset64", Self::VT_OFFSET64, false)? + .finish(); + Ok(()) + } + } + pub struct FieldArgs<'a> { + pub name: Option>, + pub type_: Option>>, + pub id: u16, + pub offset: u16, + pub default_integer: i64, + pub default_real: f64, + pub deprecated: bool, + pub required: bool, + pub key: bool, + pub attributes: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub documentation: Option< + flatbuffers::WIPOffset>>, + >, + pub optional: bool, + pub padding: u16, + pub offset64: bool, + } + impl<'a> Default for FieldArgs<'a> { + #[inline] + fn default() -> Self { + FieldArgs { + name: None, // required field + type_: None, // required field + id: 0, + offset: 0, + default_integer: 0, + default_real: 0.0, + deprecated: false, + required: false, + key: false, + attributes: None, + documentation: None, + optional: false, + padding: 0, + offset64: false, + } + } + } + + pub struct FieldBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> FieldBuilder<'a, 'b> { + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(Field::VT_NAME, name); + } + #[inline] + pub fn add_type_(&mut self, type_: flatbuffers::WIPOffset>) { + self.fbb_ + .push_slot_always::>(Field::VT_TYPE_, type_); + } + #[inline] + pub fn add_id(&mut self, id: u16) { + self.fbb_.push_slot::(Field::VT_ID, id, 0); + } + #[inline] + pub fn add_offset(&mut self, offset: u16) { + self.fbb_.push_slot::(Field::VT_OFFSET, offset, 0); + } + #[inline] + pub fn add_default_integer(&mut self, default_integer: i64) { + self.fbb_ + .push_slot::(Field::VT_DEFAULT_INTEGER, default_integer, 0); + } + #[inline] + pub fn add_default_real(&mut self, default_real: f64) { + self.fbb_ + .push_slot::(Field::VT_DEFAULT_REAL, 
default_real, 0.0); + } + #[inline] + pub fn add_deprecated(&mut self, deprecated: bool) { + self.fbb_ + .push_slot::(Field::VT_DEPRECATED, deprecated, false); + } + #[inline] + pub fn add_required(&mut self, required: bool) { + self.fbb_ + .push_slot::(Field::VT_REQUIRED, required, false); + } + #[inline] + pub fn add_key(&mut self, key: bool) { + self.fbb_.push_slot::(Field::VT_KEY, key, false); + } + #[inline] + pub fn add_attributes( + &mut self, + attributes: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Field::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn add_documentation( + &mut self, + documentation: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b str>>, + >, + ) { + self.fbb_.push_slot_always::>( + Field::VT_DOCUMENTATION, + documentation, + ); + } + #[inline] + pub fn add_optional(&mut self, optional: bool) { + self.fbb_ + .push_slot::(Field::VT_OPTIONAL, optional, false); + } + #[inline] + pub fn add_padding(&mut self, padding: u16) { + self.fbb_.push_slot::(Field::VT_PADDING, padding, 0); + } + #[inline] + pub fn add_offset64(&mut self, offset64: bool) { + self.fbb_ + .push_slot::(Field::VT_OFFSET64, offset64, false); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> FieldBuilder<'a, 'b> { + let start = _fbb.start_table(); + FieldBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Field::VT_NAME, "name"); + self.fbb_.required(o, Field::VT_TYPE_, "type_"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for Field<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("Field"); + ds.field("name", &self.name()); + ds.field("type_", &self.type_()); + ds.field("id", &self.id()); + ds.field("offset", 
&self.offset()); + ds.field("default_integer", &self.default_integer()); + ds.field("default_real", &self.default_real()); + ds.field("deprecated", &self.deprecated()); + ds.field("required", &self.required()); + ds.field("key", &self.key()); + ds.field("attributes", &self.attributes()); + ds.field("documentation", &self.documentation()); + ds.field("optional", &self.optional()); + ds.field("padding", &self.padding()); + ds.field("offset64", &self.offset64()); + ds.finish() + } + } + pub enum ObjectOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct Object<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for Object<'a> { + type Inner = Object<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> Object<'a> { + pub const VT_NAME: flatbuffers::VOffsetT = 4; + pub const VT_FIELDS: flatbuffers::VOffsetT = 6; + pub const VT_IS_STRUCT: flatbuffers::VOffsetT = 8; + pub const VT_MINALIGN: flatbuffers::VOffsetT = 10; + pub const VT_BYTESIZE: flatbuffers::VOffsetT = 12; + pub const VT_ATTRIBUTES: flatbuffers::VOffsetT = 14; + pub const VT_DOCUMENTATION: flatbuffers::VOffsetT = 16; + pub const VT_DECLARATION_FILE: flatbuffers::VOffsetT = 18; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Object { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args ObjectArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = ObjectBuilder::new(_fbb); + if let Some(x) = args.declaration_file { + builder.add_declaration_file(x); + } + if let Some(x) = args.documentation { + builder.add_documentation(x); + } + if let Some(x) = args.attributes { + builder.add_attributes(x); + } + builder.add_bytesize(args.bytesize); + builder.add_minalign(args.minalign); + if let Some(x) = 
args.fields { + builder.add_fields(x); + } + if let Some(x) = args.name { + builder.add_name(x); + } + builder.add_is_struct(args.is_struct); + builder.finish() + } + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Object::VT_NAME, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &Object) -> bool { + self.name() < o.name() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.name(); + key.cmp(val) + } + #[inline] + pub fn fields(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>, + >>(Object::VT_FIELDS, None) + .unwrap() + } + } + #[inline] + pub fn is_struct(&self) -> bool { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Object::VT_IS_STRUCT, Some(false)) + .unwrap() + } + } + #[inline] + pub fn minalign(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Object::VT_MINALIGN, Some(0)).unwrap() } + } + #[inline] + pub fn bytesize(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Object::VT_BYTESIZE, Some(0)).unwrap() } + } + #[inline] + pub fn attributes( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Object::VT_ATTRIBUTES, None) + } + } + #[inline] + pub fn documentation( + &self, + ) -> Option>> { + // Safety: + // Created from valid Table for this object + // which contains a valid 
value in this slot + unsafe { + self._tab.get::>, + >>(Object::VT_DOCUMENTATION, None) + } + } + /// File that this Object is declared in. + #[inline] + pub fn declaration_file(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Object::VT_DECLARATION_FILE, None) + } + } + } + + impl flatbuffers::Verifiable for Object<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("name", Self::VT_NAME, true)? + .visit_field::>, + >>("fields", Self::VT_FIELDS, true)? + .visit_field::("is_struct", Self::VT_IS_STRUCT, false)? + .visit_field::("minalign", Self::VT_MINALIGN, false)? + .visit_field::("bytesize", Self::VT_BYTESIZE, false)? + .visit_field::>, + >>("attributes", Self::VT_ATTRIBUTES, false)? + .visit_field::>, + >>("documentation", Self::VT_DOCUMENTATION, false)? + .visit_field::>( + "declaration_file", + Self::VT_DECLARATION_FILE, + false, + )? 
+ .finish(); + Ok(()) + } + } + pub struct ObjectArgs<'a> { + pub name: Option>, + pub fields: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub is_struct: bool, + pub minalign: i32, + pub bytesize: i32, + pub attributes: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub documentation: Option< + flatbuffers::WIPOffset>>, + >, + pub declaration_file: Option>, + } + impl<'a> Default for ObjectArgs<'a> { + #[inline] + fn default() -> Self { + ObjectArgs { + name: None, // required field + fields: None, // required field + is_struct: false, + minalign: 0, + bytesize: 0, + attributes: None, + documentation: None, + declaration_file: None, + } + } + } + + pub struct ObjectBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> ObjectBuilder<'a, 'b> { + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(Object::VT_NAME, name); + } + #[inline] + pub fn add_fields( + &mut self, + fields: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Object::VT_FIELDS, fields); + } + #[inline] + pub fn add_is_struct(&mut self, is_struct: bool) { + self.fbb_ + .push_slot::(Object::VT_IS_STRUCT, is_struct, false); + } + #[inline] + pub fn add_minalign(&mut self, minalign: i32) { + self.fbb_.push_slot::(Object::VT_MINALIGN, minalign, 0); + } + #[inline] + pub fn add_bytesize(&mut self, bytesize: i32) { + self.fbb_.push_slot::(Object::VT_BYTESIZE, bytesize, 0); + } + #[inline] + pub fn add_attributes( + &mut self, + attributes: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Object::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn add_documentation( + &mut self, + documentation: 
flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b str>>, + >, + ) { + self.fbb_.push_slot_always::>( + Object::VT_DOCUMENTATION, + documentation, + ); + } + #[inline] + pub fn add_declaration_file(&mut self, declaration_file: flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::>( + Object::VT_DECLARATION_FILE, + declaration_file, + ); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ObjectBuilder<'a, 'b> { + let start = _fbb.start_table(); + ObjectBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Object::VT_NAME, "name"); + self.fbb_.required(o, Object::VT_FIELDS, "fields"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for Object<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("Object"); + ds.field("name", &self.name()); + ds.field("fields", &self.fields()); + ds.field("is_struct", &self.is_struct()); + ds.field("minalign", &self.minalign()); + ds.field("bytesize", &self.bytesize()); + ds.field("attributes", &self.attributes()); + ds.field("documentation", &self.documentation()); + ds.field("declaration_file", &self.declaration_file()); + ds.finish() + } + } + pub enum RPCCallOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct RPCCall<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for RPCCall<'a> { + type Inner = RPCCall<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> RPCCall<'a> { + pub const VT_NAME: flatbuffers::VOffsetT = 4; + pub const VT_REQUEST: flatbuffers::VOffsetT = 6; + pub const VT_RESPONSE: flatbuffers::VOffsetT = 8; + pub const VT_ATTRIBUTES: flatbuffers::VOffsetT = 10; + pub const VT_DOCUMENTATION: 
flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + RPCCall { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args RPCCallArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = RPCCallBuilder::new(_fbb); + if let Some(x) = args.documentation { + builder.add_documentation(x); + } + if let Some(x) = args.attributes { + builder.add_attributes(x); + } + if let Some(x) = args.response { + builder.add_response(x); + } + if let Some(x) = args.request { + builder.add_request(x); + } + if let Some(x) = args.name { + builder.add_name(x); + } + builder.finish() + } + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(RPCCall::VT_NAME, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &RPCCall) -> bool { + self.name() < o.name() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.name(); + key.cmp(val) + } + #[inline] + pub fn request(&self) -> Object<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(RPCCall::VT_REQUEST, None) + .unwrap() + } + } + #[inline] + pub fn response(&self) -> Object<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(RPCCall::VT_RESPONSE, None) + .unwrap() + } + } + #[inline] + pub fn attributes( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(RPCCall::VT_ATTRIBUTES, None) + } + } + #[inline] + pub fn documentation( + &self, + ) 
-> Option>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(RPCCall::VT_DOCUMENTATION, None) + } + } + } + + impl flatbuffers::Verifiable for RPCCall<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("name", Self::VT_NAME, true)? + .visit_field::>( + "request", + Self::VT_REQUEST, + true, + )? + .visit_field::>( + "response", + Self::VT_RESPONSE, + true, + )? + .visit_field::>, + >>("attributes", Self::VT_ATTRIBUTES, false)? + .visit_field::>, + >>("documentation", Self::VT_DOCUMENTATION, false)? + .finish(); + Ok(()) + } + } + pub struct RPCCallArgs<'a> { + pub name: Option>, + pub request: Option>>, + pub response: Option>>, + pub attributes: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub documentation: Option< + flatbuffers::WIPOffset>>, + >, + } + impl<'a> Default for RPCCallArgs<'a> { + #[inline] + fn default() -> Self { + RPCCallArgs { + name: None, // required field + request: None, // required field + response: None, // required field + attributes: None, + documentation: None, + } + } + } + + pub struct RPCCallBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> RPCCallBuilder<'a, 'b> { + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(RPCCall::VT_NAME, name); + } + #[inline] + pub fn add_request(&mut self, request: flatbuffers::WIPOffset>) { + self.fbb_ + .push_slot_always::>(RPCCall::VT_REQUEST, request); + } + #[inline] + pub fn add_response(&mut self, response: flatbuffers::WIPOffset>) { + self.fbb_ + .push_slot_always::>(RPCCall::VT_RESPONSE, response); + } + #[inline] + pub fn add_attributes( + 
&mut self, + attributes: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(RPCCall::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn add_documentation( + &mut self, + documentation: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b str>>, + >, + ) { + self.fbb_.push_slot_always::>( + RPCCall::VT_DOCUMENTATION, + documentation, + ); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> RPCCallBuilder<'a, 'b> { + let start = _fbb.start_table(); + RPCCallBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, RPCCall::VT_NAME, "name"); + self.fbb_.required(o, RPCCall::VT_REQUEST, "request"); + self.fbb_.required(o, RPCCall::VT_RESPONSE, "response"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for RPCCall<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("RPCCall"); + ds.field("name", &self.name()); + ds.field("request", &self.request()); + ds.field("response", &self.response()); + ds.field("attributes", &self.attributes()); + ds.field("documentation", &self.documentation()); + ds.finish() + } + } + pub enum ServiceOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct Service<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for Service<'a> { + type Inner = Service<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> Service<'a> { + pub const VT_NAME: flatbuffers::VOffsetT = 4; + pub const VT_CALLS: flatbuffers::VOffsetT = 6; + pub const VT_ATTRIBUTES: flatbuffers::VOffsetT = 8; + pub const VT_DOCUMENTATION: flatbuffers::VOffsetT = 10; + pub const VT_DECLARATION_FILE: 
flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Service { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args ServiceArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = ServiceBuilder::new(_fbb); + if let Some(x) = args.declaration_file { + builder.add_declaration_file(x); + } + if let Some(x) = args.documentation { + builder.add_documentation(x); + } + if let Some(x) = args.attributes { + builder.add_attributes(x); + } + if let Some(x) = args.calls { + builder.add_calls(x); + } + if let Some(x) = args.name { + builder.add_name(x); + } + builder.finish() + } + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Service::VT_NAME, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &Service) -> bool { + self.name() < o.name() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.name(); + key.cmp(val) + } + #[inline] + pub fn calls( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Service::VT_CALLS, None) + } + } + #[inline] + pub fn attributes( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Service::VT_ATTRIBUTES, None) + } + } + #[inline] + pub fn documentation( + &self, + ) -> Option>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Service::VT_DOCUMENTATION, None) + } + } + /// File that this Service is 
declared in. + #[inline] + pub fn declaration_file(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Service::VT_DECLARATION_FILE, None) + } + } + } + + impl flatbuffers::Verifiable for Service<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>("name", Self::VT_NAME, true)? + .visit_field::>, + >>("calls", Self::VT_CALLS, false)? + .visit_field::>, + >>("attributes", Self::VT_ATTRIBUTES, false)? + .visit_field::>, + >>("documentation", Self::VT_DOCUMENTATION, false)? + .visit_field::>( + "declaration_file", + Self::VT_DECLARATION_FILE, + false, + )? + .finish(); + Ok(()) + } + } + pub struct ServiceArgs<'a> { + pub name: Option>, + pub calls: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub attributes: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub documentation: Option< + flatbuffers::WIPOffset>>, + >, + pub declaration_file: Option>, + } + impl<'a> Default for ServiceArgs<'a> { + #[inline] + fn default() -> Self { + ServiceArgs { + name: None, // required field + calls: None, + attributes: None, + documentation: None, + declaration_file: None, + } + } + } + + pub struct ServiceBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> ServiceBuilder<'a, 'b> { + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(Service::VT_NAME, name); + } + #[inline] + pub fn add_calls( + &mut self, + calls: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Service::VT_CALLS, 
calls); + } + #[inline] + pub fn add_attributes( + &mut self, + attributes: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Service::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn add_documentation( + &mut self, + documentation: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b str>>, + >, + ) { + self.fbb_.push_slot_always::>( + Service::VT_DOCUMENTATION, + documentation, + ); + } + #[inline] + pub fn add_declaration_file(&mut self, declaration_file: flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::>( + Service::VT_DECLARATION_FILE, + declaration_file, + ); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ServiceBuilder<'a, 'b> { + let start = _fbb.start_table(); + ServiceBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Service::VT_NAME, "name"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for Service<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("Service"); + ds.field("name", &self.name()); + ds.field("calls", &self.calls()); + ds.field("attributes", &self.attributes()); + ds.field("documentation", &self.documentation()); + ds.field("declaration_file", &self.declaration_file()); + ds.finish() + } + } + pub enum SchemaFileOffset {} + #[derive(Copy, Clone, PartialEq)] + + /// File specific information. + /// Symbols declared within a file may be recovered by iterating over all + /// symbols and examining the `declaration_file` field. 
+ pub struct SchemaFile<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for SchemaFile<'a> { + type Inner = SchemaFile<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> SchemaFile<'a> { + pub const VT_FILENAME: flatbuffers::VOffsetT = 4; + pub const VT_INCLUDED_FILENAMES: flatbuffers::VOffsetT = 6; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + SchemaFile { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args SchemaFileArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = SchemaFileBuilder::new(_fbb); + if let Some(x) = args.included_filenames { + builder.add_included_filenames(x); + } + if let Some(x) = args.filename { + builder.add_filename(x); + } + builder.finish() + } + + /// Filename, relative to project root. + #[inline] + pub fn filename(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(SchemaFile::VT_FILENAME, None) + .unwrap() + } + } + #[inline] + pub fn key_compare_less_than(&self, o: &SchemaFile) -> bool { + self.filename() < o.filename() + } + + #[inline] + pub fn key_compare_with_value(&self, val: &str) -> ::core::cmp::Ordering { + let key = self.filename(); + key.cmp(val) + } + /// Names of included files, relative to project root. 
+ #[inline] + pub fn included_filenames( + &self, + ) -> Option>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(SchemaFile::VT_INCLUDED_FILENAMES, None) + } + } + } + + impl flatbuffers::Verifiable for SchemaFile<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>( + "filename", + Self::VT_FILENAME, + true, + )? + .visit_field::>, + >>("included_filenames", Self::VT_INCLUDED_FILENAMES, false)? + .finish(); + Ok(()) + } + } + pub struct SchemaFileArgs<'a> { + pub filename: Option>, + pub included_filenames: Option< + flatbuffers::WIPOffset>>, + >, + } + impl<'a> Default for SchemaFileArgs<'a> { + #[inline] + fn default() -> Self { + SchemaFileArgs { + filename: None, // required field + included_filenames: None, + } + } + } + + pub struct SchemaFileBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> SchemaFileBuilder<'a, 'b> { + #[inline] + pub fn add_filename(&mut self, filename: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(SchemaFile::VT_FILENAME, filename); + } + #[inline] + pub fn add_included_filenames( + &mut self, + included_filenames: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<&'b str>>, + >, + ) { + self.fbb_.push_slot_always::>( + SchemaFile::VT_INCLUDED_FILENAMES, + included_filenames, + ); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SchemaFileBuilder<'a, 'b> { + let start = _fbb.start_table(); + SchemaFileBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, SchemaFile::VT_FILENAME, "filename"); + 
flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for SchemaFile<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("SchemaFile"); + ds.field("filename", &self.filename()); + ds.field("included_filenames", &self.included_filenames()); + ds.finish() + } + } + pub enum SchemaOffset {} + #[derive(Copy, Clone, PartialEq)] + + pub struct Schema<'a> { + pub _tab: flatbuffers::Table<'a>, + } + + impl<'a> flatbuffers::Follow<'a> for Schema<'a> { + type Inner = Schema<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table::new(buf, loc), + } + } + } + + impl<'a> Schema<'a> { + pub const VT_OBJECTS: flatbuffers::VOffsetT = 4; + pub const VT_ENUMS: flatbuffers::VOffsetT = 6; + pub const VT_FILE_IDENT: flatbuffers::VOffsetT = 8; + pub const VT_FILE_EXT: flatbuffers::VOffsetT = 10; + pub const VT_ROOT_TABLE: flatbuffers::VOffsetT = 12; + pub const VT_SERVICES: flatbuffers::VOffsetT = 14; + pub const VT_ADVANCED_FEATURES: flatbuffers::VOffsetT = 16; + pub const VT_FBS_FILES: flatbuffers::VOffsetT = 18; + + #[inline] + pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Schema { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args SchemaArgs<'args>, + ) -> flatbuffers::WIPOffset> { + let mut builder = SchemaBuilder::new(_fbb); + builder.add_advanced_features(args.advanced_features); + if let Some(x) = args.fbs_files { + builder.add_fbs_files(x); + } + if let Some(x) = args.services { + builder.add_services(x); + } + if let Some(x) = args.root_table { + builder.add_root_table(x); + } + if let Some(x) = args.file_ext { + builder.add_file_ext(x); + } + if let Some(x) = args.file_ident { + builder.add_file_ident(x); + } + if let Some(x) = args.enums { + builder.add_enums(x); + } + if let Some(x) = 
args.objects { + builder.add_objects(x); + } + builder.finish() + } + + #[inline] + pub fn objects(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>, + >>(Schema::VT_OBJECTS, None) + .unwrap() + } + } + #[inline] + pub fn enums(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>, + >>(Schema::VT_ENUMS, None) + .unwrap() + } + } + #[inline] + pub fn file_ident(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Schema::VT_FILE_IDENT, None) + } + } + #[inline] + pub fn file_ext(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Schema::VT_FILE_EXT, None) + } + } + #[inline] + pub fn root_table(&self) -> Option> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::>(Schema::VT_ROOT_TABLE, None) + } + } + #[inline] + pub fn services( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Schema::VT_SERVICES, None) + } + } + #[inline] + pub fn advanced_features(&self) -> AdvancedFeatures { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab + .get::(Schema::VT_ADVANCED_FEATURES, Some(Default::default())) + .unwrap() + } + } + /// All the files used in this compilation. Files are relative to where + /// flatc was invoked. 
+ #[inline] + pub fn fbs_files( + &self, + ) -> Option>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { + self._tab.get::>, + >>(Schema::VT_FBS_FILES, None) + } + } + } + + impl flatbuffers::Verifiable for Schema<'_> { + #[inline] + fn run_verifier( + v: &mut flatbuffers::Verifier, + pos: usize, + ) -> Result<(), flatbuffers::InvalidFlatbuffer> { + use self::flatbuffers::Verifiable; + v.visit_table(pos)? + .visit_field::>, + >>("objects", Self::VT_OBJECTS, true)? + .visit_field::>, + >>("enums", Self::VT_ENUMS, true)? + .visit_field::>( + "file_ident", + Self::VT_FILE_IDENT, + false, + )? + .visit_field::>( + "file_ext", + Self::VT_FILE_EXT, + false, + )? + .visit_field::>( + "root_table", + Self::VT_ROOT_TABLE, + false, + )? + .visit_field::>, + >>("services", Self::VT_SERVICES, false)? + .visit_field::( + "advanced_features", + Self::VT_ADVANCED_FEATURES, + false, + )? + .visit_field::>, + >>("fbs_files", Self::VT_FBS_FILES, false)? 
+ .finish(); + Ok(()) + } + } + pub struct SchemaArgs<'a> { + pub objects: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub enums: Option< + flatbuffers::WIPOffset>>>, + >, + pub file_ident: Option>, + pub file_ext: Option>, + pub root_table: Option>>, + pub services: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + pub advanced_features: AdvancedFeatures, + pub fbs_files: Option< + flatbuffers::WIPOffset< + flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset>>, + >, + >, + } + impl<'a> Default for SchemaArgs<'a> { + #[inline] + fn default() -> Self { + SchemaArgs { + objects: None, // required field + enums: None, // required field + file_ident: None, + file_ext: None, + root_table: None, + services: None, + advanced_features: Default::default(), + fbs_files: None, + } + } + } + + pub struct SchemaBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, + } + impl<'a: 'b, 'b> SchemaBuilder<'a, 'b> { + #[inline] + pub fn add_objects( + &mut self, + objects: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Schema::VT_OBJECTS, objects); + } + #[inline] + pub fn add_enums( + &mut self, + enums: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Schema::VT_ENUMS, enums); + } + #[inline] + pub fn add_file_ident(&mut self, file_ident: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(Schema::VT_FILE_IDENT, file_ident); + } + #[inline] + pub fn add_file_ext(&mut self, file_ext: flatbuffers::WIPOffset<&'b str>) { + self.fbb_ + .push_slot_always::>(Schema::VT_FILE_EXT, file_ext); + } + #[inline] + pub fn add_root_table(&mut self, root_table: flatbuffers::WIPOffset>) { + self.fbb_ + .push_slot_always::>( + Schema::VT_ROOT_TABLE, + 
root_table, + ); + } + #[inline] + pub fn add_services( + &mut self, + services: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Schema::VT_SERVICES, services); + } + #[inline] + pub fn add_advanced_features(&mut self, advanced_features: AdvancedFeatures) { + self.fbb_.push_slot::( + Schema::VT_ADVANCED_FEATURES, + advanced_features, + Default::default(), + ); + } + #[inline] + pub fn add_fbs_files( + &mut self, + fbs_files: flatbuffers::WIPOffset< + flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset>>, + >, + ) { + self.fbb_ + .push_slot_always::>(Schema::VT_FBS_FILES, fbs_files); + } + #[inline] + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SchemaBuilder<'a, 'b> { + let start = _fbb.start_table(); + SchemaBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Schema::VT_OBJECTS, "objects"); + self.fbb_.required(o, Schema::VT_ENUMS, "enums"); + flatbuffers::WIPOffset::new(o.value()) + } + } + + impl core::fmt::Debug for Schema<'_> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut ds = f.debug_struct("Schema"); + ds.field("objects", &self.objects()); + ds.field("enums", &self.enums()); + ds.field("file_ident", &self.file_ident()); + ds.field("file_ext", &self.file_ext()); + ds.field("root_table", &self.root_table()); + ds.field("services", &self.services()); + ds.field("advanced_features", &self.advanced_features()); + ds.field("fbs_files", &self.fbs_files()); + ds.finish() + } + } + #[inline] + /// Verifies that a buffer of bytes contains a `Schema` + /// and returns it. + /// Note that verification is still experimental and may not + /// catch every error, or be maximally performant. For the + /// previous, unchecked, behavior use + /// `root_as_schema_unchecked`. 
+ pub fn root_as_schema(buf: &[u8]) -> Result { + flatbuffers::root::(buf) + } + #[inline] + /// Verifies that a buffer of bytes contains a size prefixed + /// `Schema` and returns it. + /// Note that verification is still experimental and may not + /// catch every error, or be maximally performant. For the + /// previous, unchecked, behavior use + /// `size_prefixed_root_as_schema_unchecked`. + pub fn size_prefixed_root_as_schema( + buf: &[u8], + ) -> Result { + flatbuffers::size_prefixed_root::(buf) + } + #[inline] + /// Verifies, with the given options, that a buffer of bytes + /// contains a `Schema` and returns it. + /// Note that verification is still experimental and may not + /// catch every error, or be maximally performant. For the + /// previous, unchecked, behavior use + /// `root_as_schema_unchecked`. + pub fn root_as_schema_with_opts<'b, 'o>( + opts: &'o flatbuffers::VerifierOptions, + buf: &'b [u8], + ) -> Result, flatbuffers::InvalidFlatbuffer> { + flatbuffers::root_with_opts::>(opts, buf) + } + #[inline] + /// Verifies, with the given verifier options, that a buffer of + /// bytes contains a size prefixed `Schema` and returns + /// it. Note that verification is still experimental and may not + /// catch every error, or be maximally performant. For the + /// previous, unchecked, behavior use + /// `root_as_schema_unchecked`. + pub fn size_prefixed_root_as_schema_with_opts<'b, 'o>( + opts: &'o flatbuffers::VerifierOptions, + buf: &'b [u8], + ) -> Result, flatbuffers::InvalidFlatbuffer> { + flatbuffers::size_prefixed_root_with_opts::>(opts, buf) + } + #[inline] + /// Assumes, without verification, that a buffer of bytes contains a Schema and returns it. + /// # Safety + /// Callers must trust the given bytes do indeed contain a valid `Schema`. 
+ pub unsafe fn root_as_schema_unchecked(buf: &[u8]) -> Schema { + flatbuffers::root_unchecked::(buf) + } + #[inline] + /// Assumes, without verification, that a buffer of bytes contains a size prefixed Schema and returns it. + /// # Safety + /// Callers must trust the given bytes do indeed contain a valid size prefixed `Schema`. + pub unsafe fn size_prefixed_root_as_schema_unchecked(buf: &[u8]) -> Schema { + flatbuffers::size_prefixed_root_unchecked::(buf) + } + pub const SCHEMA_IDENTIFIER: &str = "BFBS"; + + #[inline] + pub fn schema_buffer_has_identifier(buf: &[u8]) -> bool { + flatbuffers::buffer_has_identifier(buf, SCHEMA_IDENTIFIER, false) + } + + #[inline] + pub fn schema_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + flatbuffers::buffer_has_identifier(buf, SCHEMA_IDENTIFIER, true) + } + + pub const SCHEMA_EXTENSION: &str = "bfbs"; + + #[inline] + pub fn finish_schema_buffer<'a, 'b>( + fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, + root: flatbuffers::WIPOffset>, + ) { + fbb.finish(root, Some(SCHEMA_IDENTIFIER)); + } + + #[inline] + pub fn finish_size_prefixed_schema_buffer<'a, 'b>( + fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, + root: flatbuffers::WIPOffset>, + ) { + fbb.finish_size_prefixed(root, Some(SCHEMA_IDENTIFIER)); + } +} // pub mod reflection From 4a224b8432654ae3196f90223d85ada100b91d02 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:46:18 +0200 Subject: [PATCH 08/16] adapting lints for codegen --- _typos.toml | 1 + scripts/check_large_files_allow_list.txt | 1 + scripts/lint.py | 4 +++- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/_typos.toml b/_typos.toml index 5bc80ff01e65..c943f61426b5 100644 --- a/_typos.toml +++ b/_typos.toml @@ -13,6 +13,7 @@ extend-exclude = [ [default.extend-words] lod = "lod" # level-of-detail teh = "teh" # part of @teh-cmc +ND = "ND" # np.NDArray # American English: grey = "gray" diff --git a/scripts/check_large_files_allow_list.txt 
b/scripts/check_large_files_allow_list.txt index d9882929b4e5..d3449cbe583d 100644 --- a/scripts/check_large_files_allow_list.txt +++ b/scripts/check_large_files_allow_list.txt @@ -1,2 +1,3 @@ Cargo.lock +crates/re_types_builder/src/reflection.rs crates/re_ui/data/Inter-Medium.otf diff --git a/scripts/lint.py b/scripts/lint.py index 879edf2b6a9f..5eb57c32b7f5 100755 --- a/scripts/lint.py +++ b/scripts/lint.py @@ -159,6 +159,7 @@ def is_empty(line: str) -> bool: or line.endswith("(") or line.endswith("\\") or line.endswith('r"') + or line.endswith('r#"') or line.endswith("]") ) @@ -443,10 +444,11 @@ def main() -> None: exclude_paths = { "./CODE_STYLE.md", + "./crates/re_types_builder/src/reflection.rs", # auto-generated "./examples/rust/objectron/src/objectron.rs", # auto-generated "./scripts/lint.py", # we contain all the patterns we are linting against - "./web_viewer/re_viewer_debug.js", # auto-generated by wasm_bindgen "./web_viewer/re_viewer.js", # auto-generated by wasm_bindgen + "./web_viewer/re_viewer_debug.js", # auto-generated by wasm_bindgen } for root, dirs, files in os.walk(".", topdown=True): From 148a7d8c54aa1cf426773f0916ffd496cd404848 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 17:56:36 +0200 Subject: [PATCH 09/16] unindent 0.1 everywhere --- Cargo.lock | 14 ++++---------- Cargo.toml | 1 + crates/re_renderer/Cargo.toml | 2 +- crates/re_types_builder/Cargo.toml | 2 +- 4 files changed, 7 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c846ddcb0499..151e21ab0a2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3729,7 +3729,7 @@ dependencies = [ "pyo3-build-config", "pyo3-ffi", "pyo3-macros", - "unindent 0.1.11", + "unindent", ] [[package]] @@ -3938,7 +3938,7 @@ dependencies = [ "glob", "sha2", "time", - "unindent 0.1.11", + "unindent", "walkdir", ] @@ -4227,7 +4227,7 @@ dependencies = [ "thiserror", "tobj", "type-map", - "unindent 0.2.1", + "unindent", "walkdir", "wasm-bindgen-futures", "web-sys", @@ -4507,7 
+4507,7 @@ dependencies = [ "flatbuffers", "indent", "re_build_tools", - "unindent 0.2.1", + "unindent", "xshell", ] @@ -5801,12 +5801,6 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" -[[package]] -name = "unindent" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa30f5ea51ff7edfc797c6d3f9ec8cbd8cfedef5371766b7181d33977f4814f" - [[package]] name = "untrusted" version = "0.7.1" diff --git a/Cargo.toml b/Cargo.toml index 425286b47697..13b1df640125 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -110,6 +110,7 @@ time = { version = "0.3", default-features = false, features = [ ] } tinyvec = { version = "1.6", features = ["alloc", "rustc_1_55"] } tokio = { version = "1.24", default-features = false } +unindent = "0.1" vec1 = "1.8" web-time = "0.2.0" wgpu = { version = "0.16.1" } diff --git a/crates/re_renderer/Cargo.toml b/crates/re_renderer/Cargo.toml index 88a9c4bce12c..5549b4f5a950 100644 --- a/crates/re_renderer/Cargo.toml +++ b/crates/re_renderer/Cargo.toml @@ -92,7 +92,7 @@ winit = "0.28.1" zip = { version = "0.6", default-features = false, features = ["deflate"] } # For tests: -unindent = "0.1" +unindent.workspace = true # native [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] diff --git a/crates/re_types_builder/Cargo.toml b/crates/re_types_builder/Cargo.toml index 01b3cc8efc4a..ffdd334219bd 100644 --- a/crates/re_types_builder/Cargo.toml +++ b/crates/re_types_builder/Cargo.toml @@ -25,7 +25,7 @@ arrow2.workspace = true convert_case = "0.6" flatbuffers = "23.0" indent = "0.1" -unindent = "0.2" +unindent.workspace = true xshell = "0.2" From e429e0b860bdd42f0da38b20f70fde3cdfacfe48 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sun, 11 Jun 2023 20:55:20 +0200 Subject: [PATCH 10/16] self-review --- crates/re_types_builder/build.rs | 6 +- crates/re_types_builder/src/arrow_registry.rs | 5 
+- crates/re_types_builder/src/codegen/mod.rs | 1 - crates/re_types_builder/src/codegen/python.rs | 12 +- crates/re_types_builder/src/codegen/rust.rs | 15 +- crates/re_types_builder/src/lib.rs | 3 +- crates/re_types_builder/src/objects.rs | 140 ++++++++++-------- 7 files changed, 95 insertions(+), 87 deletions(-) diff --git a/crates/re_types_builder/build.rs b/crates/re_types_builder/build.rs index 199ee8e2a8bb..5c949bf4f298 100644 --- a/crates/re_types_builder/build.rs +++ b/crates/re_types_builder/build.rs @@ -50,6 +50,8 @@ fn main() { } } + // NOTE: This requires `flatc` to be in $PATH, but only for contributors, not end users. + // Even for contributors, `flatc` won't be needed unless they edit some of the .fbs files. let sh = Shell::new().unwrap(); cmd!( sh, @@ -60,8 +62,8 @@ fn main() { // NOTE: We're purposefully ignoring the error here. // - // In the very unlikely chance that the user doesn't have `rustfmt` in their $PATH, there's - // still no good reason to fail the build. + // In the very unlikely chance that the user doesn't have the `fmt` component installed, + // there's still no good reason to fail the build. // // The CI will catch the unformatted file at PR time and complain appropriately anyhow. 
cmd!(sh, "cargo fmt").run().ok(); diff --git a/crates/re_types_builder/src/arrow_registry.rs b/crates/re_types_builder/src/arrow_registry.rs index dd07e7fba4d1..e0edc01cddf5 100644 --- a/crates/re_types_builder/src/arrow_registry.rs +++ b/crates/re_types_builder/src/arrow_registry.rs @@ -8,7 +8,6 @@ use crate::{ElementType, Object, Type}; // --- -// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions pub const ARROW_ATTR_TRANSPARENT: &str = "arrow.attr.transparent"; pub const ARROW_ATTR_SPARSE_UNION: &str = "arrow.attr.sparse_union"; @@ -56,7 +55,6 @@ impl ArrowRegistry { fn arrow_datatype_from_object(&self, obj: &Object) -> LazyDatatype { let is_struct = obj.is_struct(); - let is_transparent = obj.try_get_attr::(ARROW_ATTR_TRANSPARENT).is_some(); let num_fields = obj.fields.len(); @@ -170,6 +168,9 @@ impl ArrowRegistry { // --- Field --- /// A yet-to-be-resolved [`arrow2::datatypes::Field`]. +/// +/// Type resolution is a two-pass process as we first need to register all existing types before we +/// can denormalize their definitions into their parents. 
#[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct LazyField { /// Its name diff --git a/crates/re_types_builder/src/codegen/mod.rs b/crates/re_types_builder/src/codegen/mod.rs index 49bdc9d9278f..eb2e4d553265 100644 --- a/crates/re_types_builder/src/codegen/mod.rs +++ b/crates/re_types_builder/src/codegen/mod.rs @@ -15,7 +15,6 @@ pub trait CodeGenerator { pub const AUTOGEN_WARNING: &str = "NOTE: This file was autogenerated by re_types_builder; DO NOT EDIT."; -// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions pub const RERUN_ATTR_COMPONENT_REQUIRED: &str = "rerun.attr.component_required"; pub const RERUN_ATTR_COMPONENT_RECOMMENDED: &str = "rerun.attr.component_recommended"; pub const RERUN_ATTR_COMPONENT_OPTIONAL: &str = "rerun.attr.component_optional"; diff --git a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs index 9b6f85970405..2cba90d0a457 100644 --- a/crates/re_types_builder/src/codegen/python.rs +++ b/crates/re_types_builder/src/codegen/python.rs @@ -17,7 +17,6 @@ use crate::{ // NOTE: `rerun2` while we figure out how to integrate back into the main SDK. const MODULE_NAME: &str = "rerun2"; -// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions pub const ATTR_TRANSPARENT: &str = "python.attr.transparent"; pub const ATTR_ALIASES: &str = "python.attr.aliases"; pub const ATTR_ARRAY_ALIASES: &str = "python.attr.array_aliases"; @@ -440,7 +439,7 @@ impl QuotedObject { // --- Code generators --- fn quote_module_prelude() -> String { - // NOTE: All the extraneous stull will be cleaned up courtesy of `ruff`. + // NOTE: All the extraneous stuff will be cleaned up courtesy of `ruff`. 
unindent::unindent( r#" from __future__ import annotations @@ -722,7 +721,6 @@ fn quote_type_from_element_type(typ: &ElementType) -> String { ElementType::Object(fqname) => { let (from, class) = fqname.rsplit_once('.').unwrap_or(("", fqname.as_str())); if from.starts_with("rerun.datatypes") { - // NOTE: Only need the class name, pre-generated import clause takes care of the rest. format!("datatypes.{class}") } else if from.starts_with("rerun.components") { format!("components.{class}") @@ -808,7 +806,7 @@ fn quote_arrow_support_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> many_aliases={many_aliases}, arrow={arrow}, ) - "# + "# )) } ObjectKind::Archetype => String::new(), @@ -936,11 +934,7 @@ fn quote_arrow_datatype(datatype: &DataType) -> String { .join(", "); format!("pa.struct([{fields}])") } - DataType::Extension(_, datatype, _) => { - // TODO(cmc): not sure we need all that for the python backend since we already - // do the wrapping trick...? - quote_arrow_datatype(datatype) - } + DataType::Extension(_, datatype, _) => quote_arrow_datatype(datatype), _ => unimplemented!("{datatype:#?}"), // NOLINT } } diff --git a/crates/re_types_builder/src/codegen/rust.rs b/crates/re_types_builder/src/codegen/rust.rs index 588c5ebc7663..0a928185ae47 100644 --- a/crates/re_types_builder/src/codegen/rust.rs +++ b/crates/re_types_builder/src/codegen/rust.rs @@ -18,7 +18,6 @@ use crate::{ // --- -// TODO(cmc): find a way to extract attr name constants directly from the IDL definitions pub const ATTR_DERIVE: &str = "rust.attr.derive"; pub const ATTR_REPR: &str = "rust.attr.repr"; pub const ATTR_TUPLE_STRUCT: &str = "rust.attr.tuple_struct"; @@ -149,7 +148,7 @@ fn quote_objects( for module in mods.keys() { code.push_str(&format!("mod {module};\n")); - // NOTE: detect if someone manually created an extension file, and automatically + // Detect if someone manually created an extension file, and automatically // import it if so. 
let mut ext_path = out_path.join(format!("{module}_ext")); ext_path.set_extension("rs"); @@ -230,7 +229,7 @@ impl QuotedObject { typ: _, attrs: _, required, - // TODO(cmc): support for deprecation notices + // TODO(#2366): support for deprecation notices deprecated: _, } = field; @@ -356,7 +355,8 @@ fn quote_doc_from_docs(docs: &Docs) -> String { /// Returns type name as string and whether it was force unwrapped. fn quote_field_type_from_field(field: &ObjectField, unwrap: bool) -> (String, bool) { let mut unwrapped = false; - let typ = match &field.typ { + let typ = &field.typ; + let typ = match typ { Type::UInt8 => "u8".to_owned(), Type::UInt16 => "u16".to_owned(), Type::UInt32 => "u32".to_owned(), @@ -366,10 +366,9 @@ fn quote_field_type_from_field(field: &ObjectField, unwrap: bool) -> (String, bo Type::Int32 => "i32".to_owned(), Type::Int64 => "i64".to_owned(), Type::Bool => "bool".to_owned(), - Type::Float16 => unimplemented!("ResolvedType::Float16"), // NOLINT + Type::Float16 => unimplemented!("{typ:#?}"), // NOLINT Type::Float32 => "f32".to_owned(), Type::Float64 => "f64".to_owned(), - // TODO(cmc): ref for deserialization? Type::String => "String".to_owned(), Type::Array { elem_type, length } => { let typ = quote_type_from_element_type(elem_type); @@ -406,10 +405,9 @@ fn quote_type_from_element_type(typ: &ElementType) -> String { ElementType::Int32 => "i32".to_owned(), ElementType::Int64 => "i64".to_owned(), ElementType::Bool => "bool".to_owned(), - ElementType::Float16 => unimplemented!("ResolvedType::Float16"), // NOLINT + ElementType::Float16 => unimplemented!("{typ:#?}"), // NOLINT ElementType::Float32 => "f32".to_owned(), ElementType::Float64 => "f64".to_owned(), - // TODO(cmc): ref for deserialization? 
ElementType::String => "String".to_owned(), ElementType::Object(fqname) => fqname.replace('.', "::").replace("rerun", "crate"), } @@ -536,6 +534,7 @@ fn quote_trait_impls_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> S #[allow(clippy::unimplemented)] fn to_arrow_datatypes() -> Vec {{ + // TODO(#2368): dump the arrow registry into the generated code unimplemented!("query the registry for all fqnames"); // NOLINT }} }} diff --git a/crates/re_types_builder/src/lib.rs b/crates/re_types_builder/src/lib.rs index b15e3704e1b0..23ee304525b7 100644 --- a/crates/re_types_builder/src/lib.rs +++ b/crates/re_types_builder/src/lib.rs @@ -61,6 +61,8 @@ //! //! Make sure to test the behavior of its output though: `re_types`! +// TODO(#2365): support for external IDL definitions + // --- // NOTE: Official generated code from flatbuffers; ignore _everything_. @@ -146,7 +148,6 @@ pub fn compile_binary_schemas( /// - `include_dir_path`: path to the root directory of the fbs definition tree. /// - `output_crate_path`: path to the root of the output crate. /// - `entrypoint_path`: path to the root file of the fbs definition tree. -/// - `source_hash`: optional sha256 hash of the source definition files. 
/// /// E.g.: /// ```no_run diff --git a/crates/re_types_builder/src/objects.rs b/crates/re_types_builder/src/objects.rs index 4f6168c9667d..343e40583ed1 100644 --- a/crates/re_types_builder/src/objects.rs +++ b/crates/re_types_builder/src/objects.rs @@ -124,6 +124,22 @@ pub enum ObjectKind { Archetype, } +impl ObjectKind { + // TODO(#2364): use an attr instead of the path + pub fn from_pkg_name(pkg_name: impl AsRef) -> Self { + let pkg_name = pkg_name.as_ref(); + if pkg_name.starts_with("rerun.datatypes") { + ObjectKind::Datatype + } else if pkg_name.starts_with("rerun.components") { + ObjectKind::Component + } else if pkg_name.starts_with("rerun.archetypes") { + ObjectKind::Archetype + } else { + panic!("unknown package {pkg_name:?}"); + } + } +} + /// A high-level representation of a flatbuffers object's documentation. #[derive(Debug, Clone)] pub struct Docs { @@ -256,18 +272,7 @@ impl Object { .unwrap(); let docs = Docs::from_raw_docs(obj.documentation()); - - let kind = if pkg_name.starts_with("rerun.datatypes") { - ObjectKind::Datatype - } else if pkg_name.starts_with("rerun.components") { - ObjectKind::Component - } else if pkg_name.starts_with("rerun.archetypes") { - ObjectKind::Archetype - } else { - // TODO(cmc): support IDL definitions from outside the repo - panic!("unknown package {pkg_name:?}"); - }; - + let kind = ObjectKind::from_pkg_name(&pkg_name); let attrs = Attributes::from_raw_attrs(obj.attributes()); let fields = { @@ -312,17 +317,7 @@ impl Object { .unwrap(); let docs = Docs::from_raw_docs(enm.documentation()); - - let kind = if pkg_name.starts_with("rerun.datatypes") { - ObjectKind::Datatype - } else if pkg_name.starts_with("rerun.components") { - ObjectKind::Component - } else if pkg_name.starts_with("rerun.archetypes") { - ObjectKind::Archetype - } else { - // TODO(cmc): support IDL definitions from outside the repo - panic!("unknown package {pkg_name:?}"); - }; + let kind = ObjectKind::from_pkg_name(&pkg_name); let utype = { if 
enm.underlying_type().base_type() == FbsBaseType::UType { @@ -453,7 +448,8 @@ pub struct ObjectField { /// Whether the field is deprecated. // - // TODO(cmc): implement custom attr to specify deprecation reason + // TODO(#2366): do something with this + // TODO(#2367): implement custom attr to specify deprecation reason pub deprecated: bool, } @@ -570,7 +566,7 @@ impl ObjectField { } } -/// The underlying type of a `ResolvedObjectField`. +/// The underlying type of an [`ObjectField`]. #[derive(Debug, Clone)] pub enum Type { UInt8, @@ -596,24 +592,35 @@ pub enum Type { Object(String), // fqname } +impl From for Type { + fn from(typ: ElementType) -> Self { + match typ { + ElementType::UInt8 => Self::UInt8, + ElementType::UInt16 => Self::UInt16, + ElementType::UInt32 => Self::UInt32, + ElementType::UInt64 => Self::UInt64, + ElementType::Int8 => Self::Int8, + ElementType::Int16 => Self::Int16, + ElementType::Int32 => Self::Int32, + ElementType::Int64 => Self::Int64, + ElementType::Bool => Self::Bool, + ElementType::Float16 => Self::Float16, + ElementType::Float32 => Self::Float32, + ElementType::Float64 => Self::Float64, + ElementType::String => Self::String, + ElementType::Object(fqname) => Self::Object(fqname), + } + } +} + impl Type { pub fn from_raw_type( enums: &[FbsEnum<'_>], objs: &[FbsObject<'_>], field_type: FbsType<'_>, ) -> Self { - fn flatten_scalar_wrappers(obj: &FbsObject<'_>) -> Type { - if obj.name().starts_with("fbs.scalars.") { - match obj.name() { - "fbs.scalars.Float32" => Type::Float32, - _ => unimplemented!(), // NOLINT - } - } else { - Type::Object(obj.name().to_owned()) - } - } - - match field_type.base_type() { + let typ = field_type.base_type(); + match typ { FbsBaseType::Bool => Self::Bool, FbsBaseType::Byte => Self::Int8, FbsBaseType::UByte => Self::UInt8, @@ -629,7 +636,7 @@ impl Type { FbsBaseType::String => Self::String, FbsBaseType::Obj => { let obj = &objs[field_type.index() as usize]; - flatten_scalar_wrappers(obj) + 
flatten_scalar_wrappers(obj).into() } FbsBaseType::Union => { let union = &enums[field_type.index() as usize]; @@ -654,10 +661,11 @@ impl Type { field_type.element(), ), }, - FbsBaseType::None => unimplemented!(), // NOLINT - FbsBaseType::UType => unimplemented!(), // NOLINT - FbsBaseType::Vector64 => unimplemented!(), // NOLINT - _ => unreachable!(), + FbsBaseType::None | FbsBaseType::UType | FbsBaseType::Vector64 => { + unimplemented!("{typ:#?}") // NOLINT + } // NOLINT + // NOTE: `FbsBaseType` isn't actually an enum, it's just a bunch of constants... + _ => unreachable!("{typ:#?}"), } } } @@ -691,18 +699,7 @@ impl ElementType { outer_type: FbsType<'_>, inner_type: FbsBaseType, ) -> Self { - /// Helper to turn wrapped scalars into actual scalars. - fn flatten_scalar_wrappers(obj: &FbsObject<'_>) -> ElementType { - if obj.name().starts_with("fbs.scalars.") { - match obj.name() { - "fbs.scalars.Float32" => ElementType::Float32, - _ => unimplemented!(), // NOLINT - } - } else { - ElementType::Object(obj.name().to_owned()) - } - } - + #[allow(clippy::match_same_arms)] match inner_type { FbsBaseType::Bool => Self::Bool, FbsBaseType::Byte => Self::Int8, @@ -713,7 +710,6 @@ impl ElementType { FbsBaseType::UInt => Self::UInt32, FbsBaseType::Long => Self::Int64, FbsBaseType::ULong => Self::UInt64, - // TODO(cmc): half support FbsBaseType::Float => Self::Float32, FbsBaseType::Double => Self::Float64, FbsBaseType::String => Self::String, @@ -721,15 +717,16 @@ impl ElementType { let obj = &objs[outer_type.index() as usize]; flatten_scalar_wrappers(obj) } - FbsBaseType::Union => unimplemented!(), // NOLINT + FbsBaseType::Union => unimplemented!("{inner_type:#?}"), // NOLINT // NOTE: flatbuffers doesn't support directly nesting multiple layers of arrays, they // always have to be wrapped into intermediate layers of structs or tables. 
- FbsBaseType::Array => unimplemented!(), // NOLINT - FbsBaseType::None => unimplemented!(), // NOLINT - FbsBaseType::UType => unimplemented!(), // NOLINT - FbsBaseType::Vector => unimplemented!(), // NOLINT - FbsBaseType::Vector64 => unimplemented!(), // NOLINT - _ => unreachable!(), + FbsBaseType::None + | FbsBaseType::UType + | FbsBaseType::Array + | FbsBaseType::Vector + | FbsBaseType::Vector64 => unreachable!("{inner_type:#?}"), + // NOTE: `FbsType` isn't actually an enum, it's just a bunch of constants... + _ => unreachable!("{inner_type:#?}"), } } } @@ -777,9 +774,10 @@ impl Attributes { value_str .parse() .with_context(|| { + let type_of_t = std::any::type_name::(); format!( "invalid `{name}` attribute for `{owner_fqname}`: \ - expected unsigned integer, got `{value_str}` instead" + expected {type_of_t}, got `{value_str}` instead" ) }) .unwrap() @@ -803,12 +801,26 @@ impl Attributes { value_str .parse() .with_context(|| { + let type_of_t = std::any::type_name::(); format!( "invalid `{name}` attribute for `{owner_fqname}`: \ - expected unsigned integer, got `{value_str}` instead" + expected {type_of_t}, got `{value_str}` instead" ) }) .unwrap(), ) } } + +/// Helper to turn wrapped scalars into actual scalars. 
+fn flatten_scalar_wrappers(obj: &FbsObject<'_>) -> ElementType { + let name = obj.name(); + if name.starts_with("fbs.scalars.") { + match name { + "fbs.scalars.Float32" => ElementType::Float32, + _ => unimplemented!("{name:#?}"), // NOLINT + } + } else { + ElementType::Object(name.to_owned()) + } +} From 24da1ab674c2fb523d849b1a03e45b8d239dc885 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Mon, 12 Jun 2023 11:13:24 +0200 Subject: [PATCH 11/16] adhering to py38+ style guide --- crates/re_types_builder/src/codegen/python.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs index 2cba90d0a457..4d06dcc60e86 100644 --- a/crates/re_types_builder/src/codegen/python.rs +++ b/crates/re_types_builder/src/codegen/python.rs @@ -294,7 +294,7 @@ impl QuotedObject { let typ = if field.required { typ } else { - format!("Optional[{typ}] = None") + format!("{typ} | None = None") }; code.push_str(&indent::indent_all_by(4, format!("{name}: {typ}\n"))); @@ -397,7 +397,7 @@ impl QuotedObject { // NOTE: It's always optional since only one of the fields can be set at a time. code.push_str(&indent::indent_all_by( 4, - format!("{name}: Optional[{typ}] = None\n"), + format!("{name}: {typ} | None = None\n"), )); code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); @@ -794,7 +794,7 @@ fn quote_arrow_support_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> class {many}(pa.ExtensionArray, {many}Ext): # type: ignore[misc] @staticmethod - def from_similar(data: Optional[{many_aliases}]): + def from_similar(data: {many_aliases} | None): if data is None: return {arrow}().wrap_array(pa.array([], type={arrow}().storage_type)) else: @@ -849,9 +849,9 @@ fn quote_builder_from_obj(objects: &Objects, obj: &Object) -> String { let (typ, unwrapped) = quote_field_type_from_field(objects, field, true); if unwrapped { // This was originally a vec/array! 
- format!("{}: Optional[{typ}ArrayLike] = None", field.name) + format!("{}: {typ}ArrayLike | None = None", field.name) } else { - format!("{}: Optional[{typ}Like] = None", field.name) + format!("{}: {typ}Like | None = None", field.name) } }) .collect::>() From afca0ab2078ff4c913c3fa4d64867686f4b4ed30 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 12 Jun 2023 17:25:01 +0200 Subject: [PATCH 12/16] vscode flatbuffer things. Fix comment typo --- .vscode/extensions.json | 35 +++++++++++++++--------------- .vscode/settings.json | 1 + crates/re_types_builder/src/lib.rs | 2 +- 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 32731cdfadeb..a6d1d9afb305 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,19 +1,20 @@ { - // See https://go.microsoft.com/fwlink/?LinkId=827846 - // for the documentation about the extensions.json format - "recommendations": [ - "charliermarsh.ruff", - "github.vscode-github-actions", - "ms-python.python", - "ms-vsliveshare.vsliveshare", - "polymeilex.wgsl", - "rust-lang.rust-analyzer", - "serayuzgur.crates", - "streetsidesoftware.code-spell-checker", - "tamasfe.even-better-toml", - "vadimcn.vscode-lldb", - "wayou.vscode-todo-highlight", - "webfreak.debug", - "zxh404.vscode-proto3", - ] + // See https://go.microsoft.com/fwlink/?LinkId=827846 + // for the documentation about the extensions.json format + "recommendations": [ + "charliermarsh.ruff", + "github.vscode-github-actions", + "ms-python.python", + "ms-vsliveshare.vsliveshare", + "polymeilex.wgsl", + "rust-lang.rust-analyzer", + "serayuzgur.crates", + "streetsidesoftware.code-spell-checker", + "tamasfe.even-better-toml", + "vadimcn.vscode-lldb", + "wayou.vscode-todo-highlight", + "webfreak.debug", + "zxh404.vscode-proto3", + "gaborv.flatbuffers" + ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index 34a059af4981..4a08b90c21f7 100644 --- a/.vscode/settings.json +++ 
b/.vscode/settings.json @@ -27,6 +27,7 @@ "bindgroup", "colormap", "emath", + "flatbuffers", "framebuffer", "hoverable", "ilog", diff --git a/crates/re_types_builder/src/lib.rs b/crates/re_types_builder/src/lib.rs index 23ee304525b7..436126e7b885 100644 --- a/crates/re_types_builder/src/lib.rs +++ b/crates/re_types_builder/src/lib.rs @@ -18,7 +18,7 @@ //! ####. 2. Run the semantic pass. //! //! The semantic pass transforms the low-level raw reflection data generated by the first phase -//! into higher level objects that are much easier to inspect/manipulate and overall friendler +//! into higher level objects that are much easier to inspect/manipulate and overall friendlier //! to work with. //! //! Look for `objects.rs`. From fa05337c8b7f0f03f03545ec01107bdb2da91ac8 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Tue, 13 Jun 2023 10:13:30 +0200 Subject: [PATCH 13/16] turn the inner from_similar into _from_similar to appease linters --- crates/re_types_builder/src/codegen/python.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs index 4d06dcc60e86..548c5ebfebd0 100644 --- a/crates/re_types_builder/src/codegen/python.rs +++ b/crates/re_types_builder/src/codegen/python.rs @@ -798,7 +798,7 @@ fn quote_arrow_support_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> if data is None: return {arrow}().wrap_array(pa.array([], type={arrow}().storage_type)) else: - return {many}Ext.from_similar( + return {many}Ext._from_similar( data, mono={mono}, mono_aliases={mono_aliases}, From 00375a2133a4e10e5f3e0a48b7c43b011d76b8a6 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Tue, 13 Jun 2023 11:21:13 +0200 Subject: [PATCH 14/16] generate __all__ everywhere to make python tools behave --- crates/re_types_builder/src/codegen/python.rs | 66 ++++++++++++++----- 1 file changed, 49 insertions(+), 17 deletions(-) diff --git 
a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs index 548c5ebfebd0..9c4bb20f72d4 100644 --- a/crates/re_types_builder/src/codegen/python.rs +++ b/crates/re_types_builder/src/codegen/python.rs @@ -93,17 +93,24 @@ fn quote_lib(out_path: impl AsRef, archetype_names: &[String]) -> PathBuf .unwrap(); let path = out_path.join("__init__.py"); + let all_names = archetype_names + .iter() + .map(|name| format!("{name:?}")) + .collect::>() + .join(", "); let archetype_names = archetype_names.join(", "); let mut code = String::new(); - // NOTE: noqa F401 (unused import) because while unnecessary these listings are - // very useful to look at. code += &unindent::unindent(&format!( r#" # {AUTOGEN_WARNING} - from .archetypes import {archetype_names} # noqa: F401 + from __future__ import annotations + + __all__ = [{all_names}] + + from .archetypes import {archetype_names} "# )); @@ -154,12 +161,19 @@ fn quote_objects( for (filepath, objs) in files { let names = objs .iter() - .map(|obj| match obj.kind { + .flat_map(|obj| match obj.kind { ObjectKind::Datatype | ObjectKind::Component => { let name = &obj.name; - format!("{name}, {name}Like, {name}Array, {name}ArrayLike, {name}Type") + + vec![ + format!("{name}"), + format!("{name}Like"), + format!("{name}Array"), + format!("{name}ArrayLike"), + format!("{name}Type"), + ] } - ObjectKind::Archetype => obj.name.clone(), + ObjectKind::Archetype => vec![obj.name.clone()], }) .collect::>(); @@ -167,10 +181,10 @@ fn quote_objects( // and archetypes separately (and even then it's a bit shady, eh). 
match mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) { std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().extend(names); + entry.get_mut().extend(names.iter().cloned()); } std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(names); + entry.insert(names.clone()); } }; @@ -182,6 +196,20 @@ fn quote_objects( let mut code = String::new(); code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); + let names = names + .into_iter() + .map(|name| format!("{name:?}")) + .collect::>() + .join(", "); + code.push_str(&unindent::unindent(&format!( + " + from __future__ import annotations + + __all__ = [{names}] + + ", + ))); + for obj in objs { code.push_str(&obj.code); code.push('\n'); @@ -197,20 +225,26 @@ fn quote_objects( let mut code = String::new(); + let all_names = mods + .iter() + .flat_map(|(_, names)| names.iter().map(|name| format!("{name:?}"))) + .collect::>() + .join(", "); + code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); - code.push_str(&unindent::unindent( + code.push_str(&unindent::unindent(&format!( " - # NOTE: - # - we use fully qualified paths to prevent lazy circular imports - # - `noqa F401` (unused import) everywhere because, while not strictly necessary, - # these imports are very nice for end users. + from __future__ import annotations + __all__ = [{all_names}] + + # NOTE: we use fully qualified paths to prevent lazy circular imports. ", - )); + ))); for (module, names) in &mods { let names = names.join(", "); - code.push_str(&format!("from .{module} import {names} # noqa: F401\n")); + code.push_str(&format!("from .{module} import {names}\n")); } filepaths.push(path.clone()); @@ -442,8 +476,6 @@ fn quote_module_prelude() -> String { // NOTE: All the extraneous stuff will be cleaned up courtesy of `ruff`. 
unindent::unindent( r#" - from __future__ import annotations - import numpy as np import numpy.typing as npt import pyarrow as pa From 68464a8098f3c46069a72d9d48bdb40bc96dd398 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Tue, 13 Jun 2023 12:21:49 +0200 Subject: [PATCH 15/16] make sure __all__ manifests are lexically sorted --- crates/re_types_builder/src/codegen/python.rs | 34 +++++++++---------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs index 9c4bb20f72d4..78f5c1e9cf93 100644 --- a/crates/re_types_builder/src/codegen/python.rs +++ b/crates/re_types_builder/src/codegen/python.rs @@ -93,11 +93,7 @@ fn quote_lib(out_path: impl AsRef, archetype_names: &[String]) -> PathBuf .unwrap(); let path = out_path.join("__init__.py"); - let all_names = archetype_names - .iter() - .map(|name| format!("{name:?}")) - .collect::>() - .join(", "); + let manifest = quote_manifest(archetype_names); let archetype_names = archetype_names.join(", "); let mut code = String::new(); @@ -108,7 +104,7 @@ fn quote_lib(out_path: impl AsRef, archetype_names: &[String]) -> PathBuf from __future__ import annotations - __all__ = [{all_names}] + __all__ = [{manifest}] from .archetypes import {archetype_names} "# @@ -196,16 +192,12 @@ fn quote_objects( let mut code = String::new(); code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); - let names = names - .into_iter() - .map(|name| format!("{name:?}")) - .collect::>() - .join(", "); + let manifest = quote_manifest(names); code.push_str(&unindent::unindent(&format!( " from __future__ import annotations - __all__ = [{names}] + __all__ = [{manifest}] ", ))); @@ -225,18 +217,14 @@ fn quote_objects( let mut code = String::new(); - let all_names = mods - .iter() - .flat_map(|(_, names)| names.iter().map(|name| format!("{name:?}"))) - .collect::>() - .join(", "); + let manifest = quote_manifest(mods.iter().flat_map(|(_, names)| 
names.iter())); code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); code.push_str(&unindent::unindent(&format!( " from __future__ import annotations - __all__ = [{all_names}] + __all__ = [{manifest}] # NOTE: we use fully qualified paths to prevent lazy circular imports. ", @@ -472,6 +460,16 @@ impl QuotedObject { // --- Code generators --- +fn quote_manifest(names: impl IntoIterator>) -> String { + let mut quoted_names: Vec<_> = names + .into_iter() + .map(|name| format!("{:?}", name.as_ref())) + .collect(); + quoted_names.sort(); + + quoted_names.join(", ") +} + fn quote_module_prelude() -> String { // NOTE: All the extraneous stuff will be cleaned up courtesy of `ruff`. unindent::unindent( From 3112d0789b1f5a5d6d0fb6a408708f93cee7f611 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Wed, 14 Jun 2023 10:49:25 +0200 Subject: [PATCH 16/16] adressing PR comments --- crates/re_types_builder/build.rs | 14 +- .../definitions/reflection.fbs | 3 + crates/re_types_builder/source_hash.txt | 2 +- crates/re_types_builder/src/arrow_registry.rs | 9 +- crates/re_types_builder/src/codegen/common.rs | 19 ++ crates/re_types_builder/src/codegen/mod.rs | 6 +- crates/re_types_builder/src/codegen/python.rs | 264 ++++++++---------- crates/re_types_builder/src/codegen/rust.rs | 142 +++++----- crates/re_types_builder/src/lib.rs | 99 ++++--- crates/re_types_builder/src/objects.rs | 6 +- scripts/publish_crates.sh | 1 + scripts/setup.sh | 14 + 12 files changed, 289 insertions(+), 290 deletions(-) diff --git a/crates/re_types_builder/build.rs b/crates/re_types_builder/build.rs index 5c949bf4f298..bac5e2d4deb6 100644 --- a/crates/re_types_builder/build.rs +++ b/crates/re_types_builder/build.rs @@ -3,8 +3,7 @@ use xshell::{cmd, Shell}; use re_build_tools::{ - compute_file_hash, is_tracked_env_var_set, read_versioning_hash, rerun_if_changed, - rerun_if_changed_or_doesnt_exist, write_versioning_hash, + compute_file_hash, is_tracked_env_var_set, read_versioning_hash, write_versioning_hash, }; // 
--- @@ -33,9 +32,14 @@ fn main() { return; } - rerun_if_changed_or_doesnt_exist(SOURCE_HASH_PATH); - rerun_if_changed(FBS_REFLECTION_DEFINITION_PATH); - + // We're building an actual build graph here, and Cargo has no idea about it. + // + // Worse: some nodes in our build graph actually output artifacts into the src/ directory, + // which Cargo always interprets as "need to rebuild everything ASAP", leading to an infinite + // feedback loop. + // + // For these reasons, we manually compute and track signature hashes for the graph nodes we + // depend on, and make sure to exit early if everything's already up to date. let cur_hash = read_versioning_hash(SOURCE_HASH_PATH); let new_hash = compute_file_hash(FBS_REFLECTION_DEFINITION_PATH); diff --git a/crates/re_types_builder/definitions/reflection.fbs b/crates/re_types_builder/definitions/reflection.fbs index 513311f1b9c3..cf6cb9e3c4e4 100644 --- a/crates/re_types_builder/definitions/reflection.fbs +++ b/crates/re_types_builder/definitions/reflection.fbs @@ -1,3 +1,6 @@ +// Copied verbatim from the official flatbuffers source tree: +// https://github.com/google/flatbuffers/blob/63b7b25289447313ab6e79191fa1733748dca0da/reflection/reflection.fbs + // This schema defines objects that represent a parsed schema, like // the binary version of a .fbs file. // This could be used to operate on unknown FlatBuffers at runtime. diff --git a/crates/re_types_builder/source_hash.txt b/crates/re_types_builder/source_hash.txt index f1766f06bb26..2d51980aefb3 100644 --- a/crates/re_types_builder/source_hash.txt +++ b/crates/re_types_builder/source_hash.txt @@ -1,4 +1,4 @@ # This is a sha256 hash for all direct and indirect dependencies of this crate's build script. # It can be safely removed at anytime to force the build script to run again. # Check out build.rs to see how it's computed. 
-72d936d50287d6c16d0c1b91f86bd74120642c8fc08e885f08dd2b92bb52e8a4 \ No newline at end of file +674450e64722effea9b29a84c34fab81b9d57497136a8f161561b8e19f7441b2 \ No newline at end of file diff --git a/crates/re_types_builder/src/arrow_registry.rs b/crates/re_types_builder/src/arrow_registry.rs index e0edc01cddf5..505d0ccd253a 100644 --- a/crates/re_types_builder/src/arrow_registry.rs +++ b/crates/re_types_builder/src/arrow_registry.rs @@ -4,12 +4,7 @@ use anyhow::Context as _; use arrow2::datatypes::{DataType, Field, UnionMode}; use std::collections::{BTreeMap, HashMap}; -use crate::{ElementType, Object, Type}; - -// --- - -pub const ARROW_ATTR_TRANSPARENT: &str = "arrow.attr.transparent"; -pub const ARROW_ATTR_SPARSE_UNION: &str = "arrow.attr.sparse_union"; +use crate::{ElementType, Object, Type, ARROW_ATTR_SPARSE_UNION, ARROW_ATTR_TRANSPARENT}; // --- Registry --- @@ -59,7 +54,7 @@ impl ArrowRegistry { let num_fields = obj.fields.len(); assert!( - !(is_transparent && (!is_struct || num_fields != 1)), + !is_transparent || (is_struct && num_fields == 1), "cannot have a transparent arrow object with any number of fields but 1: {:?} has {num_fields}", obj.fqname, ); diff --git a/crates/re_types_builder/src/codegen/common.rs b/crates/re_types_builder/src/codegen/common.rs index 4317b76c481c..d0bd52a5716c 100644 --- a/crates/re_types_builder/src/codegen/common.rs +++ b/crates/re_types_builder/src/codegen/common.rs @@ -39,3 +39,22 @@ pub fn quote_doc_from_docs(docs: &Docs, tags: &[&str]) -> Vec { lines } + +pub trait StringExt { + fn push_text(&mut self, text: impl AsRef, linefeeds: usize, indent: usize) -> &mut Self; + fn push_unindented_text(&mut self, text: impl AsRef, linefeeds: usize) -> &mut Self; +} + +impl StringExt for String { + fn push_text(&mut self, text: impl AsRef, linefeeds: usize, indent: usize) -> &mut Self { + self.push_str(&indent::indent_all_by(indent, text.as_ref())); + self.push_str(&vec!["\n"; linefeeds].join("")); + self + } + + fn 
push_unindented_text(&mut self, text: impl AsRef, linefeeds: usize) -> &mut Self { + self.push_str(&unindent::unindent(text.as_ref())); + self.push_str(&vec!["\n"; linefeeds].join("")); + self + } +} diff --git a/crates/re_types_builder/src/codegen/mod.rs b/crates/re_types_builder/src/codegen/mod.rs index eb2e4d553265..240fabd14e9a 100644 --- a/crates/re_types_builder/src/codegen/mod.rs +++ b/crates/re_types_builder/src/codegen/mod.rs @@ -15,14 +15,10 @@ pub trait CodeGenerator { pub const AUTOGEN_WARNING: &str = "NOTE: This file was autogenerated by re_types_builder; DO NOT EDIT."; -pub const RERUN_ATTR_COMPONENT_REQUIRED: &str = "rerun.attr.component_required"; -pub const RERUN_ATTR_COMPONENT_RECOMMENDED: &str = "rerun.attr.component_recommended"; -pub const RERUN_ATTR_COMPONENT_OPTIONAL: &str = "rerun.attr.component_optional"; - // --- mod common; -use self::common::quote_doc_from_docs; +use self::common::{quote_doc_from_docs, StringExt}; mod python; mod rust; diff --git a/crates/re_types_builder/src/codegen/python.rs b/crates/re_types_builder/src/codegen/python.rs index 78f5c1e9cf93..a695cbee5970 100644 --- a/crates/re_types_builder/src/codegen/python.rs +++ b/crates/re_types_builder/src/codegen/python.rs @@ -8,8 +8,9 @@ use std::{ }; use crate::{ - codegen::AUTOGEN_WARNING, ArrowRegistry, CodeGenerator, Docs, ElementType, Object, ObjectField, - ObjectKind, Objects, Type, + codegen::{StringExt as _, AUTOGEN_WARNING}, + ArrowRegistry, CodeGenerator, Docs, ElementType, Object, ObjectField, ObjectKind, Objects, + Type, PYTHON_ATTR_ALIASES, PYTHON_ATTR_ARRAY_ALIASES, PYTHON_ATTR_TRANSPARENT, }; // --- @@ -17,10 +18,6 @@ use crate::{ // NOTE: `rerun2` while we figure out how to integrate back into the main SDK. 
const MODULE_NAME: &str = "rerun2"; -pub const ATTR_TRANSPARENT: &str = "python.attr.transparent"; -pub const ATTR_ALIASES: &str = "python.attr.aliases"; -pub const ATTR_ARRAY_ALIASES: &str = "python.attr.array_aliases"; - pub struct PythonCodeGenerator { pkg_path: PathBuf, } @@ -138,16 +135,9 @@ fn quote_objects( } else { QuotedObject::from_union(arrow_registry, all_objects, obj) }; - let filepath = out_path.join(obj.filepath.file_name().unwrap()); - match files.entry(filepath.clone()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().push(obj); - } - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(vec![obj]); - } - }; + let filepath = out_path.join(obj.filepath.file_name().unwrap()); + files.entry(filepath.clone()).or_default().push(obj); } // (module_name, [object_name]) @@ -175,14 +165,9 @@ fn quote_objects( // NOTE: Isolating the file stem only works because we're handling datatypes, components // and archetypes separately (and even then it's a bit shady, eh). 
- match mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().extend(names.iter().cloned()); - } - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(names.clone()); - } - }; + mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) + .or_default() + .extend(names.iter().cloned()); filepaths.push(filepath.clone()); let mut file = std::fs::File::create(&filepath) @@ -190,21 +175,23 @@ fn quote_objects( .unwrap(); let mut code = String::new(); - code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); + code.push_text(&format!("# {AUTOGEN_WARNING}"), 2, 0); let manifest = quote_manifest(names); - code.push_str(&unindent::unindent(&format!( - " - from __future__ import annotations + code.push_unindented_text( + format!( + " + from __future__ import annotations - __all__ = [{manifest}] + __all__ = [{manifest}] - ", - ))); + ", + ), + 0, + ); for obj in objs { - code.push_str(&obj.code); - code.push('\n'); + code.push_text(&obj.code, 1, 0); } file.write_all(code.as_bytes()) .with_context(|| format!("{filepath:?}")) @@ -219,20 +206,23 @@ fn quote_objects( let manifest = quote_manifest(mods.iter().flat_map(|(_, names)| names.iter())); - code.push_str(&format!("# {AUTOGEN_WARNING}\n\n")); - code.push_str(&unindent::unindent(&format!( - " - from __future__ import annotations + code.push_text(&format!("# {AUTOGEN_WARNING}"), 2, 0); + code.push_unindented_text( + format!( + " + from __future__ import annotations - __all__ = [{manifest}] + __all__ = [{manifest}] - # NOTE: we use fully qualified paths to prevent lazy circular imports. - ", - ))); + # NOTE: we use fully qualified paths to prevent lazy circular imports. 
+ ", + ), + 0, + ); for (module, names) in &mods { let names = names.join(", "); - code.push_str(&format!("from .{module} import {names}\n")); + code.push_text(&format!("from .{module} import {names}"), 1, 0); } filepaths.push(path.clone()); @@ -272,26 +262,28 @@ impl QuotedObject { let mut code = String::new(); - code.push_str("e_module_prelude()); + code.push_text("e_module_prelude(), 0, 0); for clause in obj .fields .iter() .filter_map(quote_import_clauses_from_field) { - code.push_str(&clause); - code.push('\n'); + code.push_text(&clause, 1, 0); } - code.push_str(&unindent::unindent(&format!( - r#" + code.push_unindented_text( + format!( + r#" - @dataclass - class {name}: - "# - ))); + @dataclass + class {name}: + "# + ), + 0, + ); - code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + code.push_text(quote_doc_from_docs(docs), 0, 4); for field in fields { let ObjectField { @@ -319,39 +311,22 @@ impl QuotedObject { format!("{typ} | None = None") }; - code.push_str(&indent::indent_all_by(4, format!("{name}: {typ}\n"))); + code.push_text(format!("{name}: {typ}"), 1, 4); - code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + code.push_text(quote_doc_from_docs(docs), 0, 4); } - code.push_str(&indent::indent_all_by(4, quote_str_repr_from_obj(obj))); - code.push('\n'); - - code.push_str(&indent::indent_all_by( - 4, - quote_array_method_from_obj(objects, obj), - )); - code.push('\n'); - - code.push_str(&indent::indent_all_by( - 4, - quote_str_method_from_obj(objects, obj), - )); - code.push('\n'); + code.push_text(quote_str_repr_from_obj(obj), 1, 4); + code.push_text(quote_array_method_from_obj(objects, obj), 1, 4); + code.push_text(quote_str_method_from_obj(objects, obj), 1, 4); if obj.kind == ObjectKind::Archetype { - code.push_str(&indent::indent_all_by( - 4, - quote_builder_from_obj(objects, obj), - )); - code.push('\n'); + code.push_text(quote_builder_from_obj(objects, obj), 1, 4); } else { - 
code.push_str("e_aliases_from_object(obj)); - code.push('\n'); + code.push_text(quote_aliases_from_object(obj), 1, 4); } - code.push_str("e_arrow_support_from_obj(arrow_registry, obj)); - code.push('\n'); + code.push_text(quote_arrow_support_from_obj(arrow_registry, obj), 1, 4); let mut filepath = PathBuf::from(filepath); filepath.set_extension("py"); @@ -381,26 +356,28 @@ impl QuotedObject { let mut code = String::new(); - code.push_str("e_module_prelude()); + code.push_text("e_module_prelude(), 0, 0); for clause in obj .fields .iter() .filter_map(quote_import_clauses_from_field) { - code.push_str(&clause); - code.push('\n'); + code.push_text(&clause, 1, 0); } - code.push_str(&unindent::unindent(&format!( - r#" + code.push_unindented_text( + format!( + r#" - @dataclass - class {name}: - "# - ))); + @dataclass + class {name}: + "# + ), + 0, + ); - code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + code.push_text(quote_doc_from_docs(docs), 0, 4); for field in fields { let ObjectField { @@ -417,34 +394,16 @@ impl QuotedObject { let (typ, _) = quote_field_type_from_field(objects, field, false); // NOTE: It's always optional since only one of the fields can be set at a time. 
- code.push_str(&indent::indent_all_by( - 4, - format!("{name}: {typ} | None = None\n"), - )); + code.push_text(format!("{name}: {typ} | None = None"), 1, 4); - code.push_str(&indent::indent_all_by(4, quote_doc_from_docs(docs))); + code.push_text(quote_doc_from_docs(docs), 0, 4); } - code.push_str(&indent::indent_all_by(4, quote_str_repr_from_obj(obj))); - code.push('\n'); - - code.push_str(&indent::indent_all_by( - 4, - quote_array_method_from_obj(objects, obj), - )); - code.push('\n'); - - code.push_str(&indent::indent_all_by( - 4, - quote_str_method_from_obj(objects, obj), - )); - code.push('\n'); - - code.push_str("e_aliases_from_object(obj)); - code.push('\n'); - - code.push_str("e_arrow_support_from_obj(arrow_registry, obj)); - code.push('\n'); + code.push_text(quote_str_repr_from_obj(obj), 1, 4); + code.push_text(quote_array_method_from_obj(objects, obj), 1, 4); + code.push_text(quote_str_method_from_obj(objects, obj), 1, 4); + code.push_text(quote_aliases_from_object(obj), 1, 4); + code.push_text(quote_arrow_support_from_obj(arrow_registry, obj), 1, 4); let mut filepath = PathBuf::from(filepath); filepath.set_extension("py"); @@ -585,38 +544,43 @@ fn quote_str_method_from_obj(objects: &Objects, obj: &Object) -> String { fn quote_aliases_from_object(obj: &Object) -> String { assert!(obj.kind != ObjectKind::Archetype); - let aliases = obj.try_get_attr::(ATTR_ALIASES); + let aliases = obj.try_get_attr::(PYTHON_ATTR_ALIASES); let array_aliases = obj - .try_get_attr::(ATTR_ARRAY_ALIASES) + .try_get_attr::(PYTHON_ATTR_ARRAY_ALIASES) .unwrap_or_default(); let name = &obj.name; let mut code = String::new(); - code.push_str(&if let Some(aliases) = aliases { - unindent::unindent(&format!( + code.push_unindented_text( + &if let Some(aliases) = aliases { + format!( + r#" + {name}Like = Union[ + {name}, + {aliases} + ] + "#, + ) + } else { + format!("{name}Like = {name}") + }, + 1, + ); + + code.push_unindented_text( + format!( r#" - {name}Like = Union[ - {name}, - 
{aliases} + {name}ArrayLike = Union[ + {name}Like, + Sequence[{name}Like], + {array_aliases} ] - "#, - )) - } else { - format!("{name}Like = {name}\n") - }); - - code.push_str(&unindent::unindent(&format!( - r#" - {name}ArrayLike = Union[ - {name}Like, - Sequence[{name}Like], - {array_aliases} - ] - "#, - ))); + ), + 0, + ); code } @@ -657,6 +621,10 @@ fn quote_import_clauses_from_field(field: &ObjectField) -> Option { } /// Returns type name as string and whether it was force unwrapped. +/// +/// Specifying `unwrap = true` will unwrap the final type before returning it, e.g. `Vec` +/// becomes just `String`. +/// The returned boolean indicates whether there was anything to unwrap at all. fn quote_field_type_from_field( objects: &Objects, field: &ObjectField, @@ -713,7 +681,9 @@ fn quote_field_type_from_field( // TODO(cmc): it is a bit weird to be doing the transparency logic (which is language // agnostic) in a python specific quoting function... a static helper at the very least // would be nice. 
- let is_transparent = field.try_get_attr::(ATTR_TRANSPARENT).is_some(); + let is_transparent = field + .try_get_attr::(PYTHON_ATTR_TRANSPARENT) + .is_some(); if is_transparent { let target = objects.get(fqname); assert!( @@ -887,30 +857,34 @@ fn quote_builder_from_obj(objects: &Objects, obj: &Object) -> String { .collect::>() .join(", "); - code.push_str(&format!( - "def __init__(self, {required_args}, *, {optional_args}) -> None:\n" - )); + code.push_text( + format!("def __init__(self, {required_args}, *, {optional_args}) -> None:"), + 1, + 0, + ); - code.push_str(&indent::indent_all_by(4, "# Required components\n")); + code.push_text("# Required components", 1, 4); for field in required { let name = &field.name; let (typ, _) = quote_field_type_from_field(objects, field, true); - code.push_str(&indent::indent_all_by( + code.push_text( + format!("self.{name} = {typ}Array.from_similar({name})"), + 1, 4, - format!("self.{name} = {typ}Array.from_similar({name})\n"), - )); + ); } code.push('\n'); - code.push_str(&indent::indent_all_by(4, "# Optional components\n")); + code.push_text("# Optional components\n", 1, 4); for field in optional { let name = &field.name; let (typ, _) = quote_field_type_from_field(objects, field, true); - code.push_str(&indent::indent_all_by( + code.push_text( + format!("self.{name} = {typ}Array.from_similar({name})"), + 1, 4, - format!("self.{name} = {typ}Array.from_similar({name})\n"), - )); + ); } code diff --git a/crates/re_types_builder/src/codegen/rust.rs b/crates/re_types_builder/src/codegen/rust.rs index 0a928185ae47..ae87dedf9398 100644 --- a/crates/re_types_builder/src/codegen/rust.rs +++ b/crates/re_types_builder/src/codegen/rust.rs @@ -8,20 +8,14 @@ use std::{ }; use crate::{ - codegen::{ - AUTOGEN_WARNING, RERUN_ATTR_COMPONENT_OPTIONAL, RERUN_ATTR_COMPONENT_RECOMMENDED, - RERUN_ATTR_COMPONENT_REQUIRED, - }, + codegen::{StringExt as _, AUTOGEN_WARNING}, ArrowRegistry, CodeGenerator, Docs, ElementType, Object, ObjectField, 
ObjectKind, Objects, - Type, + Type, RERUN_ATTR_COMPONENT_OPTIONAL, RERUN_ATTR_COMPONENT_RECOMMENDED, + RERUN_ATTR_COMPONENT_REQUIRED, RUST_ATTR_DERIVE, RUST_ATTR_REPR, RUST_ATTR_TUPLE_STRUCT, }; // --- -pub const ATTR_DERIVE: &str = "rust.attr.derive"; -pub const ATTR_REPR: &str = "rust.attr.repr"; -pub const ATTR_TUPLE_STRUCT: &str = "rust.attr.tuple_struct"; - pub struct RustCodeGenerator { crate_path: PathBuf, } @@ -92,15 +86,7 @@ fn quote_objects( }; let filepath = out_path.join(obj.filepath.file_name().unwrap()); - - match files.entry(filepath.clone()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().push(obj); - } - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(vec![obj]); - } - }; + files.entry(filepath.clone()).or_default().push(obj); } // (module_name, [object_name]) @@ -111,14 +97,9 @@ fn quote_objects( // NOTE: Isolating the file stem only works because we're handling datatypes, components // and archetypes separately (and even then it's a bit shady, eh). 
let names = objs.iter().map(|obj| obj.name.clone()).collect::>(); - match mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().extend(names); - } - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(names); - } - }; + mods.entry(filepath.file_stem().unwrap().to_string_lossy().to_string()) + .or_default() + .extend(names); filepaths.push(filepath.clone()); let mut file = std::fs::File::create(&filepath) @@ -126,11 +107,10 @@ fn quote_objects( .unwrap(); let mut code = String::new(); - code.push_str(&format!("// {AUTOGEN_WARNING}\n\n")); + code.push_text(format!("// {AUTOGEN_WARNING}"), 2, 0); for obj in objs { - code.push_str(&obj.code); - code.push('\n'); + code.push_text(&obj.code, 1, 0); } file.write_all(code.as_bytes()) .with_context(|| format!("{filepath:?}")) @@ -143,17 +123,17 @@ fn quote_objects( let mut code = String::new(); - code.push_str(&format!("// {AUTOGEN_WARNING}\n\n")); + code.push_text(format!("// {AUTOGEN_WARNING}"), 2, 0); for module in mods.keys() { - code.push_str(&format!("mod {module};\n")); + code.push_text(format!("mod {module};"), 1, 0); // Detect if someone manually created an extension file, and automatically // import it if so. 
let mut ext_path = out_path.join(format!("{module}_ext")); ext_path.set_extension("rs"); if ext_path.exists() { - code.push_str(&format!("mod {module}_ext;\n")); + code.push_text(format!("mod {module}_ext;"), 1, 0); } } @@ -161,7 +141,7 @@ fn quote_objects( for (module, names) in &mods { let names = names.join(", "); - code.push_str(&format!("pub use self::{module}::{{{names}}};\n")); + code.push_text(format!("pub use self::{module}::{{{names}}};"), 1, 0); } filepaths.push(path.clone()); @@ -200,23 +180,21 @@ impl QuotedObject { let mut code = String::new(); - code.push_str("e_doc_from_docs(docs)); + code.push_text("e_doc_from_docs(docs), 0, 0); if let Some(clause) = quote_derive_clause_from_obj(obj) { - code.push_str(&clause); - code.push('\n'); + code.push_text(&clause, 1, 0); } if let Some(clause) = quote_repr_clause_from_obj(obj) { - code.push_str(&clause); - code.push('\n'); + code.push_text(&clause, 1, 0); } let is_tuple_struct = is_tuple_struct_from_obj(obj); if is_tuple_struct { - code.push_str(&format!("pub struct {name}(")); + code.push_text(&format!("pub struct {name}("), 0, 0); } else { - code.push_str(&format!("pub struct {name} {{\n")); + code.push_text(&format!("pub struct {name} {{"), 1, 0); } for field in fields { @@ -233,7 +211,7 @@ impl QuotedObject { deprecated: _, } = field; - code.push_str("e_doc_from_docs(docs)); + code.push_text("e_doc_from_docs(docs), 0, 0); let (typ, _) = quote_field_type_from_field(field, false); let typ = if *required { @@ -243,9 +221,9 @@ impl QuotedObject { }; if is_tuple_struct { - code.push_str(&format!("pub {typ}")); + code.push_text(&format!("pub {typ}"), 0, 0); } else { - code.push_str(&format!("pub {name}: {typ},\n\n")); + code.push_text(&format!("pub {name}: {typ},"), 2, 0); } } @@ -255,11 +233,10 @@ impl QuotedObject { code += "}\n\n"; } - code.push_str("e_trait_impls_from_obj(arrow_registry, obj)); - code.push('\n'); + code.push_text("e_trait_impls_from_obj(arrow_registry, obj), 1, 0); if kind == 
&ObjectKind::Archetype { - code.push_str("e_builder_from_obj(obj)); + code.push_text("e_builder_from_obj(obj), 0, 0); } let mut filepath = PathBuf::from(filepath); @@ -289,18 +266,16 @@ impl QuotedObject { let mut code = String::new(); - code.push_str("e_doc_from_docs(docs)); + code.push_text("e_doc_from_docs(docs), 0, 0); if let Some(clause) = quote_derive_clause_from_obj(obj) { - code.push_str(&clause); - code.push('\n'); + code.push_text(&clause, 1, 0); } if let Some(clause) = quote_repr_clause_from_obj(obj) { - code.push_str(&clause); - code.push('\n'); + code.push_text(&clause, 1, 0); } - code.push_str(&format!("pub enum {name} {{\n")); + code.push_text(&format!("pub enum {name} {{"), 1, 0); for field in fields { let ObjectField { @@ -315,17 +290,16 @@ impl QuotedObject { deprecated: _, } = field; - code.push_str("e_doc_from_docs(docs)); + code.push_text("e_doc_from_docs(docs), 0, 0); let (typ, _) = quote_field_type_from_field(field, false); - code.push_str(&format!("{name}({typ}),\n\n")); + code.push_text(&format!("{name}({typ}),"), 2, 0); } code += "}\n\n"; - code.push_str("e_trait_impls_from_obj(arrow_registry, obj)); - code.push('\n'); + code.push_text("e_trait_impls_from_obj(arrow_registry, obj), 1, 0); let mut filepath = PathBuf::from(filepath); filepath.set_extension("rs"); @@ -353,6 +327,10 @@ fn quote_doc_from_docs(docs: &Docs) -> String { } /// Returns type name as string and whether it was force unwrapped. +/// +/// Specifying `unwrap = true` will unwrap the final type before returning it, e.g. `Vec` +/// becomes just `String`. +/// The returned boolean indicates whether there was anything to unwrap at all. 
fn quote_field_type_from_field(field: &ObjectField, unwrap: bool) -> (String, bool) { let mut unwrapped = false; let typ = &field.typ; @@ -414,19 +392,19 @@ fn quote_type_from_element_type(typ: &ElementType) -> String { } fn quote_derive_clause_from_obj(obj: &Object) -> Option { - obj.try_get_attr::(ATTR_DERIVE) + obj.try_get_attr::(RUST_ATTR_DERIVE) .map(|what| format!("#[derive({what})]")) } fn quote_repr_clause_from_obj(obj: &Object) -> Option { - obj.try_get_attr::(ATTR_REPR) + obj.try_get_attr::(RUST_ATTR_REPR) .map(|what| format!("#[repr({what})]")) } fn is_tuple_struct_from_obj(obj: &Object) -> bool { obj.is_struct() && obj.fields.len() == 1 - && obj.try_get_attr::(ATTR_TUPLE_STRUCT).is_some() + && obj.try_get_attr::(RUST_ATTR_TUPLE_STRUCT).is_some() } fn quote_trait_impls_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) -> String { @@ -571,7 +549,7 @@ fn quote_builder_from_obj(obj: &Object) -> String { let mut code = String::new(); - code.push_str(&format!("impl {name} {{\n")); + code.push_text(&format!("impl {name} {{"), 1, 0); { // --- impl new() --- @@ -591,7 +569,7 @@ fn quote_builder_from_obj(obj: &Object) -> String { }) .collect::>() .join(", "); - code.push_str(&format!("pub fn new({new_params}) -> Self {{\n")); + code.push_text(&format!("pub fn new({new_params}) -> Self {{"), 1, 0); { code += "Self {\n"; { @@ -599,16 +577,20 @@ fn quote_builder_from_obj(obj: &Object) -> String { let (_, unwrapped) = quote_field_type_from_field(field, true); if unwrapped { // This was originally a vec/array! 
- code.push_str(&format!( - "{}: {}.into_iter().map(Into::into).collect(),\n", - field.name, field.name - )); + code.push_text( + &format!( + "{}: {}.into_iter().map(Into::into).collect(),", + field.name, field.name + ), + 1, + 0, + ); } else { - code.push_str(&format!("{}: {}.into(),\n", field.name, field.name)); + code.push_text(&format!("{}: {}.into(),", field.name, field.name), 1, 0); } } for field in &optional { - code.push_str(&format!("{}: None,\n", field.name)); + code.push_text(&format!("{}: None,", field.name), 1, 0); } } code += "}\n"; @@ -623,21 +605,27 @@ fn quote_builder_from_obj(obj: &Object) -> String { if unwrapped { // This was originally a vec/array! - code.push_str(&format!( - "pub fn with_{name}(mut self, {name}: impl IntoIterator>) -> Self {{\n", - )); + code.push_text(&format!( + "pub fn with_{name}(mut self, {name}: impl IntoIterator>) -> Self {{", + ), 1, 0); { - code.push_str(&format!( - "self.{name} = Some({name}.into_iter().map(Into::into).collect());\n" - )); + code.push_text( + &format!( + "self.{name} = Some({name}.into_iter().map(Into::into).collect());" + ), + 1, + 0, + ); code += "self\n"; } } else { - code.push_str(&format!( - "pub fn with_{name}(mut self, {name}: impl Into<{typ}>) -> Self {{\n", - )); + code.push_text( + &format!("pub fn with_{name}(mut self, {name}: impl Into<{typ}>) -> Self {{",), + 1, + 0, + ); { - code.push_str(&format!("self.{name} = Some({name}.into());\n")); + code.push_text(&format!("self.{name} = Some({name}.into());"), 1, 0); code += "self\n"; } } diff --git a/crates/re_types_builder/src/lib.rs b/crates/re_types_builder/src/lib.rs index 436126e7b885..71669189d7f6 100644 --- a/crates/re_types_builder/src/lib.rs +++ b/crates/re_types_builder/src/lib.rs @@ -99,12 +99,31 @@ pub use self::objects::{ Attributes, Docs, ElementType, Object, ObjectField, ObjectKind, Objects, Type, }; +// --- Attributes --- + +pub const ARROW_ATTR_TRANSPARENT: &str = "arrow.attr.transparent"; +pub const ARROW_ATTR_SPARSE_UNION: 
&str = "arrow.attr.sparse_union"; + +pub const RERUN_ATTR_COMPONENT_REQUIRED: &str = "rerun.attr.component_required"; +pub const RERUN_ATTR_COMPONENT_RECOMMENDED: &str = "rerun.attr.component_recommended"; +pub const RERUN_ATTR_COMPONENT_OPTIONAL: &str = "rerun.attr.component_optional"; + +pub const PYTHON_ATTR_TRANSPARENT: &str = "python.attr.transparent"; +pub const PYTHON_ATTR_ALIASES: &str = "python.attr.aliases"; +pub const PYTHON_ATTR_ARRAY_ALIASES: &str = "python.attr.array_aliases"; + +pub const RUST_ATTR_DERIVE: &str = "rust.attr.derive"; +pub const RUST_ATTR_REPR: &str = "rust.attr.repr"; +pub const RUST_ATTR_TUPLE_STRUCT: &str = "rust.attr.tuple_struct"; + // --- Entrypoints --- use std::path::{Path, PathBuf}; /// Compiles binary reflection dumps from flatbuffers definitions. /// +/// Requires `flatc` available in $PATH. +/// /// Panics on error. /// /// - `include_dir_path`: path to the root directory of the fbs definition tree. @@ -141,27 +160,14 @@ pub fn compile_binary_schemas( .unwrap(); } -/// Generates Rust code from a set of flatbuffers definitions. -/// -/// Panics on error. -/// -/// - `include_dir_path`: path to the root directory of the fbs definition tree. -/// - `output_crate_path`: path to the root of the output crate. -/// - `entrypoint_path`: path to the root file of the fbs definition tree. -/// -/// E.g.: -/// ```no_run -/// re_types_builder::generate_rust_code( -/// "./definitions", -/// ".", -/// "./definitions/rerun/archetypes.fbs", -/// ); -/// ``` -pub fn generate_rust_code( +/// Handles the first 3 language-agnostic passes of the codegen pipeline: +/// 1. Generate binary reflection dumps for our definitions. +/// 2. Run the semantic pass +/// 3. 
Compute the Arrow registry +fn generate_lang_agnostic( include_dir_path: impl AsRef, - output_crate_path: impl AsRef, entrypoint_path: impl AsRef, -) { +) -> (Objects, ArrowRegistry) { use xshell::Shell; let sh = Shell::new().unwrap(); @@ -189,6 +195,33 @@ pub fn generate_rust_code( arrow_registry.register(obj); } + (objects, arrow_registry) +} + +/// Generates Rust code from a set of flatbuffers definitions. +/// +/// Panics on error. +/// +/// - `include_dir_path`: path to the root directory of the fbs definition tree. +/// - `output_crate_path`: path to the root of the output crate. +/// - `entrypoint_path`: path to the root file of the fbs definition tree. +/// +/// E.g.: +/// ```no_run +/// re_types_builder::generate_rust_code( +/// "./definitions", +/// ".", +/// "./definitions/rerun/archetypes.fbs", +/// ); +/// ``` +pub fn generate_rust_code( + include_dir_path: impl AsRef, + output_crate_path: impl AsRef, + entrypoint_path: impl AsRef, +) { + // passes 1 through 3: bfbs, semantic, arrow registry + let (objects, arrow_registry) = generate_lang_agnostic(include_dir_path, entrypoint_path); + // generate rust code let mut gen = RustCodeGenerator::new(output_crate_path.as_ref()); let _filepaths = gen.quote(&objects, &arrow_registry); @@ -215,32 +248,8 @@ pub fn generate_python_code( output_pkg_path: impl AsRef, entrypoint_path: impl AsRef, ) { - use xshell::Shell; - - let sh = Shell::new().unwrap(); - let tmp = sh.create_temp_dir().unwrap(); - - let entrypoint_path = entrypoint_path.as_ref(); - let entrypoint_filename = entrypoint_path.file_name().unwrap(); - - // generate bfbs definitions - compile_binary_schemas(include_dir_path, tmp.path(), entrypoint_path); - - let mut binary_entrypoint_path = PathBuf::from(entrypoint_filename); - binary_entrypoint_path.set_extension("bfbs"); - - // semantic pass: high level objects from low-level reflection data - let objects = Objects::from_buf( - sh.read_binary_file(tmp.path().join(binary_entrypoint_path)) - .unwrap() - 
.as_slice(), - ); - - // create and fill out arrow registry - let mut arrow_registry = ArrowRegistry::default(); - for obj in objects.ordered_objects(None) { - arrow_registry.register(obj); - } + // passes 1 through 3: bfbs, semantic, arrow registry + let (objects, arrow_registry) = generate_lang_agnostic(include_dir_path, entrypoint_path); // generate python code let mut gen = PythonCodeGenerator::new(output_pkg_path.as_ref()); diff --git a/crates/re_types_builder/src/objects.rs b/crates/re_types_builder/src/objects.rs index 343e40583ed1..677779a7c7b3 100644 --- a/crates/re_types_builder/src/objects.rs +++ b/crates/re_types_builder/src/objects.rs @@ -443,7 +443,7 @@ pub struct ObjectField { /// Whether the field is required. /// - /// Always true for `struct` types. + /// Always true for IDL definitions using flatbuffers' `struct` type (as opposed to `table`). pub required: bool, /// Whether the field is deprecated. @@ -642,8 +642,6 @@ impl Type { let union = &enums[field_type.index() as usize]; Self::Object(union.name().to_owned()) } - // NOTE: flatbuffers doesn't support directly nesting multiple layers of arrays, they - // always have to be wrapped into intermediate layers of structs or tables. FbsBaseType::Array => Self::Array { elem_type: ElementType::from_raw_base_type( enums, @@ -718,8 +716,6 @@ impl ElementType { flatten_scalar_wrappers(obj) } FbsBaseType::Union => unimplemented!("{inner_type:#?}"), // NOLINT - // NOTE: flatbuffers doesn't support directly nesting multiple layers of arrays, they - // always have to be wrapped into intermediate layers of structs or tables. 
FbsBaseType::None | FbsBaseType::UType | FbsBaseType::Array diff --git a/scripts/publish_crates.sh b/scripts/publish_crates.sh index 56755e9dc44b..6ddd01b44758 100755 --- a/scripts/publish_crates.sh +++ b/scripts/publish_crates.sh @@ -90,6 +90,7 @@ export RERUN_IS_PUBLISHING=yes echo "Publishing crates…" +cargo publish $FLAGS -p re_types_builder cargo publish $FLAGS -p re_build_info cargo publish $FLAGS -p re_build_tools cargo publish $FLAGS -p re_log diff --git a/scripts/setup.sh b/scripts/setup.sh index 2c3efc3976bf..8897f4c0ceb3 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -31,6 +31,20 @@ elif [ -x "$(command -v dnf)" ]; then pkg-config fi +packagesNeeded='flatbuffers' +if [ -x "$(command -v brew)" ]; then brew install $packagesNeeded +elif [ -x "$(command -v port)" ]; then sudo port install $packagesNeeded +elif [ -x "$(command -v apt-get)" ]; then sudo apt-get -y install $packagesNeeded +elif [ -x "$(command -v dnf)" ]; then sudo dnf install $packagesNeeded +elif [ -x "$(command -v zypper)" ]; then sudo zypper install $packagesNeeded +elif [ -x "$(command -v apk)" ]; then sudo apk add --no-cache $packagesNeeded +elif [ -x "$(command -v winget)" ]; then sudo winget add --no-cache $packagesNeeded +elif [ -x "$(command -v pacman)" ]; then sudo pacman -S $packagesNeeded +else + echo "FAILED TO INSTALL PACKAGE: Package manager not found. You must manually install: $packagesNeeded">&2; + exit 1 +fi + # Needed to compile and check the code: rustup install 1.69.0 ./scripts/setup_web.sh