author Henri Chataing <henrichataing@google.com> 2023-05-23 18:43:11 +0000
committer Henri Chataing <henrichataing@google.com> 2023-05-26 21:04:52 +0000
commit d15a41aea19802c7642a648c739d2ce2d00be2a9 (patch)
tree e43f464125c1e64be2f5f1d60c3399f70698ace3
parent b8f35f3e915eae4a156942bc4450f57627ba63b0 (diff)
pdl: Migrate to external/rust/crates/pdl-compiler
Bug: 283153347
Test: m
Change-Id: I5909f0e778032942978a19dc9fda2e8a5e82f8aa
-rw-r--r--  system/gd/Android.bp  2
-rw-r--r--  system/rust/build.rs  6
-rw-r--r--  tools/pdl/Android.bp  538
-rw-r--r--  tools/pdl/CONTRIBUTING.md  33
-rw-r--r--  tools/pdl/Cargo.toml  35
-rw-r--r--  tools/pdl/LICENSE  202
-rw-r--r--  tools/pdl/OWNERS  6
-rw-r--r--  tools/pdl/README.md  33
-rw-r--r--  tools/pdl/doc/reference.md  682
-rw-r--r--  tools/pdl/scripts/Android.bp  44
-rwxr-xr-x  tools/pdl/scripts/generate_cxx_backend.py  1392
-rwxr-xr-x  tools/pdl/scripts/generate_cxx_backend_tests.py  319
-rwxr-xr-x  tools/pdl/scripts/generate_python_backend.py  1059
-rw-r--r--  tools/pdl/scripts/packet_runtime.h  157
-rw-r--r--  tools/pdl/scripts/pdl/ast.py  281
-rw-r--r--  tools/pdl/scripts/pdl/core.py  334
-rw-r--r--  tools/pdl/scripts/pdl/utils.py  39
-rw-r--r--  tools/pdl/src/analyzer.rs  2627
-rw-r--r--  tools/pdl/src/ast.rs  552
-rw-r--r--  tools/pdl/src/backends.rs  20
-rw-r--r--  tools/pdl/src/backends/intermediate.rs  537
-rw-r--r--  tools/pdl/src/backends/json.rs  23
-rw-r--r--  tools/pdl/src/backends/rust.rs  1585
-rw-r--r--  tools/pdl/src/backends/rust/parser.rs  793
-rw-r--r--  tools/pdl/src/backends/rust/preamble.rs  114
-rw-r--r--  tools/pdl/src/backends/rust/serializer.rs  390
-rw-r--r--  tools/pdl/src/backends/rust/types.rs  181
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/computed_values.rs  169
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/enums.rs  81
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/mod.rs  117
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/packet_parser.rs  363
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/packet_serializer.rs  315
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/preamble.rs  294
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/test.rs  336
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/test_preamble.rs  39
-rw-r--r--  tools/pdl/src/backends/rust_no_allocation/utils.rs  25
-rw-r--r--  tools/pdl/src/bin/generate-canonical-tests.rs  240
-rw-r--r--  tools/pdl/src/lint.rs  320
-rw-r--r--  tools/pdl/src/main.rs  121
-rw-r--r--  tools/pdl/src/parser.rs  669
-rw-r--r--  tools/pdl/src/pdl.pest  125
-rw-r--r--  tools/pdl/src/test_utils.rs  170
-rw-r--r--  tools/pdl/src/utils.rs  67
-rw-r--r--  tools/pdl/tests/canonical/be_test_vectors.json  4271
-rw-r--r--  tools/pdl/tests/canonical/le_rust_noalloc_test_file.pdl  610
-rw-r--r--  tools/pdl/tests/canonical/le_rust_test_file.pdl  573
-rw-r--r--  tools/pdl/tests/canonical/le_test_file.pdl  780
-rw-r--r--  tools/pdl/tests/canonical/le_test_vectors.json  4377
-rw-r--r--  tools/pdl/tests/custom_types.py  56
-rw-r--r--  tools/pdl/tests/examples/array-field.pdl  39
-rw-r--r--  tools/pdl/tests/examples/checksum-field.pdl  22
-rw-r--r--  tools/pdl/tests/examples/count-field.pdl  25
-rw-r--r--  tools/pdl/tests/examples/decl-scope.pdl  26
-rw-r--r--  tools/pdl/tests/examples/example.pdl  78
-rw-r--r--  tools/pdl/tests/examples/fixed-field.pdl  22
-rw-r--r--  tools/pdl/tests/examples/group-constraint.pdl  39
-rw-r--r--  tools/pdl/tests/examples/packet.pdl  52
-rw-r--r--  tools/pdl/tests/examples/recurse.pdl  38
-rw-r--r--  tools/pdl/tests/examples/size-field.pdl  58
-rw-r--r--  tools/pdl/tests/examples/struct.pdl  52
-rw-r--r--  tools/pdl/tests/examples/typedef-field.pdl  36
-rw-r--r--  tools/pdl/tests/generated/custom_field_declaration_big_endian.rs  85
-rw-r--r--  tools/pdl/tests/generated/custom_field_declaration_little_endian.rs  85
-rw-r--r--  tools/pdl/tests/generated/enum_declaration_big_endian.rs  400
-rw-r--r--  tools/pdl/tests/generated/enum_declaration_little_endian.rs  400
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_enum_array_big_endian.rs  206
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_enum_array_little_endian.rs  206
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_enum_big_endian.rs  198
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_enum_little_endian.rs  198
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_scalar_array_big_endian.rs  152
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_scalar_array_little_endian.rs  152
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_scalar_big_endian.rs  149
-rw-r--r--  tools/pdl/tests/generated/packet_decl_24bit_scalar_little_endian.rs  149
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_enum_array_big_endian.rs  191
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_enum_array_little_endian.rs  191
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_enum_big_endian.rs  183
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_enum_little_endian.rs  183
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_scalar_array_big_endian.rs  152
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_scalar_array_little_endian.rs  152
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_scalar_big_endian.rs  146
-rw-r--r--  tools/pdl/tests/generated/packet_decl_64bit_scalar_little_endian.rs  146
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_enum_array_big_endian.rs  221
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_enum_array_little_endian.rs  221
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_enum_big_endian.rs  213
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_enum_little_endian.rs  213
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_scalar_array_big_endian.rs  152
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_scalar_array_little_endian.rs  152
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_scalar_big_endian.rs  146
-rw-r--r--  tools/pdl/tests/generated/packet_decl_8bit_scalar_little_endian.rs  146
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_dynamic_count_big_endian.rs  178
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_dynamic_count_little_endian.rs  178
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_dynamic_size_big_endian.rs  189
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_dynamic_size_little_endian.rs  189
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_big_endian.rs  212
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_little_endian.rs  212
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_big_endian.rs  224
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_little_endian.rs  224
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_with_padding_big_endian.rs  216
-rw-r--r--  tools/pdl/tests/generated/packet_decl_array_with_padding_little_endian.rs  216
-rw-r--r--  tools/pdl/tests/generated/packet_decl_child_packets_big_endian.rs  584
-rw-r--r--  tools/pdl/tests/generated/packet_decl_child_packets_little_endian.rs  584
-rw-r--r--  tools/pdl/tests/generated/packet_decl_complex_scalars_big_endian.rs  218
-rw-r--r--  tools/pdl/tests/generated/packet_decl_complex_scalars_little_endian.rs  218
-rw-r--r--  tools/pdl/tests/generated/packet_decl_custom_field_big_endian.rs  185
-rw-r--r--  tools/pdl/tests/generated/packet_decl_custom_field_little_endian.rs  185
-rw-r--r--  tools/pdl/tests/generated/packet_decl_empty_big_endian.rs  129
-rw-r--r--  tools/pdl/tests/generated/packet_decl_empty_little_endian.rs  129
-rw-r--r--  tools/pdl/tests/generated/packet_decl_fixed_enum_field_big_endian.rs  226
-rw-r--r--  tools/pdl/tests/generated/packet_decl_fixed_enum_field_little_endian.rs  226
-rw-r--r--  tools/pdl/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs  160
-rw-r--r--  tools/pdl/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs  160
-rw-r--r--  tools/pdl/tests/generated/packet_decl_grand_children_big_endian.rs  993
-rw-r--r--  tools/pdl/tests/generated/packet_decl_grand_children_little_endian.rs  993
-rw-r--r--  tools/pdl/tests/generated/packet_decl_mask_scalar_value_big_endian.rs  173
-rw-r--r--  tools/pdl/tests/generated/packet_decl_mask_scalar_value_little_endian.rs  173
-rw-r--r--  tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs  312
-rw-r--r--  tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs  312
-rw-r--r--  tools/pdl/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs  945
-rw-r--r--  tools/pdl/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs  945
-rw-r--r--  tools/pdl/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs  376
-rw-r--r--  tools/pdl/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs  376
-rw-r--r--  tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_big_endian.rs  201
-rw-r--r--  tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_little_endian.rs  201
-rw-r--r--  tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_big_endian.rs  208
-rw-r--r--  tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_little_endian.rs  208
-rw-r--r--  tools/pdl/tests/generated/packet_decl_payload_field_variable_size_big_endian.rs  235
-rw-r--r--  tools/pdl/tests/generated/packet_decl_payload_field_variable_size_little_endian.rs  235
-rw-r--r--  tools/pdl/tests/generated/packet_decl_reserved_field_big_endian.rs  139
-rw-r--r--  tools/pdl/tests/generated/packet_decl_reserved_field_little_endian.rs  139
-rw-r--r--  tools/pdl/tests/generated/packet_decl_simple_scalars_big_endian.rs  181
-rw-r--r--  tools/pdl/tests/generated/packet_decl_simple_scalars_little_endian.rs  181
-rw-r--r--  tools/pdl/tests/generated/preamble.rs  46
-rw-r--r--  tools/pdl/tests/generated/struct_decl_complex_scalars_big_endian.rs  126
-rw-r--r--  tools/pdl/tests/generated/struct_decl_complex_scalars_little_endian.rs  126
-rwxr-xr-x  tools/pdl/tests/generated_files_compile.sh  39
-rw-r--r--  tools/pdl/tests/python_generator_test.py  197
-rw-r--r--  tools/rootcanal/Android.bp  4
-rw-r--r--  tools/rootcanal/CMakeLists.txt  16
138 files changed, 14 insertions, 44885 deletions
diff --git a/system/gd/Android.bp b/system/gd/Android.bp
index b01953ee0a..5f76fb98cb 100644
--- a/system/gd/Android.bp
+++ b/system/gd/Android.bp
@@ -874,7 +874,7 @@ cc_library_host_shared {
genrule {
name: "gd_hci_packets_python3_gen",
defaults: ["pdl_python_generator_defaults"],
- cmd: "$(location :pdl) $(in) |" +
+ cmd: "$(location :pdlc) $(in) |" +
" $(location :pdl_python_generator)" +
" --output $(out) --custom-type-location blueberry.utils.bluetooth",
srcs: [
diff --git a/system/rust/build.rs b/system/rust/build.rs
index c4ccf5c05e..fced4d3223 100644
--- a/system/rust/build.rs
+++ b/system/rust/build.rs
@@ -1,7 +1,7 @@
//! Build file to generate packets
//!
-//! Run `cargo install .` in `tools/pdl` to ensure `pdl` is in your
-//! path.
+//! Run `cargo install --path .` in `external/rust/crates/pdl-compiler` to ensure `pdlc`
+//! is in your path.
use std::{
env,
fs::File,
@@ -14,7 +14,7 @@ fn main() {
let dest_path = Path::new(&out_dir).join("_packets.rs");
let dest_file = File::create(dest_path).unwrap();
- let pdl = Command::new("pdl")
+ let pdl = Command::new("pdlc")
.args(["--output-format", "rust_no_alloc", "src/packets.pdl"])
.stdout(Stdio::piped())
.spawn()
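For context, the generation step in `system/rust/build.rs` after this change is roughly the following. This is a sketch assembled from the hunk above, not the verbatim file: the `OUT_DIR` lookup and the final write of the generated code sit outside the hunk and are assumed here.

```
// Sketch of build.rs after the migration: the only functional change in the
// diff is that the `pdlc` binary (installed from
// external/rust/crates/pdl-compiler) is invoked instead of the old `pdl`.
use std::{
    env,
    fs::File,
    io::Write,
    path::Path,
    process::{Command, Stdio},
};

fn main() {
    // Assumed: OUT_DIR handling follows the usual build.rs pattern
    // (this part is not shown in the hunk).
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("_packets.rs");
    let mut dest_file = File::create(dest_path).unwrap();

    // Invoke the renamed compiler binary, exactly as in the diff.
    let pdl = Command::new("pdlc")
        .args(["--output-format", "rust_no_alloc", "src/packets.pdl"])
        .stdout(Stdio::piped())
        .spawn()
        .expect("pdlc not found; run `cargo install --path .` in external/rust/crates/pdl-compiler");

    // Assumed: collect the generated code and write it into _packets.rs.
    let output = pdl.wait_with_output().unwrap();
    assert!(output.status.success(), "pdlc failed");
    dest_file.write_all(&output.stdout).unwrap();
}
```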
diff --git a/tools/pdl/Android.bp b/tools/pdl/Android.bp
deleted file mode 100644
index 7ed24b4851..0000000000
--- a/tools/pdl/Android.bp
+++ /dev/null
@@ -1,538 +0,0 @@
-package {
- // See: http://go/android-license-faq
- // A large-scale-change added 'default_applicable_licenses' to import
- // all of the 'license_kinds' from "system_bt_license"
- // to get the below license kinds:
- // SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: ["system_bt_license"],
-}
-
-rust_defaults {
- name: "pdl_defaults",
- // LINT.IfChange
- rustlibs: [
- "libargh",
- "libcodespan_reporting",
- "libheck",
- "libpest",
- "libprettyplease",
- "libproc_macro2",
- "libquote",
- "libserde",
- "libserde_json",
- "libsyn",
- "libtempfile",
- ],
- proc_macros: [
- "libpest_derive",
- ],
- // LINT.ThenChange(Cargo.toml)
-}
-
-rust_binary_host {
- name: "pdl",
- defaults: ["pdl_defaults"],
- srcs: ["src/main.rs"],
- visibility: [
- "//external/uwb/src",
- "//packages/modules/Bluetooth:__subpackages__",
- ],
-}
-
-filegroup {
- name: "pdl_generated_files",
- srcs: [
- "tests/generated/custom_field_declaration_big_endian.rs",
- "tests/generated/custom_field_declaration_little_endian.rs",
- "tests/generated/enum_declaration_big_endian.rs",
- "tests/generated/enum_declaration_little_endian.rs",
- "tests/generated/packet_decl_8bit_enum_array_big_endian.rs",
- "tests/generated/packet_decl_8bit_enum_array_little_endian.rs",
- "tests/generated/packet_decl_8bit_enum_big_endian.rs",
- "tests/generated/packet_decl_8bit_enum_little_endian.rs",
- "tests/generated/packet_decl_8bit_scalar_array_big_endian.rs",
- "tests/generated/packet_decl_8bit_scalar_array_little_endian.rs",
- "tests/generated/packet_decl_8bit_scalar_big_endian.rs",
- "tests/generated/packet_decl_8bit_scalar_little_endian.rs",
- "tests/generated/packet_decl_24bit_enum_array_big_endian.rs",
- "tests/generated/packet_decl_24bit_enum_array_little_endian.rs",
- "tests/generated/packet_decl_24bit_enum_big_endian.rs",
- "tests/generated/packet_decl_24bit_enum_little_endian.rs",
- "tests/generated/packet_decl_24bit_scalar_array_big_endian.rs",
- "tests/generated/packet_decl_24bit_scalar_array_little_endian.rs",
- "tests/generated/packet_decl_24bit_scalar_big_endian.rs",
- "tests/generated/packet_decl_24bit_scalar_little_endian.rs",
- "tests/generated/packet_decl_64bit_enum_array_big_endian.rs",
- "tests/generated/packet_decl_64bit_enum_array_little_endian.rs",
- "tests/generated/packet_decl_64bit_enum_big_endian.rs",
- "tests/generated/packet_decl_64bit_enum_little_endian.rs",
- "tests/generated/packet_decl_64bit_scalar_array_big_endian.rs",
- "tests/generated/packet_decl_64bit_scalar_array_little_endian.rs",
- "tests/generated/packet_decl_64bit_scalar_big_endian.rs",
- "tests/generated/packet_decl_64bit_scalar_little_endian.rs",
- "tests/generated/packet_decl_array_dynamic_count_big_endian.rs",
- "tests/generated/packet_decl_array_dynamic_count_little_endian.rs",
- "tests/generated/packet_decl_array_dynamic_size_big_endian.rs",
- "tests/generated/packet_decl_array_dynamic_size_little_endian.rs",
- "tests/generated/packet_decl_array_unknown_element_width_dynamic_count_big_endian.rs",
- "tests/generated/packet_decl_array_unknown_element_width_dynamic_count_little_endian.rs",
- "tests/generated/packet_decl_array_unknown_element_width_dynamic_size_big_endian.rs",
- "tests/generated/packet_decl_array_unknown_element_width_dynamic_size_little_endian.rs",
- "tests/generated/packet_decl_array_with_padding_big_endian.rs",
- "tests/generated/packet_decl_array_with_padding_little_endian.rs",
- "tests/generated/packet_decl_child_packets_big_endian.rs",
- "tests/generated/packet_decl_child_packets_little_endian.rs",
- "tests/generated/packet_decl_complex_scalars_big_endian.rs",
- "tests/generated/packet_decl_complex_scalars_little_endian.rs",
- "tests/generated/packet_decl_custom_field_big_endian.rs",
- "tests/generated/packet_decl_custom_field_little_endian.rs",
- "tests/generated/packet_decl_empty_big_endian.rs",
- "tests/generated/packet_decl_empty_little_endian.rs",
- "tests/generated/packet_decl_fixed_enum_field_big_endian.rs",
- "tests/generated/packet_decl_fixed_enum_field_little_endian.rs",
- "tests/generated/packet_decl_fixed_scalar_field_big_endian.rs",
- "tests/generated/packet_decl_fixed_scalar_field_little_endian.rs",
- "tests/generated/packet_decl_grand_children_big_endian.rs",
- "tests/generated/packet_decl_grand_children_little_endian.rs",
- "tests/generated/packet_decl_mask_scalar_value_big_endian.rs",
- "tests/generated/packet_decl_mask_scalar_value_little_endian.rs",
- "tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs",
- "tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs",
- "tests/generated/packet_decl_parent_with_alias_child_big_endian.rs",
- "tests/generated/packet_decl_parent_with_alias_child_little_endian.rs",
- "tests/generated/packet_decl_parent_with_no_payload_big_endian.rs",
- "tests/generated/packet_decl_parent_with_no_payload_little_endian.rs",
- "tests/generated/packet_decl_payload_field_unknown_size_big_endian.rs",
- "tests/generated/packet_decl_payload_field_unknown_size_little_endian.rs",
- "tests/generated/packet_decl_payload_field_unknown_size_terminal_big_endian.rs",
- "tests/generated/packet_decl_payload_field_unknown_size_terminal_little_endian.rs",
- "tests/generated/packet_decl_payload_field_variable_size_big_endian.rs",
- "tests/generated/packet_decl_payload_field_variable_size_little_endian.rs",
- "tests/generated/packet_decl_reserved_field_big_endian.rs",
- "tests/generated/packet_decl_reserved_field_little_endian.rs",
- "tests/generated/packet_decl_simple_scalars_big_endian.rs",
- "tests/generated/packet_decl_simple_scalars_little_endian.rs",
- "tests/generated/preamble.rs",
- "tests/generated/struct_decl_complex_scalars_big_endian.rs",
- "tests/generated/struct_decl_complex_scalars_little_endian.rs",
- ],
-}
-
-rust_test_host {
- name: "pdl_tests",
- defaults: ["pdl_defaults"],
- srcs: ["src/main.rs"],
- proc_macros: [
- "libpaste",
- ],
- test_suites: ["general-tests"],
- data: [
- ":pdl_generated_files",
- ],
-}
-
-genrule {
- name: "pdl_generated_files_compile_rs",
- cmd: "$(location tests/generated_files_compile.sh) $(in) > $(out)",
- srcs: [":pdl_generated_files"],
- out: ["generated_files_compile.rs"],
- tool_files: ["tests/generated_files_compile.sh"],
-}
-
-rust_test_host {
- name: "pdl_generated_files_compile",
- srcs: [":pdl_generated_files_compile_rs"],
- test_suites: ["general-tests"],
- clippy_lints: "none",
- lints: "none",
- defaults: ["pdl_backend_defaults"],
-}
-
-genrule_defaults {
- name: "pdl_rust_generator_defaults",
- cmd: "$(location :pdl) --output-format rust $(in) > $(out)",
- tools: [":pdl"],
- defaults_visibility: [
- "//external/uwb/src",
- "//packages/modules/Bluetooth:__subpackages__",
- ],
-}
-
-// The generators support more features for LE packets than for BE
-// packets. We use a single input written for LE packets and remove
-// the parts that don't work for BE packets. We do this by removing
-// everything between
-//
-// // Start: little_endian_only
-//
-// and
-//
-// // End: little_endian_only
-//
-// from the LE packet input.
-genrule_defaults {
- name: "pdl_be_test_file_defaults",
- cmd: "sed -e 's/little_endian_packets/big_endian_packets/' " +
- " -e '/Start: little_endian_only/,/End: little_endian_only/d' " +
- " < $(in) > $(out)",
-}
-
-genrule {
- name: "pdl_be_rust_test_file",
- defaults: ["pdl_be_test_file_defaults"],
- srcs: ["tests/canonical/le_rust_test_file.pdl"],
- out: ["be_rust_test_file.pdl"],
-}
-
-genrule {
- name: "pdl_be_test_file",
- defaults: ["pdl_be_test_file_defaults"],
- srcs: ["tests/canonical/le_test_file.pdl"],
- out: ["be_test_file.pdl"],
-}
-
-// Generate the Rust parser+serializer backends.
-genrule {
- name: "pdl_le_backend",
- defaults: ["pdl_rust_generator_defaults"],
- srcs: ["tests/canonical/le_rust_test_file.pdl"],
- out: ["le_backend.rs"],
-}
-
-genrule {
- name: "pdl_be_backend",
- defaults: ["pdl_rust_generator_defaults"],
- srcs: [":pdl_be_rust_test_file"],
- out: ["be_backend.rs"],
-}
-
-rust_defaults {
- name: "pdl_backend_defaults",
- features: ["serde"],
- rustlibs: [
- "libbytes",
- "libnum_traits",
- "libserde",
- "libtempfile",
- "libthiserror",
- ],
- proc_macros: [
- "libnum_derive",
- "libserde_derive",
- ],
-}
-
-rust_library_host {
- name: "libpdl_le_backend",
- crate_name: "pdl_le_backend",
- srcs: [":pdl_le_backend"],
- defaults: ["pdl_backend_defaults"],
- clippy_lints: "none",
- lints: "none",
-}
-
-rust_library_host {
- name: "libpdl_be_backend",
- crate_name: "pdl_be_backend",
- srcs: [":pdl_be_backend"],
- defaults: ["pdl_backend_defaults"],
- clippy_lints: "none",
- lints: "none",
-}
-
-rust_binary_host {
- name: "pdl_generate_tests",
- srcs: ["src/bin/generate-canonical-tests.rs"],
- rustlibs: [
- "libprettyplease",
- "libproc_macro2",
- "libquote",
- "libserde",
- "libserde_json",
- "libsyn",
- "libtempfile",
- ],
-}
-
-genrule {
- name: "pdl_rust_generator_tests_le_src",
- cmd: "$(location :pdl_generate_tests) $(in) pdl_le_backend > $(out)",
- srcs: ["tests/canonical/le_test_vectors.json"],
- out: ["le_canonical.rs"],
- tools: [":pdl_generate_tests"],
-}
-
-genrule {
- name: "pdl_rust_generator_tests_be_src",
- cmd: "$(location :pdl_generate_tests) $(in) pdl_be_backend > $(out)",
- srcs: ["tests/canonical/be_test_vectors.json"],
- out: ["be_canonical.rs"],
- tools: [":pdl_generate_tests"],
-}
-
-rust_test_host {
- name: "pdl_rust_generator_tests_le",
- srcs: [":pdl_rust_generator_tests_le_src"],
- test_suites: ["general-tests"],
- rustlibs: [
- "libnum_traits",
- "libpdl_le_backend",
- "libserde_json",
- ],
- clippy_lints: "none",
- lints: "none",
-}
-
-rust_test_host {
- name: "pdl_rust_generator_tests_be",
- srcs: [":pdl_rust_generator_tests_be_src"],
- test_suites: ["general-tests"],
- rustlibs: [
- "libnum_traits",
- "libpdl_be_backend",
- "libserde_json",
- ],
- clippy_lints: "none",
- lints: "none",
-}
-
-// Defaults for PDL python backend generation.
-genrule_defaults {
- name: "pdl_python_generator_defaults",
- tools: [
- ":pdl",
- ":pdl_python_generator",
- ],
-}
-
-// Defaults for PDL python backend generation.
-genrule_defaults {
- name: "pdl_cxx_generator_defaults",
- tools: [
- ":pdl",
- ":pdl_cxx_generator",
- ],
-}
-
-// Generate the python parser+serializer backend for the
-// little endian test file located at tests/canonical/le_test_file.pdl.
-genrule {
- name: "pdl_python_generator_le_test_gen",
- defaults: ["pdl_python_generator_defaults"],
- cmd: "set -o pipefail;" +
- " $(location :pdl) $(in) |" +
- " $(location :pdl_python_generator)" +
- " --output $(out) --custom-type-location tests.custom_types",
- tool_files: [
- "tests/custom_types.py",
- ],
- srcs: [
- "tests/canonical/le_test_file.pdl",
- ],
- out: [
- "le_pdl_test.py",
- ],
-}
-
-// Generate the python parser+serializer backend for a big endian test
-// file derived from tests/canonical/le_test_file.pdl.
-genrule {
- name: "pdl_python_generator_be_test_gen",
- defaults: ["pdl_python_generator_defaults"],
- cmd: "set -o pipefail;" +
- " $(location :pdl) $(in) |" +
- " $(location :pdl_python_generator)" +
- " --output $(out) --custom-type-location tests.custom_types",
- tool_files: [
- "tests/custom_types.py",
- ],
- srcs: [
- ":pdl_be_test_file",
- ],
- out: [
- "be_pdl_test.py",
- ],
-}
-
-// Test the generated python parser+serializer against
-// pre-generated binary inputs.
-python_test_host {
- name: "pdl_python_generator_test",
- main: "tests/python_generator_test.py",
- srcs: [
- ":pdl_python_generator_be_test_gen",
- ":pdl_python_generator_le_test_gen",
- "tests/custom_types.py",
- "tests/python_generator_test.py",
- ],
- data: [
- "tests/canonical/be_test_vectors.json",
- "tests/canonical/le_test_vectors.json",
- ],
- libs: [
- "typing_extensions",
- ],
- test_options: {
- unit_test: true,
- },
- version: {
- py3: {
- embedded_launcher: true,
- },
- },
-}
-
-// Defaults for the rust_noalloc backend
-genrule_defaults {
- name: "pdl_rust_noalloc_generator_defaults",
- cmd: "$(location :pdl) --output-format rust_no_alloc $(in) > $(out)",
- tools: [":pdl"],
-}
-
-// Generate the rust_noalloc backend srcs against the little-endian test vectors
-genrule {
- name: "pdl_rust_noalloc_le_test_backend_srcs",
- defaults: ["pdl_rust_noalloc_generator_defaults"],
- srcs: ["tests/canonical/le_rust_noalloc_test_file.pdl"],
- out: ["_packets.rs"],
-}
-
-// Generate the rust_noalloc test harness srcs for the supplied test vectors
-genrule {
- name: "pdl_rust_noalloc_le_test_gen_harness",
- cmd: "set -o pipefail;" +
- " $(location :pdl) $(in) --output-format rust_no_alloc_test" +
- " > $(out)",
- srcs: ["tests/canonical/le_rust_noalloc_test_file.pdl"],
- out: ["test_rust_noalloc_parser.rs"],
- tools: [":pdl"],
-}
-
-// The test target for rust_noalloc
-rust_test_host {
- name: "pdl_rust_noalloc_le_test",
- srcs: [
- ":pdl_rust_noalloc_le_test_gen_harness",
-
- ":pdl_rust_noalloc_le_test_backend_srcs",
- ],
- test_suites: ["general-tests"],
-}
-
-// Generate the C++ parser+serializer backend for the
-// little endian test file located at tests/canonical/le_test_file.pdl.
-genrule {
- name: "pdl_cxx_canonical_le_src_gen",
- defaults: ["pdl_cxx_generator_defaults"],
- cmd: "set -o pipefail;" +
- " $(location :pdl) $(in) |" +
- " $(location :pdl_cxx_generator)" +
- " --namespace le_test" +
- " --output $(out)",
- srcs: [
- "tests/canonical/le_test_file.pdl",
- ],
- out: [
- "canonical_le_test_file.h",
- ],
-}
-
-// Generate the C++ parser+serializer backend tests for the
-// little endian test file located at tests/canonical/le_test_file.pdl.
-genrule {
- name: "pdl_cxx_canonical_le_test_gen",
- cmd: "set -o pipefail;" +
- " inputs=( $(in) ) &&" +
- " $(location :pdl) $${inputs[0]} |" +
- " $(location :pdl_cxx_unittest_generator)" +
- " --output $(out)" +
- " --test-vectors $${inputs[1]}" +
- " --include-header $$(basename $${inputs[2]})" +
- " --using-namespace le_test" +
- " --namespace le_test" +
- " --parser-test-suite LeParserTest" +
- " --serializer-test-suite LeSerializerTest",
- tools: [
- ":pdl",
- ":pdl_cxx_unittest_generator",
- ],
- srcs: [
- "tests/canonical/le_test_file.pdl",
-
- "tests/canonical/le_test_vectors.json",
-
- ":pdl_cxx_canonical_le_src_gen",
- ],
- out: [
- "canonical_le_test.cc",
- ],
-}
-
-// Generate the C++ parser+serializer backend for the
-// big endian test file.
-genrule {
- name: "pdl_cxx_canonical_be_src_gen",
- defaults: ["pdl_cxx_generator_defaults"],
- cmd: "set -o pipefail;" +
- " $(location :pdl) $(in) |" +
- " $(location :pdl_cxx_generator)" +
- " --namespace be_test" +
- " --output $(out)",
- srcs: [
- ":pdl_be_test_file",
- ],
- out: [
- "canonical_be_test_file.h",
- ],
-}
-
-// Generate the C++ parser+serializer backend tests for the
-// big endian test file.
-genrule {
- name: "pdl_cxx_canonical_be_test_gen",
- cmd: "set -o pipefail;" +
- " inputs=( $(in) ) &&" +
- " $(location :pdl) $${inputs[0]} |" +
- " $(location :pdl_cxx_unittest_generator)" +
- " --output $(out)" +
- " --test-vectors $${inputs[1]}" +
- " --include-header $$(basename $${inputs[2]})" +
- " --using-namespace be_test" +
- " --namespace be_test" +
- " --parser-test-suite BeParserTest" +
- " --serializer-test-suite BeSerializerTest",
- tools: [
- ":pdl",
- ":pdl_cxx_unittest_generator",
- ],
- srcs: [
- ":pdl_be_test_file",
-
- "tests/canonical/be_test_vectors.json",
-
- ":pdl_cxx_canonical_be_src_gen",
- ],
- out: [
- "canonical_be_test.cc",
- ],
-}
-
-// Test the generated C++ parser+serializer against
-// pre-generated binary inputs.
-cc_test_host {
- name: "pdl_cxx_generator_test",
- local_include_dirs: [
- "scripts",
- ],
- generated_headers: [
- "pdl_cxx_canonical_be_src_gen",
- "pdl_cxx_canonical_le_src_gen",
- ],
- generated_sources: [
- "pdl_cxx_canonical_be_test_gen",
- "pdl_cxx_canonical_le_test_gen",
- ],
- static_libs: [
- "libgtest",
- ],
-}
diff --git a/tools/pdl/CONTRIBUTING.md b/tools/pdl/CONTRIBUTING.md
deleted file mode 100644
index b16bd94428..0000000000
--- a/tools/pdl/CONTRIBUTING.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# How to contribute
-
-We'd love to accept your patches and contributions to this project.
-
-## Before you begin
-
-### Sign our Contributor License Agreement
-
-Contributions to this project must be accompanied by a
-[Contributor License Agreement](https://cla.developers.google.com/about) (CLA).
-You (or your employer) retain the copyright to your contribution; this simply
-gives us permission to use and redistribute your contributions as part of the
-project.
-
-If you or your current employer have already signed the Google CLA (even if it
-was for a different project), you probably don't need to do it again.
-
-Visit <https://cla.developers.google.com/> to see your current agreements or to
-sign a new one.
-
-### Review our community guidelines
-
-This project follows
-[Google's Open Source Community Guidelines](https://opensource.google/conduct/).
-
-## Contribution process
-
-### Code reviews
-
-All submissions, including submissions by project members, require review. We
-use GitHub pull requests for this purpose. Consult
-[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
-information on using pull requests.
diff --git a/tools/pdl/Cargo.toml b/tools/pdl/Cargo.toml
deleted file mode 100644
index cfb4b64ad7..0000000000
--- a/tools/pdl/Cargo.toml
+++ /dev/null
@@ -1,35 +0,0 @@
-[package]
-name = "pdl"
-version = "0.1.0"
-edition = "2021"
-default-run = "pdl"
-
-[workspace]
-
-[features]
-default = ["serde"]
-
-[dependencies]
-codespan-reporting = "0.11.1"
-heck = "0.4.0"
-pest = "2.5.5"
-pest_derive = "2.5.5"
-proc-macro2 = "1.0.46"
-quote = "1.0.21"
-serde_json = "1.0.86"
-argh = "0.1.7"
-syn = "2.0.16"
-prettyplease = "0.2.6"
-
-[dependencies.serde]
-version = "1.0.145"
-features = ["default", "derive", "serde_derive", "std", "rc"]
-optional = true
-
-[dev-dependencies]
-tempfile = "3.3.0"
-bytes = { version = "1.2.1", features = ["serde"] }
-num-derive = "0.3.3"
-num-traits = "0.2.15"
-thiserror = "1.0.37"
-paste = "1.0.6"
diff --git a/tools/pdl/LICENSE b/tools/pdl/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/tools/pdl/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/tools/pdl/OWNERS b/tools/pdl/OWNERS
deleted file mode 100644
index 02d2aab7b2..0000000000
--- a/tools/pdl/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-# Reviewers for /tools/pdl
-
-henrichataing@google.com
-licorne@google.com
-mgeisler@google.com
-mylesgw@google.com
diff --git a/tools/pdl/README.md b/tools/pdl/README.md
deleted file mode 100644
index c6eb85d451..0000000000
--- a/tools/pdl/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Packet Description Language (PDL)
-
-PDL is a domain specific language for writing the definition of binary protocol
-packets. Parsing and validating packets from raw bytes is tedious and error
-prone in any language. PDL generates memory safe and tailored backends for
-multiple target languages:
-
- - Rust
- - C++
- - Python
-
-## How to use PDL
-
-1. Write the protocol definition
-1. `cargo run my-protocol.pdl --output-format rust > my-protocol.rs`
-
-Language specific instructions are provided in another section.
-
-## Supported Features
-
-[Full reference documentation](#doc/reference.md)
-- Scalar values
-- Enumerators
-- Arrays
-- Nested packets
-- Conditional packet derivation
-- Custom field definitions
-
-## Similar projects
-
-* [Kaitai](https://kaitai.io)
-* [EMBOSS](https://github.com/kimrutherford/EMBOSS)
-* [P4](https://p4.org/p4-spec/docs/P4-16-v1.0.0-spec.html)
diff --git a/tools/pdl/doc/reference.md b/tools/pdl/doc/reference.md
deleted file mode 100644
index 2529c29419..0000000000
--- a/tools/pdl/doc/reference.md
+++ /dev/null
@@ -1,682 +0,0 @@
-# Packet Description Language
-
-[TOC]
-
-## Notation
-
-| Notation | Example | Meaning |
-|:-------------:|:----------------------------:|:----------------------------------------------------:|
-| __ANY__ | __ANY__ | Any character |
-| CAPITAL | IDENTIFIER, INT | A token production |
-| snake_case | declaration, constraint | A syntactical production |
-| `string` | `enum`, `=` | The exact character(s) |
-| \x | \n, \r, \t, \0 | The character represented by this escape |
-| x? | `,`? | An optional item |
-| x* | ALPHANUM* | 0 or more of x |
-| x+ | HEXDIGIT+ | 1 or more of x |
-| x \| y | ALPHA \| DIGIT, `0x` \| `0X` | Either x or y |
-| [x-y] | [`a`-`z`] | Any of the characters in the range from x to y |
-| !x | !\n | Negative Predicate (lookahead), do not consume input |
-| () | (`,` enum_tag) | Groups items |
-
-
-[WHITESPACE](#Whitespace) and [COMMENT](#Comment) are implicitly inserted between every item
-and repetitions in syntactical rules (snake_case).
-
-```
-file: endianess declaration*
-```
-behaves like:
-```
-file: (WHITESPACE | COMMENT)* endianess (WHITESPACE | COMMENT)* (declaration | WHITESPACE | COMMENT)*
-```
-
-## File
-
-> file:\
-> &nbsp;&nbsp; endianess [declaration](#declarations)*
->
-> endianess:\
-> &nbsp;&nbsp; `little_endian_packets` | `big_endian_packets`
-
-The structure of a `.pdl` file is:
-1. A declaration of the protocol endianness: `little_endian_packets` or `big_endian_packets`, followed by
-2. Declarations describing the structure of the protocol.
-
-```
-// The protocol is little endian
-little_endian_packets
-
-// Brew a coffee
-packet Brew {
- pot: 8, // Output Pot: 8bit, 0-255
- additions: CoffeeAddition[2] // Coffee Additions: array of 2 CoffeeAddition
-}
-```
-
-The endianness affects how fields of fractional byte sizes (hence named
-bit-fields) are parsed or serialized. Such fields are grouped together to the
-next byte boundary, least significant bit first, and then byte-swapped to the
-required endianess before being written to memory, or after being read from
-memory.
-
-```
-packet Coffee {
- a: 1,
- b: 15,
- c: 3,
- d: 5,
-}
-
-// The first two fields are laid out as a single
-// integer of 16-bits
-// MSB LSB
-// 16 8 0
-// +---------------------------------------+
-// | b14 .. .. b0 |a|
-// +---------------------------------------+
-//
-// The file endianness is applied to this integer
-// to obtain the byte layout of the packet fields.
-//
-// Little endian layout
-// MSB LSB
-// 7 6 5 4 3 2 1 0
-// +---------------------------------------+
-// 0 | b[6:0] | a |
-// +---------------------------------------+
-// 1 | b[14:7] |
-// +---------------------------------------+
-// 2 | d | c |
-// +---------------------------------------+
-//
-// Big endian layout
-// MSB LSB
-// 7 6 5 4 3 2 1 0
-// +---------------------------------------+
-// 0 | b[14:7] |
-// +---------------------------------------+
-// 1 | b[6:0] | a |
-// +---------------------------------------+
-// 2 | d | c |
-// +---------------------------------------+
-```
-
-Fields which qualify as bit-fields are:
-- [Scalar](#fields-scalar) fields
-- [Size](#fields-size) fields
-- [Count](#fields-count) fields
-- [Fixed](#fields-fixed) fields
-- [Reserved](#fields-reserved) fields
-- [Typedef](#fields-typedef) fields, when the field type is an
- [Enum](#enum)
-
-Fields that do not qualify as bit-fields _must_ start and end on a byte boundary.
-
-## Identifiers
-
-- Identifiers can denote a field; an enumeration tag; or a declared type.
-
-- Field identifiers declared in a [packet](#packet) (resp. [struct](#struct)) belong to the _scope_ that extends
- to the packet (resp. struct), and all derived packets (resp. structs).
-
-- Field identifiers declared in a [group](#group) belong to the _scope_ that
- extends to the packets declaring a [group field](#group_field) for this group.
-
-- Two fields may not be declared with the same identifier in any packet scope.
-
- Two types may not be declared with the same identifier.
-
-## Declarations
-
-> declaration: {#declaration}\
-> &nbsp;&nbsp; [enum_declaration](#enum) |\
-> &nbsp;&nbsp; [packet_declaration](#packet) |\
-> &nbsp;&nbsp; [struct_declaration](#struct) |\
-> &nbsp;&nbsp; [group_declaration](#group) |\
-> &nbsp;&nbsp; [checksum_declaration](#checksum) |\
-> &nbsp;&nbsp; [custom_field_declaration](#custom-field) |\
-> &nbsp;&nbsp; [test_declaration](#test)
-
-A *declaration* defines a type inside a `.pdl` file. A declaration can reference
-another declaration appearing later in the file.
-
-A declaration is either:
-- an [Enum](#enum) declaration
-- a [Packet](#packet) declaration
-- a [Struct](#struct) declaration
-- a [Group](#group) declaration
-- a [Checksum](#checksum) declaration
-- a [Custom Field](#custom-field) declaration
-- a [Test](#test) declaration
-
-### Enum
-
-> enum_declaration:\
-> &nbsp;&nbsp; `enum` [IDENTIFIER](#identifier) `:` [INTEGER](#integer) `{`\
-> &nbsp;&nbsp;&nbsp;&nbsp; enum_tag_list\
-> &nbsp;&nbsp; `}`
->
-> enum_tag_list:\
-> &nbsp;&nbsp; enum_tag (`,` enum_tag)* `,`?
->
-> enum_tag:\
-> &nbsp;&nbsp; enum_range | enum_value
->
-> enum_range:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) `=` [INTEGER](#integer) `..` [INTEGER](#integer) (`{`\
-> &nbsp;&nbsp;&nbsp;&nbsp; enum_value_list\
-> &nbsp;&nbsp; `}`)?
->
-> enum_value_list:\
-> &nbsp;&nbsp; enum_value (`,` enum_value)* `,`?
->
-> enum_value:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) `=` [INTEGER](#integer)
-
-An *enumeration*, or *enum* for short, is a declaration of a set of named [integer](#integer) constants
-or named [integer](#integer) ranges. [Integer](#integer) ranges are inclusive at both ends.
-[Integer](#integer) values within a range *must* be unique. [Integer](#integer) ranges
-*must not* overlap.
-
-The [integer](#integer) following the name specifies the bit size of the values.
-
-```
-enum CoffeeAddition: 5 {
- Empty = 0,
-
- NonAlcoholic = 1..9 {
- Cream = 1,
- Vanilla = 2,
- Chocolate = 3,
- },
-
- Alcoholic = 10..19 {
- Whisky = 10,
- Rum = 11,
- Kahlua = 12,
- Aquavit = 13,
- },
-
- Custom = 20..29,
-}
-```
-
-### Packet
-
-> packet_declaration:\
-> &nbsp;&nbsp; `packet` [IDENTIFIER](#identifier)\
-> &nbsp;&nbsp;&nbsp;&nbsp; (`:` [IDENTIFIER](#identifier)\
-> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; (`(` [constraint_list](#constraints) `)`)?\
-> &nbsp;&nbsp;&nbsp;&nbsp; )?\
-> &nbsp;&nbsp; `{`\
-> &nbsp;&nbsp;&nbsp;&nbsp; [field_list](#fields)?\
-> &nbsp;&nbsp; `}`
-
-A *packet* is a declaration of a sequence of [fields](#fields). While packets
-can contain bit-fields, the size of the whole packet must be a multiple of 8
-bits.
-
-A *packet* can optionally inherit from another *packet* declaration. In this case the packet
-inherits the parent's fields and the child's fields replace the
-[*\_payload\_*](#fields-payload) or [*\_body\_*](#fields-body) field of the parent.
-
-When inheriting, you can use constraints to set values on parent fields.
-See [constraints](#constraints) for more details.
-
-```
-packet Error {
- code: 32,
- _payload_
-}
-
-packet ImATeapot: Error(code = 418) {
- brand_id: 8
-}
-```
-
-### Struct
-
-> struct_declaration:\
-> &nbsp;&nbsp; `struct` [IDENTIFIER](#identifier)\
-> &nbsp;&nbsp;&nbsp;&nbsp; (`:` [IDENTIFIER](#identifier)\
-> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; (`(` [constraint_list](#constraints) `)`)?\
-> &nbsp;&nbsp;&nbsp;&nbsp; )?\
-> &nbsp;&nbsp; `{`\
-> &nbsp;&nbsp;&nbsp;&nbsp; [field_list](#fields)?\
-> &nbsp;&nbsp; `}`
-
-A *struct* follows the same rules as a [*packet*](#packet) with the following differences:
-- It inherits from a *struct* declaration instead of *packet* declaration.
-- A [typedef](#fields-typedef) field can reference a *struct*.
-
-### Group
-
-> group_declaration:\
-> &nbsp;&nbsp; `group` [IDENTIFIER](#identifier) `{`\
-> &nbsp;&nbsp;&nbsp;&nbsp; [field_list](#fields)\
-> &nbsp;&nbsp; `}`
-
-A *group* is a sequence of [fields](#fields) that expand in a
-[packet](#packet) or [struct](#struct) when used.
-
-See also the [Group field](#fields-group).
-
-```
-group Paged {
- offset: 8,
- limit: 8
-}
-
-packet AskBrewHistory {
- pot: 8, // Coffee Pot
- Paged
-}
-```
-behaves like:
-```
-packet AskBrewHistory {
- pot: 8, // Coffee Pot
- offset: 8,
- limit: 8
-}
-```
-
-### Checksum
-
-> checksum_declaration:\
-> &nbsp;&nbsp; `checksum` [IDENTIFIER](#identifier) `:` [INTEGER](#integer) [STRING](#string)
-
-A *checksum* is a native type (not implemented in PDL). See your generator documentation
-for more information on how to use it.
-
-The [integer](#integer) following the name specifies the bit size of the checksum value.
-The [string](#string) following the size is a value defined by the generator implementation.
-
-```
-checksum CRC16: 16 "crc16"
-```
-
-### Custom Field
-
-> custom_field_declaration:\
-> &nbsp;&nbsp; `custom_field` [IDENTIFIER](#identifier) (`:` [INTEGER](#integer))? [STRING](#string)
-
-A *custom field* is a native type (not implemented in PDL). See your generator documentation for more
-information on how to use it.
-
-If present, the [integer](#integer) following the name specifies the bit size of the value.
-The [string](#string) following the size is a value defined by the generator implementation.
-
-```
-custom_field URL "url"
-```
-
-### Test
-
-> test_declaration:\
-> &nbsp;&nbsp; `test` [IDENTIFIER](#identifier) `{`\
-> &nbsp;&nbsp;&nbsp;&nbsp; test_case_list\
-> &nbsp;&nbsp; `}`
->
-> test_case_list:\
-> &nbsp;&nbsp; test_case (`,` test_case)* `,`?
->
-> test_case:\
-> &nbsp;&nbsp; [STRING](#string)
-
-A *test* declares a set of valid octet representations of a packet identified by its name.
-The generator implementation defines how to use the test data.
-
-A test passes if the packet parser accepts the input; if you want to test
-the values returned for each field, you may specify a derived packet with field values enforced using
-constraints.
-
-```
-packet Brew {
- pot: 8,
- addition: CoffeeAddition
-}
-
-test Brew {
- "\x00\x00",
- "\x00\x04"
-}
-
-// Fully Constrained Packet
-packet IrishCoffeeBrew: Brew(pot = 0, addition = Whisky) {}
-
-test IrishCoffeeBrew {
- "\x00\x04"
-}
-```
-
-## Constraints
-
-> constraint:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) `=` [IDENTIFIER](#identifier) | [INTEGER](#integer)
->
-> constraint_list:\
-> &nbsp;&nbsp; constraint (`,` constraint)* `,`?
-
-A *constraint* defines the value of a parent field.
-The value can either be an [enum](#enum) tag or an [integer](#integer).
-
-```
-group Additionable {
- addition: CoffeeAddition
-}
-
-packet IrishCoffeeBrew {
- pot: 8,
- Additionable {
- addition = Whisky
- }
-}
-
-packet Pot0IrishCoffeeBrew: IrishCoffeeBrew(pot = 0) {}
-```
-
-## Fields
-
-> field_list:\
-> &nbsp;&nbsp; field (`,` field)* `,`?
->
-> field:\
-> &nbsp;&nbsp; [checksum_field](#fields-checksum) |\
-> &nbsp;&nbsp; [padding_field](#fields-padding) |\
-> &nbsp;&nbsp; [size_field](#fields-size) |\
-> &nbsp;&nbsp; [count_field](#fields-count) |\
-> &nbsp;&nbsp; [payload_field](#fields-payload) |\
-> &nbsp;&nbsp; [body_field](#fields-body) |\
-> &nbsp;&nbsp; [fixed_field](#fields-fixed) |\
-> &nbsp;&nbsp; [reserved_field](#fields-reserved) |\
-> &nbsp;&nbsp; [array_field](#fields-array) |\
-> &nbsp;&nbsp; [scalar_field](#fields-scalar) |\
-> &nbsp;&nbsp; [typedef_field](#fields-typedef) |\
-> &nbsp;&nbsp; [group_field](#fields-group)
-
-A field is either:
-- a [Scalar](#fields-scalar) field
-- a [Typedef](#fields-typedef) field
-- a [Group](#fields-group) field
-- an [Array](#fields-array) field
-- a [Size](#fields-size) field
-- a [Count](#fields-count) field
-- a [Payload](#fields-payload) field
-- a [Body](#fields-body) field
-- a [Fixed](#fields-fixed) field
-- a [Checksum](#fields-checksum) field
-- a [Padding](#fields-padding) field
-- a [Reserved](#fields-reserved) field
-
-### Scalar {#fields-scalar}
-
-> scalar_field:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) `:` [INTEGER](#integer)
-
-A *scalar* field defines a numeric value with a bit size.
-
-```
-struct Coffee {
- temperature: 8
-}
-```
-
-### Typedef {#fields-typedef}
-
-> typedef_field:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) `:` [IDENTIFIER](#identifier)
-
-A *typedef* field defines a field taking as value either an [enum](#enum), [struct](#struct),
-[checksum](#checksum) or a [custom_field](#custom-field).
-
-```
-packet LastTimeModification {
- coffee: Coffee,
- addition: CoffeeAddition
-}
-```
-
-### Array {#fields-array}
-
-> array_field:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) `:` [INTEGER](#integer) | [IDENTIFIER](#identifier) `[`\
-> &nbsp;&nbsp;&nbsp;&nbsp; [SIZE_MODIFIER](#size-modifier) | [INTEGER](#integer)\
-> &nbsp;&nbsp; `]`
-
-An *array* field defines a sequence of `N` elements of type `T`.
-
-`N` can be:
-- An [integer](#integer) value.
-- A [size modifier](#size-modifier).
-- Unspecified: In this case the array is dynamically sized using a
-[*\_size\_*](#fields-size) or a [*\_count\_*](#fields-count).
-
-`T` can be:
-- An [integer](#integer) denoting the bit size of one element.
-- An [identifier](#identifier) referencing an [enum](#enum), a [struct](#struct)
-or a [custom field](#custom-field) type.
-
-The size of `T` must always be a multiple of 8 bits, that is, the array elements
-must start at byte boundaries.
-
-```
-packet Brew {
- pots: 8[2],
- additions: CoffeeAddition[2],
- extra_additions: CoffeeAddition[],
-}
-```
-
-### Group {#fields-group}
-
-> group_field:\
-> &nbsp;&nbsp; [IDENTIFIER](#identifier) (`{` [constraint_list](#constraints) `}`)?
-
-A *group* field inlines all the fields defined in the referenced group.
-
-If a [constraint list](#constraints) constrains a [scalar](#fields-scalar) field
-or [typedef](#fields-typedef) field with an [enum](#enum) type, the field will
-become a [fixed](#fields-fixed) field.
-The [fixed](#fields-fixed) field inherits the type or size of the original field and the
-value from the constraint list.
-
-See [Group Declaration](#group) for more information.
-
-### Size {#fields-size}
-
-> size_field:\
-> &nbsp;&nbsp; `_size_` `(` [IDENTIFIER](#identifier) | `_payload_` | `_body_` `)` `:` [INTEGER](#integer)
-
-A *\_size\_* field is a [scalar](#fields-scalar) field whose value is the size in octets of the designated
-[array](#fields-array), [*\_payload\_*](#fields-payload) or [*\_body\_*](#fields-body).
-
-```
-packet Parent {
- _size_(_payload_): 2,
- _payload_
-}
-
-packet Brew {
- pot: 8,
- _size_(additions): 8,
- additions: CoffeeAddition[]
-}
-```
-
-### Count {#fields-count}
-
-> count_field:\
-> &nbsp;&nbsp; `_count_` `(` [IDENTIFIER](#identifier) `)` `:` [INTEGER](#integer)
-
-A *\_count\_* field is a [*scalar*](#fields-scalar) field whose value is the number of elements of the designated
-[array](#fields-array).
-
-```
-packet Brew {
- pot: 8,
- _count_(additions): 8,
- additions: CoffeeAddition[]
-}
-```
-
-### Payload {#fields-payload}
-
-> payload_field:\
-> &nbsp;&nbsp; `_payload_` (`:` `[` [SIZE_MODIFIER](#size-modifier) `]` )?
-
-A *\_payload\_* field is a dynamically sized array of octets.
-
-It declares where to parse the definition of a child [packet](#packet) or [struct](#struct).
-
-A [*\_size\_*](#fields-size) or a [*\_count\_*](#fields-count) field referencing
-the payload induces its size.
-
-If used, a [size modifier](#size-modifier) can alter the octet size.
-
-### Body {#fields-body}
-
-> body_field:\
-> &nbsp;&nbsp; `_body_`
-
-A *\_body\_* field is like a [*\_payload\_*](#fields-payload) field with the following differences:
-- The body field is private to the packet definition; it is accessible only through inheritance.
-- The body does not accept a size modifier.
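-
-For example (a sketch with hypothetical packet names), a parent packet declares a body that is filled in by the fields of its child:
-
-```
-// Illustrative sketch, not part of the original examples.
-packet BasicBrew {
- pot: 8,
- _body_
-}
-
-packet SweetBrew : BasicBrew {
- additions: CoffeeAddition[2]
-}
-```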
-
-### Fixed {#fields-fixed}
-
-> fixed_field:\
-> &nbsp;&nbsp; `_fixed_` `=` \
-> &nbsp;&nbsp;&nbsp;&nbsp; ( [INTEGER](#integer) `:` [INTEGER](#integer) ) |\
-> &nbsp;&nbsp;&nbsp;&nbsp; ( [IDENTIFIER](#identifier) `:` [IDENTIFIER](#identifier) )
-
-A *\_fixed\_* field defines a constant with a known bit size.
-The constant can be either:
-- An [integer](#integer) value
-- An [enum](#enum) tag
-
-```
-packet Teapot {
- _fixed_ = 42: 8,
- _fixed_ = Empty: CoffeeAddition
-}
-```
-
-### Checksum {#fields-checksum}
-
-> checksum_field:\
-> &nbsp;&nbsp; `_checksum_start_` `(` [IDENTIFIER](#identifier) `)`
-
-A *\_checksum_start\_* field is a zero-sized field that marks the beginning of
-the fields covered by a checksum.
-
-The *\_checksum_start\_* references a [typedef](#fields-typedef) field
-with a [checksum](#checksum) type that stores the checksum value and selects the algorithm
-for the checksum.
-
-```
-checksum CRC16: 16 "crc16"
-
-packet CRCedBrew {
- crc: CRC16,
- _checksum_start_(crc),
- pot: 8,
-}
-```
-
-### Padding {#fields-padding}
-
-> padding_field:\
-> &nbsp;&nbsp; `_padding_` `[` [INTEGER](#integer) `]`
-
-A *\_padding\_* field immediately following an array field pads the array field with `0`s to the
-specified number of **octets**.
-
-```
-packet PaddedCoffee {
- additions: CoffeeAddition[],
- _padding_[100]
-}
-```
-
-### Reserved {#fields-reserved}
-
-> reserved_field:\
-> &nbsp;&nbsp; `_reserved_` `:` [INTEGER](#integer)
-
-A *\_reserved\_* field adds reserved bits.
-
-```
-packet DeloreanCoffee {
- _reserved_: 2014
-}
-```
-
-## Tokens
-
-### Integer
-
-> INTEGER:\
-> &nbsp;&nbsp; HEXVALUE | INTVALUE
->
-> HEXVALUE:\
-> &nbsp;&nbsp; (`0x` | `0X`) HEXDIGIT<sup>+</sup>
->
-> INTVALUE:\
-> &nbsp;&nbsp; DIGIT<sup>+</sup>
->
-> HEXDIGIT:\
-> &nbsp;&nbsp; DIGIT | [`a`-`f`] | [`A`-`F`]
->
-> DIGIT:\
-> &nbsp;&nbsp; [`0`-`9`]
-
-An integer is a number in base 10 (decimal), or in base 16 (hexadecimal) with
-the prefix `0x` or `0X`. For example, `255` and `0xff` denote the same value.
-
-### String
-
-> STRING:\
-> &nbsp;&nbsp; `"` (!`"` __ANY__)* `"`
-
-A string is a sequence of characters enclosed in double quotes. It can span multiple lines.
-
-### Identifier
-
-> IDENTIFIER: \
-> &nbsp;&nbsp; ALPHA (ALPHANUM | `_`)*
->
-> ALPHA:\
-> &nbsp;&nbsp; [`a`-`z`] | [`A`-`Z`]
->
-> ALPHANUM:\
-> &nbsp;&nbsp; ALPHA | DIGIT
-
-An identifier is a sequence of alphanumeric or `_` characters
-starting with a letter.
-
-### Size Modifier
-
-> SIZE_MODIFIER:\
-> &nbsp;&nbsp; `+` INTVALUE
-
-A size modifier alters the octet size of the field it is attached to.
-For example, `+ 2` declares that the size is 2 octets bigger than the
-actual field size.
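-
-For example, a sketch of an array field whose declared size is 2 octets larger than its serialized size (the packet name is hypothetical, and the exact placement of the modifier is an assumption):
-
-```
-// Illustrative sketch, not part of the original examples.
-packet ModifiedBrew {
- _size_(additions): 8,
- additions: CoffeeAddition[+2]
-}
-```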
-
-### Comment
-
-> COMMENT:\
-> &nbsp;&nbsp; BLOCK_COMMENT | LINE_COMMENT
->
-> BLOCK_COMMENT:\
-> &nbsp;&nbsp; `/*` (!`*/` ANY)* `*/`
->
-> LINE_COMMENT:\
-> &nbsp;&nbsp; `//` (!\n ANY)*
-
-### Whitespace
-
-> WHITESPACE:\
-> &nbsp;&nbsp; ` ` | `\t` | `\n`
diff --git a/tools/pdl/scripts/Android.bp b/tools/pdl/scripts/Android.bp
deleted file mode 100644
index 8035e1d465..0000000000
--- a/tools/pdl/scripts/Android.bp
+++ /dev/null
@@ -1,44 +0,0 @@
-package {
- // See: http://go/android-license-faq
- // A large-scale-change added 'default_applicable_licenses' to import
- // all of the 'license_kinds' from "system_bt_license"
- // to get the below license kinds:
- // SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: ["system_bt_license"],
-}
-
-// Python generator.
-python_binary_host {
- name: "pdl_python_generator",
- main: "generate_python_backend.py",
- srcs: [
- "generate_python_backend.py",
- "pdl/ast.py",
- "pdl/core.py",
- "pdl/utils.py",
- ],
-}
-
-// C++ generator.
-python_binary_host {
- name: "pdl_cxx_generator",
- main: "generate_cxx_backend.py",
- srcs: [
- "generate_cxx_backend.py",
- "pdl/ast.py",
- "pdl/core.py",
- "pdl/utils.py",
- ],
-}
-
-// C++ test generator.
-python_binary_host {
- name: "pdl_cxx_unittest_generator",
- main: "generate_cxx_backend_tests.py",
- srcs: [
- "generate_cxx_backend_tests.py",
- "pdl/ast.py",
- "pdl/core.py",
- "pdl/utils.py",
- ],
-}
diff --git a/tools/pdl/scripts/generate_cxx_backend.py b/tools/pdl/scripts/generate_cxx_backend.py
deleted file mode 100755
index a728d13411..0000000000
--- a/tools/pdl/scripts/generate_cxx_backend.py
+++ /dev/null
@@ -1,1392 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-from dataclasses import dataclass, field
-import json
-from pathlib import Path
-import sys
-from textwrap import dedent
-from typing import List, Tuple, Union, Optional
-
-from pdl import ast, core
-from pdl.utils import indent, to_pascal_case
-
-
-def mask(width: int) -> str:
- return hex((1 << width) - 1)
-
-
-def deref(var: Optional[str], id: str) -> str:
- return f'{var}.{id}' if var else id
-
-
-def get_cxx_scalar_type(width: int) -> str:
- """Return the cxx scalar type to be used to back a PDL type."""
- for n in [8, 16, 32, 64]:
- if width <= n:
- return f'uint{n}_t'
- # PDL type does not fit on non-extended scalar types.
- assert False
-
-
-@dataclass
-class FieldParser:
- byteorder: str
- offset: int = 0
- shift: int = 0
- extract_arrays: bool = field(default=False)
- chunk: List[Tuple[int, int, ast.Field]] = field(default_factory=lambda: [])
- chunk_nr: int = 0
- unchecked_code: List[str] = field(default_factory=lambda: [])
- code: List[str] = field(default_factory=lambda: [])
-
- def unchecked_append_(self, line: str):
- """Append unchecked field parsing code.
- The function check_size_ must be called to generate a size guard
- after parsing is completed."""
- self.unchecked_code.append(line)
-
- def append_(self, line: str):
- """Append field parsing code.
- There must be no unchecked code left before this function is called."""
- assert len(self.unchecked_code) == 0
- self.code.append(line)
-
- def check_size_(self, size: str):
- """Generate a check of the current span size."""
- self.append_(f"if (span.size() < {size}) {{")
- self.append_(" return false;")
- self.append_("}")
-
- def check_code_(self):
- """Generate a size check for pending field parsing."""
- if len(self.unchecked_code) > 0:
- assert len(self.chunk) == 0
- unchecked_code = self.unchecked_code
- self.unchecked_code = []
- self.check_size_(str(self.offset))
- self.code.extend(unchecked_code)
- self.offset = 0
-
- def parse_bit_field_(self, field: ast.Field):
- """Parse the selected field as a bit field.
- The field is added to the current chunk. When a byte boundary
- is reached all saved fields are extracted together."""
-
- # Add to current chunk.
- width = core.get_field_size(field)
- self.chunk.append((self.shift, width, field))
- self.shift += width
-
- # Wait for more fields if not on a byte boundary.
- if (self.shift % 8) != 0:
- return
-
- # Parse the backing integer using the configured endianness,
- # extract field values.
- size = int(self.shift / 8)
- backing_type = get_cxx_scalar_type(self.shift)
-
- # Special case when no field is actually used from
- # the chunk.
- should_skip_value = all(isinstance(field, ast.ReservedField) for (_, _, field) in self.chunk)
- if should_skip_value:
- self.unchecked_append_(f"span.skip({size}); // skip reserved fields")
- self.offset += size
- self.shift = 0
- self.chunk = []
- return
-
- if len(self.chunk) > 1:
- value = f"chunk{self.chunk_nr}"
- self.unchecked_append_(f"{backing_type} {value} = span.read_{self.byteorder}<{backing_type}, {size}>();")
- self.chunk_nr += 1
- else:
- value = f"span.read_{self.byteorder}<{backing_type}, {size}>()"
-
- for shift, width, field in self.chunk:
- v = (value if len(self.chunk) == 1 and shift == 0 else f"({value} >> {shift}) & {mask(width)}")
-
- if isinstance(field, ast.ScalarField):
- self.unchecked_append_(f"{field.id}_ = {v};")
- elif isinstance(field, ast.FixedField) and field.enum_id:
- self.unchecked_append_(f"if ({field.enum_id}({v}) != {field.enum_id}::{field.tag_id}) {{")
- self.unchecked_append_(" return false;")
- self.unchecked_append_("}")
- elif isinstance(field, ast.FixedField):
- self.unchecked_append_(f"if (({v}) != {hex(field.value)}) {{")
- self.unchecked_append_(" return false;")
- self.unchecked_append_("}")
- elif isinstance(field, ast.TypedefField):
- self.unchecked_append_(f"{field.id}_ = {field.type_id}({v});")
- elif isinstance(field, ast.SizeField):
- self.unchecked_append_(f"{field.field_id}_size = {v};")
- elif isinstance(field, ast.CountField):
- self.unchecked_append_(f"{field.field_id}_count = {v};")
- elif isinstance(field, ast.ReservedField):
- pass
- else:
- raise Exception(f'Unsupported bit field type {field.kind}')
-
- # Reset state.
- self.offset += size
- self.shift = 0
- self.chunk = []
-
- def parse_typedef_field_(self, field: ast.TypedefField):
- """Parse a typedef field, to the exclusion of Enum fields."""
- if self.shift != 0:
- raise Exception('Typedef field does not start on an octet boundary')
-
- self.check_code_()
- self.append_(
- dedent("""\
- if (!{field_type}::Parse(span, &{field_id}_)) {{
- return false;
- }}""".format(field_type=field.type.id, field_id=field.id)))
-
- def parse_array_field_lite_(self, field: ast.ArrayField):
- """Parse the selected array field.
- This function does not attempt to parse all elements but just to
- identify the span of the array."""
- array_size = core.get_array_field_size(field)
- element_width = core.get_array_element_size(field)
- padded_size = field.padded_size
-
- if element_width:
- element_width = int(element_width / 8)
-
- if isinstance(array_size, int):
- size = None
- count = array_size
- elif isinstance(array_size, ast.SizeField):
- size = f'{field.id}_size'
- count = None
- elif isinstance(array_size, ast.CountField):
- size = None
- count = f'{field.id}_count'
- else:
- size = None
- count = None
-
- # Shift the span to reset the offset to 0.
- self.check_code_()
-
- # Apply the size modifier.
- if field.size_modifier and size:
- self.append_(f"{size} = {size} - {field.size_modifier};")
-
- # Compute the array size if the count and element width are known.
- if count is not None and element_width is not None:
- size = f"{count} * {element_width}"
-
- # Parse from the padded array if padding is present.
- if padded_size:
- self.check_size_(padded_size)
- self.append_("{")
- self.append_(
- f"pdl::packet::slice remaining_span = span.subrange({padded_size}, span.size() - {padded_size});")
- self.append_(f"span = span.subrange(0, {padded_size});")
-
- # The array size is known in bytes.
- if size is not None:
- self.check_size_(size)
- self.append_(f"{field.id}_ = span.subrange(0, {size});")
- self.append_(f"span.skip({size});")
-
- # The array count is known. The element width is dynamic.
- # Parse each element iteratively and derive the array span.
- elif count is not None:
- self.append_("{")
- self.append_("pdl::packet::slice temp_span = span;")
- self.append_(f"for (size_t n = 0; n < {count}; n++) {{")
- self.append_(f" {field.type_id} element;")
- self.append_(f" if (!{field.type_id}::Parse(temp_span, &element)) {{")
- self.append_(" return false;")
- self.append_(" }")
- self.append_("}")
- self.append_(f"{field.id}_ = span.subrange(0, span.size() - temp_span.size());")
- self.append_(f"span.skip({field.id}_.size());")
- self.append_("}")
-
- # The array size is not known, assume the array takes the
- # full remaining space. TODO support having fixed sized fields
- # following the array.
- else:
- self.append_(f"{field.id}_ = span;")
- self.append_("span.clear();")
-
- if padded_size:
- self.append_(f"span = remaining_span;")
- self.append_("}")
-
- def parse_array_field_full_(self, field: ast.ArrayField):
- """Parse the selected array field.
- This function does not attempt to parse all elements but just to
- identify the span of the array."""
- array_size = core.get_array_field_size(field)
- element_width = core.get_array_element_size(field)
- element_type = field.type_id or get_cxx_scalar_type(field.width)
- padded_size = field.padded_size
-
- if element_width:
- element_width = int(element_width / 8)
-
- if isinstance(array_size, int):
- size = None
- count = array_size
- elif isinstance(array_size, ast.SizeField):
- size = f'{field.id}_size'
- count = None
- elif isinstance(array_size, ast.CountField):
- size = None
- count = f'{field.id}_count'
- else:
- size = None
- count = None
-
- # Shift the span to reset the offset to 0.
- self.check_code_()
-
- # Apply the size modifier.
- if field.size_modifier and size:
- self.append_(f"{size} = {size} - {field.size_modifier};")
-
- # Compute the array size if the count and element width are known.
- if count is not None and element_width is not None:
- size = f"{count} * {element_width}"
-
- # Parse from the padded array if padding is present.
- if padded_size:
- self.check_size_(padded_size)
- self.append_("{")
- self.append_(
- f"pdl::packet::slice remaining_span = span.subrange({padded_size}, span.size() - {padded_size});")
- self.append_(f"span = span.subrange(0, {padded_size});")
-
- # The array size is known in bytes.
- if size is not None:
- self.check_size_(size)
- self.append_("{")
- self.append_(f"pdl::packet::slice temp_span = span.subrange(0, {size});")
- self.append_(f"span.skip({size});")
- self.append_(f"while (temp_span.size() > 0) {{")
- if field.width:
- element_size = int(field.width / 8)
- self.append_(f" if (temp_span.size() < {element_size}) {{")
- self.append_(f" return false;")
- self.append_(" }")
- self.append_(
- f" {field.id}_.push_back(temp_span.read_{self.byteorder}<{element_type}, {element_size}>());")
- elif isinstance(field.type, ast.EnumDeclaration):
- backing_type = get_cxx_scalar_type(field.type.width)
- element_size = int(field.type.width / 8)
- self.append_(f" if (temp_span.size() < {element_size}) {{")
- self.append_(f" return false;")
- self.append_(" }")
- self.append_(
- f" {field.id}_.push_back({element_type}(temp_span.read_{self.byteorder}<{backing_type}, {element_size}>()));"
- )
- else:
- self.append_(f" {element_type} element;")
- self.append_(f" if (!{element_type}::Parse(temp_span, &element)) {{")
- self.append_(f" return false;")
- self.append_(" }")
- self.append_(f" {field.id}_.emplace_back(std::move(element));")
- self.append_("}")
- self.append_("}")
-
- # The array count is known. The element width is dynamic.
- # Parse each element iteratively and derive the array span.
- elif count is not None:
- self.append_(f"for (size_t n = 0; n < {count}; n++) {{")
- self.append_(f" {element_type} element;")
- self.append_(f" if (!{field.type_id}::Parse(span, &element)) {{")
- self.append_(" return false;")
- self.append_(" }")
- self.append_(f" {field.id}_.emplace_back(std::move(element));")
- self.append_("}")
-
- # The array size is not known, assume the array takes the
- # full remaining space. TODO support having fixed sized fields
- # following the array.
- elif field.width:
- element_size = int(field.width / 8)
- self.append_(f"while (span.size() > 0) {{")
- self.append_(f" if (span.size() < {element_size}) {{")
- self.append_(f" return false;")
- self.append_(" }")
- self.append_(f" {field.id}_.push_back(span.read_{self.byteorder}<{element_type}, {element_size}>());")
- self.append_("}")
- elif isinstance(field.type, ast.EnumDeclaration):
- element_size = int(field.type.width / 8)
- backing_type = get_cxx_scalar_type(field.type.width)
- self.append_(f"while (span.size() > 0) {{")
- self.append_(f" if (span.size() < {element_size}) {{")
- self.append_(f" return false;")
- self.append_(" }")
- self.append_(
- f" {field.id}_.push_back({element_type}(span.read_{self.byteorder}<{backing_type}, {element_size}>()));"
- )
- self.append_("}")
- else:
- self.append_(f"while (span.size() > 0) {{")
- self.append_(f" {element_type} element;")
- self.append_(f" if (!{element_type}::Parse(span, &element)) {{")
- self.append_(f" return false;")
- self.append_(" }")
- self.append_(f" {field.id}_.emplace_back(std::move(element));")
- self.append_("}")
-
- if padded_size:
- self.append_(f"span = remaining_span;")
- self.append_("}")
-
- def parse_payload_field_lite_(self, field: Union[ast.BodyField, ast.PayloadField]):
- """Parse body and payload fields."""
- if self.shift != 0:
- raise Exception('Payload field does not start on an octet boundary')
-
- payload_size = core.get_payload_field_size(field)
- offset_from_end = core.get_field_offset_from_end(field)
- self.check_code_()
-
- if payload_size and getattr(field, 'size_modifier', None):
- self.append_(f"{field.id}_size -= {field.size_modifier};")
-
- # The payload or body has a known size.
- # Consume the payload and update the span in case
- # fields are placed after the payload.
- if payload_size:
- self.check_size_(f"{field.id}_size")
- self.append_(f"payload_ = span.subrange(0, {field.id}_size);")
- self.append_(f"span.skip({field.id}_size);")
- # The payload or body is the last field of a packet,
- # consume the remaining span.
- elif offset_from_end == 0:
- self.append_(f"payload_ = span;")
- self.append_(f"span.clear();")
- # The payload or body is followed by fields of static size.
- # Consume the span that is not reserved for the following fields.
- elif offset_from_end:
- if (offset_from_end % 8) != 0:
- raise Exception('Payload field offset from end of packet is not a multiple of 8')
- offset_from_end = int(offset_from_end / 8)
- self.check_size_(f'{offset_from_end}')
- self.append_(f"payload_ = span.subrange(0, span.size() - {offset_from_end});")
- self.append_(f"span.skip(payload_.size());")
-
- def parse_payload_field_full_(self, field: Union[ast.BodyField, ast.PayloadField]):
- """Parse body and payload fields."""
- if self.shift != 0:
- raise Exception('Payload field does not start on an octet boundary')
-
- payload_size = core.get_payload_field_size(field)
- offset_from_end = core.get_field_offset_from_end(field)
- self.check_code_()
-
- if payload_size and getattr(field, 'size_modifier', None):
- self.append_(f"{field.id}_size -= {field.size_modifier};")
-
- # The payload or body has a known size.
- # Consume the payload and update the span in case
- # fields are placed after the payload.
- if payload_size:
- self.check_size_(f"{field.id}_size")
- self.append_(f"for (size_t n = 0; n < {field.id}_size; n++) {{")
- self.append_(f" payload_.push_back(span.read_{self.byteorder}<uint8_t>();")
- self.append_("}")
- # The payload or body is the last field of a packet,
- # consume the remaining span.
- elif offset_from_end == 0:
- self.append_("while (span.size() > 0) {")
- self.append_(f" payload_.push_back(span.read_{self.byteorder}<uint8_t>();")
- self.append_("}")
- # The payload or body is followed by fields of static size.
- # Consume the span that is not reserved for the following fields.
- elif offset_from_end is not None:
- if (offset_from_end % 8) != 0:
- raise Exception('Payload field offset from end of packet is not a multiple of 8')
- offset_from_end = int(offset_from_end / 8)
- self.check_size_(f'{offset_from_end}')
- self.append_(f"while (span.size() > {offset_from_end}) {{")
- self.append_(f" payload_.push_back(span.read_{self.byteorder}<uint8_t>();")
- self.append_("}")
-
- def parse(self, field: ast.Field):
- # Field has bit granularity.
- # Append the field to the current chunk,
- # check if a byte boundary was reached.
- if core.is_bit_field(field):
- self.parse_bit_field_(field)
-
- # Padding fields.
- elif isinstance(field, ast.PaddingField):
- pass
-
- # Array fields.
- elif isinstance(field, ast.ArrayField) and self.extract_arrays:
- self.parse_array_field_full_(field)
-
- elif isinstance(field, ast.ArrayField) and not self.extract_arrays:
- self.parse_array_field_lite_(field)
-
- # Other typedef fields.
- elif isinstance(field, ast.TypedefField):
- self.parse_typedef_field_(field)
-
- # Payload and body fields.
- elif isinstance(field, (ast.PayloadField, ast.BodyField)) and self.extract_arrays:
- self.parse_payload_field_full_(field)
-
- elif isinstance(field, (ast.PayloadField, ast.BodyField)) and not self.extract_arrays:
- self.parse_payload_field_lite_(field)
-
- else:
- raise Exception(f'Unsupported field type {field.kind}')
-
- def done(self):
- self.check_code_()
-
-
-@dataclass
-class FieldSerializer:
- byteorder: str
- shift: int = 0
- value: List[Tuple[str, int]] = field(default_factory=lambda: [])
- code: List[str] = field(default_factory=lambda: [])
- indent: int = 0
-
- def indent_(self):
- self.indent += 1
-
- def unindent_(self):
- self.indent -= 1
-
- def append_(self, line: str):
- """Append field serializing code."""
- lines = line.split('\n')
- self.code.extend([' ' * self.indent + line for line in lines])
-
- def get_payload_field_size(self, var: Optional[str], payload: ast.PayloadField, decl: ast.Declaration) -> str:
- """Compute the size of the selected payload field, with the information
- of the builder for the selected declaration. The payload field can be
- the payload of any of the parent declarations, or the current declaration."""
-
- if payload.parent.id == decl.id:
- return deref(var, 'payload_.size()')
-
- # Get the child packet declaration that will match the current
- # declaration further down.
- child = decl
- while child.parent_id != payload.parent.id:
- child = child.parent
-
- # The payload is the result of serializing the children fields.
- constant_width = 0
- variable_width = []
- for f in child.fields:
- field_size = core.get_field_size(f)
- if field_size is not None:
- constant_width += field_size
- elif isinstance(f, (ast.PayloadField, ast.BodyField)):
- variable_width.append(self.get_payload_field_size(var, f, decl))
- elif isinstance(f, ast.TypedefField):
- variable_width.append(f"{f.id}_.GetSize()")
- elif isinstance(f, ast.ArrayField):
- variable_width.append(f"Get{to_pascal_case(f.id)}Size()")
- else:
- raise Exception("Unsupported field type")
-
- constant_width = int(constant_width / 8)
- if constant_width and not variable_width:
- return str(constant_width)
-
- temp_var = f'{payload.parent.id.lower()}_payload_size'
- self.append_(f"size_t {temp_var} = {constant_width};")
- for dyn in variable_width:
- self.append_(f"{temp_var} += {dyn};")
- return temp_var
-
- def serialize_array_element_(self, field: ast.ArrayField, var: str):
- """Serialize a single array field element."""
- if field.width:
- backing_type = get_cxx_scalar_type(field.width)
- element_size = int(field.width / 8)
- self.append_(
- f"pdl::packet::Builder::write_{self.byteorder}<{backing_type}, {element_size}>(output, {var});")
- elif isinstance(field.type, ast.EnumDeclaration):
- backing_type = get_cxx_scalar_type(field.type.width)
- element_size = int(field.type.width / 8)
- self.append_(f"pdl::packet::Builder::write_{self.byteorder}<{backing_type}, {element_size}>(" +
- f"output, static_cast<{backing_type}>({var}));")
- else:
- self.append_(f"{var}.Serialize(output);")
-
- def serialize_array_field_(self, field: ast.ArrayField, var: str):
- """Serialize the selected array field."""
- if field.padded_size:
- self.append_(f"size_t {field.id}_end = output.size() + {field.padded_size};")
-
- if field.width == 8:
- self.append_(f"output.insert(output.end(), {var}.begin(), {var}.end());")
- else:
- self.append_(f"for (size_t n = 0; n < {var}.size(); n++) {{")
- self.indent_()
- self.serialize_array_element_(field, f'{var}[n]')
- self.unindent_()
- self.append_("}")
-
- if field.padded_size:
- self.append_(f"while (output.size() < {field.id}_end) {{")
- self.append_(" output.push_back(0);")
- self.append_("}")
-
- def serialize_bit_field_(self, field: ast.Field, parent_var: Optional[str], var: Optional[str],
- decl: ast.Declaration):
- """Serialize the selected field as a bit field.
- The field is added to the current chunk. When a byte boundary
- is reached all saved fields are serialized together."""
-
- # Add to current chunk.
- width = core.get_field_size(field)
- shift = self.shift
-
- if isinstance(field, ast.ScalarField):
- self.value.append((f"{var} & {mask(field.width)}", shift))
- elif isinstance(field, ast.FixedField) and field.enum_id:
- self.value.append((f"{field.enum_id}::{field.tag_id}", shift))
- elif isinstance(field, ast.FixedField):
- self.value.append((f"{field.value}", shift))
- elif isinstance(field, ast.TypedefField):
- self.value.append((f"{var}", shift))
-
- elif isinstance(field, ast.SizeField):
- max_size = (1 << field.width) - 1
- value_field = core.get_packet_field(field.parent, field.field_id)
- size_modifier = ''
-
- if getattr(value_field, 'size_modifier', None):
- size_modifier = f' + {value_field.size_modifier}'
-
- if isinstance(value_field, (ast.PayloadField, ast.BodyField)):
- array_size = self.get_payload_field_size(var, field, decl) + size_modifier
-
- elif isinstance(value_field, ast.ArrayField):
- accessor_name = to_pascal_case(field.field_id)
- array_size = deref(var, f'Get{accessor_name}Size()') + size_modifier
-
- self.value.append((f"{array_size}", shift))
-
- elif isinstance(field, ast.CountField):
- max_count = (1 << field.width) - 1
- self.value.append((f"{field.field_id}_.size()", shift))
-
- elif isinstance(field, ast.ReservedField):
- pass
- else:
- raise Exception(f'Unsupported bit field type {field.kind}')
-
- # Check if a byte boundary is reached.
- self.shift += width
- if (self.shift % 8) == 0:
- self.pack_bit_fields_()
-
- def pack_bit_fields_(self):
- """Pack serialized bit fields."""
-
- # Should have an integral number of bytes now.
- assert (self.shift % 8) == 0
-
- # Generate the backing integer, and serialize it
- # using the configured endianness.
- size = int(self.shift / 8)
- backing_type = get_cxx_scalar_type(self.shift)
- value = [f"(static_cast<{backing_type}>({v[0]}) << {v[1]})" for v in self.value]
-
- if len(value) == 0:
- self.append_(f"pdl::packet::Builder::write_{self.byteorder}<{backing_type}, {size}>(output, 0);")
- elif len(value) == 1:
- self.append_(f"pdl::packet::Builder::write_{self.byteorder}<{backing_type}, {size}>(output, {value[0]});")
- else:
- self.append_(
- f"pdl::packet::Builder::write_{self.byteorder}<{backing_type}, {size}>(output, {' | '.join(value)});")
-
- # Reset state.
- self.shift = 0
- self.value = []
-
- def serialize_typedef_field_(self, field: ast.TypedefField, var: str):
- """Serialize a typedef field, to the exclusion of Enum fields."""
-
- if self.shift != 0:
- raise Exception('Typedef field does not start on an octet boundary')
- if (isinstance(field.type, ast.StructDeclaration) and field.type.parent_id is not None):
- raise Exception('Derived struct used in typedef field')
-
- self.append_(f"{var}.Serialize(output);")
-
- def serialize_payload_field_(self, field: Union[ast.BodyField, ast.PayloadField], var: str):
- """Serialize body and payload fields."""
-
- if self.shift != 0:
- raise Exception('Payload field does not start on an octet boundary')
-
- self.append_(f"output.insert(output.end(), {var}.begin(), {var}.end());")
-
- def serialize(self, field: ast.Field, decl: ast.Declaration, var: Optional[str] = None):
- field_var = deref(var, f'{field.id}_') if hasattr(field, 'id') else None
-
- # Field has bit granularity.
- # Append the field to the current chunk,
- # check if a byte boundary was reached.
- if core.is_bit_field(field):
- self.serialize_bit_field_(field, var, field_var, decl)
-
- # Padding fields.
- elif isinstance(field, ast.PaddingField):
- pass
-
- # Array fields.
- elif isinstance(field, ast.ArrayField):
- self.serialize_array_field_(field, field_var)
-
- # Other typedef fields.
- elif isinstance(field, ast.TypedefField):
- self.serialize_typedef_field_(field, field_var)
-
- # Payload and body fields.
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- self.serialize_payload_field_(field, deref(var, 'payload_'))
-
- else:
- raise Exception(f'Unimplemented field type {field.kind}')
-
-
-def generate_enum_declaration(decl: ast.EnumDeclaration) -> str:
- """Generate the implementation of an enum type."""
-
- enum_name = decl.id
- enum_type = get_cxx_scalar_type(decl.width)
- tag_decls = []
- for t in decl.tags:
- tag_decls.append(f"{t.id} = {hex(t.value)},")
-
- return dedent("""\
-
- enum class {enum_name} : {enum_type} {{
- {tag_decls}
- }};
- """).format(enum_name=enum_name, enum_type=enum_type, tag_decls=indent(tag_decls, 1))
-
-
-def generate_enum_to_text(decl: ast.EnumDeclaration) -> str:
- """Generate the helper function that will convert an enum tag to string."""
-
- enum_name = decl.id
- tag_cases = []
- for t in decl.tags:
- tag_cases.append(f"case {enum_name}::{t.id}: return \"{t.id}\";")
-
- return dedent("""\
-
- inline std::string {enum_name}Text({enum_name} tag) {{
- switch (tag) {{
- {tag_cases}
- default:
- return std::string("Unknown {enum_name}: " +
- std::to_string(static_cast<uint64_t>(tag)));
- }}
- }}
- """).format(enum_name=enum_name, tag_cases=indent(tag_cases, 2))
-
-
-def generate_packet_field_members(decl: ast.Declaration, view: bool) -> List[str]:
- """Return the declaration of fields that are backed in the view
- class declaration.
-
- Backed fields include all named fields that do not have a constrained
- value in the selected declaration and its parents.
-
- :param decl: target declaration
- :param view: if true the payload and array fields are generated as slices"""
-
- fields = core.get_unconstrained_parent_fields(decl) + decl.fields
- members = []
- for field in fields:
- if isinstance(field, (ast.PayloadField, ast.BodyField)) and view:
- members.append("pdl::packet::slice payload_;")
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- members.append("std::vector<uint8_t> payload_;")
- elif isinstance(field, ast.ArrayField) and view:
- members.append(f"pdl::packet::slice {field.id}_;")
- elif isinstance(field, ast.ArrayField):
- element_type = field.type_id or get_cxx_scalar_type(field.width)
- members.append(f"std::vector<{element_type}> {field.id}_;")
- elif isinstance(field, ast.ScalarField):
- members.append(f"{get_cxx_scalar_type(field.width)} {field.id}_{{0}};")
- elif isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration):
- members.append(f"{field.type_id} {field.id}_{{{field.type_id}::{field.type.tags[0].id}}};")
- elif isinstance(field, ast.TypedefField):
- members.append(f"{field.type_id} {field.id}_;")
-
- return members
-
-
-def generate_packet_field_serializers(packet: ast.Declaration) -> List[str]:
- """Generate the code to serialize the fields of a packet builder or struct."""
- serializer = FieldSerializer(byteorder=packet.file.byteorder_short)
- constraints = core.get_parent_constraints(packet)
- constraints = dict([(c.id, c) for c in constraints])
- for field in core.get_packet_fields(packet):
- field_id = getattr(field, 'id', None)
- constraint = constraints.get(field_id, None)
- fixed_field = None
- if constraint and constraint.tag_id:
- fixed_field = ast.FixedField(enum_id=field.type_id,
- tag_id=constraint.tag_id,
- loc=field.loc,
- kind='fixed_field')
- fixed_field.parent = field.parent
- elif constraint:
- fixed_field = ast.FixedField(width=field.width, value=constraint.value, loc=field.loc, kind='fixed_field')
- fixed_field.parent = field.parent
- serializer.serialize(fixed_field or field, packet)
- return serializer.code
-
-
-def generate_scalar_array_field_accessor(field: ast.ArrayField) -> str:
- """Parse the selected scalar array field."""
- element_size = int(field.width / 8)
- backing_type = get_cxx_scalar_type(field.width)
- byteorder = field.parent.file.byteorder_short
- return dedent("""\
- pdl::packet::slice span = {field_id}_;
- std::vector<{backing_type}> elements;
- while (span.size() >= {element_size}) {{
- elements.push_back(span.read_{byteorder}<{backing_type}, {element_size}>());
- }}
- return elements;""").format(field_id=field.id,
- backing_type=backing_type,
- element_size=element_size,
- byteorder=byteorder)
-
-
-def generate_enum_array_field_accessor(field: ast.ArrayField) -> str:
- """Parse the selected enum array field."""
- element_size = int(field.type.width / 8)
- backing_type = get_cxx_scalar_type(field.type.width)
- byteorder = field.parent.file.byteorder_short
- return dedent("""\
- pdl::packet::slice span = {field_id}_;
- std::vector<{enum_type}> elements;
- while (span.size() >= {element_size}) {{
- elements.push_back({enum_type}(span.read_{byteorder}<{backing_type}, {element_size}>()));
- }}
- return elements;""").format(field_id=field.id,
- enum_type=field.type_id,
- backing_type=backing_type,
- element_size=element_size,
- byteorder=byteorder)
-
-
-def generate_typedef_array_field_accessor(field: ast.ArrayField) -> str:
- """Parse the selected typedef array field."""
- return dedent("""\
- pdl::packet::slice span = {field_id}_;
- std::vector<{struct_type}> elements;
- for (;;) {{
- {struct_type} element;
- if (!{struct_type}::Parse(span, &element)) {{
- break;
- }}
- elements.emplace_back(std::move(element));
- }}
- return elements;""").format(field_id=field.id, struct_type=field.type_id)
-
-
-def generate_array_field_accessor(field: ast.ArrayField):
- """Parse the selected array field."""
-
- if field.width is not None:
- return generate_scalar_array_field_accessor(field)
- elif isinstance(field.type, ast.EnumDeclaration):
- return generate_enum_array_field_accessor(field)
- else:
- return generate_typedef_array_field_accessor(field)
-
-
-def generate_array_field_size_getters(decl: ast.Declaration) -> str:
- """Generate size getters for array fields. Produces the serialized
- size of the array in bytes."""
-
- getters = []
- fields = core.get_unconstrained_parent_fields(decl) + decl.fields
- for field in fields:
- if not isinstance(field, ast.ArrayField):
- continue
-
- element_width = field.width or core.get_declaration_size(field.type)
- size = None
-
- if element_width and field.size:
- size = int(element_width * field.size / 8)
- elif element_width:
- size = f"{field.id}_.size() * {int(element_width / 8)}"
-
- if size:
- getters.append(
- dedent("""\
- size_t Get{accessor_name}Size() const {{
- return {size};
- }}
- """).format(accessor_name=to_pascal_case(field.id), size=size))
- else:
- getters.append(
- dedent("""\
- size_t Get{accessor_name}Size() const {{
- size_t array_size = 0;
- for (size_t n = 0; n < {field_id}_.size(); n++) {{
- array_size += {field_id}_[n].GetSize();
- }}
- return array_size;
- }}
- """).format(accessor_name=to_pascal_case(field.id), field_id=field.id))
-
- return '\n'.join(getters)
-
-
-def generate_packet_size_getter(decl: ast.Declaration) -> List[str]:
- """Generate a size getter the current packet. Produces the serialized
- size of the packet in bytes."""
-
- constant_width = 0
- variable_width = []
- for f in core.get_packet_fields(decl):
- field_size = core.get_field_size(f)
- if field_size is not None:
- constant_width += field_size
- elif isinstance(f, (ast.PayloadField, ast.BodyField)):
- variable_width.append("payload_.size()")
- elif isinstance(f, ast.TypedefField):
- variable_width.append(f"{f.id}_.GetSize()")
- elif isinstance(f, ast.ArrayField):
- variable_width.append(f"Get{to_pascal_case(f.id)}Size()")
- else:
- raise Exception("Unsupported field type")
-
- constant_width = int(constant_width / 8)
- if not variable_width:
- return [f"return {constant_width};"]
- elif len(variable_width) == 1 and constant_width:
- return [f"return {variable_width[0]} + {constant_width};"]
- elif len(variable_width) == 1:
- return [f"return {variable_width[0]};"]
- elif len(variable_width) > 1 and constant_width:
- return ([f"return {constant_width} + ("] + " +\n ".join(variable_width).split("\n") + [");"])
- elif len(variable_width) > 1:
- return (["return ("] + " +\n ".join(variable_width).split("\n") + [");"])
- else:
- assert False
-
-
-def generate_packet_view_field_accessors(packet: ast.PacketDeclaration) -> str:
- """Return the declaration of accessors for the named packet fields."""
-
- accessors = []
-
- # Add accessors for the backed fields.
- fields = core.get_unconstrained_parent_fields(packet) + packet.fields
- for field in fields:
- if isinstance(field, (ast.PayloadField, ast.BodyField)):
- accessors.append(
- dedent("""\
- std::vector<uint8_t> GetPayload() const {
- ASSERT(valid_);
- return payload_.bytes();
- }
-
- """))
- elif isinstance(field, ast.ArrayField):
- element_type = field.type_id or get_cxx_scalar_type(field.width)
- accessor_name = to_pascal_case(field.id)
- accessors.append(
- dedent("""\
- std::vector<{element_type}> Get{accessor_name}() const {{
- ASSERT(valid_);
- {accessor}
- }}
-
- """).format(element_type=element_type,
- accessor_name=accessor_name,
- accessor=indent(generate_array_field_accessor(field), 1)))
- elif isinstance(field, ast.ScalarField):
- field_type = get_cxx_scalar_type(field.width)
- accessor_name = to_pascal_case(field.id)
- accessors.append(
- dedent("""\
- {field_type} Get{accessor_name}() const {{
- ASSERT(valid_);
- return {member_name}_;
- }}
-
- """).format(field_type=field_type, accessor_name=accessor_name, member_name=field.id))
- elif isinstance(field, ast.TypedefField):
- field_qualifier = "" if isinstance(field.type, ast.EnumDeclaration) else " const&"
- accessor_name = to_pascal_case(field.id)
- accessors.append(
- dedent("""\
- {field_type}{field_qualifier} Get{accessor_name}() const {{
- ASSERT(valid_);
- return {member_name}_;
- }}
-
- """).format(field_type=field.type_id,
- field_qualifier=field_qualifier,
- accessor_name=accessor_name,
- member_name=field.id))
-
- # Add accessors for constrained parent fields.
- # The accessors return a constant value in this case.
- for c in core.get_parent_constraints(packet):
- field = core.get_packet_field(packet, c.id)
- if isinstance(field, ast.ScalarField):
- field_type = get_cxx_scalar_type(field.width)
- accessor_name = to_pascal_case(field.id)
- accessors.append(
- dedent("""\
- {field_type} Get{accessor_name}() const {{
- return {value};
- }}
-
- """).format(field_type=field_type, accessor_name=accessor_name, value=c.value))
- else:
- accessor_name = to_pascal_case(field.id)
- accessors.append(
- dedent("""\
- {field_type} Get{accessor_name}() const {{
- return {field_type}::{tag_id};
- }}
-
- """).format(field_type=field.type_id, accessor_name=accessor_name, tag_id=c.tag_id))
-
- return "".join(accessors)
-
-
-def generate_packet_stringifier(packet: ast.PacketDeclaration) -> str:
- """Generate the packet printer. TODO """
- return dedent("""\
- std::string ToString() const {
- return "";
- }
- """)
-
-
-def generate_packet_view_field_parsers(packet: ast.PacketDeclaration) -> str:
- """Generate the packet parser. The validator will extract
- the fields it can in a pre-parsing phase. """
-
- code = []
-
- # Generate code to check the validity of the parent,
- # and import parent fields that do not have a fixed value in the
- # current packet.
- if packet.parent:
- code.append(
- dedent("""\
- // Check validity of parent packet.
- if (!parent.IsValid()) {
- return false;
- }
- """))
- parent_fields = core.get_unconstrained_parent_fields(packet)
- if parent_fields:
- code.append("// Copy parent field values.")
- for f in parent_fields:
- code.append(f"{f.id}_ = parent.{f.id}_;")
- code.append("")
- span = "parent.payload_"
- else:
- span = "parent"
-
- # Validate parent constraints.
- for c in packet.constraints:
- if c.tag_id:
- enum_type = core.get_packet_field(packet.parent, c.id).type_id
- code.append(
- dedent("""\
- if (parent.{field_id}_ != {enum_type}::{tag_id}) {{
- return false;
- }}
- """).format(field_id=c.id, enum_type=enum_type, tag_id=c.tag_id))
- else:
- code.append(
- dedent("""\
- if (parent.{field_id}_ != {value}) {{
- return false;
- }}
- """).format(field_id=c.id, value=c.value))
-
- # Parse fields linearly.
- if packet.fields:
- code.append("// Parse packet field values.")
- code.append(f"pdl::packet::slice span = {span};")
- for f in packet.fields:
- if isinstance(f, ast.SizeField):
- code.append(f"{get_cxx_scalar_type(f.width)} {f.field_id}_size;")
- elif isinstance(f, ast.CountField):
- code.append(f"{get_cxx_scalar_type(f.width)} {f.field_id}_count;")
- parser = FieldParser(extract_arrays=False, byteorder=packet.file.byteorder_short)
- for f in packet.fields:
- parser.parse(f)
- parser.done()
- code.extend(parser.code)
-
- code.append("return true;")
- return '\n'.join(code)
-
-
-def generate_packet_view_friend_classes(packet: ast.PacketDeclaration) -> List[str]:
- """Generate the list of friend declarations for a packet.
- These are the direct children of the class."""
-
- return [f"friend class {decl.id}View;" for (_, decl) in core.get_derived_packets(packet, traverse=False)]
-
-
-def generate_packet_view(packet: ast.PacketDeclaration) -> str:
- """Generate the implementation of the View class for a
- packet declaration."""
-
- parent_class = f"{packet.parent.id}View" if packet.parent else "pdl::packet::slice"
- field_members = generate_packet_field_members(packet, view=True)
- field_accessors = generate_packet_view_field_accessors(packet)
- field_parsers = generate_packet_view_field_parsers(packet)
- friend_classes = generate_packet_view_friend_classes(packet)
- stringifier = generate_packet_stringifier(packet)
-
- return dedent("""\
-
- class {packet_name}View {{
- public:
- static {packet_name}View Create({parent_class} const& parent) {{
- return {packet_name}View(parent);
- }}
-
- {field_accessors}
- {stringifier}
-
- bool IsValid() const {{
- return valid_;
- }}
-
- protected:
- explicit {packet_name}View({parent_class} const& parent) {{
- valid_ = Parse(parent);
- }}
-
- bool Parse({parent_class} const& parent) {{
- {field_parsers}
- }}
-
- bool valid_{{false}};
- {field_members}
-
- {friend_classes}
- }};
- """).format(packet_name=packet.id,
- parent_class=parent_class,
- field_accessors=indent(field_accessors, 1),
- field_members=indent(field_members, 1),
- field_parsers=indent(field_parsers, 2),
- friend_classes=indent(friend_classes, 1),
- stringifier=indent(stringifier, 1))
-
-
-def generate_packet_constructor(struct: ast.StructDeclaration, constructor_name: str) -> str:
- """Generate the implementation of the constructor for a
- struct declaration."""
-
- constructor_params = []
- constructor_initializers = []
- fields = core.get_unconstrained_parent_fields(struct) + struct.fields
-
- for field in fields:
- if isinstance(field, (ast.PayloadField, ast.BodyField)):
- constructor_params.append("std::vector<uint8_t> payload")
- constructor_initializers.append("payload_(std::move(payload))")
- elif isinstance(field, ast.ArrayField):
- element_type = field.type_id or get_cxx_scalar_type(field.width)
- constructor_params.append(f"std::vector<{element_type}> {field.id}")
- constructor_initializers.append(f"{field.id}_(std::move({field.id}))")
- elif isinstance(field, ast.ScalarField):
- backing_type = get_cxx_scalar_type(field.width)
- constructor_params.append(f"{backing_type} {field.id}")
- constructor_initializers.append(f"{field.id}_({field.id})")
- elif (isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration)):
- constructor_params.append(f"{field.type_id} {field.id}")
- constructor_initializers.append(f"{field.id}_({field.id})")
- elif isinstance(field, ast.TypedefField):
- constructor_params.append(f"{field.type_id} {field.id}")
- constructor_initializers.append(f"{field.id}_(std::move({field.id}))")
-
- if not constructor_params:
- return ""
-
- explicit = 'explicit ' if len(constructor_params) == 1 else ''
- constructor_params = ', '.join(constructor_params)
- constructor_initializers = ', '.join(constructor_initializers)
-
- return dedent("""\
- {explicit}{constructor_name}({constructor_params})
- : {constructor_initializers} {{}}""").format(explicit=explicit,
- constructor_name=constructor_name,
- constructor_params=constructor_params,
- constructor_initializers=constructor_initializers)
-
-
-def generate_packet_builder(packet: ast.PacketDeclaration) -> str:
- """Generate the implementation of the Builder class for a
- packet declaration."""
-
- class_name = f'{packet.id}Builder'
- builder_constructor = generate_packet_constructor(packet, constructor_name=class_name)
- field_members = generate_packet_field_members(packet, view=False)
- field_serializers = generate_packet_field_serializers(packet)
- size_getter = generate_packet_size_getter(packet)
- array_field_size_getters = generate_array_field_size_getters(packet)
-
- return dedent("""\
-
- class {class_name} : public pdl::packet::Builder {{
- public:
- ~{class_name}() override = default;
- {class_name}() = default;
- {class_name}({class_name} const&) = default;
- {class_name}({class_name}&&) = default;
- {class_name}& operator=({class_name} const&) = default;
- {builder_constructor}
-
- void Serialize(std::vector<uint8_t>& output) const override {{
- {field_serializers}
- }}
-
- size_t GetSize() const override {{
- {size_getter}
- }}
-
- {array_field_size_getters}
- {field_members}
- }};
- """).format(class_name=f'{packet.id}Builder',
- builder_constructor=builder_constructor,
- field_members=indent(field_members, 1),
- field_serializers=indent(field_serializers, 2),
- size_getter=indent(size_getter, 1),
- array_field_size_getters=indent(array_field_size_getters, 1))
-
-
-def generate_struct_field_parsers(struct: ast.StructDeclaration) -> str:
- """Generate the struct parser. The validator will extract
- the fields it can in a pre-parsing phase. """
-
- code = []
- parsed_fields = []
- post_processing = []
-
- for field in struct.fields:
- if isinstance(field, (ast.PayloadField, ast.BodyField)):
- code.append("std::vector<uint8_t> payload_;")
- parsed_fields.append("std::move(payload_)")
- elif isinstance(field, ast.ArrayField):
- element_type = field.type_id or get_cxx_scalar_type(field.width)
- code.append(f"std::vector<{element_type}> {field.id}_;")
- parsed_fields.append(f"std::move({field.id}_)")
- elif isinstance(field, ast.ScalarField):
- backing_type = get_cxx_scalar_type(field.width)
- code.append(f"{backing_type} {field.id}_;")
- parsed_fields.append(f"{field.id}_")
- elif (isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration)):
- code.append(f"{field.type_id} {field.id}_;")
- parsed_fields.append(f"{field.id}_")
- elif isinstance(field, ast.TypedefField):
- code.append(f"{field.type_id} {field.id}_;")
- parsed_fields.append(f"std::move({field.id}_)")
- elif isinstance(field, ast.SizeField):
- code.append(f"{get_cxx_scalar_type(field.width)} {field.field_id}_size;")
- elif isinstance(field, ast.CountField):
- code.append(f"{get_cxx_scalar_type(field.width)} {field.field_id}_count;")
-
- parser = FieldParser(extract_arrays=True, byteorder=struct.file.byteorder_short)
- for f in struct.fields:
- parser.parse(f)
- parser.done()
- code.extend(parser.code)
-
- parsed_fields = ', '.join(parsed_fields)
- code.append(f"*output = {struct.id}({parsed_fields});")
- code.append("return true;")
- return '\n'.join(code)
-
-
-def generate_struct_declaration(struct: ast.StructDeclaration) -> str:
- """Generate the implementation of the class for a
- struct declaration."""
-
- if struct.parent:
- raise Exception("Struct declaration with parents are not supported")
-
- struct_constructor = generate_packet_constructor(struct, constructor_name=struct.id)
- field_members = generate_packet_field_members(struct, view=False)
- field_parsers = generate_struct_field_parsers(struct)
- field_serializers = generate_packet_field_serializers(struct)
- size_getter = generate_packet_size_getter(struct)
- array_field_size_getters = generate_array_field_size_getters(struct)
- stringifier = generate_packet_stringifier(struct)
-
- return dedent("""\
-
- class {struct_name} : public pdl::packet::Builder {{
- public:
- ~{struct_name}() override = default;
- {struct_name}() = default;
- {struct_name}({struct_name} const&) = default;
- {struct_name}({struct_name}&&) = default;
- {struct_name}& operator=({struct_name} const&) = default;
- {struct_constructor}
-
- static bool Parse(pdl::packet::slice& span, {struct_name}* output) {{
- {field_parsers}
- }}
-
- void Serialize(std::vector<uint8_t>& output) const override {{
- {field_serializers}
- }}
-
- size_t GetSize() const override {{
- {size_getter}
- }}
-
- {array_field_size_getters}
- {stringifier}
- {field_members}
- }};
- """).format(struct_name=struct.id,
- struct_constructor=struct_constructor,
- field_members=indent(field_members, 1),
- field_parsers=indent(field_parsers, 2),
- field_serializers=indent(field_serializers, 2),
- stringifier=indent(stringifier, 1),
- size_getter=indent(size_getter, 1),
- array_field_size_getters=indent(array_field_size_getters, 1))
-
-
-def run(input: argparse.FileType, output: argparse.FileType, namespace: Optional[str], include_header: List[str],
- using_namespace: List[str]):
-
- file = ast.File.from_json(json.load(input))
- core.desugar(file)
-
- include_header = '\n'.join([f'#include <{header}>' for header in include_header])
- using_namespace = '\n'.join([f'using namespace {namespace};' for namespace in using_namespace])
- open_namespace = f"namespace {namespace} {{" if namespace else ""
- close_namespace = f"}} // {namespace}" if namespace else ""
-
- # Disable unsupported features in the canonical test suite.
- skipped_decls = [
- 'Packet_Custom_Field_ConstantSize',
- 'Packet_Custom_Field_VariableSize',
- 'Packet_Checksum_Field_FromStart',
- 'Packet_Checksum_Field_FromEnd',
- 'Struct_Custom_Field_ConstantSize',
- 'Struct_Custom_Field_VariableSize',
- 'Struct_Checksum_Field_FromStart',
- 'Struct_Checksum_Field_FromEnd',
- 'Struct_Custom_Field_ConstantSize_',
- 'Struct_Custom_Field_VariableSize_',
- 'Struct_Checksum_Field_FromStart_',
- 'Struct_Checksum_Field_FromEnd_',
- 'PartialParent5',
- 'PartialChild5_A',
- 'PartialChild5_B',
- 'PartialParent12',
- 'PartialChild12_A',
- 'PartialChild12_B',
- ]
-
- output.write(
- dedent("""\
- // File generated from {input_name}, with the command:
- // {input_command}
- // /!\\ Do not edit by hand
-
- #pragma once
-
- #include <cstdint>
- #include <string>
- #include <utility>
- #include <vector>
-
- #include <packet_runtime.h>
-
- {include_header}
- {using_namespace}
-
- #ifndef ASSERT
- #include <cassert>
- #define ASSERT assert
- #endif // !ASSERT
-
- {open_namespace}
- """).format(input_name=input.name,
- input_command=' '.join(sys.argv),
- include_header=include_header,
- using_namespace=using_namespace,
- open_namespace=open_namespace))
-
- for d in file.declarations:
- if d.id in skipped_decls:
- continue
-
- if isinstance(d, ast.EnumDeclaration):
- output.write(generate_enum_declaration(d))
- output.write(generate_enum_to_text(d))
- elif isinstance(d, ast.PacketDeclaration):
- output.write(generate_packet_view(d))
- output.write(generate_packet_builder(d))
- elif isinstance(d, ast.StructDeclaration):
- output.write(generate_struct_declaration(d))
-
- output.write(f"{close_namespace}\n")
-
-
-def main() -> int:
- """Generate cxx PDL backend."""
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--input', type=argparse.FileType('r'), default=sys.stdin, help='Input PDL-JSON source')
- parser.add_argument('--output', type=argparse.FileType('w'), default=sys.stdout, help='Output C++ file')
- parser.add_argument('--namespace', type=str, help='Generated module namespace')
- parser.add_argument('--include-header', type=str, default=[], action='append', help='Added include directives')
- parser.add_argument('--using-namespace',
- type=str,
- default=[],
- action='append',
- help='Added using namespace statements')
- return run(**vars(parser.parse_args()))
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/pdl/scripts/generate_cxx_backend_tests.py b/tools/pdl/scripts/generate_cxx_backend_tests.py
deleted file mode 100755
index 1f90600ac5..0000000000
--- a/tools/pdl/scripts/generate_cxx_backend_tests.py
+++ /dev/null
@@ -1,319 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-from dataclasses import dataclass, field
-import json
-from pathlib import Path
-import sys
-from textwrap import dedent
-from typing import List, Tuple, Union, Optional
-
-from pdl import ast, core
-from pdl.utils import indent, to_pascal_case
-
-
-def get_cxx_scalar_type(width: int) -> str:
- """Return the cxx scalar type to be used to back a PDL type."""
- for n in [8, 16, 32, 64]:
- if width <= n:
- return f'uint{n}_t'
- # PDL type does not fit on non-extended scalar types.
- assert False
-
-
-def generate_packet_parser_test(parser_test_suite: str, packet: ast.PacketDeclaration, tests: List[object]) -> str:
- """Generate the implementation of unit tests for the selected packet."""
-
- def parse_packet(packet: ast.PacketDeclaration) -> str:
- parent = parse_packet(packet.parent) if packet.parent else "input"
- return f"{packet.id}View::Create({parent})"
-
- def input_bytes(input: str) -> List[str]:
- input = bytes.fromhex(input)
- input_bytes = []
- for i in range(0, len(input), 16):
- input_bytes.append(' '.join(f'0x{b:x},' for b in input[i:i + 16]))
- return input_bytes
-
- def get_field(decl: ast.Declaration, var: str, id: str) -> str:
- if isinstance(decl, ast.StructDeclaration):
- return f"{var}.{id}_"
- else:
- return f"{var}.Get{to_pascal_case(id)}()"
-
- def check_members(decl: ast.Declaration, var: str, expected: object) -> List[str]:
- checks = []
- for (id, value) in expected.items():
- field = core.get_packet_field(decl, id)
- sanitized_var = var.replace('[', '_').replace(']', '')
- field_var = f'{sanitized_var}_{id}'
-
- if isinstance(field, ast.ScalarField):
- checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, {value});")
-
- elif (isinstance(field, ast.TypedefField) and
- isinstance(field.type, (ast.EnumDeclaration, ast.CustomFieldDeclaration, ast.ChecksumDeclaration))):
- checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, {field.type_id}({value}));")
-
- elif isinstance(field, ast.TypedefField):
- checks.append(f"{field.type_id} const& {field_var} = {get_field(decl, var, id)};")
- checks.extend(check_members(field.type, field_var, value))
-
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- checks.append(f"std::vector<uint8_t> expected_{field_var} {{")
- for i in range(0, len(value), 16):
- checks.append(' ' + ' '.join([f"0x{v:x}," for v in value[i:i + 16]]))
- checks.append("};")
- checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
-
- elif isinstance(field, ast.ArrayField) and field.width:
- checks.append(f"std::vector<{get_cxx_scalar_type(field.width)}> expected_{field_var} {{")
- step = int(16 * 8 / field.width)
- for i in range(0, len(value), step):
- checks.append(' ' + ' '.join([f"0x{v:x}," for v in value[i:i + step]]))
- checks.append("};")
- checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
-
- elif (isinstance(field, ast.ArrayField) and isinstance(field.type, ast.EnumDeclaration)):
- checks.append(f"std::vector<{field.type_id}> expected_{field_var} {{")
- for v in value:
- checks.append(f" {field.type_id}({v}),")
- checks.append("};")
- checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
-
- elif isinstance(field, ast.ArrayField):
- checks.append(f"std::vector<{field.type_id}> {field_var} = {get_field(decl, var, id)};")
- checks.append(f"ASSERT_EQ({field_var}.size(), {len(value)});")
- for (n, value) in enumerate(value):
- checks.extend(check_members(field.type, f"{field_var}[{n}]", value))
-
- else:
- pass
-
- return checks
-
- generated_tests = []
- for (test_nr, test) in enumerate(tests):
- child_packet_id = test.get('packet', packet.id)
- child_packet = packet.file.packet_scope[child_packet_id]
-
- generated_tests.append(
- dedent("""\
-
- TEST_F({parser_test_suite}, {packet_id}_Case{test_nr}) {{
- pdl::packet::slice input(std::shared_ptr<std::vector<uint8_t>>(new std::vector<uint8_t> {{
- {input_bytes}
- }}));
- {child_packet_id}View packet = {parse_packet};
- ASSERT_TRUE(packet.IsValid());
- {checks}
- }}
- """).format(parser_test_suite=parser_test_suite,
- packet_id=packet.id,
- child_packet_id=child_packet_id,
- test_nr=test_nr,
- input_bytes=indent(input_bytes(test['packed']), 2),
- parse_packet=parse_packet(child_packet),
- checks=indent(check_members(packet, 'packet', test['unpacked']), 1)))
-
- return ''.join(generated_tests)
-
-
-def generate_packet_serializer_test(serializer_test_suite: str, packet: ast.PacketDeclaration,
- tests: List[object]) -> str:
- """Generate the implementation of unit tests for the selected packet."""
-
- def build_packet(decl: ast.Declaration, var: str, initializer: object) -> (str, List[str]):
- fields = core.get_unconstrained_parent_fields(decl) + decl.fields
- declarations = []
- parameters = []
- for field in fields:
- sanitized_var = var.replace('[', '_').replace(']', '')
- field_id = getattr(field, 'id', None)
- field_var = f'{sanitized_var}_{field_id}'
- value = initializer['payload'] if isinstance(field, (ast.PayloadField,
- ast.BodyField)) else initializer.get(field_id, None)
-
- if isinstance(field, ast.ScalarField):
- parameters.append(f"{value}")
-
- elif isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration):
- parameters.append(f"{field.type_id}({value})")
-
- elif isinstance(field, ast.TypedefField):
- (element, intermediate_declarations) = build_packet(field.type, field_var, value)
- declarations.extend(intermediate_declarations)
- parameters.append(element)
-
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- declarations.append(f"std::vector<uint8_t> {field_var} {{")
- for i in range(0, len(value), 16):
- declarations.append(' ' + ' '.join([f"0x{v:x}," for v in value[i:i + 16]]))
- declarations.append("};")
- parameters.append(f"std::move({field_var})")
-
- elif isinstance(field, ast.ArrayField) and field.width:
- declarations.append(f"std::vector<{get_cxx_scalar_type(field.width)}> {field_var} {{")
- step = int(16 * 8 / field.width)
- for i in range(0, len(value), step):
- declarations.append(' ' + ' '.join([f"0x{v:x}," for v in value[i:i + step]]))
- declarations.append("};")
- parameters.append(f"std::move({field_var})")
-
- elif isinstance(field, ast.ArrayField) and isinstance(field.type, ast.EnumDeclaration):
- declarations.append(f"std::vector<{field.type_id}> {field_var} {{")
- for v in value:
- declarations.append(f" {field.type_id}({v}),")
- declarations.append("};")
- parameters.append(f"std::move({field_var})")
-
- elif isinstance(field, ast.ArrayField):
- elements = []
- for (n, value) in enumerate(value):
- (element, intermediate_declarations) = build_packet(field.type, f'{field_var}_{n}', value)
- elements.append(element)
- declarations.extend(intermediate_declarations)
- declarations.append(f"std::vector<{field.type_id}> {field_var} {{")
- for element in elements:
- declarations.append(f" {element},")
- declarations.append("};")
- parameters.append(f"std::move({field_var})")
-
- else:
- pass
-
- constructor_name = f'{decl.id}Builder' if isinstance(decl, ast.PacketDeclaration) else decl.id
- return (f"{constructor_name}({', '.join(parameters)})", declarations)
-
- def output_bytes(output: str) -> List[str]:
- output = bytes.fromhex(output)
- output_bytes = []
- for i in range(0, len(output), 16):
- output_bytes.append(' '.join(f'0x{b:x},' for b in output[i:i + 16]))
- return output_bytes
-
- generated_tests = []
- for (test_nr, test) in enumerate(tests):
- child_packet_id = test.get('packet', packet.id)
- child_packet = packet.file.packet_scope[child_packet_id]
-
- (built_packet, intermediate_declarations) = build_packet(child_packet, 'packet', test['unpacked'])
- generated_tests.append(
- dedent("""\
-
- TEST_F({serializer_test_suite}, {packet_id}_Case{test_nr}) {{
- std::vector<uint8_t> expected_output {{
- {output_bytes}
- }};
- {intermediate_declarations}
- {child_packet_id}Builder packet = {built_packet};
- ASSERT_EQ(packet.pdl::packet::Builder::Serialize(), expected_output);
- }}
- """).format(serializer_test_suite=serializer_test_suite,
- packet_id=packet.id,
- child_packet_id=child_packet_id,
- test_nr=test_nr,
- output_bytes=indent(output_bytes(test['packed']), 2),
- built_packet=built_packet,
- intermediate_declarations=indent(intermediate_declarations, 1)))
-
- return ''.join(generated_tests)
-
-
-def run(input: argparse.FileType, output: argparse.FileType, test_vectors: argparse.FileType, include_header: List[str],
- using_namespace: List[str], namespace: str, parser_test_suite: str, serializer_test_suite: str):
-
- file = ast.File.from_json(json.load(input))
- tests = json.load(test_vectors)
- core.desugar(file)
-
- include_header = '\n'.join([f'#include <{header}>' for header in include_header])
- using_namespace = '\n'.join([f'using namespace {namespace};' for namespace in using_namespace])
-
- skipped_tests = [
- 'Packet_Checksum_Field_FromStart',
- 'Packet_Checksum_Field_FromEnd',
- 'Struct_Checksum_Field_FromStart',
- 'Struct_Checksum_Field_FromEnd',
- 'PartialParent5',
- 'PartialParent12',
- ]
-
- output.write(
- dedent("""\
- // File generated from {input_name} and {test_vectors_name}, with the command:
- // {input_command}
- // /!\\ Do not edit by hand
-
- #include <cstdint>
- #include <string>
- #include <gtest/gtest.h>
- #include <packet_runtime.h>
-
- {include_header}
- {using_namespace}
-
- namespace {namespace} {{
-
- class {parser_test_suite} : public testing::Test {{}};
- class {serializer_test_suite} : public testing::Test {{}};
- """).format(parser_test_suite=parser_test_suite,
- serializer_test_suite=serializer_test_suite,
- input_name=input.name,
- input_command=' '.join(sys.argv),
- test_vectors_name=test_vectors.name,
- include_header=include_header,
- using_namespace=using_namespace,
- namespace=namespace))
-
- for decl in file.declarations:
- if decl.id in skipped_tests:
- continue
-
- if isinstance(decl, ast.PacketDeclaration):
- matching_tests = [test['tests'] for test in tests if test['packet'] == decl.id]
- matching_tests = [test for test_list in matching_tests for test in test_list]
- if matching_tests:
- output.write(generate_packet_parser_test(parser_test_suite, decl, matching_tests))
- output.write(generate_packet_serializer_test(serializer_test_suite, decl, matching_tests))
-
- output.write(f"}} // namespace {namespace}\n")
-
-
-def main() -> int:
- """Generate cxx PDL backend."""
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--input', type=argparse.FileType('r'), default=sys.stdin, help='Input PDL-JSON source')
- parser.add_argument('--output', type=argparse.FileType('w'), default=sys.stdout, help='Output C++ file')
- parser.add_argument('--test-vectors', type=argparse.FileType('r'), required=True, help='Input PDL test file')
- parser.add_argument('--namespace', type=str, default='pdl', help='Namespace of the generated file')
- parser.add_argument('--parser-test-suite', type=str, default='ParserTest', help='Name of the parser test suite')
- parser.add_argument('--serializer-test-suite',
- type=str,
- default='SerializerTest',
- help='Name of the serializer test suite')
- parser.add_argument('--include-header', type=str, default=[], action='append', help='Added include directives')
- parser.add_argument('--using-namespace',
- type=str,
- default=[],
- action='append',
- help='Added using namespace statements')
- return run(**vars(parser.parse_args()))
-
-
-if __name__ == '__main__':
- sys.exit(main())
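The test generator above is a plain argparse script, but its run() entry point can also be driven directly from Python. Below is a minimal sketch of such an invocation; the module name and all file paths are hypothetical, only the keyword arguments mirror the run() signature defined earlier in the file.

    import generate_cxx_backend_tests as gen  # assumed module name for the script above

    # Hypothetical inputs: a PDL-JSON dump and the matching test vectors.
    with open('le_test_file.json') as input_file, \
         open('le_test_vectors.json') as test_vectors, \
         open('le_backend_test.cc', 'w') as output_file:
        gen.run(input=input_file,
                output=output_file,
                test_vectors=test_vectors,
                include_header=['le_backend.h'],
                using_namespace=['pdl::test'],
                namespace='pdl::test',
                parser_test_suite='ParserTest',
                serializer_test_suite='SerializerTest')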
diff --git a/tools/pdl/scripts/generate_python_backend.py b/tools/pdl/scripts/generate_python_backend.py
deleted file mode 100755
index 3a1a82bb88..0000000000
--- a/tools/pdl/scripts/generate_python_backend.py
+++ /dev/null
@@ -1,1059 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-from dataclasses import dataclass, field
-import json
-from pathlib import Path
-import sys
-from textwrap import dedent
-from typing import List, Tuple, Union, Optional
-
-from pdl import ast, core
-from pdl.utils import indent
-
-
-def mask(width: int) -> str:
- return hex((1 << width) - 1)
-
-
-def generate_prelude() -> str:
- return dedent("""\
- from dataclasses import dataclass, field, fields
- from typing import Optional, List, Tuple
- import enum
- import inspect
- import math
-
- @dataclass
- class Packet:
- payload: Optional[bytes] = field(repr=False, default_factory=bytes, compare=False)
-
- @classmethod
- def parse_all(cls, span: bytes) -> 'Packet':
- packet, remain = getattr(cls, 'parse')(span)
- if len(remain) > 0:
- raise Exception('Unexpected parsing remainder')
- return packet
-
- @property
- def size(self) -> int:
- pass
-
- def show(self, prefix: str = ''):
- print(f'{self.__class__.__name__}')
-
- def print_val(p: str, pp: str, name: str, align: int, typ, val):
- if name == 'payload':
- pass
-
- # Scalar fields.
- elif typ is int:
- print(f'{p}{name:{align}} = {val} (0x{val:x})')
-
- # Byte fields.
- elif typ is bytes:
- print(f'{p}{name:{align}} = [', end='')
- line = ''
- n_pp = ''
- for (idx, b) in enumerate(val):
- if idx > 0 and idx % 8 == 0:
- print(f'{n_pp}{line}')
- line = ''
- n_pp = pp + (' ' * (align + 4))
- line += f' {b:02x}'
- print(f'{n_pp}{line} ]')
-
- # Enum fields.
- elif inspect.isclass(typ) and issubclass(typ, enum.IntEnum):
- print(f'{p}{name:{align}} = {typ.__name__}::{val.name} (0x{val:x})')
-
- # Struct fields.
- elif inspect.isclass(typ) and issubclass(typ, globals().get('Packet')):
- print(f'{p}{name:{align}} = ', end='')
- val.show(prefix=pp)
-
- # Array fields.
- elif getattr(typ, '__origin__', None) == list:
- print(f'{p}{name:{align}}')
- last = len(val) - 1
- align = 5
- for (idx, elt) in enumerate(val):
- n_p = pp + ('├── ' if idx != last else '└── ')
- n_pp = pp + ('│ ' if idx != last else ' ')
- print_val(n_p, n_pp, f'[{idx}]', align, typ.__args__[0], val[idx])
-
- # Custom fields.
- elif inspect.isclass(typ):
- print(f'{p}{name:{align}} = {repr(val)}')
-
- else:
- print(f'{p}{name:{align}} = ##{typ}##')
-
- last = len(fields(self)) - 1
- align = max(len(f.name) for f in fields(self) if f.name != 'payload')
-
- for (idx, f) in enumerate(fields(self)):
- p = prefix + ('├── ' if idx != last else '└── ')
- pp = prefix + ('│ ' if idx != last else ' ')
- val = getattr(self, f.name)
-
- print_val(p, pp, f.name, align, f.type, val)
- """)
-
-
-@dataclass
-class FieldParser:
- byteorder: str
- offset: int = 0
- shift: int = 0
- chunk: List[Tuple[int, int, ast.Field]] = field(default_factory=lambda: [])
- unchecked_code: List[str] = field(default_factory=lambda: [])
- code: List[str] = field(default_factory=lambda: [])
-
- def unchecked_append_(self, line: str):
- """Append unchecked field parsing code.
- The function check_code_ must be called to generate a size guard
- after parsing is completed."""
- self.unchecked_code.append(line)
-
- def append_(self, line: str):
- """Append field parsing code.
- There must be no unchecked code left before this function is called."""
- assert len(self.unchecked_code) == 0
- self.code.append(line)
-
- def check_size_(self, size: str):
- """Generate a check of the current span size."""
- self.append_(f"if len(span) < {size}:")
- self.append_(f" raise Exception('Invalid packet size')")
-
- def check_code_(self):
- """Generate a size check for pending field parsing."""
- if len(self.unchecked_code) > 0:
- assert len(self.chunk) == 0
- unchecked_code = self.unchecked_code
- self.unchecked_code = []
- self.check_size_(str(self.offset))
- self.code.extend(unchecked_code)
-
- def consume_span_(self, keep: int = 0) -> str:
- """Skip consumed span bytes."""
- if self.offset > 0:
- self.check_code_()
- self.append_(f'span = span[{self.offset - keep}:]')
- self.offset = 0
-
- def parse_array_element_dynamic_(self, field: ast.ArrayField, span: str):
- """Parse a single array field element of variable size."""
- if isinstance(field.type, ast.StructDeclaration):
- self.append_(f" element, {span} = {field.type_id}.parse({span})")
- self.append_(f" {field.id}.append(element)")
- else:
- raise Exception(f'Unexpected array element type {field.type_id} {field.width}')
-
- def parse_array_element_static_(self, field: ast.ArrayField, span: str):
- """Parse a single array field element of constant size."""
- if field.width is not None:
- element = f"int.from_bytes({span}, byteorder='{self.byteorder}')"
- self.append_(f" {field.id}.append({element})")
- elif isinstance(field.type, ast.EnumDeclaration):
- element = f"int.from_bytes({span}, byteorder='{self.byteorder}')"
- element = f"{field.type_id}({element})"
- self.append_(f" {field.id}.append({element})")
- else:
- element = f"{field.type_id}.parse_all({span})"
- self.append_(f" {field.id}.append({element})")
-
- def parse_byte_array_field_(self, field: ast.ArrayField):
- """Parse the selected u8 array field."""
- array_size = core.get_array_field_size(field)
- padded_size = field.padded_size
-
- # Shift the span to reset the offset to 0.
- self.consume_span_()
-
- # Derive the array size.
- if isinstance(array_size, int):
- size = array_size
- elif isinstance(array_size, ast.SizeField):
- size = f'{field.id}_size - {field.size_modifier}' if field.size_modifier else f'{field.id}_size'
- elif isinstance(array_size, ast.CountField):
- size = f'{field.id}_count'
- else:
- size = None
-
- # Parse from the padded array if padding is present.
- if padded_size and size is not None:
- self.check_size_(padded_size)
- self.append_(f"if {size} > {padded_size}:")
- self.append_(" raise Exception('Array size is larger than the padding size')")
- self.append_(f"fields['{field.id}'] = list(span[:{size}])")
- self.append_(f"span = span[{padded_size}:]")
-
- elif size is not None:
- self.check_size_(size)
- self.append_(f"fields['{field.id}'] = list(span[:{size}])")
- self.append_(f"span = span[{size}:]")
-
- else:
- self.append_(f"fields['{field.id}'] = list(span)")
- self.append_(f"span = bytes()")
-
- def parse_array_field_(self, field: ast.ArrayField):
- """Parse the selected array field."""
- array_size = core.get_array_field_size(field)
- element_width = core.get_array_element_size(field)
- padded_size = field.padded_size
-
- if element_width:
- if element_width % 8 != 0:
- raise Exception('Array element size is not a multiple of 8')
- element_width = int(element_width / 8)
-
- if isinstance(array_size, int):
- size = None
- count = array_size
- elif isinstance(array_size, ast.SizeField):
- size = f'{field.id}_size'
- count = None
- elif isinstance(array_size, ast.CountField):
- size = None
- count = f'{field.id}_count'
- else:
- size = None
- count = None
-
- # Shift the span to reset the offset to 0.
- self.consume_span_()
-
- # Apply the size modifier.
- if field.size_modifier and size:
- self.append_(f"{size} = {size} - {field.size_modifier}")
-
- # Parse from the padded array if padding is present.
- if padded_size:
- self.check_size_(padded_size)
- self.append_(f"remaining_span = span[{padded_size}:]")
- self.append_(f"span = span[:{padded_size}]")
-
- # The element width is not known, but the full octet size of the array
- # is known from the size field. Parse elements item by item as a vector.
- if element_width is None and size is not None:
- self.check_size_(size)
- self.append_(f"array_span = span[:{size}]")
- self.append_(f"{field.id} = []")
- self.append_("while len(array_span) > 0:")
- self.parse_array_element_dynamic_(field, 'array_span')
- self.append_(f"fields['{field.id}'] = {field.id}")
- self.append_(f"span = span[{size}:]")
-
- # The element width is not known, but the array element count
- # is known statically or from the count field.
- # Parse elements item by item as a vector.
- elif element_width is None and count is not None:
- self.append_(f"{field.id} = []")
- self.append_(f"for n in range({count}):")
- self.parse_array_element_dynamic_(field, 'span')
- self.append_(f"fields['{field.id}'] = {field.id}")
-
- # Neither the count nor the size is known,
- # parse elements until the end of the span.
- elif element_width is None:
- self.append_(f"{field.id} = []")
- self.append_("while len(span) > 0:")
- self.parse_array_element_dynamic_(field, 'span')
- self.append_(f"fields['{field.id}'] = {field.id}")
-
- # The element width is known, and the array element count is known
- # statically or from the count field.
- elif count is not None:
- array_size = (f'{count}' if element_width == 1 else f'{count} * {element_width}')
- self.check_size_(array_size)
- self.append_(f"{field.id} = []")
- self.append_(f"for n in range({count}):")
- span = ('span[n:n + 1]' if element_width == 1 else f'span[n * {element_width}:(n + 1) * {element_width}]')
- self.parse_array_element_static_(field, span)
- self.append_(f"fields['{field.id}'] = {field.id}")
- self.append_(f"span = span[{array_size}:]")
-
- # The element width is known, and the full size of the array is known
- # from the size field, or unknown (in which case it is the remaining span
- # length).
- else:
- if size is not None:
- self.check_size_(size)
- array_size = size or 'len(span)'
- if element_width != 1:
- self.append_(f"if {array_size} % {element_width} != 0:")
- self.append_(" raise Exception('Array size is not a multiple of the element size')")
- self.append_(f"{field.id}_count = int({array_size} / {element_width})")
- array_count = f'{field.id}_count'
- else:
- array_count = array_size
- self.append_(f"{field.id} = []")
- self.append_(f"for n in range({array_count}):")
- span = ('span[n:n + 1]' if element_width == 1 else f'span[n * {element_width}:(n + 1) * {element_width}]')
- self.parse_array_element_static_(field, span)
- self.append_(f"fields['{field.id}'] = {field.id}")
- if size is not None:
- self.append_(f"span = span[{size}:]")
- else:
- self.append_(f"span = bytes()")
-
- # Drop the padding
- if padded_size:
- self.append_(f"span = remaining_span")
-
- def parse_bit_field_(self, field: ast.Field):
- """Parse the selected field as a bit field.
- The field is added to the current chunk. When a byte boundary
- is reached all saved fields are extracted together."""
-
- # Add to current chunk.
- width = core.get_field_size(field)
- self.chunk.append((self.shift, width, field))
- self.shift += width
-
- # Wait for more fields if not on a byte boundary.
- if (self.shift % 8) != 0:
- return
-
- # Parse the backing integer using the configured endianness,
- # and extract the field values.
- size = int(self.shift / 8)
- end_offset = self.offset + size
-
- if size == 1:
- value = f"span[{self.offset}]"
- else:
- span = f"span[{self.offset}:{end_offset}]"
- self.unchecked_append_(f"value_ = int.from_bytes({span}, byteorder='{self.byteorder}')")
- value = "value_"
-
- for shift, width, field in self.chunk:
- v = (value if len(self.chunk) == 1 and shift == 0 else f"({value} >> {shift}) & {mask(width)}")
-
- if isinstance(field, ast.ScalarField):
- self.unchecked_append_(f"fields['{field.id}'] = {v}")
- elif isinstance(field, ast.FixedField) and field.enum_id:
- self.unchecked_append_(f"if {v} != {field.enum_id}.{field.tag_id}:")
- self.unchecked_append_(f" raise Exception('Unexpected fixed field value')")
- elif isinstance(field, ast.FixedField):
- self.unchecked_append_(f"if {v} != {hex(field.value)}:")
- self.unchecked_append_(f" raise Exception('Unexpected fixed field value')")
- elif isinstance(field, ast.TypedefField):
- self.unchecked_append_(f"fields['{field.id}'] = {field.type_id}({v})")
- elif isinstance(field, ast.SizeField):
- self.unchecked_append_(f"{field.field_id}_size = {v}")
- elif isinstance(field, ast.CountField):
- self.unchecked_append_(f"{field.field_id}_count = {v}")
- elif isinstance(field, ast.ReservedField):
- pass
- else:
- raise Exception(f'Unsupported bit field type {field.kind}')
-
- # Reset state.
- self.offset = end_offset
- self.shift = 0
- self.chunk = []
-
- def parse_typedef_field_(self, field: ast.TypedefField):
- """Parse a typedef field, to the exclusion of Enum fields."""
-
- if self.shift != 0:
- raise Exception('Typedef field does not start on an octet boundary')
- if (isinstance(field.type, ast.StructDeclaration) and field.type.parent_id is not None):
- raise Exception('Derived struct used in typedef field')
-
- width = core.get_declaration_size(field.type)
- if width is None:
- self.consume_span_()
- self.append_(f"{field.id}, span = {field.type_id}.parse(span)")
- self.append_(f"fields['{field.id}'] = {field.id}")
- else:
- if width % 8 != 0:
- raise Exception('Typedef field type size is not a multiple of 8')
- width = int(width / 8)
- end_offset = self.offset + width
- # Checksum value field is generated alongside checksum start.
- # Deal with this field as padding.
- if not isinstance(field.type, ast.ChecksumDeclaration):
- span = f'span[{self.offset}:{end_offset}]'
- self.unchecked_append_(f"fields['{field.id}'] = {field.type_id}.parse_all({span})")
- self.offset = end_offset
-
- def parse_payload_field_(self, field: Union[ast.BodyField, ast.PayloadField]):
- """Parse body and payload fields."""
-
- payload_size = core.get_payload_field_size(field)
- offset_from_end = core.get_field_offset_from_end(field)
-
- # If the payload is not byte aligned, do parse the bit fields
- # that can be extracted, but do not consume the input bytes as
- # they will also be included in the payload span.
- if self.shift != 0:
- if payload_size:
- raise Exception("Unexpected payload size for non byte aligned payload")
-
- rounded_size = int((self.shift + 7) / 8)
- padding_bits = 8 * rounded_size - self.shift
- self.parse_bit_field_(core.make_reserved_field(padding_bits))
- self.consume_span_(rounded_size)
- else:
- self.consume_span_()
-
- # The payload or body has a known size.
- # Consume the payload and update the span in case
- # fields are placed after the payload.
- if payload_size:
- if getattr(field, 'size_modifier', None):
- self.append_(f"{field.id}_size -= {field.size_modifier}")
- self.check_size_(f'{field.id}_size')
- self.append_(f"payload = span[:{field.id}_size]")
- self.append_(f"span = span[{field.id}_size:]")
- # The payload or body is the last field of a packet,
- # consume the remaining span.
- elif offset_from_end == 0:
- self.append_(f"payload = span")
- self.append_(f"span = bytes([])")
- # The payload or body is followed by fields of static size.
- # Consume the span that is not reserved for the following fields.
- elif offset_from_end is not None:
- if (offset_from_end % 8) != 0:
- raise Exception('Payload field offset from end of packet is not a multiple of 8')
- offset_from_end = int(offset_from_end / 8)
- self.check_size_(f'{offset_from_end}')
- self.append_(f"payload = span[:-{offset_from_end}]")
- self.append_(f"span = span[-{offset_from_end}:]")
- self.append_(f"fields['payload'] = payload")
-
- def parse_checksum_field_(self, field: ast.ChecksumField):
- """Generate a checksum check."""
-
- # The checksum value field can be read starting from the current
- # offset if the fields in between are of fixed size, or from the end
- # of the span otherwise.
- self.consume_span_()
- value_field = core.get_packet_field(field.parent, field.field_id)
- offset_from_start = 0
- offset_from_end = 0
- start_index = field.parent.fields.index(field)
- value_index = field.parent.fields.index(value_field)
- value_size = int(core.get_field_size(value_field) / 8)
-
- for f in field.parent.fields[start_index + 1:value_index]:
- size = core.get_field_size(f)
- if size is None:
- offset_from_start = None
- break
- else:
- offset_from_start += size
-
- trailing_fields = field.parent.fields[value_index:]
- trailing_fields.reverse()
- for f in trailing_fields:
- size = core.get_field_size(f)
- if size is None:
- offset_from_end = None
- break
- else:
- offset_from_end += size
-
- if offset_from_start is not None:
- if offset_from_start % 8 != 0:
- raise Exception('Checksum value field is not aligned to an octet boundary')
- offset_from_start = int(offset_from_start / 8)
- checksum_span = f'span[:{offset_from_start}]'
- if value_size > 1:
- start = offset_from_start
- end = offset_from_start + value_size
- value = f"int.from_bytes(span[{start}:{end}], byteorder='{self.byteorder}')"
- else:
- value = f'span[{offset_from_start}]'
- self.check_size_(offset_from_start + value_size)
-
- elif offset_from_end is not None:
- sign = ''
- if offset_from_end % 8 != 0:
- raise Exception('Checksum value field is not aligned to an octet boundary')
- offset_from_end = int(offset_from_end / 8)
- checksum_span = f'span[:-{offset_from_end}]'
- if value_size > 1:
- start = offset_from_end
- end = offset_from_end - value_size
- value = f"int.from_bytes(span[-{start}:-{end}], byteorder='{self.byteorder}')"
- else:
- value = f'span[-{offset_from_end}]'
- self.check_size_(offset_from_end)
-
- else:
- raise Exception('Checksum value field cannot be read at constant offset')
-
- self.append_(f"{value_field.id} = {value}")
- self.append_(f"fields['{value_field.id}'] = {value_field.id}")
- self.append_(f"computed_{value_field.id} = {value_field.type.function}({checksum_span})")
- self.append_(f"if computed_{value_field.id} != {value_field.id}:")
- self.append_(" raise Exception(f'Invalid checksum computation:" +
- f" {{computed_{value_field.id}}} != {{{value_field.id}}}')")
-
- def parse(self, field: ast.Field):
- # Field has bit granularity.
- # Append the field to the current chunk,
- # check if a byte boundary was reached.
- if core.is_bit_field(field):
- self.parse_bit_field_(field)
-
- # Padding fields.
- elif isinstance(field, ast.PaddingField):
- pass
-
- # Array fields.
- elif isinstance(field, ast.ArrayField) and field.width == 8:
- self.parse_byte_array_field_(field)
-
- elif isinstance(field, ast.ArrayField):
- self.parse_array_field_(field)
-
- # Other typedef fields.
- elif isinstance(field, ast.TypedefField):
- self.parse_typedef_field_(field)
-
- # Payload and body fields.
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- self.parse_payload_field_(field)
-
- # Checksum fields.
- elif isinstance(field, ast.ChecksumField):
- self.parse_checksum_field_(field)
-
- else:
- raise Exception(f'Unimplemented field type {field.kind}')
-
- def done(self):
- self.consume_span_()
-
-
-@dataclass
-class FieldSerializer:
- byteorder: str
- shift: int = 0
- value: List[str] = field(default_factory=lambda: [])
- code: List[str] = field(default_factory=lambda: [])
- indent: int = 0
-
- def indent_(self):
- self.indent += 1
-
- def unindent_(self):
- self.indent -= 1
-
- def append_(self, line: str):
- """Append field serializing code."""
- lines = line.split('\n')
- self.code.extend([' ' * self.indent + line for line in lines])
-
- def extend_(self, value: str, length: int):
- """Append data to the span being constructed."""
- if length == 1:
- self.append_(f"_span.append({value})")
- else:
- self.append_(f"_span.extend(int.to_bytes({value}, length={length}, byteorder='{self.byteorder}'))")
-
- def serialize_array_element_(self, field: ast.ArrayField):
- """Serialize a single array field element."""
- if field.width is not None:
- length = int(field.width / 8)
- self.extend_('_elt', length)
- elif isinstance(field.type, ast.EnumDeclaration):
- length = int(field.type.width / 8)
- self.extend_('_elt', length)
- else:
- self.append_("_span.extend(_elt.serialize())")
-
- def serialize_array_field_(self, field: ast.ArrayField):
- """Serialize the selected array field."""
- if field.padded_size:
- self.append_(f"_{field.id}_start = len(_span)")
-
- if field.width == 8:
- self.append_(f"_span.extend(self.{field.id})")
- else:
- self.append_(f"for _elt in self.{field.id}:")
- self.indent_()
- self.serialize_array_element_(field)
- self.unindent_()
-
- if field.padded_size:
- self.append_(f"_span.extend([0] * ({field.padded_size} - len(_span) + _{field.id}_start))")
-
- def serialize_bit_field_(self, field: ast.Field):
- """Serialize the selected field as a bit field.
- The field is added to the current chunk. When a byte boundary
- is reached all saved fields are serialized together."""
-
- # Add to current chunk.
- width = core.get_field_size(field)
- shift = self.shift
-
- if isinstance(field, str):
- self.value.append(f"({field} << {shift})")
- elif isinstance(field, ast.ScalarField):
- max_value = (1 << field.width) - 1
- self.append_(f"if self.{field.id} > {max_value}:")
- self.append_(f" print(f\"Invalid value for field {field.parent.id}::{field.id}:" +
- f" {{self.{field.id}}} > {max_value}; the value will be truncated\")")
- self.append_(f" self.{field.id} &= {max_value}")
- self.value.append(f"(self.{field.id} << {shift})")
- elif isinstance(field, ast.FixedField) and field.enum_id:
- self.value.append(f"({field.enum_id}.{field.tag_id} << {shift})")
- elif isinstance(field, ast.FixedField):
- self.value.append(f"({field.value} << {shift})")
- elif isinstance(field, ast.TypedefField):
- self.value.append(f"(self.{field.id} << {shift})")
-
- elif isinstance(field, ast.SizeField):
- max_size = (1 << field.width) - 1
- value_field = core.get_packet_field(field.parent, field.field_id)
- size_modifier = ''
-
- if getattr(value_field, 'size_modifier', None):
- size_modifier = f' + {value_field.size_modifier}'
-
- if isinstance(value_field, (ast.PayloadField, ast.BodyField)):
- self.append_(f"_payload_size = len(payload or self.payload or []){size_modifier}")
- self.append_(f"if _payload_size > {max_size}:")
- self.append_(f" print(f\"Invalid length for payload field:" +
- f" {{_payload_size}} > {max_size}; the packet cannot be generated\")")
- self.append_(f" raise Exception(\"Invalid payload length\")")
- array_size = "_payload_size"
- elif isinstance(value_field, ast.ArrayField) and value_field.width:
- array_size = f"(len(self.{value_field.id}) * {int(value_field.width / 8)}{size_modifier})"
- elif isinstance(value_field, ast.ArrayField) and isinstance(value_field.type, ast.EnumDeclaration):
- array_size = f"(len(self.{value_field.id}) * {int(value_field.type.width / 8)}{size_modifier})"
- elif isinstance(value_field, ast.ArrayField):
- self.append_(
- f"_{value_field.id}_size = sum([elt.size for elt in self.{value_field.id}]){size_modifier}")
- array_size = f"_{value_field.id}_size"
- else:
- raise Exception("Unsupported field type")
- self.value.append(f"({array_size} << {shift})")
-
- elif isinstance(field, ast.CountField):
- max_count = (1 << field.width) - 1
- self.append_(f"if len(self.{field.field_id}) > {max_count}:")
- self.append_(f" print(f\"Invalid length for field {field.parent.id}::{field.field_id}:" +
- f" {{len(self.{field.field_id})}} > {max_count}; the array will be truncated\")")
- self.append_(f" del self.{field.field_id}[{max_count}:]")
- self.value.append(f"(len(self.{field.field_id}) << {shift})")
- elif isinstance(field, ast.ReservedField):
- pass
- else:
- raise Exception(f'Unsupported bit field type {field.kind}')
-
- # Check if a byte boundary is reached.
- self.shift += width
- if (self.shift % 8) == 0:
- self.pack_bit_fields_()
-
- def pack_bit_fields_(self):
- """Pack serialized bit fields."""
-
- # Should have an integral number of bytes now.
- assert (self.shift % 8) == 0
-
- # Generate the backing integer, and serialize it
- # using the configured endianness.
- size = int(self.shift / 8)
-
- if len(self.value) == 0:
- self.append_(f"_span.extend([0] * {size})")
- elif len(self.value) == 1:
- self.extend_(self.value[0], size)
- else:
- self.append_(f"_value = (")
- self.append_(" " + " |\n ".join(self.value))
- self.append_(")")
- self.extend_('_value', size)
-
- # Reset state.
- self.shift = 0
- self.value = []
-
- def serialize_typedef_field_(self, field: ast.TypedefField):
- """Serialize a typedef field, to the exclusion of Enum fields."""
-
- if self.shift != 0:
- raise Exception('Typedef field does not start on an octet boundary')
- if (isinstance(field.type, ast.StructDeclaration) and field.type.parent_id is not None):
- raise Exception('Derived struct used in typedef field')
-
- if isinstance(field.type, ast.ChecksumDeclaration):
- size = int(field.type.width / 8)
- self.append_(f"_checksum = {field.type.function}(_span[_checksum_start:])")
- self.extend_('_checksum', size)
- else:
- self.append_(f"_span.extend(self.{field.id}.serialize())")
-
- def serialize_payload_field_(self, field: Union[ast.BodyField, ast.PayloadField]):
- """Serialize body and payload fields."""
-
- if self.shift != 0 and self.byteorder == 'big':
- raise Exception('Payload field does not start on an octet boundary')
-
- if self.shift == 0:
- self.append_(f"_span.extend(payload or self.payload or [])")
- else:
- # Supported case of packet inheritance;
- # the incomplete fields are serialized into
- # the payload, rather than separately.
- # First extract the padding bits from the payload,
- # then recombine them with the bit fields to be serialized.
- rounded_size = int((self.shift + 7) / 8)
- padding_bits = 8 * rounded_size - self.shift
- self.append_(f"_payload = payload or self.payload or bytes()")
- self.append_(f"if len(_payload) < {rounded_size}:")
- self.append_(f" raise Exception(f\"Invalid length for payload field:" +
- f" {{len(_payload)}} < {rounded_size}\")")
- self.append_(
- f"_padding = int.from_bytes(_payload[:{rounded_size}], byteorder='{self.byteorder}') >> {self.shift}")
- self.value.append(f"(_padding << {self.shift})")
- self.shift += padding_bits
- self.pack_bit_fields_()
- self.append_(f"_span.extend(_payload[{rounded_size}:])")
-
- def serialize_checksum_field_(self, field: ast.ChecksumField):
- """Generate a checksum check."""
-
- self.append_("_checksum_start = len(_span)")
-
- def serialize(self, field: ast.Field):
- # Field has bit granularity.
- # Append the field to the current chunk,
- # check if a byte boundary was reached.
- if core.is_bit_field(field):
- self.serialize_bit_field_(field)
-
- # Padding fields.
- elif isinstance(field, ast.PaddingField):
- pass
-
- # Array fields.
- elif isinstance(field, ast.ArrayField):
- self.serialize_array_field_(field)
-
- # Other typedef fields.
- elif isinstance(field, ast.TypedefField):
- self.serialize_typedef_field_(field)
-
- # Payload and body fields.
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- self.serialize_payload_field_(field)
-
- # Checksum fields.
- elif isinstance(field, ast.ChecksumField):
- self.serialize_checksum_field_(field)
-
- else:
- raise Exception(f'Unimplemented field type {field.kind}')
-
-
-def generate_toplevel_packet_serializer(packet: ast.Declaration) -> List[str]:
- """Generate the serialize() function for a toplevel Packet or Struct
- declaration."""
-
- serializer = FieldSerializer(byteorder=packet.file.byteorder)
- for f in packet.fields:
- serializer.serialize(f)
- return ['_span = bytearray()'] + serializer.code + ['return bytes(_span)']
-
-
-def generate_derived_packet_serializer(packet: ast.Declaration) -> List[str]:
- """Generate the serialize() function for a derived Packet or Struct
- declaration."""
-
- packet_shift = core.get_packet_shift(packet)
- if packet_shift and packet.file.byteorder == 'big':
- raise Exception(f"Big-endian packet {packet.id} has an unsupported body shift")
-
- serializer = FieldSerializer(byteorder=packet.file.byteorder, shift=packet_shift)
- for f in packet.fields:
- serializer.serialize(f)
- return ['_span = bytearray()'
- ] + serializer.code + [f'return {packet.parent.id}.serialize(self, payload = bytes(_span))']
-
-
-def generate_packet_parser(packet: ast.Declaration) -> List[str]:
- """Generate the parse() function for a toplevel Packet or Struct
- declaration."""
-
- packet_shift = core.get_packet_shift(packet)
- if packet_shift and packet.file.byteorder == 'big':
- raise Exception(f"Big-endian packet {packet.id} has an unsupported body shift")
-
- # Convert the packet constraints to a boolean expression.
- validation = []
- if packet.constraints:
- cond = []
- for c in packet.constraints:
- if c.value is not None:
- cond.append(f"fields['{c.id}'] != {hex(c.value)}")
- else:
- field = core.get_packet_field(packet, c.id)
- cond.append(f"fields['{c.id}'] != {field.type_id}.{c.tag_id}")
-
- validation = [f"if {' or '.join(cond)}:", " raise Exception(\"Invalid constraint field values\")"]
-
- # Parse fields iteratively.
- parser = FieldParser(byteorder=packet.file.byteorder, shift=packet_shift)
- for f in packet.fields:
- parser.parse(f)
- parser.done()
-
- # Specialize to child packets.
- children = core.get_derived_packets(packet)
- decl = [] if packet.parent_id else ['fields = {\'payload\': None}']
- specialization = []
-
- if len(children) != 0:
- # Try parsing every child packet successively until one is
- # successfully parsed. Fall back to returning the parent packet
- # if no child packet matches.
- # TODO: order child packets by decreasing size in case no constraint
- # is given for specialization.
- for _, child in children:
- specialization.append("try:")
- specialization.append(f" return {child.id}.parse(fields.copy(), payload)")
- specialization.append("except Exception as exn:")
- specialization.append(" pass")
-
- return decl + validation + parser.code + specialization + [f"return {packet.id}(**fields), span"]
-
-
-def generate_packet_size_getter(packet: ast.Declaration) -> List[str]:
- constant_width = 0
- variable_width = []
- for f in packet.fields:
- field_size = core.get_field_size(f)
- if field_size is not None:
- constant_width += field_size
- elif isinstance(f, (ast.PayloadField, ast.BodyField)):
- variable_width.append("len(self.payload)")
- elif isinstance(f, ast.TypedefField):
- variable_width.append(f"self.{f.id}.size")
- elif isinstance(f, ast.ArrayField) and isinstance(f.type, (ast.StructDeclaration, ast.CustomFieldDeclaration)):
- variable_width.append(f"sum([elt.size for elt in self.{f.id}])")
- elif isinstance(f, ast.ArrayField) and isinstance(f.type, ast.EnumDeclaration):
- variable_width.append(f"len(self.{f.id}) * {f.type.width}")
- elif isinstance(f, ast.ArrayField):
- variable_width.append(f"len(self.{f.id}) * {int(f.width / 8)}")
- else:
- raise Exception("Unsupported field type")
-
- constant_width = int(constant_width / 8)
- if len(variable_width) == 0:
- return [f"return {constant_width}"]
- elif len(variable_width) == 1 and constant_width:
- return [f"return {variable_width[0]} + {constant_width}"]
- elif len(variable_width) == 1:
- return [f"return {variable_width[0]}"]
- elif len(variable_width) > 1 and constant_width:
- return ([f"return {constant_width} + ("] + " +\n ".join(variable_width).split("\n") + [")"])
- elif len(variable_width) > 1:
- return (["return ("] + " +\n ".join(variable_width).split("\n") + [")"])
- else:
- assert False
-
-
-def generate_packet_post_init(decl: ast.Declaration) -> List[str]:
- """Generate __post_init__ function to set constraint field values."""
-
- # Gather all constraints from parent packets.
- constraints = []
- current = decl
- while current.parent_id:
- constraints.extend(current.constraints)
- current = current.parent
-
- if constraints:
- code = []
- for c in constraints:
- if c.value is not None:
- code.append(f"self.{c.id} = {c.value}")
- else:
- field = core.get_packet_field(decl, c.id)
- code.append(f"self.{c.id} = {field.type_id}.{c.tag_id}")
- return code
-
- else:
- return ["pass"]
-
-
-def generate_enum_declaration(decl: ast.EnumDeclaration) -> str:
- """Generate the implementation of an enum type."""
-
- enum_name = decl.id
- tag_decls = []
- for t in decl.tags:
- tag_decls.append(f"{t.id} = {hex(t.value)}")
-
- return dedent("""\
-
- class {enum_name}(enum.IntEnum):
- {tag_decls}
- """).format(enum_name=enum_name, tag_decls=indent(tag_decls, 1))
-
-
-def generate_packet_declaration(packet: ast.Declaration) -> str:
- """Generate the implementation a toplevel Packet or Struct
- declaration."""
-
- packet_name = packet.id
- field_decls = []
- for f in packet.fields:
- if isinstance(f, ast.ScalarField):
- field_decls.append(f"{f.id}: int = field(kw_only=True, default=0)")
- elif isinstance(f, ast.TypedefField):
- if isinstance(f.type, ast.EnumDeclaration):
- field_decls.append(
- f"{f.id}: {f.type_id} = field(kw_only=True, default={f.type_id}.{f.type.tags[0].id})")
- elif isinstance(f.type, ast.ChecksumDeclaration):
- field_decls.append(f"{f.id}: int = field(kw_only=True, default=0)")
- elif isinstance(f.type, (ast.StructDeclaration, ast.CustomFieldDeclaration)):
- field_decls.append(f"{f.id}: {f.type_id} = field(kw_only=True, default_factory={f.type_id})")
- else:
- raise Exception("Unsupported typedef field type")
- elif isinstance(f, ast.ArrayField) and f.width == 8:
- field_decls.append(f"{f.id}: bytearray = field(kw_only=True, default_factory=bytearray)")
- elif isinstance(f, ast.ArrayField) and f.width:
- field_decls.append(f"{f.id}: List[int] = field(kw_only=True, default_factory=list)")
- elif isinstance(f, ast.ArrayField) and f.type_id:
- field_decls.append(f"{f.id}: List[{f.type_id}] = field(kw_only=True, default_factory=list)")
-
- if packet.parent_id:
- parent_name = packet.parent_id
- parent_fields = 'fields: dict, '
- serializer = generate_derived_packet_serializer(packet)
- else:
- parent_name = 'Packet'
- parent_fields = ''
- serializer = generate_toplevel_packet_serializer(packet)
-
- parser = generate_packet_parser(packet)
- size = generate_packet_size_getter(packet)
- post_init = generate_packet_post_init(packet)
-
- return dedent("""\
-
- @dataclass
- class {packet_name}({parent_name}):
- {field_decls}
-
- def __post_init__(self):
- {post_init}
-
- @staticmethod
- def parse({parent_fields}span: bytes) -> Tuple['{packet_name}', bytes]:
- {parser}
-
- def serialize(self, payload: bytes = None) -> bytes:
- {serializer}
-
- @property
- def size(self) -> int:
- {size}
- """).format(packet_name=packet_name,
- parent_name=parent_name,
- parent_fields=parent_fields,
- field_decls=indent(field_decls, 1),
- post_init=indent(post_init, 2),
- parser=indent(parser, 2),
- serializer=indent(serializer, 2),
- size=indent(size, 2))
-
-
-def generate_custom_field_declaration_check(decl: ast.CustomFieldDeclaration) -> str:
- """Generate the code to validate a user custom field implementation.
-
- This code is to be executed when the generated module is loaded to ensure
- the user gets an immediate and clear error message when the provided
- custom types do not fit the expected template.
- """
- return dedent("""\
-
- if (not callable(getattr({custom_field_name}, 'parse', None)) or
- not callable(getattr({custom_field_name}, 'parse_all', None))):
- raise Exception('The custom field type {custom_field_name} does not implement the parse method')
- """).format(custom_field_name=decl.id)
-
-
-def generate_checksum_declaration_check(decl: ast.ChecksumDeclaration) -> str:
- """Generate the code to validate a user checksum field implementation.
-
- This code is to be executed when the generated module is loaded to ensure
- the user gets an immediate and clear error message when the provided
- checksum functions do not fit the expected template.
- """
- return dedent("""\
-
- if not callable({checksum_name}):
- raise Exception('{checksum_name} is not callable')
- """).format(checksum_name=decl.id)
-
-
-def run(input: argparse.FileType, output: argparse.FileType, custom_type_location: Optional[str]):
- file = ast.File.from_json(json.load(input))
- core.desugar(file)
-
- custom_types = []
- custom_type_checks = ""
- for d in file.declarations:
- if isinstance(d, ast.CustomFieldDeclaration):
- custom_types.append(d.id)
- custom_type_checks += generate_custom_field_declaration_check(d)
- elif isinstance(d, ast.ChecksumDeclaration):
- custom_types.append(d.id)
- custom_type_checks += generate_checksum_declaration_check(d)
-
- output.write(f"# File generated from {input.name}, with the command:\n")
- output.write(f"# {' '.join(sys.argv)}\n")
- output.write("# /!\\ Do not edit by hand.\n")
- if custom_types and custom_type_location:
- output.write(f"\nfrom {custom_type_location} import {', '.join(custom_types)}\n")
- output.write(generate_prelude())
- output.write(custom_type_checks)
-
- for d in file.declarations:
- if isinstance(d, ast.EnumDeclaration):
- output.write(generate_enum_declaration(d))
- elif isinstance(d, (ast.PacketDeclaration, ast.StructDeclaration)):
- output.write(generate_packet_declaration(d))
-
-
-def main() -> int:
- """Generate python PDL backend."""
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--input', type=argparse.FileType('r'), default=sys.stdin, help='Input PDL-JSON source')
- parser.add_argument('--output', type=argparse.FileType('w'), default=sys.stdout, help='Output Python file')
- parser.add_argument('--custom-type-location',
- type=str,
- required=False,
- help='Module of declaration of custom types')
- return run(**vars(parser.parse_args()))
-
-
-if __name__ == '__main__':
- sys.exit(main())
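For reference, the generate_enum_declaration and generate_packet_declaration templates above shape the emitted Python roughly as follows. This is a hand-written sketch for a hypothetical packet with a single enum field, not verbatim generator output; the parse, serialize and size bodies are elided, and names such as Packet, dataclass, field and Tuple come from the generated prelude.

    class Version(enum.IntEnum):
        V1 = 0x1

    @dataclass
    class Hello(Packet):
        version: Version = field(kw_only=True, default=Version.V1)

        def __post_init__(self):
            pass

        @staticmethod
        def parse(span: bytes) -> Tuple['Hello', bytes]:
            ...  # field extraction code produced by FieldParser

        def serialize(self, payload: bytes = None) -> bytes:
            ...  # field packing code produced by FieldSerializer

        @property
        def size(self) -> int:
            ...  # byte size computed by generate_packet_size_getter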
diff --git a/tools/pdl/scripts/packet_runtime.h b/tools/pdl/scripts/packet_runtime.h
deleted file mode 100644
index c9e1420ea9..0000000000
--- a/tools/pdl/scripts/packet_runtime.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <cstdint>
-#include <memory>
-#include <utility>
-#include <vector>
-
-#ifndef ASSERT
-#include <cassert>
-#define ASSERT assert
-#endif // !ASSERT
-
-namespace pdl::packet {
-
-/// Representation of a raw packet slice.
-/// The slice contains a shared pointer to the source packet bytes, and points
-/// to a subrange within this byte buffer.
-class slice {
- public:
- slice() = default;
- slice(slice const&) = default;
- slice(std::shared_ptr<const std::vector<uint8_t>> packet)
- : packet_(std::move(packet)), offset_(0), size_(packet_->size()) {}
-
- slice(std::shared_ptr<const std::vector<uint8_t>> packet, size_t offset,
- size_t size)
- : packet_(std::move(packet)), offset_(offset), size_(size) {}
-
- /// Return a new slice that contains the selected subrange within the
- /// current slice. The range ['offset', 'offset' + 'size') must be
- /// contained within the bounds of the current slice.
- slice subrange(size_t offset, size_t size) const {
- ASSERT((offset + size) <= size_);
- return slice(packet_, offset_ + offset, size);
- }
-
- /// Read a scalar value encoded in little-endian.
- /// The bytes that are read from calling this function are consumed.
- /// This function can be used to iteratively extract values from a packet
- /// slice.
- template <typename T, size_t N = sizeof(T)>
- T read_le() {
- static_assert(N <= sizeof(T));
- ASSERT(N <= size_);
- T value = 0;
- for (size_t n = 0; n < N; n++) {
- value |= (T)at(n) << (8 * n);
- }
- skip(N);
- return value;
- }
-
- /// Read a scalar value encoded in big-endian.
- /// The bytes that are read from calling this function are consumed.
- /// This function can be used to iteratively extract values from a packet
- /// slice.
- template <typename T, size_t N = sizeof(T)>
- T read_be() {
- static_assert(N <= sizeof(T));
- ASSERT(N <= size_);
- T value = 0;
- for (size_t n = 0; n < N; n++) {
- value = (value << 8) | (T)at(n);
- }
- skip(N);
- return value;
- }
-
- /// Return the value of the byte at the given offset.
- /// `offset` must be within the bounds of the slice.
- uint8_t at(size_t offset) const {
- ASSERT(offset < size_);
- return packet_->at(offset_ + offset);
- }
-
- /// Skip `size` bytes at the front of the slice.
- /// `size` must be lower than or equal to the slice size.
- void skip(size_t size) {
- ASSERT(size <= size_);
- offset_ += size;
- size_ -= size;
- }
-
- /// Empty the slice.
- void clear() { size_ = 0; }
-
- /// Return the size of the slice in bytes.
- size_t size() const { return size_; }
-
- /// Return the contents of the slice as a byte vector.
- std::vector<uint8_t> bytes() const {
- return std::vector<uint8_t>(packet_->cbegin() + offset_,
- packet_->cbegin() + offset_ + size_);
- }
-
- private:
- std::shared_ptr<const std::vector<uint8_t>> packet_;
- size_t offset_{0};
- size_t size_{0};
-};
-
-/// Interface class for generated packet builders.
-class Builder {
- public:
- virtual ~Builder() = default;
-
- /// Method implemented by generated packet builders.
- /// The packet fields are concatenated to the output vector.
- virtual void Serialize(std::vector<uint8_t>&) const {}
-
- /// Method implemented by generated packet builders.
- /// Returns the size of the serialized packet in bytes.
- virtual size_t GetSize() const { return 0; }
-
- /// Write a scalar value encoded in little-endian.
- template <typename T, size_t N = sizeof(T)>
- static void write_le(std::vector<uint8_t>& output, T value) {
- static_assert(N <= sizeof(T));
- for (size_t n = 0; n < N; n++) {
- output.push_back(value >> (8 * n));
- }
- }
-
- /// Write a scalar value encoded in big-endian.
- template <typename T, size_t N = sizeof(T)>
- static void write_be(std::vector<uint8_t>& output, T value) {
- static_assert(N <= sizeof(T));
- for (size_t n = 0; n < N; n++) {
- output.push_back(value >> (8 * (N - 1 - n)));
- }
- }
-
- /// Helper method to serialize the packet to a byte vector.
- std::vector<uint8_t> Serialize() const {
- std::vector<uint8_t> output;
- Serialize(output);
- return output;
- }
-};
-
-} // namespace pdl::packet
diff --git a/tools/pdl/scripts/pdl/ast.py b/tools/pdl/scripts/pdl/ast.py
deleted file mode 100644
index 4f884e5eb9..0000000000
--- a/tools/pdl/scripts/pdl/ast.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from dataclasses import dataclass, field
-from typing import Optional, List, Dict, Tuple
-
-constructors_ = dict()
-
-
-def node(kind: str):
-
- def decorator(cls):
- cls = dataclass(cls)
- constructors_[kind] = cls
- return cls
-
- return decorator
-
-
-@dataclass
-class SourceLocation:
- offset: int
- line: int
- column: int
-
-
-@dataclass
-class SourceRange:
- file: int
- start: SourceLocation
- end: SourceLocation
-
-
-@dataclass
-class Node:
- kind: str
- loc: SourceLocation
-
-
-@node('tag')
-class Tag(Node):
- id: str
- value: Optional[int] = field(default=None)
- range: Optional[Tuple[int, int]] = field(default=None)
- tags: Optional[List['Tag']] = field(default=None)
-
-
-@node('constraint')
-class Constraint(Node):
- id: str
- value: Optional[int]
- tag_id: Optional[str]
-
-
-@dataclass
-class Field(Node):
- parent: Node = field(init=False)
-
-
-@node('checksum_field')
-class ChecksumField(Field):
- field_id: str
-
-
-@node('padding_field')
-class PaddingField(Field):
- size: int
-
-
-@node('size_field')
-class SizeField(Field):
- field_id: str
- width: int
-
-
-@node('count_field')
-class CountField(Field):
- field_id: str
- width: int
-
-
-@node('body_field')
-class BodyField(Field):
- id: str = field(init=False, default='_body_')
-
-
-@node('payload_field')
-class PayloadField(Field):
- size_modifier: Optional[str]
- id: str = field(init=False, default='_payload_')
-
-
-@node('fixed_field')
-class FixedField(Field):
- width: Optional[int] = None
- value: Optional[int] = None
- enum_id: Optional[str] = None
- tag_id: Optional[str] = None
-
- @property
- def type(self) -> Optional['Declaration']:
- return self.parent.file.typedef_scope[self.enum_id] if self.enum_id else None
-
-
-@node('reserved_field')
-class ReservedField(Field):
- width: int
-
-
-@node('array_field')
-class ArrayField(Field):
- id: str
- width: Optional[int]
- type_id: Optional[str]
- size_modifier: Optional[str]
- size: Optional[int]
- padded_size: Optional[int] = field(init=False, default=None)
-
- @property
- def type(self) -> Optional['Declaration']:
- return self.parent.file.typedef_scope[self.type_id] if self.type_id else None
-
-
-@node('scalar_field')
-class ScalarField(Field):
- id: str
- width: int
-
-
-@node('typedef_field')
-class TypedefField(Field):
- id: str
- type_id: str
-
- @property
- def type(self) -> 'Declaration':
- return self.parent.file.typedef_scope[self.type_id]
-
-
-@node('group_field')
-class GroupField(Field):
- group_id: str
- constraints: List[Constraint]
-
-
-@dataclass
-class Declaration(Node):
- file: 'File' = field(init=False)
-
- def __post_init__(self):
- if hasattr(self, 'fields'):
- for f in self.fields:
- f.parent = self
-
-
-@node('endianness_declaration')
-class EndiannessDeclaration(Node):
- value: str
-
-
-@node('checksum_declaration')
-class ChecksumDeclaration(Declaration):
- id: str
- function: str
- width: int
-
-
-@node('custom_field_declaration')
-class CustomFieldDeclaration(Declaration):
- id: str
- function: str
- width: Optional[int]
-
-
-@node('enum_declaration')
-class EnumDeclaration(Declaration):
- id: str
- tags: List[Tag]
- width: int
-
-
-@node('packet_declaration')
-class PacketDeclaration(Declaration):
- id: str
- parent_id: Optional[str]
- constraints: List[Constraint]
- fields: List[Field]
-
- @property
- def parent(self) -> Optional['PacketDeclaration']:
- return self.file.packet_scope[self.parent_id] if self.parent_id else None
-
-
-@node('struct_declaration')
-class StructDeclaration(Declaration):
- id: str
- parent_id: Optional[str]
- constraints: List[Constraint]
- fields: List[Field]
-
- @property
- def parent(self) -> Optional['StructDeclaration']:
- return self.file.typedef_scope[self.parent_id] if self.parent_id else None
-
-
-@node('group_declaration')
-class GroupDeclaration(Declaration):
- id: str
- fields: List[Field]
-
-
-@dataclass
-class File:
- endianness: EndiannessDeclaration
- declarations: List[Declaration]
- packet_scope: Dict[str, Declaration] = field(init=False)
- typedef_scope: Dict[str, Declaration] = field(init=False)
- group_scope: Dict[str, Declaration] = field(init=False)
-
- def __post_init__(self):
- self.packet_scope = dict()
- self.typedef_scope = dict()
- self.group_scope = dict()
-
- # Construct the toplevel declaration scopes.
- for d in self.declarations:
- d.file = self
- if isinstance(d, PacketDeclaration):
- self.packet_scope[d.id] = d
- elif isinstance(d, GroupDeclaration):
- self.group_scope[d.id] = d
- else:
- self.typedef_scope[d.id] = d
-
- @staticmethod
- def from_json(obj: object) -> 'File':
- """Import a File exported as JSON object by the PDL parser."""
- endianness = convert_(obj['endianness'])
- declarations = convert_(obj['declarations'])
- return File(endianness, declarations)
-
- @property
- def byteorder(self) -> str:
- return 'little' if self.endianness.value == 'little_endian' else 'big'
-
- @property
- def byteorder_short(self) -> str:
- return 'le' if self.endianness.value == 'little_endian' else 'be'
-
-
-def convert_(obj: object) -> object:
- if obj is None:
- return None
- if isinstance(obj, (int, str)):
- return obj
- if isinstance(obj, list):
- return [convert_(elt) for elt in obj]
- if isinstance(obj, dict):
- if 'start' in obj.keys() and 'end' in obj.keys():
- return (obj['start'], obj['end'])
- kind = obj['kind']
- loc = obj['loc']
- loc = SourceRange(loc['file'], SourceLocation(**loc['start']), SourceLocation(**loc['end']))
- constructor = constructors_.get(kind)
- members = {'loc': loc, 'kind': kind}
- for name, value in obj.items():
- if name != 'kind' and name != 'loc':
- members[name] = convert_(value)
- return constructor(**members)
- raise Exception('Unhandled json object type')
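Both backends above consume this AST in the same way: load the PDL-JSON dump, rebuild the node tree with File.from_json, then desugar it with core.desugar. A minimal sketch, assuming a hypothetical input path:

    import json
    from pdl import ast, core

    with open('example.json') as f:  # PDL-JSON dump emitted by the PDL parser
        file = ast.File.from_json(json.load(f))

    core.desugar(file)  # inline groups, rewrite constrained fields into fixed fields

    for decl in file.declarations:
        if isinstance(decl, ast.PacketDeclaration):
            print(decl.id, [getattr(f, 'id', f.kind) for f in decl.fields])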
diff --git a/tools/pdl/scripts/pdl/core.py b/tools/pdl/scripts/pdl/core.py
deleted file mode 100644
index f55bb3018a..0000000000
--- a/tools/pdl/scripts/pdl/core.py
+++ /dev/null
@@ -1,334 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import Optional, List, Dict, Union, Tuple, Set
-from .ast import *
-
-
-def desugar_field_(field: Field, previous: Field, constraints: Dict[str, Constraint]) -> List[Field]:
- """Inline group and constrained fields.
- Constrained fields are transformed into fixed fields.
- Group fields are inlined and recursively desugared."""
-
- if isinstance(field, ScalarField) and field.id in constraints:
- value = constraints[field.id].value
- fixed = FixedField(kind='fixed_field', loc=field.loc, width=field.width, value=value)
- fixed.parent = field.parent
- return [fixed]
-
- elif isinstance(field, PaddingField):
- previous.padded_size = field.size
- field.padded_field = previous
- return [field]
-
- elif isinstance(field, TypedefField) and field.id in constraints:
- tag_id = constraints[field.id].tag_id
- fixed = FixedField(kind='fixed_field', loc=field.loc, enum_id=field.type_id, tag_id=tag_id)
- fixed.parent = field.parent
- return [fixed]
-
- elif isinstance(field, GroupField):
- group = field.parent.file.group_scope[field.group_id]
- constraints = dict([(c.id, c) for c in field.constraints])
- fields = []
- for f in group.fields:
- fields.extend(desugar_field_(f, previous, constraints))
- previous = f
- return fields
-
- else:
- return [field]
-
-
-def desugar(file: File):
- """Inline group fields.
- Constrained fields are transformed into fixed fields.
- Group declarations are removed from the file object.
- **The original file object is modified inline.**"""
-
- declarations = []
- for d in file.declarations:
- if isinstance(d, GroupDeclaration):
- continue
-
- if isinstance(d, (PacketDeclaration, StructDeclaration)):
- fields = []
- for f in d.fields:
- fields.extend(desugar_field_(f, fields[-1] if len(fields) > 0 else None, {}))
- d.fields = fields
-
- declarations.append(d)
-
- file.declarations = declarations
- file.group_scope = {}
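-
-
-# Illustrative sketch with hypothetical declarations:
-#
-#   group Header { opcode : 8 }
-#   packet Ping { Header { opcode = 1 }, _payload_ }
-#
-# desugar(file) inlines `Header` into `Ping`, replaces the constrained
-# `opcode` scalar with a fixed field of width 8 and value 1, drops the group
-# declaration, and leaves file.group_scope empty.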
-
-
-def make_reserved_field(width: int) -> ReservedField:
- """Create a reserved field of specified width."""
- return ReservedField(kind='reserved_field', loc=None, width=width)
-
-
-def get_packet_field(packet: Union[PacketDeclaration, StructDeclaration], id: str) -> Optional[Field]:
- """Return the field with selected identifier declared in the provided
- packet or its ancestors."""
- id = '_payload_' if id == 'payload' else id
- for f in packet.fields:
- if getattr(f, 'id', None) == id:
- return f
- if isinstance(packet, PacketDeclaration) and packet.parent_id:
- parent = packet.file.packet_scope[packet.parent_id]
- return get_packet_field(parent, id)
- elif isinstance(packet, StructDeclaration) and packet.parent_id:
- parent = packet.file.typedef_scope[packet.parent_id]
- return get_packet_field(parent, id)
- else:
- return None
-
-
-def get_packet_fields(decl: Union[PacketDeclaration, StructDeclaration]) -> List[Field]:
- """Return the list of fields declared in the selected packet and its parents.
- Payload fields are removed from the parent declarations."""
-
- fields = []
- if decl.parent:
- fields = [f for f in get_packet_fields(decl.parent) if not isinstance(f, (PayloadField, BodyField))]
- return fields + decl.fields
-
-
-def get_packet_shift(packet: Union[PacketDeclaration, StructDeclaration]) -> int:
- """Return the bit shift of the payload or body field in the parent packet.
-
- When using packet derivation on bit fields, the body may be shifted.
- The shift is handled statically in the implementation of child packets,
- and the incomplete field is included in the body.
- ```
- packet Basic {
- type: 1,
- _body_
- }
- ```
- """
-
- # Traverse empty parents.
- parent = packet.parent
- while parent and len(parent.fields) == 1:
- parent = parent.parent
-
- if not parent:
- return 0
-
- shift = 0
- for f in parent.fields:
- if isinstance(f, (BodyField, PayloadField)):
- return 0 if (shift % 8) == 0 else shift
- else:
- # Fields that do not have a constant size are assumed to start
- # on a byte boundary, and measure an integral number of bytes.
- # Start the count over.
- size = get_field_size(f)
- shift = 0 if size is None else shift + size
-
- # No payload or body in parent packet.
- # Not raising an error, the generation will fail somewhere else.
- return 0
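-
-
-# Worked example for the `Basic` packet above (hypothetical child packet):
-#
-#   packet Child : Basic { x : 8 }
-#
-# `type: 1` occupies a single bit, so the body starts at bit offset 1 and
-# get_packet_shift(Child) returns 1. When the fields preceding the payload
-# total a whole number of bytes, the function returns 0 instead.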
-
-
-def get_packet_ancestor(
- decl: Union[PacketDeclaration, StructDeclaration]) -> Union[PacketDeclaration, StructDeclaration]:
- """Return the root ancestor of the selected packet or struct."""
- if decl.parent_id is None:
- return decl
- else:
- return get_packet_ancestor(decl.file.packet_scope[decl.parent_id])
-
-
-def get_derived_packets(
- decl: Union[PacketDeclaration, StructDeclaration],
- traverse: bool = True,
-) -> List[Tuple[List[Constraint], Union[PacketDeclaration, StructDeclaration]]]:
- """Return the list of packets or structs that immediately derive from the
- selected packet or struct, coupled with the field constraints.
- Packet aliases (containing no field declarations other than a payload)
- are traversed."""
-
- children = []
- for d in decl.file.declarations:
- if type(d) is type(decl) and d.parent_id == decl.id:
- if (len(d.fields) == 1 and isinstance(d.fields[0], (PayloadField, BodyField))) and traverse:
- children.extend([(d.constraints + sub_constraints, sub_child)
- for (sub_constraints, sub_child) in get_derived_packets(d)])
- else:
- children.append((d.constraints, d))
- return children
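-
-
-# Illustrative sketch with hypothetical declarations:
-#
-#   packet Parent { op : 8, _payload_ }
-#   packet Alias : Parent (op = 1) { _payload_ }
-#   packet Child : Alias { data : 8 }
-#
-# get_derived_packets(Parent) traverses the alias `Alias` (its only field is
-# the payload) and returns [(Alias.constraints + Child.constraints, Child)].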
-
-
-def get_field_size(field: Field, skip_payload: bool = False) -> Optional[int]:
- """Determine the size of a field in bits, if possible.
- If the field is dynamically sized (e.g. unsized array or payload field),
- None is returned instead. If skip_payload is set, payload and body fields
- are counted as having size 0 rather than a variable size."""
-
- if isinstance(field, (ScalarField, SizeField, CountField, ReservedField)):
- return field.width
-
- elif isinstance(field, FixedField):
- return field.width or field.type.width
-
- elif isinstance(field, PaddingField):
- # Padding field width is added to the padded field size.
- return 0
-
- elif isinstance(field, ArrayField) and field.padded_size is not None:
- return field.padded_size * 8
-
- elif isinstance(field, ArrayField) and field.size is not None:
- element_width = field.width or get_declaration_size(field.type)
- return element_width * field.size if element_width is not None else None
-
- elif isinstance(field, TypedefField):
- return get_declaration_size(field.type)
-
- elif isinstance(field, ChecksumField):
- return 0
-
- elif isinstance(field, (PayloadField, BodyField)) and skip_payload:
- return 0
-
- else:
- return None
-
-
-def get_declaration_size(decl: Declaration, skip_payload: bool = False) -> Optional[int]:
- """Determine the size of a declaration type in bits, if possible.
- If the type is dynamically sized (e.g. contains an array or payload),
- None is returned instead. If skip_payload is set, payload and body fields
- are counted as having size 0 rather than a variable size."""
-
- if isinstance(decl, (EnumDeclaration, CustomFieldDeclaration, ChecksumDeclaration)):
- return decl.width
-
- elif isinstance(decl, (PacketDeclaration, StructDeclaration)):
- parent = decl.parent
- packet_size = get_declaration_size(parent, skip_payload=True) if parent else 0
- if packet_size is None:
- return None
- for f in decl.fields:
- field_size = get_field_size(f, skip_payload=skip_payload)
- if field_size is None:
- return None
- packet_size += field_size
- return packet_size
-
- else:
- return None
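-
-
-# Worked example with hypothetical declarations:
-#
-#   enum Op : 8 { PING = 1, PONG = 2 }
-#   packet P { op : Op, length : 16 }
-#
-# get_declaration_size(Op) == 8 and get_declaration_size(P) == 24 (bits);
-# an unsized array or payload field makes the result None instead.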
-
-
-def get_array_field_size(field: ArrayField) -> Union[None, int, Field]:
- """Return the array static size, size field, or count field.
- If the array is unsized None is returned instead."""
-
- if field.size is not None:
- return field.size
- for f in field.parent.fields:
- if isinstance(f, (SizeField, CountField)) and f.field_id == field.id:
- return f
- return None
-
-
-def get_payload_field_size(field: Union[PayloadField, BodyField]) -> Optional[Field]:
- """Return the payload or body size field.
- If the payload is unsized None is returned instead."""
-
- for f in field.parent.fields:
- if isinstance(f, SizeField) and f.field_id == field.id:
- return f
- return None
-
-
-def get_array_element_size(field: ArrayField) -> Optional[int]:
- """Return the array element size, if possible.
- If the element size is not known at compile time,
- None is returned instead."""
-
- return field.width or get_declaration_size(field.type)
-
-
-def get_field_offset_from_start(field: Field) -> Optional[int]:
- """Return the field bit offset from the start of the parent packet, if it
- can be statically computed. If the offset is variable None is returned
- instead."""
- offset = 0
- field_index = field.parent.fields.index(field)
- for f in field.parent.fields[:field_index]:
- size = get_field_size(f)
- if size is None:
- return None
-
- offset += size
- return offset
-
-
-def get_field_offset_from_end(field: Field) -> Optional[int]:
- """Return the field bit offset from the end of the parent packet, if it
- can be statically computed. If the offset is variable None is returned
- instead. The selected field size is not counted towards the offset."""
- offset = 0
- field_index = field.parent.fields.index(field)
- for f in field.parent.fields[field_index + 1:]:
- size = get_field_size(f)
- if size is None:
- return None
- offset += size
- return offset
-
-
-def get_unconstrained_parent_fields(decl: Union[PacketDeclaration, StructDeclaration]) -> List[Field]:
- """Return the list of fields from the parent declarations that have an identifier
- but that do not have a value fixed by any of the parent constraints.
- The fields are returned in order of declaration."""
-
- def constraint_ids(constraints: List[Constraint]) -> Set[str]:
- return set([c.id for c in constraints])
-
- def aux(decl: Optional[Declaration], constraints: Set[str]) -> List[Field]:
- if decl is None:
- return []
- fields = aux(decl.parent, constraints.union(constraint_ids(decl.constraints)))
- for f in decl.fields:
- if isinstance(f, (ScalarField, ArrayField, TypedefField)) and f.id not in constraints:
- fields.append(f)
- return fields
-
- return aux(decl.parent, constraint_ids(decl.constraints))
-
-
-def get_parent_constraints(decl: Union[PacketDeclaration, StructDeclaration]) -> List[Constraint]:
- """Return the list of constraints from the current and parent declarations."""
- parent_constraints = get_parent_constraints(decl.parent) if decl.parent else []
- return parent_constraints + decl.constraints
-
-
-def is_bit_field(field: Field) -> bool:
- """Identify fields that can have bit granularity.
- These include: ScalarField, FixedField, TypedefField with enum type,
- SizeField, and CountField."""
-
- if isinstance(field, (ScalarField, SizeField, CountField, FixedField, ReservedField)):
- return True
-
- elif isinstance(field, TypedefField) and isinstance(field.type, EnumDeclaration):
- return True
-
- else:
- return False
diff --git a/tools/pdl/scripts/pdl/utils.py b/tools/pdl/scripts/pdl/utils.py
deleted file mode 100644
index 24e91ca9fb..0000000000
--- a/tools/pdl/scripts/pdl/utils.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import List, Tuple, Union, Optional
-
-
-def indent(code: Union[str, List[str]], depth: int) -> str:
- """Indent a code block to the selected depth.
-
- Accepts either a single code block or a list of lines as parameter;
- embedded line breaks within the lines are handled as well.
- The first line is intentionally not indented so that
- the caller may use it as:
-
- '''
- def generated():
- {codeblock}
- '''
- """
- code = [code] if isinstance(code, str) else code
- lines = [line for block in code for line in block.split('\n')]
- sep = '\n' + (' ' * (depth * 4))
- return sep.join(lines)
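-
-
-# Behavior sketch: with depth 1 the continuation lines gain four spaces while
-# the first line is left as-is, e.g.
-#
-#   indent(["a\nb", "c"], 1) == "a\n    b\n    c"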
-
-
-def to_pascal_case(text: str) -> str:
- """Convert UPPER_SNAKE_CASE strings to PascalCase."""
- return text.replace('_', ' ').title().replace(' ', '')
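-
-# For example: to_pascal_case("HCI_COMMAND") == "HciCommand".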
diff --git a/tools/pdl/src/analyzer.rs b/tools/pdl/src/analyzer.rs
deleted file mode 100644
index 733d4ef9e9..0000000000
--- a/tools/pdl/src/analyzer.rs
+++ /dev/null
@@ -1,2627 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use codespan_reporting::diagnostic::Diagnostic;
-use codespan_reporting::files;
-use codespan_reporting::term;
-use codespan_reporting::term::termcolor;
-use std::collections::HashMap;
-
-use crate::ast::*;
-use crate::parser::ast as parser_ast;
-use crate::utils;
-
-pub mod ast {
- use serde::Serialize;
-
- /// Field and declaration size information.
- #[derive(Debug, Clone, Copy)]
- #[allow(unused)]
- pub enum Size {
- /// Constant size in bits.
- Static(usize),
- /// Size determined at packet parsing time by a size or count field.
- Dynamic,
- /// The size cannot be determined statically or at runtime.
- /// The packet assumes the largest possible size.
- Unknown,
- }
-
- // TODO: use derive(Default) when UWB is using Rust 1.62.0.
- #[allow(clippy::derivable_impls)]
- impl Default for Size {
- fn default() -> Size {
- Size::Unknown
- }
- }
-
- #[derive(Debug, Serialize, Default, Clone, PartialEq)]
- pub struct Annotation;
-
- #[derive(Default, Debug, Clone)]
- pub struct FieldAnnotation {
- // Size of field.
- pub size: Size,
- // Size of field with padding bytes.
- // This information exists only for array fields.
- pub padded_size: Option<usize>,
- }
-
- #[derive(Default, Debug, Clone)]
- pub struct DeclAnnotation {
- // Size computed excluding the payload.
- pub size: Size,
- // Payload size, or Static(0) if the declaration does not
- // have a payload.
- pub payload_size: Size,
- }
-
- impl FieldAnnotation {
- pub fn new(size: Size) -> Self {
- FieldAnnotation { size, padded_size: None }
- }
- }
-
- impl std::ops::Add for Size {
- type Output = Size;
- fn add(self, rhs: Size) -> Self::Output {
- match (self, rhs) {
- (Size::Unknown, _) | (_, Size::Unknown) => Size::Unknown,
- (Size::Dynamic, _) | (_, Size::Dynamic) => Size::Dynamic,
- (Size::Static(lhs), Size::Static(rhs)) => Size::Static(lhs + rhs),
- }
- }
- }
-
- impl std::ops::Mul for Size {
- type Output = Size;
- fn mul(self, rhs: Size) -> Self::Output {
- match (self, rhs) {
- (Size::Unknown, _) | (_, Size::Unknown) => Size::Unknown,
- (Size::Dynamic, _) | (_, Size::Dynamic) => Size::Dynamic,
- (Size::Static(lhs), Size::Static(rhs)) => Size::Static(lhs * rhs),
- }
- }
- }
-
- impl std::ops::Mul<usize> for Size {
- type Output = Size;
- fn mul(self, rhs: usize) -> Self::Output {
- match self {
- Size::Unknown => Size::Unknown,
- Size::Dynamic => Size::Dynamic,
- Size::Static(lhs) => Size::Static(lhs * rhs),
- }
- }
- }
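-
- // Illustrative note: size arithmetic follows "worst case wins", e.g.
- // Static(8) + Static(16) == Static(24), Static(8) + Dynamic == Dynamic,
- // and any operation involving Unknown yields Unknown.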
-
- impl crate::ast::Annotation for Annotation {
- type FieldAnnotation = FieldAnnotation;
- type DeclAnnotation = DeclAnnotation;
- }
-
- #[allow(unused)]
- pub type Field = crate::ast::Field<Annotation>;
- #[allow(unused)]
- pub type Decl = crate::ast::Decl<Annotation>;
- #[allow(unused)]
- pub type File = crate::ast::File<Annotation>;
-}
-
-/// List of unique errors reported as analyzer diagnostics.
-#[repr(u16)]
-pub enum ErrorCode {
- DuplicateDeclIdentifier = 1,
- RecursiveDecl = 2,
- UndeclaredGroupIdentifier = 3,
- InvalidGroupIdentifier = 4,
- UndeclaredTypeIdentifier = 5,
- InvalidTypeIdentifier = 6,
- UndeclaredParentIdentifier = 7,
- InvalidParentIdentifier = 8,
- UndeclaredTestIdentifier = 9,
- InvalidTestIdentifier = 10,
- DuplicateFieldIdentifier = 11,
- DuplicateTagIdentifier = 12,
- DuplicateTagValue = 13,
- InvalidTagValue = 14,
- UndeclaredConstraintIdentifier = 15,
- InvalidConstraintIdentifier = 16,
- E17 = 17,
- ConstraintValueOutOfRange = 18,
- E19 = 19,
- E20 = 20,
- E21 = 21,
- DuplicateConstraintIdentifier = 22,
- DuplicateSizeField = 23,
- UndeclaredSizeIdentifier = 24,
- InvalidSizeIdentifier = 25,
- DuplicateCountField = 26,
- UndeclaredCountIdentifier = 27,
- InvalidCountIdentifier = 28,
- DuplicateElementSizeField = 29,
- UndeclaredElementSizeIdentifier = 30,
- InvalidElementSizeIdentifier = 31,
- FixedValueOutOfRange = 32,
- E33 = 33,
- E34 = 34,
- E35 = 35,
- DuplicatePayloadField = 36,
- MissingPayloadField = 37,
- RedundantArraySize = 38,
- InvalidPaddingField = 39,
- InvalidTagRange = 40,
- DuplicateTagRange = 41,
- E42 = 42,
- E43 = 43,
-}
-
-impl From<ErrorCode> for String {
- fn from(code: ErrorCode) -> Self {
- format!("E{}", code as u16)
- }
-}
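-
-// For example, ErrorCode::RecursiveDecl is rendered as the diagnostic code "E2".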
-
-/// Aggregate analyzer diagnostics.
-#[derive(Debug, Default)]
-pub struct Diagnostics {
- pub diagnostics: Vec<Diagnostic<FileId>>,
-}
-
-/// Gather information about the full AST.
-#[derive(Debug, Default)]
-pub struct Scope<'d, A: Annotation> {
- /// Collection of Group, Packet, Enum, Struct, Checksum, and CustomField
- /// declarations.
- pub typedef: HashMap<String, &'d crate::ast::Decl<A>>,
-}
-
-impl Diagnostics {
- fn is_empty(&self) -> bool {
- self.diagnostics.is_empty()
- }
-
- fn push(&mut self, diagnostic: Diagnostic<FileId>) {
- self.diagnostics.push(diagnostic)
- }
-
- fn err_or<T>(self, value: T) -> Result<T, Diagnostics> {
- if self.is_empty() {
- Ok(value)
- } else {
- Err(self)
- }
- }
-
- pub fn emit(
- &self,
- sources: &SourceDatabase,
- writer: &mut dyn termcolor::WriteColor,
- ) -> Result<(), files::Error> {
- let config = term::Config::default();
- for d in self.diagnostics.iter() {
- term::emit(writer, &config, sources, d)?;
- }
- Ok(())
- }
-}
-
-impl<'d, A: Annotation + Default> Scope<'d, A> {
- pub fn new(file: &'d crate::ast::File<A>) -> Result<Scope<'d, A>, Diagnostics> {
- // Gather top-level declarations.
- let mut scope: Scope<A> = Default::default();
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- if let Some(id) = decl.id() {
- if let Some(prev) = scope.typedef.insert(id.to_string(), decl) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateDeclIdentifier)
- .with_message(format!(
- "redeclaration of {} identifier `{}`",
- decl.kind(),
- id
- ))
- .with_labels(vec![
- decl.loc.primary(),
- prev.loc
- .secondary()
- .with_message(format!("`{}` is first declared here", id)),
- ]),
- )
- }
- }
- }
-
- // Return failure if any diagnostic is raised.
- if diagnostics.is_empty() {
- Ok(scope)
- } else {
- Err(diagnostics)
- }
- }
-
- /// Return the parent declaration of the selected declaration,
- /// if it has one.
- pub fn get_parent(&self, decl: &crate::ast::Decl<A>) -> Option<&'d crate::ast::Decl<A>> {
- decl.parent_id().and_then(|parent_id| self.typedef.get(parent_id).cloned())
- }
-
- /// Iterate over the parent declarations of the selected declaration.
- pub fn iter_parents<'s>(
- &'s self,
- decl: &'d crate::ast::Decl<A>,
- ) -> impl Iterator<Item = &'d Decl<A>> + 's {
- std::iter::successors(self.get_parent(decl), |decl| self.get_parent(decl))
- }
-
- /// Iterate over the declaration and its parent's fields.
- pub fn iter_fields<'s>(
- &'s self,
- decl: &'d crate::ast::Decl<A>,
- ) -> impl Iterator<Item = &'d Field<A>> + 's {
- std::iter::successors(Some(decl), |decl| self.get_parent(decl)).flat_map(Decl::fields)
- }
-
- /// Return the type declaration for the selected field, if applicable.
- #[allow(dead_code)]
- pub fn get_declaration(
- &self,
- field: &'d crate::ast::Field<A>,
- ) -> Option<&'d crate::ast::Decl<A>> {
- match &field.desc {
- FieldDesc::Checksum { .. }
- | FieldDesc::Padding { .. }
- | FieldDesc::Size { .. }
- | FieldDesc::Count { .. }
- | FieldDesc::ElementSize { .. }
- | FieldDesc::Body
- | FieldDesc::Payload { .. }
- | FieldDesc::FixedScalar { .. }
- | FieldDesc::Reserved { .. }
- | FieldDesc::Group { .. }
- | FieldDesc::Scalar { .. }
- | FieldDesc::Array { type_id: None, .. } => None,
- FieldDesc::FixedEnum { enum_id: type_id, .. }
- | FieldDesc::Array { type_id: Some(type_id), .. }
- | FieldDesc::Typedef { type_id, .. } => self.typedef.get(type_id).cloned(),
- }
- }
-}
-
-/// Return the bit-width of a scalar value.
-fn bit_width(value: usize) -> usize {
- usize::BITS as usize - value.leading_zeros() as usize
-}
-
-/// Return the maximum value of a scalar of the given bit width.
-fn scalar_max(width: usize) -> usize {
- if width >= usize::BITS as usize {
- usize::MAX
- } else {
- (1 << width) - 1
- }
-}
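-
-// Illustrative values: bit_width(255) == 8, bit_width(256) == 9,
-// scalar_max(8) == 255, and scalar_max(64) == usize::MAX on 64-bit targets.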
-
-/// Check declaration identifiers.
-/// Raises error diagnostics for the following cases:
-/// - undeclared parent identifier
-/// - invalid parent identifier
-/// - undeclared group identifier
-/// - invalid group identifier
-/// - undeclared typedef identifier
-/// - invalid typedef identifier
-/// - undeclared test identifier
-/// - invalid test identifier
-/// - recursive declaration
-fn check_decl_identifiers(
- file: &parser_ast::File,
- scope: &Scope<parser_ast::Annotation>,
-) -> Result<(), Diagnostics> {
- enum Mark {
- Temporary,
- Permanent,
- }
- #[derive(Default)]
- struct Context<'d> {
- visited: HashMap<&'d str, Mark>,
- }
-
- fn bfs<'d>(
- decl: &'d parser_ast::Decl,
- context: &mut Context<'d>,
- scope: &Scope<'d, parser_ast::Annotation>,
- diagnostics: &mut Diagnostics,
- ) {
- let decl_id = decl.id().unwrap();
- match context.visited.get(decl_id) {
- Some(Mark::Permanent) => return,
- Some(Mark::Temporary) => {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::RecursiveDecl)
- .with_message(format!(
- "recursive declaration of {} `{}`",
- decl.kind(),
- decl_id
- ))
- .with_labels(vec![decl.loc.primary()]),
- );
- return;
- }
- _ => (),
- }
-
- // Start visiting current declaration.
- context.visited.insert(decl_id, Mark::Temporary);
-
- // Iterate over Struct and Group fields.
- for field in decl.fields() {
- match &field.desc {
- // Validate that the group field has a valid identifier.
- // If the type is a group recurse the group definition.
- FieldDesc::Group { group_id, .. } => match scope.typedef.get(group_id) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::UndeclaredGroupIdentifier)
- .with_message(format!("undeclared group identifier `{}`", group_id))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec!["hint: expected group identifier".to_owned()]),
- ),
- Some(group_decl @ Decl { desc: DeclDesc::Group { .. }, .. }) => {
- bfs(group_decl, context, scope, diagnostics)
- }
- Some(_) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidGroupIdentifier)
- .with_message(format!("invalid group identifier `{}`", group_id))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec!["hint: expected group identifier".to_owned()]),
- ),
- },
- // Validate that the typedef field has a valid identifier.
- // If the type is a struct recurse the struct definition.
- // Append the field to the packet re-definition.
- FieldDesc::Typedef { type_id, .. }
- | FieldDesc::Array { type_id: Some(type_id), .. } => {
- match scope.typedef.get(type_id) {
- None => diagnostics.push(
- Diagnostic::error().with_code(ErrorCode::UndeclaredTypeIdentifier)
- .with_message(format!(
- "undeclared {} identifier `{}`",
- field.kind(),
- type_id
- ))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec!["hint: expected enum, struct, custom_field, or checksum identifier".to_owned()]),
- ),
- Some(Decl { desc: DeclDesc::Packet { .. }, .. }) => diagnostics.push(
- Diagnostic::error().with_code(ErrorCode::InvalidTypeIdentifier)
- .with_message(format!(
- "invalid {} identifier `{}`",
- field.kind(),
- type_id
- ))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec!["hint: expected enum, struct, custom_field, or checksum identifier".to_owned()]),
- ),
- Some(typedef_decl) =>
- // Recurse on typedef fields and statically sized arrays only;
- // unsized arrays are allowed to be recursive, e.g. nested TLV types.
- if matches!(&field.desc, FieldDesc::Typedef { .. }) ||
- matches!(&field.desc, FieldDesc::Array { size: Some(_), .. }) {
- bfs(typedef_decl, context, scope, diagnostics)
- }
- }
- }
- // Ignore other fields.
- _ => (),
- }
- }
-
- // Iterate over parent declaration.
- if let Some(parent_id) = decl.parent_id() {
- let parent_decl = scope.typedef.get(parent_id);
- match (&decl.desc, parent_decl) {
- (DeclDesc::Packet { .. }, None) | (DeclDesc::Struct { .. }, None) => diagnostics
- .push(
- Diagnostic::error()
- .with_code(ErrorCode::UndeclaredParentIdentifier)
- .with_message(format!("undeclared parent identifier `{}`", parent_id))
- .with_labels(vec![decl.loc.primary()])
- .with_notes(vec![format!("hint: expected {} identifier", decl.kind())]),
- ),
- (
- DeclDesc::Packet { .. },
- Some(parent_decl @ Decl { desc: DeclDesc::Packet { .. }, .. }),
- )
- | (
- DeclDesc::Struct { .. },
- Some(parent_decl @ Decl { desc: DeclDesc::Struct { .. }, .. }),
- ) => bfs(parent_decl, context, scope, diagnostics),
- (_, Some(_)) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidParentIdentifier)
- .with_message(format!("invalid parent identifier `{}`", parent_id))
- .with_labels(vec![decl.loc.primary()])
- .with_notes(vec![format!("hint: expected {} identifier", decl.kind())]),
- ),
- _ => unreachable!(),
- }
- }
-
- // Done visiting current declaration.
- context.visited.insert(decl_id, Mark::Permanent);
- }
-
- // Start bfs.
- let mut diagnostics = Default::default();
- let mut context = Default::default();
- for decl in &file.declarations {
- match &decl.desc {
- DeclDesc::Checksum { .. } | DeclDesc::CustomField { .. } | DeclDesc::Enum { .. } => (),
- DeclDesc::Packet { .. } | DeclDesc::Struct { .. } | DeclDesc::Group { .. } => {
- bfs(decl, &mut context, scope, &mut diagnostics)
- }
- DeclDesc::Test { type_id, .. } => match scope.typedef.get(type_id) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::UndeclaredTestIdentifier)
- .with_message(format!("undeclared test identifier `{}`", type_id))
- .with_labels(vec![decl.loc.primary()])
- .with_notes(vec!["hint: expected packet identifier".to_owned()]),
- ),
- Some(Decl { desc: DeclDesc::Packet { .. }, .. }) => (),
- Some(_) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidTestIdentifier)
- .with_message(format!("invalid test identifier `{}`", type_id))
- .with_labels(vec![decl.loc.primary()])
- .with_notes(vec!["hint: expected packet identifier".to_owned()]),
- ),
- },
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check field identifiers.
-/// Raises error diagnostics for the following cases:
-/// - duplicate field identifier
-fn check_field_identifiers(file: &parser_ast::File) -> Result<(), Diagnostics> {
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- let mut local_scope = HashMap::new();
- for field in decl.fields() {
- if let Some(id) = field.id() {
- if let Some(prev) = local_scope.insert(id.to_string(), field) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateFieldIdentifier)
- .with_message(format!(
- "redeclaration of {} field identifier `{}`",
- field.kind(),
- id
- ))
- .with_labels(vec![
- field.loc.primary(),
- prev.loc
- .secondary()
- .with_message(format!("`{}` is first declared here", id)),
- ]),
- )
- }
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check enum declarations.
-/// Raises error diagnostics for the following cases:
-/// - duplicate tag identifier
-/// - duplicate tag value
-fn check_enum_declarations(file: &parser_ast::File) -> Result<(), Diagnostics> {
- // Return the inclusive range with bounds correctly ordered.
- // The analyzer will raise an error if the bounds are incorrectly ordered,
- // but ordering them here still enables the additional checks below.
- fn ordered_range(range: &std::ops::RangeInclusive<usize>) -> std::ops::RangeInclusive<usize> {
- *std::cmp::min(range.start(), range.end())..=*std::cmp::max(range.start(), range.end())
- }
-
- fn check_tag_value<'a>(
- tag: &'a TagValue,
- range: std::ops::RangeInclusive<usize>,
- reserved_ranges: impl Iterator<Item = &'a TagRange>,
- tags_by_id: &mut HashMap<&'a str, SourceRange>,
- tags_by_value: &mut HashMap<usize, SourceRange>,
- diagnostics: &mut Diagnostics,
- ) {
- if let Some(prev) = tags_by_id.insert(&tag.id, tag.loc) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateTagIdentifier)
- .with_message(format!("duplicate tag identifier `{}`", tag.id))
- .with_labels(vec![
- tag.loc.primary(),
- prev.secondary()
- .with_message(format!("`{}` is first declared here", tag.id)),
- ]),
- )
- }
- if let Some(prev) = tags_by_value.insert(tag.value, tag.loc) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateTagValue)
- .with_message(format!("duplicate tag value `{}`", tag.value))
- .with_labels(vec![
- tag.loc.primary(),
- prev.secondary()
- .with_message(format!("`{}` is first declared here", tag.value)),
- ]),
- )
- }
- if !range.contains(&tag.value) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidTagValue)
- .with_message(format!(
- "tag value `{}` is outside the range of valid values `{}..{}`",
- tag.value,
- range.start(),
- range.end()
- ))
- .with_labels(vec![tag.loc.primary()]),
- )
- }
- for reserved_range in reserved_ranges {
- if ordered_range(&reserved_range.range).contains(&tag.value) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E43)
- .with_message(format!(
- "tag value `{}` is declared inside the reserved range `{} = {}..{}`",
- tag.value,
- reserved_range.id,
- reserved_range.range.start(),
- reserved_range.range.end()
- ))
- .with_labels(vec![tag.loc.primary()]),
- )
- }
- }
- }
-
- fn check_tag_range<'a>(
- tag: &'a TagRange,
- range: std::ops::RangeInclusive<usize>,
- tags_by_id: &mut HashMap<&'a str, SourceRange>,
- tags_by_value: &mut HashMap<usize, SourceRange>,
- diagnostics: &mut Diagnostics,
- ) {
- if let Some(prev) = tags_by_id.insert(&tag.id, tag.loc) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateTagIdentifier)
- .with_message(format!("duplicate tag identifier `{}`", tag.id))
- .with_labels(vec![
- tag.loc.primary(),
- prev.secondary()
- .with_message(format!("`{}` is first declared here", tag.id)),
- ]),
- )
- }
- if !range.contains(tag.range.start()) || !range.contains(tag.range.end()) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidTagRange)
- .with_message(format!(
- "tag range `{}..{}` has bounds outside the range of valid values `{}..{}`",
- tag.range.start(),
- tag.range.end(),
- range.start(),
- range.end(),
- ))
- .with_labels(vec![tag.loc.primary()]),
- )
- }
- if tag.range.start() >= tag.range.end() {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidTagRange)
- .with_message(format!(
- "tag start value `{}` is greater than or equal to the end value `{}`",
- tag.range.start(),
- tag.range.end()
- ))
- .with_labels(vec![tag.loc.primary()]),
- )
- }
-
- let range = ordered_range(&tag.range);
- for tag in tag.tags.iter() {
- check_tag_value(tag, range.clone(), [].iter(), tags_by_id, tags_by_value, diagnostics)
- }
- }
-
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- if let DeclDesc::Enum { tags, width, .. } = &decl.desc {
- let mut tags_by_id = HashMap::new();
- let mut tags_by_value = HashMap::new();
- let mut tags_by_range = tags
- .iter()
- .filter_map(|tag| match tag {
- Tag::Range(tag) => Some(tag),
- _ => None,
- })
- .collect::<Vec<_>>();
-
- for tag in tags {
- match tag {
- Tag::Value(value) => check_tag_value(
- value,
- 0..=scalar_max(*width),
- tags_by_range.iter().copied(),
- &mut tags_by_id,
- &mut tags_by_value,
- &mut diagnostics,
- ),
- Tag::Range(range) => check_tag_range(
- range,
- 0..=scalar_max(*width),
- &mut tags_by_id,
- &mut tags_by_value,
- &mut diagnostics,
- ),
- }
- }
-
- // Order tag ranges by increasing bounds in order to check for intersecting ranges.
- tags_by_range.sort_by(|lhs, rhs| {
- ordered_range(&lhs.range).into_inner().cmp(&ordered_range(&rhs.range).into_inner())
- });
-
- // Iterate to check for overlap between tag ranges.
- // Not all potential errors are reported, but the check will report
- // at least one error if the values are incorrect.
- for tag in tags_by_range.windows(2) {
- let left_tag = tag[0];
- let right_tag = tag[1];
- let left = ordered_range(&left_tag.range);
- let right = ordered_range(&right_tag.range);
- if !(left.end() < right.start() || right.end() < left.start()) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateTagRange)
- .with_message(format!(
- "overlapping tag range `{}..{}`",
- right.start(),
- right.end()
- ))
- .with_labels(vec![
- right_tag.loc.primary(),
- left_tag.loc.secondary().with_message(format!(
- "`{}..{}` is first declared here",
- left.start(),
- left.end()
- )),
- ]),
- )
- }
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check constraints.
-/// Raises error diagnostics for the following cases:
-/// - undeclared constraint identifier
-/// - invalid constraint identifier
-/// - invalid constraint scalar value (bad type)
-/// - invalid constraint scalar value (overflow)
-/// - invalid constraint enum value (bad type)
-/// - invalid constraint enum value (undeclared tag)
-/// - duplicate constraint
-fn check_constraints(
- file: &parser_ast::File,
- scope: &Scope<parser_ast::Annotation>,
-) -> Result<(), Diagnostics> {
- fn check_constraint(
- constraint: &Constraint,
- decl: &parser_ast::Decl,
- scope: &Scope<parser_ast::Annotation>,
- diagnostics: &mut Diagnostics,
- ) {
- match scope.iter_fields(decl).find(|field| field.id() == Some(&constraint.id)) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::UndeclaredConstraintIdentifier)
- .with_message(format!("undeclared constraint identifier `{}`", constraint.id))
- .with_labels(vec![constraint.loc.primary()])
- .with_notes(vec!["hint: expected scalar or typedef identifier".to_owned()]),
- ),
- Some(field @ Field { desc: FieldDesc::Array { .. }, .. }) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidConstraintIdentifier)
- .with_message(format!("invalid constraint identifier `{}`", constraint.id))
- .with_labels(vec![
- constraint.loc.primary(),
- field.loc.secondary().with_message(format!(
- "`{}` is declared here as array field",
- constraint.id
- )),
- ])
- .with_notes(vec!["hint: expected scalar or typedef identifier".to_owned()]),
- ),
- Some(field @ Field { desc: FieldDesc::Scalar { width, .. }, .. }) => {
- match constraint.value {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E17)
- .with_message(format!(
- "invalid constraint value `{}`",
- constraint.tag_id.as_ref().unwrap()
- ))
- .with_labels(vec![
- constraint.loc.primary(),
- field.loc.secondary().with_message(format!(
- "`{}` is declared here as scalar field",
- constraint.id
- )),
- ])
- .with_notes(vec!["hint: expected scalar value".to_owned()]),
- ),
- Some(value) if bit_width(value) > *width => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::ConstraintValueOutOfRange)
- .with_message(format!(
- "constraint value `{}` is larger than maximum value",
- value
- ))
- .with_labels(vec![constraint.loc.primary(), field.loc.secondary()]),
- ),
- _ => (),
- }
- }
- Some(field @ Field { desc: FieldDesc::Typedef { type_id, .. }, .. }) => {
- match scope.typedef.get(type_id) {
- None => (),
- Some(Decl { desc: DeclDesc::Enum { tags, .. }, .. }) => {
- match &constraint.tag_id {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E19)
- .with_message(format!(
- "invalid constraint value `{}`",
- constraint.value.unwrap()
- ))
- .with_labels(vec![
- constraint.loc.primary(),
- field.loc.secondary().with_message(format!(
- "`{}` is declared here as typedef field",
- constraint.id
- )),
- ])
- .with_notes(vec!["hint: expected enum value".to_owned()]),
- ),
- Some(tag_id) => match tags.iter().find(|tag| tag.id() == tag_id) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E20)
- .with_message(format!("undeclared enum tag `{}`", tag_id))
- .with_labels(vec![
- constraint.loc.primary(),
- field.loc.secondary().with_message(format!(
- "`{}` is declared here",
- constraint.id
- )),
- ]),
- ),
- Some(Tag::Range { .. }) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E42)
- .with_message(format!(
- "enum tag `{}` defines a range",
- tag_id
- ))
- .with_labels(vec![
- constraint.loc.primary(),
- field.loc.secondary().with_message(format!(
- "`{}` is declared here",
- constraint.id
- )),
- ])
- .with_notes(vec![
- "hint: expected enum tag with value".to_owned()
- ]),
- ),
- Some(_) => (),
- },
- }
- }
- Some(decl) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E21)
- .with_message(format!(
- "invalid constraint identifier `{}`",
- constraint.value.unwrap()
- ))
- .with_labels(vec![
- constraint.loc.primary(),
- field.loc.secondary().with_message(format!(
- "`{}` is declared here as {} typedef field",
- constraint.id,
- decl.kind()
- )),
- ])
- .with_notes(vec!["hint: expected enum value".to_owned()]),
- ),
- }
- }
- Some(_) => unreachable!(),
- }
- }
-
- fn check_constraints<'d>(
- constraints: &'d [Constraint],
- parent_decl: &parser_ast::Decl,
- scope: &Scope<parser_ast::Annotation>,
- mut constraints_by_id: HashMap<String, &'d Constraint>,
- diagnostics: &mut Diagnostics,
- ) {
- for constraint in constraints {
- check_constraint(constraint, parent_decl, scope, diagnostics);
- if let Some(prev) = constraints_by_id.insert(constraint.id.to_string(), constraint) {
- // Constraint appears twice in current set.
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicateConstraintIdentifier)
- .with_message(format!(
- "duplicate constraint identifier `{}`",
- constraint.id
- ))
- .with_labels(vec![
- constraint.loc.primary(),
- prev.loc
- .secondary()
- .with_message(format!("`{}` is first constrained here", prev.id)),
- ]),
- )
- }
- }
- }
-
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- // Check constraints for packet inheritance.
- match &decl.desc {
- DeclDesc::Packet { constraints, parent_id: Some(parent_id), .. }
- | DeclDesc::Struct { constraints, parent_id: Some(parent_id), .. } => {
- let parent_decl = scope.typedef.get(parent_id).unwrap();
- check_constraints(
- constraints,
- parent_decl,
- scope,
- // Include constraints declared in parent declarations
- // for duplicate check.
- scope.iter_parents(decl).fold(HashMap::new(), |acc, decl| {
- decl.constraints().fold(acc, |mut acc, constraint| {
- let _ = acc.insert(constraint.id.to_string(), constraint);
- acc
- })
- }),
- &mut diagnostics,
- )
- }
- _ => (),
- }
-
- // Check constraints for group inlining.
- for field in decl.fields() {
- if let FieldDesc::Group { group_id, constraints } = &field.desc {
- let group_decl = scope.typedef.get(group_id).unwrap();
- check_constraints(constraints, group_decl, scope, HashMap::new(), &mut diagnostics)
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check size fields.
-/// Raises error diagnostics for the following cases:
-/// - undeclared size identifier
-/// - invalid size identifier
-/// - duplicate size field
-/// - undeclared count identifier
-/// - invalid count identifier
-/// - duplicate count field
-/// - undeclared elementsize identifier
-/// - invalid elementsize identifier
-/// - duplicate elementsize field
-fn check_size_fields(file: &parser_ast::File) -> Result<(), Diagnostics> {
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- let mut size_for_id = HashMap::new();
- let mut element_size_for_id = HashMap::new();
- for field in decl.fields() {
- // Check for duplicate size, count, or element size fields.
- if let Some((reverse_map, field_id, err)) = match &field.desc {
- FieldDesc::Size { field_id, .. } => {
- Some((&mut size_for_id, field_id, ErrorCode::DuplicateSizeField))
- }
- FieldDesc::Count { field_id, .. } => {
- Some((&mut size_for_id, field_id, ErrorCode::DuplicateCountField))
- }
- FieldDesc::ElementSize { field_id, .. } => {
- Some((&mut element_size_for_id, field_id, ErrorCode::DuplicateElementSizeField))
- }
- _ => None,
- } {
- if let Some(prev) = reverse_map.insert(field_id, field) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(err)
- .with_message(format!("duplicate {} field", field.kind()))
- .with_labels(vec![
- field.loc.primary(),
- prev.loc.secondary().with_message(format!(
- "{} is first declared here",
- prev.kind()
- )),
- ]),
- )
- }
- }
-
- // Check for invalid size, count, or element size field identifiers.
- match &field.desc {
- FieldDesc::Size { field_id, .. } => {
- match decl.fields().find(|field| match &field.desc {
- FieldDesc::Payload { .. } => field_id == "_payload_",
- FieldDesc::Body { .. } => field_id == "_body_",
- _ => field.id() == Some(field_id),
- }) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::UndeclaredSizeIdentifier)
- .with_message(format!(
- "undeclared {} identifier `{}`",
- field.kind(),
- field_id
- ))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec![
- "hint: expected payload, body, or array identifier".to_owned(),
- ]),
- ),
- Some(Field { desc: FieldDesc::Body { .. }, .. })
- | Some(Field { desc: FieldDesc::Payload { .. }, .. })
- | Some(Field { desc: FieldDesc::Array { .. }, .. }) => (),
- Some(Field { loc, .. }) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidSizeIdentifier)
- .with_message(format!(
- "invalid {} identifier `{}`",
- field.kind(),
- field_id
- ))
- .with_labels(vec![field.loc.primary(), loc.secondary()])
- .with_notes(vec![
- "hint: expected payload, body, or array identifier".to_owned(),
- ]),
- ),
- }
- }
-
- FieldDesc::Count { field_id, .. } | FieldDesc::ElementSize { field_id, .. } => {
- let (undeclared_err, invalid_err) =
- if matches!(&field.desc, FieldDesc::Count { .. }) {
- (
- ErrorCode::UndeclaredCountIdentifier,
- ErrorCode::InvalidCountIdentifier,
- )
- } else {
- (
- ErrorCode::UndeclaredElementSizeIdentifier,
- ErrorCode::InvalidElementSizeIdentifier,
- )
- };
- match decl.fields().find(|field| field.id() == Some(field_id)) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(undeclared_err)
- .with_message(format!(
- "undeclared {} identifier `{}`",
- field.kind(),
- field_id
- ))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec!["hint: expected array identifier".to_owned()]),
- ),
- Some(Field { desc: FieldDesc::Array { .. }, .. }) => (),
- Some(Field { loc, .. }) => diagnostics.push(
- Diagnostic::error()
- .with_code(invalid_err)
- .with_message(format!(
- "invalid {} identifier `{}`",
- field.kind(),
- field_id
- ))
- .with_labels(vec![field.loc.primary(), loc.secondary()])
- .with_notes(vec!["hint: expected array identifier".to_owned()]),
- ),
- }
- }
- _ => (),
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check fixed fields.
-/// Raises error diagnostics for the following cases:
-/// - invalid scalar value
-/// - undeclared enum identifier
-/// - invalid enum identifier
-/// - undeclared tag identifier
-fn check_fixed_fields(
- file: &parser_ast::File,
- scope: &Scope<parser_ast::Annotation>,
-) -> Result<(), Diagnostics> {
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- for field in decl.fields() {
- match &field.desc {
- FieldDesc::FixedScalar { value, width } if bit_width(*value) > *width => {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::FixedValueOutOfRange)
- .with_message(format!(
- "fixed value `{}` is larger than maximum value",
- value
- ))
- .with_labels(vec![field.loc.primary()]),
- )
- }
- FieldDesc::FixedEnum { tag_id, enum_id } => match scope.typedef.get(enum_id) {
- None => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E33)
- .with_message(format!("undeclared type identifier `{}`", enum_id))
- .with_labels(vec![field.loc.primary()])
- .with_notes(vec!["hint: expected enum identifier".to_owned()]),
- ),
- Some(enum_decl @ Decl { desc: DeclDesc::Enum { tags, .. }, .. }) => {
- if !tags.iter().any(|tag| tag.id() == tag_id) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E34)
- .with_message(format!("undeclared tag identifier `{}`", tag_id))
- .with_labels(vec![
- field.loc.primary(),
- enum_decl.loc.secondary(),
- ]),
- )
- }
- }
- Some(decl) => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::E35)
- .with_message(format!("invalid type identifier `{}`", enum_id))
- .with_labels(vec![
- field.loc.primary(),
- decl.loc
- .secondary()
- .with_message(format!("`{}` is declared here", enum_id)),
- ])
- .with_notes(vec!["hint: expected enum identifier".to_owned()]),
- ),
- },
- _ => (),
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check payload fields.
-/// Raises error diagnostics for the following cases:
-/// - duplicate payload field
-/// - duplicate payload field size
-/// - duplicate body field
-/// - duplicate body field size
-/// - missing payload field
-fn check_payload_fields(file: &parser_ast::File) -> Result<(), Diagnostics> {
- // Check whether the declaration requires a payload field.
- // The payload is required if any child packet declares fields.
- fn requires_payload(file: &parser_ast::File, decl: &parser_ast::Decl) -> bool {
- file.iter_children(decl).any(|child| child.fields().next().is_some())
- }
-
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- let mut payload: Option<&parser_ast::Field> = None;
- for field in decl.fields() {
- match &field.desc {
- FieldDesc::Payload { .. } | FieldDesc::Body { .. } => {
- if let Some(prev) = payload {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::DuplicatePayloadField)
- .with_message(format!("duplicate {} field", field.kind()))
- .with_labels(vec![
- field.loc.primary(),
- prev.loc.secondary().with_message(format!(
- "{} is first declared here",
- prev.kind()
- )),
- ]),
- )
- } else {
- payload = Some(field);
- }
- }
- _ => (),
- }
- }
-
- if payload.is_none() && requires_payload(file, decl) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::MissingPayloadField)
- .with_message("missing payload field".to_owned())
- .with_labels(vec![decl.loc.primary()])
- .with_notes(vec![format!(
- "hint: one child packet is extending `{}`",
- decl.id().unwrap()
- )]),
- )
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check array fields.
-/// Raises error diagnostics for the following cases:
-/// - redundant array field size
-fn check_array_fields(file: &parser_ast::File) -> Result<(), Diagnostics> {
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- for field in decl.fields() {
- if let FieldDesc::Array { id, size: Some(size), .. } = &field.desc {
- if let Some(size_field) = decl.fields().find(|field| match &field.desc {
- FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => {
- field_id == id
- }
- _ => false,
- }) {
- diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::RedundantArraySize)
- .with_message(format!("redundant array {} field", size_field.kind()))
- .with_labels(vec![
- size_field.loc.primary(),
- field
- .loc
- .secondary()
- .with_message(format!("`{}` has constant size {}", id, size)),
- ]),
- )
- }
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check padding fields.
-/// Raises error diagnostics for the following cases:
-/// - padding field not following an array field
-fn check_padding_fields(file: &parser_ast::File) -> Result<(), Diagnostics> {
- let mut diagnostics: Diagnostics = Default::default();
- for decl in &file.declarations {
- let mut previous_is_array = false;
- for field in decl.fields() {
- match &field.desc {
- FieldDesc::Padding { .. } if !previous_is_array => diagnostics.push(
- Diagnostic::error()
- .with_code(ErrorCode::InvalidPaddingField)
- .with_message("padding field does not follow an array field".to_owned())
- .with_labels(vec![field.loc.primary()]),
- ),
- FieldDesc::Array { .. } => previous_is_array = true,
- _ => previous_is_array = false,
- }
- }
- }
-
- diagnostics.err_or(())
-}
-
-/// Check checksum fields.
-/// Raises error diagnostics for the following cases:
-/// - checksum field precedes checksum start
-/// - undeclared checksum field
-/// - invalid checksum field
-fn check_checksum_fields(
- _file: &parser_ast::File,
- _scope: &Scope<parser_ast::Annotation>,
-) -> Result<(), Diagnostics> {
- // TODO
- Ok(())
-}
-
-/// Check correct definition of packet sizes.
-/// Annotate fields and declarations with the size in bits.
-fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
- fn annotate_decl(
- decl: &parser_ast::Decl,
- scope: &HashMap<String, ast::DeclAnnotation>,
- ) -> ast::Decl {
- // Annotate the declaration fields.
- let mut decl = decl.annotate(Default::default(), |fields| {
- fields.iter().map(|field| annotate_field(decl, field, scope)).collect()
- });
-
- // Compute the declaration annotation.
- decl.annot = match &decl.desc {
- DeclDesc::Packet { fields, .. }
- | DeclDesc::Struct { fields, .. }
- | DeclDesc::Group { fields, .. } => {
- let mut size = decl
- .parent_id()
- .and_then(|parent_id| scope.get(parent_id))
- .map(|annot| annot.size)
- .unwrap_or(ast::Size::Static(0));
- let mut payload_size = ast::Size::Static(0);
- for field in fields {
- match &field.desc {
- FieldDesc::Payload { .. } | FieldDesc::Body { .. } => {
- payload_size = field.annot.size
- }
- _ => size = size + field.annot.size,
- }
- }
- ast::DeclAnnotation { size, payload_size }
- }
- DeclDesc::Enum { width, .. }
- | DeclDesc::Checksum { width, .. }
- | DeclDesc::CustomField { width: Some(width), .. } => {
- ast::DeclAnnotation { size: ast::Size::Static(*width), ..decl.annot }
- }
- DeclDesc::CustomField { width: None, .. } => {
- ast::DeclAnnotation { size: ast::Size::Dynamic, ..decl.annot }
- }
- DeclDesc::Test { .. } => {
- ast::DeclAnnotation { size: ast::Size::Static(0), ..decl.annot }
- }
- };
- decl
- }
-
- fn annotate_field(
- decl: &parser_ast::Decl,
- field: &parser_ast::Field,
- scope: &HashMap<String, ast::DeclAnnotation>,
- ) -> ast::Field {
- field.annotate(match &field.desc {
- FieldDesc::Checksum { .. } | FieldDesc::Padding { .. } => {
- ast::FieldAnnotation::new(ast::Size::Static(0))
- }
- FieldDesc::Size { width, .. }
- | FieldDesc::Count { width, .. }
- | FieldDesc::ElementSize { width, .. }
- | FieldDesc::FixedScalar { width, .. }
- | FieldDesc::Reserved { width }
- | FieldDesc::Scalar { width, .. } => {
- ast::FieldAnnotation::new(ast::Size::Static(*width))
- }
- FieldDesc::Body | FieldDesc::Payload { .. } => {
- let has_payload_size = decl.fields().any(|field| match &field.desc {
- FieldDesc::Size { field_id, .. } => {
- field_id == "_body_" || field_id == "_payload_"
- }
- _ => false,
- });
- ast::FieldAnnotation::new(if has_payload_size {
- ast::Size::Dynamic
- } else {
- ast::Size::Unknown
- })
- }
- FieldDesc::Typedef { type_id, .. }
- | FieldDesc::FixedEnum { enum_id: type_id, .. }
- | FieldDesc::Group { group_id: type_id, .. } => {
- let type_annot = scope.get(type_id).unwrap();
- ast::FieldAnnotation::new(type_annot.size + type_annot.payload_size)
- }
- FieldDesc::Array { width: Some(width), size: Some(size), .. } => {
- ast::FieldAnnotation::new(ast::Size::Static(*size * *width))
- }
- FieldDesc::Array { width: None, size: Some(size), type_id: Some(type_id), .. } => {
- let type_annot = scope.get(type_id).unwrap();
- ast::FieldAnnotation::new((type_annot.size + type_annot.payload_size) * *size)
- }
- FieldDesc::Array { id, size: None, .. } => {
- // The element size does not matter when the array size is not
- // static: the array size is Dynamic if a matching count or size
- // field exists, and Unknown otherwise.
- let has_array_size = decl.fields().any(|field| match &field.desc {
- FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => {
- field_id == id
- }
- _ => false,
- });
- ast::FieldAnnotation::new(if has_array_size {
- ast::Size::Dynamic
- } else {
- ast::Size::Unknown
- })
- }
- FieldDesc::Array { .. } => unreachable!(),
- })
- }
-
- // Construct a scope mapping typedef identifiers to decl annotations.
- let mut scope = HashMap::new();
-
- // Annotate declarations.
- let mut declarations = Vec::new();
- for decl in file.declarations.iter() {
- let decl = annotate_decl(decl, &scope);
- if let Some(id) = decl.id() {
- scope.insert(id.to_string(), decl.annot.clone());
- }
- declarations.push(decl);
- }
-
- File {
- version: file.version.clone(),
- file: file.file,
- comments: file.comments.clone(),
- endianness: file.endianness,
- declarations,
- }
-}
-
-/// Inline padding fields.
-/// The padding information is added directly to the targeted fields.
-fn inline_paddings(file: &mut ast::File) {
- for decl in file.declarations.iter_mut() {
- match &mut decl.desc {
- DeclDesc::Struct { fields, .. }
- | DeclDesc::Packet { fields, .. }
- | DeclDesc::Group { fields, .. } => {
- let mut padding = None;
- for field in fields.iter_mut().rev() {
- field.annot.padded_size = padding;
- padding = match &field.desc {
- FieldDesc::Padding { size } => Some(*size),
- _ => None,
- };
- }
- }
- _ => (),
- }
- }
-}
-
-/// Inline group fields and remove group declarations.
-fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> {
- fn inline_fields<'a>(
- fields: impl Iterator<Item = &'a ast::Field>,
- groups: &HashMap<String, ast::Decl>,
- constraints: &HashMap<String, Constraint>,
- ) -> Vec<ast::Field> {
- fields
- .flat_map(|field| match &field.desc {
- FieldDesc::Group { group_id, constraints: group_constraints } => {
- let mut constraints = constraints.clone();
- constraints.extend(
- group_constraints
- .iter()
- .map(|constraint| (constraint.id.clone(), constraint.clone())),
- );
- inline_fields(groups.get(group_id).unwrap().fields(), groups, &constraints)
- }
- FieldDesc::Scalar { id, width } if constraints.contains_key(id) => {
- vec![ast::Field {
- desc: FieldDesc::FixedScalar {
- width: *width,
- value: constraints.get(id).unwrap().value.unwrap(),
- },
- loc: field.loc,
- annot: field.annot.clone(),
- }]
- }
- FieldDesc::Typedef { id, type_id, .. } if constraints.contains_key(id) => {
- vec![ast::Field {
- desc: FieldDesc::FixedEnum {
- enum_id: type_id.clone(),
- tag_id: constraints
- .get(id)
- .and_then(|constraint| constraint.tag_id.clone())
- .unwrap(),
- },
- loc: field.loc,
- annot: field.annot.clone(),
- }]
- }
- _ => vec![field.clone()],
- })
- .collect()
- }
-
- let groups = utils::drain_filter(&mut file.declarations, |decl| {
- matches!(&decl.desc, DeclDesc::Group { .. })
- })
- .into_iter()
- .map(|decl| (decl.id().unwrap().to_owned(), decl))
- .collect::<HashMap<String, _>>();
-
- for decl in file.declarations.iter_mut() {
- match &mut decl.desc {
- DeclDesc::Packet { fields, .. } | DeclDesc::Struct { fields, .. } => {
- *fields = inline_fields(fields.iter(), &groups, &HashMap::new())
- }
- _ => (),
- }
- }
-
- Ok(())
-}
-
-/// Analyzer entry point, produces a new AST with annotations resulting
-/// from the analysis.
-pub fn analyze(file: &parser_ast::File) -> Result<ast::File, Diagnostics> {
- let scope = Scope::new(file)?;
- check_decl_identifiers(file, &scope)?;
- check_field_identifiers(file)?;
- check_enum_declarations(file)?;
- check_constraints(file, &scope)?;
- check_size_fields(file)?;
- check_fixed_fields(file, &scope)?;
- check_payload_fields(file)?;
- check_array_fields(file)?;
- check_padding_fields(file)?;
- check_checksum_fields(file, &scope)?;
- let mut file = compute_field_sizes(file);
- inline_paddings(&mut file);
- inline_groups(&mut file)?;
- Ok(file)
-}
-
-#[cfg(test)]
-mod test {
- use crate::analyzer;
- use crate::ast::*;
- use crate::parser::parse_inline;
- use codespan_reporting::term::termcolor;
-
- macro_rules! raises {
- ($code:ident, $text:literal) => {{
- let mut db = SourceDatabase::new();
- let file = parse_inline(&mut db, "stdin".to_owned(), $text.to_owned())
- .expect("parsing failure");
- let result = analyzer::analyze(&file);
- assert!(matches!(result, Err(_)));
- let diagnostics = result.err().unwrap();
- let mut buffer = termcolor::Buffer::no_color();
- let _ = diagnostics.emit(&db, &mut buffer);
- println!("{}", std::str::from_utf8(buffer.as_slice()).unwrap());
- assert_eq!(diagnostics.diagnostics.len(), 1);
- assert_eq!(diagnostics.diagnostics[0].code, Some(analyzer::ErrorCode::$code.into()));
- }};
- }
-
- macro_rules! valid {
- ($text:literal) => {{
- let mut db = SourceDatabase::new();
- let file = parse_inline(&mut db, "stdin".to_owned(), $text.to_owned())
- .expect("parsing failure");
- assert!(analyzer::analyze(&file).is_ok());
- }};
- }
-
- #[test]
- fn test_e1() {
- raises!(
- DuplicateDeclIdentifier,
- r#"
- little_endian_packets
- struct A { }
- packet A { }
- "#
- );
-
- raises!(
- DuplicateDeclIdentifier,
- r#"
- little_endian_packets
- struct A { }
- enum A : 8 { X = 0, Y = 1 }
- "#
- );
- }
-
- #[test]
- fn test_e2() {
- raises!(
- RecursiveDecl,
- r#"
- little_endian_packets
- packet A : A { }
- "#
- );
-
- raises!(
- RecursiveDecl,
- r#"
- little_endian_packets
- packet A : B { }
- packet B : A { }
- "#
- );
-
- raises!(
- RecursiveDecl,
- r#"
- little_endian_packets
- struct B { x : B }
- "#
- );
-
- raises!(
- RecursiveDecl,
- r#"
- little_endian_packets
- struct B { x : B[8] }
- "#
- );
-
- raises!(
- RecursiveDecl,
- r#"
- little_endian_packets
- group C { C { x = 1 } }
- "#
- );
- }
-
- #[test]
- fn test_e3() {
- raises!(
- UndeclaredGroupIdentifier,
- r#"
- little_endian_packets
- packet A { C { x = 1 } }
- "#
- );
- }
-
- #[test]
- fn test_e4() {
- raises!(
- InvalidGroupIdentifier,
- r#"
- little_endian_packets
- struct C { x : 8 }
- packet A { C { x = 1 } }
- "#
- );
- }
-
- #[test]
- fn test_e5() {
- raises!(
- UndeclaredTypeIdentifier,
- r#"
- little_endian_packets
- packet A { x : B }
- "#
- );
-
- raises!(
- UndeclaredTypeIdentifier,
- r#"
- little_endian_packets
- packet A { x : B[] }
- "#
- );
- }
-
- #[test]
- fn test_e6() {
- raises!(
- InvalidTypeIdentifier,
- r#"
- little_endian_packets
- packet A { x : 8 }
- packet B { x : A }
- "#
- );
-
- raises!(
- InvalidTypeIdentifier,
- r#"
- little_endian_packets
- packet A { x : 8 }
- packet B { x : A[] }
- "#
- );
- }
-
- #[test]
- fn test_e7() {
- raises!(
- UndeclaredParentIdentifier,
- r#"
- little_endian_packets
- packet A : B { }
- "#
- );
-
- raises!(
- UndeclaredParentIdentifier,
- r#"
- little_endian_packets
- struct A : B { }
- "#
- );
- }
-
- #[test]
- fn test_e8() {
- raises!(
- InvalidParentIdentifier,
- r#"
- little_endian_packets
- struct A { }
- packet B : A { }
- "#
- );
-
- raises!(
- InvalidParentIdentifier,
- r#"
- little_endian_packets
- packet A { }
- struct B : A { }
- "#
- );
-
- raises!(
- InvalidParentIdentifier,
- r#"
- little_endian_packets
- group A { x : 1 }
- struct B : A { }
- "#
- );
- }
-
- #[ignore]
- #[test]
- fn test_e9() {
- raises!(
- UndeclaredTestIdentifier,
- r#"
- little_endian_packets
- test A { "aaa" }
- "#
- );
- }
-
- #[ignore]
- #[test]
- fn test_e10() {
- raises!(
- InvalidTestIdentifier,
- r#"
- little_endian_packets
- struct A { }
- test A { "aaa" }
- "#
- );
-
- raises!(
- InvalidTestIdentifier,
- r#"
- little_endian_packets
- group A { x : 8 }
- test A { "aaa" }
- "#
- );
- }
-
- #[test]
- fn test_e11() {
- raises!(
- DuplicateFieldIdentifier,
- r#"
- little_endian_packets
- enum A : 8 { X = 0 }
- struct B {
- x : 8,
- x : A
- }
- "#
- );
-
- raises!(
- DuplicateFieldIdentifier,
- r#"
- little_endian_packets
- enum A : 8 { X = 0 }
- packet B {
- x : 8,
- x : A[]
- }
- "#
- );
- }
-
- #[test]
- fn test_e12() {
- raises!(
- DuplicateTagIdentifier,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 0,
- X = 1,
- }
- "#
- );
-
- raises!(
- DuplicateTagIdentifier,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 0,
- A = 1..10 {
- X = 1,
- }
- }
- "#
- );
-
- raises!(
- DuplicateTagIdentifier,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 0,
- X = 1..10,
- }
- "#
- );
- }
-
- #[test]
- fn test_e13() {
- raises!(
- DuplicateTagValue,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 0,
- Y = 0,
- }
- "#
- );
-
- raises!(
- DuplicateTagValue,
- r#"
- little_endian_packets
- enum A : 8 {
- A = 1..10 {
- X = 1,
- Y = 1,
- }
- }
- "#
- );
- }
-
- #[test]
- fn test_e14() {
- raises!(
- InvalidTagValue,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 256,
- }
- "#
- );
-
- raises!(
- InvalidTagValue,
- r#"
- little_endian_packets
- enum A : 8 {
- A = 0,
- X = 10..20 {
- B = 1,
- },
- }
- "#
- );
- }
-
- #[test]
- fn test_e15() {
- raises!(
- UndeclaredConstraintIdentifier,
- r#"
- little_endian_packets
- packet A { }
- packet B : A (x = 1) { }
- "#
- );
-
- raises!(
- UndeclaredConstraintIdentifier,
- r#"
- little_endian_packets
- group A { x : 8 }
- packet B {
- A { y = 1 }
- }
- "#
- );
- }
-
- #[test]
- fn test_e16() {
- raises!(
- InvalidConstraintIdentifier,
- r#"
- little_endian_packets
- packet A { x : 8[] }
- packet B : A (x = 1) { }
- "#
- );
-
- raises!(
- InvalidConstraintIdentifier,
- r#"
- little_endian_packets
- group A { x : 8[] }
- packet B {
- A { x = 1 }
- }
- "#
- );
- }
-
- #[test]
- fn test_e17() {
- raises!(
- E17,
- r#"
- little_endian_packets
- packet A { x : 8 }
- packet B : A (x = X) { }
- "#
- );
-
- raises!(
- E17,
- r#"
- little_endian_packets
- group A { x : 8 }
- packet B {
- A { x = X }
- }
- "#
- );
- }
-
- #[test]
- fn test_e18() {
- raises!(
- ConstraintValueOutOfRange,
- r#"
- little_endian_packets
- packet A { x : 8 }
- packet B : A (x = 256) { }
- "#
- );
-
- raises!(
- ConstraintValueOutOfRange,
- r#"
- little_endian_packets
- group A { x : 8 }
- packet B {
- A { x = 256 }
- }
- "#
- );
- }
-
- #[test]
- fn test_e19() {
- raises!(
- E19,
- r#"
- little_endian_packets
- enum C : 8 { X = 0 }
- packet A { x : C }
- packet B : A (x = 0) { }
- "#
- );
-
- raises!(
- E19,
- r#"
- little_endian_packets
- enum C : 8 { X = 0 }
- group A { x : C }
- packet B {
- A { x = 0 }
- }
- "#
- );
- }
-
- #[test]
- fn test_e20() {
- raises!(
- E20,
- r#"
- little_endian_packets
- enum C : 8 { X = 0 }
- packet A { x : C }
- packet B : A (x = Y) { }
- "#
- );
-
- raises!(
- E20,
- r#"
- little_endian_packets
- enum C : 8 { X = 0 }
- group A { x : C }
- packet B {
- A { x = Y }
- }
- "#
- );
- }
-
- #[test]
- fn test_e21() {
- raises!(
- E21,
- r#"
- little_endian_packets
- struct C { }
- packet A { x : C }
- packet B : A (x = 0) { }
- "#
- );
-
- raises!(
- E21,
- r#"
- little_endian_packets
- struct C { }
- group A { x : C }
- packet B {
- A { x = 0 }
- }
- "#
- );
- }
-
- #[test]
- fn test_e22() {
- raises!(
- DuplicateConstraintIdentifier,
- r#"
- little_endian_packets
- packet A { x: 8 }
- packet B : A (x = 0, x = 1) { }
- "#
- );
-
- raises!(
- DuplicateConstraintIdentifier,
- r#"
- little_endian_packets
- packet A { x: 8 }
- packet B : A (x = 0) { }
- packet C : B (x = 1) { }
- "#
- );
-
- raises!(
- DuplicateConstraintIdentifier,
- r#"
- little_endian_packets
- group A { x : 8 }
- packet B {
- A { x = 0, x = 1 }
- }
- "#
- );
- }
-
- #[test]
- fn test_e23() {
- raises!(
- DuplicateSizeField,
- r#"
- little_endian_packets
- struct A {
- _size_ (_payload_) : 8,
- _size_ (_payload_) : 8,
- _payload_,
- }
- "#
- );
-
- raises!(
- DuplicateSizeField,
- r#"
- little_endian_packets
- struct A {
- _count_ (x) : 8,
- _size_ (x) : 8,
- x: 8[],
- }
- "#
- );
- }
-
- #[test]
- fn test_e24() {
- raises!(
- UndeclaredSizeIdentifier,
- r#"
- little_endian_packets
- struct A {
- _size_ (x) : 8,
- }
- "#
- );
-
- raises!(
- UndeclaredSizeIdentifier,
- r#"
- little_endian_packets
- struct A {
- _size_ (_payload_) : 8,
- }
- "#
- );
- }
-
- #[test]
- fn test_e25() {
- raises!(
- InvalidSizeIdentifier,
- r#"
- little_endian_packets
- enum B : 8 { X = 0 }
- struct A {
- _size_ (x) : 8,
- x : B,
- }
- "#
- );
- }
-
- #[test]
- fn test_e26() {
- raises!(
- DuplicateCountField,
- r#"
- little_endian_packets
- struct A {
- _size_ (x) : 8,
- _count_ (x) : 8,
- x: 8[],
- }
- "#
- );
- }
-
- #[test]
- fn test_e27() {
- raises!(
- UndeclaredCountIdentifier,
- r#"
- little_endian_packets
- struct A {
- _count_ (x) : 8,
- }
- "#
- );
- }
-
- #[test]
- fn test_e28() {
- raises!(
- InvalidCountIdentifier,
- r#"
- little_endian_packets
- enum B : 8 { X = 0 }
- struct A {
- _count_ (x) : 8,
- x : B,
- }
- "#
- );
- }
-
- #[test]
- fn test_e29() {
- raises!(
- DuplicateElementSizeField,
- r#"
- little_endian_packets
- struct A {
- _elementsize_ (x) : 8,
- _elementsize_ (x) : 8,
- x: 8[],
- }
- "#
- );
- }
-
- #[test]
- fn test_e30() {
- raises!(
- UndeclaredElementSizeIdentifier,
- r#"
- little_endian_packets
- struct A {
- _elementsize_ (x) : 8,
- }
- "#
- );
- }
-
- #[test]
- fn test_e31() {
- raises!(
- InvalidElementSizeIdentifier,
- r#"
- little_endian_packets
- enum B : 8 { X = 0 }
- struct A {
- _elementsize_ (x) : 8,
- x : B,
- }
- "#
- );
- }
-
- #[test]
- fn test_e32() {
- raises!(
- FixedValueOutOfRange,
- r#"
- little_endian_packets
- struct A {
- _fixed_ = 256 : 8,
- }
- "#
- );
- }
-
- #[test]
- fn test_e33() {
- raises!(
- E33,
- r#"
- little_endian_packets
- struct A {
- _fixed_ = X : B,
- }
- "#
- );
- }
-
- #[test]
- fn test_e34() {
- raises!(
- E34,
- r#"
- little_endian_packets
- enum B : 8 { X = 0 }
- struct A {
- _fixed_ = Y : B,
- }
- "#
- );
- }
-
- #[test]
- fn test_e35() {
- raises!(
- E35,
- r#"
- little_endian_packets
- struct B { }
- struct A {
- _fixed_ = X : B,
- }
- "#
- );
- }
-
- #[test]
- fn test_e36() {
- raises!(
- DuplicatePayloadField,
- r#"
- little_endian_packets
- packet A {
- _payload_,
- _body_,
- }
- "#
- );
-
- raises!(
- DuplicatePayloadField,
- r#"
- little_endian_packets
- packet A {
- _body_,
- _payload_,
- }
- "#
- );
- }
-
- #[test]
- fn test_e37() {
- raises!(
- MissingPayloadField,
- r#"
- little_endian_packets
- packet A { x : 8 }
- packet B : A { y : 8 }
- "#
- );
-
- raises!(
- MissingPayloadField,
- r#"
- little_endian_packets
- packet A { x : 8 }
- packet B : A (x = 0) { }
- packet C : B { y : 8 }
- "#
- );
- }
-
- #[test]
- fn test_e38() {
- raises!(
- RedundantArraySize,
- r#"
- little_endian_packets
- packet A {
- _size_ (x) : 8,
- x : 8[8]
- }
- "#
- );
-
- raises!(
- RedundantArraySize,
- r#"
- little_endian_packets
- packet A {
- _count_ (x) : 8,
- x : 8[8]
- }
- "#
- );
- }
-
- #[test]
- fn test_e39() {
- raises!(
- InvalidPaddingField,
- r#"
- little_endian_packets
- packet A {
- _padding_ [16],
- x : 8[]
- }
- "#
- );
-
- raises!(
- InvalidPaddingField,
- r#"
- little_endian_packets
- enum A : 8 { X = 0 }
- packet B {
- x : A,
- _padding_ [16]
- }
- "#
- );
-
- valid!(
- r#"
- little_endian_packets
- packet A {
- x : 8[],
- _padding_ [16]
- }
- "#
- );
- }
-
- #[test]
- fn test_e40() {
- raises!(
- InvalidTagRange,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 4..2,
- }
- "#
- );
-
- raises!(
- InvalidTagRange,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 2..2,
- }
- "#
- );
-
- raises!(
- InvalidTagRange,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 258..259,
- }
- "#
- );
- }
-
- #[test]
- fn test_e41() {
- raises!(
- DuplicateTagRange,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 0..15,
- Y = 8..31,
- }
- "#
- );
-
- raises!(
- DuplicateTagRange,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 8..31,
- Y = 0..15,
- }
- "#
- );
-
- raises!(
- DuplicateTagRange,
- r#"
- little_endian_packets
- enum A : 8 {
- X = 1..9,
- Y = 9..11,
- }
- "#
- );
- }
-
- #[test]
- fn test_e42() {
- raises!(
- E42,
- r#"
- little_endian_packets
- enum C : 8 { X = 0..15 }
- packet A { x : C }
- packet B : A (x = X) { }
- "#
- );
-
- raises!(
- E42,
- r#"
- little_endian_packets
- enum C : 8 { X = 0..15 }
- group A { x : C }
- packet B {
- A { x = X }
- }
- "#
- );
- }
-
- #[test]
- fn test_e43() {
- raises!(
- E43,
- r#"
- little_endian_packets
- enum A : 8 {
- A = 0,
- B = 1,
- X = 1..15,
- }
- "#
- );
- }
-
- #[test]
- fn test_enum_declaration() {
- valid!(
- r#"
- little_endian_packets
- enum A : 7 {
- X = 0,
- Y = 1,
- Z = 127,
- }
- "#
- );
-
- valid!(
- r#"
- little_endian_packets
- enum A : 7 {
- A = 50..100 {
- X = 50,
- Y = 100,
- },
- Z = 101,
- }
- "#
- );
-
- valid!(
- r#"
- little_endian_packets
- enum A : 7 {
- A = 50..100,
- X = 101,
- }
- "#
- );
- }
-
- fn desugar(text: &str) -> analyzer::ast::File {
- let mut db = SourceDatabase::new();
- let file =
- parse_inline(&mut db, "stdin".to_owned(), text.to_owned()).expect("parsing failure");
- analyzer::analyze(&file).expect("analyzer failure")
- }
-
- #[test]
- fn test_inline_groups() {
- assert_eq!(
- desugar(
- r#"
- little_endian_packets
- enum E : 8 { X=0, Y=1 }
- group G {
- a: 8,
- b: E,
- }
- packet A {
- G { }
- }
- "#
- ),
- desugar(
- r#"
- little_endian_packets
- enum E : 8 { X=0, Y=1 }
- packet A {
- a: 8,
- b: E,
- }
- "#
- )
- );
-
- assert_eq!(
- desugar(
- r#"
- little_endian_packets
- enum E : 8 { X=0, Y=1 }
- group G {
- a: 8,
- b: E,
- }
- packet A {
- G { a=1, b=X }
- }
- "#
- ),
- desugar(
- r#"
- little_endian_packets
- enum E : 8 { X=0, Y=1 }
- packet A {
- _fixed_ = 1: 8,
- _fixed_ = X: E,
- }
- "#
- )
- );
-
- assert_eq!(
- desugar(
- r#"
- little_endian_packets
- enum E : 8 { X=0, Y=1 }
- group G1 {
- a: 8,
- }
- group G2 {
- G1 { a=1 },
- b: E,
- }
- packet A {
- G2 { b=X }
- }
- "#
- ),
- desugar(
- r#"
- little_endian_packets
- enum E : 8 { X=0, Y=1 }
- packet A {
- _fixed_ = 1: 8,
- _fixed_ = X: E,
- }
- "#
- )
- );
- }
-}
diff --git a/tools/pdl/src/ast.rs b/tools/pdl/src/ast.rs
deleted file mode 100644
index da46c13383..0000000000
--- a/tools/pdl/src/ast.rs
+++ /dev/null
@@ -1,552 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use codespan_reporting::diagnostic;
-use codespan_reporting::files;
-use serde::Serialize;
-use std::fmt;
-use std::ops;
-
-/// File identifier.
-/// References a source file in the source database.
-pub type FileId = usize;
-
-/// Source database.
-/// Stores the source file contents for reference.
-pub type SourceDatabase = files::SimpleFiles<String, String>;
-
-#[derive(Debug, Default, Copy, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
-pub struct SourceLocation {
- /// Byte offset into the file (counted from zero).
- pub offset: usize,
- /// Line number (counted from zero).
- pub line: usize,
- /// Column number (counted from zero).
- pub column: usize,
-}
-
-#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize)]
-pub struct SourceRange {
- pub file: FileId,
- pub start: SourceLocation,
- pub end: SourceLocation,
-}
-
-pub trait Annotation: fmt::Debug + Serialize {
- type FieldAnnotation: Default + fmt::Debug + Clone;
- type DeclAnnotation: Default + fmt::Debug;
-}
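-
-// Illustrative sketch, not from the original source: a pass that carries no
-// extra metadata could implement the trait with unit annotations, e.g.
-//
-//     #[derive(Debug, Serialize)]
-//     struct NoAnnotations;
-//     impl Annotation for NoAnnotations {
-//         type FieldAnnotation = (); // hypothetical: nothing attached per field
-//         type DeclAnnotation = ();  // hypothetical: nothing attached per declaration
-//     }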
-
-#[derive(Debug, Serialize, Clone)]
-#[serde(tag = "kind", rename = "comment")]
-pub struct Comment {
- pub loc: SourceRange,
- pub text: String,
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize)]
-#[serde(rename_all = "snake_case")]
-pub enum EndiannessValue {
- LittleEndian,
- BigEndian,
-}
-
-#[derive(Debug, Copy, Clone, Serialize)]
-#[serde(tag = "kind", rename = "endianness_declaration")]
-pub struct Endianness {
- pub loc: SourceRange,
- pub value: EndiannessValue,
-}
-
-#[derive(Debug, Clone, Serialize)]
-#[serde(tag = "kind", rename = "tag")]
-pub struct TagValue {
- pub id: String,
- pub loc: SourceRange,
- pub value: usize,
-}
-
-#[derive(Debug, Clone, Serialize)]
-#[serde(tag = "kind", rename = "tag")]
-pub struct TagRange {
- pub id: String,
- pub loc: SourceRange,
- pub range: std::ops::RangeInclusive<usize>,
- pub tags: Vec<TagValue>,
-}
-
-#[derive(Debug, Serialize, Clone, PartialEq, Eq)]
-#[serde(untagged)]
-pub enum Tag {
- Value(TagValue),
- Range(TagRange),
-}
-
-#[derive(Debug, Serialize, Clone)]
-#[serde(tag = "kind", rename = "constraint")]
-pub struct Constraint {
- pub id: String,
- pub loc: SourceRange,
- pub value: Option<usize>,
- pub tag_id: Option<String>,
-}
-
-#[derive(Debug, Serialize, Clone, PartialEq, Eq)]
-#[serde(tag = "kind")]
-pub enum FieldDesc {
- #[serde(rename = "checksum_field")]
- Checksum { field_id: String },
- #[serde(rename = "padding_field")]
- Padding { size: usize },
- #[serde(rename = "size_field")]
- Size { field_id: String, width: usize },
- #[serde(rename = "count_field")]
- Count { field_id: String, width: usize },
- #[serde(rename = "elementsize_field")]
- ElementSize { field_id: String, width: usize },
- #[serde(rename = "body_field")]
- Body,
- #[serde(rename = "payload_field")]
- Payload { size_modifier: Option<String> },
- #[serde(rename = "fixed_field")]
- FixedScalar { width: usize, value: usize },
- #[serde(rename = "fixed_field")]
- FixedEnum { enum_id: String, tag_id: String },
- #[serde(rename = "reserved_field")]
- Reserved { width: usize },
- #[serde(rename = "array_field")]
- Array {
- id: String,
- width: Option<usize>,
- type_id: Option<String>,
- size_modifier: Option<String>,
- size: Option<usize>,
- },
- #[serde(rename = "scalar_field")]
- Scalar { id: String, width: usize },
- #[serde(rename = "typedef_field")]
- Typedef { id: String, type_id: String },
- #[serde(rename = "group_field")]
- Group { group_id: String, constraints: Vec<Constraint> },
-}
-
-#[derive(Debug, Serialize, Clone)]
-pub struct Field<A: Annotation> {
- pub loc: SourceRange,
- #[serde(skip_serializing)]
- pub annot: A::FieldAnnotation,
- #[serde(flatten)]
- pub desc: FieldDesc,
-}
-
-#[derive(Debug, Serialize, Clone)]
-#[serde(tag = "kind", rename = "test_case")]
-pub struct TestCase {
- pub loc: SourceRange,
- pub input: String,
-}
-
-#[derive(Debug, Serialize, PartialEq, Eq)]
-#[serde(tag = "kind")]
-pub enum DeclDesc<A: Annotation> {
- #[serde(rename = "checksum_declaration")]
- Checksum { id: String, function: String, width: usize },
- #[serde(rename = "custom_field_declaration")]
- CustomField { id: String, width: Option<usize>, function: String },
- #[serde(rename = "enum_declaration")]
- Enum { id: String, tags: Vec<Tag>, width: usize },
- #[serde(rename = "packet_declaration")]
- Packet {
- id: String,
- constraints: Vec<Constraint>,
- fields: Vec<Field<A>>,
- parent_id: Option<String>,
- },
- #[serde(rename = "struct_declaration")]
- Struct {
- id: String,
- constraints: Vec<Constraint>,
- fields: Vec<Field<A>>,
- parent_id: Option<String>,
- },
- #[serde(rename = "group_declaration")]
- Group { id: String, fields: Vec<Field<A>> },
- #[serde(rename = "test_declaration")]
- Test { type_id: String, test_cases: Vec<TestCase> },
-}
-
-#[derive(Debug, Serialize)]
-pub struct Decl<A: Annotation> {
- pub loc: SourceRange,
- #[serde(skip_serializing)]
- pub annot: A::DeclAnnotation,
- #[serde(flatten)]
- pub desc: DeclDesc<A>,
-}
-
-#[derive(Debug, Serialize)]
-pub struct File<A: Annotation> {
- pub version: String,
- pub file: FileId,
- pub comments: Vec<Comment>,
- pub endianness: Endianness,
- pub declarations: Vec<Decl<A>>,
-}
-
-impl SourceLocation {
- /// Construct a new source location.
- ///
- /// The `line_starts` indicates the byte offsets where new lines
- /// start in the file. The first element should thus be `0` since
- /// every file has at least one line starting at offset `0`.
- pub fn new(offset: usize, line_starts: &[usize]) -> SourceLocation {
- let mut loc = SourceLocation { offset, line: 0, column: offset };
- for (line, start) in line_starts.iter().enumerate() {
- if *start > offset {
- break;
- }
- loc = SourceLocation { offset, line, column: offset - start };
- }
- loc
- }
-}
-
-impl SourceRange {
- pub fn primary(&self) -> diagnostic::Label<FileId> {
- diagnostic::Label::primary(self.file, self.start.offset..self.end.offset)
- }
- pub fn secondary(&self) -> diagnostic::Label<FileId> {
- diagnostic::Label::secondary(self.file, self.start.offset..self.end.offset)
- }
-}
-
-impl fmt::Display for SourceRange {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- if self.start.line == self.end.line {
- write!(f, "{}:{}-{}", self.start.line, self.start.column, self.end.column)
- } else {
- write!(
- f,
- "{}:{}-{}:{}",
- self.start.line, self.start.column, self.end.line, self.end.column
- )
- }
- }
-}
-
-impl fmt::Debug for SourceRange {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("SourceRange").finish_non_exhaustive()
- }
-}
-
-impl ops::Add<SourceRange> for SourceRange {
- type Output = SourceRange;
-
- fn add(self, rhs: SourceRange) -> SourceRange {
- assert!(self.file == rhs.file);
- SourceRange {
- file: self.file,
- start: self.start.min(rhs.start),
- end: self.end.max(rhs.end),
- }
- }
-}
-
-impl Eq for Endianness {}
-impl PartialEq for Endianness {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc.
- self.value == other.value
- }
-}
-
-impl Eq for TagValue {}
-impl PartialEq for TagValue {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc.
- self.id == other.id && self.value == other.value
- }
-}
-
-impl Eq for TagRange {}
-impl PartialEq for TagRange {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc.
- self.id == other.id && self.range == other.range && self.tags == other.tags
- }
-}
-
-impl Tag {
- pub fn id(&self) -> &str {
- match self {
- Tag::Value(TagValue { id, .. }) | Tag::Range(TagRange { id, .. }) => id,
- }
- }
-
- pub fn loc(&self) -> &SourceRange {
- match self {
- Tag::Value(TagValue { loc, .. }) | Tag::Range(TagRange { loc, .. }) => loc,
- }
- }
-
- pub fn value(&self) -> Option<usize> {
- match self {
- Tag::Value(TagValue { value, .. }) => Some(*value),
- Tag::Range(_) => None,
- }
- }
-}
-
-impl Eq for Constraint {}
-impl PartialEq for Constraint {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc.
- self.id == other.id && self.value == other.value && self.tag_id == other.tag_id
- }
-}
-
-impl Eq for TestCase {}
-impl PartialEq for TestCase {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc.
- self.input == other.input
- }
-}
-
-impl<A: Annotation + std::cmp::PartialEq> Eq for File<A> {}
-impl<A: Annotation + std::cmp::PartialEq> PartialEq for File<A> {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out comments and PDL
- // version information.
- self.endianness == other.endianness && self.declarations == other.declarations
- }
-}
-
-impl<A: Annotation> File<A> {
- pub fn new(file: FileId) -> File<A> {
- File {
- version: "1,0".to_owned(),
- comments: vec![],
- // The endianness is mandatory, so this default value will
- // be updated while parsing.
- endianness: Endianness {
- loc: SourceRange::default(),
- value: EndiannessValue::LittleEndian,
- },
- declarations: vec![],
- file,
- }
- }
-
- /// Iterate over the children of the selected declaration.
- /// /!\ This method is unsafe to use if the file contains cyclic
- /// declarations; use with caution.
- pub fn iter_children<'d>(&'d self, decl: &'d Decl<A>) -> impl Iterator<Item = &'d Decl<A>> {
- self.declarations.iter().filter(|other_decl| other_decl.parent_id() == decl.id())
- }
-}
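-
-// Illustrative usage, not from the original source: assuming `file` is a
-// parsed `File` and `decl` one of its packet declarations, the direct
-// children can be enumerated with
-//
-//     for child in file.iter_children(decl) {
-//         println!("{:?} extends {:?}", child.id(), decl.id());
-//     }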
-
-impl<A: Annotation + std::cmp::PartialEq> Eq for Decl<A> {}
-impl<A: Annotation + std::cmp::PartialEq> PartialEq for Decl<A> {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc and annot.
- self.desc == other.desc
- }
-}
-
-impl<A: Annotation> Decl<A> {
- pub fn new(loc: SourceRange, desc: DeclDesc<A>) -> Decl<A> {
- Decl { loc, annot: Default::default(), desc }
- }
-
- pub fn annotate<F, B: Annotation>(
- &self,
- annot: B::DeclAnnotation,
- annotate_fields: F,
- ) -> Decl<B>
- where
- F: FnOnce(&[Field<A>]) -> Vec<Field<B>>,
- {
- let desc = match &self.desc {
- DeclDesc::Checksum { id, function, width } => {
- DeclDesc::Checksum { id: id.clone(), function: function.clone(), width: *width }
- }
- DeclDesc::CustomField { id, width, function } => {
- DeclDesc::CustomField { id: id.clone(), width: *width, function: function.clone() }
- }
- DeclDesc::Enum { id, tags, width } => {
- DeclDesc::Enum { id: id.clone(), tags: tags.clone(), width: *width }
- }
-
- DeclDesc::Test { type_id, test_cases } => {
- DeclDesc::Test { type_id: type_id.clone(), test_cases: test_cases.clone() }
- }
- DeclDesc::Packet { id, constraints, parent_id, fields } => DeclDesc::Packet {
- id: id.clone(),
- constraints: constraints.clone(),
- parent_id: parent_id.clone(),
- fields: annotate_fields(fields),
- },
- DeclDesc::Struct { id, constraints, parent_id, fields } => DeclDesc::Struct {
- id: id.clone(),
- constraints: constraints.clone(),
- parent_id: parent_id.clone(),
- fields: annotate_fields(fields),
- },
- DeclDesc::Group { id, fields } => {
- DeclDesc::Group { id: id.clone(), fields: annotate_fields(fields) }
- }
- };
- Decl { loc: self.loc, desc, annot }
- }
-
- pub fn id(&self) -> Option<&str> {
- match &self.desc {
- DeclDesc::Test { .. } => None,
- DeclDesc::Checksum { id, .. }
- | DeclDesc::CustomField { id, .. }
- | DeclDesc::Enum { id, .. }
- | DeclDesc::Packet { id, .. }
- | DeclDesc::Struct { id, .. }
- | DeclDesc::Group { id, .. } => Some(id),
- }
- }
-
- pub fn parent_id(&self) -> Option<&str> {
- match &self.desc {
- DeclDesc::Packet { parent_id, .. } | DeclDesc::Struct { parent_id, .. } => {
- parent_id.as_deref()
- }
- _ => None,
- }
- }
-
- pub fn constraints(&self) -> std::slice::Iter<'_, Constraint> {
- match &self.desc {
- DeclDesc::Packet { constraints, .. } | DeclDesc::Struct { constraints, .. } => {
- constraints.iter()
- }
- _ => [].iter(),
- }
- }
-
- pub fn fields(&self) -> std::slice::Iter<'_, Field<A>> {
- match &self.desc {
- DeclDesc::Packet { fields, .. }
- | DeclDesc::Struct { fields, .. }
- | DeclDesc::Group { fields, .. } => fields.iter(),
- _ => [].iter(),
- }
- }
-
- pub fn kind(&self) -> &str {
- match &self.desc {
- DeclDesc::Checksum { .. } => "checksum",
- DeclDesc::CustomField { .. } => "custom field",
- DeclDesc::Enum { .. } => "enum",
- DeclDesc::Packet { .. } => "packet",
- DeclDesc::Struct { .. } => "struct",
- DeclDesc::Group { .. } => "group",
- DeclDesc::Test { .. } => "test",
- }
- }
-}
-
-impl<A: Annotation> Eq for Field<A> {}
-impl<A: Annotation> PartialEq for Field<A> {
- fn eq(&self, other: &Self) -> bool {
- // Implement structural equality, leave out loc and annot.
- self.desc == other.desc
- }
-}
-
-impl<A: Annotation> Field<A> {
- pub fn annotate<B: Annotation>(&self, annot: B::FieldAnnotation) -> Field<B> {
- Field { loc: self.loc, annot, desc: self.desc.clone() }
- }
-
- pub fn id(&self) -> Option<&str> {
- match &self.desc {
- FieldDesc::Checksum { .. }
- | FieldDesc::Padding { .. }
- | FieldDesc::Size { .. }
- | FieldDesc::Count { .. }
- | FieldDesc::ElementSize { .. }
- | FieldDesc::Body
- | FieldDesc::Payload { .. }
- | FieldDesc::FixedScalar { .. }
- | FieldDesc::FixedEnum { .. }
- | FieldDesc::Reserved { .. }
- | FieldDesc::Group { .. } => None,
- FieldDesc::Array { id, .. }
- | FieldDesc::Scalar { id, .. }
- | FieldDesc::Typedef { id, .. } => Some(id),
- }
- }
-
- pub fn kind(&self) -> &str {
- match &self.desc {
- FieldDesc::Checksum { .. } => "checksum",
- FieldDesc::Padding { .. } => "padding",
- FieldDesc::Size { .. } => "size",
- FieldDesc::Count { .. } => "count",
- FieldDesc::ElementSize { .. } => "elementsize",
- FieldDesc::Body { .. } => "body",
- FieldDesc::Payload { .. } => "payload",
- FieldDesc::FixedScalar { .. } | FieldDesc::FixedEnum { .. } => "fixed",
- FieldDesc::Reserved { .. } => "reserved",
- FieldDesc::Group { .. } => "group",
- FieldDesc::Array { .. } => "array",
- FieldDesc::Scalar { .. } => "scalar",
- FieldDesc::Typedef { .. } => "typedef",
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn source_location_new() {
- let line_starts = &[0, 20, 80, 120, 150];
- assert_eq!(
- SourceLocation::new(0, line_starts),
- SourceLocation { offset: 0, line: 0, column: 0 }
- );
- assert_eq!(
- SourceLocation::new(10, line_starts),
- SourceLocation { offset: 10, line: 0, column: 10 }
- );
- assert_eq!(
- SourceLocation::new(50, line_starts),
- SourceLocation { offset: 50, line: 1, column: 30 }
- );
- assert_eq!(
- SourceLocation::new(100, line_starts),
- SourceLocation { offset: 100, line: 2, column: 20 }
- );
- assert_eq!(
- SourceLocation::new(1000, line_starts),
- SourceLocation { offset: 1000, line: 4, column: 850 }
- );
- }
-
- #[test]
- fn source_location_new_no_crash_with_empty_line_starts() {
- let loc = SourceLocation::new(100, &[]);
- assert_eq!(loc, SourceLocation { offset: 100, line: 0, column: 100 });
- }
-}
diff --git a/tools/pdl/src/backends.rs b/tools/pdl/src/backends.rs
deleted file mode 100644
index a80f1f9549..0000000000
--- a/tools/pdl/src/backends.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Compiler backends.
-
-pub mod intermediate;
-pub mod json;
-pub mod rust;
-pub mod rust_no_allocation;
diff --git a/tools/pdl/src/backends/intermediate.rs b/tools/pdl/src/backends/intermediate.rs
deleted file mode 100644
index e0d1041170..0000000000
--- a/tools/pdl/src/backends/intermediate.rs
+++ /dev/null
@@ -1,537 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::collections::{hash_map::Entry, HashMap};
-
-use crate::ast;
-use crate::parser;
-
-pub struct Schema<'a> {
- pub packets_and_structs: HashMap<&'a str, PacketOrStruct<'a>>,
- pub enums: HashMap<&'a str, Enum<'a>>,
-}
-
-pub struct PacketOrStruct<'a> {
- pub computed_offsets: HashMap<ComputedOffsetId<'a>, ComputedOffset<'a>>,
- pub computed_values: HashMap<ComputedValueId<'a>, ComputedValue<'a>>,
- /// whether parsing this packet requires knowing its length in advance,
- /// or whether the packet can determine its own length
- pub length: PacketOrStructLength,
-}
-
-pub enum PacketOrStructLength {
- Static(usize),
- Dynamic,
- NeedsExternal,
-}
-
-pub struct Enum<'a> {
- pub tags: &'a [ast::Tag],
- pub width: usize,
-}
-
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-pub enum ComputedValueId<'a> {
- // needed for array fields + variable-length structs - note that this is in OCTETS, not BITS
- // this always works since array entries are either structs (which are byte-aligned) or integer-octet-width scalars
- FieldSize(&'a str),
-
- // needed for arrays with fixed element size (otherwise codegen will loop!)
- FieldElementSize(&'a str), // note that this is in OCTETS, not BITS
- FieldCount(&'a str),
-
- Custom(u16),
-}
-
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-pub enum ComputedOffsetId<'a> {
- // these quantities are known by the runtime
- HeaderStart,
-
- // if the packet needs its length, this will be supplied. otherwise it will be computed
- PacketEnd,
-
- // these quantities will be computed and stored in computed_values
- FieldOffset(&'a str), // needed for all fields, measured in BITS
- FieldEndOffset(&'a str), // needed only for Payload + Body fields, as well as variable-size structs (not arrays), measured in BITS
- Custom(u16),
- TrailerStart,
-}
-
-pub enum ComputedValue<'a> {
- Constant(usize),
- CountStructsUpToSize {
- base_id: ComputedOffsetId<'a>,
- size: ComputedValueId<'a>,
- struct_type: &'a str,
- },
- SizeOfNStructs {
- base_id: ComputedOffsetId<'a>,
- n: ComputedValueId<'a>,
- struct_type: &'a str,
- },
- Product(ComputedValueId<'a>, ComputedValueId<'a>),
- Divide(ComputedValueId<'a>, ComputedValueId<'a>),
- Difference(ComputedOffsetId<'a>, ComputedOffsetId<'a>),
- ValueAt {
- offset: ComputedOffsetId<'a>,
- width: usize,
- },
-}
-
-#[derive(Copy, Clone)]
-pub enum ComputedOffset<'a> {
- ConstantPlusOffsetInBits(ComputedOffsetId<'a>, i64),
- SumWithOctets(ComputedOffsetId<'a>, ComputedValueId<'a>),
- Alias(ComputedOffsetId<'a>),
-}
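-
-// Illustrative sketch, not from the original source: for a hypothetical
-// declaration `packet Foo { a : 8, b : 16 }`, compute_getters below records
-// (all offsets counted in bits):
-//
-//     FieldOffset("a") -> Alias(HeaderStart)
-//     FieldOffset("b") -> HeaderStart + 8  (through an intermediate Custom offset)
-//     PacketEnd        -> ConstantPlusOffsetInBits(HeaderStart, 24)
-//
-// and the resulting length is PacketOrStructLength::Static(24).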
-
-pub fn generate(file: &parser::ast::File) -> Result<Schema, String> {
- let mut schema = Schema { packets_and_structs: HashMap::new(), enums: HashMap::new() };
- match file.endianness.value {
- ast::EndiannessValue::LittleEndian => {}
- _ => unimplemented!("Only little_endian endianness supported"),
- };
-
- for decl in &file.declarations {
- process_decl(&mut schema, decl);
- }
-
- Ok(schema)
-}
-
-fn process_decl<'a>(schema: &mut Schema<'a>, decl: &'a parser::ast::Decl) {
- match &decl.desc {
- ast::DeclDesc::Enum { id, tags, width, .. } => process_enum(schema, id, tags, *width),
- ast::DeclDesc::Packet { id, fields, .. } | ast::DeclDesc::Struct { id, fields, .. } => {
- process_packet_or_struct(schema, id, fields)
- }
- ast::DeclDesc::Group { .. } => todo!(),
- _ => unimplemented!("type {decl:?} not supported"),
- }
-}
-
-fn process_enum<'a>(schema: &mut Schema<'a>, id: &'a str, tags: &'a [ast::Tag], width: usize) {
- schema.enums.insert(id, Enum { tags, width });
- schema.packets_and_structs.insert(
- id,
- PacketOrStruct {
- computed_offsets: HashMap::new(),
- computed_values: HashMap::new(),
- length: PacketOrStructLength::Static(width),
- },
- );
-}
-
-fn process_packet_or_struct<'a>(
- schema: &mut Schema<'a>,
- id: &'a str,
- fields: &'a [parser::ast::Field],
-) {
- schema.packets_and_structs.insert(id, compute_getters(schema, fields));
-}
-
-fn compute_getters<'a>(
- schema: &Schema<'a>,
- fields: &'a [parser::ast::Field],
-) -> PacketOrStruct<'a> {
- let mut prev_pos_id = None;
- let mut curr_pos_id = ComputedOffsetId::HeaderStart;
- let mut computed_values = HashMap::new();
- let mut computed_offsets = HashMap::new();
-
- let mut cnt = 0;
-
- let one_id = ComputedValueId::Custom(cnt);
- let one_val = ComputedValue::Constant(1);
- cnt += 1;
- computed_values.insert(one_id, one_val);
-
- let mut needs_length = false;
-
- for field in fields {
- // populate this only if we are an array with a knowable size
- let mut next_prev_pos_id = None;
-
- let next_pos = match &field.desc {
- ast::FieldDesc::Reserved { width } => {
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, *width as i64)
- }
- ast::FieldDesc::Scalar { id, width } => {
- computed_offsets
- .insert(ComputedOffsetId::FieldOffset(id), ComputedOffset::Alias(curr_pos_id));
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, *width as i64)
- }
- ast::FieldDesc::FixedScalar { width, .. } => {
- let offset = *width;
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, offset as i64)
- }
- ast::FieldDesc::FixedEnum { enum_id, .. } => {
- let offset = schema.enums[enum_id.as_str()].width;
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, offset as i64)
- }
- ast::FieldDesc::Size { field_id, width } => {
- computed_values.insert(
- ComputedValueId::FieldSize(field_id),
- ComputedValue::ValueAt { offset: curr_pos_id, width: *width },
- );
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, *width as i64)
- }
- ast::FieldDesc::Count { field_id, width } => {
- computed_values.insert(
- ComputedValueId::FieldCount(field_id.as_str()),
- ComputedValue::ValueAt { offset: curr_pos_id, width: *width },
- );
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, *width as i64)
- }
- ast::FieldDesc::ElementSize { field_id, width } => {
- computed_values.insert(
- ComputedValueId::FieldElementSize(field_id),
- ComputedValue::ValueAt { offset: curr_pos_id, width: *width },
- );
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, *width as i64)
- }
- ast::FieldDesc::Group { .. } => {
- unimplemented!("this should be removed by the linter...")
- }
- ast::FieldDesc::Checksum { .. } => unimplemented!("checksum not supported"),
- ast::FieldDesc::Body => {
- computed_offsets.insert(
- ComputedOffsetId::FieldOffset("_body_"),
- ComputedOffset::Alias(curr_pos_id),
- );
- let computed_size_id = ComputedValueId::FieldSize("_body_");
- let end_offset = if computed_values.contains_key(&computed_size_id) {
- ComputedOffset::SumWithOctets(curr_pos_id, computed_size_id)
- } else {
- if needs_length {
- panic!("only one variable-length field can exist")
- }
- needs_length = true;
- ComputedOffset::Alias(ComputedOffsetId::TrailerStart)
- };
- computed_offsets.insert(ComputedOffsetId::FieldEndOffset("_body_"), end_offset);
- end_offset
- }
- ast::FieldDesc::Payload { size_modifier } => {
- if size_modifier.is_some() {
- unimplemented!("size modifiers not supported")
- }
- computed_offsets.insert(
- ComputedOffsetId::FieldOffset("_payload_"),
- ComputedOffset::Alias(curr_pos_id),
- );
- let computed_size_id = ComputedValueId::FieldSize("_payload_");
- let end_offset = if computed_values.contains_key(&computed_size_id) {
- ComputedOffset::SumWithOctets(curr_pos_id, computed_size_id)
- } else {
- if needs_length {
- panic!("only one variable-length field can exist")
- }
- needs_length = true;
- ComputedOffset::Alias(ComputedOffsetId::TrailerStart)
- };
- computed_offsets.insert(ComputedOffsetId::FieldEndOffset("_payload_"), end_offset);
- end_offset
- }
- ast::FieldDesc::Array {
- id,
- width,
- type_id,
- size_modifier,
- size: statically_known_count,
- } => {
- if size_modifier.is_some() {
- unimplemented!("size modifiers not supported")
- }
-
- computed_offsets
- .insert(ComputedOffsetId::FieldOffset(id), ComputedOffset::Alias(curr_pos_id));
-
- // there are a few parameters to consider when parsing arrays
- // 1: the count of elements
- // 2: the total byte size (possibly by subtracting out the len of the trailer)
- // 3: whether the structs know their own lengths
- // parsing is possible if we know (1 OR 2) AND 3
-
- if let Some(count) = statically_known_count {
- computed_values
- .insert(ComputedValueId::FieldCount(id), ComputedValue::Constant(*count));
- }
-
- let statically_known_width_in_bits = if let Some(type_id) = type_id {
- if let PacketOrStructLength::Static(len) =
- schema.packets_and_structs[type_id.as_str()].length
- {
- Some(len)
- } else {
- None
- }
- } else if let Some(width) = width {
- Some(*width)
- } else {
- unreachable!()
- };
-
- // whether the count is known *prior* to parsing the field
- let is_count_known = computed_values.contains_key(&ComputedValueId::FieldCount(id));
- // whether the total field size is explicitly specified
- let is_total_size_known =
- computed_values.contains_key(&ComputedValueId::FieldSize(id));
-
- let element_size = if let Some(type_id) = type_id {
- match schema.packets_and_structs[type_id.as_str()].length {
- PacketOrStructLength::Static(width) => {
- assert!(width % 8 == 0);
- Some(width / 8)
- }
- PacketOrStructLength::Dynamic => None,
- PacketOrStructLength::NeedsExternal => None,
- }
- } else if let Some(width) = width {
- assert!(width % 8 == 0);
- Some(width / 8)
- } else {
- unreachable!()
- };
- if let Some(element_size) = element_size {
- computed_values.insert(
- ComputedValueId::FieldElementSize(id),
- ComputedValue::Constant(element_size),
- );
- }
-
- // whether we can know the length of each element in the array by greedy parsing,
- let structs_know_length = if let Some(type_id) = type_id {
- match schema.packets_and_structs[type_id.as_str()].length {
- PacketOrStructLength::Static(_) => true,
- PacketOrStructLength::Dynamic => true,
- PacketOrStructLength::NeedsExternal => {
- computed_values.contains_key(&ComputedValueId::FieldElementSize(id))
- }
- }
- } else {
- width.is_some()
- };
-
- if !structs_know_length {
- panic!("structs need to know their own length, if they live in an array")
- }
-
- let mut out = None;
- if let Some(count) = statically_known_count {
- if let Some(width) = statically_known_width_in_bits {
- // the fast path, if the count and width are statically known, is to just immediately multiply
- // otherwise this becomes a dynamic computation
- assert!(width % 8 == 0);
- computed_values.insert(
- ComputedValueId::FieldSize(id),
- ComputedValue::Constant(count * width / 8),
- );
- out = Some(ComputedOffset::ConstantPlusOffsetInBits(
- curr_pos_id,
- (count * width) as i64,
- ));
- }
- }
-
- // note: this introduces a forward dependency on the total field size
- // however, FieldSize(id) only depends on FieldCount(id) when the count is known,
- // while FieldCount(id) only depends on FieldSize(id) when the count is unknown,
- // so the dependency graph can never contain a cycle
- if !is_count_known {
- // the count is not known statically, or from earlier in the packet
- // thus, we must compute it from the total size of the field, known either explicitly or implicitly via the trailer
- // the fast path is to do a divide, but otherwise we need to loop over the TLVs
- computed_values.insert(
- ComputedValueId::FieldCount(id),
- if computed_values.contains_key(&ComputedValueId::FieldElementSize(id)) {
- ComputedValue::Divide(
- ComputedValueId::FieldSize(id),
- ComputedValueId::FieldElementSize(id),
- )
- } else {
- ComputedValue::CountStructsUpToSize {
- base_id: curr_pos_id,
- size: ComputedValueId::FieldSize(id),
- struct_type: type_id.as_ref().unwrap(),
- }
- },
- );
- }
-
- if let Some(out) = out {
- // we are paddable if the total size is known
- next_prev_pos_id = Some(curr_pos_id);
- out
- } else if is_total_size_known {
- // we are paddable if the total size is known
- next_prev_pos_id = Some(curr_pos_id);
- ComputedOffset::SumWithOctets(curr_pos_id, ComputedValueId::FieldSize(id))
- } else if is_count_known {
- // we are paddable if the total count is known, since structs know their lengths
- next_prev_pos_id = Some(curr_pos_id);
-
- computed_values.insert(
- ComputedValueId::FieldSize(id),
- if computed_values.contains_key(&ComputedValueId::FieldElementSize(id)) {
- ComputedValue::Product(
- ComputedValueId::FieldCount(id),
- ComputedValueId::FieldElementSize(id),
- )
- } else {
- ComputedValue::SizeOfNStructs {
- base_id: curr_pos_id,
- n: ComputedValueId::FieldCount(id),
- struct_type: type_id.as_ref().unwrap(),
- }
- },
- );
- ComputedOffset::SumWithOctets(curr_pos_id, ComputedValueId::FieldSize(id))
- } else {
- // we can try to infer the total size if we are still in the header
- // however, we are not paddable in this case
- next_prev_pos_id = None;
-
- if needs_length {
- panic!("either the total size, or the count of elements in an array, must be known")
- }
- // now we are in the trailer
- computed_values.insert(
- ComputedValueId::FieldSize(id),
- ComputedValue::Difference(ComputedOffsetId::TrailerStart, curr_pos_id),
- );
- needs_length = true;
- ComputedOffset::Alias(ComputedOffsetId::TrailerStart)
- }
- }
- ast::FieldDesc::Padding { size } => {
- if let Some(prev_pos_id) = prev_pos_id {
- ComputedOffset::ConstantPlusOffsetInBits(prev_pos_id, *size as i64)
- } else {
- panic!("padding must follow array field with known total size")
- }
- }
- ast::FieldDesc::Typedef { id, type_id } => {
- computed_offsets
- .insert(ComputedOffsetId::FieldOffset(id), ComputedOffset::Alias(curr_pos_id));
-
- match schema.packets_and_structs[type_id.as_str()].length {
- PacketOrStructLength::Static(len) => {
- ComputedOffset::ConstantPlusOffsetInBits(curr_pos_id, len as i64)
- }
- PacketOrStructLength::Dynamic => {
- computed_values.insert(
- ComputedValueId::FieldSize(id),
- ComputedValue::SizeOfNStructs {
- base_id: curr_pos_id,
- n: one_id,
- struct_type: type_id,
- },
- );
- ComputedOffset::SumWithOctets(curr_pos_id, ComputedValueId::FieldSize(id))
- }
- PacketOrStructLength::NeedsExternal => {
- let end_offset = if let Entry::Vacant(entry) =
- computed_values.entry(ComputedValueId::FieldSize(id))
- {
- // its size is presently unknown
- if needs_length {
- panic!(
- "cannot have multiple variable-length fields in a single packet/struct"
- )
- }
- // we are now in the trailer
- entry.insert(ComputedValue::Difference(
- ComputedOffsetId::TrailerStart,
- curr_pos_id,
- ));
- needs_length = true;
- ComputedOffset::Alias(ComputedOffsetId::TrailerStart)
- } else {
- ComputedOffset::SumWithOctets(
- curr_pos_id,
- ComputedValueId::FieldSize(id),
- )
- };
- computed_offsets.insert(ComputedOffsetId::FieldEndOffset(id), end_offset);
- end_offset
- }
- }
-
- // it is possible to size a struct in this variant of PDL, even though the linter doesn't allow it
- }
- };
-
- prev_pos_id = next_prev_pos_id;
- curr_pos_id = ComputedOffsetId::Custom(cnt);
- cnt += 1;
- computed_offsets.insert(curr_pos_id, next_pos);
- }
-
- // TODO(aryarahul): simplify compute graph to improve trailer resolution?
-
- // we are now at the end of the packet
- let length = if needs_length {
- // if we needed the length, use the PacketEnd and length to reconstruct the TrailerStart
- let trailer_length =
- compute_length_to_goal(&computed_offsets, curr_pos_id, ComputedOffsetId::TrailerStart)
- .expect("trailers should have deterministic length");
- computed_offsets.insert(
- ComputedOffsetId::TrailerStart,
- ComputedOffset::ConstantPlusOffsetInBits(ComputedOffsetId::PacketEnd, -trailer_length),
- );
- PacketOrStructLength::NeedsExternal
- } else {
- // otherwise, try to reconstruct the full length, if possible
- let full_length =
- compute_length_to_goal(&computed_offsets, curr_pos_id, ComputedOffsetId::HeaderStart);
- if let Some(full_length) = full_length {
- computed_offsets.insert(
- ComputedOffsetId::PacketEnd,
- ComputedOffset::ConstantPlusOffsetInBits(
- ComputedOffsetId::HeaderStart,
- full_length,
- ),
- );
- PacketOrStructLength::Static(full_length as usize)
- } else {
- computed_offsets
- .insert(ComputedOffsetId::PacketEnd, ComputedOffset::Alias(curr_pos_id));
- PacketOrStructLength::Dynamic
- }
- };
-
- PacketOrStruct { computed_values, computed_offsets, length }
-}
-
-fn compute_length_to_goal(
- computed_offsets: &HashMap<ComputedOffsetId, ComputedOffset>,
- start: ComputedOffsetId,
- goal: ComputedOffsetId,
-) -> Option<i64> {
- let mut out = 0;
- let mut pos = start;
- while pos != goal {
- match computed_offsets.get(&pos).ok_or_else(|| format!("key {pos:?} not found")).unwrap() {
- ComputedOffset::ConstantPlusOffsetInBits(base_id, offset) => {
- out += offset;
- pos = *base_id;
- }
- ComputedOffset::Alias(alias) => pos = *alias,
- ComputedOffset::SumWithOctets { .. } => return None,
- }
- }
- Some(out)
-}
diff --git a/tools/pdl/src/backends/json.rs b/tools/pdl/src/backends/json.rs
deleted file mode 100644
index 460d72cc29..0000000000
--- a/tools/pdl/src/backends/json.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Rust compiler backend.
-
-use crate::parser;
-
-/// Turn the AST into a JSON representation.
-pub fn generate(file: &parser::ast::File) -> Result<String, String> {
- serde_json::to_string_pretty(&file)
- .map_err(|err| format!("could not JSON serialize grammar: {err}"))
-}
diff --git a/tools/pdl/src/backends/rust.rs b/tools/pdl/src/backends/rust.rs
deleted file mode 100644
index 2761b9fd77..0000000000
--- a/tools/pdl/src/backends/rust.rs
+++ /dev/null
@@ -1,1585 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Rust compiler backend.
-
-use crate::{ast, lint};
-use quote::{format_ident, quote};
-use std::collections::BTreeSet;
-use std::path::Path;
-use syn::LitInt;
-
-use crate::analyzer::ast as analyzer_ast;
-
-mod parser;
-mod preamble;
-mod serializer;
-mod types;
-
-use parser::FieldParser;
-use serializer::FieldSerializer;
-
-#[cfg(not(tm_mainline_prod))]
-pub use heck::ToUpperCamelCase;
-
-#[cfg(tm_mainline_prod)]
-pub trait ToUpperCamelCase {
- fn to_upper_camel_case(&self) -> String;
-}
-
-#[cfg(tm_mainline_prod)]
-impl ToUpperCamelCase for str {
- fn to_upper_camel_case(&self) -> String {
- use heck::CamelCase;
- let camel_case = self.to_camel_case();
- if camel_case.is_empty() {
- camel_case
- } else {
- // PDL identifiers are a-zA-Z0-9, so we're dealing with
- // simple ASCII text.
- format!("{}{}", &camel_case[..1].to_ascii_uppercase(), &camel_case[1..])
- }
- }
-}
-
-/// Generate a block of code.
-///
-/// Like `quote!`, but the code block will be followed by an empty
-/// line of code. This makes the generated code more readable.
-#[macro_export]
-macro_rules! quote_block {
- ($($tt:tt)*) => {
- format!("{}\n\n", ::quote::quote!($($tt)*))
- }
-}
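-
-// Illustrative usage, not from the original source: unlike `quote!`, the
-// macro expands to a `String` terminated by a blank line, so successive
-// blocks can simply be concatenated:
-//
-//     let block = quote_block! { fn answer() -> u32 { 42 } };
-//     assert!(block.ends_with("\n\n"));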
-
-/// Generate a bit-mask which masks out `n` least significant bits.
-///
-/// Literal integers in Rust default to the `i32` type. For this
-/// reason, if `n` is larger than 31, a suffix is added to the
-/// `LitInt` returned. This should either be `u64` or `usize`
-/// depending on where the result is used.
-pub fn mask_bits(n: usize, suffix: &str) -> syn::LitInt {
- let suffix = if n > 31 { format!("_{suffix}") } else { String::new() };
- // Format the hex digits as 0x1111_2222_3333_usize.
- let hex_digits = format!("{:x}", (1u64 << n) - 1)
- .as_bytes()
- .rchunks(4)
- .rev()
- .map(|chunk| std::str::from_utf8(chunk).unwrap())
- .collect::<Vec<&str>>()
- .join("_");
- syn::parse_str::<syn::LitInt>(&format!("0x{hex_digits}{suffix}")).unwrap()
-}
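-
-// Illustrative values, not from the original source:
-//
-//     mask_bits(4, "u64")    parses as the literal `0xf`
-//     mask_bits(32, "u64")   parses as the literal `0xffff_ffff_u64`
-//     mask_bits(48, "usize") parses as the literal `0xffff_ffff_ffff_usize`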
-
-fn generate_packet_size_getter<'a>(
- scope: &lint::Scope<'a>,
- fields: impl Iterator<Item = &'a analyzer_ast::Field>,
- is_packet: bool,
-) -> (usize, proc_macro2::TokenStream) {
- let mut constant_width = 0;
- let mut dynamic_widths = Vec::new();
-
- for field in fields {
- if let Some(width) = scope.get_field_width(field, false) {
- constant_width += width;
- continue;
- }
-
- let decl = scope.get_field_declaration(field);
- dynamic_widths.push(match &field.desc {
- ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body { .. } => {
- if is_packet {
- quote! {
- self.child.get_total_size()
- }
- } else {
- quote! {
- self.payload.len()
- }
- }
- }
- ast::FieldDesc::Typedef { id, .. } => {
- let id = format_ident!("{id}");
- quote!(self.#id.get_size())
- }
- ast::FieldDesc::Array { id, width, .. } => {
- let id = format_ident!("{id}");
- match &decl {
- Some(analyzer_ast::Decl {
- desc: ast::DeclDesc::Struct { .. } | ast::DeclDesc::CustomField { .. },
- ..
- }) => {
- quote! {
- self.#id.iter().map(|elem| elem.get_size()).sum::<usize>()
- }
- }
- Some(analyzer_ast::Decl { desc: ast::DeclDesc::Enum { .. }, .. }) => {
- let width = syn::Index::from(
- scope.get_decl_width(decl.unwrap(), false).unwrap() / 8,
- );
- let mul_width = (width.index > 1).then(|| quote!(* #width));
- quote! {
- self.#id.len() #mul_width
- }
- }
- _ => {
- let width = syn::Index::from(width.unwrap() / 8);
- let mul_width = (width.index > 1).then(|| quote!(* #width));
- quote! {
- self.#id.len() #mul_width
- }
- }
- }
- }
- _ => panic!("Unsupported field type: {field:?}"),
- });
- }
-
- if constant_width > 0 {
- let width = syn::Index::from(constant_width / 8);
- dynamic_widths.insert(0, quote!(#width));
- }
- if dynamic_widths.is_empty() {
- dynamic_widths.push(quote!(0))
- }
-
- (
- constant_width,
- quote! {
- #(#dynamic_widths)+*
- },
- )
-}
-
-fn top_level_packet<'a>(scope: &lint::Scope<'a>, packet_name: &'a str) -> &'a analyzer_ast::Decl {
- let mut decl = scope.typedef[packet_name];
- while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
- | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
- {
- decl = scope.typedef[parent_id];
- }
- decl
-}
-
-/// Find all constrained fields in children of `id`.
-fn find_constrained_fields<'a>(
- scope: &'a lint::Scope<'a>,
- id: &'a str,
-) -> Vec<&'a analyzer_ast::Field> {
- let mut fields = Vec::new();
- let mut field_names = BTreeSet::new();
- let mut children = scope.iter_children(id).collect::<Vec<_>>();
-
- while let Some(child) = children.pop() {
- if let ast::DeclDesc::Packet { id, constraints, .. }
- | ast::DeclDesc::Struct { id, constraints, .. } = &child.desc
- {
- let packet_scope = &scope.scopes[&scope.typedef[id]];
- for constraint in constraints {
- if field_names.insert(&constraint.id) {
- fields.push(packet_scope.all_fields[&constraint.id]);
- }
- }
- children.extend(scope.iter_children(id).collect::<Vec<_>>());
- }
- }
-
- fields
-}
-
-/// Find parent fields which are constrained in child packets.
-///
-/// These fields are the fields which need to be passed in when
-/// parsing a `id` packet since their values are needed for one or
-/// more child packets.
-fn find_constrained_parent_fields<'a>(
- scope: &'a lint::Scope<'a>,
- id: &'a str,
-) -> impl Iterator<Item = &'a analyzer_ast::Field> {
- let packet_scope = &scope.scopes[&scope.typedef[id]];
- find_constrained_fields(scope, id).into_iter().filter(|field| {
- let id = field.id().unwrap();
- packet_scope.all_fields.contains_key(id) && packet_scope.get_packet_field(id).is_none()
- })
-}
-
-/// Generate the declaration and implementation for a data struct.
-///
-/// This struct will hold the data for a packet or a struct. It knows
-/// how to parse and serialize its own fields.
-fn generate_data_struct(
- scope: &lint::Scope<'_>,
- endianness: ast::EndiannessValue,
- id: &str,
-) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {
- let decl = scope.typedef[id];
- let packet_scope = &scope.scopes[&decl];
- let is_packet = matches!(&decl.desc, ast::DeclDesc::Packet { .. });
-
- let span = format_ident!("bytes");
- let serializer_span = format_ident!("buffer");
- let mut field_parser = FieldParser::new(scope, endianness, id, &span);
- let mut field_serializer = FieldSerializer::new(scope, endianness, id, &serializer_span);
- for field in packet_scope.iter_fields() {
- field_parser.add(field);
- field_serializer.add(field);
- }
- field_parser.done();
-
- let (parse_arg_names, parse_arg_types) = if is_packet {
- let fields = find_constrained_parent_fields(scope, id).collect::<Vec<_>>();
- let names = fields.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
- let types = fields.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
- (names, types)
- } else {
- (Vec::new(), Vec::new()) // No extra arguments to parse in structs.
- };
-
- let (constant_width, packet_size) =
- generate_packet_size_getter(scope, packet_scope.iter_fields(), is_packet);
- let conforms = if constant_width == 0 {
- quote! { true }
- } else {
- let constant_width = syn::Index::from(constant_width / 8);
- quote! { #span.len() >= #constant_width }
- };
-
- let visibility = if is_packet { quote!() } else { quote!(pub) };
- let has_payload = packet_scope.get_payload_field().is_some();
- let has_children = scope.iter_children(id).next().is_some();
-
- let struct_name = if is_packet { format_ident!("{id}Data") } else { format_ident!("{id}") };
- let fields_with_ids =
- packet_scope.iter_fields().filter(|f| f.id().is_some()).collect::<Vec<_>>();
- let mut field_names =
- fields_with_ids.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
- let mut field_types = fields_with_ids.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
- if has_children || has_payload {
- if is_packet {
- field_names.push(format_ident!("child"));
- let field_type = format_ident!("{id}DataChild");
- field_types.push(quote!(#field_type));
- } else {
- field_names.push(format_ident!("payload"));
- field_types.push(quote!(Vec<u8>));
- }
- }
-
- let data_struct_decl = quote! {
- #[derive(Debug, Clone, PartialEq, Eq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- pub struct #struct_name {
- #(#visibility #field_names: #field_types,)*
- }
- };
-
- let data_struct_impl = quote! {
- impl #struct_name {
- fn conforms(#span: &[u8]) -> bool {
- #conforms
- }
-
- #visibility fn parse(
- #span: &[u8] #(, #parse_arg_names: #parse_arg_types)*
- ) -> Result<Self> {
- let mut cell = Cell::new(#span);
- let packet = Self::parse_inner(&mut cell #(, #parse_arg_names)*)?;
- // TODO(mgeisler): communicate back to user if !cell.get().is_empty()?
- Ok(packet)
- }
-
- fn parse_inner(
- mut #span: &mut Cell<&[u8]> #(, #parse_arg_names: #parse_arg_types)*
- ) -> Result<Self> {
- #field_parser
- Ok(Self {
- #(#field_names,)*
- })
- }
-
- fn write_to(&self, buffer: &mut BytesMut) {
- #field_serializer
- }
-
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
-
- fn get_size(&self) -> usize {
- #packet_size
- }
- }
- };
-
- (data_struct_decl, data_struct_impl)
-}
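-
-// A rough sketch of the expected expansion for `packet Foo { x: 8 }`
-// (illustrative only; the exact output is defined by the templates above,
-// and the scalar type is assumed to map to `u8`):
-//
-//     #[derive(Debug, Clone, PartialEq, Eq)]
-//     pub struct FooData {
-//         x: u8,
-//     }
-//
-//     impl FooData {
-//         fn conforms(bytes: &[u8]) -> bool { bytes.len() >= 1 }
-//         fn parse(bytes: &[u8]) -> Result<Self> { /* bounds check, then read `x` */ }
-//         fn write_to(&self, buffer: &mut BytesMut) { /* write `x` */ }
-//         fn get_size(&self) -> usize { 1 }
-//     }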
-
-/// Find all parents from `id`.
-///
-/// This includes the `Decl` for `id` itself.
-fn find_parents<'a>(scope: &lint::Scope<'a>, id: &str) -> Vec<&'a analyzer_ast::Decl> {
- let mut decl = scope.typedef[id];
- let mut parents = vec![decl];
- while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
- | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
- {
- decl = scope.typedef[parent_id];
- parents.push(decl);
- }
- parents.reverse();
- parents
-}
-
-/// Turn the constraint into a value (such as `10` or
-/// `SomeEnum::Foo`).
-pub fn constraint_to_value(
- packet_scope: &lint::PacketScope<'_>,
- constraint: &ast::Constraint,
-) -> proc_macro2::TokenStream {
- match constraint {
- ast::Constraint { value: Some(value), .. } => {
- let value = proc_macro2::Literal::usize_unsuffixed(*value);
- quote!(#value)
- }
- // TODO(mgeisler): include type_id in `ast::Constraint` and
- // drop the packet_scope argument.
- ast::Constraint { tag_id: Some(tag_id), .. } => {
- let type_id = match &packet_scope.all_fields[&constraint.id].desc {
- ast::FieldDesc::Typedef { type_id, .. } => format_ident!("{type_id}"),
- _ => unreachable!("Invalid constraint: {constraint:?}"),
- };
- let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
- quote!(#type_id::#tag_id)
- }
- _ => unreachable!("Invalid constraint: {constraint:?}"),
- }
-}
-
-/// Generate code for a `ast::Decl::Packet`.
-fn generate_packet_decl(
- scope: &lint::Scope<'_>,
- endianness: ast::EndiannessValue,
- id: &str,
-) -> proc_macro2::TokenStream {
- let packet_scope = &scope.scopes[&scope.typedef[id]];
-
- let top_level = top_level_packet(scope, id);
- let top_level_id = top_level.id().unwrap();
- let top_level_packet = format_ident!("{top_level_id}");
- let top_level_data = format_ident!("{top_level_id}Data");
- let top_level_id_lower = format_ident!("{}", top_level_id.to_lowercase());
-
- // TODO(mgeisler): use the convert_case crate to convert between
- // `FooBar` and `foo_bar` in the code below.
- let span = format_ident!("bytes");
- let id_lower = format_ident!("{}", id.to_lowercase());
- let id_packet = format_ident!("{id}");
- let id_child = format_ident!("{id}Child");
- let id_data_child = format_ident!("{id}DataChild");
- let id_builder = format_ident!("{id}Builder");
-
- let parents = find_parents(scope, id);
- let parent_ids = parents.iter().map(|p| p.id().unwrap()).collect::<Vec<_>>();
- let parent_shifted_ids = parent_ids.iter().skip(1).map(|id| format_ident!("{id}"));
- let parent_lower_ids =
- parent_ids.iter().map(|id| format_ident!("{}", id.to_lowercase())).collect::<Vec<_>>();
- let parent_shifted_lower_ids = parent_lower_ids.iter().skip(1).collect::<Vec<_>>();
- let parent_packet = parent_ids.iter().map(|id| format_ident!("{id}"));
- let parent_data = parent_ids.iter().map(|id| format_ident!("{id}Data"));
- let parent_data_child = parent_ids.iter().map(|id| format_ident!("{id}DataChild"));
-
- let all_fields = {
- let mut fields = packet_scope.all_fields.values().collect::<Vec<_>>();
- fields.sort_by_key(|f| f.id());
- fields
- };
- let all_field_names =
- all_fields.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
- let all_field_types = all_fields.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
- let all_field_borrows =
- all_fields.iter().map(|f| types::rust_borrow(f, scope)).collect::<Vec<_>>();
- let all_field_getter_names = all_field_names.iter().map(|id| format_ident!("get_{id}"));
- let all_field_self_field = all_fields.iter().map(|f| {
- for (parent, parent_id) in parents.iter().zip(parent_lower_ids.iter()) {
- if scope.scopes[parent].iter_fields().any(|ff| ff.id() == f.id()) {
- return quote!(self.#parent_id);
- }
- }
- unreachable!("Could not find {f:?} in parent chain");
- });
-
- let unconstrained_fields = all_fields
- .iter()
- .filter(|f| !packet_scope.all_constraints.contains_key(f.id().unwrap()))
- .collect::<Vec<_>>();
- let unconstrained_field_names = unconstrained_fields
- .iter()
- .map(|f| format_ident!("{}", f.id().unwrap()))
- .collect::<Vec<_>>();
- let unconstrained_field_types = unconstrained_fields.iter().map(|f| types::rust_type(f));
-
- let rev_parents = parents.iter().rev().collect::<Vec<_>>();
- let builder_assignments = rev_parents.iter().enumerate().map(|(idx, parent)| {
- let parent_id = parent.id().unwrap();
- let parent_id_lower = format_ident!("{}", parent_id.to_lowercase());
- let parent_data = format_ident!("{parent_id}Data");
- let parent_data_child = format_ident!("{parent_id}DataChild");
- let parent_packet_scope = &scope.scopes[&scope.typedef[parent_id]];
-
- let named_fields = {
- let mut names =
- parent_packet_scope.iter_fields().filter_map(ast::Field::id).collect::<Vec<_>>();
- names.sort_unstable();
- names
- };
-
- let mut field = named_fields.iter().map(|id| format_ident!("{id}")).collect::<Vec<_>>();
- let mut value = named_fields
- .iter()
- .map(|&id| match packet_scope.all_constraints.get(id) {
- Some(constraint) => constraint_to_value(packet_scope, constraint),
- None => {
- let id = format_ident!("{id}");
- quote!(self.#id)
- }
- })
- .collect::<Vec<_>>();
-
- if parent_packet_scope.get_payload_field().is_some() {
- field.push(format_ident!("child"));
- if idx == 0 {
- // Top-most parent, the child is simply created from
- // our payload.
- value.push(quote! {
- match self.payload {
- None => #parent_data_child::None,
- Some(bytes) => #parent_data_child::Payload(bytes),
- }
- });
- } else {
- // Child is created from the previous parent.
- let prev_parent_id = rev_parents[idx - 1].id().unwrap();
- let prev_parent_id_lower = format_ident!("{}", prev_parent_id.to_lowercase());
- let prev_parent_id = format_ident!("{prev_parent_id}");
- value.push(quote! {
- #parent_data_child::#prev_parent_id(#prev_parent_id_lower)
- });
- }
- } else if scope.iter_children(parent_id).next().is_some() {
- field.push(format_ident!("child"));
- value.push(quote! { #parent_data_child::None });
- }
-
- quote! {
- let #parent_id_lower = Arc::new(#parent_data {
- #(#field: #value,)*
- });
- }
- });
-
- let children = scope.iter_children(id).collect::<Vec<_>>();
- let has_payload = packet_scope.get_payload_field().is_some();
- let has_children_or_payload = !children.is_empty() || has_payload;
- let child =
- children.iter().map(|child| format_ident!("{}", child.id().unwrap())).collect::<Vec<_>>();
- let child_data = child.iter().map(|child| format_ident!("{child}Data")).collect::<Vec<_>>();
- let get_payload = (children.is_empty() && has_payload).then(|| {
- quote! {
- pub fn get_payload(&self) -> &[u8] {
- match &self.#id_lower.child {
- #id_data_child::Payload(bytes) => &bytes,
- #id_data_child::None => &[],
- }
- }
- }
- });
- let child_declaration = has_children_or_payload.then(|| {
- quote! {
- #[derive(Debug, Clone, PartialEq, Eq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- pub enum #id_data_child {
- #(#child(Arc<#child_data>),)*
- Payload(Bytes),
- None,
- }
-
- impl #id_data_child {
- fn get_total_size(&self) -> usize {
- match self {
- #(#id_data_child::#child(value) => value.get_total_size(),)*
- #id_data_child::Payload(bytes) => bytes.len(),
- #id_data_child::None => 0,
- }
- }
- }
-
- #[derive(Debug, Clone, PartialEq, Eq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- pub enum #id_child {
- #(#child(#child),)*
- Payload(Bytes),
- None,
- }
- }
- });
- let specialize = has_children_or_payload.then(|| {
- quote! {
- pub fn specialize(&self) -> #id_child {
- match &self.#id_lower.child {
- #(
- #id_data_child::#child(_) =>
- #id_child::#child(#child::new(self.#top_level_id_lower.clone()).unwrap()),
- )*
- #id_data_child::Payload(payload) => #id_child::Payload(payload.clone()),
- #id_data_child::None => #id_child::None,
- }
- }
- }
- });
-
- let builder_payload_field = has_children_or_payload.then(|| {
- quote! {
- pub payload: Option<Bytes>
- }
- });
-
- let ancestor_packets =
- parent_ids[..parent_ids.len() - 1].iter().map(|id| format_ident!("{id}"));
- let impl_from_and_try_from = (top_level_id != id).then(|| {
- quote! {
- #(
- impl From<#id_packet> for #ancestor_packets {
- fn from(packet: #id_packet) -> #ancestor_packets {
- #ancestor_packets::new(packet.#top_level_id_lower).unwrap()
- }
- }
- )*
-
- impl TryFrom<#top_level_packet> for #id_packet {
- type Error = Error;
- fn try_from(packet: #top_level_packet) -> Result<#id_packet> {
- #id_packet::new(packet.#top_level_id_lower)
- }
- }
- }
- });
-
- let (data_struct_decl, data_struct_impl) = generate_data_struct(scope, endianness, id);
-
- quote! {
- #child_declaration
-
- #data_struct_decl
-
- #[derive(Debug, Clone, PartialEq, Eq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- pub struct #id_packet {
- #(
- #[cfg_attr(feature = "serde", serde(flatten))]
- #parent_lower_ids: Arc<#parent_data>,
- )*
- }
-
- #[derive(Debug)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- pub struct #id_builder {
- #(pub #unconstrained_field_names: #unconstrained_field_types,)*
- #builder_payload_field
- }
-
- #data_struct_impl
-
- impl Packet for #id_packet {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.#top_level_id_lower.get_size());
- self.#top_level_id_lower.write_to(&mut buffer);
- buffer.freeze()
- }
-
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
- }
-
- impl From<#id_packet> for Bytes {
- fn from(packet: #id_packet) -> Self {
- packet.to_bytes()
- }
- }
-
- impl From<#id_packet> for Vec<u8> {
- fn from(packet: #id_packet) -> Self {
- packet.to_vec()
- }
- }
-
- #impl_from_and_try_from
-
- impl #id_packet {
- pub fn parse(#span: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(#span);
- let packet = Self::parse_inner(&mut cell)?;
- // TODO(mgeisler): communicate back to user if !cell.get().is_empty()?
- Ok(packet)
- }
-
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = #top_level_data::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
-
- #specialize
-
- fn new(#top_level_id_lower: Arc<#top_level_data>) -> Result<Self> {
- #(
- let #parent_shifted_lower_ids = match &#parent_lower_ids.child {
- #parent_data_child::#parent_shifted_ids(value) => value.clone(),
- _ => return Err(Error::InvalidChildError {
- expected: stringify!(#parent_data_child::#parent_shifted_ids),
- actual: format!("{:?}", &#parent_lower_ids.child),
- }),
- };
- )*
- Ok(Self { #(#parent_lower_ids),* })
- }
-
- #(pub fn #all_field_getter_names(&self) -> #all_field_borrows #all_field_types {
- #all_field_borrows #all_field_self_field.as_ref().#all_field_names
- })*
-
- #get_payload
-
- fn write_to(&self, buffer: &mut BytesMut) {
- self.#id_lower.write_to(buffer)
- }
-
- pub fn get_size(&self) -> usize {
- self.#top_level_id_lower.get_size()
- }
- }
-
- impl #id_builder {
- pub fn build(self) -> #id_packet {
- #(#builder_assignments;)*
- #id_packet::new(#top_level_id_lower).unwrap()
- }
- }
-
- #(
- impl From<#id_builder> for #parent_packet {
- fn from(builder: #id_builder) -> #parent_packet {
- builder.build().into()
- }
- }
- )*
- }
-}
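-
-// For a declaration such as `packet Foo { a: 8, _payload_ }`, the template
-// above emits, roughly: `FooData` (the internal data struct), `Foo` (the
-// public wrapper implementing `Packet`), `FooBuilder`, and the
-// `FooDataChild`/`FooChild` enums describing specialized children. A caller
-// might then use the generated API like this (hypothetical field values):
-//
-//     let foo = FooBuilder { a: 42, payload: None }.build();
-//     let bytes: Vec<u8> = foo.to_vec();
-//     let parsed = Foo::parse(&bytes)?;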
-
-/// Generate code for a `ast::Decl::Struct`.
-fn generate_struct_decl(
- scope: &lint::Scope<'_>,
- endianness: ast::EndiannessValue,
- id: &str,
-) -> proc_macro2::TokenStream {
- let (struct_decl, struct_impl) = generate_data_struct(scope, endianness, id);
- quote! {
- #struct_decl
- #struct_impl
- }
-}
-
-/// Generate an enum declaration.
-///
-/// # Arguments
-/// * `id` - Enum identifier.
-/// * `tags` - List of enum tags.
-/// * `width` - Width of the backing type of the enum, in bits.
-/// * `open` - Whether to generate an open or closed enum. Open enums have
-///            an additional Unknown case for unmatched values. Complete
-///            enums (where the full range of values is covered) are
-///            automatically closed.
-fn generate_enum_decl(
- id: &str,
- tags: &[ast::Tag],
- width: usize,
- open: bool,
-) -> proc_macro2::TokenStream {
- // Determine if the enum is complete, i.e. all values in the backing
- // integer range have a matching tag in the original declaration.
- fn enum_is_complete(tags: &[ast::Tag], max: usize) -> bool {
- let mut ranges = tags
- .iter()
- .map(|tag| match tag {
- ast::Tag::Value(tag) => (tag.value, tag.value),
- ast::Tag::Range(tag) => tag.range.clone().into_inner(),
- })
- .collect::<Vec<_>>();
- ranges.sort_unstable();
- ranges.first().unwrap().0 == 0
- && ranges.last().unwrap().1 == max
- && ranges.windows(2).all(|window| {
- if let [left, right] = window {
- left.1 == right.0 - 1
- } else {
- false
- }
- })
- }
-
- // Determine if the enum is primitive, i.e. does not contain any
- // tag range.
- fn enum_is_primitive(tags: &[ast::Tag]) -> bool {
- tags.iter().all(|tag| matches!(tag, ast::Tag::Value(_)))
- }
-
- // Return the maximum value for the scalar type.
- fn scalar_max(width: usize) -> usize {
- if width >= usize::BITS as usize {
- usize::MAX
- } else {
- (1 << width) - 1
- }
- }
-
-    // Format an enum tag identifier to Rust upper camel case.
- fn format_tag_ident(id: &str) -> proc_macro2::TokenStream {
- let id = format_ident!("{}", id.to_upper_camel_case());
- quote! { #id }
- }
-
-    // Format a constant value as a hexadecimal constant.
- fn format_value(value: usize) -> LitInt {
- syn::parse_str::<syn::LitInt>(&format!("{:#x}", value)).unwrap()
- }
-
- // Backing type for the enum.
- let backing_type = types::Integer::new(width);
- let backing_type_str = proc_macro2::Literal::string(&format!("u{}", backing_type.width));
- let range_max = scalar_max(width);
- let is_complete = enum_is_complete(tags, scalar_max(width));
- let is_primitive = enum_is_primitive(tags);
- let name = format_ident!("{id}");
-
- // Generate the variant cases for the enum declaration.
- // Tags declared in ranges are flattened in the same declaration.
- let use_variant_values = is_primitive && (is_complete || !open);
- let repr_u64 = use_variant_values.then(|| quote! { #[repr(u64)] });
- let mut variants = vec![];
- for tag in tags.iter() {
- match tag {
- ast::Tag::Value(tag) if use_variant_values => {
- let id = format_tag_ident(&tag.id);
- let value = format_value(tag.value);
- variants.push(quote! { #id = #value })
- }
- ast::Tag::Value(tag) => variants.push(format_tag_ident(&tag.id)),
- ast::Tag::Range(tag) => {
- variants.extend(tag.tags.iter().map(|tag| format_tag_ident(&tag.id)));
- let id = format_tag_ident(&tag.id);
- variants.push(quote! { #id(Private<#backing_type>) })
- }
- }
- }
-
- // Generate the cases for parsing the enum value from an integer.
- let mut from_cases = vec![];
- for tag in tags.iter() {
- match tag {
- ast::Tag::Value(tag) => {
- let id = format_tag_ident(&tag.id);
- let value = format_value(tag.value);
- from_cases.push(quote! { #value => Ok(#name::#id) })
- }
- ast::Tag::Range(tag) => {
- from_cases.extend(tag.tags.iter().map(|tag| {
- let id = format_tag_ident(&tag.id);
- let value = format_value(tag.value);
- quote! { #value => Ok(#name::#id) }
- }));
- let id = format_tag_ident(&tag.id);
- let start = format_value(*tag.range.start());
- let end = format_value(*tag.range.end());
- from_cases.push(quote! { #start ..= #end => Ok(#name::#id(Private(value))) })
- }
- }
- }
-
- // Generate the cases for serializing the enum value to an integer.
- let mut into_cases = vec![];
- for tag in tags.iter() {
- match tag {
- ast::Tag::Value(tag) => {
- let id = format_tag_ident(&tag.id);
- let value = format_value(tag.value);
- into_cases.push(quote! { #name::#id => #value })
- }
- ast::Tag::Range(tag) => {
- into_cases.extend(tag.tags.iter().map(|tag| {
- let id = format_tag_ident(&tag.id);
- let value = format_value(tag.value);
- quote! { #name::#id => #value }
- }));
- let id = format_tag_ident(&tag.id);
- into_cases.push(quote! { #name::#id(Private(value)) => *value })
- }
- }
- }
-
- // Generate a default case if the enum is open and incomplete.
- if !is_complete && open {
- variants.push(quote! { Unknown(Private<#backing_type>) });
-        from_cases.push(quote! { 0..=#range_max => Ok(#name::Unknown(Private(value))) });
- into_cases.push(quote! { #name::Unknown(Private(value)) => *value });
- }
-
-    // Generate an error case if the enum width is smaller than the backing
-    // type width, or if the enum is both closed and incomplete.
- if backing_type.width != width || (!is_complete && !open) {
- from_cases.push(quote! { _ => Err(value) });
- }
-
- // Derive other Into<uN> and Into<iN> implementations from the explicit
- // implementation, where the type is larger than the backing type.
- let derived_signed_into_types = [8, 16, 32, 64]
- .into_iter()
- .filter(|w| *w > width)
- .map(|w| syn::parse_str::<syn::Type>(&format!("i{}", w)).unwrap());
- let derived_unsigned_into_types = [8, 16, 32, 64]
- .into_iter()
- .filter(|w| *w >= width && *w != backing_type.width)
- .map(|w| syn::parse_str::<syn::Type>(&format!("u{}", w)).unwrap());
- let derived_into_types = derived_signed_into_types.chain(derived_unsigned_into_types);
-
- quote! {
- #repr_u64
- #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- #[cfg_attr(feature = "serde", serde(try_from = #backing_type_str, into = #backing_type_str))]
- pub enum #name {
- #(#variants,)*
- }
-
- impl TryFrom<#backing_type> for #name {
- type Error = #backing_type;
- fn try_from(value: #backing_type) -> std::result::Result<Self, Self::Error> {
- match value {
- #(#from_cases,)*
- }
- }
- }
-
- impl From<&#name> for #backing_type {
- fn from(value: &#name) -> Self {
- match value {
- #(#into_cases,)*
- }
- }
- }
-
- impl From<#name> for #backing_type {
- fn from(value: #name) -> Self {
- (&value).into()
- }
- }
-
- #(impl From<#name> for #derived_into_types {
- fn from(value: #name) -> Self {
- #backing_type::from(value) as Self
- }
- })*
- }
-}
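-
-// A rough sketch of the expected expansion for `enum Foo : 3 { A = 0, B = 1 }`
-// with `open == false` (illustrative only; the backing type is assumed to
-// round up to `u8`, and the serde attributes are omitted):
-//
-//     #[repr(u64)]
-//     #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-//     pub enum Foo { A = 0x0, B = 0x1 }
-//
-//     impl TryFrom<u8> for Foo {
-//         type Error = u8;
-//         fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
-//             match value {
-//                 0x0 => Ok(Foo::A),
-//                 0x1 => Ok(Foo::B),
-//                 _ => Err(value),
-//             }
-//         }
-//     }
-//
-//     // ...plus From<&Foo>/From<Foo> for u8 and the widening From impls.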
-
-/// Generate the declaration for a custom field of static size.
-///
-/// * `id` - Custom field identifier.
-/// * `width` - Width of the backing type of the custom field, in bits.
-fn generate_custom_field_decl(id: &str, width: usize) -> proc_macro2::TokenStream {
- let id = format_ident!("{}", id);
- let backing_type = types::Integer::new(width);
- let backing_type_str = proc_macro2::Literal::string(&format!("u{}", backing_type.width));
- let max_value = mask_bits(width, &format!("u{}", backing_type.width));
- let common = quote! {
- impl From<&#id> for #backing_type {
- fn from(value: &#id) -> #backing_type {
- value.0
- }
- }
-
- impl From<#id> for #backing_type {
- fn from(value: #id) -> #backing_type {
- value.0
- }
- }
- };
-
- if backing_type.width == width {
- quote! {
- #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- #[cfg_attr(feature = "serde", serde(from = #backing_type_str, into = #backing_type_str))]
- pub struct #id(#backing_type);
-
- #common
-
- impl From<#backing_type> for #id {
- fn from(value: #backing_type) -> Self {
- #id(value)
- }
- }
- }
- } else {
- quote! {
- #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
- #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
- #[cfg_attr(feature = "serde", serde(try_from = #backing_type_str, into = #backing_type_str))]
- pub struct #id(#backing_type);
-
- #common
-
- impl TryFrom<#backing_type> for #id {
- type Error = #backing_type;
- fn try_from(value: #backing_type) -> std::result::Result<Self, Self::Error> {
- if value > #max_value {
- Err(value)
- } else {
- Ok(#id(value))
- }
- }
- }
- }
- }
-}
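-
-// A rough sketch: `custom_field ExactSize : 32 "..."` yields a newtype
-// `pub struct ExactSize(u32)` with infallible `From<u32>` / `Into<u32>`
-// conversions, while `custom_field TruncatedSize : 24 "..."` yields
-// `pub struct TruncatedSize(u32)` whose `TryFrom<u32>` rejects values above
-// the 24-bit maximum (the exact mask literal comes from `mask_bits`).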
-
-fn generate_decl(
- scope: &lint::Scope<'_>,
- file: &analyzer_ast::File,
- decl: &analyzer_ast::Decl,
-) -> proc_macro2::TokenStream {
- match &decl.desc {
- ast::DeclDesc::Packet { id, .. } => generate_packet_decl(scope, file.endianness.value, id),
- ast::DeclDesc::Struct { id, parent_id: None, .. } => {
- // TODO(mgeisler): handle structs with parents. We could
- // generate code for them, but the code is not useful
- // since it would require the caller to unpack everything
- // manually. We either need to change the API, or
- // implement the recursive (de)serialization.
- generate_struct_decl(scope, file.endianness.value, id)
- }
- ast::DeclDesc::Enum { id, tags, width } => generate_enum_decl(id, tags, *width, false),
- ast::DeclDesc::CustomField { id, width: Some(width), .. } => {
- generate_custom_field_decl(id, *width)
- }
- _ => todo!("unsupported Decl::{:?}", decl),
- }
-}
-
-/// Generate Rust code from an AST.
-///
-/// The output is pretty-printed with `prettyplease`; pipe it through
-/// `rustfmt` if `rustfmt`-style formatting is required.
-pub fn generate(sources: &ast::SourceDatabase, file: &analyzer_ast::File) -> String {
- let source = sources.get(file.file).expect("could not read source");
- let preamble = preamble::generate(Path::new(source.name()));
-
- let scope = lint::Scope::new(file);
- let decls = file.declarations.iter().map(|decl| generate_decl(&scope, file, decl));
- let code = quote! {
- #preamble
-
- #(#decls)*
- };
- let syntax_tree = syn::parse2(code).expect("Could not parse code");
- prettyplease::unparse(&syntax_tree)
-}
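-
-// Hypothetical end-to-end use of this entry point (the real driver lives in
-// main.rs; the file names and error handling below are illustrative):
-//
-//     let mut sources = ast::SourceDatabase::new();
-//     let pdl = std::fs::read_to_string("packets.pdl").expect("read error");
-//     let file = crate::parser::parse_inline(&mut sources, String::from("packets.pdl"), pdl)
-//         .expect("parse error");
-//     let file = crate::analyzer::analyze(&file).expect("analyzer error");
-//     std::fs::write("packets.rs", generate(&sources, &file)).expect("write error");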
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::analyzer;
- use crate::ast;
- use crate::parser::parse_inline;
- use crate::test_utils::{assert_snapshot_eq, format_rust};
- use paste::paste;
-
- /// Parse a string fragment as a PDL file.
- ///
- /// # Panics
- ///
- /// Panics on parse errors.
- pub fn parse_str(text: &str) -> analyzer_ast::File {
- let mut db = ast::SourceDatabase::new();
- let file =
- parse_inline(&mut db, String::from("stdin"), String::from(text)).expect("parse error");
- analyzer::analyze(&file).expect("analyzer error")
- }
-
- #[track_caller]
- fn assert_iter_eq<T: std::cmp::PartialEq + std::fmt::Debug>(
- left: impl IntoIterator<Item = T>,
- right: impl IntoIterator<Item = T>,
- ) {
- assert_eq!(left.into_iter().collect::<Vec<T>>(), right.into_iter().collect::<Vec<T>>());
- }
-
- #[test]
- fn test_find_constrained_parent_fields() {
- let code = "
- little_endian_packets
- packet Parent {
- a: 8,
- b: 8,
- c: 8,
- _payload_,
- }
- packet Child: Parent(a = 10) {
- x: 8,
- _payload_,
- }
- packet GrandChild: Child(b = 20) {
- y: 8,
- _payload_,
- }
- packet GrandGrandChild: GrandChild(c = 30) {
- z: 8,
- }
- ";
- let file = parse_str(code);
- let scope = lint::Scope::new(&file);
- let find_fields =
- |id| find_constrained_parent_fields(&scope, id).map(|field| field.id().unwrap());
- assert_iter_eq(find_fields("Parent"), vec![]);
- assert_iter_eq(find_fields("Child"), vec!["b", "c"]);
- assert_iter_eq(find_fields("GrandChild"), vec!["c"]);
- assert_iter_eq(find_fields("GrandGrandChild"), vec![]);
- }
-
- /// Create a unit test for the given PDL `code`.
- ///
- /// The unit test will compare the generated Rust code for all
- /// declarations with previously saved snapshots. The snapshots
-    /// are read from `"tests/generated/{name}_{endianness}.rs"`, where
-    /// `{name}` and `{endianness}` are taken from the macro arguments.
- ///
- /// When adding new tests or modifying existing ones, use
- /// `UPDATE_SNAPSHOTS=1 cargo test` to automatically populate the
- /// snapshots with the expected output.
- ///
-    /// The `code` cannot have an endianness declaration; instead, you
- /// must supply either `little_endian` or `big_endian` as
- /// `endianness`.
- macro_rules! make_pdl_test {
- ($name:ident, $code:expr, $endianness:ident) => {
- paste! {
- #[test]
- fn [< test_ $name _ $endianness >]() {
- let name = stringify!($name);
- let endianness = stringify!($endianness);
- let code = format!("{endianness}_packets\n{}", $code);
- let mut db = ast::SourceDatabase::new();
- let file = parse_inline(&mut db, String::from("test"), code).unwrap();
- let file = analyzer::analyze(&file).unwrap();
- let actual_code = generate(&db, &file);
- assert_snapshot_eq(
- &format!("tests/generated/{name}_{endianness}.rs"),
- &format_rust(&actual_code),
- );
- }
- }
- };
- }
-
-    /// Create little- and big-endian tests for the given PDL `code`.
- ///
- /// The `code` cannot have an endianness declaration: we will
- /// automatically generate unit tests for both
- /// "little_endian_packets" and "big_endian_packets".
- macro_rules! test_pdl {
- ($name:ident, $code:expr $(,)?) => {
- make_pdl_test!($name, $code, little_endian);
- make_pdl_test!($name, $code, big_endian);
- };
- }
-
- test_pdl!(packet_decl_empty, "packet Foo {}");
-
- test_pdl!(packet_decl_8bit_scalar, " packet Foo { x: 8 }");
- test_pdl!(packet_decl_24bit_scalar, "packet Foo { x: 24 }");
- test_pdl!(packet_decl_64bit_scalar, "packet Foo { x: 64 }");
-
- test_pdl!(
- enum_declaration,
- r#"
- // Should generate unknown case.
- enum IncompleteTruncated : 3 {
- A = 0,
- B = 1,
- }
-
- // Should generate unknown case.
- enum IncompleteTruncatedWithRange : 3 {
- A = 0,
- B = 1..6 {
- X = 1,
- Y = 2,
- }
- }
-
- // Should generate unreachable case.
- enum CompleteTruncated : 3 {
- A = 0,
- B = 1,
- C = 2,
- D = 3,
- E = 4,
- F = 5,
- G = 6,
- H = 7,
- }
-
- // Should generate unreachable case.
- enum CompleteTruncatedWithRange : 3 {
- A = 0,
- B = 1..7 {
- X = 1,
- Y = 2,
- }
- }
-
- // Should generate no unknown or unreachable case.
- enum CompleteWithRange : 8 {
- A = 0,
- B = 1,
- C = 2..255,
- }
- "#
- );
-
- test_pdl!(
- custom_field_declaration,
- r#"
- // Still unsupported.
- // custom_field Dynamic "dynamic"
-
- // Should generate a type with From<u32> implementation.
- custom_field ExactSize : 32 "exact_size"
-
- // Should generate a type with TryFrom<u32> implementation.
- custom_field TruncatedSize : 24 "truncated_size"
- "#
- );
-
- test_pdl!(
- packet_decl_simple_scalars,
- r#"
- packet Foo {
- x: 8,
- y: 16,
- z: 24,
- }
- "#
- );
-
- test_pdl!(
- packet_decl_complex_scalars,
- r#"
- packet Foo {
- a: 3,
- b: 8,
- c: 5,
- d: 24,
- e: 12,
- f: 4,
- }
- "#,
- );
-
- // Test that we correctly mask a byte-sized value in the middle of
- // a chunk.
- test_pdl!(
- packet_decl_mask_scalar_value,
- r#"
- packet Foo {
- a: 2,
- b: 24,
- c: 6,
- }
- "#,
- );
-
- test_pdl!(
- struct_decl_complex_scalars,
- r#"
- struct Foo {
- a: 3,
- b: 8,
- c: 5,
- d: 24,
- e: 12,
- f: 4,
- }
- "#,
- );
-
- test_pdl!(packet_decl_8bit_enum, " enum Foo : 8 { A = 1, B = 2 } packet Bar { x: Foo }");
- test_pdl!(packet_decl_24bit_enum, "enum Foo : 24 { A = 1, B = 2 } packet Bar { x: Foo }");
- test_pdl!(packet_decl_64bit_enum, "enum Foo : 64 { A = 1, B = 2 } packet Bar { x: Foo }");
-
- test_pdl!(
- packet_decl_mixed_scalars_enums,
- "
- enum Enum7 : 7 {
- A = 1,
- B = 2,
- }
-
- enum Enum9 : 9 {
- A = 1,
- B = 2,
- }
-
- packet Foo {
- x: Enum7,
- y: 5,
- z: Enum9,
- w: 3,
- }
- "
- );
-
- test_pdl!(packet_decl_8bit_scalar_array, " packet Foo { x: 8[3] }");
- test_pdl!(packet_decl_24bit_scalar_array, "packet Foo { x: 24[5] }");
- test_pdl!(packet_decl_64bit_scalar_array, "packet Foo { x: 64[7] }");
-
- test_pdl!(
- packet_decl_8bit_enum_array,
- "enum Foo : 8 { FOO_BAR = 1, BAZ = 2 } packet Bar { x: Foo[3] }"
- );
- test_pdl!(
- packet_decl_24bit_enum_array,
- "enum Foo : 24 { FOO_BAR = 1, BAZ = 2 } packet Bar { x: Foo[5] }"
- );
- test_pdl!(
- packet_decl_64bit_enum_array,
- "enum Foo : 64 { FOO_BAR = 1, BAZ = 2 } packet Bar { x: Foo[7] }"
- );
-
- test_pdl!(
- packet_decl_array_dynamic_count,
- "
- packet Foo {
- _count_(x): 5,
- padding: 3,
- x: 24[]
- }
- "
- );
-
- test_pdl!(
- packet_decl_array_dynamic_size,
- "
- packet Foo {
- _size_(x): 5,
- padding: 3,
- x: 24[]
- }
- "
- );
-
- test_pdl!(
- packet_decl_array_unknown_element_width_dynamic_size,
- "
- struct Foo {
- _count_(a): 40,
- a: 16[],
- }
-
- packet Bar {
- _size_(x): 40,
- x: Foo[],
- }
- "
- );
-
- test_pdl!(
- packet_decl_array_unknown_element_width_dynamic_count,
- "
- struct Foo {
- _count_(a): 40,
- a: 16[],
- }
-
- packet Bar {
- _count_(x): 40,
- x: Foo[],
- }
- "
- );
-
- test_pdl!(
- packet_decl_array_with_padding,
- "
- struct Foo {
- _count_(a): 40,
- a: 16[],
- }
-
- packet Bar {
- a: Foo[],
- _padding_ [128],
- }
- "
- );
-
- test_pdl!(
- packet_decl_reserved_field,
- "
- packet Foo {
- _reserved_: 40,
- }
- "
- );
-
- test_pdl!(
- packet_decl_custom_field,
- r#"
- custom_field Bar1 : 24 "exact"
- custom_field Bar2 : 32 "truncated"
-
- packet Foo {
- a: Bar1,
- b: Bar2,
- }
- "#
- );
-
- test_pdl!(
- packet_decl_fixed_scalar_field,
- "
- packet Foo {
- _fixed_ = 7 : 7,
- b: 57,
- }
- "
- );
-
- test_pdl!(
- packet_decl_fixed_enum_field,
- "
- enum Enum7 : 7 {
- A = 1,
- B = 2,
- }
-
- packet Foo {
- _fixed_ = A : Enum7,
- b: 57,
- }
- "
- );
-
- test_pdl!(
- packet_decl_payload_field_variable_size,
- "
- packet Foo {
- a: 8,
- _size_(_payload_): 8,
- _payload_,
- b: 16,
- }
- "
- );
-
- test_pdl!(
- packet_decl_payload_field_unknown_size,
- "
- packet Foo {
- a: 24,
- _payload_,
- }
- "
- );
-
- test_pdl!(
- packet_decl_payload_field_unknown_size_terminal,
- "
- packet Foo {
- _payload_,
- a: 24,
- }
- "
- );
-
- test_pdl!(
- packet_decl_child_packets,
- "
- enum Enum16 : 16 {
- A = 1,
- B = 2,
- }
-
- packet Foo {
- a: 8,
- b: Enum16,
- _size_(_payload_): 8,
- _payload_
- }
-
- packet Bar : Foo (a = 100) {
- x: 8,
- }
-
- packet Baz : Foo (b = B) {
- y: 16,
- }
- "
- );
-
- test_pdl!(
- packet_decl_grand_children,
- "
- enum Enum16 : 16 {
- A = 1,
- B = 2,
- }
-
- packet Parent {
- foo: Enum16,
- bar: Enum16,
- baz: Enum16,
- _size_(_payload_): 8,
- _payload_
- }
-
- packet Child : Parent (foo = A) {
- quux: Enum16,
- _payload_,
- }
-
- packet GrandChild : Child (bar = A, quux = A) {
- _body_,
- }
-
- packet GrandGrandChild : GrandChild (baz = A) {
- _body_,
- }
- "
- );
-
- test_pdl!(
- packet_decl_parent_with_no_payload,
- "
- enum Enum8 : 8 {
- A = 0,
- }
-
- packet Parent {
- v : Enum8,
- }
-
- packet Child : Parent (v = A) {
- }
- "
- );
-
- test_pdl!(
- packet_decl_parent_with_alias_child,
- "
- enum Enum8 : 8 {
- A = 0,
- B = 1,
- C = 2,
- }
-
- packet Parent {
- v : Enum8,
- _payload_,
- }
-
- packet AliasChild : Parent {
- _payload_
- }
-
- packet NormalChild : Parent (v = A) {
- }
-
- packet NormalGrandChild1 : AliasChild (v = B) {
- }
-
- packet NormalGrandChild2 : AliasChild (v = C) {
- _payload_
- }
- "
- );
-
- // TODO(mgeisler): enable this test when we have an approach to
- // struct fields with parents.
- //
- // test_pdl!(
- // struct_decl_child_structs,
- // "
- // enum Enum16 : 16 {
- // A = 1,
- // B = 2,
- // }
- //
- // struct Foo {
- // a: 8,
- // b: Enum16,
- // _size_(_payload_): 8,
- // _payload_
- // }
- //
- // struct Bar : Foo (a = 100) {
- // x: 8,
- // }
- //
- // struct Baz : Foo (b = B) {
- // y: 16,
- // }
- // "
- // );
- //
- // test_pdl!(
- // struct_decl_grand_children,
- // "
- // enum Enum16 : 16 {
- // A = 1,
- // B = 2,
- // }
- //
- // struct Parent {
- // foo: Enum16,
- // bar: Enum16,
- // baz: Enum16,
- // _size_(_payload_): 8,
- // _payload_
- // }
- //
- // struct Child : Parent (foo = A) {
- // quux: Enum16,
- // _payload_,
- // }
- //
- // struct GrandChild : Child (bar = A, quux = A) {
- // _body_,
- // }
- //
- // struct GrandGrandChild : GrandChild (baz = A) {
- // _body_,
- // }
- // "
- // );
-}
diff --git a/tools/pdl/src/backends/rust/parser.rs b/tools/pdl/src/backends/rust/parser.rs
deleted file mode 100644
index 196ae0059c..0000000000
--- a/tools/pdl/src/backends/rust/parser.rs
+++ /dev/null
@@ -1,793 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use crate::analyzer::ast as analyzer_ast;
-use crate::backends::rust::{
- constraint_to_value, find_constrained_parent_fields, mask_bits, types, ToUpperCamelCase,
-};
-use crate::{ast, lint};
-use quote::{format_ident, quote};
-use std::collections::BTreeSet;
-
-fn size_field_ident(id: &str) -> proc_macro2::Ident {
- format_ident!("{}_size", id.trim_matches('_'))
-}
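-
-// E.g. `size_field_ident("_payload_")` yields the identifier `payload_size`.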
-
-/// A single bit-field.
-struct BitField<'a> {
- shift: usize, // The shift to apply to this field.
- field: &'a analyzer_ast::Field,
-}
-
-pub struct FieldParser<'a> {
- scope: &'a lint::Scope<'a>,
- endianness: ast::EndiannessValue,
- packet_name: &'a str,
- span: &'a proc_macro2::Ident,
- chunk: Vec<BitField<'a>>,
- code: Vec<proc_macro2::TokenStream>,
- shift: usize,
- offset: usize,
-}
-
-impl<'a> FieldParser<'a> {
- pub fn new(
- scope: &'a lint::Scope<'a>,
- endianness: ast::EndiannessValue,
- packet_name: &'a str,
- span: &'a proc_macro2::Ident,
- ) -> FieldParser<'a> {
- FieldParser {
- scope,
- endianness,
- packet_name,
- span,
- chunk: Vec::new(),
- code: Vec::new(),
- shift: 0,
- offset: 0,
- }
- }
-
- pub fn add(&mut self, field: &'a analyzer_ast::Field) {
- match &field.desc {
- _ if self.scope.is_bitfield(field) => self.add_bit_field(field),
- ast::FieldDesc::Padding { .. } => (),
- ast::FieldDesc::Array { id, width, type_id, size, .. } => self.add_array_field(
- id,
- *width,
- type_id.as_deref(),
- *size,
- field.annot.padded_size,
- self.scope.get_field_declaration(field),
- ),
- ast::FieldDesc::Typedef { id, type_id } => self.add_typedef_field(id, type_id),
- ast::FieldDesc::Payload { size_modifier, .. } => {
- self.add_payload_field(size_modifier.as_deref())
- }
- ast::FieldDesc::Body { .. } => self.add_payload_field(None),
- _ => todo!("{field:?}"),
- }
- }
-
- fn add_bit_field(&mut self, field: &'a analyzer_ast::Field) {
- self.chunk.push(BitField { shift: self.shift, field });
- self.shift += self.scope.get_field_width(field, false).unwrap();
- if self.shift % 8 != 0 {
- return;
- }
-
- let size = self.shift / 8;
- let end_offset = self.offset + size;
-
- let wanted = proc_macro2::Literal::usize_unsuffixed(size);
- self.check_size(self.span, &quote!(#wanted));
-
- let chunk_type = types::Integer::new(self.shift);
- // TODO(mgeisler): generate Rust variable names which cannot
- // conflict with PDL field names. An option would be to start
-        // Rust variable names with `_`, but that has special
-        // semantics in Rust.
- let chunk_name = format_ident!("chunk");
-
- let get = types::get_uint(self.endianness, self.shift, self.span);
- if self.chunk.len() > 1 {
- // Multiple values: we read into a local variable.
- self.code.push(quote! {
- let #chunk_name = #get;
- });
- }
-
- let single_value = self.chunk.len() == 1; // && self.chunk[0].offset == 0;
- for BitField { shift, field } in self.chunk.drain(..) {
- let mut v = if single_value {
- // Single value: read directly.
- quote! { #get }
- } else {
- // Multiple values: read from `chunk_name`.
- quote! { #chunk_name }
- };
-
- if shift > 0 {
- let shift = proc_macro2::Literal::usize_unsuffixed(shift);
- v = quote! { (#v >> #shift) }
- }
-
- let width = self.scope.get_field_width(field, false).unwrap();
- let value_type = types::Integer::new(width);
- if !single_value && width < value_type.width {
- // Mask value if we grabbed more than `width` and if
- // `as #value_type` doesn't already do the masking.
- let mask = mask_bits(width, "u64");
- v = quote! { (#v & #mask) };
- }
-
- if value_type.width < chunk_type.width {
- v = quote! { #v as #value_type };
- }
-
- self.code.push(match &field.desc {
- ast::FieldDesc::Scalar { id, .. } => {
- let id = format_ident!("{id}");
- quote! {
- let #id = #v;
- }
- }
- ast::FieldDesc::FixedEnum { enum_id, tag_id, .. } => {
- let enum_id = format_ident!("{enum_id}");
- let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
- quote! {
- if #v != #value_type::from(#enum_id::#tag_id) {
- return Err(Error::InvalidFixedValue {
- expected: #value_type::from(#enum_id::#tag_id) as u64,
- actual: #v as u64,
- });
- }
- }
- }
- ast::FieldDesc::FixedScalar { value, .. } => {
- let value = proc_macro2::Literal::usize_unsuffixed(*value);
- quote! {
- if #v != #value {
- return Err(Error::InvalidFixedValue {
- expected: #value,
- actual: #v as u64,
- });
- }
- }
- }
- ast::FieldDesc::Typedef { id, type_id } => {
- let field_name = id;
- let type_name = type_id;
- let packet_name = &self.packet_name;
- let id = format_ident!("{id}");
- let type_id = format_ident!("{type_id}");
- quote! {
- let #id = #type_id::try_from(#v).map_err(|_| Error::InvalidEnumValueError {
- obj: #packet_name.to_string(),
- field: #field_name.to_string(),
- value: #v as u64,
- type_: #type_name.to_string(),
- })?;
- }
- }
- ast::FieldDesc::Reserved { .. } => {
- if single_value {
- let span = self.span;
- let size = proc_macro2::Literal::usize_unsuffixed(size);
- quote! {
- #span.get_mut().advance(#size);
- }
- } else {
- // Otherwise we don't need anything: we will
- // have advanced past the reserved field when
- // reading the chunk above.
- quote! {}
- }
- }
- ast::FieldDesc::Size { field_id, .. } => {
- let id = size_field_ident(field_id);
- quote! {
- let #id = #v as usize;
- }
- }
- ast::FieldDesc::Count { field_id, .. } => {
- let id = format_ident!("{field_id}_count");
- quote! {
- let #id = #v as usize;
- }
- }
- _ => todo!(),
- });
- }
-
- self.offset = end_offset;
- self.shift = 0;
- }
-
- fn packet_scope(&self) -> Option<&lint::PacketScope> {
- self.scope.scopes.get(self.scope.typedef.get(self.packet_name)?)
- }
-
- fn find_count_field(&self, id: &str) -> Option<proc_macro2::Ident> {
- match self.packet_scope()?.get_array_size_field(id)?.desc {
- ast::FieldDesc::Count { .. } => Some(format_ident!("{id}_count")),
- _ => None,
- }
- }
-
- fn find_size_field(&self, id: &str) -> Option<proc_macro2::Ident> {
- match self.packet_scope()?.get_array_size_field(id)?.desc {
- ast::FieldDesc::Size { .. } => Some(size_field_ident(id)),
- _ => None,
- }
- }
-
- fn payload_field_offset_from_end(&self) -> Option<usize> {
- let packet_scope = self.packet_scope().unwrap();
- let mut fields = packet_scope.iter_fields();
- fields.find(|f| {
- matches!(f.desc, ast::FieldDesc::Body { .. } | ast::FieldDesc::Payload { .. })
- })?;
-
- let mut offset = 0;
- for field in fields {
- if let Some(width) = self.scope.get_field_width(field, false) {
- offset += width;
- } else {
- return None;
- }
- }
-
- Some(offset)
- }
-
- fn check_size(&mut self, span: &proc_macro2::Ident, wanted: &proc_macro2::TokenStream) {
- let packet_name = &self.packet_name;
- self.code.push(quote! {
- if #span.get().remaining() < #wanted {
- return Err(Error::InvalidLengthError {
- obj: #packet_name.to_string(),
- wanted: #wanted,
- got: #span.get().remaining(),
- });
- }
- });
- }
-
- fn add_array_field(
- &mut self,
- id: &str,
- // `width`: the width in bits of the array elements (if Some).
- width: Option<usize>,
- // `type_id`: the enum type of the array elements (if Some).
- // Mutually exclusive with `width`.
- type_id: Option<&str>,
- // `size`: the size of the array in number of elements (if
- // known). If None, the array is a Vec with a dynamic size.
- size: Option<usize>,
- padding_size: Option<usize>,
- decl: Option<&analyzer_ast::Decl>,
- ) {
- enum ElementWidth {
- Static(usize), // Static size in bytes.
- Unknown,
- }
- let element_width = match width.or_else(|| self.scope.get_decl_width(decl?, false)) {
- Some(w) => {
- assert_eq!(w % 8, 0, "Array element size ({w}) is not a multiple of 8");
- ElementWidth::Static(w / 8)
- }
- None => ElementWidth::Unknown,
- };
-
- // The "shape" of the array, i.e., the number of elements
- // given via a static count, a count field, a size field, or
- // unknown.
- enum ArrayShape {
-            Static(usize),                  // Static count
-            CountField(proc_macro2::Ident), // Count based on count field
-            SizeField(proc_macro2::Ident),  // Count based on size field
-            Unknown,                        // Variable count based on remaining bytes
- }
- let array_shape = if let Some(count) = size {
- ArrayShape::Static(count)
- } else if let Some(count_field) = self.find_count_field(id) {
- ArrayShape::CountField(count_field)
- } else if let Some(size_field) = self.find_size_field(id) {
- ArrayShape::SizeField(size_field)
- } else {
- ArrayShape::Unknown
- };
-
- // TODO size modifier
-
- let span = match padding_size {
- Some(padding_size) => {
- let span = self.span;
- self.check_size(span, &quote!(#padding_size));
- self.code.push(quote! {
- let (head, tail) = #span.get().split_at(#padding_size);
- let mut head = &mut Cell::new(head);
- #span.replace(tail);
- });
- format_ident!("head")
- }
- None => self.span.clone(),
- };
-
- let id = format_ident!("{id}");
-
- let parse_element = self.parse_array_element(&span, width, type_id, decl);
- match (element_width, &array_shape) {
- (ElementWidth::Unknown, ArrayShape::SizeField(size_field)) => {
- // The element width is not known, but the array full
- // octet size is known by size field. Parse elements
- // item by item as a vector.
- self.check_size(&span, &quote!(#size_field));
- let parse_element =
- self.parse_array_element(&format_ident!("head"), width, type_id, decl);
- self.code.push(quote! {
- let (head, tail) = #span.get().split_at(#size_field);
- let mut head = &mut Cell::new(head);
- #span.replace(tail);
- let mut #id = Vec::new();
- while !head.get().is_empty() {
- #id.push(#parse_element?);
- }
- });
- }
- (ElementWidth::Unknown, ArrayShape::Static(count)) => {
- // The element width is not known, but the array
- // element count is known statically. Parse elements
- // item by item as an array.
- let count = syn::Index::from(*count);
- self.code.push(quote! {
- // TODO(mgeisler): use
- // https://doc.rust-lang.org/std/array/fn.try_from_fn.html
- // when stabilized.
- let #id = (0..#count)
- .map(|_| #parse_element)
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- });
- }
- (ElementWidth::Unknown, ArrayShape::CountField(count_field)) => {
- // The element width is not known, but the array
- // element count is known by the count field. Parse
- // elements item by item as a vector.
- self.code.push(quote! {
- let #id = (0..#count_field)
- .map(|_| #parse_element)
- .collect::<Result<Vec<_>>>()?;
- });
- }
- (ElementWidth::Unknown, ArrayShape::Unknown) => {
-                // Neither the count nor the size is known; parse elements
-                // until the end of the span.
- self.code.push(quote! {
- let mut #id = Vec::new();
- while !#span.get().is_empty() {
- #id.push(#parse_element?);
- }
- });
- }
- (ElementWidth::Static(element_width), ArrayShape::Static(count)) => {
- // The element width is known, and the array element
- // count is known statically.
- let count = syn::Index::from(*count);
- // This creates a nicely formatted size.
- let array_size = if element_width == 1 {
- quote!(#count)
- } else {
- let element_width = syn::Index::from(element_width);
- quote!(#count * #element_width)
- };
- self.check_size(&span, &quote! { #array_size });
- self.code.push(quote! {
- // TODO(mgeisler): use
- // https://doc.rust-lang.org/std/array/fn.try_from_fn.html
- // when stabilized.
- let #id = (0..#count)
- .map(|_| #parse_element)
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- });
- }
- (ElementWidth::Static(element_width), ArrayShape::CountField(count_field)) => {
- // The element width is known, and the array element
- // count is known dynamically by the count field.
- self.check_size(&span, &quote!(#count_field * #element_width));
- self.code.push(quote! {
- let #id = (0..#count_field)
- .map(|_| #parse_element)
- .collect::<Result<Vec<_>>>()?;
- });
- }
- (ElementWidth::Static(element_width), ArrayShape::SizeField(_))
- | (ElementWidth::Static(element_width), ArrayShape::Unknown) => {
- // The element width is known, and the array full size
- // is known by size field, or unknown (in which case
- // it is the remaining span length).
- let array_size = if let ArrayShape::SizeField(size_field) = &array_shape {
- self.check_size(&span, &quote!(#size_field));
- quote!(#size_field)
- } else {
- quote!(#span.get().remaining())
- };
- let count_field = format_ident!("{id}_count");
- let array_count = if element_width != 1 {
- let element_width = syn::Index::from(element_width);
- self.code.push(quote! {
- if #array_size % #element_width != 0 {
- return Err(Error::InvalidArraySize {
- array: #array_size,
- element: #element_width,
- });
- }
- let #count_field = #array_size / #element_width;
- });
- quote!(#count_field)
- } else {
- array_size
- };
-
- self.code.push(quote! {
- let mut #id = Vec::with_capacity(#array_count);
- for _ in 0..#array_count {
- #id.push(#parse_element?);
- }
- });
- }
- }
- }
-
- /// Parse typedef fields.
- ///
- /// This is only for non-enum fields: enums are parsed via
- /// add_bit_field.
- fn add_typedef_field(&mut self, id: &str, type_id: &str) {
- assert_eq!(self.shift, 0, "Typedef field does not start on an octet boundary");
-
- let decl = self.scope.typedef[type_id];
- if let ast::DeclDesc::Struct { parent_id: Some(_), .. } = &decl.desc {
- panic!("Derived struct used in typedef field");
- }
-
- let span = self.span;
- let id = format_ident!("{id}");
- let type_id = format_ident!("{type_id}");
-
- self.code.push(match self.scope.get_decl_width(decl, true) {
- None => quote! {
- let #id = #type_id::parse_inner(&mut #span)?;
- },
- Some(width) => {
- assert_eq!(width % 8, 0, "Typedef field type size is not a multiple of 8");
- match &decl.desc {
- ast::DeclDesc::Checksum { .. } => todo!(),
- ast::DeclDesc::CustomField { .. } if [8, 16, 32, 64].contains(&width) => {
- let get_uint = types::get_uint(self.endianness, width, span);
- quote! {
- let #id = #get_uint.into();
- }
- }
- ast::DeclDesc::CustomField { .. } => {
- let get_uint = types::get_uint(self.endianness, width, span);
- quote! {
- let #id = (#get_uint)
- .try_into()
- .unwrap(); // Value is masked and conversion must succeed.
- }
- }
- ast::DeclDesc::Struct { .. } => {
- let width = syn::Index::from(width / 8);
- quote! {
- let (head, tail) = #span.get().split_at(#width);
- #span.replace(tail);
- let #id = #type_id::parse(head)?;
- }
- }
- _ => unreachable!(),
- }
- }
- });
- }
-
- /// Parse body and payload fields.
- fn add_payload_field(&mut self, size_modifier: Option<&str>) {
- let span = self.span;
- let packet_scope = self.packet_scope().unwrap();
- let payload_size_field = packet_scope.get_payload_size_field();
- let offset_from_end = self.payload_field_offset_from_end();
-
- if size_modifier.is_some() {
- todo!(
- "Unsupported size modifier for {packet}: {size_modifier:?}",
- packet = self.packet_name
- );
- }
-
- if self.shift != 0 {
- if payload_size_field.is_some() {
- panic!("Unexpected payload size for non byte aligned payload");
- }
-
- //let rounded_size = self.shift / 8 + if self.shift % 8 == 0 { 0 } else { 1 };
- //let padding_bits = 8 * rounded_size - self.shift;
- //let reserved_field =
- // ast::Field::Reserved { loc: ast::SourceRange::default(), width: padding_bits };
- //TODO: self.add_bit_field(&reserved_field); --
- // reserved_field does not live long enough.
-
- // TODO: consume span of rounded size
- } else {
- // TODO: consume span
- }
-
- if let Some(ast::FieldDesc::Size { field_id, .. }) = &payload_size_field.map(|f| &f.desc) {
- // The payload or body has a known size. Consume the
- // payload and update the span in case fields are placed
- // after the payload.
- let size_field = size_field_ident(field_id);
- self.check_size(self.span, &quote!(#size_field ));
- self.code.push(quote! {
- let payload = &#span.get()[..#size_field];
- #span.get_mut().advance(#size_field);
- });
- } else if offset_from_end == Some(0) {
- // The payload or body is the last field of a packet,
- // consume the remaining span.
- self.code.push(quote! {
- let payload = #span.get();
- #span.get_mut().advance(payload.len());
- });
- } else if let Some(offset_from_end) = offset_from_end {
- // The payload or body is followed by fields of static
- // size. Consume the span that is not reserved for the
- // following fields.
- assert_eq!(
- offset_from_end % 8,
- 0,
- "Payload field offset from end of packet is not a multiple of 8"
- );
- let offset_from_end = syn::Index::from(offset_from_end / 8);
- self.check_size(self.span, &quote!(#offset_from_end));
- self.code.push(quote! {
- let payload = &#span.get()[..#span.get().len() - #offset_from_end];
- #span.get_mut().advance(payload.len());
- });
- }
-
- let decl = self.scope.typedef[self.packet_name];
- if let ast::DeclDesc::Struct { .. } = &decl.desc {
- self.code.push(quote! {
- let payload = Vec::from(payload);
- });
- }
- }
-
- /// Parse a single array field element from `span`.
- fn parse_array_element(
- &self,
- span: &proc_macro2::Ident,
- width: Option<usize>,
- type_id: Option<&str>,
- decl: Option<&analyzer_ast::Decl>,
- ) -> proc_macro2::TokenStream {
- if let Some(width) = width {
- let get_uint = types::get_uint(self.endianness, width, span);
- return quote! {
- Ok::<_, Error>(#get_uint)
- };
- }
-
- if let Some(ast::DeclDesc::Enum { id, width, .. }) = decl.map(|decl| &decl.desc) {
- let get_uint = types::get_uint(self.endianness, *width, span);
- let type_id = format_ident!("{id}");
- let packet_name = &self.packet_name;
- return quote! {
- #type_id::try_from(#get_uint).map_err(|_| Error::InvalidEnumValueError {
- obj: #packet_name.to_string(),
- field: String::new(), // TODO(mgeisler): fill out or remove
- value: 0,
- type_: #id.to_string(),
- })
- };
- }
-
- let type_id = format_ident!("{}", type_id.unwrap());
- quote! {
- #type_id::parse_inner(#span)
- }
- }
-
- pub fn done(&mut self) {
- let decl = self.scope.typedef[self.packet_name];
- if let ast::DeclDesc::Struct { .. } = &decl.desc {
- return; // Structs don't parse the child structs recursively.
- }
-
- let packet_scope = &self.scope.scopes[&decl];
- let children = self.scope.iter_children(self.packet_name).collect::<Vec<_>>();
- if children.is_empty() && packet_scope.get_payload_field().is_none() {
- return;
- }
-
- // Gather fields that are constrained in immediate child declarations.
- // Keep the fields sorted by name.
-        // TODO: fields that are only matched in grandchildren will not be included.
- let constrained_fields = children
- .iter()
- .flat_map(|child| child.constraints().map(|c| &c.id))
- .collect::<BTreeSet<_>>();
-
- let mut match_values = Vec::new();
- let mut child_parse_args = Vec::new();
- let mut child_ids_data = Vec::new();
- let mut child_ids = Vec::new();
- let get_constraint_value = |mut constraints: std::slice::Iter<'_, ast::Constraint>,
- id: &str|
- -> Option<proc_macro2::TokenStream> {
- constraints.find(|c| c.id == id).map(|c| constraint_to_value(packet_scope, c))
- };
-
- for child in children.iter() {
- let tuple_values = constrained_fields
- .iter()
- .map(|id| {
- get_constraint_value(child.constraints(), id).map(|v| vec![v]).unwrap_or_else(
- || {
- self.scope
- .file
- .iter_children(child)
- .filter_map(|d| get_constraint_value(d.constraints(), id))
- .collect()
- },
- )
- })
- .collect::<Vec<_>>();
-
-            // If no constraint values are found for the tuple, skip the child
-            // packet, as it would otherwise capture unwanted input packets.
- if tuple_values.iter().all(|v| v.is_empty()) {
- continue;
- }
-
- let tuple_values = tuple_values
- .iter()
- .map(|v| v.is_empty().then_some(quote!(_)).unwrap_or_else(|| quote!( #(#v)|* )))
- .collect::<Vec<_>>();
-
- let fields = find_constrained_parent_fields(self.scope, child.id().unwrap())
- .map(|field| format_ident!("{}", field.id().unwrap()));
-
- match_values.push(quote!( (#(#tuple_values),*) ));
- child_parse_args.push(quote!( #(, #fields)*));
- child_ids_data.push(format_ident!("{}Data", child.id().unwrap()));
- child_ids.push(format_ident!("{}", child.id().unwrap()));
- }
-
- let constrained_field_idents =
- constrained_fields.iter().map(|field| format_ident!("{field}"));
- let packet_data_child = format_ident!("{}DataChild", self.packet_name);
-
-        // Parsing of packet children requires having a payload field;
-        // a packet is allowed to inherit from a parent with an empty payload,
-        // in which case an empty payload value is generated here.
- if !decl
- .fields()
- .any(|f| matches!(&f.desc, ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body))
- {
- self.code.push(quote! {
- let payload: &[u8] = &[];
- })
- }
- self.code.push(quote! {
- let child = match (#(#constrained_field_idents),*) {
- #(#match_values if #child_ids_data::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = #child_ids_data::parse_inner(&mut cell #child_parse_args)?;
- // TODO(mgeisler): communicate back to user if !cell.get().is_empty()?
- #packet_data_child::#child_ids(Arc::new(child_data))
- }),*
- _ if !payload.is_empty() => {
- #packet_data_child::Payload(Bytes::copy_from_slice(payload))
- }
- _ => #packet_data_child::None,
- };
- });
- }
-}
-
-impl quote::ToTokens for FieldParser<'_> {
- fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
- let code = &self.code;
- tokens.extend(quote! {
- #(#code)*
- });
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::analyzer;
- use crate::ast;
- use crate::parser::parse_inline;
-
- /// Parse a string fragment as a PDL file.
- ///
- /// # Panics
- ///
- /// Panics on parse errors.
- pub fn parse_str(text: &str) -> analyzer_ast::File {
- let mut db = ast::SourceDatabase::new();
- let file =
- parse_inline(&mut db, String::from("stdin"), String::from(text)).expect("parse error");
- analyzer::analyze(&file).expect("analyzer error")
- }
-
- #[test]
- fn test_find_fields_static() {
- let code = "
- little_endian_packets
- packet P {
- a: 24[3],
- }
- ";
- let file = parse_str(code);
- let scope = lint::Scope::new(&file);
- let span = format_ident!("bytes");
- let parser = FieldParser::new(&scope, file.endianness.value, "P", &span);
- assert_eq!(parser.find_size_field("a"), None);
- assert_eq!(parser.find_count_field("a"), None);
- }
-
- #[test]
- fn test_find_fields_dynamic_count() {
- let code = "
- little_endian_packets
- packet P {
- _count_(b): 24,
- b: 16[],
- }
- ";
- let file = parse_str(code);
- let scope = lint::Scope::new(&file);
- let span = format_ident!("bytes");
- let parser = FieldParser::new(&scope, file.endianness.value, "P", &span);
- assert_eq!(parser.find_size_field("b"), None);
- assert_eq!(parser.find_count_field("b"), Some(format_ident!("b_count")));
- }
-
- #[test]
- fn test_find_fields_dynamic_size() {
- let code = "
- little_endian_packets
- packet P {
- _size_(c): 8,
- c: 24[],
- }
- ";
- let file = parse_str(code);
- let scope = lint::Scope::new(&file);
- let span = format_ident!("bytes");
- let parser = FieldParser::new(&scope, file.endianness.value, "P", &span);
- assert_eq!(parser.find_size_field("c"), Some(format_ident!("c_size")));
- assert_eq!(parser.find_count_field("c"), None);
- }
-}
diff --git a/tools/pdl/src/backends/rust/preamble.rs b/tools/pdl/src/backends/rust/preamble.rs
deleted file mode 100644
index 45978f13ff..0000000000
--- a/tools/pdl/src/backends/rust/preamble.rs
+++ /dev/null
@@ -1,114 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use quote::quote;
-use std::path::Path;
-
-/// Generate the file preamble.
-pub fn generate(path: &Path) -> proc_macro2::TokenStream {
- // TODO(mgeisler): Make the generated code free from warnings.
- //
- // The code either needs
- //
- // clippy_lints: "none",
- // lints: "none",
- //
- // in the Android.bp file, or we need to add
- //
- // #![allow(warnings, missing_docs)]
- //
- // to the generated code. We cannot add the module-level attribute
- // here because of how the generated code is used with include! in
- // lmp/src/packets.rs.
- let filename = path.file_name().unwrap().to_str().expect("non UTF-8 filename");
- let module_doc_string = format!(" @generated rust packets from {filename}.");
- // TODO(mgeisler): the doc comment below should be a module-level inner
- // attribute (#![doc = ...]). However, people include the generated
- // code in the middle of another module via include!:
- //
- //     fn before() {}
- //     include!("generated.rs");
- //     fn after() {}
- //
- // It is illegal to have a //! comment in the middle of a file. We
- // should refactor such usages to instead look like this:
- //
- //     fn before() {}
- //     mod foo { include!("generated.rs"); }
- //     use foo::*;
- //     fn after() {}
- quote! {
- #[doc = #module_doc_string]
-
- use bytes::{Buf, BufMut, Bytes, BytesMut};
- use std::convert::{TryFrom, TryInto};
- use std::cell::Cell;
- use std::fmt;
- use std::sync::Arc;
- use thiserror::Error;
-
- type Result<T> = std::result::Result<T, Error>;
-
- /// Private prevents users from creating arbitrary scalar values
- /// in situations where the value needs to be validated.
- /// Users can freely deref the value, but only the backend
- /// may create it.
- #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
- pub struct Private<T>(T);
-
- impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
- }
-
- #[derive(Debug, Error)]
- pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error("array size ({array} bytes) is not a multiple of the element size ({element} bytes)")]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
- }
-
- pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::test_utils::{assert_snapshot_eq, format_rust};
-
- #[test]
- fn test_generate_preamble() {
- let actual_code = generate(Path::new("some/path/foo.pdl")).to_string();
- assert_snapshot_eq("tests/generated/preamble.rs", &format_rust(&actual_code));
- }
-}
diff --git a/tools/pdl/src/backends/rust/serializer.rs b/tools/pdl/src/backends/rust/serializer.rs
deleted file mode 100644
index 70a86536e1..0000000000
--- a/tools/pdl/src/backends/rust/serializer.rs
+++ /dev/null
@@ -1,390 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use crate::analyzer::ast as analyzer_ast;
-use crate::backends::rust::{mask_bits, types, ToUpperCamelCase};
-use crate::{ast, lint};
-use quote::{format_ident, quote};
-
-/// A single bit-field value.
-struct BitField {
- value: proc_macro2::TokenStream, // An expression which produces a value.
- field_type: types::Integer, // The type of the value.
- shift: usize, // A bit-shift to apply to `value`.
-}
-
-pub struct FieldSerializer<'a> {
- scope: &'a lint::Scope<'a>,
- endianness: ast::EndiannessValue,
- packet_name: &'a str,
- span: &'a proc_macro2::Ident,
- chunk: Vec<BitField>,
- code: Vec<proc_macro2::TokenStream>,
- shift: usize,
-}
-
-impl<'a> FieldSerializer<'a> {
- pub fn new(
- scope: &'a lint::Scope<'a>,
- endianness: ast::EndiannessValue,
- packet_name: &'a str,
- span: &'a proc_macro2::Ident,
- ) -> FieldSerializer<'a> {
- FieldSerializer {
- scope,
- endianness,
- packet_name,
- span,
- chunk: Vec::new(),
- code: Vec::new(),
- shift: 0,
- }
- }
-
- pub fn add(&mut self, field: &analyzer_ast::Field) {
- match &field.desc {
- _ if self.scope.is_bitfield(field) => self.add_bit_field(field),
- ast::FieldDesc::Array { id, width, .. } => self.add_array_field(
- id,
- *width,
- field.annot.padded_size,
- self.scope.get_field_declaration(field),
- ),
- ast::FieldDesc::Typedef { id, type_id } => {
- self.add_typedef_field(id, type_id);
- }
- ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body { .. } => {
- self.add_payload_field()
- }
- // Padding fields are handled in the serialization of the associated array field.
- ast::FieldDesc::Padding { .. } => (),
- _ => todo!("Cannot yet serialize {field:?}"),
- }
- }
-
- fn add_bit_field(&mut self, field: &analyzer_ast::Field) {
- let width = self.scope.get_field_width(field, false).unwrap();
- let shift = self.shift;
-
- match &field.desc {
- ast::FieldDesc::Scalar { id, width } => {
- let field_name = format_ident!("{id}");
- let field_type = types::Integer::new(*width);
- if field_type.width > *width {
- let packet_name = &self.packet_name;
- let max_value = mask_bits(*width, "u64");
- self.code.push(quote! {
- if self.#field_name > #max_value {
- panic!(
- "Invalid value for {}::{}: {} > {}",
- #packet_name, #id, self.#field_name, #max_value
- );
- }
- });
- }
- self.chunk.push(BitField { value: quote!(self.#field_name), field_type, shift });
- }
- ast::FieldDesc::FixedEnum { enum_id, tag_id, .. } => {
- let field_type = types::Integer::new(width);
- let enum_id = format_ident!("{enum_id}");
- let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
- self.chunk.push(BitField {
- value: quote!(#field_type::from(#enum_id::#tag_id)),
- field_type,
- shift,
- });
- }
- ast::FieldDesc::FixedScalar { value, .. } => {
- let field_type = types::Integer::new(width);
- let value = proc_macro2::Literal::usize_unsuffixed(*value);
- self.chunk.push(BitField { value: quote!(#value), field_type, shift });
- }
- ast::FieldDesc::Typedef { id, .. } => {
- let field_name = format_ident!("{id}");
- let field_type = types::Integer::new(width);
- self.chunk.push(BitField {
- value: quote!(#field_type::from(self.#field_name)),
- field_type,
- shift,
- });
- }
- ast::FieldDesc::Reserved { .. } => {
- // Nothing to do here.
- }
- ast::FieldDesc::Size { field_id, width, .. } => {
- let packet_name = &self.packet_name;
- let max_value = mask_bits(*width, "usize");
-
- let decl = self.scope.typedef.get(self.packet_name).unwrap();
- let scope = self.scope.scopes.get(decl).unwrap();
- let value_field = scope.get_packet_field(field_id).unwrap();
-
- let field_name = format_ident!("{field_id}");
- let field_type = types::Integer::new(*width);
- // TODO: size modifier
-
- let value_field_decl = self.scope.get_field_declaration(value_field);
-
- let field_size_name = format_ident!("{field_id}_size");
- let array_size = match (&value_field.desc, value_field_decl.map(|decl| &decl.desc))
- {
- (ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body { .. }, _) => {
- if let ast::DeclDesc::Packet { .. } = &decl.desc {
- quote! { self.child.get_total_size() }
- } else {
- quote! { self.payload.len() }
- }
- }
- (ast::FieldDesc::Array { width: Some(width), .. }, _)
- | (ast::FieldDesc::Array { .. }, Some(ast::DeclDesc::Enum { width, .. })) => {
- let byte_width = syn::Index::from(width / 8);
- if byte_width.index == 1 {
- quote! { self.#field_name.len() }
- } else {
- quote! { (self.#field_name.len() * #byte_width) }
- }
- }
- (ast::FieldDesc::Array { .. }, _) => {
- self.code.push(quote! {
- let #field_size_name = self.#field_name
- .iter()
- .map(|elem| elem.get_size())
- .sum::<usize>();
- });
- quote! { #field_size_name }
- }
- _ => panic!("Unexpected size field: {field:?}"),
- };
-
- self.code.push(quote! {
- if #array_size > #max_value {
- panic!(
- "Invalid length for {}::{}: {} > {}",
- #packet_name, #field_id, #array_size, #max_value
- );
- }
- });
-
- self.chunk.push(BitField {
- value: quote!(#array_size as #field_type),
- field_type,
- shift,
- });
- }
- ast::FieldDesc::Count { field_id, width, .. } => {
- let field_name = format_ident!("{field_id}");
- let field_type = types::Integer::new(*width);
- if field_type.width > *width {
- let packet_name = &self.packet_name;
- let max_value = mask_bits(*width, "usize");
- self.code.push(quote! {
- if self.#field_name.len() > #max_value {
- panic!(
- "Invalid length for {}::{}: {} > {}",
- #packet_name, #field_id, self.#field_name.len(), #max_value
- );
- }
- });
- }
- self.chunk.push(BitField {
- value: quote!(self.#field_name.len() as #field_type),
- field_type,
- shift,
- });
- }
- _ => todo!("{field:?}"),
- }
-
- self.shift += width;
- if self.shift % 8 == 0 {
- self.pack_bit_fields()
- }
- }
-
- fn pack_bit_fields(&mut self) {
- assert_eq!(self.shift % 8, 0);
- let chunk_type = types::Integer::new(self.shift);
- let values = self
- .chunk
- .drain(..)
- .map(|BitField { mut value, field_type, shift }| {
- if field_type.width != chunk_type.width {
- // We will be combining values with `|`, so we
- // need to cast them first.
- value = quote! { (#value as #chunk_type) };
- }
- if shift > 0 {
- let op = quote!(<<);
- let shift = proc_macro2::Literal::usize_unsuffixed(shift);
- value = quote! { (#value #op #shift) };
- }
- value
- })
- .collect::<Vec<_>>();
-
- match values.as_slice() {
- [] => {
- let span = format_ident!("{}", self.span);
- let count = syn::Index::from(self.shift / 8);
- self.code.push(quote! {
- #span.put_bytes(0, #count);
- });
- }
- [value] => {
- let put = types::put_uint(self.endianness, value, self.shift, self.span);
- self.code.push(quote! {
- #put;
- });
- }
- _ => {
- let put = types::put_uint(self.endianness, &quote!(value), self.shift, self.span);
- self.code.push(quote! {
- let value = #(#values)|*;
- #put;
- });
- }
- }
-
- self.shift = 0;
- }
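-
- // As a rough illustration (the packet and buffer names are made up): for a
- // little-endian packet `Foo { a: 4, b: 12 }`, `add_bit_field` first emits
- // range checks for `a` and `b`, and `pack_bit_fields` then packs the
- // resulting 16-bit chunk with a single write:
- //
- //     let value = (self.a as u16) | (self.b << 4);
- //     buffer.put_u16_le(value);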
-
- fn add_array_field(
- &mut self,
- id: &str,
- width: Option<usize>,
- padding_size: Option<usize>,
- decl: Option<&analyzer_ast::Decl>,
- ) {
- let span = format_ident!("{}", self.span);
- let serialize = match width {
- Some(width) => {
- let value = quote!(*elem);
- types::put_uint(self.endianness, &value, width, self.span)
- }
- None => {
- if let Some(ast::DeclDesc::Enum { width, .. }) = decl.map(|decl| &decl.desc) {
- let element_type = types::Integer::new(*width);
- types::put_uint(
- self.endianness,
- &quote!(#element_type::from(elem)),
- *width,
- self.span,
- )
- } else {
- quote! {
- elem.write_to(#span)
- }
- }
- }
- };
-
- let id = format_ident!("{id}");
-
- self.code.push(match padding_size {
- Some(padding_size) =>
- quote! {
- let current_size = #span.len();
- for elem in &self.#id {
- #serialize;
- }
- let array_size = #span.len() - current_size;
- if array_size > #padding_size {
- panic!("attempted to serialize an array larger than the enclosing padding size");
- }
- #span.put_bytes(0, #padding_size - array_size);
- },
- None =>
- quote! {
- for elem in &self.#id {
- #serialize;
- }
- }
- });
- }
-
- fn add_typedef_field(&mut self, id: &str, type_id: &str) {
- assert_eq!(self.shift, 0, "Typedef field does not start on an octet boundary");
- let decl = self.scope.typedef[type_id];
- if let ast::DeclDesc::Struct { parent_id: Some(_), .. } = &decl.desc {
- panic!("Derived struct used in typedef field");
- }
-
- let id = format_ident!("{id}");
- let span = format_ident!("{}", self.span);
-
- self.code.push(match &decl.desc {
- ast::DeclDesc::Checksum { .. } => todo!(),
- ast::DeclDesc::CustomField { width: Some(width), .. } => {
- let backing_type = types::Integer::new(*width);
- let put_uint = types::put_uint(
- self.endianness,
- &quote! { #backing_type::from(self.#id) },
- *width,
- self.span,
- );
- quote! {
- #put_uint;
- }
- }
- ast::DeclDesc::Struct { .. } => quote! {
- self.#id.write_to(#span);
- },
- _ => unreachable!(),
- });
- }
-
- fn add_payload_field(&mut self) {
- if self.shift != 0 && self.endianness == ast::EndiannessValue::BigEndian {
- panic!("Payload field does not start on an octet boundary");
- }
-
- let decl = self.scope.typedef[self.packet_name];
- let is_packet = matches!(&decl.desc, ast::DeclDesc::Packet { .. });
-
- let child_ids = self
- .scope
- .iter_children(self.packet_name)
- .map(|child| format_ident!("{}", child.id().unwrap()))
- .collect::<Vec<_>>();
-
- let span = format_ident!("{}", self.span);
- if self.shift == 0 {
- if is_packet {
- let packet_data_child = format_ident!("{}DataChild", self.packet_name);
- self.code.push(quote! {
- match &self.child {
- #(#packet_data_child::#child_ids(child) => child.write_to(#span),)*
- #packet_data_child::Payload(payload) => #span.put_slice(payload),
- #packet_data_child::None => {},
- }
- })
- } else {
- self.code.push(quote! {
- #span.put_slice(&self.payload);
- });
- }
- } else {
- todo!("Shifted payloads");
- }
- }
-}
-
-impl quote::ToTokens for FieldSerializer<'_> {
- fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
- let code = &self.code;
- tokens.extend(quote! {
- #(#code)*
- });
- }
-}
diff --git a/tools/pdl/src/backends/rust/types.rs b/tools/pdl/src/backends/rust/types.rs
deleted file mode 100644
index 5b1767ddfb..0000000000
--- a/tools/pdl/src/backends/rust/types.rs
+++ /dev/null
@@ -1,181 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Utility functions for dealing with Rust integer types.
-
-use crate::analyzer::ast as analyzer_ast;
-use crate::{ast, lint};
-use quote::{format_ident, quote};
-
-/// A Rust integer type such as `u8`.
-#[derive(Copy, Clone)]
-pub struct Integer {
- pub width: usize,
-}
-
-impl Integer {
- /// Get the Rust integer type for the given bit width.
- ///
- /// This will round up the size to the nearest Rust integer size.
- /// PDL supports integers up to 64 bits, so it is an error to call
- /// this with a width larger than 64.
- pub fn new(width: usize) -> Integer {
- for integer_width in [8, 16, 32, 64] {
- if width <= integer_width {
- return Integer { width: integer_width };
- }
- }
- panic!("Cannot construct Integer with width: {width}")
- }
-}
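-
-// For instance (illustrative): `Integer::new(3)` and `Integer::new(8)` both
-// yield the 8-bit type, `Integer::new(12)` yields the 16-bit type, and
-// `Integer::new(65)` panics:
-//
-//     assert_eq!(Integer::new(12).width, 16);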
-
-impl quote::ToTokens for Integer {
- fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
- let t: syn::Type = syn::parse_str(&format!("u{}", self.width))
- .expect("Could not parse integer, unsupported width?");
- t.to_tokens(tokens);
- }
-}
-
-pub fn rust_type(field: &analyzer_ast::Field) -> proc_macro2::TokenStream {
- match &field.desc {
- ast::FieldDesc::Scalar { width, .. } => {
- let field_type = Integer::new(*width);
- quote!(#field_type)
- }
- ast::FieldDesc::Typedef { type_id, .. } => {
- let field_type = format_ident!("{type_id}");
- quote!(#field_type)
- }
- ast::FieldDesc::Array { width: Some(width), size: Some(size), .. } => {
- let field_type = Integer::new(*width);
- let size = proc_macro2::Literal::usize_unsuffixed(*size);
- quote!([#field_type; #size])
- }
- ast::FieldDesc::Array { width: Some(width), size: None, .. } => {
- let field_type = Integer::new(*width);
- quote!(Vec<#field_type>)
- }
- ast::FieldDesc::Array { type_id: Some(type_id), size: Some(size), .. } => {
- let field_type = format_ident!("{type_id}");
- let size = proc_macro2::Literal::usize_unsuffixed(*size);
- quote!([#field_type; #size])
- }
- ast::FieldDesc::Array { type_id: Some(type_id), size: None, .. } => {
- let field_type = format_ident!("{type_id}");
- quote!(Vec<#field_type>)
- }
- //ast::Field::Size { .. } | ast::Field::Count { .. } => quote!(),
- _ => todo!("{field:?}"),
- }
-}
-
-pub fn rust_borrow(
- field: &analyzer_ast::Field,
- scope: &lint::Scope<'_>,
-) -> proc_macro2::TokenStream {
- match &field.desc {
- ast::FieldDesc::Scalar { .. } => quote!(),
- ast::FieldDesc::Typedef { type_id, .. } => match &scope.typedef[type_id].desc {
- ast::DeclDesc::Enum { .. } => quote!(),
- ast::DeclDesc::Struct { .. } => quote!(&),
- ast::DeclDesc::CustomField { .. } => quote!(),
- desc => unreachable!("unexpected declaration: {desc:?}"),
- },
- ast::FieldDesc::Array { .. } => quote!(&),
- _ => todo!(),
- }
-}
-
-/// Suffix for `Buf::get_*` and `BufMut::put_*` methods when reading a
-/// value with the given `width`.
-fn endianness_suffix(endianness: ast::EndiannessValue, width: usize) -> &'static str {
- if width > 8 && endianness == ast::EndiannessValue::LittleEndian {
- "_le"
- } else {
- ""
- }
-}
-
-/// Parse an unsigned integer with the given `width`.
-///
-/// The generated code requires that `span` is a mutable `bytes::Buf`
-/// value.
-pub fn get_uint(
- endianness: ast::EndiannessValue,
- width: usize,
- span: &proc_macro2::Ident,
-) -> proc_macro2::TokenStream {
- let suffix = endianness_suffix(endianness, width);
- let value_type = Integer::new(width);
- if value_type.width == width {
- let get_u = format_ident!("get_u{}{}", value_type.width, suffix);
- quote! {
- #span.get_mut().#get_u()
- }
- } else {
- let get_uint = format_ident!("get_uint{}", suffix);
- let value_nbytes = proc_macro2::Literal::usize_unsuffixed(width / 8);
- let cast = (value_type.width < 64).then(|| quote!(as #value_type));
- quote! {
- #span.get_mut().#get_uint(#value_nbytes) #cast
- }
- }
-}
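-
-// As a rough illustration (the buffer name `bytes` is made up): a 24-bit
-// little-endian read produced by `get_uint` looks like
-//
-//     bytes.get_mut().get_uint_le(3) as u32
-//
-// while an exact-width read such as 16 bits uses `bytes.get_mut().get_u16_le()`.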
-
-/// Write an unsigned integer `value` to `span`.
-///
-/// The generated code requires that `span` is a mutable
-/// `bytes::BufMut` value.
-pub fn put_uint(
- endianness: ast::EndiannessValue,
- value: &proc_macro2::TokenStream,
- width: usize,
- span: &proc_macro2::Ident,
-) -> proc_macro2::TokenStream {
- let suffix = endianness_suffix(endianness, width);
- let value_type = Integer::new(width);
- if value_type.width == width {
- let put_u = format_ident!("put_u{}{}", width, suffix);
- quote! {
- #span.#put_u(#value)
- }
- } else {
- let put_uint = format_ident!("put_uint{}", suffix);
- let value_nbytes = proc_macro2::Literal::usize_unsuffixed(width / 8);
- let cast = (value_type.width < 64).then(|| quote!(as u64));
- quote! {
- #span.#put_uint(#value #cast, #value_nbytes)
- }
- }
-}
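-
-// Correspondingly (with illustrative field and buffer names), a 24-bit
-// little-endian write produced by `put_uint` looks like
-//
-//     buffer.put_uint_le(self.foo as u64, 3)
-//
-// and an exact-width write such as 16 bits becomes `buffer.put_u16_le(self.foo)`.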
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_integer_new() {
- assert_eq!(Integer::new(0).width, 8);
- assert_eq!(Integer::new(8).width, 8);
- assert_eq!(Integer::new(9).width, 16);
- assert_eq!(Integer::new(64).width, 64);
- }
-
- #[test]
- #[should_panic]
- fn test_integer_new_panics_on_large_width() {
- Integer::new(65);
- }
-}
diff --git a/tools/pdl/src/backends/rust_no_allocation/computed_values.rs b/tools/pdl/src/backends/rust_no_allocation/computed_values.rs
deleted file mode 100644
index 37ef6550f4..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/computed_values.rs
+++ /dev/null
@@ -1,169 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use proc_macro2::{Ident, TokenStream};
-use quote::{format_ident, quote};
-
-use crate::backends::intermediate::{
- ComputedOffset, ComputedOffsetId, ComputedValue, ComputedValueId,
-};
-
-/// This trait is implemented on computed quantities (offsets and values) that can be retrieved via a function call.
-pub trait Declarable {
- fn get_name(&self) -> String;
-
- fn get_ident(&self) -> Ident {
- format_ident!("try_get_{}", self.get_name())
- }
-
- fn call_fn(&self) -> TokenStream {
- let fn_name = self.get_ident();
- quote! { self.#fn_name()? }
- }
-
- fn declare_fn(&self, body: TokenStream) -> TokenStream {
- let fn_name = self.get_ident();
- quote! {
- #[inline]
- fn #fn_name(&self) -> Result<usize, ParseError> {
- #body
- }
- }
- }
-}
-
-impl Declarable for ComputedValueId<'_> {
- fn get_name(&self) -> String {
- match self {
- ComputedValueId::FieldSize(field) => format!("{field}_size"),
- ComputedValueId::FieldElementSize(field) => format!("{field}_element_size"),
- ComputedValueId::FieldCount(field) => format!("{field}_count"),
- ComputedValueId::Custom(i) => format!("custom_value_{i}"),
- }
- }
-}
-
-impl Declarable for ComputedOffsetId<'_> {
- fn get_name(&self) -> String {
- match self {
- ComputedOffsetId::HeaderStart => "header_start_offset".to_string(),
- ComputedOffsetId::PacketEnd => "packet_end_offset".to_string(),
- ComputedOffsetId::FieldOffset(field) => format!("{field}_offset"),
- ComputedOffsetId::FieldEndOffset(field) => format!("{field}_end_offset"),
- ComputedOffsetId::Custom(i) => format!("custom_offset_{i}"),
- ComputedOffsetId::TrailerStart => "trailer_start_offset".to_string(),
- }
- }
-}
-
-/// This trait is implemented on computed expressions that are evaluated on demand (i.e. not via a function call).
-pub trait Computable {
- fn compute(&self) -> TokenStream;
-}
-
-impl Computable for ComputedValue<'_> {
- fn compute(&self) -> TokenStream {
- match self {
- ComputedValue::Constant(k) => quote! { Ok(#k) },
- ComputedValue::CountStructsUpToSize { base_id, size, struct_type } => {
- let base_offset = base_id.call_fn();
- let size = size.call_fn();
- let struct_type = format_ident!("{struct_type}View");
- quote! {
- let mut cnt = 0;
- let mut view = self.buf.offset(#base_offset)?;
- let mut remaining_size = #size;
- while remaining_size > 0 {
- let next_struct_size = #struct_type::try_parse(view)?.try_get_size()?;
- if next_struct_size > remaining_size {
- return Err(ParseError::OutOfBoundsAccess);
- }
- remaining_size -= next_struct_size;
- view = view.offset(next_struct_size * 8)?;
- cnt += 1;
- }
- Ok(cnt)
- }
- }
- ComputedValue::SizeOfNStructs { base_id, n, struct_type } => {
- let base_offset = base_id.call_fn();
- let n = n.call_fn();
- let struct_type = format_ident!("{struct_type}View");
- quote! {
- let mut view = self.buf.offset(#base_offset)?;
- let mut size = 0;
- for _ in 0..#n {
- let next_struct_size = #struct_type::try_parse(view)?.try_get_size()?;
- size += next_struct_size;
- view = view.offset(next_struct_size * 8)?;
- }
- Ok(size)
- }
- }
- ComputedValue::Product(x, y) => {
- let x = x.call_fn();
- let y = y.call_fn();
- quote! { #x.checked_mul(#y).ok_or(ParseError::ArithmeticOverflow) }
- }
- ComputedValue::Divide(x, y) => {
- let x = x.call_fn();
- let y = y.call_fn();
- quote! {
- if #y == 0 || #x % #y != 0 {
- return Err(ParseError::DivisionFailure)
- }
- Ok(#x / #y)
- }
- }
- ComputedValue::Difference(x, y) => {
- let x = x.call_fn();
- let y = y.call_fn();
- quote! {
- let bit_difference = #x.checked_sub(#y).ok_or(ParseError::ArithmeticOverflow)?;
- if bit_difference % 8 != 0 {
- return Err(ParseError::DivisionFailure);
- }
- Ok(bit_difference / 8)
- }
- }
- ComputedValue::ValueAt { offset, width } => {
- let offset = offset.call_fn();
- quote! { self.buf.offset(#offset)?.slice(#width)?.try_parse() }
- }
- }
- }
-}
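-
-// As a sketch (the field name `elements` is hypothetical): a count value
-// defined as `Divide(FieldSize("elements"), FieldElementSize("elements"))`,
-// once wrapped by `declare_fn`, expands to roughly:
-//
-//     #[inline]
-//     fn try_get_elements_count(&self) -> Result<usize, ParseError> {
-//         if self.try_get_elements_element_size()? == 0
-//             || self.try_get_elements_size()? % self.try_get_elements_element_size()? != 0 {
-//             return Err(ParseError::DivisionFailure)
-//         }
-//         Ok(self.try_get_elements_size()? / self.try_get_elements_element_size()?)
-//     }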
-
-impl Computable for ComputedOffset<'_> {
- fn compute(&self) -> TokenStream {
- match self {
- ComputedOffset::ConstantPlusOffsetInBits(base_id, offset) => {
- let base_id = base_id.call_fn();
- quote! { #base_id.checked_add_signed(#offset as isize).ok_or(ParseError::ArithmeticOverflow) }
- }
- ComputedOffset::SumWithOctets(x, y) => {
- let x = x.call_fn();
- let y = y.call_fn();
- quote! {
- #x.checked_add(#y.checked_mul(8).ok_or(ParseError::ArithmeticOverflow)?)
- .ok_or(ParseError::ArithmeticOverflow)
- }
- }
- ComputedOffset::Alias(alias) => {
- let alias = alias.call_fn();
- quote! { Ok(#alias) }
- }
- }
- }
-}
diff --git a/tools/pdl/src/backends/rust_no_allocation/enums.rs b/tools/pdl/src/backends/rust_no_allocation/enums.rs
deleted file mode 100644
index 663566a848..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/enums.rs
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use proc_macro2::{Literal, TokenStream};
-use quote::{format_ident, quote};
-
-use crate::ast;
-
-use super::utils::get_integer_type;
-
-pub fn generate_enum(id: &str, tags: &[ast::Tag], width: usize) -> TokenStream {
- let id_ident = format_ident!("{id}");
- let tag_ids = tags.iter().map(|tag| format_ident!("{}", tag.id())).collect::<Vec<_>>();
- let tag_values = tags
- .iter()
- .map(|tag| Literal::u64_unsuffixed(tag.value().unwrap() as u64))
- .collect::<Vec<_>>();
- let backing_ident = get_integer_type(width);
-
- quote! {
- #[derive(Copy, Clone, PartialEq, Eq, Debug)]
- pub enum #id_ident {
- #(#tag_ids),*
- }
-
- impl #id_ident {
- pub fn new(value: #backing_ident) -> Result<Self, ParseError> {
- match value {
- #(#tag_values => Ok(Self::#tag_ids)),*,
- _ => Err(ParseError::InvalidEnumValue),
- }
- }
-
- pub fn value(&self) -> #backing_ident {
- match self {
- #(Self::#tag_ids => #tag_values),*,
- }
- }
-
- fn try_parse(buf: BitSlice) -> Result<Self, ParseError> {
- let value = buf.slice(#width)?.try_parse()?;
- match value {
- #(#tag_values => Ok(Self::#tag_ids)),*,
- _ => Err(ParseError::InvalidEnumValue),
- }
- }
- }
-
- impl Serializable for #id_ident {
- fn serialize(&self, writer: &mut impl BitWriter) -> Result<(), SerializeError> {
- writer.write_bits(#width, || Ok(self.value()));
- Ok(())
- }
- }
-
- impl From<#id_ident> for #backing_ident {
- fn from(x: #id_ident) -> #backing_ident {
- x.value()
- }
- }
-
- impl TryFrom<#backing_ident> for #id_ident {
- type Error = ParseError;
-
- fn try_from(value: #backing_ident) -> Result<Self, ParseError> {
- Self::new(value)
- }
- }
- }
-}
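-
-// For a hypothetical declaration `enum Color : 8 { Red = 1, Blue = 2 }`
-// (assuming an 8-bit enum is backed by `u8`), the generated code begins
-// roughly like this, with `value()`, `try_parse()`, and the conversion
-// impls following the same pattern:
-//
-//     #[derive(Copy, Clone, PartialEq, Eq, Debug)]
-//     pub enum Color {
-//         Red,
-//         Blue
-//     }
-//
-//     impl Color {
-//         pub fn new(value: u8) -> Result<Self, ParseError> {
-//             match value {
-//                 1 => Ok(Self::Red),
-//                 2 => Ok(Self::Blue),
-//                 _ => Err(ParseError::InvalidEnumValue),
-//             }
-//         }
-//     }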
diff --git a/tools/pdl/src/backends/rust_no_allocation/mod.rs b/tools/pdl/src/backends/rust_no_allocation/mod.rs
deleted file mode 100644
index 858526f529..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/mod.rs
+++ /dev/null
@@ -1,117 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Rust no-allocation backend
-//!
-//! The motivation for this backend is to be more "idiomatic" than the
-//! existing Rust backend. Specifically, it should
-//! 1. Use lifetimes, not reference counting
-//! 2. Avoid expensive memory copies unless needed
-//! 3. Use the intermediate Schema rather than doing all the logic from scratch
-//!
-//! One notable consequence is that we avoid .specialize(), as it has "magic" behavior
-//! not defined in the spec. Instead we mimic the C++ approach of calling tryParse() and
-//! getting a Result<> back.
-
-mod computed_values;
-mod enums;
-mod packet_parser;
-mod packet_serializer;
-pub mod test;
-mod utils;
-
-use std::collections::HashMap;
-
-use proc_macro2::TokenStream;
-use quote::quote;
-
-use crate::ast;
-use crate::parser;
-
-use self::{
- enums::generate_enum, packet_parser::generate_packet,
- packet_serializer::generate_packet_serializer,
-};
-
-use super::intermediate::Schema;
-
-pub fn generate(file: &parser::ast::File, schema: &Schema) -> Result<String, String> {
- match file.endianness.value {
- ast::EndiannessValue::LittleEndian => {}
- _ => unimplemented!("Only little_endian endianness supported"),
- };
-
- let mut out = String::new();
-
- out.push_str(include_str!("preamble.rs"));
-
- let mut children = HashMap::<&str, Vec<&str>>::new();
- for decl in &file.declarations {
- match &decl.desc {
- ast::DeclDesc::Packet { id, parent_id: Some(parent_id), .. }
- | ast::DeclDesc::Struct { id, parent_id: Some(parent_id), .. } => {
- children.entry(parent_id.as_str()).or_default().push(id.as_str());
- }
- _ => {}
- }
- }
-
- let declarations = file
- .declarations
- .iter()
- .map(|decl| generate_decl(decl, schema, &children))
- .collect::<Result<TokenStream, _>>()?;
-
- out.push_str(
- &quote! {
- #declarations
- }
- .to_string(),
- );
-
- Ok(out)
-}
-
-fn generate_decl(
- decl: &parser::ast::Decl,
- schema: &Schema,
- children: &HashMap<&str, Vec<&str>>,
-) -> Result<TokenStream, String> {
- match &decl.desc {
- ast::DeclDesc::Enum { id, tags, width, .. } => Ok(generate_enum(id, tags, *width)),
- ast::DeclDesc::Packet { id, fields, parent_id, .. }
- | ast::DeclDesc::Struct { id, fields, parent_id, .. } => {
- let parser = generate_packet(
- id,
- fields,
- parent_id.as_deref(),
- schema,
- &schema.packets_and_structs[id.as_str()],
- )?;
- let serializer = generate_packet_serializer(
- id,
- parent_id.as_deref(),
- fields,
- schema,
- &schema.packets_and_structs[id.as_str()],
- children,
- );
- Ok(quote! {
- #parser
- #serializer
- })
- }
- _ => unimplemented!("Unsupported decl type"),
- }
-}
diff --git a/tools/pdl/src/backends/rust_no_allocation/packet_parser.rs b/tools/pdl/src/backends/rust_no_allocation/packet_parser.rs
deleted file mode 100644
index 44342fb80f..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/packet_parser.rs
+++ /dev/null
@@ -1,363 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::iter::empty;
-
-use proc_macro2::TokenStream;
-use quote::{format_ident, quote};
-
-use crate::ast;
-use crate::parser;
-
-use crate::backends::intermediate::{
- ComputedOffsetId, ComputedValueId, PacketOrStruct, PacketOrStructLength, Schema,
-};
-
-use super::computed_values::{Computable, Declarable};
-use super::utils::get_integer_type;
-
-pub fn generate_packet(
- id: &str,
- fields: &[parser::ast::Field],
- parent_id: Option<&str>,
- schema: &Schema,
- curr_schema: &PacketOrStruct,
-) -> Result<TokenStream, String> {
- let id_ident = format_ident!("{id}View");
-
- let needs_external = matches!(curr_schema.length, PacketOrStructLength::NeedsExternal);
-
- let length_getter = if needs_external {
- ComputedOffsetId::PacketEnd.declare_fn(quote! { Ok(self.buf.get_size_in_bits()) })
- } else {
- quote! {}
- };
-
- let computed_getters = empty()
- .chain(
- curr_schema.computed_offsets.iter().map(|(decl, defn)| decl.declare_fn(defn.compute())),
- )
- .chain(
- curr_schema.computed_values.iter().map(|(decl, defn)| decl.declare_fn(defn.compute())),
- );
-
- let field_getters = fields.iter().map(|field| {
- match &field.desc {
- ast::FieldDesc::Padding { .. }
- | ast::FieldDesc::Reserved { .. }
- | ast::FieldDesc::FixedScalar { .. }
- | ast::FieldDesc::FixedEnum { .. }
- | ast::FieldDesc::ElementSize { .. }
- | ast::FieldDesc::Count { .. }
- | ast::FieldDesc::Size { .. } => {
- // no-op, no getter generated for this type
- quote! {}
- }
- ast::FieldDesc::Group { .. } => unreachable!(),
- ast::FieldDesc::Checksum { .. } => {
- unimplemented!("checksums not yet supported with this backend")
- }
- ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body => {
- let name = if matches!(field.desc, ast::FieldDesc::Payload { .. }) { "_payload_"} else { "_body_"};
- let payload_start_offset = ComputedOffsetId::FieldOffset(name).call_fn();
- let payload_end_offset = ComputedOffsetId::FieldEndOffset(name).call_fn();
- quote! {
- fn try_get_payload(&self) -> Result<SizedBitSlice<'a>, ParseError> {
- let payload_start_offset = #payload_start_offset;
- let payload_end_offset = #payload_end_offset;
- self.buf.offset(payload_start_offset)?.slice(payload_end_offset - payload_start_offset)
- }
-
- fn try_get_raw_payload(&self) -> Result<impl Iterator<Item = Result<u8, ParseError>> + '_, ParseError> {
- let view = self.try_get_payload()?;
- let count = (view.get_size_in_bits() + 7) / 8;
- Ok((0..count).map(move |i| Ok(view.offset(i*8)?.slice(8.min(view.get_size_in_bits() - i*8))?.try_parse()?)))
- }
-
- pub fn get_raw_payload(&self) -> impl Iterator<Item = u8> + '_ {
- self.try_get_raw_payload().unwrap().map(|x| x.unwrap())
- }
- }
- }
- ast::FieldDesc::Array { id, width, type_id, .. } => {
- let (elem_type, return_type) = if let Some(width) = width {
- let ident = get_integer_type(*width);
- (ident.clone(), quote!{ #ident })
- } else if let Some(type_id) = type_id {
- if schema.enums.contains_key(type_id.as_str()) {
- let ident = format_ident!("{}", type_id);
- (ident.clone(), quote! { #ident })
- } else {
- let ident = format_ident!("{}View", type_id);
- (ident.clone(), quote! { #ident<'a> })
- }
- } else {
- unreachable!()
- };
-
- let try_getter_name = format_ident!("try_get_{id}_iter");
- let getter_name = format_ident!("get_{id}_iter");
-
- let start_offset = ComputedOffsetId::FieldOffset(id).call_fn();
- let count = ComputedValueId::FieldCount(id).call_fn();
-
- let element_size_known = curr_schema
- .computed_values
- .contains_key(&ComputedValueId::FieldElementSize(id));
-
- let body = if element_size_known {
- let element_size = ComputedValueId::FieldElementSize(id).call_fn();
- let parsed_curr_view = if width.is_some() {
- quote! { curr_view.try_parse() }
- } else {
- quote! { #elem_type::try_parse(curr_view.into()) }
- };
- quote! {
- let view = self.buf.offset(#start_offset)?;
- let count = #count;
- let element_size = #element_size;
- Ok((0..count).map(move |i| {
- let curr_view = view.offset(element_size.checked_mul(i * 8).ok_or(ParseError::ArithmeticOverflow)?)?
- .slice(element_size.checked_mul(8).ok_or(ParseError::ArithmeticOverflow)?)?;
- #parsed_curr_view
- }))
- }
- } else {
- quote! {
- let mut view = self.buf.offset(#start_offset)?;
- let count = #count;
- Ok((0..count).map(move |i| {
- let parsed = #elem_type::try_parse(view.into())?;
- view = view.offset(parsed.try_get_size()? * 8)?;
- Ok(parsed)
- }))
- }
- };
-
- quote! {
- fn #try_getter_name(&self) -> Result<impl Iterator<Item = Result<#return_type, ParseError>> + 'a, ParseError> {
- #body
- }
-
- #[inline]
- pub fn #getter_name(&self) -> impl Iterator<Item = #return_type> + 'a {
- self.#try_getter_name().unwrap().map(|x| x.unwrap())
- }
- }
- }
- ast::FieldDesc::Scalar { id, width } => {
- let try_getter_name = format_ident!("try_get_{id}");
- let getter_name = format_ident!("get_{id}");
- let offset = ComputedOffsetId::FieldOffset(id).call_fn();
- let scalar_type = get_integer_type(*width);
- quote! {
- fn #try_getter_name(&self) -> Result<#scalar_type, ParseError> {
- self.buf.offset(#offset)?.slice(#width)?.try_parse()
- }
-
- #[inline]
- pub fn #getter_name(&self) -> #scalar_type {
- self.#try_getter_name().unwrap()
- }
- }
- }
- ast::FieldDesc::Typedef { id, type_id } => {
- let try_getter_name = format_ident!("try_get_{id}");
- let getter_name = format_ident!("get_{id}");
-
- let (type_ident, return_type) = if schema.enums.contains_key(type_id.as_str()) {
- let ident = format_ident!("{type_id}");
- (ident.clone(), quote! { #ident })
- } else {
- let ident = format_ident!("{}View", type_id);
- (ident.clone(), quote! { #ident<'a> })
- };
- let offset = ComputedOffsetId::FieldOffset(id).call_fn();
- let end_offset_known = curr_schema
- .computed_offsets
- .contains_key(&ComputedOffsetId::FieldEndOffset(id));
- let sliced_view = if end_offset_known {
- let end_offset = ComputedOffsetId::FieldEndOffset(id).call_fn();
- quote! { self.buf.offset(#offset)?.slice(#end_offset.checked_sub(#offset).ok_or(ParseError::ArithmeticOverflow)?)? }
- } else {
- quote! { self.buf.offset(#offset)? }
- };
-
- quote! {
- fn #try_getter_name(&self) -> Result<#return_type, ParseError> {
- #type_ident::try_parse(#sliced_view.into())
- }
-
- #[inline]
- pub fn #getter_name(&self) -> #return_type {
- self.#try_getter_name().unwrap()
- }
- }
- }
- }
- });
-
- let backing_buffer = if needs_external {
- quote! { SizedBitSlice<'a> }
- } else {
- quote! { BitSlice<'a> }
- };
-
- let parent_ident = match parent_id {
- Some(parent) => format_ident!("{parent}View"),
- None => match curr_schema.length {
- PacketOrStructLength::Static(_) => format_ident!("BitSlice"),
- PacketOrStructLength::Dynamic => format_ident!("BitSlice"),
- PacketOrStructLength::NeedsExternal => format_ident!("SizedBitSlice"),
- },
- };
-
- let buffer_extractor = if parent_id.is_some() {
- quote! { parent.try_get_payload().unwrap().into() }
- } else {
- quote! { parent }
- };
-
- let field_validators = fields.iter().map(|field| match &field.desc {
- ast::FieldDesc::Checksum { .. } => unimplemented!(),
- ast::FieldDesc::Group { .. } => unreachable!(),
- ast::FieldDesc::Padding { .. }
- | ast::FieldDesc::Size { .. }
- | ast::FieldDesc::Count { .. }
- | ast::FieldDesc::ElementSize { .. }
- | ast::FieldDesc::Body
- | ast::FieldDesc::FixedScalar { .. }
- | ast::FieldDesc::FixedEnum { .. }
- | ast::FieldDesc::Reserved { .. } => {
- quote! {}
- }
- ast::FieldDesc::Payload { .. } => {
- quote! {
- self.try_get_payload()?;
- self.try_get_raw_payload()?;
- }
- }
- ast::FieldDesc::Array { id, .. } => {
- let iter_ident = format_ident!("try_get_{id}_iter");
- quote! {
- for elem in self.#iter_ident()? {
- elem?;
- }
- }
- }
- ast::FieldDesc::Scalar { id, .. } | ast::FieldDesc::Typedef { id, .. } => {
- let getter_ident = format_ident!("try_get_{id}");
- quote! { self.#getter_ident()?; }
- }
- });
-
- let packet_end_offset = ComputedOffsetId::PacketEnd.call_fn();
-
- let owned_id_ident = format_ident!("Owned{id_ident}");
- let builder_ident = format_ident!("{id}Builder");
-
- Ok(quote! {
- #[derive(Clone, Copy, Debug)]
- pub struct #id_ident<'a> {
- buf: #backing_buffer,
- }
-
- impl<'a> #id_ident<'a> {
- #length_getter
-
- #(#computed_getters)*
-
- #(#field_getters)*
-
- #[inline]
- fn try_get_header_start_offset(&self) -> Result<usize, ParseError> {
- Ok(0)
- }
-
- #[inline]
- fn try_get_size(&self) -> Result<usize, ParseError> {
- let size = #packet_end_offset;
- if size % 8 != 0 {
- return Err(ParseError::MisalignedPayload);
- }
- Ok(size / 8)
- }
-
- fn validate(&self) -> Result<(), ParseError> {
- #(#field_validators)*
- Ok(())
- }
- }
-
- impl<'a> Packet<'a> for #id_ident<'a> {
- type Parent = #parent_ident<'a>;
- type Owned = #owned_id_ident;
- type Builder = #builder_ident;
-
- fn try_parse_from_buffer(buf: impl Into<SizedBitSlice<'a>>) -> Result<Self, ParseError> {
- let out = Self { buf: buf.into().into() };
- out.validate()?;
- Ok(out)
- }
-
- fn try_parse(parent: #parent_ident<'a>) -> Result<Self, ParseError> {
- let out = Self { buf: #buffer_extractor };
- out.validate()?;
- Ok(out)
- }
-
- fn to_owned_packet(&self) -> #owned_id_ident {
- #owned_id_ident {
- buf: self.buf.backing.to_owned().into(),
- start_bit_offset: self.buf.start_bit_offset,
- end_bit_offset: self.buf.end_bit_offset,
- }
- }
- }
-
- #[derive(Debug)]
- pub struct #owned_id_ident {
- buf: Box<[u8]>,
- start_bit_offset: usize,
- end_bit_offset: usize,
- }
-
- impl OwnedPacket for #owned_id_ident {
- fn try_parse(buf: Box<[u8]>) -> Result<Self, ParseError> {
- #id_ident::try_parse_from_buffer(&buf[..])?;
- let end_bit_offset = buf.len() * 8;
- Ok(Self { buf, start_bit_offset: 0, end_bit_offset })
- }
- }
-
- impl #owned_id_ident {
- pub fn view<'a>(&'a self) -> #id_ident<'a> {
- #id_ident {
- buf: SizedBitSlice(BitSlice {
- backing: &self.buf[..],
- start_bit_offset: self.start_bit_offset,
- end_bit_offset: self.end_bit_offset,
- })
- .into(),
- }
- }
- }
-
- impl<'a> From<&'a #owned_id_ident> for #id_ident<'a> {
- fn from(x: &'a #owned_id_ident) -> Self {
- x.view()
- }
- }
- })
-}
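-
-// As an illustration (hypothetical packet `Foo` with a scalar field `a: 3`),
-// the per-field getters emitted above take this shape:
-//
-//     fn try_get_a(&self) -> Result<u8, ParseError> {
-//         self.buf.offset(self.try_get_a_offset()?)?.slice(3)?.try_parse()
-//     }
-//
-//     #[inline]
-//     pub fn get_a(&self) -> u8 {
-//         self.try_get_a().unwrap()
-//     }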
diff --git a/tools/pdl/src/backends/rust_no_allocation/packet_serializer.rs b/tools/pdl/src/backends/rust_no_allocation/packet_serializer.rs
deleted file mode 100644
index 9ecae381d3..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/packet_serializer.rs
+++ /dev/null
@@ -1,315 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::collections::HashMap;
-
-use proc_macro2::TokenStream;
-use quote::{format_ident, quote};
-
-use crate::{
- ast,
- backends::{
- intermediate::{ComputedValue, ComputedValueId, PacketOrStruct, Schema},
- rust_no_allocation::utils::get_integer_type,
- },
- parser,
-};
-
-fn standardize_child(id: &str) -> &str {
- match id {
- "_body_" | "_payload_" => "_child_",
- _ => id,
- }
-}
-
-pub fn generate_packet_serializer(
- id: &str,
- parent_id: Option<&str>,
- fields: &[parser::ast::Field],
- schema: &Schema,
- curr_schema: &PacketOrStruct,
- children: &HashMap<&str, Vec<&str>>,
-) -> TokenStream {
- let id_ident = format_ident!("{id}Builder");
-
- let builder_fields = fields
- .iter()
- .filter_map(|field| {
- match &field.desc {
- ast::FieldDesc::Padding { .. }
- | ast::FieldDesc::Reserved { .. }
- | ast::FieldDesc::FixedScalar { .. }
- | ast::FieldDesc::FixedEnum { .. }
- | ast::FieldDesc::ElementSize { .. }
- | ast::FieldDesc::Count { .. }
- | ast::FieldDesc::Size { .. } => {
- // no-op, no getter generated for this type
- None
- }
- ast::FieldDesc::Group { .. } => unreachable!(),
- ast::FieldDesc::Checksum { .. } => {
- unimplemented!("checksums not yet supported with this backend")
- }
- ast::FieldDesc::Body | ast::FieldDesc::Payload { .. } => {
- let type_ident = format_ident!("{id}Child");
- Some(("_child_", quote! { #type_ident }))
- }
- ast::FieldDesc::Array { id, width, type_id, .. } => {
- let element_type = if let Some(width) = width {
- get_integer_type(*width)
- } else if let Some(type_id) = type_id {
- if schema.enums.contains_key(type_id.as_str()) {
- format_ident!("{type_id}")
- } else {
- format_ident!("{type_id}Builder")
- }
- } else {
- unreachable!();
- };
- Some((id.as_str(), quote! { Box<[#element_type]> }))
- }
- ast::FieldDesc::Scalar { id, width } => {
- let id_type = get_integer_type(*width);
- Some((id.as_str(), quote! { #id_type }))
- }
- ast::FieldDesc::Typedef { id, type_id } => {
- let type_ident = if schema.enums.contains_key(type_id.as_str()) {
- format_ident!("{type_id}")
- } else {
- format_ident!("{type_id}Builder")
- };
- Some((id.as_str(), quote! { #type_ident }))
- }
- }
- })
- .map(|(id, typ)| {
- let id_ident = format_ident!("{id}");
- quote! { pub #id_ident: #typ }
- });
-
- let mut has_child = false;
-
- let serializer = fields.iter().map(|field| {
- match &field.desc {
- ast::FieldDesc::Checksum { .. } | ast::FieldDesc::Group { .. } => unimplemented!(),
- ast::FieldDesc::Padding { size, .. } => {
- quote! {
- if (most_recent_array_size_in_bits > #size * 8) {
- return Err(SerializeError::NegativePadding);
- }
- writer.write_bits((#size * 8 - most_recent_array_size_in_bits) as usize, || Ok(0u64))?;
- }
- },
- ast::FieldDesc::Size { field_id, width } => {
- let field_id = standardize_child(field_id);
- let field_ident = format_ident!("{field_id}");
-
- // if the element-size is fixed, we can directly multiply
- if let Some(ComputedValue::Constant(element_width)) = curr_schema.computed_values.get(&ComputedValueId::FieldElementSize(field_id)) {
- return quote! {
- writer.write_bits(
- #width,
- || u64::try_from(self.#field_ident.len() * #element_width).or(Err(SerializeError::IntegerConversionFailure))
- )?;
- }
- }
-
- // if the field is "countable", loop over it to sum up the size
- if curr_schema.computed_values.contains_key(&ComputedValueId::FieldCount(field_id)) {
- return quote! {
- writer.write_bits(#width, || {
- let size_in_bits = self.#field_ident.iter().map(|elem| elem.size_in_bits()).fold(Ok(0), |total, next| {
- let total: u64 = total?;
- let next = u64::try_from(next?).or(Err(SerializeError::IntegerConversionFailure))?;
- total.checked_add(next).ok_or(SerializeError::IntegerConversionFailure)
- })?;
- if size_in_bits % 8 != 0 {
- return Err(SerializeError::AlignmentError);
- }
- Ok(size_in_bits / 8)
- })?;
- }
- }
-
- // otherwise, try to get the size directly
- quote! {
- writer.write_bits(#width, || {
- let size_in_bits: u64 = self.#field_ident.size_in_bits()?.try_into().or(Err(SerializeError::IntegerConversionFailure))?;
- if size_in_bits % 8 != 0 {
- return Err(SerializeError::AlignmentError);
- }
- Ok(size_in_bits / 8)
- })?;
- }
- }
- ast::FieldDesc::Count { field_id, width } => {
- let field_ident = format_ident!("{field_id}");
- quote! { writer.write_bits(#width, || u64::try_from(self.#field_ident.len()).or(Err(SerializeError::IntegerConversionFailure)))?; }
- }
- ast::FieldDesc::ElementSize { field_id, width } => {
- // TODO(aryarahul) - add validation for the element size against all the other elements
- let field_ident = format_ident!("{field_id}");
- quote! {
- let get_element_size = || Ok(if let Some(field) = self.#field_ident.get(0) {
- let size_in_bits = field.size_in_bits()?;
- if size_in_bits % 8 == 0 {
- (size_in_bits / 8) as u64
- } else {
- return Err(SerializeError::AlignmentError);
- }
- } else {
- 0
- });
- writer.write_bits(#width, || get_element_size() )?;
- }
- }
- ast::FieldDesc::Reserved { width, .. } => {
- quote!{ writer.write_bits(#width, || Ok(0u64))?; }
- }
- ast::FieldDesc::Scalar { width, id } => {
- let field_ident = format_ident!("{id}");
- quote! { writer.write_bits(#width, || Ok(self.#field_ident))?; }
- }
- ast::FieldDesc::FixedScalar { width, value } => {
- let width = quote! { #width };
- let value = {
- let value = *value as u64;
- quote! { #value }
- };
- quote!{ writer.write_bits(#width, || Ok(#value))?; }
- }
- ast::FieldDesc::FixedEnum { enum_id, tag_id } => {
- let width = {
- let width = schema.enums[enum_id.as_str()].width;
- quote! { #width }
- };
- let value = {
- let enum_ident = format_ident!("{}", enum_id);
- let tag_ident = format_ident!("{tag_id}");
- quote! { #enum_ident::#tag_ident.value() }
- };
- quote!{ writer.write_bits(#width, || Ok(#value))?; }
- }
- ast::FieldDesc::Body | ast::FieldDesc::Payload { .. } => {
- has_child = true;
- quote! { self._child_.serialize(writer)?; }
- }
- ast::FieldDesc::Array { width, id, .. } => {
- let id_ident = format_ident!("{id}");
- if let Some(width) = width {
- quote! {
- for elem in self.#id_ident.iter() {
- writer.write_bits(#width, || Ok(*elem))?;
- }
- let most_recent_array_size_in_bits = #width * self.#id_ident.len();
- }
- } else {
- quote! {
- let mut most_recent_array_size_in_bits = 0;
- for elem in self.#id_ident.iter() {
- most_recent_array_size_in_bits += elem.size_in_bits()?;
- elem.serialize(writer)?;
- }
- }
- }
- }
- ast::FieldDesc::Typedef { id, .. } => {
- let id_ident = format_ident!("{id}");
- quote! { self.#id_ident.serialize(writer)?; }
- }
- }
- }).collect::<Vec<_>>();
-
- let variant_names = children.get(id).into_iter().flatten().collect::<Vec<_>>();
-
- let variants = variant_names.iter().map(|name| {
- let name_ident = format_ident!("{name}");
- let variant_ident = format_ident!("{name}Builder");
- quote! { #name_ident(#variant_ident) }
- });
-
- let variant_serializers = variant_names.iter().map(|name| {
- let name_ident = format_ident!("{name}");
- quote! {
- Self::#name_ident(x) => {
- x.serialize(writer)?;
- }
- }
- });
-
- let children_enum = if has_child {
- let enum_ident = format_ident!("{id}Child");
- quote! {
- #[derive(Debug, Clone, PartialEq, Eq)]
- pub enum #enum_ident {
- RawData(Box<[u8]>),
- #(#variants),*
- }
-
- impl Serializable for #enum_ident {
- fn serialize(&self, writer: &mut impl BitWriter) -> Result<(), SerializeError> {
- match self {
- Self::RawData(data) => {
- for byte in data.iter() {
- writer.write_bits(8, || Ok(*byte as u64))?;
- }
- },
- #(#variant_serializers),*
- }
- Ok(())
- }
- }
- }
- } else {
- quote! {}
- };
-
- let parent_type_converter = if let Some(parent_id) = parent_id {
- let parent_enum_ident = format_ident!("{parent_id}Child");
- let variant_ident = format_ident!("{id}");
- Some(quote! {
- impl From<#id_ident> for #parent_enum_ident {
- fn from(x: #id_ident) -> Self {
- Self::#variant_ident(x)
- }
- }
- })
- } else {
- None
- };
-
- let owned_packet_ident = format_ident!("Owned{id}View");
-
- quote! {
- #[derive(Debug, Clone, PartialEq, Eq)]
- pub struct #id_ident {
- #(#builder_fields),*
- }
-
- impl Builder for #id_ident {
- type OwnedPacket = #owned_packet_ident;
- }
-
- impl Serializable for #id_ident {
- fn serialize(&self, writer: &mut impl BitWriter) -> Result<(), SerializeError> {
- #(#serializer)*
- Ok(())
- }
- }
-
- #parent_type_converter
-
- #children_enum
- }
-}
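-
-// Illustrative usage of a generated builder (hypothetical packet
-// `Foo { a: 8, _payload_ }`; the expected bytes follow from the
-// little-endian bit writer):
-//
-//     let bytes = FooBuilder {
-//         a: 0x2a,
-//         _child_: FooChild::RawData(vec![1, 2, 3].into()),
-//     }
-//     .to_vec()
-//     .unwrap();
-//     assert_eq!(bytes, vec![0x2a, 1, 2, 3]);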
diff --git a/tools/pdl/src/backends/rust_no_allocation/preamble.rs b/tools/pdl/src/backends/rust_no_allocation/preamble.rs
deleted file mode 100644
index 30f8486b20..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/preamble.rs
+++ /dev/null
@@ -1,294 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::convert::TryFrom;
-use std::convert::TryInto;
-use std::ops::Deref;
-
-#[derive(Debug)]
-pub enum ParseError {
- InvalidEnumValue,
- DivisionFailure,
- ArithmeticOverflow,
- OutOfBoundsAccess,
- MisalignedPayload,
-}
-
-#[derive(Clone, Copy, Debug)]
-pub struct BitSlice<'a> {
- // note: the offsets are ENTIRELY UNRELATED to the size of this struct,
- // so indexing needs to be checked to avoid panics
- backing: &'a [u8],
-
- // invariant: end_bit_offset >= start_bit_offset, so subtraction will NEVER wrap
- start_bit_offset: usize,
- end_bit_offset: usize,
-}
-
-#[derive(Clone, Copy, Debug)]
-pub struct SizedBitSlice<'a>(BitSlice<'a>);
-
-impl<'a> BitSlice<'a> {
- pub fn offset(&self, offset: usize) -> Result<BitSlice<'a>, ParseError> {
- if self.end_bit_offset - self.start_bit_offset < offset {
- return Err(ParseError::OutOfBoundsAccess);
- }
- Ok(Self {
- backing: self.backing,
- start_bit_offset: self
- .start_bit_offset
- .checked_add(offset)
- .ok_or(ParseError::ArithmeticOverflow)?,
- end_bit_offset: self.end_bit_offset,
- })
- }
-
- pub fn slice(&self, len: usize) -> Result<SizedBitSlice<'a>, ParseError> {
- if self.end_bit_offset - self.start_bit_offset < len {
- return Err(ParseError::OutOfBoundsAccess);
- }
- Ok(SizedBitSlice(Self {
- backing: self.backing,
- start_bit_offset: self.start_bit_offset,
- end_bit_offset: self
- .start_bit_offset
- .checked_add(len)
- .ok_or(ParseError::ArithmeticOverflow)?,
- }))
- }
-
- fn byte_at(&self, index: usize) -> Result<u8, ParseError> {
- self.backing.get(index).ok_or(ParseError::OutOfBoundsAccess).copied()
- }
-}
-
-impl<'a> Deref for SizedBitSlice<'a> {
- type Target = BitSlice<'a>;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-
-impl<'a> From<SizedBitSlice<'a>> for BitSlice<'a> {
- fn from(x: SizedBitSlice<'a>) -> Self {
- *x
- }
-}
-
-impl<'a, 'b> From<&'b [u8]> for SizedBitSlice<'a>
-where
- 'b: 'a,
-{
- fn from(backing: &'a [u8]) -> Self {
- Self(BitSlice { backing, start_bit_offset: 0, end_bit_offset: backing.len() * 8 })
- }
-}
-
-impl<'a> SizedBitSlice<'a> {
- pub fn try_parse<T: TryFrom<u64>>(&self) -> Result<T, ParseError> {
- if self.end_bit_offset < self.start_bit_offset {
- return Err(ParseError::OutOfBoundsAccess);
- }
- let size_in_bits = self.end_bit_offset - self.start_bit_offset;
-
- // fields that fit into a u64 don't need to be byte-aligned
- if size_in_bits <= 64 {
- let mut accumulator = 0u64;
-
- // where we are in our accumulation
- let mut curr_byte_index = self.start_bit_offset / 8;
- let mut curr_bit_offset = self.start_bit_offset % 8;
- let mut remaining_bits = size_in_bits;
-
- while remaining_bits > 0 {
- // how many bits to take from the current byte?
- // check if this is the last byte
- if curr_bit_offset + remaining_bits <= 8 {
- let tmp = ((self.byte_at(curr_byte_index)? >> curr_bit_offset) as u64)
- & ((1u64 << remaining_bits) - 1);
- accumulator += tmp << (size_in_bits - remaining_bits);
- break;
- } else {
- // this is not the last byte, so we have 8 - curr_bit_offset bits to
- // consume in this byte
- let bits_to_consume = 8 - curr_bit_offset;
- let tmp = (self.byte_at(curr_byte_index)? >> curr_bit_offset) as u64;
- accumulator += tmp << (size_in_bits - remaining_bits);
- curr_bit_offset = 0;
- curr_byte_index += 1;
- remaining_bits -= bits_to_consume as usize;
- }
- }
- T::try_from(accumulator).map_err(|_| ParseError::ArithmeticOverflow)
- } else {
- return Err(ParseError::MisalignedPayload);
- }
- }
-
- pub fn get_size_in_bits(&self) -> usize {
- self.end_bit_offset - self.start_bit_offset
- }
-}
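-
-// A minimal usage sketch of the slicing and parsing primitives above
-// (values chosen to show the LSB-first bit order within each byte):
-//
-//     let bytes = [0xab_u8, 0xcd];
-//     let view: SizedBitSlice = (&bytes[..]).into();
-//     let low: u8 = view.slice(4).unwrap().try_parse().unwrap();                      // 0x0b
-//     let high: u8 = view.offset(4).unwrap().slice(4).unwrap().try_parse().unwrap();  // 0x0a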
-
-pub trait Packet<'a>
-where
- Self: Sized,
-{
- type Parent;
- type Owned;
- type Builder;
- fn try_parse_from_buffer(buf: impl Into<SizedBitSlice<'a>>) -> Result<Self, ParseError>;
- fn try_parse(parent: Self::Parent) -> Result<Self, ParseError>;
- fn to_owned_packet(&self) -> Self::Owned;
-}
-
-pub trait OwnedPacket
-where
- Self: Sized,
-{
- // Enable GAT when 1.65 is available in AOSP
- // type View<'a> where Self : 'a;
- fn try_parse(buf: Box<[u8]>) -> Result<Self, ParseError>;
- // fn view<'a>(&'a self) -> Self::View<'a>;
-}
-
-pub trait Builder: Serializable {
- type OwnedPacket: OwnedPacket;
-}
-
-#[derive(Debug)]
-pub enum SerializeError {
- NegativePadding,
- IntegerConversionFailure,
- ValueTooLarge,
- AlignmentError,
-}
-
-pub trait BitWriter {
- fn write_bits<T: Into<u64>>(
- &mut self,
- num_bits: usize,
- gen_contents: impl FnOnce() -> Result<T, SerializeError>,
- ) -> Result<(), SerializeError>;
-}
-
-pub trait Serializable {
- fn serialize(&self, writer: &mut impl BitWriter) -> Result<(), SerializeError>;
-
- fn size_in_bits(&self) -> Result<usize, SerializeError> {
- let mut sizer = Sizer::new();
- self.serialize(&mut sizer)?;
- Ok(sizer.size())
- }
-
- fn write(&self, vec: &mut Vec<u8>) -> Result<(), SerializeError> {
- let mut serializer = Serializer::new(vec);
- self.serialize(&mut serializer)?;
- serializer.flush();
- Ok(())
- }
-
- fn to_vec(&self) -> Result<Vec<u8>, SerializeError> {
- let mut out = vec![];
- self.write(&mut out)?;
- Ok(out)
- }
-}
-
-struct Sizer {
- size: usize,
-}
-
-impl Sizer {
- fn new() -> Self {
- Self { size: 0 }
- }
-
- fn size(self) -> usize {
- self.size
- }
-}
-
-impl BitWriter for Sizer {
- fn write_bits<T: Into<u64>>(
- &mut self,
- num_bits: usize,
- gen_contents: impl FnOnce() -> Result<T, SerializeError>,
- ) -> Result<(), SerializeError> {
- self.size += num_bits;
- Ok(())
- }
-}
-
-struct Serializer<'a> {
- buf: &'a mut Vec<u8>,
- curr_byte: u8,
- curr_bit_offset: u8,
-}
-
-impl<'a> Serializer<'a> {
- fn new(buf: &'a mut Vec<u8>) -> Self {
- Self { buf, curr_byte: 0, curr_bit_offset: 0 }
- }
-
- fn flush(self) {
- if self.curr_bit_offset > 0 {
- // partial byte remaining
- self.buf.push(self.curr_byte << (8 - self.curr_bit_offset));
- }
- }
-}
-
-impl<'a> BitWriter for Serializer<'a> {
- fn write_bits<T: Into<u64>>(
- &mut self,
- num_bits: usize,
- gen_contents: impl FnOnce() -> Result<T, SerializeError>,
- ) -> Result<(), SerializeError> {
- let val = gen_contents()?.into();
-
- if num_bits < 64 && val >= 1 << num_bits {
- return Err(SerializeError::ValueTooLarge);
- }
-
- let mut remaining_val = val;
- let mut remaining_bits = num_bits;
- while remaining_bits > 0 {
- let remaining_bits_in_curr_byte = (8 - self.curr_bit_offset) as usize;
- if remaining_bits < remaining_bits_in_curr_byte {
- // not enough bits left to fill the current byte
- self.curr_byte += (remaining_val as u8) << self.curr_bit_offset;
- self.curr_bit_offset += remaining_bits as u8;
- break;
- } else {
- // finish up our current byte and move on
- let val_for_this_byte =
- (remaining_val & ((1 << remaining_bits_in_curr_byte) - 1)) as u8;
- let curr_byte = self.curr_byte + (val_for_this_byte << self.curr_bit_offset);
- self.buf.push(curr_byte);
-
- // clear pending byte
- self.curr_bit_offset = 0;
- self.curr_byte = 0;
-
- // update what's remaining
- remaining_val >>= remaining_bits_in_curr_byte;
- remaining_bits -= remaining_bits_in_curr_byte;
- }
- }
-
- Ok(())
- }
-}
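The runtime above packs fields LSB-first within each byte. The following standalone sketch (illustrative only, not part of the removed runtime; helper name and test values are invented) reproduces the core of `Serializer::write_bits` so the bit ordering is easy to see:

```rust
// Minimal sketch of the LSB-first bit packing used by Serializer::write_bits:
// each value is written into the current byte starting at the lowest free bit.
fn pack_bits(fields: &[(u64, usize)]) -> Vec<u8> {
    let mut out = Vec::new();
    let (mut cur, mut used) = (0u8, 0usize);
    for &(value, width) in fields {
        let (mut val, mut bits) = (value, width);
        while bits > 0 {
            let take = bits.min(8 - used);
            cur |= ((val & ((1u64 << take) - 1)) as u8) << used;
            val >>= take;
            bits -= take;
            used += take;
            if used == 8 {
                out.push(cur);
                cur = 0;
                used = 0;
            }
        }
    }
    if used > 0 {
        out.push(cur); // flush any partial trailing byte
    }
    out
}

fn main() {
    // A 3-bit value 0b101 followed by a 13-bit value 0x0ABC packs into two bytes.
    assert_eq!(pack_bits(&[(0b101, 3), (0x0ABC, 13)]), vec![0xE5, 0x55]);
}
```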
diff --git a/tools/pdl/src/backends/rust_no_allocation/test.rs b/tools/pdl/src/backends/rust_no_allocation/test.rs
deleted file mode 100644
index 18aa82be6e..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/test.rs
+++ /dev/null
@@ -1,336 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::collections::HashMap;
-
-use proc_macro2::TokenStream;
-use quote::{format_ident, quote};
-use serde::Deserialize;
-
-use crate::{ast, parser::parse_inline, quote_block};
-
-#[derive(Deserialize)]
-struct PacketTest {
- packet: String,
- tests: Box<[PacketTestCase]>,
-}
-
-#[derive(Deserialize)]
-struct PacketTestCase {
- packed: String,
- unpacked: UnpackedTestFields,
- packet: Option<String>,
-}
-
-#[derive(Deserialize)]
-struct UnpackedTestFields(HashMap<String, Field>);
-
-// fields can be scalars, lists, or structs
-#[derive(Deserialize)]
-#[serde(untagged)]
-enum Field {
- Number(usize),
- Struct(UnpackedTestFields),
- List(Box<[ListEntry]>),
-}
-
-// lists can contain either scalars or structs
-#[derive(Deserialize)]
-#[serde(untagged)]
-enum ListEntry {
- Number(usize),
- Struct(UnpackedTestFields),
-}
-
-fn generate_matchers(
- base: TokenStream,
- value: &UnpackedTestFields,
- filter_fields: &dyn Fn(&str) -> Result<bool, String>,
- curr_type: &str,
- type_lookup: &HashMap<&str, HashMap<&str, Option<&str>>>,
-) -> Result<TokenStream, String> {
- let mut out = vec![];
-
- for (field_name, field_value) in value.0.iter() {
- if !filter_fields(field_name)? {
- continue;
- }
- let getter_ident = format_ident!("get_{field_name}");
- match field_value {
- Field::Number(num) => {
- let num = *num as u64;
- if let Some(field_type) = type_lookup[curr_type][field_name.as_str()] {
- let field_ident = format_ident!("{field_type}");
- out.push(quote! { assert_eq!(#base.#getter_ident(), #field_ident::new(#num as _).unwrap()); });
- } else {
- out.push(quote! { assert_eq!(u64::from(#base.#getter_ident()), #num); });
- }
- }
- Field::List(lst) => {
- if field_name == "payload" {
- let reference = lst
- .iter()
- .map(|val| match val {
- ListEntry::Number(val) => *val as u8,
- _ => unreachable!(),
- })
- .collect::<Vec<_>>();
- out.push(quote! {
- assert_eq!(#base.get_raw_payload().collect::<Vec<_>>(), vec![#(#reference),*]);
- })
- } else {
- let get_iter_ident = format_ident!("get_{field_name}_iter");
- let vec_ident = format_ident!("{field_name}_vec");
- out.push(
- quote! { let #vec_ident = #base.#get_iter_ident().collect::<Vec<_>>(); },
- );
-
- for (i, val) in lst.iter().enumerate() {
- let list_elem = quote! { #vec_ident[#i] };
- out.push(match val {
- ListEntry::Number(num) => {
- if let Some(field_type) = type_lookup[curr_type][field_name.as_str()] {
- let field_ident = format_ident!("{field_type}");
- quote! { assert_eq!(#list_elem, #field_ident::new(#num as _).unwrap()); }
- } else {
- quote! { assert_eq!(u64::from(#list_elem), #num as u64); }
- }
- }
- ListEntry::Struct(fields) => {
- generate_matchers(list_elem, fields, &|_| Ok(true), type_lookup[curr_type][field_name.as_str()].unwrap(), type_lookup)?
- }
- })
- }
- }
- }
- Field::Struct(fields) => {
- out.push(generate_matchers(
- quote! { #base.#getter_ident() },
- fields,
- &|_| Ok(true),
- type_lookup[curr_type][field_name.as_str()].unwrap(),
- type_lookup,
- )?);
- }
- }
- }
- Ok(quote! { { #(#out)* } })
-}
-
-fn generate_builder(
- curr_type: &str,
- child_type: Option<&str>,
- type_lookup: &HashMap<&str, HashMap<&str, Option<&str>>>,
- value: &UnpackedTestFields,
-) -> TokenStream {
- let builder_ident = format_ident!("{curr_type}Builder");
- let child_ident = format_ident!("{curr_type}Child");
-
- let curr_fields = &type_lookup[curr_type];
-
- let fields = value.0.iter().filter_map(|(field_name, field_value)| {
- let curr_field_info = curr_fields.get(field_name.as_str());
-
- if let Some(curr_field_info) = curr_field_info {
- let field_name_ident = if field_name == "payload" {
- format_ident!("_child_")
- } else {
- format_ident!("{field_name}")
- };
- let val = match field_value {
- Field::Number(val) => {
- if let Some(field) = curr_field_info {
- let field_ident = format_ident!("{field}");
- quote! { #field_ident::new(#val as _).unwrap() }
- } else {
- quote! { (#val as u64).try_into().unwrap() }
- }
- }
- Field::Struct(fields) => {
- generate_builder(curr_field_info.unwrap(), None, type_lookup, fields)
- }
- Field::List(lst) => {
- let elems = lst.iter().map(|entry| match entry {
- ListEntry::Number(val) => {
- if let Some(field) = curr_field_info {
- let field_ident = format_ident!("{field}");
- quote! { #field_ident::new(#val as _).unwrap() }
- } else {
- quote! { (#val as u64).try_into().unwrap() }
- }
- }
- ListEntry::Struct(fields) => {
- generate_builder(curr_field_info.unwrap(), None, type_lookup, fields)
- }
- });
- quote! { vec![#(#elems),*].into_boxed_slice() }
- }
- };
-
- Some(if field_name == "payload" {
- quote! { #field_name_ident: #child_ident::RawData(#val) }
- } else {
- quote! { #field_name_ident: #val }
- })
- } else {
- None
- }
- });
-
- let child_field = if let Some(child_type) = child_type {
- let child_builder = generate_builder(child_type, None, type_lookup, value);
- Some(quote! {
- _child_: #child_builder.into(),
- })
- } else {
- None
- };
-
- quote! {
- #builder_ident {
- #child_field
- #(#fields),*
- }
- }
-}
-
-pub fn generate_test_file() -> Result<String, String> {
- let mut out = String::new();
-
- out.push_str(include_str!("test_preamble.rs"));
-
- let file = include_str!("../../../tests/canonical/le_test_vectors.json");
- let test_vectors: Box<[_]> =
- serde_json::from_str(file).map_err(|_| "could not parse test vectors")?;
-
- let pdl = include_str!("../../../tests/canonical/le_rust_noalloc_test_file.pdl");
- let ast = parse_inline(&mut ast::SourceDatabase::new(), "test.pdl".to_owned(), pdl.to_owned())
- .expect("could not parse reference PDL");
- let packet_lookup =
- ast.declarations
- .iter()
- .filter_map(|decl| match &decl.desc {
- ast::DeclDesc::Packet { id, fields, .. }
- | ast::DeclDesc::Struct { id, fields, .. } => Some((
- id.as_str(),
- fields
- .iter()
- .filter_map(|field| match &field.desc {
- ast::FieldDesc::Body { .. } | ast::FieldDesc::Payload { .. } => {
- Some(("payload", None))
- }
- ast::FieldDesc::Array { id, type_id, .. } => match type_id {
- Some(type_id) => Some((id.as_str(), Some(type_id.as_str()))),
- None => Some((id.as_str(), None)),
- },
- ast::FieldDesc::Typedef { id, type_id, .. } => {
- Some((id.as_str(), Some(type_id.as_str())))
- }
- ast::FieldDesc::Scalar { id, .. } => Some((id.as_str(), None)),
- _ => None,
- })
- .collect::<HashMap<_, _>>(),
- )),
- _ => None,
- })
- .collect::<HashMap<_, _>>();
-
- for PacketTest { packet, tests } in test_vectors.iter() {
- if !pdl.contains(packet) {
- // Hack: skip test vectors for packets not present in the reference PDL file
- continue;
- }
-
- for (i, PacketTestCase { packed, unpacked, packet: sub_packet }) in tests.iter().enumerate()
- {
- if let Some(sub_packet) = sub_packet {
- if !pdl.contains(sub_packet) {
- // Hack: skip test vectors for child packets not present in the reference PDL file
- continue;
- }
- }
-
- let test_name_ident = format_ident!("test_{packet}_{i}");
- let packet_ident = format_ident!("{packet}_instance");
- let packet_view = format_ident!("{packet}View");
-
- let mut leaf_packet = packet;
-
- let specialization = if let Some(sub_packet) = sub_packet {
- let sub_packet_ident = format_ident!("{}_instance", sub_packet);
- let sub_packet_view_ident = format_ident!("{}View", sub_packet);
-
- leaf_packet = sub_packet;
- quote! { let #sub_packet_ident = #sub_packet_view_ident::try_parse(#packet_ident).unwrap(); }
- } else {
- quote! {}
- };
-
- let leaf_packet_ident = format_ident!("{leaf_packet}_instance");
-
- let packet_matchers = generate_matchers(
- quote! { #packet_ident },
- unpacked,
- &|field| {
- Ok(packet_lookup
- .get(packet.as_str())
- .ok_or(format!("could not find packet {packet}"))?
- .contains_key(field))
- },
- packet,
- &packet_lookup,
- )?;
-
- let sub_packet_matchers = generate_matchers(
- quote! { #leaf_packet_ident },
- unpacked,
- &|field| {
- Ok(packet_lookup
- .get(leaf_packet.as_str())
- .ok_or(format!("could not find packet {packet}"))?
- .contains_key(field))
- },
- sub_packet.as_ref().unwrap_or(packet),
- &packet_lookup,
- )?;
-
- out.push_str(&quote_block! {
- #[test]
- fn #test_name_ident() {
- let base = hex_str_to_byte_vector(#packed);
- let #packet_ident = #packet_view::try_parse(SizedBitSlice::from(&base[..]).into()).unwrap();
-
- #specialization
-
- #packet_matchers
- #sub_packet_matchers
- }
- });
-
- let builder = generate_builder(packet, sub_packet.as_deref(), &packet_lookup, unpacked);
-
- let test_name_ident = format_ident!("test_{packet}_builder_{i}");
- out.push_str(&quote_block! {
- #[test]
- fn #test_name_ident() {
- let packed = hex_str_to_byte_vector(#packed);
- let serialized = #builder.to_vec().unwrap();
- assert_eq!(packed, serialized);
- }
- });
- }
- }
-
- Ok(out)
-}
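For context, the test-vector JSON consumed by `generate_test_file` follows the shape of the `Deserialize` structs above. Here is a self-contained sketch, assuming `serde` and `serde_json` as in the original crate; the packet name and field values below are made up, not taken from `le_test_vectors.json`:

```rust
// Sketch of the test-vector schema, using the same serde(untagged) trick to
// accept scalars, lists, and nested structs as field values.
use std::collections::HashMap;

use serde::Deserialize;

#[derive(Deserialize)]
struct PacketTest {
    packet: String,
    tests: Vec<PacketTestCase>,
}

#[derive(Deserialize)]
struct PacketTestCase {
    packed: String,
    unpacked: HashMap<String, Field>,
    packet: Option<String>, // optional child packet name
}

#[derive(Deserialize)]
#[serde(untagged)]
enum Field {
    Number(usize),
    List(Vec<Field>),
    Struct(HashMap<String, Field>),
}

fn main() {
    // Hypothetical vector: one test case for a packet with two scalar fields.
    let json = r#"[{
        "packet": "Packet_Scalar_Field",
        "tests": [{ "packed": "e85b", "unpacked": { "a": 7, "b": 5 } }]
    }]"#;
    let vectors: Vec<PacketTest> = serde_json::from_str(json).unwrap();
    assert_eq!(vectors[0].packet, "Packet_Scalar_Field");
    assert_eq!(vectors[0].tests[0].packed, "e85b");
    assert!(matches!(vectors[0].tests[0].unpacked["a"], Field::Number(7)));
}
```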
diff --git a/tools/pdl/src/backends/rust_no_allocation/test_preamble.rs b/tools/pdl/src/backends/rust_no_allocation/test_preamble.rs
deleted file mode 100644
index f7c1200062..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/test_preamble.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#![allow(non_snake_case)]
-#![allow(non_camel_case_types)]
-#![allow(warnings, missing_docs)]
-#![allow(clippy::all)]
-// Note: mixed_integer_ops has since been stabilized
-#![feature(mixed_integer_ops)]
-
-include!(concat!(env!("OUT_DIR"), "/_packets.rs"));
-
-fn hex_to_word(hex: u8) -> u8 {
- if b'0' <= hex && hex <= b'9' {
- hex - b'0'
- } else if b'A' <= hex && hex <= b'F' {
- hex - b'A' + 0xa
- } else {
- hex - b'a' + 0xa
- }
-}
-
-fn hex_str_to_byte_vector(hex: &str) -> Vec<u8> {
- hex.as_bytes()
- .chunks_exact(2)
- .map(|chunk| hex_to_word(chunk[1]) + (hex_to_word(chunk[0]) << 4))
- .collect()
-}
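The two helpers above decode a hex string two digits per byte, high nibble first. A minimal standalone sketch of the same decoding, with explicit error handling added for illustration (the original silently assumes valid lowercase input):

```rust
// Standalone sketch of the hex-decoding helpers: each pair of ASCII hex digits
// becomes one byte, high nibble first.
fn hex_nibble(c: u8) -> u8 {
    match c {
        b'0'..=b'9' => c - b'0',
        b'a'..=b'f' => c - b'a' + 0xa,
        b'A'..=b'F' => c - b'A' + 0xa,
        _ => panic!("not a hex digit: {}", c as char),
    }
}

fn main() {
    let bytes: Vec<u8> = "80038302"
        .as_bytes()
        .chunks_exact(2)
        .map(|pair| (hex_nibble(pair[0]) << 4) | hex_nibble(pair[1]))
        .collect();
    assert_eq!(bytes, vec![0x80, 0x03, 0x83, 0x02]);
}
```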
diff --git a/tools/pdl/src/backends/rust_no_allocation/utils.rs b/tools/pdl/src/backends/rust_no_allocation/utils.rs
deleted file mode 100644
index a9286de354..0000000000
--- a/tools/pdl/src/backends/rust_no_allocation/utils.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use proc_macro2::Ident;
-use quote::format_ident;
-
-pub fn get_integer_type(width: usize) -> Ident {
- let best_width = [8, 16, 32, 64]
- .into_iter()
- .filter(|x| *x >= width)
- .min()
- .unwrap_or_else(|| panic!("width {width} is too large"));
- format_ident!("u{best_width}")
-}
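`get_integer_type` maps a field width to the smallest unsigned Rust integer type that can hold it. A sketch of the same mapping, returning a plain string instead of a `proc_macro2::Ident`:

```rust
// Sketch of the width-to-type mapping implemented by get_integer_type above.
fn integer_type_name(width: usize) -> &'static str {
    match width {
        0..=8 => "u8",
        9..=16 => "u16",
        17..=32 => "u32",
        33..=64 => "u64",
        _ => panic!("width {width} is too large"),
    }
}

fn main() {
    assert_eq!(integer_type_name(12), "u16");
    assert_eq!(integer_type_name(32), "u32");
}
```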
diff --git a/tools/pdl/src/bin/generate-canonical-tests.rs b/tools/pdl/src/bin/generate-canonical-tests.rs
deleted file mode 100644
index 44ad2b1543..0000000000
--- a/tools/pdl/src/bin/generate-canonical-tests.rs
+++ /dev/null
@@ -1,240 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Generate Rust unit tests for canonical test vectors.
-
-use quote::{format_ident, quote};
-use serde::{Deserialize, Serialize};
-use serde_json::Value;
-
-#[derive(Debug, Deserialize)]
-struct Packet {
- #[serde(rename = "packet")]
- name: String,
- tests: Vec<TestVector>,
-}
-
-#[derive(Debug, Deserialize)]
-struct TestVector {
- packed: String,
- unpacked: Value,
- packet: Option<String>,
-}
-
-/// Convert a string of hexadecimal characters into a Rust vector of
-/// bytes.
-///
-/// The string `"80038302"` becomes `vec![0x80, 0x03, 0x83, 0x02]`.
-fn hexadecimal_to_vec(hex: &str) -> proc_macro2::TokenStream {
- assert!(hex.len() % 2 == 0, "Expects an even number of hex digits");
- let bytes = hex.as_bytes().chunks_exact(2).map(|chunk| {
- let number = format!("0x{}", std::str::from_utf8(chunk).unwrap());
- syn::parse_str::<syn::LitInt>(&number).unwrap()
- });
-
- quote! {
- vec![#(#bytes),*]
- }
-}
-
-/// Convert `value` to a JSON string literal.
-///
-/// The string literal is a raw literal to avoid escaping
-/// double-quotes.
-fn to_json<T: Serialize>(value: &T) -> syn::LitStr {
- let json = serde_json::to_string(value).unwrap();
- assert!(!json.contains("\"#"), "Please increase number of # for {json:?}");
- syn::parse_str::<syn::LitStr>(&format!("r#\" {json} \"#")).unwrap()
-}
-
-fn generate_unit_tests(input: &str, packet_names: &[&str], module_name: &str) {
- eprintln!("Reading test vectors from {input}, will use {} packets", packet_names.len());
-
- let data = std::fs::read_to_string(input)
- .unwrap_or_else(|err| panic!("Could not read {input}: {err}"));
- let packets: Vec<Packet> = serde_json::from_str(&data).expect("Could not parse JSON");
-
- let module = format_ident!("{}", module_name);
- let mut tests = Vec::new();
- for packet in &packets {
- for (i, test_vector) in packet.tests.iter().enumerate() {
- let test_packet = test_vector.packet.as_deref().unwrap_or(packet.name.as_str());
- if !packet_names.contains(&test_packet) {
- eprintln!("Skipping packet {}", test_packet);
- continue;
- }
- eprintln!("Generating tests for packet {}", test_packet);
-
- let parse_test_name = format_ident!(
- "test_parse_{}_vector_{}_0x{}",
- test_packet,
- i + 1,
- &test_vector.packed
- );
- let serialize_test_name = format_ident!(
- "test_serialize_{}_vector_{}_0x{}",
- test_packet,
- i + 1,
- &test_vector.packed
- );
- let packed = hexadecimal_to_vec(&test_vector.packed);
- let packet_name = format_ident!("{}", test_packet);
- let builder_name = format_ident!("{}Builder", test_packet);
-
- let object = test_vector.unpacked.as_object().unwrap_or_else(|| {
- panic!("Expected test vector object, found: {}", test_vector.unpacked)
- });
- let assertions = object.iter().map(|(key, value)| {
- let getter = format_ident!("get_{key}");
- let expected = format_ident!("expected_{key}");
- let json = to_json(&value);
- quote! {
- let #expected: serde_json::Value = serde_json::from_str(#json)
- .expect("Could not create expected value from canonical JSON data");
- assert_eq!(json!(actual.#getter()), #expected);
- }
- });
-
- let json = to_json(&object);
- tests.push(quote! {
- #[test]
- fn #parse_test_name() {
- let packed = #packed;
- let actual = #module::#packet_name::parse(&packed).unwrap();
- #(#assertions)*
- }
-
- #[test]
- fn #serialize_test_name() {
- let builder: #module::#builder_name = serde_json::from_str(#json)
- .expect("Could not create builder from canonical JSON data");
- let packet = builder.build();
- let packed: Vec<u8> = #packed;
- assert_eq!(packet.to_vec(), packed);
- }
- });
- }
- }
-
- // TODO(mgeisler): make the generated code free of warnings.
- let code = quote! {
- #![allow(warnings, missing_docs)]
-
- use #module::Packet;
- use serde_json::json;
-
- #(#tests)*
- };
- let syntax_tree = syn::parse2::<syn::File>(code).expect("Could not parse generated code");
- println!("{}", prettyplease::unparse(&syntax_tree));
-}
-
-fn main() {
- let input_path = std::env::args().nth(1).expect("Need path to JSON file with test vectors");
- let module_name = std::env::args().nth(2).expect("Need name for the generated module");
- // TODO(mgeisler): remove the `packet_names` argument when we
- // support all canonical packets.
- generate_unit_tests(
- &input_path,
- &[
- "EnumChild_A",
- "EnumChild_B",
- "Packet_Array_Field_ByteElement_ConstantSize",
- "Packet_Array_Field_ByteElement_UnknownSize",
- "Packet_Array_Field_ByteElement_VariableCount",
- "Packet_Array_Field_ByteElement_VariableSize",
- "Packet_Array_Field_EnumElement",
- "Packet_Array_Field_EnumElement_ConstantSize",
- "Packet_Array_Field_EnumElement_UnknownSize",
- "Packet_Array_Field_EnumElement_VariableCount",
- "Packet_Array_Field_EnumElement_VariableCount",
- "Packet_Array_Field_ScalarElement",
- "Packet_Array_Field_ScalarElement_ConstantSize",
- "Packet_Array_Field_ScalarElement_UnknownSize",
- "Packet_Array_Field_ScalarElement_VariableCount",
- "Packet_Array_Field_ScalarElement_VariableSize",
- "Packet_Array_Field_SizedElement_ConstantSize",
- "Packet_Array_Field_SizedElement_UnknownSize",
- "Packet_Array_Field_SizedElement_VariableCount",
- "Packet_Array_Field_SizedElement_VariableSize",
- "Packet_Array_Field_UnsizedElement_ConstantSize",
- "Packet_Array_Field_UnsizedElement_UnknownSize",
- "Packet_Array_Field_UnsizedElement_VariableCount",
- "Packet_Array_Field_UnsizedElement_VariableSize",
- "Packet_Array_Field_SizedElement_VariableSize_Padded",
- "Packet_Array_Field_UnsizedElement_VariableCount_Padded",
- "Packet_Body_Field_UnknownSize",
- "Packet_Body_Field_UnknownSize_Terminal",
- "Packet_Body_Field_VariableSize",
- "Packet_Count_Field",
- "Packet_Enum8_Field",
- "Packet_Enum_Field",
- "Packet_FixedEnum_Field",
- "Packet_FixedScalar_Field",
- "Packet_Payload_Field_UnknownSize",
- "Packet_Payload_Field_UnknownSize_Terminal",
- "Packet_Payload_Field_VariableSize",
- "Packet_Reserved_Field",
- "Packet_Scalar_Field",
- "Packet_Size_Field",
- "Packet_Struct_Field",
- "ScalarChild_A",
- "ScalarChild_B",
- "Struct_Count_Field",
- "Struct_Array_Field_ByteElement_ConstantSize",
- "Struct_Array_Field_ByteElement_UnknownSize",
- "Struct_Array_Field_ByteElement_UnknownSize",
- "Struct_Array_Field_ByteElement_VariableCount",
- "Struct_Array_Field_ByteElement_VariableCount",
- "Struct_Array_Field_ByteElement_VariableSize",
- "Struct_Array_Field_ByteElement_VariableSize",
- "Struct_Array_Field_EnumElement_ConstantSize",
- "Struct_Array_Field_EnumElement_UnknownSize",
- "Struct_Array_Field_EnumElement_UnknownSize",
- "Struct_Array_Field_EnumElement_VariableCount",
- "Struct_Array_Field_EnumElement_VariableCount",
- "Struct_Array_Field_EnumElement_VariableSize",
- "Struct_Array_Field_EnumElement_VariableSize",
- "Struct_Array_Field_ScalarElement_ConstantSize",
- "Struct_Array_Field_ScalarElement_UnknownSize",
- "Struct_Array_Field_ScalarElement_UnknownSize",
- "Struct_Array_Field_ScalarElement_VariableCount",
- "Struct_Array_Field_ScalarElement_VariableCount",
- "Struct_Array_Field_ScalarElement_VariableSize",
- "Struct_Array_Field_ScalarElement_VariableSize",
- "Struct_Array_Field_SizedElement_ConstantSize",
- "Struct_Array_Field_SizedElement_UnknownSize",
- "Struct_Array_Field_SizedElement_UnknownSize",
- "Struct_Array_Field_SizedElement_VariableCount",
- "Struct_Array_Field_SizedElement_VariableCount",
- "Struct_Array_Field_SizedElement_VariableSize",
- "Struct_Array_Field_SizedElement_VariableSize",
- "Struct_Array_Field_UnsizedElement_ConstantSize",
- "Struct_Array_Field_UnsizedElement_UnknownSize",
- "Struct_Array_Field_UnsizedElement_UnknownSize",
- "Struct_Array_Field_UnsizedElement_VariableCount",
- "Struct_Array_Field_UnsizedElement_VariableCount",
- "Struct_Array_Field_UnsizedElement_VariableSize",
- "Struct_Array_Field_UnsizedElement_VariableSize",
- "Struct_Array_Field_SizedElement_VariableSize_Padded",
- "Struct_Array_Field_UnsizedElement_VariableCount_Padded",
- "Struct_Enum_Field",
- "Struct_FixedEnum_Field",
- "Struct_FixedScalar_Field",
- "Struct_Size_Field",
- "Struct_Struct_Field",
- ],
- &module_name,
- );
-}
diff --git a/tools/pdl/src/lint.rs b/tools/pdl/src/lint.rs
deleted file mode 100644
index 874596eada..0000000000
--- a/tools/pdl/src/lint.rs
+++ /dev/null
@@ -1,320 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::collections::HashMap;
-
-use crate::analyzer::ast as analyzer_ast;
-use crate::ast::*;
-
-/// Gather information about the full AST.
-#[derive(Debug)]
-pub struct Scope<'d> {
- // Original file.
- pub file: &'d analyzer_ast::File,
-
- // Collection of Group, Packet, Enum, Struct, Checksum, and CustomField declarations.
- pub typedef: HashMap<String, &'d analyzer_ast::Decl>,
-
- // Collection of Packet, Struct, and Group scope declarations.
- pub scopes: HashMap<&'d analyzer_ast::Decl, PacketScope<'d>>,
-}
-
-/// Gather information about a Packet, Struct, or Group declaration.
-#[derive(Debug)]
-pub struct PacketScope<'d> {
- // Original decl.
- decl: &'d analyzer_ast::Decl,
-
- // Local and inherited field declarations. Only named fields are preserved.
- // Saved here for reference for parent constraint resolving.
- pub all_fields: HashMap<String, &'d analyzer_ast::Field>,
-
- // Local and inherited constraint declarations.
- // Saved here for constraint conflict checks.
- pub all_constraints: HashMap<String, &'d Constraint>,
-}
-
-impl<'d> std::hash::Hash for &'d analyzer_ast::Decl {
- fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
- std::ptr::hash(*self, state);
- }
-}
-
-impl<'d> PacketScope<'d> {
- /// Add parent fields and constraints to the scope.
- /// Only named fields are imported.
- fn inherit(
- &mut self,
- parent: &PacketScope<'d>,
- constraints: impl Iterator<Item = &'d Constraint>,
- ) {
- // Check constraints.
- assert!(self.all_constraints.is_empty());
- self.all_constraints = parent.all_constraints.clone();
- for constraint in constraints {
- let id = constraint.id.clone();
- self.all_constraints.insert(id, constraint);
- }
-
- // Save parent fields.
- self.all_fields = parent.all_fields.clone();
- }
-
- /// Iterate over the packet's fields.
- pub fn iter_fields(&self) -> impl Iterator<Item = &'d analyzer_ast::Field> {
- self.decl.fields()
- }
-
- /// Lookup a field by name. This will also find the special
- /// `_payload_` and `_body_` fields.
- pub fn get_packet_field(&self, id: &str) -> Option<&analyzer_ast::Field> {
- self.decl.fields().find(|field| match &field.desc {
- FieldDesc::Payload { .. } => id == "_payload_",
- FieldDesc::Body { .. } => id == "_body_",
- _ => field.id() == Some(id),
- })
- }
-
- /// Find the payload or body field, if any.
- pub fn get_payload_field(&self) -> Option<&analyzer_ast::Field> {
- self.decl
- .fields()
- .find(|field| matches!(&field.desc, FieldDesc::Payload { .. } | FieldDesc::Body { .. }))
- }
-
- /// Lookup the size field for an array field.
- pub fn get_array_size_field(&self, id: &str) -> Option<&analyzer_ast::Field> {
- self.decl.fields().find(|field| match &field.desc {
- FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => field_id == id,
- _ => false,
- })
- }
-
- /// Find the size field corresponding to the payload or body
- /// field of this packet.
- pub fn get_payload_size_field(&self) -> Option<&analyzer_ast::Field> {
- self.decl.fields().find(|field| match &field.desc {
- FieldDesc::Size { field_id, .. } => field_id == "_payload_" || field_id == "_body_",
- _ => false,
- })
- }
-
- /// Cleanup scope after processing all fields.
- fn finalize(&mut self) {
- // Check field shadowing.
- for f in self.decl.fields() {
- if let Some(id) = f.id() {
- self.all_fields.insert(id.to_string(), f);
- }
- }
- }
-}
-
-impl<'d> Scope<'d> {
- pub fn new(file: &analyzer_ast::File) -> Scope<'_> {
- let mut scope = Scope { file, typedef: HashMap::new(), scopes: HashMap::new() };
-
- // Gather top-level declarations.
- // Validate the top-level scopes (Group, Packet, Typedef).
- //
- // TODO: switch to try_insert when stable
- for decl in &file.declarations {
- if let Some(id) = decl.id() {
- scope.typedef.insert(id.to_string(), decl);
- }
- }
-
- scope.finalize();
- scope
- }
-
- // Sort Packet, Struct, and Group declarations by reverse topological
- // order.
- fn finalize(&mut self) -> Vec<&'d analyzer_ast::Decl> {
- // Auxiliary function: depth-first traversal of the declaration tree, using temporary/permanent marks to produce a topological order.
- enum Mark {
- Temporary,
- Permanent,
- }
- struct Context<'d> {
- list: Vec<&'d analyzer_ast::Decl>,
- visited: HashMap<&'d analyzer_ast::Decl, Mark>,
- scopes: HashMap<&'d analyzer_ast::Decl, PacketScope<'d>>,
- }
-
- fn bfs<'s, 'd>(
- decl: &'d analyzer_ast::Decl,
- context: &'s mut Context<'d>,
- scope: &Scope<'d>,
- ) -> Option<&'s PacketScope<'d>> {
- match context.visited.get(&decl) {
- Some(Mark::Permanent) => return context.scopes.get(&decl),
- Some(Mark::Temporary) => {
- return None;
- }
- _ => (),
- }
-
- let (parent_id, fields) = match &decl.desc {
- DeclDesc::Packet { parent_id, fields, .. }
- | DeclDesc::Struct { parent_id, fields, .. } => (parent_id.as_ref(), fields),
- DeclDesc::Group { fields, .. } => (None, fields),
- _ => return None,
- };
-
- context.visited.insert(decl, Mark::Temporary);
- let mut lscope =
- PacketScope { decl, all_fields: HashMap::new(), all_constraints: HashMap::new() };
-
- // Iterate over Struct and Group fields.
- for f in fields {
- match &f.desc {
- FieldDesc::Group { .. } => unreachable!(),
- FieldDesc::Typedef { type_id, .. } => match scope.typedef.get(type_id) {
- Some(struct_decl @ Decl { desc: DeclDesc::Struct { .. }, .. }) => {
- bfs(struct_decl, context, scope);
- }
- None | Some(_) => (),
- },
- _ => (),
- }
- }
-
- // Iterate over parent declaration.
- let parent = parent_id.and_then(|id| scope.typedef.get(id));
- if let Some(parent_decl) = parent {
- if let Some(rscope) = bfs(parent_decl, context, scope) {
- // Import the parent fields and constraints into the current scope.
- lscope.inherit(rscope, decl.constraints())
- }
- }
-
- lscope.finalize();
- context.list.push(decl);
- context.visited.insert(decl, Mark::Permanent);
- context.scopes.insert(decl, lscope);
- context.scopes.get(&decl)
- }
-
- let mut context =
- Context::<'d> { list: vec![], visited: HashMap::new(), scopes: HashMap::new() };
-
- for decl in self.typedef.values() {
- bfs(decl, &mut context, self);
- }
-
- self.scopes = context.scopes;
- context.list
- }
-
- pub fn iter_children<'a>(
- &'a self,
- id: &'a str,
- ) -> impl Iterator<Item = &'d analyzer_ast::Decl> + 'a {
- self.file.iter_children(self.typedef.get(id).unwrap())
- }
-
- /// Return the declaration of the typedef type backing the
- /// selected field.
- pub fn get_field_declaration(
- &self,
- field: &analyzer_ast::Field,
- ) -> Option<&'d analyzer_ast::Decl> {
- match &field.desc {
- FieldDesc::FixedEnum { enum_id, .. } => self.typedef.get(enum_id).copied(),
- FieldDesc::Array { type_id: Some(type_id), .. } => self.typedef.get(type_id).copied(),
- FieldDesc::Typedef { type_id, .. } => self.typedef.get(type_id.as_str()).copied(),
- _ => None,
- }
- }
-
- /// Test if the selected field is a bitfield.
- pub fn is_bitfield(&self, field: &analyzer_ast::Field) -> bool {
- match &field.desc {
- FieldDesc::Size { .. }
- | FieldDesc::Count { .. }
- | FieldDesc::ElementSize { .. }
- | FieldDesc::FixedScalar { .. }
- | FieldDesc::FixedEnum { .. }
- | FieldDesc::Reserved { .. }
- | FieldDesc::Scalar { .. } => true,
- FieldDesc::Typedef { type_id, .. } => {
- let field = self.typedef.get(type_id.as_str());
- matches!(field, Some(Decl { desc: DeclDesc::Enum { .. }, .. }))
- }
- _ => false,
- }
- }
-
- /// Determine the size of a field in bits, if possible.
- ///
- /// If the field is dynamically sized (e.g. unsized array or
- /// payload field), `None` is returned. If `skip_payload` is set,
- /// payload and body fields are counted as having size `0` rather
- /// than a variable size.
- pub fn get_field_width(
- &self,
- field: &analyzer_ast::Field,
- skip_payload: bool,
- ) -> Option<usize> {
- match &field.desc {
- FieldDesc::Scalar { width, .. }
- | FieldDesc::Size { width, .. }
- | FieldDesc::Count { width, .. }
- | FieldDesc::ElementSize { width, .. }
- | FieldDesc::Reserved { width, .. }
- | FieldDesc::FixedScalar { width, .. } => Some(*width),
- FieldDesc::Padding { .. } => Some(0),
- FieldDesc::Array { .. } if field.annot.padded_size.is_some() => {
- Some(field.annot.padded_size.unwrap() * 8)
- }
- FieldDesc::Array { size: Some(size), width, .. } => {
- let element_width = width
- .or_else(|| self.get_decl_width(self.get_field_declaration(field)?, false))?;
- Some(element_width * size)
- }
- FieldDesc::FixedEnum { .. } | FieldDesc::Typedef { .. } => {
- self.get_decl_width(self.get_field_declaration(field)?, false)
- }
- FieldDesc::Checksum { .. } => Some(0),
- FieldDesc::Payload { .. } | FieldDesc::Body { .. } if skip_payload => Some(0),
- _ => None,
- }
- }
-
- /// Determine the size of a declaration type in bits, if possible.
- ///
- /// If the type is dynamically sized (e.g. contains an array or
- /// payload), `None` is returned. If `skip_payload` is set,
- /// payload and body fields are counted as having size `0` rather
- /// than a variable size.
- pub fn get_decl_width(&self, decl: &analyzer_ast::Decl, skip_payload: bool) -> Option<usize> {
- match &decl.desc {
- DeclDesc::Enum { width, .. } | DeclDesc::Checksum { width, .. } => Some(*width),
- DeclDesc::CustomField { width, .. } => *width,
- DeclDesc::Packet { fields, parent_id, .. }
- | DeclDesc::Struct { fields, parent_id, .. } => {
- let mut packet_size = match parent_id {
- None => 0,
- Some(id) => self.get_decl_width(self.typedef.get(id.as_str())?, true)?,
- };
- for field in fields.iter() {
- packet_size += self.get_field_width(field, skip_payload)?;
- }
- Some(packet_size)
- }
- DeclDesc::Group { .. } | DeclDesc::Test { .. } => None,
- }
- }
-}
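`get_decl_width` only yields a static width when every field has one; a single dynamically sized field makes the whole declaration dynamically sized. A minimal sketch of that rule over a list of optional field widths (names are illustrative):

```rust
// Sketch of the sizing rule used by get_decl_width: summing Option widths
// yields Some(total) only if every field width is Some.
fn decl_width(field_widths: &[Option<usize>]) -> Option<usize> {
    field_widths.iter().copied().sum::<Option<usize>>()
}

fn main() {
    assert_eq!(decl_width(&[Some(8), Some(4), Some(4)]), Some(16));
    assert_eq!(decl_width(&[Some(8), None]), None);
}
```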
diff --git a/tools/pdl/src/main.rs b/tools/pdl/src/main.rs
deleted file mode 100644
index 979f102546..0000000000
--- a/tools/pdl/src/main.rs
+++ /dev/null
@@ -1,121 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! PDL parser and analyzer.
-
-use argh::FromArgs;
-use codespan_reporting::term::{self, termcolor};
-
-mod analyzer;
-mod ast;
-mod backends;
-mod lint;
-mod parser;
-#[cfg(test)]
-mod test_utils;
-mod utils;
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-enum OutputFormat {
- JSON,
- Rust,
- RustNoAlloc,
- RustNoAllocTest,
-}
-
-impl std::str::FromStr for OutputFormat {
- type Err = String;
-
- fn from_str(input: &str) -> Result<Self, Self::Err> {
- match input.to_lowercase().as_str() {
- "json" => Ok(Self::JSON),
- "rust" => Ok(Self::Rust),
- "rust_no_alloc" => Ok(Self::RustNoAlloc),
- "rust_no_alloc_test" => Ok(Self::RustNoAllocTest),
- _ => Err(format!("could not parse {:?}, valid options are 'json', 'rust', 'rust_no_alloc', and 'rust_no_alloc_test'.", input)),
- }
- }
-}
-
-#[derive(FromArgs, Debug)]
-/// PDL analyzer and generator.
-struct Opt {
- #[argh(switch)]
- /// print tool version and exit.
- version: bool,
-
- #[argh(option, default = "OutputFormat::JSON")]
- /// generate output in this format ("json", "rust", "rust_no_alloc", "rust_no_alloc_test"). The output
- /// will be printed on stdout in all cases.
- output_format: OutputFormat,
-
- #[argh(positional)]
- /// input file.
- input_file: String,
-}
-
-fn main() -> Result<(), String> {
- let opt: Opt = argh::from_env();
-
- if opt.version {
- println!("Packet Description Language parser version 1.0");
- return Ok(());
- }
-
- let mut sources = ast::SourceDatabase::new();
- match parser::parse_file(&mut sources, opt.input_file) {
- Ok(file) => {
- let analyzed_file = match analyzer::analyze(&file) {
- Ok(file) => file,
- Err(diagnostics) => {
- diagnostics
- .emit(
- &sources,
- &mut termcolor::StandardStream::stderr(termcolor::ColorChoice::Always)
- .lock(),
- )
- .expect("Could not print analyzer diagnostics");
- return Err(String::from("Analysis failed"));
- }
- };
-
- match opt.output_format {
- OutputFormat::JSON => {
- println!("{}", backends::json::generate(&file).unwrap())
- }
- OutputFormat::Rust => {
- println!("{}", backends::rust::generate(&sources, &analyzed_file))
- }
- OutputFormat::RustNoAlloc => {
- let schema = backends::intermediate::generate(&file).unwrap();
- println!("{}", backends::rust_no_allocation::generate(&file, &schema).unwrap())
- }
- OutputFormat::RustNoAllocTest => {
- println!(
- "{}",
- backends::rust_no_allocation::test::generate_test_file().unwrap()
- )
- }
- }
- Ok(())
- }
-
- Err(err) => {
- let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Always);
- let config = term::Config::default();
- term::emit(&mut writer.lock(), &config, &sources, &err).expect("Could not print error");
- Err(String::from("Error while parsing input"))
- }
- }
-}
diff --git a/tools/pdl/src/parser.rs b/tools/pdl/src/parser.rs
deleted file mode 100644
index 6d19648f0b..0000000000
--- a/tools/pdl/src/parser.rs
+++ /dev/null
@@ -1,669 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use codespan_reporting::diagnostic::Diagnostic;
-use codespan_reporting::files;
-use pest::iterators::{Pair, Pairs};
-use pest::{Parser, Token};
-use std::iter::{Filter, Peekable};
-
-pub mod ast {
- use serde::Serialize;
-
- #[derive(Debug, Serialize, Default, PartialEq, Eq)]
- pub struct Annotation;
-
- impl crate::ast::Annotation for Annotation {
- type FieldAnnotation = ();
- type DeclAnnotation = ();
- }
-
- pub type Field = crate::ast::Field<Annotation>;
- pub type Decl = crate::ast::Decl<Annotation>;
- pub type File = crate::ast::File<Annotation>;
-}
-
-// Generate the PDL parser.
-// TODO: use #[grammar = "pdl.pest"]
-// currently not possible because CARGO_MANIFEST_DIR is not set
-// in soong environment.
-#[derive(pest_derive::Parser)]
-#[grammar_inline = r#"
-WHITESPACE = _{ " " | "\n" }
-COMMENT = { block_comment | line_comment }
-
-block_comment = { "/*" ~ (!"*/" ~ ANY)* ~ "*/" }
-line_comment = { "//" ~ (!"\n" ~ ANY)* }
-
-alpha = { 'a'..'z' | 'A'..'Z' }
-digit = { '0'..'9' }
-hexdigit = { digit | 'a'..'f' | 'A'..'F' }
-alphanum = { alpha | digit | "_" }
-
-identifier = @{ alpha ~ alphanum* }
-payload_identifier = @{ "_payload_" }
-body_identifier = @{ "_body_" }
-intvalue = @{ digit+ }
-hexvalue = @{ ("0x"|"0X") ~ hexdigit+ }
-integer = @{ hexvalue | intvalue }
-string = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" }
-size_modifier = @{ "+" ~ intvalue }
-
-endianness_declaration = { "little_endian_packets" | "big_endian_packets" }
-
-enum_value = { identifier ~ "=" ~ integer }
-enum_value_list = { enum_value ~ ("," ~ enum_value)* ~ ","? }
-enum_range = {
- identifier ~ "=" ~ integer ~ ".." ~ integer ~ ("{" ~
- enum_value_list ~
- "}")?
-}
-enum_tag = { enum_range | enum_value }
-enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
-enum_declaration = {
- "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
- enum_tag_list ~
- "}"
-}
-
-constraint = { identifier ~ "=" ~ (identifier|integer) }
-constraint_list = { constraint ~ ("," ~ constraint)* }
-
-checksum_field = { "_checksum_start_" ~ "(" ~ identifier ~ ")" }
-padding_field = { "_padding_" ~ "[" ~ integer ~ "]" }
-size_field = { "_size_" ~ "(" ~ (identifier|payload_identifier|body_identifier) ~ ")" ~ ":" ~ integer }
-count_field = { "_count_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
-elementsize_field = { "_elementsize_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
-body_field = @{ "_body_" }
-payload_field = { "_payload_" ~ (":" ~ "[" ~ size_modifier ~ "]")? }
-fixed_field = { "_fixed_" ~ "=" ~ (
- (integer ~ ":" ~ integer) |
- (identifier ~ ":" ~ identifier)
-)}
-reserved_field = { "_reserved_" ~ ":" ~ integer }
-array_field = { identifier ~ ":" ~ (integer|identifier) ~
- "[" ~ (size_modifier|integer)? ~ "]"
-}
-scalar_field = { identifier ~ ":" ~ integer }
-typedef_field = { identifier ~ ":" ~ identifier }
-group_field = { identifier ~ ("{" ~ constraint_list? ~ "}")? }
-
-field = _{
- checksum_field |
- padding_field |
- size_field |
- count_field |
- elementsize_field |
- body_field |
- payload_field |
- fixed_field |
- reserved_field |
- array_field |
- scalar_field |
- typedef_field |
- group_field
-}
-field_list = { field ~ ("," ~ field)* ~ ","? }
-
-packet_declaration = {
- "packet" ~ identifier ~
- (":" ~ identifier)? ~
- ("(" ~ constraint_list ~ ")")? ~
- "{" ~
- field_list? ~
- "}"
-}
-
-struct_declaration = {
- "struct" ~ identifier ~
- (":" ~ identifier)? ~
- ("(" ~ constraint_list ~ ")")? ~
- "{" ~
- field_list? ~
- "}"
-}
-
-group_declaration = {
- "group" ~ identifier ~ "{" ~ field_list ~ "}"
-}
-
-checksum_declaration = {
- "checksum" ~ identifier ~ ":" ~ integer ~ string
-}
-
-custom_field_declaration = {
- "custom_field" ~ identifier ~ (":" ~ integer)? ~ string
-}
-
-test_case = { string }
-test_case_list = _{ test_case ~ ("," ~ test_case)* ~ ","? }
-test_declaration = {
- "test" ~ identifier ~ "{" ~
- test_case_list ~
- "}"
-}
-
-declaration = _{
- enum_declaration |
- packet_declaration |
- struct_declaration |
- group_declaration |
- checksum_declaration |
- custom_field_declaration |
- test_declaration
-}
-
-file = {
- SOI ~
- endianness_declaration ~
- declaration* ~
- EOI
-}
-"#]
-pub struct PDLParser;
-
-type Node<'i> = Pair<'i, Rule>;
-type NodeIterator<'i> = Peekable<Filter<Pairs<'i, Rule>, fn(&Node<'i>) -> bool>>;
-type Context<'a> = (crate::ast::FileId, &'a Vec<usize>);
-
-trait Helpers<'i> {
- fn children(self) -> NodeIterator<'i>;
- fn as_loc(&self, context: &Context) -> crate::ast::SourceRange;
- fn as_string(&self) -> String;
- fn as_usize(&self) -> Result<usize, String>;
-}
-
-impl<'i> Helpers<'i> for Node<'i> {
- fn children(self) -> NodeIterator<'i> {
- self.into_inner().filter((|n| n.as_rule() != Rule::COMMENT) as fn(&Self) -> bool).peekable()
- }
-
- fn as_loc(&self, context: &Context) -> crate::ast::SourceRange {
- let span = self.as_span();
- crate::ast::SourceRange {
- file: context.0,
- start: crate::ast::SourceLocation::new(span.start_pos().pos(), context.1),
- end: crate::ast::SourceLocation::new(span.end_pos().pos(), context.1),
- }
- }
-
- fn as_string(&self) -> String {
- self.as_str().to_owned()
- }
-
- fn as_usize(&self) -> Result<usize, String> {
- let text = self.as_str();
- if let Some(num) = text.strip_prefix("0x").or_else(|| text.strip_prefix("0X")) {
- usize::from_str_radix(num, 16)
- .map_err(|_| format!("cannot convert '{}' to usize", self.as_str()))
- } else {
- #[allow(clippy::from_str_radix_10)]
- usize::from_str_radix(text, 10)
- .map_err(|_| format!("cannot convert '{}' to usize", self.as_str()))
- }
- }
-}
-
-fn err_unexpected_rule<T>(expected: Rule, found: Rule) -> Result<T, String> {
- Err(format!("expected rule {:?}, got {:?}", expected, found))
-}
-
-fn err_missing_rule<T>(expected: Rule) -> Result<T, String> {
- Err(format!("expected rule {:?}, got nothing", expected))
-}
-
-fn expect<'i>(iter: &mut impl Iterator<Item = Node<'i>>, rule: Rule) -> Result<Node<'i>, String> {
- match iter.next() {
- Some(node) if node.as_rule() == rule => Ok(node),
- Some(node) => err_unexpected_rule(rule, node.as_rule()),
- None => err_missing_rule(rule),
- }
-}
-
-fn maybe<'i>(iter: &mut NodeIterator<'i>, rule: Rule) -> Option<Node<'i>> {
- iter.next_if(|n| n.as_rule() == rule)
-}
-
-fn parse_identifier(iter: &mut NodeIterator<'_>) -> Result<String, String> {
- expect(iter, Rule::identifier).map(|n| n.as_string())
-}
-
-fn parse_integer(iter: &mut NodeIterator<'_>) -> Result<usize, String> {
- expect(iter, Rule::integer).and_then(|n| n.as_usize())
-}
-
-fn parse_identifier_opt(iter: &mut NodeIterator<'_>) -> Result<Option<String>, String> {
- Ok(maybe(iter, Rule::identifier).map(|n| n.as_string()))
-}
-
-fn parse_integer_opt(iter: &mut NodeIterator<'_>) -> Result<Option<usize>, String> {
- maybe(iter, Rule::integer).map(|n| n.as_usize()).transpose()
-}
-
-fn parse_identifier_or_integer(
- iter: &mut NodeIterator<'_>,
-) -> Result<(Option<String>, Option<usize>), String> {
- match iter.next() {
- Some(n) if n.as_rule() == Rule::identifier => Ok((Some(n.as_string()), None)),
- Some(n) if n.as_rule() == Rule::integer => Ok((None, Some(n.as_usize()?))),
- Some(n) => Err(format!(
- "expected rule {:?} or {:?}, got {:?}",
- Rule::identifier,
- Rule::integer,
- n.as_rule()
- )),
- None => {
- Err(format!("expected rule {:?} or {:?}, got nothing", Rule::identifier, Rule::integer))
- }
- }
-}
-
-fn parse_string<'i>(iter: &mut impl Iterator<Item = Node<'i>>) -> Result<String, String> {
- expect(iter, Rule::string)
- .map(|n| n.as_str())
- .and_then(|s| s.strip_prefix('"').ok_or_else(|| "expected \" prefix".to_owned()))
- .and_then(|s| s.strip_suffix('"').ok_or_else(|| "expected \" suffix".to_owned()))
- .map(|s| s.to_owned())
-}
-
-fn parse_size_modifier_opt(iter: &mut NodeIterator<'_>) -> Option<String> {
- maybe(iter, Rule::size_modifier).map(|n| n.as_string())
-}
-
-fn parse_endianness(node: Node<'_>, context: &Context) -> Result<crate::ast::Endianness, String> {
- if node.as_rule() != Rule::endianness_declaration {
- err_unexpected_rule(Rule::endianness_declaration, node.as_rule())
- } else {
- Ok(crate::ast::Endianness {
- loc: node.as_loc(context),
- value: match node.as_str() {
- "little_endian_packets" => crate::ast::EndiannessValue::LittleEndian,
- "big_endian_packets" => crate::ast::EndiannessValue::BigEndian,
- _ => unreachable!(),
- },
- })
- }
-}
-
-fn parse_constraint(node: Node<'_>, context: &Context) -> Result<crate::ast::Constraint, String> {
- if node.as_rule() != Rule::constraint {
- err_unexpected_rule(Rule::constraint, node.as_rule())
- } else {
- let loc = node.as_loc(context);
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let (tag_id, value) = parse_identifier_or_integer(&mut children)?;
- Ok(crate::ast::Constraint { id, loc, value, tag_id })
- }
-}
-
-fn parse_constraint_list_opt(
- iter: &mut NodeIterator<'_>,
- context: &Context,
-) -> Result<Vec<crate::ast::Constraint>, String> {
- maybe(iter, Rule::constraint_list)
- .map_or(Ok(vec![]), |n| n.children().map(|n| parse_constraint(n, context)).collect())
-}
-
-fn parse_enum_value(node: Node<'_>, context: &Context) -> Result<crate::ast::TagValue, String> {
- if node.as_rule() != Rule::enum_value {
- err_unexpected_rule(Rule::enum_value, node.as_rule())
- } else {
- let loc = node.as_loc(context);
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let value = parse_integer(&mut children)?;
- Ok(crate::ast::TagValue { id, loc, value })
- }
-}
-
-fn parse_enum_value_list_opt(
- iter: &mut NodeIterator<'_>,
- context: &Context,
-) -> Result<Vec<crate::ast::TagValue>, String> {
- maybe(iter, Rule::enum_value_list)
- .map_or(Ok(vec![]), |n| n.children().map(|n| parse_enum_value(n, context)).collect())
-}
-
-fn parse_enum_range(node: Node<'_>, context: &Context) -> Result<crate::ast::TagRange, String> {
- if node.as_rule() != Rule::enum_range {
- err_unexpected_rule(Rule::enum_range, node.as_rule())
- } else {
- let loc = node.as_loc(context);
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let start = parse_integer(&mut children)?;
- let end = parse_integer(&mut children)?;
- let tags = parse_enum_value_list_opt(&mut children, context)?;
- Ok(crate::ast::TagRange { id, loc, range: start..=end, tags })
- }
-}
-
-fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<crate::ast::Tag, String> {
- if node.as_rule() != Rule::enum_tag {
- err_unexpected_rule(Rule::enum_tag, node.as_rule())
- } else {
- match node.children().next() {
- Some(node) if node.as_rule() == Rule::enum_value => {
- Ok(crate::ast::Tag::Value(parse_enum_value(node, context)?))
- }
- Some(node) if node.as_rule() == Rule::enum_range => {
- Ok(crate::ast::Tag::Range(parse_enum_range(node, context)?))
- }
- Some(node) => Err(format!(
- "expected rule {:?} or {:?}, got {:?}",
- Rule::enum_value,
- Rule::enum_range,
- node.as_rule()
- )),
- None => Err(format!(
- "expected rule {:?} or {:?}, got nothing",
- Rule::enum_value,
- Rule::enum_range
- )),
- }
- }
-}
-
-fn parse_enum_tag_list(
- iter: &mut NodeIterator<'_>,
- context: &Context,
-) -> Result<Vec<crate::ast::Tag>, String> {
- expect(iter, Rule::enum_tag_list)
- .and_then(|n| n.children().map(|n| parse_enum_tag(n, context)).collect())
-}
-
-fn parse_field(node: Node<'_>, context: &Context) -> Result<ast::Field, String> {
- let loc = node.as_loc(context);
- let rule = node.as_rule();
- let mut children = node.children();
- Ok(crate::ast::Field {
- loc,
- annot: Default::default(),
- desc: match rule {
- Rule::checksum_field => {
- let field_id = parse_identifier(&mut children)?;
- crate::ast::FieldDesc::Checksum { field_id }
- }
- Rule::padding_field => {
- let size = parse_integer(&mut children)?;
- crate::ast::FieldDesc::Padding { size }
- }
- Rule::size_field => {
- let field_id = match children.next() {
- Some(n) if n.as_rule() == Rule::identifier => n.as_string(),
- Some(n) if n.as_rule() == Rule::payload_identifier => n.as_string(),
- Some(n) if n.as_rule() == Rule::body_identifier => n.as_string(),
- Some(n) => err_unexpected_rule(Rule::identifier, n.as_rule())?,
- None => err_missing_rule(Rule::identifier)?,
- };
- let width = parse_integer(&mut children)?;
- crate::ast::FieldDesc::Size { field_id, width }
- }
- Rule::count_field => {
- let field_id = parse_identifier(&mut children)?;
- let width = parse_integer(&mut children)?;
- crate::ast::FieldDesc::Count { field_id, width }
- }
- Rule::elementsize_field => {
- let field_id = parse_identifier(&mut children)?;
- let width = parse_integer(&mut children)?;
- crate::ast::FieldDesc::ElementSize { field_id, width }
- }
- Rule::body_field => crate::ast::FieldDesc::Body,
- Rule::payload_field => {
- let size_modifier = parse_size_modifier_opt(&mut children);
- crate::ast::FieldDesc::Payload { size_modifier }
- }
- Rule::fixed_field => match children.next() {
- Some(n) if n.as_rule() == Rule::integer => {
- let value = n.as_usize()?;
- let width = parse_integer(&mut children)?;
- crate::ast::FieldDesc::FixedScalar { width, value }
- }
- Some(n) if n.as_rule() == Rule::identifier => {
- let tag_id = n.as_string();
- let enum_id = parse_identifier(&mut children)?;
- crate::ast::FieldDesc::FixedEnum { enum_id, tag_id }
- }
- _ => unreachable!(),
- },
- Rule::reserved_field => {
- let width = parse_integer(&mut children)?;
- crate::ast::FieldDesc::Reserved { width }
- }
- Rule::array_field => {
- let id = parse_identifier(&mut children)?;
- let (type_id, width) = parse_identifier_or_integer(&mut children)?;
- let (size, size_modifier) = match children.next() {
- Some(n) if n.as_rule() == Rule::integer => (Some(n.as_usize()?), None),
- Some(n) if n.as_rule() == Rule::size_modifier => (None, Some(n.as_string())),
- Some(n) => {
- return Err(format!(
- "expected rule {:?} or {:?}, got {:?}",
- Rule::integer,
- Rule::size_modifier,
- n.as_rule()
- ))
- }
- None => (None, None),
- };
- crate::ast::FieldDesc::Array { id, type_id, width, size, size_modifier }
- }
- Rule::scalar_field => {
- let id = parse_identifier(&mut children)?;
- let width = parse_integer(&mut children)?;
- crate::ast::FieldDesc::Scalar { id, width }
- }
- Rule::typedef_field => {
- let id = parse_identifier(&mut children)?;
- let type_id = parse_identifier(&mut children)?;
- crate::ast::FieldDesc::Typedef { id, type_id }
- }
- Rule::group_field => {
- let group_id = parse_identifier(&mut children)?;
- let constraints = parse_constraint_list_opt(&mut children, context)?;
- crate::ast::FieldDesc::Group { group_id, constraints }
- }
- _ => return Err(format!("expected rule *_field, got {:?}", rule)),
- },
- })
-}
-
-fn parse_field_list(iter: &mut NodeIterator, context: &Context) -> Result<Vec<ast::Field>, String> {
- expect(iter, Rule::field_list)
- .and_then(|n| n.children().map(|n| parse_field(n, context)).collect())
-}
-
-fn parse_field_list_opt(
- iter: &mut NodeIterator,
- context: &Context,
-) -> Result<Vec<ast::Field>, String> {
- maybe(iter, Rule::field_list)
- .map_or(Ok(vec![]), |n| n.children().map(|n| parse_field(n, context)).collect())
-}
-
-fn parse_toplevel(root: Node<'_>, context: &Context) -> Result<ast::File, String> {
- let mut toplevel_comments = vec![];
- let mut file = crate::ast::File::new(context.0);
-
- let mut comment_start = vec![];
- for token in root.clone().tokens() {
- match token {
- Token::Start { rule: Rule::COMMENT, pos } => comment_start.push(pos),
- Token::End { rule: Rule::COMMENT, pos } => {
- let start_pos = comment_start.pop().unwrap();
- file.comments.push(crate::ast::Comment {
- loc: crate::ast::SourceRange {
- file: context.0,
- start: crate::ast::SourceLocation::new(start_pos.pos(), context.1),
- end: crate::ast::SourceLocation::new(pos.pos(), context.1),
- },
- text: start_pos.span(&pos).as_str().to_owned(),
- })
- }
- _ => (),
- }
- }
-
- for node in root.children() {
- let loc = node.as_loc(context);
- let rule = node.as_rule();
- match rule {
- Rule::endianness_declaration => file.endianness = parse_endianness(node, context)?,
- Rule::checksum_declaration => {
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let width = parse_integer(&mut children)?;
- let function = parse_string(&mut children)?;
- file.declarations.push(crate::ast::Decl::new(
- loc,
- crate::ast::DeclDesc::Checksum { id, function, width },
- ))
- }
- Rule::custom_field_declaration => {
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let width = parse_integer_opt(&mut children)?;
- let function = parse_string(&mut children)?;
- file.declarations.push(crate::ast::Decl::new(
- loc,
- crate::ast::DeclDesc::CustomField { id, function, width },
- ))
- }
- Rule::enum_declaration => {
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let width = parse_integer(&mut children)?;
- let tags = parse_enum_tag_list(&mut children, context)?;
- file.declarations.push(crate::ast::Decl::new(
- loc,
- crate::ast::DeclDesc::Enum { id, width, tags },
- ))
- }
- Rule::packet_declaration => {
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let parent_id = parse_identifier_opt(&mut children)?;
- let constraints = parse_constraint_list_opt(&mut children, context)?;
- let fields = parse_field_list_opt(&mut children, context)?;
- file.declarations.push(crate::ast::Decl::new(
- loc,
- crate::ast::DeclDesc::Packet { id, parent_id, constraints, fields },
- ))
- }
- Rule::struct_declaration => {
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let parent_id = parse_identifier_opt(&mut children)?;
- let constraints = parse_constraint_list_opt(&mut children, context)?;
- let fields = parse_field_list_opt(&mut children, context)?;
- file.declarations.push(crate::ast::Decl::new(
- loc,
- crate::ast::DeclDesc::Struct { id, parent_id, constraints, fields },
- ))
- }
- Rule::group_declaration => {
- let mut children = node.children();
- let id = parse_identifier(&mut children)?;
- let fields = parse_field_list(&mut children, context)?;
- file.declarations
- .push(crate::ast::Decl::new(loc, crate::ast::DeclDesc::Group { id, fields }))
- }
- Rule::test_declaration => {}
- Rule::EOI => (),
- _ => unreachable!(),
- }
- }
- file.comments.append(&mut toplevel_comments);
- Ok(file)
-}
-
-/// Parse PDL source code from a string.
-///
-/// The file is added to the compilation database under the provided
-/// name.
-pub fn parse_inline(
- sources: &mut crate::ast::SourceDatabase,
- name: String,
- source: String,
-) -> Result<ast::File, Diagnostic<crate::ast::FileId>> {
- let root = PDLParser::parse(Rule::file, &source)
- .map_err(|e| {
- Diagnostic::error()
- .with_message(format!("failed to parse input file '{}': {}", &name, e))
- })?
- .next()
- .unwrap();
- let line_starts: Vec<_> = files::line_starts(&source).collect();
- let file = sources.add(name, source.clone());
- parse_toplevel(root, &(file, &line_starts)).map_err(|e| Diagnostic::error().with_message(e))
-}
-
-/// Parse a new source file.
-///
-/// The source file is fully read and added to the compilation
-/// database. Returns the constructed AST, or a descriptive error
-/// message in case of syntax error.
-pub fn parse_file(
- sources: &mut crate::ast::SourceDatabase,
- name: String,
-) -> Result<ast::File, Diagnostic<crate::ast::FileId>> {
- let source = std::fs::read_to_string(&name).map_err(|e| {
- Diagnostic::error().with_message(format!("failed to read input file '{}': {}", &name, e))
- })?;
- parse_inline(sources, name, source)
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- #[test]
- fn endianness_is_set() {
- // The file starts out with a placeholder little-endian value.
- // This tests that we update it while parsing.
- let mut db = crate::ast::SourceDatabase::new();
- let file =
- parse_inline(&mut db, String::from("stdin"), String::from(" big_endian_packets "))
- .unwrap();
- assert_eq!(file.endianness.value, crate::ast::EndiannessValue::BigEndian);
- assert_ne!(file.endianness.loc, crate::ast::SourceRange::default());
- }
-
- #[test]
- fn test_parse_string_bare() {
- let mut pairs = PDLParser::parse(Rule::string, r#""test""#).unwrap();
-
- assert_eq!(parse_string(&mut pairs).as_deref(), Ok("test"));
- assert_eq!(pairs.next(), None, "pairs is empty");
- }
-
- #[test]
- fn test_parse_string_space() {
- let mut pairs = PDLParser::parse(Rule::string, r#""test with space""#).unwrap();
-
- assert_eq!(parse_string(&mut pairs).as_deref(), Ok("test with space"));
- assert_eq!(pairs.next(), None, "pairs is empty");
- }
-
- #[test]
- #[should_panic] /* Escaped quotes in strings are not supported */
- fn test_parse_string_escape() {
- let mut pairs = PDLParser::parse(Rule::string, r#""\"test\"""#).unwrap();
-
- assert_eq!(parse_string(&mut pairs).as_deref(), Ok(r#""test""#));
- assert_eq!(pairs.next(), None, "pairs is empty");
- }
-}
diff --git a/tools/pdl/src/pdl.pest b/tools/pdl/src/pdl.pest
deleted file mode 100644
index 06563d61b0..0000000000
--- a/tools/pdl/src/pdl.pest
+++ /dev/null
@@ -1,125 +0,0 @@
-WHITESPACE = _{ " " | "\n" }
-COMMENT = { block_comment | line_comment }
-
-block_comment = { "/*" ~ (!"*/" ~ ANY)* ~ "*/" }
-line_comment = { "//" ~ (!"\n" ~ ANY)* }
-
-alpha = { 'a'..'z' | 'A'..'Z' }
-digit = { '0'..'9' }
-hexdigit = { digit | 'a'..'f' | 'A'..'F' }
-alphanum = { alpha | digit | "_" }
-
-identifier = @{ alpha ~ alphanum* }
-payload_identifier = @{ "_payload_" }
-body_identifier = @{ "_body_" }
-intvalue = @{ digit+ }
-hexvalue = @{ ("0x"|"0X") ~ hexdigit+ }
-integer = @{ hexvalue | intvalue }
-string = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" }
-size_modifier = @{
- ("+"|"-"|"*"|"/") ~ (digit|"+"|"-"|"*"|"/")+
-}
-
-endianness_declaration = { "little_endian_packets" | "big_endian_packets" }
-
-enum_tag = { identifier ~ "=" ~ integer }
-enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
-enum_declaration = {
- "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
- enum_tag_list ~
- "}"
-}
-
-constraint = { identifier ~ "=" ~ (identifier|integer) }
-constraint_list = { constraint ~ ("," ~ constraint)* }
-
-checksum_field = { "_checksum_start_" ~ "(" ~ identifier ~ ")" }
-padding_field = { "_padding_" ~ "[" ~ integer ~ "]" }
-size_field = { "_size_" ~ "(" ~ (identifier|payload_identifier|body_identifier) ~ ")" ~ ":" ~ integer }
-count_field = { "_count_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
-elementsize_field = { "_elementsize_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
-body_field = @{ "_body_" }
-payload_field = { "_payload_" ~ (":" ~ "[" ~ size_modifier ~ "]")? }
-fixed_field = { "_fixed_" ~ "=" ~ (
- (integer ~ ":" ~ integer) |
- (identifier ~ ":" ~ identifier)
-)}
-reserved_field = { "_reserved_" ~ ":" ~ integer }
-array_field = { identifier ~ ":" ~ (integer|identifier) ~
- "[" ~ (size_modifier|integer)? ~ "]"
-}
-scalar_field = { identifier ~ ":" ~ integer }
-typedef_field = { identifier ~ ":" ~ identifier }
-group_field = { identifier ~ ("{" ~ constraint_list ~ "}")? }
-
-field = _{
- checksum_field |
- padding_field |
- size_field |
- count_field |
- elementsize_field |
- body_field |
- payload_field |
- fixed_field |
- reserved_field |
- array_field |
- scalar_field |
- typedef_field |
- group_field
-}
-field_list = { field ~ ("," ~ field)* ~ ","? }
-
-packet_declaration = {
- "packet" ~ identifier ~
- (":" ~ identifier)? ~
- ("(" ~ constraint_list ~ ")")? ~
- "{" ~
- field_list? ~
- "}"
-}
-
-struct_declaration = {
- "struct" ~ identifier ~
- (":" ~ identifier)? ~
- ("(" ~ constraint_list ~ ")")? ~
- "{" ~
- field_list? ~
- "}"
-}
-
-group_declaration = {
- "group" ~ identifier ~ "{" ~ field_list ~ "}"
-}
-
-checksum_declaration = {
- "checksum" ~ identifier ~ ":" ~ integer ~ string
-}
-
-custom_field_declaration = {
- "custom_field" ~ identifier ~ (":" ~ integer)? ~ string
-}
-
-test_case = { string }
-test_case_list = _{ test_case ~ ("," ~ test_case)* ~ ","? }
-test_declaration = {
- "test" ~ identifier ~ "{" ~
- test_case_list ~
- "}"
-}
-
-declaration = _{
- enum_declaration |
- packet_declaration |
- struct_declaration |
- group_declaration |
- checksum_declaration |
- custom_field_declaration |
- test_declaration
-}
-
-file = {
- SOI ~
- endianness_declaration ~
- declaration* ~
- EOI
-}
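
As a quick illustration of the grammar above, here is a minimal sketch of a parser test written in the style of the endianness_is_set test from parser.rs earlier in this change. It exercises the endianness_declaration, packet_declaration and scalar_field rules through parse_inline; the packet name and assertions are illustrative only and do not come from the deleted sources.

#[test]
fn packet_declaration_is_parsed() {
    // Feed an inline PDL snippet through the pest grammar via parse_inline.
    let mut db = crate::ast::SourceDatabase::new();
    let file = parse_inline(
        &mut db,
        String::from("stdin"),
        String::from("little_endian_packets packet Ping { opcode: 8 }"),
    )
    .unwrap();
    // The single packet declaration is collected into the file AST,
    // and the endianness marker is recorded.
    assert_eq!(file.declarations.len(), 1);
    assert_eq!(file.endianness.value, crate::ast::EndiannessValue::LittleEndian);
}
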
diff --git a/tools/pdl/src/test_utils.rs b/tools/pdl/src/test_utils.rs
deleted file mode 100644
index 13a45fd4fa..0000000000
--- a/tools/pdl/src/test_utils.rs
+++ /dev/null
@@ -1,170 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//! Various utility functions used in tests.
-
-// This file is included directly into integration tests in the
-// `tests/` directory. These tests are compiled without access to the
-// rest of the `pdl` crate. To make this work, avoid `use crate::`
-// statements below.
-
-use std::fs;
-use std::io::Write;
-use std::path::Path;
-use std::process::Command;
-use tempfile::NamedTempFile;
-
-/// Format Rust code in `input`.
-pub fn format_rust(input: &str) -> String {
- let syntax_tree = syn::parse_file(input).unwrap_or_else(|_| panic!("Could not parse {input:#?} as Rust code"));
- let formatted = prettyplease::unparse(&syntax_tree);
- format!("#![rustfmt::skip]\n{formatted}")
-}
-
-/// Find the unified diff between two strings using `diff`.
-///
-/// # Panics
-///
-/// Panics if `diff` cannot be found on `$PATH` or if it returns an
-/// error.
-pub fn diff(left_label: &str, left: &str, right_label: &str, right: &str) -> String {
- let mut temp_left = NamedTempFile::new().unwrap();
- temp_left.write_all(left.as_bytes()).unwrap();
- let mut temp_right = NamedTempFile::new().unwrap();
- temp_right.write_all(right.as_bytes()).unwrap();
-
- // We expect `diff` to be available on PATH.
- let output = Command::new("diff")
- .arg("--unified")
- .arg("--color=always")
- .arg("--label")
- .arg(left_label)
- .arg("--label")
- .arg(right_label)
- .arg(temp_left.path())
- .arg(temp_right.path())
- .output()
- .expect("failed to run diff");
- let diff_trouble_exit_code = 2; // from diff(1)
- assert_ne!(
- output.status.code().unwrap(),
- diff_trouble_exit_code,
- "diff failed: {}",
- output.status
- );
- String::from_utf8(output.stdout).expect("diff output was not UTF-8")
-}
-
-/// Compare two strings and output a diff if they are not equal.
-#[track_caller]
-pub fn assert_eq_with_diff(left_label: &str, left: &str, right_label: &str, right: &str) {
- assert!(
- left == right,
- "texts did not match, diff:\n{}\n",
- diff(left_label, left, right_label, right)
- );
-}
-
-/// Check that `haystack` contains `needle`.
-///
-/// Panic with a nice message if not.
-#[track_caller]
-pub fn assert_contains(haystack: &str, needle: &str) {
- assert!(haystack.contains(needle), "Could not find {:?} in {:?}", needle, haystack);
-}
-
-/// Compare a string with a snapshot file.
-///
-/// The `snapshot_path` is relative to the current working directory
-/// of the test binary. This depends on how you execute the tests:
-///
-/// * When using `atest`: The current working directory is a random
-/// temporary directory. You need to ensure that the snapshot file
-/// is installed into this directory. You do this by adding the
-/// snapshot to the `data` attribute of your test rule.
-///
-/// * When using Cargo: The current working directory is set to
-/// `CARGO_MANIFEST_DIR`, which is where the `Cargo.toml` file is
-/// found.
-///
-/// If you run the test with Cargo and the `UPDATE_SNAPSHOTS`
-/// environment variable is set, then the `actual_content` will be
-/// written to `snapshot_path`. Otherwise the content is compared and
-/// a panic is triggered if they differ.
-#[track_caller]
-pub fn assert_snapshot_eq<P: AsRef<Path>>(snapshot_path: P, actual_content: &str) {
- let update_snapshots = std::env::var("UPDATE_SNAPSHOTS").is_ok();
- let snapshot = snapshot_path.as_ref();
- let snapshot_content = match fs::read(snapshot) {
- Ok(content) => content,
- Err(_) if update_snapshots => Vec::new(),
- Err(err) => panic!("Could not read snapshot from {}: {}", snapshot.display(), err),
- };
- let snapshot_content = String::from_utf8(snapshot_content).expect("Snapshot was not UTF-8");
-
- // Normal comparison if UPDATE_SNAPSHOTS is unset.
- if !update_snapshots {
- return assert_eq_with_diff(
- snapshot.to_str().unwrap(),
- &snapshot_content,
- "actual",
- actual_content,
- );
- }
-
- // Bail out if we are not using Cargo.
- if std::env::var("CARGO_MANIFEST_DIR").is_err() {
- panic!("Please unset UPDATE_SNAPSHOTS if you are not using Cargo");
- }
-
- if actual_content != snapshot_content {
- eprintln!(
- "Updating snapshot {}: {} -> {} bytes",
- snapshot.display(),
- snapshot_content.len(),
- actual_content.len()
- );
- fs::write(&snapshot_path, actual_content).unwrap_or_else(|err| {
- panic!("Could not write snapshot to {}: {}", snapshot.display(), err)
- });
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_diff_labels_with_special_chars() {
- // Check that special characters in labels are passed
- // correctly to diff.
- let patch = diff("left 'file'", "foo\nbar\n", "right ~file!", "foo\nnew line\nbar\n");
- assert_contains(&patch, "left 'file'");
- assert_contains(&patch, "right ~file!");
- }
-
- #[test]
- #[should_panic]
- fn test_assert_eq_with_diff_on_diff() {
- // We use identical labels to check that we haven't
- // accidentally mixed up the labels with the file content.
- assert_eq_with_diff("", "foo\nbar\n", "", "foo\nnew line\nbar\n");
- }
-
- #[test]
- fn test_assert_eq_with_diff_on_eq() {
- // No panic when there is no diff.
- assert_eq_with_diff("left", "foo\nbar\n", "right", "foo\nbar\n");
- }
-}
diff --git a/tools/pdl/src/utils.rs b/tools/pdl/src/utils.rs
deleted file mode 100644
index 0e64250266..0000000000
--- a/tools/pdl/src/utils.rs
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/// Placeholder implementation of Vec::drain_filter.
-/// The feature drain_filter is currently unstable.
-pub fn drain_filter<T, F>(input: &mut Vec<T>, predicate: F) -> Vec<T>
-where
- F: Fn(&T) -> bool,
-{
- // Pass 1: count the elements that will be kept (those not matching the predicate).
- let mut total_left_count = 0;
- for element in input.iter() {
- total_left_count += !predicate(element) as usize;
- }
- // Pass 2: compute the final position of each element in the input
- // array in order to position left elements first and drained elements
- // last, preserving the order.
- let mut rank = Vec::with_capacity(input.len());
- let mut left_count = 0;
- let mut removed_count = 0;
- for element in input.iter() {
- if predicate(element) {
- rank.push(total_left_count + removed_count);
- removed_count += 1;
- } else {
- rank.push(left_count);
- left_count += 1;
- }
- }
- // Pass 3: swap the elements to their final position.
- let mut n = 0;
- while n < input.len() {
- let rank_n = rank[n];
- if n != rank_n {
- input.swap(n, rank_n);
- rank.swap(n, rank_n);
- } else {
- n += 1;
- }
- }
- // Finally: split the removed elements off the end of the input vector.
- input.split_off(total_left_count)
-}
-
-#[cfg(test)]
-mod test {
- use crate::utils::drain_filter;
-
- #[test]
- fn test_drain_filter() {
- let mut input = vec![1, 4, 2, 5, 3, 6, 7];
- let drained = drain_filter(&mut input, |element| *element > 3);
- assert_eq!(input, vec![1, 2, 3]);
- assert_eq!(drained, vec![4, 5, 6, 7]);
- }
-}
diff --git a/tools/pdl/tests/canonical/be_test_vectors.json b/tools/pdl/tests/canonical/be_test_vectors.json
deleted file mode 100644
index e03357ed71..0000000000
--- a/tools/pdl/tests/canonical/be_test_vectors.json
+++ /dev/null
@@ -1,4271 +0,0 @@
-[
- {
- "packet": "Packet_Scalar_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "c": 0
- }
- },
- {
- "packed": "ffffffffffffff80",
- "unpacked": {
- "a": 0,
- "c": 144115188075855871
- }
- },
- {
- "packed": "0081018202830380",
- "unpacked": {
- "a": 0,
- "c": 283686952306183
- }
- },
- {
- "packed": "000000000000007f",
- "unpacked": {
- "a": 127,
- "c": 0
- }
- },
- {
- "packed": "ffffffffffffffff",
- "unpacked": {
- "a": 127,
- "c": 144115188075855871
- }
- },
- {
- "packed": "00810182028303ff",
- "unpacked": {
- "a": 127,
- "c": 283686952306183
- }
- },
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "c": 0
- }
- },
- {
- "packed": "ffffffffffffff80",
- "unpacked": {
- "a": 0,
- "c": 144115188075855871
- }
- },
- {
- "packed": "0081018202830380",
- "unpacked": {
- "a": 0,
- "c": 283686952306183
- }
- }
- ]
- },
- {
- "packet": "Packet_Enum_Field",
- "tests": [
- {
- "packed": "0000000000000001",
- "unpacked": {
- "a": 1,
- "c": 0
- }
- },
- {
- "packed": "ffffffffffffff81",
- "unpacked": {
- "a": 1,
- "c": 144115188075855871
- }
- },
- {
- "packed": "08090a0b0c0d0e81",
- "unpacked": {
- "a": 1,
- "c": 4523477106694685
- }
- },
- {
- "packed": "0000000000000002",
- "unpacked": {
- "a": 2,
- "c": 0
- }
- },
- {
- "packed": "ffffffffffffff82",
- "unpacked": {
- "a": 2,
- "c": 144115188075855871
- }
- },
- {
- "packed": "08090a0b0c0d0e82",
- "unpacked": {
- "a": 2,
- "c": 4523477106694685
- }
- }
- ]
- },
- {
- "packet": "Packet_Reserved_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "c": 0
- }
- },
- {
- "packed": "fffffffffffffe00",
- "unpacked": {
- "a": 0,
- "c": 36028797018963967
- }
- },
- {
- "packed": "1011121314152c00",
- "unpacked": {
- "a": 0,
- "c": 2261184477268630
- }
- },
- {
- "packed": "000000000000007f",
- "unpacked": {
- "a": 127,
- "c": 0
- }
- },
- {
- "packed": "fffffffffffffe7f",
- "unpacked": {
- "a": 127,
- "c": 36028797018963967
- }
- },
- {
- "packed": "1011121314152c7f",
- "unpacked": {
- "a": 127,
- "c": 2261184477268630
- }
- },
- {
- "packed": "0000000000000007",
- "unpacked": {
- "a": 7,
- "c": 0
- }
- },
- {
- "packed": "fffffffffffffe07",
- "unpacked": {
- "a": 7,
- "c": 36028797018963967
- }
- },
- {
- "packed": "1011121314152c07",
- "unpacked": {
- "a": 7,
- "c": 2261184477268630
- }
- }
- ]
- },
- {
- "packet": "Packet_Size_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "b": []
- }
- },
- {
- "packed": "00000000000000071f102122232425",
- "unpacked": {
- "a": 0,
- "b": [
- 31,
- 16,
- 33,
- 34,
- 35,
- 36,
- 37
- ]
- }
- },
- {
- "packed": "fffffffffffffff8",
- "unpacked": {
- "a": 2305843009213693951,
- "b": []
- }
- },
- {
- "packed": "ffffffffffffffff1f102122232425",
- "unpacked": {
- "a": 2305843009213693951,
- "b": [
- 31,
- 16,
- 33,
- 34,
- 35,
- 36,
- 37
- ]
- }
- },
- {
- "packed": "0b8c0c8d0d8e0ef0",
- "unpacked": {
- "a": 104006728889254366,
- "b": []
- }
- },
- {
- "packed": "0b8c0c8d0d8e0ef71f102122232425",
- "unpacked": {
- "a": 104006728889254366,
- "b": [
- 31,
- 16,
- 33,
- 34,
- 35,
- 36,
- 37
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Count_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "b": []
- }
- },
- {
- "packed": "00000000000000072c2f2e31303332",
- "unpacked": {
- "a": 0,
- "b": [
- 44,
- 47,
- 46,
- 49,
- 48,
- 51,
- 50
- ]
- }
- },
- {
- "packed": "fffffffffffffff8",
- "unpacked": {
- "a": 2305843009213693951,
- "b": []
- }
- },
- {
- "packed": "ffffffffffffffff2c2f2e31303332",
- "unpacked": {
- "a": 2305843009213693951,
- "b": [
- 44,
- 47,
- 46,
- 49,
- 48,
- 51,
- 50
- ]
- }
- },
- {
- "packed": "2262728292a2b2c8",
- "unpacked": {
- "a": 309708581267330649,
- "b": []
- }
- },
- {
- "packed": "2262728292a2b2cf2c2f2e31303332",
- "unpacked": {
- "a": 309708581267330649,
- "b": [
- 44,
- 47,
- 46,
- 49,
- 48,
- 51,
- 50
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_FixedScalar_Field",
- "tests": [
- {
- "packed": "0000000000000007",
- "unpacked": {
- "b": 0
- }
- },
- {
- "packed": "ffffffffffffff87",
- "unpacked": {
- "b": 144115188075855871
- }
- },
- {
- "packed": "346a6c6e70727587",
- "unpacked": {
- "b": 29507425461658859
- }
- }
- ]
- },
- {
- "packet": "Packet_FixedEnum_Field",
- "tests": [
- {
- "packed": "0000000000000001",
- "unpacked": {
- "b": 0
- }
- },
- {
- "packed": "ffffffffffffff81",
- "unpacked": {
- "b": 144115188075855871
- }
- },
- {
- "packed": "38f0f4f8fd010501",
- "unpacked": {
- "b": 32055067271627274
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "payload": []
- }
- },
- {
- "packed": "0743444546474049",
- "unpacked": {
- "payload": [
- 67,
- 68,
- 69,
- 70,
- 71,
- 64,
- 73
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_SizeModifier",
- "tests": [
- {
- "packed": "02",
- "unpacked": {
- "payload": []
- }
- },
- {
- "packed": "074a4b4c4d4e",
- "unpacked": {
- "payload": [
- 74,
- 75,
- 76,
- 77,
- 78
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_UnknownSize",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "payload": [],
- "a": 0
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "payload": [],
- "a": 65535
- }
- },
- {
- "packed": "52a5",
- "unpacked": {
- "payload": [],
- "a": 21157
- }
- },
- {
- "packed": "4f485152530000",
- "unpacked": {
- "payload": [
- 79,
- 72,
- 81,
- 82,
- 83
- ],
- "a": 0
- }
- },
- {
- "packed": "4f48515253ffff",
- "unpacked": {
- "payload": [
- 79,
- 72,
- 81,
- 82,
- 83
- ],
- "a": 65535
- }
- },
- {
- "packed": "4f4851525352a5",
- "unpacked": {
- "payload": [
- 79,
- 72,
- 81,
- 82,
- 83
- ],
- "a": 21157
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_UnknownSize_Terminal",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": 0,
- "payload": []
- }
- },
- {
- "packed": "000050595a5b5c",
- "unpacked": {
- "a": 0,
- "payload": [
- 80,
- 89,
- 90,
- 91,
- 92
- ]
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "a": 65535,
- "payload": []
- }
- },
- {
- "packed": "ffff50595a5b5c",
- "unpacked": {
- "a": 65535,
- "payload": [
- 80,
- 89,
- 90,
- 91,
- 92
- ]
- }
- },
- {
- "packed": "52b7",
- "unpacked": {
- "a": 21175,
- "payload": []
- }
- },
- {
- "packed": "52b750595a5b5c",
- "unpacked": {
- "a": 21175,
- "payload": [
- 80,
- 89,
- 90,
- 91,
- 92
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Body_Field_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "payload": []
- }
- },
- {
- "packed": "075d5e5f58616263",
- "unpacked": {
- "payload": [
- 93,
- 94,
- 95,
- 88,
- 97,
- 98,
- 99
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Body_Field_UnknownSize",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "payload": [],
- "a": 0
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "payload": [],
- "a": 65535
- }
- },
- {
- "packed": "6b4a",
- "unpacked": {
- "payload": [],
- "a": 27466
- }
- },
- {
- "packed": "64656667600000",
- "unpacked": {
- "payload": [
- 100,
- 101,
- 102,
- 103,
- 96
- ],
- "a": 0
- }
- },
- {
- "packed": "6465666760ffff",
- "unpacked": {
- "payload": [
- 100,
- 101,
- 102,
- 103,
- 96
- ],
- "a": 65535
- }
- },
- {
- "packed": "64656667606b4a",
- "unpacked": {
- "payload": [
- 100,
- 101,
- 102,
- 103,
- 96
- ],
- "a": 27466
- }
- }
- ]
- },
- {
- "packet": "Packet_Body_Field_UnknownSize_Terminal",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": 0,
- "payload": []
- }
- },
- {
- "packed": "00006d6e6f6871",
- "unpacked": {
- "a": 0,
- "payload": [
- 109,
- 110,
- 111,
- 104,
- 113
- ]
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "a": 65535,
- "payload": []
- }
- },
- {
- "packed": "ffff6d6e6f6871",
- "unpacked": {
- "a": 65535,
- "payload": [
- 109,
- 110,
- 111,
- 104,
- 113
- ]
- }
- },
- {
- "packed": "6b5c",
- "unpacked": {
- "a": 27484,
- "payload": []
- }
- },
- {
- "packed": "6b5c6d6e6f6871",
- "unpacked": {
- "a": 27484,
- "payload": [
- 109,
- 110,
- 111,
- 104,
- 113
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_ScalarGroup_Field",
- "tests": [
- {
- "packed": "002a",
- "unpacked": {}
- }
- ]
- },
- {
- "packet": "Packet_EnumGroup_Field",
- "tests": [
- {
- "packed": "aabb",
- "unpacked": {}
- }
- ]
- },
- {
- "packet": "Packet_Checksum_Field_FromStart",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "a": 0,
- "b": 0,
- "crc": 0
- }
- },
- {
- "packed": "0000fffffe",
- "unpacked": {
- "a": 0,
- "b": 65535,
- "crc": 254
- }
- },
- {
- "packed": "000073a518",
- "unpacked": {
- "a": 0,
- "b": 29605,
- "crc": 24
- }
- },
- {
- "packed": "ffff0000fe",
- "unpacked": {
- "a": 65535,
- "b": 0,
- "crc": 254
- }
- },
- {
- "packed": "fffffffffc",
- "unpacked": {
- "a": 65535,
- "b": 65535,
- "crc": 252
- }
- },
- {
- "packed": "ffff73a516",
- "unpacked": {
- "a": 65535,
- "b": 29605,
- "crc": 22
- }
- },
- {
- "packed": "7393000006",
- "unpacked": {
- "a": 29587,
- "b": 0,
- "crc": 6
- }
- },
- {
- "packed": "7393ffff04",
- "unpacked": {
- "a": 29587,
- "b": 65535,
- "crc": 4
- }
- },
- {
- "packed": "739373a51e",
- "unpacked": {
- "a": 29587,
- "b": 29605,
- "crc": 30
- }
- }
- ]
- },
- {
- "packet": "Packet_Checksum_Field_FromEnd",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 0
- }
- },
- {
- "packed": "000000ffff",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 65535
- }
- },
- {
- "packed": "0000007bee",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 31726
- }
- },
- {
- "packed": "00ffff0000",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 0
- }
- },
- {
- "packed": "00ffffffff",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 65535
- }
- },
- {
- "packed": "00ffff7bee",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 31726
- }
- },
- {
- "packed": "007bdc0000",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 31708,
- "b": 0
- }
- },
- {
- "packed": "007bdcffff",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 31708,
- "b": 65535
- }
- },
- {
- "packed": "007bdc7bee",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 31708,
- "b": 31726
- }
- },
- {
- "packed": "767770797a5000000000",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 0,
- "b": 0
- }
- },
- {
- "packed": "767770797a500000ffff",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 0,
- "b": 65535
- }
- },
- {
- "packed": "767770797a5000007bee",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 0,
- "b": 31726
- }
- },
- {
- "packed": "767770797a50ffff0000",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 65535,
- "b": 0
- }
- },
- {
- "packed": "767770797a50ffffffff",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 65535,
- "b": 65535
- }
- },
- {
- "packed": "767770797a50ffff7bee",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 65535,
- "b": 31726
- }
- },
- {
- "packed": "767770797a507bdc0000",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 31708,
- "b": 0
- }
- },
- {
- "packed": "767770797a507bdcffff",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 31708,
- "b": 65535
- }
- },
- {
- "packed": "767770797a507bdc7bee",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 31708,
- "b": 31726
- }
- }
- ]
- },
- {
- "packet": "Packet_Struct_Field",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "0003788182",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": [
- 120,
- 129,
- 130
- ]
- }
- }
- },
- {
- "packed": "ff00",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "ff03788182",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": [
- 120,
- 129,
- 130
- ]
- }
- }
- },
- {
- "packed": "7f00",
- "unpacked": {
- "a": {
- "a": 127
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "7f03788182",
- "unpacked": {
- "a": {
- "a": 127
- },
- "b": {
- "array": [
- 120,
- 129,
- 130
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_ConstantSize",
- "tests": [
- {
- "packed": "83848586",
- "unpacked": {
- "array": [
- 131,
- 132,
- 133,
- 134
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0f8780898a8b8c8d8e8f889192939495",
- "unpacked": {
- "array": [
- 135,
- 128,
- 137,
- 138,
- 139,
- 140,
- 141,
- 142,
- 143,
- 136,
- 145,
- 146,
- 147,
- 148,
- 149
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0f969790999a9b9c9d9e9f98a1a2a3a4",
- "unpacked": {
- "array": [
- 150,
- 151,
- 144,
- 153,
- 154,
- 155,
- 156,
- 157,
- 158,
- 159,
- 152,
- 161,
- 162,
- 163,
- 164
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "a5a6a7",
- "unpacked": {
- "array": [
- 165,
- 166,
- 167
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_ConstantSize",
- "tests": [
- {
- "packed": "a541ad53ad65ad77",
- "unpacked": {
- "array": [
- 42305,
- 44371,
- 44389,
- 44407
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0ead81b593b5a5b5b7b5c1bdd3bde5",
- "unpacked": {
- "array": [
- 44417,
- 46483,
- 46501,
- 46519,
- 46529,
- 48595,
- 48613
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0fbdf7be01c613c625c637c641ce53ce65ce77ce81d693d6a5d6b7d6c1ded3",
- "unpacked": {
- "array": [
- 48631,
- 48641,
- 50707,
- 50725,
- 50743,
- 50753,
- 52819,
- 52837,
- 52855,
- 52865,
- 54931,
- 54949,
- 54967,
- 54977,
- 57043
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "dee5def7df01",
- "unpacked": {
- "array": [
- 57061,
- 57079,
- 57089
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_ConstantSize",
- "tests": [
- {
- "packed": "aabbccddaabbccdd",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_VariableSize",
- "tests": [
- {
- "packed": "0eaabbccddaabbccddaabbccddaabb",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_VariableCount",
- "tests": [
- {
- "packed": "0faabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabb",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_UnknownSize",
- "tests": [
- {
- "packed": "aabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccdd",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- },
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_ConstantSize",
- "tests": [
- {
- "packed": "00ffe200",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 226
- },
- {
- "a": 0
- }
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f00ffe400ffe500ffe600ffe700ffe0",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 228
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 229
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 230
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 231
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 224
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00ffea00ffeb00ffec00ffed00ffee",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 234
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 235
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 236
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 237
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 238
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_UnknownSize",
- "tests": [
- {
- "packed": "00ffe800fff100fff200fff300fff400fff500fff600fff700fff000fff900ff",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 232
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 241
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 242
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 243
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 244
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 245
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 246
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 247
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 240
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 249
- },
- {
- "a": 0
- },
- {
- "a": 255
- }
- ]
- }
- },
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_ConstantSize",
- "tests": [
- {
- "packed": "0003fbfcfd0003fef801",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 251,
- 252,
- 253
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 254,
- 248,
- 1
- ]
- }
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f0003050607000300090a00030b0c0d",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 5,
- 6,
- 7
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 0,
- 9,
- 10
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 11,
- 12,
- 13
- ]
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00031112130003141516000317101900031a1b1c00031d1e1f0003182122000323242500",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 17,
- 18,
- 19
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 20,
- 21,
- 22
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 23,
- 16,
- 25
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 26,
- 27,
- 28
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 29,
- 30,
- 31
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 24,
- 33,
- 34
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 35,
- 36,
- 37
- ]
- },
- {
- "array": []
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_UnknownSize",
- "tests": [
- {
- "packed": "0003292a2b00032c2d2e00032f283100033233340003353637000330393a00033b3c3d00033e3f3800034142430003444546000347404900034a4b4c00034d4e4f000348515200035354550003565750",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 41,
- 42,
- 43
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 44,
- 45,
- 46
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 47,
- 40,
- 49
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 50,
- 51,
- 52
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 53,
- 54,
- 55
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 48,
- 57,
- 58
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 59,
- 60,
- 61
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 62,
- 63,
- 56
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 65,
- 66,
- 67
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 68,
- 69,
- 70
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 71,
- 64,
- 73
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 74,
- 75,
- 76
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 77,
- 78,
- 79
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 72,
- 81,
- 82
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 83,
- 84,
- 85
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 86,
- 87,
- 80
- ]
- }
- ]
- }
- },
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_SizeModifier",
- "tests": [
- {
- "packed": "0d00035c5d5e00035f586100",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 92,
- 93,
- 94
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 95,
- 88,
- 97
- ]
- },
- {
- "array": []
- }
- ]
- }
- },
- {
- "packed": "02",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_VariableSize_Padded",
- "tests": [
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0e632e63386b4a6b5c6b6e6b78738a0000",
- "unpacked": {
- "array": [
- 25390,
- 25400,
- 27466,
- 27484,
- 27502,
- 27512,
- 29578
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_VariableCount_Padded",
- "tests": [
- {
- "packed": "07000373747500037677700003797a7b00",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 115,
- 116,
- 117
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 118,
- 119,
- 112
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 121,
- 122,
- 123
- ]
- },
- {
- "array": []
- }
- ]
- }
- },
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "ScalarParent",
- "tests": [
- {
- "packed": "000100",
- "unpacked": {
- "a": 0,
- "b": 0
- },
- "packet": "ScalarChild_A"
- },
- {
- "packed": "0001ff",
- "unpacked": {
- "a": 0,
- "b": 255
- },
- "packet": "ScalarChild_A"
- },
- {
- "packed": "00017f",
- "unpacked": {
- "a": 0,
- "b": 127
- },
- "packet": "ScalarChild_A"
- },
- {
- "packed": "01020000",
- "unpacked": {
- "a": 1,
- "c": 0
- },
- "packet": "ScalarChild_B"
- },
- {
- "packed": "0102ffff",
- "unpacked": {
- "a": 1,
- "c": 65535
- },
- "packet": "ScalarChild_B"
- },
- {
- "packed": "01027c01",
- "unpacked": {
- "a": 1,
- "c": 31745
- },
- "packet": "ScalarChild_B"
- },
- {
- "packed": "020100",
- "unpacked": {
- "a": 2,
- "b": 0
- },
- "packet": "AliasedChild_A"
- },
- {
- "packed": "0201ff",
- "unpacked": {
- "a": 2,
- "b": 255
- },
- "packet": "AliasedChild_A"
- },
- {
- "packed": "020185",
- "unpacked": {
- "a": 2,
- "b": 133
- },
- "packet": "AliasedChild_A"
- },
- {
- "packed": "03020000",
- "unpacked": {
- "a": 3,
- "c": 0
- },
- "packet": "AliasedChild_B"
- },
- {
- "packed": "0302ffff",
- "unpacked": {
- "a": 3,
- "c": 65535
- },
- "packet": "AliasedChild_B"
- },
- {
- "packed": "03028437",
- "unpacked": {
- "a": 3,
- "c": 33847
- },
- "packet": "AliasedChild_B"
- }
- ]
- },
- {
- "packet": "EnumParent",
- "tests": [
- {
- "packed": "aabb0100",
- "unpacked": {
- "a": 43707,
- "b": 0
- },
- "packet": "EnumChild_A"
- },
- {
- "packed": "aabb01ff",
- "unpacked": {
- "a": 43707,
- "b": 255
- },
- "packet": "EnumChild_A"
- },
- {
- "packed": "aabb0182",
- "unpacked": {
- "a": 43707,
- "b": 130
- },
- "packet": "EnumChild_A"
- },
- {
- "packed": "ccdd020000",
- "unpacked": {
- "a": 52445,
- "c": 0
- },
- "packet": "EnumChild_B"
- },
- {
- "packed": "ccdd02ffff",
- "unpacked": {
- "a": 52445,
- "c": 65535
- },
- "packet": "EnumChild_B"
- },
- {
- "packed": "ccdd02841c",
- "unpacked": {
- "a": 52445,
- "c": 33820
- },
- "packet": "EnumChild_B"
- }
- ]
- },
- {
- "packet": "Struct_Enum_Field",
- "tests": [
- {
- "packed": "0000000000000001",
- "unpacked": {
- "s": {
- "a": 1,
- "c": 0
- }
- }
- },
- {
- "packed": "ffffffffffffff81",
- "unpacked": {
- "s": {
- "a": 1,
- "c": 144115188075855871
- }
- }
- },
- {
- "packed": "84444c545c646f01",
- "unpacked": {
- "s": {
- "a": 1,
- "c": 74459583098702046
- }
- }
- },
- {
- "packed": "0000000000000002",
- "unpacked": {
- "s": {
- "a": 2,
- "c": 0
- }
- }
- },
- {
- "packed": "ffffffffffffff82",
- "unpacked": {
- "s": {
- "a": 2,
- "c": 144115188075855871
- }
- }
- },
- {
- "packed": "84444c545c646f02",
- "unpacked": {
- "s": {
- "a": 2,
- "c": 74459583098702046
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Reserved_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "c": 0
- }
- }
- },
- {
- "packed": "fffffffffffffe00",
- "unpacked": {
- "s": {
- "a": 0,
- "c": 36028797018963967
- }
- }
- },
- {
- "packed": "8c848c949ca4ac00",
- "unpacked": {
- "s": {
- "a": 0,
- "c": 19776118031536726
- }
- }
- },
- {
- "packed": "000000000000007f",
- "unpacked": {
- "s": {
- "a": 127,
- "c": 0
- }
- }
- },
- {
- "packed": "fffffffffffffe7f",
- "unpacked": {
- "s": {
- "a": 127,
- "c": 36028797018963967
- }
- }
- },
- {
- "packed": "8c848c949ca4ac7f",
- "unpacked": {
- "s": {
- "a": 127,
- "c": 19776118031536726
- }
- }
- },
- {
- "packed": "0000000000000047",
- "unpacked": {
- "s": {
- "a": 71,
- "c": 0
- }
- }
- },
- {
- "packed": "fffffffffffffe47",
- "unpacked": {
- "s": {
- "a": 71,
- "c": 36028797018963967
- }
- }
- },
- {
- "packed": "8c848c949ca4ac47",
- "unpacked": {
- "s": {
- "a": 71,
- "c": 19776118031536726
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Size_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "b": []
- }
- }
- },
- {
- "packed": "00000000000000079e9fa0a1a2a3a4",
- "unpacked": {
- "s": {
- "a": 0,
- "b": [
- 158,
- 159,
- 160,
- 161,
- 162,
- 163,
- 164
- ]
- }
- }
- },
- {
- "packed": "fffffffffffffff8",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": []
- }
- }
- },
- {
- "packed": "ffffffffffffffff9e9fa0a1a2a3a4",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": [
- 158,
- 159,
- 160,
- 161,
- 162,
- 163,
- 164
- ]
- }
- }
- },
- {
- "packed": "965e62666a6e70e8",
- "unpacked": {
- "s": {
- "a": 1354400743188975133,
- "b": []
- }
- }
- },
- {
- "packed": "965e62666a6e70ef9e9fa0a1a2a3a4",
- "unpacked": {
- "s": {
- "a": 1354400743188975133,
- "b": [
- 158,
- 159,
- 160,
- 161,
- 162,
- 163,
- 164
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Count_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "b": []
- }
- }
- },
- {
- "packed": "0000000000000007adaeafa0b1b2b3",
- "unpacked": {
- "s": {
- "a": 0,
- "b": [
- 173,
- 174,
- 175,
- 160,
- 177,
- 178,
- 179
- ]
- }
- }
- },
- {
- "packed": "fffffffffffffff8",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": []
- }
- }
- },
- {
- "packed": "ffffffffffffffffadaeafa0b1b2b3",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": [
- 173,
- 174,
- 175,
- 160,
- 177,
- 178,
- 179
- ]
- }
- }
- },
- {
- "packed": "d2d353d454d555e0",
- "unpacked": {
- "s": {
- "a": 1898947267434031804,
- "b": []
- }
- }
- },
- {
- "packed": "d2d353d454d555e7adaeafa0b1b2b3",
- "unpacked": {
- "s": {
- "a": 1898947267434031804,
- "b": [
- 173,
- 174,
- 175,
- 160,
- 177,
- 178,
- 179
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_FixedScalar_Field",
- "tests": [
- {
- "packed": "0000000000000007",
- "unpacked": {
- "s": {
- "b": 0
- }
- }
- },
- {
- "packed": "ffffffffffffff87",
- "unpacked": {
- "s": {
- "b": 144115188075855871
- }
- }
- },
- {
- "packed": "bb4b5b6b7b8b9d07",
- "unpacked": {
- "s": {
- "b": 105437353324517178
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_FixedEnum_Field",
- "tests": [
- {
- "packed": "0000000000000001",
- "unpacked": {
- "s": {
- "b": 0
- }
- }
- },
- {
- "packed": "ffffffffffffff81",
- "unpacked": {
- "s": {
- "b": 144115188075855871
- }
- }
- },
- {
- "packed": "b77797b7d7f80081",
- "unpacked": {
- "s": {
- "b": 103282828492402689
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_ScalarGroup_Field",
- "tests": [
- {
- "packed": "002a",
- "unpacked": {
- "s": {}
- }
- }
- ]
- },
- {
- "packet": "Struct_EnumGroup_Field",
- "tests": [
- {
- "packed": "aabb",
- "unpacked": {
- "s": {}
- }
- }
- ]
- },
- {
- "packet": "Struct_Checksum_Field_FromStart",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "b": 0,
- "crc": 0
- }
- }
- },
- {
- "packed": "0000fffffe",
- "unpacked": {
- "s": {
- "a": 0,
- "b": 65535,
- "crc": 254
- }
- }
- },
- {
- "packed": "0000f105f6",
- "unpacked": {
- "s": {
- "a": 0,
- "b": 61701,
- "crc": 246
- }
- }
- },
- {
- "packed": "ffff0000fe",
- "unpacked": {
- "s": {
- "a": 65535,
- "b": 0,
- "crc": 254
- }
- }
- },
- {
- "packed": "fffffffffc",
- "unpacked": {
- "s": {
- "a": 65535,
- "b": 65535,
- "crc": 252
- }
- }
- },
- {
- "packed": "fffff105f4",
- "unpacked": {
- "s": {
- "a": 65535,
- "b": 61701,
- "crc": 244
- }
- }
- },
- {
- "packed": "f083000073",
- "unpacked": {
- "s": {
- "a": 61571,
- "b": 0,
- "crc": 115
- }
- }
- },
- {
- "packed": "f083ffff71",
- "unpacked": {
- "s": {
- "a": 61571,
- "b": 65535,
- "crc": 113
- }
- }
- },
- {
- "packed": "f083f10569",
- "unpacked": {
- "s": {
- "a": 61571,
- "b": 61701,
- "crc": 105
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Checksum_Field_FromEnd",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 0
- }
- }
- },
- {
- "packed": "000000ffff",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 65535
- }
- }
- },
- {
- "packed": "000000f34e",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 62286
- }
- }
- },
- {
- "packed": "00ffff0000",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 0
- }
- }
- },
- {
- "packed": "00ffffffff",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 65535
- }
- }
- },
- {
- "packed": "00fffff34e",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 62286
- }
- }
- },
- {
- "packed": "00f2cc0000",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 62156,
- "b": 0
- }
- }
- },
- {
- "packed": "00f2ccffff",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 62156,
- "b": 65535
- }
- }
- },
- {
- "packed": "00f2ccf34e",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 62156,
- "b": 62286
- }
- }
- },
- {
- "packed": "c6c7c8c9cae800000000",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 0,
- "b": 0
- }
- }
- },
- {
- "packed": "c6c7c8c9cae80000ffff",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 0,
- "b": 65535
- }
- }
- },
- {
- "packed": "c6c7c8c9cae80000f34e",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 0,
- "b": 62286
- }
- }
- },
- {
- "packed": "c6c7c8c9cae8ffff0000",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 65535,
- "b": 0
- }
- }
- },
- {
- "packed": "c6c7c8c9cae8ffffffff",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 65535,
- "b": 65535
- }
- }
- },
- {
- "packed": "c6c7c8c9cae8fffff34e",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 65535,
- "b": 62286
- }
- }
- },
- {
- "packed": "c6c7c8c9cae8f2cc0000",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 62156,
- "b": 0
- }
- }
- },
- {
- "packed": "c6c7c8c9cae8f2ccffff",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 62156,
- "b": 65535
- }
- }
- },
- {
- "packed": "c6c7c8c9cae8f2ccf34e",
- "unpacked": {
- "s": {
- "payload": [
- 198,
- 199,
- 200,
- 201,
- 202
- ],
- "crc": 232,
- "a": 62156,
- "b": 62286
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Struct_Field",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "0003d0d1d2",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": [
- 208,
- 209,
- 210
- ]
- }
- }
- },
- {
- "packed": "ff00",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "ff03d0d1d2",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": [
- 208,
- 209,
- 210
- ]
- }
- }
- },
- {
- "packed": "cf00",
- "unpacked": {
- "a": {
- "a": 207
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "cf03d0d1d2",
- "unpacked": {
- "a": {
- "a": 207
- },
- "b": {
- "array": [
- 208,
- 209,
- 210
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_ConstantSize",
- "tests": [
- {
- "packed": "d3d4d5d6",
- "unpacked": {
- "s": {
- "array": [
- 211,
- 212,
- 213,
- 214
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0fd7d8d9dadbdcdddedfe0e1e2e3e4e5",
- "unpacked": {
- "s": {
- "array": [
- 215,
- 216,
- 217,
- 218,
- 219,
- 220,
- 221,
- 222,
- 223,
- 224,
- 225,
- 226,
- 227,
- 228,
- 229
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0fe6e7e8e9eaebecedeeeff0f1f2f3f4",
- "unpacked": {
- "s": {
- "array": [
- 230,
- 231,
- 232,
- 233,
- 234,
- 235,
- 236,
- 237,
- 238,
- 239,
- 240,
- 241,
- 242,
- 243,
- 244
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "f5f6f7",
- "unpacked": {
- "s": {
- "array": [
- 245,
- 246,
- 247
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_ConstantSize",
- "tests": [
- {
- "packed": "fe39febbff3dff80",
- "unpacked": {
- "s": {
- "array": [
- 65081,
- 65211,
- 65341,
- 65408
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0e004200c4014601c8024a02cc034e",
- "unpacked": {
- "s": {
- "array": [
- 66,
- 196,
- 326,
- 456,
- 586,
- 716,
- 846
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0f03d0045204d4055605d8065a06dc075e07e0086208e4096609e80a6a0aec",
- "unpacked": {
- "s": {
- "array": [
- 976,
- 1106,
- 1236,
- 1366,
- 1496,
- 1626,
- 1756,
- 1886,
- 2016,
- 2146,
- 2276,
- 2406,
- 2536,
- 2666,
- 2796
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0b6e0bf00c72",
- "unpacked": {
- "s": {
- "array": [
- 2926,
- 3056,
- 3186
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_ConstantSize",
- "tests": [
- {
- "packed": "aabbccddaabbccdd",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_VariableSize",
- "tests": [
- {
- "packed": "0eaabbccddaabbccddaabbccddaabb",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_VariableCount",
- "tests": [
- {
- "packed": "0faabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabb",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_UnknownSize",
- "tests": [
- {
- "packed": "aabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccddaabbccdd",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- }
- },
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_ConstantSize",
- "tests": [
- {
- "packed": "00ff3300",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 51
- },
- {
- "a": 0
- }
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f00ff3500ff3600ff3700ff3800ff39",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 53
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 54
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 55
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 56
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 57
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00ff3b00ff3c00ff3d00ff3e00ff3f",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 59
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 60
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 61
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 62
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 63
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_UnknownSize",
- "tests": [
- {
- "packed": "00ff4100ff4200ff4300ff4400ff4500ff4600ff4700ff4800ff4900ff4a00ff",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 65
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 66
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 67
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 68
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 69
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 70
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 71
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 72
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 73
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 74
- },
- {
- "a": 0
- },
- {
- "a": 255
- }
- ]
- }
- }
- },
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_ConstantSize",
- "tests": [
- {
- "packed": "00034c4d4e00034f5051",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 76,
- 77,
- 78
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 79,
- 80,
- 81
- ]
- }
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f0003555657000358595a00035b5c5d",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 85,
- 86,
- 87
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 88,
- 89,
- 90
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 91,
- 92,
- 93
- ]
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00036162630003646566000367686900036a6b6c00036d6e6f0003707172000373747500",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 97,
- 98,
- 99
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 100,
- 101,
- 102
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 103,
- 104,
- 105
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 106,
- 107,
- 108
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 109,
- 110,
- 111
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 112,
- 113,
- 114
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 115,
- 116,
- 117
- ]
- },
- {
- "array": []
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_UnknownSize",
- "tests": [
- {
- "packed": "0003797a7b00037c7d7e00037f408100038283840003858687000388898a00038b8c8d00038e8f9000039192930003949596000397989900039a9b9c00039d9e9f0003a0a1a20003a3a4a50003a6a7a8",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 121,
- 122,
- 123
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 124,
- 125,
- 126
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 127,
- 64,
- 129
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 130,
- 131,
- 132
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 133,
- 134,
- 135
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 136,
- 137,
- 138
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 139,
- 140,
- 141
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 142,
- 143,
- 144
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 145,
- 146,
- 147
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 148,
- 149,
- 150
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 151,
- 152,
- 153
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 154,
- 155,
- 156
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 157,
- 158,
- 159
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 160,
- 161,
- 162
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 163,
- 164,
- 165
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 166,
- 167,
- 168
- ]
- }
- ]
- }
- }
- },
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_SizeModifier",
- "tests": [
- {
- "packed": "0d0003acadae0003afb0b100",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 172,
- 173,
- 174
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 175,
- 176,
- 177
- ]
- },
- {
- "array": []
- }
- ]
- }
- }
- },
- {
- "packed": "02",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_VariableSize_Padded",
- "tests": [
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0ead76adf8ae7aaefcaf7eafc0f0420000",
- "unpacked": {
- "s": {
- "array": [
- 44406,
- 44536,
- 44666,
- 44796,
- 44926,
- 44992,
- 61506
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_VariableCount_Padded",
- "tests": [
- {
- "packed": "070003c3c4c50003c6c7c80003c9cacb00",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 195,
- 196,
- 197
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 198,
- 199,
- 200
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 201,
- 202,
- 203
- ]
- },
- {
- "array": []
- }
- ]
- }
- }
- },
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- }
-] \ No newline at end of file
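
Each entry in the canonical test-vector files above pairs a "packed" hex string with the expected "unpacked" field values. As a rough illustration of how one of these vectors decodes, the Python sketch below unpacks the Struct_Array_Field_UnsizedElement_ConstantSize vector "00034c4d4e00034f5051" by hand. It assumes the 2-bit _size_ field of UnsizedStruct sits in the least-significant bits of its leading byte, which is consistent with the vectors shown above; it is not the code emitted by any of the generators.

    # Minimal sketch: decode the Struct_Array_Field_UnsizedElement_ConstantSize
    # vector "00034c4d4e00034f5051" into four UnsizedStruct elements.
    # Assumption: the 2-bit _size_(array) field occupies the low bits of the
    # first byte of each UnsizedStruct, as the vectors above suggest.

    def decode_unsized_struct(buf, offset):
        """Decode one UnsizedStruct { _size_(array): 2, _reserved_: 6, array: 8[] }."""
        size = buf[offset] & 0x03          # low 2 bits hold the array octet size
        array = list(buf[offset + 1 : offset + 1 + size])
        return {"array": array}, offset + 1 + size

    def decode_constant_size_array(packed_hex, count=4):
        buf = bytes.fromhex(packed_hex)
        offset, elements = 0, []
        for _ in range(count):             # array: UnsizedStruct[4]
            element, offset = decode_unsized_struct(buf, offset)
            elements.append(element)
        return {"s": {"array": elements}}

    if __name__ == "__main__":
        # Expected: [], [76, 77, 78], [], [79, 80, 81], matching the vector above.
        print(decode_constant_size_array("00034c4d4e00034f5051"))
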
diff --git a/tools/pdl/tests/canonical/le_rust_noalloc_test_file.pdl b/tools/pdl/tests/canonical/le_rust_noalloc_test_file.pdl
deleted file mode 100644
index 7137fae32b..0000000000
--- a/tools/pdl/tests/canonical/le_rust_noalloc_test_file.pdl
+++ /dev/null
@@ -1,610 +0,0 @@
-little_endian_packets
-
-
-enum Enum7 : 7 {
- A = 1,
- B = 2,
-}
-
-enum Enum16 : 16 {
- A = 0xaabb,
- B = 0xccdd,
-}
-
-struct SizedStruct {
- a: 8,
-}
-
-struct UnsizedStruct {
- _size_(array): 2,
- _reserved_: 6,
- array: 8[],
-}
-
-packet ScalarParent {
- a: 8,
- _size_(_payload_): 8,
- _payload_
-}
-
-packet EnumParent {
- a: Enum16,
- _size_(_payload_): 8,
- _payload_
-}
-
-packet EmptyParent : ScalarParent {
- _payload_
-}
-
-packet PartialParent5 {
- a: 5,
- _payload_
-}
-
-packet PartialParent12 {
- a: 12,
- _payload_
-}
-
-// Packet bit fields
-
-// The parser must be able to handle bit fields with scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Scalar_Field {
- a: 7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Enum_Field {
- a: Enum7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with reserved fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Reserved_Field {
- a: 7,
- _reserved_: 2,
- c: 55,
-}
-
-// The parser must be able to handle bit fields with size fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Size_Field {
- _size_(b): 3,
- a: 61,
- b: 8[],
-}
-
-// The parser must be able to handle bit fields with count fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Count_Field {
- _count_(b): 3,
- a: 61,
- b: 8[],
-}
-
-// The parser must be able to handle bit fields with fixed scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_FixedScalar_Field {
- _fixed_ = 7 : 7,
- b: 57,
-}
-
-// The parser must be able to handle bit fields with fixed enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_FixedEnum_Field {
- _fixed_ = A : Enum7,
- b: 57,
-}
-
-// Packet payload fields
-
-// The parser must be able to handle sized payload fields without
-// size modifier.
-packet Packet_Payload_Field_VariableSize {
- _size_(_payload_): 3,
- _reserved_: 5,
- _payload_
-}
-
-// The parser must be able to handle payload fields of unknown size followed
-// by fields of statically known size. The remaining span is integrated
-// in the packet.
-packet Packet_Payload_Field_UnknownSize {
- _payload_,
- a: 16,
-}
-
-// The parser must be able to handle payload fields of unknown size.
-// The remaining span is integrated in the packet.
-packet Packet_Payload_Field_UnknownSize_Terminal {
- a: 16,
- _payload_,
-}
-
-// Packet body fields
-
-// The parser must be able to handle sized body fields without
-// size modifier when the packet has no children.
-packet Packet_Body_Field_VariableSize {
- _size_(_body_): 3,
- _reserved_: 5,
- _body_
-}
-
-// The parser must be able to handle body fields of unknown size followed
-// by fields of statically known size. The remaining span is integrated
-// in the packet.
-packet Packet_Body_Field_UnknownSize {
- _body_,
- a: 16,
-}
-
-// The parser must be able to handle body fields of unknown size.
-// The remaining span is integrated in the packet.
-packet Packet_Body_Field_UnknownSize_Terminal {
- a: 16,
- _payload_,
-}
-
-// Packet typedef fields
-
-// The parser must be able to handle struct fields.
-// The size guard is generated by the Struct parser.
-packet Packet_Struct_Field {
- a: SizedStruct,
- b: UnsizedStruct,
-}
-
-// Array field configurations.
-// Add constructs for all configurations of type, size, and padding:
-//
-// - type: u8, u16, enum, struct with static size, struct with dynamic size
-// - size: constant, with size field, with count field, unspecified
-//
-// The type u8 is tested separately since it is likely to be handled
-// idiomatically by the specific language generators.
-
-packet Packet_Array_Field_ByteElement_ConstantSize {
- array: 8[4],
-}
-
-packet Packet_Array_Field_ByteElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-
-packet Packet_Array_Field_ByteElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-
-packet Packet_Array_Field_ByteElement_UnknownSize {
- array: 8[],
-}
-
-packet Packet_Array_Field_ScalarElement_ConstantSize {
- array: 16[4],
-}
-
-packet Packet_Array_Field_ScalarElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-
-packet Packet_Array_Field_ScalarElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-
-packet Packet_Array_Field_ScalarElement_UnknownSize {
- array: 16[],
-}
-
-packet Packet_Array_Field_EnumElement_ConstantSize {
- array: Enum16[4],
-}
-
-packet Packet_Array_Field_EnumElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-
-packet Packet_Array_Field_EnumElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-
-packet Packet_Array_Field_EnumElement_UnknownSize {
- array: Enum16[],
-}
-
-packet Packet_Array_Field_SizedElement_ConstantSize {
- array: SizedStruct[4],
-}
-
-packet Packet_Array_Field_SizedElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_SizedElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_SizedElement_UnknownSize {
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_ConstantSize {
- array: UnsizedStruct[4],
-}
-
-packet Packet_Array_Field_UnsizedElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_UnknownSize {
- array: UnsizedStruct[],
-}
-
-// The parser must be able to handle arrays with padded size.
-packet Packet_Array_Field_SizedElement_VariableSize_Padded {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
- _padding_ [16],
-}
-
-// The parser must be able to handle arrays with padded size.
-packet Packet_Array_Field_UnsizedElement_VariableCount_Padded {
- _count_(array) : 8,
- array: UnsizedStruct[],
- _padding_ [16],
-}
-
-// Packet inheritance
-
-// The parser must handle specialization into
-// any child packet of a parent packet with scalar constraints.
-packet ScalarChild_A : ScalarParent (a = 0) {
- b: 8,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with scalar constraints.
-packet ScalarChild_B : ScalarParent (a = 1) {
- c: 16,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with enum constraints.
-packet EnumChild_A : EnumParent (a = A) {
- b: 8,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with enum constraints.
-packet EnumChild_B : EnumParent (a = B) {
- c: 16,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild5_A : PartialParent5 (a = 0) {
- b: 11,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild5_B : PartialParent5 (a = 1) {
- c: 27,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild12_A : PartialParent12 (a = 2) {
- d: 4,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild12_B : PartialParent12 (a = 3) {
- e: 20,
-}
-
-// Struct bit fields
-
-// The parser must be able to handle bit fields with scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Scalar_Field {
- a: 7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Enum_Field_ {
- a: Enum7,
- c: 57,
-}
-packet Struct_Enum_Field {
- s: Struct_Enum_Field_,
-}
-
-// The parser must be able to handle bit fields with reserved fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Reserved_Field_ {
- a: 7,
- _reserved_: 2,
- c: 55,
-}
-packet Struct_Reserved_Field {
- s: Struct_Reserved_Field_,
-}
-
-// The parser must be able to handle bit fields with size fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Size_Field_ {
- _size_(b): 3,
- a: 61,
- b: 8[],
-}
-packet Struct_Size_Field {
- s: Struct_Size_Field_,
-}
-
-// The parser must be able to handle bit fields with count fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Count_Field_ {
- _count_(b): 3,
- a: 61,
- b: 8[],
-}
-packet Struct_Count_Field {
- s: Struct_Count_Field_,
-}
-
-// The parser must be able to handle bit fields with fixed scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_FixedScalar_Field_ {
- _fixed_ = 7 : 7,
- b: 57,
-}
-packet Struct_FixedScalar_Field {
- s: Struct_FixedScalar_Field_,
-}
-
-// The parser must be able to handle bit fields with fixed enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_FixedEnum_Field_ {
- _fixed_ = A : Enum7,
- b: 57,
-}
-packet Struct_FixedEnum_Field {
- s: Struct_FixedEnum_Field_,
-}
-
-// Struct typedef fields
-
-// The parser must be able to handle struct fields.
-// The size guard is generated by the Struct parser.
-packet Struct_Struct_Field {
- a: SizedStruct,
- b: UnsizedStruct,
-}
-
-// Array field configurations.
-// Add constructs for all configurations of type, size, and padding:
-//
-// - type: u8, u16, enum, struct with static size, struct with dynamic size
-// - size: constant, with size field, with count field, unspecified
-//
-// The type u8 is tested separately since it is likely to be handled
-// idiomatically by the specific language generators.
-
-struct Struct_Array_Field_ByteElement_ConstantSize_ {
- array: 8[4],
-}
-packet Struct_Array_Field_ByteElement_ConstantSize {
- s: Struct_Array_Field_ByteElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_ByteElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_VariableSize {
- s: Struct_Array_Field_ByteElement_VariableSize_,
-}
-
-struct Struct_Array_Field_ByteElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_VariableCount {
- s: Struct_Array_Field_ByteElement_VariableCount_,
-}
-
-struct Struct_Array_Field_ByteElement_UnknownSize_ {
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_UnknownSize {
- s: Struct_Array_Field_ByteElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_ConstantSize_ {
- array: 16[4],
-}
-packet Struct_Array_Field_ScalarElement_ConstantSize {
- s: Struct_Array_Field_ScalarElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_VariableSize {
- s: Struct_Array_Field_ScalarElement_VariableSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_VariableCount {
- s: Struct_Array_Field_ScalarElement_VariableCount_,
-}
-
-struct Struct_Array_Field_ScalarElement_UnknownSize_ {
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_UnknownSize {
- s: Struct_Array_Field_ScalarElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_EnumElement_ConstantSize_ {
- array: Enum16[4],
-}
-packet Struct_Array_Field_EnumElement_ConstantSize {
- s: Struct_Array_Field_EnumElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_EnumElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_VariableSize {
- s: Struct_Array_Field_EnumElement_VariableSize_,
-}
-
-struct Struct_Array_Field_EnumElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_VariableCount {
- s: Struct_Array_Field_EnumElement_VariableCount_,
-}
-
-struct Struct_Array_Field_EnumElement_UnknownSize_ {
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_UnknownSize {
- s: Struct_Array_Field_EnumElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_SizedElement_ConstantSize_ {
- array: SizedStruct[4],
-}
-packet Struct_Array_Field_SizedElement_ConstantSize {
- s: Struct_Array_Field_SizedElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_SizedElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_VariableSize {
- s: Struct_Array_Field_SizedElement_VariableSize_,
-}
-
-struct Struct_Array_Field_SizedElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_VariableCount {
- s: Struct_Array_Field_SizedElement_VariableCount_,
-}
-
-struct Struct_Array_Field_SizedElement_UnknownSize_ {
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_UnknownSize {
- s: Struct_Array_Field_SizedElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_ConstantSize_ {
- array: UnsizedStruct[4],
-}
-packet Struct_Array_Field_UnsizedElement_ConstantSize {
- s: Struct_Array_Field_UnsizedElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_VariableSize {
- s: Struct_Array_Field_UnsizedElement_VariableSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_VariableCount {
- s: Struct_Array_Field_UnsizedElement_VariableCount_,
-}
-
-struct Struct_Array_Field_UnsizedElement_UnknownSize_ {
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_UnknownSize {
- s: Struct_Array_Field_UnsizedElement_UnknownSize_,
-}
-
-// The parser must be able to handle arrays with padded size.
-struct Struct_Array_Field_SizedElement_VariableSize_Padded_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
- _padding_ [16],
-}
-packet Struct_Array_Field_SizedElement_VariableSize_Padded {
- s: Struct_Array_Field_SizedElement_VariableSize_Padded_,
-}
-
-// The parser must be able to handle arrays with padded size.
-struct Struct_Array_Field_UnsizedElement_VariableCount_Padded_ {
- _count_(array) : 8,
- array: UnsizedStruct[],
- _padding_ [16],
-}
-packet Struct_Array_Field_UnsizedElement_VariableCount_Padded {
- s: Struct_Array_Field_UnsizedElement_VariableCount_Padded_,
-}
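
The PartialParent5 / PartialChild5 definitions above exercise inheritance where the parent's bit fields stop mid-byte and the child's first field completes them. The sketch below is a hand-written illustration of that layout, assuming little-endian bit fields are packed starting from the least-significant bit (as the Packet_Scalar_Field vectors elsewhere in this change indicate); it is not the generated serializer.

    # Minimal sketch (not generated code): serialize PartialChild5_A, where the
    # parent contributes a 5-bit field `a` and the child completes the two bytes
    # with an 11-bit field `b`. Assumption: little-endian bit fields are packed
    # starting from the least-significant bit, then emitted as little-endian bytes.

    def serialize_partial_child5_a(b):
        a = 0                               # PartialChild5_A fixes the constraint a = 0
        assert 0 <= b < (1 << 11)
        chunk = (a & 0x1F) | (b << 5)       # a occupies bits 0-4, b occupies bits 5-15
        return chunk.to_bytes(2, "little")

    if __name__ == "__main__":
        # b = 0x7FF fills every child bit: the 16-bit chunk is 0xFFE0, emitted as "e0ff".
        print(serialize_partial_child5_a(0x7FF).hex())
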
diff --git a/tools/pdl/tests/canonical/le_rust_test_file.pdl b/tools/pdl/tests/canonical/le_rust_test_file.pdl
deleted file mode 100644
index cdb20b3ec6..0000000000
--- a/tools/pdl/tests/canonical/le_rust_test_file.pdl
+++ /dev/null
@@ -1,573 +0,0 @@
-little_endian_packets
-
-// Preliminary definitions
-
-enum MaxDiscriminantEnum : 64 {
- Max = 0xffffffffffffffff,
-}
-
-enum Enum7 : 7 {
- A = 1,
- B = 2,
-}
-
-enum Enum16 : 16 {
- A = 0xaabb,
- B = 0xccdd,
-}
-
-struct SizedStruct {
- a: 8,
-}
-
-struct UnsizedStruct {
- _size_(array): 2,
- _reserved_: 6,
- array: 8[],
-}
-
-packet ScalarParent {
- a: 8,
- _size_(_payload_): 8,
- _payload_
-}
-
-packet EnumParent {
- a: Enum16,
- _size_(_payload_): 8,
- _payload_
-}
-
-// Packet bit fields
-
-// The parser must be able to handle bit fields with scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Scalar_Field {
- a: 7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Enum_Field {
- a: Enum7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with reserved fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Reserved_Field {
- a: 7,
- _reserved_: 2,
- c: 55,
-}
-
-// The parser must be able to handle bit fields with size fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Size_Field {
- _size_(b): 3,
- a: 61,
- b: 8[],
-}
-
-// The parser must be able to handle bit fields with count fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Count_Field {
- _count_(b): 3,
- a: 61,
- b: 8[],
-}
-
-// The parser must be able to handle bit fields with fixed scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_FixedScalar_Field {
- _fixed_ = 7 : 7,
- b: 57,
-}
-
-// The parser must be able to handle bit fields with fixed enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_FixedEnum_Field {
- _fixed_ = A : Enum7,
- b: 57,
-}
-
-// Packet payload fields
-
-// The parser must be able to handle sized payload fields without
-// size modifier.
-packet Packet_Payload_Field_VariableSize {
- _size_(_payload_): 3,
- _reserved_: 5,
- _payload_
-}
-
-// The parser must be able to handle payload fields of unknown size followed
-// by fields of statically known size. The remaining span is integrated
-// in the packet.
-packet Packet_Payload_Field_UnknownSize {
- _payload_,
- a: 16,
-}
-
-// The parser must be able to handle payload fields of unknown size.
-// The remaining span is integrated in the packet.
-packet Packet_Payload_Field_UnknownSize_Terminal {
- a: 16,
- _payload_,
-}
-
-// Packet body fields
-
-// The parser must be able to handle sized body fields without
-// size modifier when the packet has no children.
-packet Packet_Body_Field_VariableSize {
- _size_(_body_): 3,
- _reserved_: 5,
- _body_
-}
-
-// The parser must be able to handle body fields of unknown size followed
-// by fields of statically known size. The remaining span is integrated
-// in the packet.
-packet Packet_Body_Field_UnknownSize {
- _body_,
- a: 16,
-}
-
-// The parser must be able to handle body fields of unknown size.
-// The remaining span is integrated in the packet.
-packet Packet_Body_Field_UnknownSize_Terminal {
- a: 16,
- _body_,
-}
-
-// Packet typedef fields
-
-// The parser must be able to handle struct fields.
-// The size guard is generated by the Struct parser.
-packet Packet_Struct_Field {
- a: SizedStruct,
- b: UnsizedStruct,
-}
-
-
-// Array field configurations.
-// Add constructs for all configurations of type, size, and padding:
-//
-// - type: u8, u16, enum, struct with static size, struct with dynamic size
-// - size: constant, with size field, with count field, unspecified
-//
-// The type u8 is tested separately since it is likely to be handled
-// idiomatically by the specific language generators.
-packet Packet_Array_Field_ByteElement_ConstantSize {
- array: 8[4],
-}
-
-packet Packet_Array_Field_ByteElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-
-packet Packet_Array_Field_ByteElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-
-packet Packet_Array_Field_ByteElement_UnknownSize {
- array: 8[],
-}
-
-packet Packet_Array_Field_ScalarElement_ConstantSize {
- array: 16[4],
-}
-
-packet Packet_Array_Field_ScalarElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-
-packet Packet_Array_Field_ScalarElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-
-packet Packet_Array_Field_ScalarElement_UnknownSize {
- array: 16[],
-}
-
-packet Packet_Array_Field_EnumElement_ConstantSize {
- array: Enum16[4],
-}
-
-packet Packet_Array_Field_EnumElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-
-packet Packet_Array_Field_EnumElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-
-packet Packet_Array_Field_EnumElement_UnknownSize {
- array: Enum16[],
-}
-
-packet Packet_Array_Field_SizedElement_ConstantSize {
- array: SizedStruct[4],
-}
-
-packet Packet_Array_Field_SizedElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_SizedElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_SizedElement_UnknownSize {
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_ConstantSize {
- array: UnsizedStruct[4],
-}
-
-packet Packet_Array_Field_UnsizedElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_UnknownSize {
- array: UnsizedStruct[],
-}
-
-// The parser must be able to handle arrays with padded size.
-packet Packet_Array_Field_SizedElement_VariableSize_Padded {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
- _padding_ [16],
-}
-
-// The parser must be able to handle arrays with padded size.
-packet Packet_Array_Field_UnsizedElement_VariableCount_Padded {
- _count_(array) : 8,
- array: UnsizedStruct[],
- _padding_ [16],
-}
-
-// Packet inheritance
-
-// The parser must handle specialization into
-// any child packet of a parent packet with scalar constraints.
-packet ScalarChild_A : ScalarParent (a = 0) {
- b: 8,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with scalar constraints.
-packet ScalarChild_B : ScalarParent (a = 1) {
- c: 16,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with enum constraints.
-packet EnumChild_A : EnumParent (a = A) {
- b: 8,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with enum constraints.
-packet EnumChild_B : EnumParent (a = B) {
- c: 16,
-}
-
-// Struct bit fields
-
-// The parser must be able to handle bit fields with scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Scalar_Field {
- a: 7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Enum_Field_ {
- a: Enum7,
- c: 57,
-}
-packet Struct_Enum_Field {
- s: Struct_Enum_Field_,
-}
-
-// The parser must be able to handle bit fields with reserved fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Reserved_Field_ {
- a: 7,
- _reserved_: 2,
- c: 55,
-}
-packet Struct_Reserved_Field {
- s: Struct_Reserved_Field_,
-}
-
-// The parser must be able to handle bit fields with size fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Size_Field_ {
- _size_(b): 3,
- a: 61,
- b: 8[],
-}
-packet Struct_Size_Field {
- s: Struct_Size_Field_,
-}
-
-// The parser must be able to handle bit fields with count fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Count_Field_ {
- _count_(b): 3,
- a: 61,
- b: 8[],
-}
-packet Struct_Count_Field {
- s: Struct_Count_Field_,
-}
-// The parser must be able to handle bit fields with fixed scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_FixedScalar_Field_ {
- _fixed_ = 7 : 7,
- b: 57,
-}
-packet Struct_FixedScalar_Field {
- s: Struct_FixedScalar_Field_,
-}
-
-// The parser must be able to handle bit fields with fixed enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_FixedEnum_Field_ {
- _fixed_ = A : Enum7,
- b: 57,
-}
-packet Struct_FixedEnum_Field {
- s: Struct_FixedEnum_Field_,
-}
-
-// Struct typedef fields
-
-// The parser must be able to handle struct fields.
-// The size guard is generated by the Struct parser.
-packet Struct_Struct_Field {
- a: SizedStruct,
- b: UnsizedStruct,
-}
-
-// Array field configurations.
-// Add constructs for all configurations of type, size, and padding:
-//
-// - type: u8, u16, enum, struct with static size, struct with dynamic size
-// - size: constant, with size field, with count field, unspecified
-//
-// The type u8 is tested separately since it is likely to be handled
-// idiomatically by the specific language generators.
-
-struct Struct_Array_Field_ByteElement_ConstantSize_ {
- array: 8[4],
-}
-packet Struct_Array_Field_ByteElement_ConstantSize {
- s: Struct_Array_Field_ByteElement_ConstantSize_,
-}
-
-
-struct Struct_Array_Field_ByteElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_VariableSize {
- s: Struct_Array_Field_ByteElement_VariableSize_,
-}
-
-struct Struct_Array_Field_ByteElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_VariableCount {
- s: Struct_Array_Field_ByteElement_VariableCount_,
-}
-
-struct Struct_Array_Field_ByteElement_UnknownSize_ {
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_UnknownSize {
- s: Struct_Array_Field_ByteElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_ConstantSize_ {
- array: 16[4],
-}
-packet Struct_Array_Field_ScalarElement_ConstantSize {
- s: Struct_Array_Field_ScalarElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_VariableSize {
- s: Struct_Array_Field_ScalarElement_VariableSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_VariableCount {
- s: Struct_Array_Field_ScalarElement_VariableCount_,
-}
-
-struct Struct_Array_Field_ScalarElement_UnknownSize_ {
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_UnknownSize {
- s: Struct_Array_Field_ScalarElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_EnumElement_ConstantSize_ {
- array: Enum16[4],
-}
-packet Struct_Array_Field_EnumElement_ConstantSize {
- s: Struct_Array_Field_EnumElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_EnumElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_VariableSize {
- s: Struct_Array_Field_EnumElement_VariableSize_,
-}
-
-struct Struct_Array_Field_EnumElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_VariableCount {
- s: Struct_Array_Field_EnumElement_VariableCount_,
-}
-
-struct Struct_Array_Field_EnumElement_UnknownSize_ {
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_UnknownSize {
- s: Struct_Array_Field_EnumElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_SizedElement_ConstantSize_ {
- array: SizedStruct[4],
-}
-packet Struct_Array_Field_SizedElement_ConstantSize {
- s: Struct_Array_Field_SizedElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_SizedElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_VariableSize {
- s: Struct_Array_Field_SizedElement_VariableSize_,
-}
-
-struct Struct_Array_Field_SizedElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_VariableCount {
- s: Struct_Array_Field_SizedElement_VariableCount_,
-}
-
-struct Struct_Array_Field_SizedElement_UnknownSize_ {
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_UnknownSize {
- s: Struct_Array_Field_SizedElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_ConstantSize_ {
- array: UnsizedStruct[4],
-}
-packet Struct_Array_Field_UnsizedElement_ConstantSize {
- s: Struct_Array_Field_UnsizedElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_VariableSize {
- s: Struct_Array_Field_UnsizedElement_VariableSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_VariableCount {
- s: Struct_Array_Field_UnsizedElement_VariableCount_,
-}
-
-struct Struct_Array_Field_UnsizedElement_UnknownSize_ {
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_UnknownSize {
- s: Struct_Array_Field_UnsizedElement_UnknownSize_,
-}
-
-// The parser must be able to handle arrays with padded size.
-struct Struct_Array_Field_SizedElement_VariableSize_Padded_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
- _padding_ [16],
-}
-packet Struct_Array_Field_SizedElement_VariableSize_Padded {
- s: Struct_Array_Field_SizedElement_VariableSize_Padded_,
-}
-
-// The parser must be able to handle arrays with padded size.
-struct Struct_Array_Field_UnsizedElement_VariableCount_Padded_ {
- _count_(array) : 8,
- array: UnsizedStruct[],
- _padding_ [16],
-}
-packet Struct_Array_Field_UnsizedElement_VariableCount_Padded {
- s: Struct_Array_Field_UnsizedElement_VariableCount_Padded_,
-}
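
The array constructs above rely on the distinction between _size_, which records the number of octets occupied by the array, and _count_, which records the number of elements; the padded-array vectors earlier in this change (a size of 14 octets for seven 16-bit elements, a count of 7 for seven structs) are consistent with that reading. The sketch below serializes a two-element 16-bit array both ways under that assumption, with the 4-bit size or count value in the low nibble; it is illustrative only, not the generated code.

    # Minimal sketch (not the generated serializer): the difference between
    # Packet_Array_Field_ScalarElement_VariableSize and ..._VariableCount.
    # Assumption: _size_ records the octet length of the array while _count_
    # records the number of elements, as the padded-array vectors above suggest.

    def serialize_variable_size(array):
        size = 2 * len(array)               # 16-bit elements -> two octets each
        assert size < 16
        header = bytes([size & 0x0F])       # _size_(array): 4 in the low nibble, _reserved_: 4
        body = b"".join(e.to_bytes(2, "little") for e in array)
        return header + body

    def serialize_variable_count(array):
        assert len(array) < 16
        header = bytes([len(array) & 0x0F]) # _count_(array): 4 in the low nibble, _reserved_: 4
        body = b"".join(e.to_bytes(2, "little") for e in array)
        return header + body

    if __name__ == "__main__":
        sample = [0xAABB, 0xCCDD]
        print(serialize_variable_size(sample).hex())   # "04bbaaddcc": 4 octets follow
        print(serialize_variable_count(sample).hex())  # "02bbaaddcc": 2 elements follow
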
diff --git a/tools/pdl/tests/canonical/le_test_file.pdl b/tools/pdl/tests/canonical/le_test_file.pdl
deleted file mode 100644
index 6bc140c46e..0000000000
--- a/tools/pdl/tests/canonical/le_test_file.pdl
+++ /dev/null
@@ -1,780 +0,0 @@
-little_endian_packets
-
-// Preliminary definitions
-
-custom_field SizedCustomField : 8 "SizedCustomField"
-custom_field UnsizedCustomField "UnsizedCustomField"
-checksum Checksum : 8 "Checksum"
-
-enum Enum7 : 7 {
- A = 1,
- B = 2,
-}
-
-enum Enum16 : 16 {
- A = 0xaabb,
- B = 0xccdd,
-}
-
-struct SizedStruct {
- a: 8,
-}
-
-struct UnsizedStruct {
- _size_(array): 2,
- _reserved_: 6,
- array: 8[],
-}
-
-group ScalarGroup {
- a: 16
-}
-
-group EnumGroup {
- a: Enum16
-}
-
-packet ScalarParent {
- a: 8,
- _size_(_payload_): 8,
- _payload_
-}
-
-packet EnumParent {
- a: Enum16,
- _size_(_payload_): 8,
- _payload_
-}
-
-packet EmptyParent : ScalarParent {
- _payload_
-}
-
-// Start: little_endian_only
-packet PartialParent5 {
- a: 5,
- _payload_
-}
-
-packet PartialParent12 {
- a: 12,
- _payload_
-}
-// End: little_endian_only
-
-// Packet bit fields
-
-// The parser must be able to handle bit fields with scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Scalar_Field {
- a: 7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Enum_Field {
- a: Enum7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with reserved fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Reserved_Field {
- a: 7,
- _reserved_: 2,
- c: 55,
-}
-
-// The parser must be able to handle bit fields with size fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Size_Field {
- _size_(b): 3,
- a: 61,
- b: 8[],
-}
-
-// The parser must be able to handle bit fields with count fields
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_Count_Field {
- _count_(b): 3,
- a: 61,
- b: 8[],
-}
-
-// The parser must be able to handle bit fields with fixed scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_FixedScalar_Field {
- _fixed_ = 7 : 7,
- b: 57,
-}
-
-// The parser must be able to handle bit fields with fixed enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-packet Packet_FixedEnum_Field {
- _fixed_ = A : Enum7,
- b: 57,
-}
-
-// Packet payload fields
-
-// The parser must be able to handle sized payload fields without
-// size modifier.
-packet Packet_Payload_Field_VariableSize {
- _size_(_payload_): 3,
- _reserved_: 5,
- _payload_
-}
-
-// The parser must be able to handle sized payload fields with
-// size modifier.
-packet Packet_Payload_Field_SizeModifier {
- _size_(_payload_): 3,
- _reserved_: 5,
- _payload_ : [+2],
-}
-
-// The parser must be able to handle payload fields of unknown size followed
-// by fields of statically known size. The remaining span is integrated
-// in the packet.
-packet Packet_Payload_Field_UnknownSize {
- _payload_,
- a: 16,
-}
-
-// The parser must be able to handle payload fields of unknown size.
-// The remaining span is integrated in the packet.
-packet Packet_Payload_Field_UnknownSize_Terminal {
- a: 16,
- _payload_,
-}
-
-// Packet body fields
-
-// The parser must be able to handle sized body fields without
-// size modifier when the packet has no children.
-packet Packet_Body_Field_VariableSize {
- _size_(_body_): 3,
- _reserved_: 5,
- _body_
-}
-
-// The parser must be able to handle body fields of unknown size followed
-// by fields of statically known size. The remaining span is integrated
-// in the packet.
-packet Packet_Body_Field_UnknownSize {
- _body_,
- a: 16,
-}
-
-// The parser must be able to handle body fields of unknown size.
-// The remaining span is integrated in the packet.
-packet Packet_Body_Field_UnknownSize_Terminal {
- a: 16,
- _body_,
-}
-
-// Packet group fields
-
-packet Packet_ScalarGroup_Field {
- ScalarGroup { a = 42 },
-}
-
-packet Packet_EnumGroup_Field {
- EnumGroup { a = A },
-}
-
-// Packet checksum fields
-
-// The parser must be able to handle checksum fields if the checksum value
-// field is positioned at constant offset from the checksum start.
-// The parser should generate a checksum guard for the buffer covered by the
-// checksum.
-packet Packet_Checksum_Field_FromStart {
- _checksum_start_(crc),
- a: 16,
- b: 16,
- crc: Checksum,
-}
-
-// The parser must be able to handle checksum fields if the checksum value
-// field is positioned at constant offset from the end of the packet.
-// The parser should generate a checksum guard for the buffer covered by the
-// checksum.
-packet Packet_Checksum_Field_FromEnd {
- _checksum_start_(crc),
- _payload_,
- crc: Checksum,
- a: 16,
- b: 16,
-}
-
-// Packet typedef fields
-
-// The parser must be able to handle struct fields.
-// The size guard is generated by the Struct parser.
-packet Packet_Struct_Field {
- a: SizedStruct,
- b: UnsizedStruct,
-}
-
-// The parser must be able to handle custom fields of constant size.
-// The parser should generate a static size guard.
-packet Packet_Custom_Field_ConstantSize {
- a: SizedCustomField,
-}
-
-// The parser must be able to handle custom fields of undefined size.
-// No size guard possible.
-packet Packet_Custom_Field_VariableSize {
- a: UnsizedCustomField,
-}
-
-// Array field configurations.
-// Add constructs for all configurations of type, size, and padding:
-//
-// - type: u8, u16, enum, struct with static size, struct with dynamic size
-// - size: constant, with size field, with count field, unspecified
-//
-// The type u8 is tested separately since it is likely to be handled
-// idiomatically by the specific language generators.
-
-packet Packet_Array_Field_ByteElement_ConstantSize {
- array: 8[4],
-}
-
-packet Packet_Array_Field_ByteElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-
-packet Packet_Array_Field_ByteElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-
-packet Packet_Array_Field_ByteElement_UnknownSize {
- array: 8[],
-}
-
-packet Packet_Array_Field_ScalarElement_ConstantSize {
- array: 16[4],
-}
-
-packet Packet_Array_Field_ScalarElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-
-packet Packet_Array_Field_ScalarElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-
-packet Packet_Array_Field_ScalarElement_UnknownSize {
- array: 16[],
-}
-
-packet Packet_Array_Field_EnumElement_ConstantSize {
- array: Enum16[4],
-}
-
-packet Packet_Array_Field_EnumElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-
-packet Packet_Array_Field_EnumElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-
-packet Packet_Array_Field_EnumElement_UnknownSize {
- array: Enum16[],
-}
-
-packet Packet_Array_Field_SizedElement_ConstantSize {
- array: SizedStruct[4],
-}
-
-packet Packet_Array_Field_SizedElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_SizedElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_SizedElement_UnknownSize {
- array: SizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_ConstantSize {
- array: UnsizedStruct[4],
-}
-
-packet Packet_Array_Field_UnsizedElement_VariableSize {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_VariableCount {
- _count_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-
-packet Packet_Array_Field_UnsizedElement_UnknownSize {
- array: UnsizedStruct[],
-}
-
-// The parser must support complex size modifiers on arrays whose size is
-// specified by a size field.
-packet Packet_Array_Field_UnsizedElement_SizeModifier {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[+2],
-}
-
-// The parser must be able to handle arrays with padded size.
-packet Packet_Array_Field_SizedElement_VariableSize_Padded {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
- _padding_ [16],
-}
-
-// The parser must be able to handle arrays with padded size.
-packet Packet_Array_Field_UnsizedElement_VariableCount_Padded {
- _count_(array) : 8,
- array: UnsizedStruct[],
- _padding_ [16],
-}
-
-// Packet inheritance
-
-// The parser must handle specialization into
-// any child packet of a parent packet with scalar constraints.
-packet ScalarChild_A : ScalarParent (a = 0) {
- b: 8,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with scalar constraints.
-packet ScalarChild_B : ScalarParent (a = 1) {
- c: 16,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with enum constraints.
-packet EnumChild_A : EnumParent (a = A) {
- b: 8,
-}
-
-// The parser must handle specialization into
-// any child packet of a parent packet with enum constraints.
-packet EnumChild_B : EnumParent (a = B) {
- c: 16,
-}
-
-// The parser must handle aliasing of packets
-// through inheritance with no constraints
-packet AliasedChild_A : EmptyParent (a = 2) {
- b: 8,
-}
-
-// The parser must handle aliasing of packets
-// through inheritance with no constraints
-packet AliasedChild_B : EmptyParent (a = 3) {
- c: 16,
-}
-
-// Start: little_endian_only
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild5_A : PartialParent5 (a = 0) {
- b: 11,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild5_B : PartialParent5 (a = 1) {
- c: 27,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild12_A : PartialParent12 (a = 2) {
- d: 4,
-}
-
-// The parser must handle inheritance of packets with payloads starting
-// on a shifted byte boundary, as long as the first fields of the child
-// complete the bit fields.
-packet PartialChild12_B : PartialParent12 (a = 3) {
- e: 20,
-}
-
-// End: little_endian_only
-
-// Struct bit fields
-
-// The parser must be able to handle bit fields with scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Scalar_Field {
- a: 7,
- c: 57,
-}
-
-// The parser must be able to handle bit fields with enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Enum_Field_ {
- a: Enum7,
- c: 57,
-}
-packet Struct_Enum_Field {
- s: Struct_Enum_Field_,
-}
-
-// The parser must be able to handle bit fields with reserved fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Reserved_Field_ {
- a: 7,
- _reserved_: 2,
- c: 55,
-}
-packet Struct_Reserved_Field {
- s: Struct_Reserved_Field_,
-}
-
-// The parser must be able to handle bit fields with size fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Size_Field_ {
- _size_(b): 3,
- a: 61,
- b: 8[],
-}
-packet Struct_Size_Field {
- s: Struct_Size_Field_,
-}
-
-// The parser must be able to handle bit fields with count fields
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_Count_Field_ {
- _count_(b): 3,
- a: 61,
- b: 8[],
-}
-packet Struct_Count_Field {
- s: Struct_Count_Field_,
-}
-
-// The parser must be able to handle bit fields with fixed scalar values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_FixedScalar_Field_ {
- _fixed_ = 7 : 7,
- b: 57,
-}
-packet Struct_FixedScalar_Field {
- s: Struct_FixedScalar_Field_,
-}
-
-// The parser must be able to handle bit fields with fixed enum values
-// up to 64 bits wide. The parser should generate a static size guard.
-struct Struct_FixedEnum_Field_ {
- _fixed_ = A : Enum7,
- b: 57,
-}
-packet Struct_FixedEnum_Field {
- s: Struct_FixedEnum_Field_,
-}
-
-// Struct group fields
-
-struct Struct_ScalarGroup_Field_ {
- ScalarGroup { a = 42 },
-}
-packet Struct_ScalarGroup_Field {
- s: Struct_ScalarGroup_Field_,
-}
-
-struct Struct_EnumGroup_Field_ {
- EnumGroup { a = A },
-}
-packet Struct_EnumGroup_Field {
- s: Struct_EnumGroup_Field_,
-}
-
-// Struct checksum fields
-
-// The parser must be able to handle checksum fields if the checksum value
-// field is positioned at constant offset from the checksum start.
-// The parser should generate a checksum guard for the buffer covered by the
-// checksum.
-struct Struct_Checksum_Field_FromStart_ {
- _checksum_start_(crc),
- a: 16,
- b: 16,
- crc: Checksum,
-}
-packet Struct_Checksum_Field_FromStart {
- s: Struct_Checksum_Field_FromStart_,
-}
-
-// The parser must be able to handle checksum fields if the checksum value
-// field is positioned at constant offset from the end of the packet.
-// The parser should generate a checksum guard for the buffer covered by the
-// checksum.
-struct Struct_Checksum_Field_FromEnd_ {
- _checksum_start_(crc),
- _payload_,
- crc: Checksum,
- a: 16,
- b: 16,
-}
-packet Struct_Checksum_Field_FromEnd {
- s: Struct_Checksum_Field_FromEnd_,
-}
-
-// Struct typedef fields
-
-// The parser must be able to handle struct fields.
-// The size guard is generated by the Struct parser.
-packet Struct_Struct_Field {
- a: SizedStruct,
- b: UnsizedStruct,
-}
-
-// The parser must be able to handle custom fields of constant size.
-// The parser should generate a static size guard.
-struct Struct_Custom_Field_ConstantSize_ {
- a: SizedCustomField,
-}
-packet Struct_Custom_Field_ConstantSize {
- s: Struct_Custom_Field_ConstantSize_,
-}
-
-// The parser must be able to handle custom fields of undefined size.
-// No size guard possible.
-struct Struct_Custom_Field_VariableSize_ {
- a: UnsizedCustomField,
-}
-packet Struct_Custom_Field_VariableSize {
- s: Struct_Custom_Field_VariableSize_,
-}
-
-// Array field configurations.
-// Add constructs for all configurations of type, size, and padding:
-//
-// - type: u8, u16, enum, struct with static size, struct with dynamic size
-// - size: constant, with size field, with count field, unspecified
-//
-// The type u8 is tested separately since it is likely to be handled
-// idiomatically by the specific language generators.
-
-struct Struct_Array_Field_ByteElement_ConstantSize_ {
- array: 8[4],
-}
-packet Struct_Array_Field_ByteElement_ConstantSize {
- s: Struct_Array_Field_ByteElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_ByteElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_VariableSize {
- s: Struct_Array_Field_ByteElement_VariableSize_,
-}
-
-struct Struct_Array_Field_ByteElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_VariableCount {
- s: Struct_Array_Field_ByteElement_VariableCount_,
-}
-
-struct Struct_Array_Field_ByteElement_UnknownSize_ {
- array: 8[],
-}
-packet Struct_Array_Field_ByteElement_UnknownSize {
- s: Struct_Array_Field_ByteElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_ConstantSize_ {
- array: 16[4],
-}
-packet Struct_Array_Field_ScalarElement_ConstantSize {
- s: Struct_Array_Field_ScalarElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_VariableSize {
- s: Struct_Array_Field_ScalarElement_VariableSize_,
-}
-
-struct Struct_Array_Field_ScalarElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_VariableCount {
- s: Struct_Array_Field_ScalarElement_VariableCount_,
-}
-
-struct Struct_Array_Field_ScalarElement_UnknownSize_ {
- array: 16[],
-}
-packet Struct_Array_Field_ScalarElement_UnknownSize {
- s: Struct_Array_Field_ScalarElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_EnumElement_ConstantSize_ {
- array: Enum16[4],
-}
-packet Struct_Array_Field_EnumElement_ConstantSize {
- s: Struct_Array_Field_EnumElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_EnumElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_VariableSize {
- s: Struct_Array_Field_EnumElement_VariableSize_,
-}
-
-struct Struct_Array_Field_EnumElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_VariableCount {
- s: Struct_Array_Field_EnumElement_VariableCount_,
-}
-
-struct Struct_Array_Field_EnumElement_UnknownSize_ {
- array: Enum16[],
-}
-packet Struct_Array_Field_EnumElement_UnknownSize {
- s: Struct_Array_Field_EnumElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_SizedElement_ConstantSize_ {
- array: SizedStruct[4],
-}
-packet Struct_Array_Field_SizedElement_ConstantSize {
- s: Struct_Array_Field_SizedElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_SizedElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_VariableSize {
- s: Struct_Array_Field_SizedElement_VariableSize_,
-}
-
-struct Struct_Array_Field_SizedElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_VariableCount {
- s: Struct_Array_Field_SizedElement_VariableCount_,
-}
-
-struct Struct_Array_Field_SizedElement_UnknownSize_ {
- array: SizedStruct[],
-}
-packet Struct_Array_Field_SizedElement_UnknownSize {
- s: Struct_Array_Field_SizedElement_UnknownSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_ConstantSize_ {
- array: UnsizedStruct[4],
-}
-packet Struct_Array_Field_UnsizedElement_ConstantSize {
- s: Struct_Array_Field_UnsizedElement_ConstantSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_VariableSize_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_VariableSize {
- s: Struct_Array_Field_UnsizedElement_VariableSize_,
-}
-
-struct Struct_Array_Field_UnsizedElement_VariableCount_ {
- _count_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_VariableCount {
- s: Struct_Array_Field_UnsizedElement_VariableCount_,
-}
-
-struct Struct_Array_Field_UnsizedElement_UnknownSize_ {
- array: UnsizedStruct[],
-}
-packet Struct_Array_Field_UnsizedElement_UnknownSize {
- s: Struct_Array_Field_UnsizedElement_UnknownSize_,
-}
-
-// The parser must support complex size modifiers on arrays whose size is
-// specified by a size field.
-struct Struct_Array_Field_UnsizedElement_SizeModifier_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: UnsizedStruct[+2],
-}
-packet Struct_Array_Field_UnsizedElement_SizeModifier {
- s: Struct_Array_Field_UnsizedElement_SizeModifier_,
-}
-
-// The parser must be able to handle arrays with padded size.
-struct Struct_Array_Field_SizedElement_VariableSize_Padded_ {
- _size_(array) : 4,
- _reserved_: 4,
- array: 16[],
- _padding_ [16],
-}
-packet Struct_Array_Field_SizedElement_VariableSize_Padded {
- s: Struct_Array_Field_SizedElement_VariableSize_Padded_,
-}
-
-// The parser must be able to handle arrays with padded size.
-struct Struct_Array_Field_UnsizedElement_VariableCount_Padded_ {
- _count_(array) : 8,
- array: UnsizedStruct[],
- _padding_ [16],
-}
-packet Struct_Array_Field_UnsizedElement_VariableCount_Padded {
- s: Struct_Array_Field_UnsizedElement_VariableCount_Padded_,
-}
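
The checksum constructs above place _checksum_start_(crc) before the fields covered by the checksum, with the crc value at a constant offset from either the start or the end of the packet. The sketch below builds Packet_Checksum_Field_FromStart by hand; the Checksum implementation itself is declared externally ("Checksum"), so this sketch simply assumes it adds the covered octets modulo 256, which is the behaviour the test vectors that follow are consistent with. It is an illustration, not the generated code.

    # Minimal sketch (not generated code): serialize
    # Packet_Checksum_Field_FromStart { _checksum_start_(crc), a: 16, b: 16, crc: Checksum }.
    # Assumption: the canonical "Checksum" sums the covered octets modulo 256,
    # matching the vectors that follow; the real implementation lives outside the .pdl file.

    def checksum(data):
        return sum(data) % 256

    def serialize_from_start(a, b):
        covered = a.to_bytes(2, "little") + b.to_bytes(2, "little")
        return covered + bytes([checksum(covered)])   # crc guards every byte since _checksum_start_

    if __name__ == "__main__":
        # Matches the vector "0000fffffe": a = 0, b = 65535, crc = 254.
        print(serialize_from_start(0, 65535).hex())
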
diff --git a/tools/pdl/tests/canonical/le_test_vectors.json b/tools/pdl/tests/canonical/le_test_vectors.json
deleted file mode 100644
index 243952c697..0000000000
--- a/tools/pdl/tests/canonical/le_test_vectors.json
+++ /dev/null
@@ -1,4377 +0,0 @@
-[
- {
- "packet": "Packet_Scalar_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "c": 0
- }
- },
- {
- "packed": "80ffffffffffffff",
- "unpacked": {
- "a": 0,
- "c": 144115188075855871
- }
- },
- {
- "packed": "8003830282018100",
- "unpacked": {
- "a": 0,
- "c": 283686952306183
- }
- },
- {
- "packed": "7f00000000000000",
- "unpacked": {
- "a": 127,
- "c": 0
- }
- },
- {
- "packed": "ffffffffffffffff",
- "unpacked": {
- "a": 127,
- "c": 144115188075855871
- }
- },
- {
- "packed": "ff03830282018100",
- "unpacked": {
- "a": 127,
- "c": 283686952306183
- }
- },
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "c": 0
- }
- },
- {
- "packed": "80ffffffffffffff",
- "unpacked": {
- "a": 0,
- "c": 144115188075855871
- }
- },
- {
- "packed": "8003830282018100",
- "unpacked": {
- "a": 0,
- "c": 283686952306183
- }
- }
- ]
- },
- {
- "packet": "Packet_Enum_Field",
- "tests": [
- {
- "packed": "0100000000000000",
- "unpacked": {
- "a": 1,
- "c": 0
- }
- },
- {
- "packed": "81ffffffffffffff",
- "unpacked": {
- "a": 1,
- "c": 144115188075855871
- }
- },
- {
- "packed": "810e0d0c0b0a0908",
- "unpacked": {
- "a": 1,
- "c": 4523477106694685
- }
- },
- {
- "packed": "0200000000000000",
- "unpacked": {
- "a": 2,
- "c": 0
- }
- },
- {
- "packed": "82ffffffffffffff",
- "unpacked": {
- "a": 2,
- "c": 144115188075855871
- }
- },
- {
- "packed": "820e0d0c0b0a0908",
- "unpacked": {
- "a": 2,
- "c": 4523477106694685
- }
- }
- ]
- },
- {
- "packet": "Packet_Reserved_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "c": 0
- }
- },
- {
- "packed": "00feffffffffffff",
- "unpacked": {
- "a": 0,
- "c": 36028797018963967
- }
- },
- {
- "packed": "002c151413121110",
- "unpacked": {
- "a": 0,
- "c": 2261184477268630
- }
- },
- {
- "packed": "7f00000000000000",
- "unpacked": {
- "a": 127,
- "c": 0
- }
- },
- {
- "packed": "7ffeffffffffffff",
- "unpacked": {
- "a": 127,
- "c": 36028797018963967
- }
- },
- {
- "packed": "7f2c151413121110",
- "unpacked": {
- "a": 127,
- "c": 2261184477268630
- }
- },
- {
- "packed": "0700000000000000",
- "unpacked": {
- "a": 7,
- "c": 0
- }
- },
- {
- "packed": "07feffffffffffff",
- "unpacked": {
- "a": 7,
- "c": 36028797018963967
- }
- },
- {
- "packed": "072c151413121110",
- "unpacked": {
- "a": 7,
- "c": 2261184477268630
- }
- }
- ]
- },
- {
- "packet": "Packet_Size_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "b": []
- }
- },
- {
- "packed": "07000000000000001f102122232425",
- "unpacked": {
- "a": 0,
- "b": [
- 31,
- 16,
- 33,
- 34,
- 35,
- 36,
- 37
- ]
- }
- },
- {
- "packed": "f8ffffffffffffff",
- "unpacked": {
- "a": 2305843009213693951,
- "b": []
- }
- },
- {
- "packed": "ffffffffffffffff1f102122232425",
- "unpacked": {
- "a": 2305843009213693951,
- "b": [
- 31,
- 16,
- 33,
- 34,
- 35,
- 36,
- 37
- ]
- }
- },
- {
- "packed": "f00e8e0d8d0c8c0b",
- "unpacked": {
- "a": 104006728889254366,
- "b": []
- }
- },
- {
- "packed": "f70e8e0d8d0c8c0b1f102122232425",
- "unpacked": {
- "a": 104006728889254366,
- "b": [
- 31,
- 16,
- 33,
- 34,
- 35,
- 36,
- 37
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Count_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "a": 0,
- "b": []
- }
- },
- {
- "packed": "07000000000000002c2f2e31303332",
- "unpacked": {
- "a": 0,
- "b": [
- 44,
- 47,
- 46,
- 49,
- 48,
- 51,
- 50
- ]
- }
- },
- {
- "packed": "f8ffffffffffffff",
- "unpacked": {
- "a": 2305843009213693951,
- "b": []
- }
- },
- {
- "packed": "ffffffffffffffff2c2f2e31303332",
- "unpacked": {
- "a": 2305843009213693951,
- "b": [
- 44,
- 47,
- 46,
- 49,
- 48,
- 51,
- 50
- ]
- }
- },
- {
- "packed": "c8b2a29282726222",
- "unpacked": {
- "a": 309708581267330649,
- "b": []
- }
- },
- {
- "packed": "cfb2a292827262222c2f2e31303332",
- "unpacked": {
- "a": 309708581267330649,
- "b": [
- 44,
- 47,
- 46,
- 49,
- 48,
- 51,
- 50
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_FixedScalar_Field",
- "tests": [
- {
- "packed": "0700000000000000",
- "unpacked": {
- "b": 0
- }
- },
- {
- "packed": "87ffffffffffffff",
- "unpacked": {
- "b": 144115188075855871
- }
- },
- {
- "packed": "877572706e6c6a34",
- "unpacked": {
- "b": 29507425461658859
- }
- }
- ]
- },
- {
- "packet": "Packet_FixedEnum_Field",
- "tests": [
- {
- "packed": "0100000000000000",
- "unpacked": {
- "b": 0
- }
- },
- {
- "packed": "81ffffffffffffff",
- "unpacked": {
- "b": 144115188075855871
- }
- },
- {
- "packed": "010501fdf8f4f038",
- "unpacked": {
- "b": 32055067271627274
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "payload": []
- }
- },
- {
- "packed": "0743444546474049",
- "unpacked": {
- "payload": [
- 67,
- 68,
- 69,
- 70,
- 71,
- 64,
- 73
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_SizeModifier",
- "tests": [
- {
- "packed": "02",
- "unpacked": {
- "payload": []
- }
- },
- {
- "packed": "074a4b4c4d4e",
- "unpacked": {
- "payload": [
- 74,
- 75,
- 76,
- 77,
- 78
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_UnknownSize",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "payload": [],
- "a": 0
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "payload": [],
- "a": 65535
- }
- },
- {
- "packed": "a552",
- "unpacked": {
- "payload": [],
- "a": 21157
- }
- },
- {
- "packed": "4f485152530000",
- "unpacked": {
- "payload": [
- 79,
- 72,
- 81,
- 82,
- 83
- ],
- "a": 0
- }
- },
- {
- "packed": "4f48515253ffff",
- "unpacked": {
- "payload": [
- 79,
- 72,
- 81,
- 82,
- 83
- ],
- "a": 65535
- }
- },
- {
- "packed": "4f48515253a552",
- "unpacked": {
- "payload": [
- 79,
- 72,
- 81,
- 82,
- 83
- ],
- "a": 21157
- }
- }
- ]
- },
- {
- "packet": "Packet_Payload_Field_UnknownSize_Terminal",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": 0,
- "payload": []
- }
- },
- {
- "packed": "000050595a5b5c",
- "unpacked": {
- "a": 0,
- "payload": [
- 80,
- 89,
- 90,
- 91,
- 92
- ]
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "a": 65535,
- "payload": []
- }
- },
- {
- "packed": "ffff50595a5b5c",
- "unpacked": {
- "a": 65535,
- "payload": [
- 80,
- 89,
- 90,
- 91,
- 92
- ]
- }
- },
- {
- "packed": "b752",
- "unpacked": {
- "a": 21175,
- "payload": []
- }
- },
- {
- "packed": "b75250595a5b5c",
- "unpacked": {
- "a": 21175,
- "payload": [
- 80,
- 89,
- 90,
- 91,
- 92
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Body_Field_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "payload": []
- }
- },
- {
- "packed": "075d5e5f58616263",
- "unpacked": {
- "payload": [
- 93,
- 94,
- 95,
- 88,
- 97,
- 98,
- 99
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Body_Field_UnknownSize",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "payload": [],
- "a": 0
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "payload": [],
- "a": 65535
- }
- },
- {
- "packed": "4a6b",
- "unpacked": {
- "payload": [],
- "a": 27466
- }
- },
- {
- "packed": "64656667600000",
- "unpacked": {
- "payload": [
- 100,
- 101,
- 102,
- 103,
- 96
- ],
- "a": 0
- }
- },
- {
- "packed": "6465666760ffff",
- "unpacked": {
- "payload": [
- 100,
- 101,
- 102,
- 103,
- 96
- ],
- "a": 65535
- }
- },
- {
- "packed": "64656667604a6b",
- "unpacked": {
- "payload": [
- 100,
- 101,
- 102,
- 103,
- 96
- ],
- "a": 27466
- }
- }
- ]
- },
- {
- "packet": "Packet_Body_Field_UnknownSize_Terminal",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": 0,
- "payload": []
- }
- },
- {
- "packed": "00006d6e6f6871",
- "unpacked": {
- "a": 0,
- "payload": [
- 109,
- 110,
- 111,
- 104,
- 113
- ]
- }
- },
- {
- "packed": "ffff",
- "unpacked": {
- "a": 65535,
- "payload": []
- }
- },
- {
- "packed": "ffff6d6e6f6871",
- "unpacked": {
- "a": 65535,
- "payload": [
- 109,
- 110,
- 111,
- 104,
- 113
- ]
- }
- },
- {
- "packed": "5c6b",
- "unpacked": {
- "a": 27484,
- "payload": []
- }
- },
- {
- "packed": "5c6b6d6e6f6871",
- "unpacked": {
- "a": 27484,
- "payload": [
- 109,
- 110,
- 111,
- 104,
- 113
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_ScalarGroup_Field",
- "tests": [
- {
- "packed": "2a00",
- "unpacked": {}
- }
- ]
- },
- {
- "packet": "Packet_EnumGroup_Field",
- "tests": [
- {
- "packed": "bbaa",
- "unpacked": {}
- }
- ]
- },
- {
- "packet": "Packet_Checksum_Field_FromStart",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "a": 0,
- "b": 0,
- "crc": 0
- }
- },
- {
- "packed": "0000fffffe",
- "unpacked": {
- "a": 0,
- "b": 65535,
- "crc": 254
- }
- },
- {
- "packed": "0000a57318",
- "unpacked": {
- "a": 0,
- "b": 29605,
- "crc": 24
- }
- },
- {
- "packed": "ffff0000fe",
- "unpacked": {
- "a": 65535,
- "b": 0,
- "crc": 254
- }
- },
- {
- "packed": "fffffffffc",
- "unpacked": {
- "a": 65535,
- "b": 65535,
- "crc": 252
- }
- },
- {
- "packed": "ffffa57316",
- "unpacked": {
- "a": 65535,
- "b": 29605,
- "crc": 22
- }
- },
- {
- "packed": "9373000006",
- "unpacked": {
- "a": 29587,
- "b": 0,
- "crc": 6
- }
- },
- {
- "packed": "9373ffff04",
- "unpacked": {
- "a": 29587,
- "b": 65535,
- "crc": 4
- }
- },
- {
- "packed": "9373a5731e",
- "unpacked": {
- "a": 29587,
- "b": 29605,
- "crc": 30
- }
- }
- ]
- },
- {
- "packet": "Packet_Checksum_Field_FromEnd",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 0
- }
- },
- {
- "packed": "000000ffff",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 65535
- }
- },
- {
- "packed": "000000ee7b",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 31726
- }
- },
- {
- "packed": "00ffff0000",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 0
- }
- },
- {
- "packed": "00ffffffff",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 65535
- }
- },
- {
- "packed": "00ffffee7b",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 31726
- }
- },
- {
- "packed": "00dc7b0000",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 31708,
- "b": 0
- }
- },
- {
- "packed": "00dc7bffff",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 31708,
- "b": 65535
- }
- },
- {
- "packed": "00dc7bee7b",
- "unpacked": {
- "payload": [],
- "crc": 0,
- "a": 31708,
- "b": 31726
- }
- },
- {
- "packed": "767770797a5000000000",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 0,
- "b": 0
- }
- },
- {
- "packed": "767770797a500000ffff",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 0,
- "b": 65535
- }
- },
- {
- "packed": "767770797a500000ee7b",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 0,
- "b": 31726
- }
- },
- {
- "packed": "767770797a50ffff0000",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 65535,
- "b": 0
- }
- },
- {
- "packed": "767770797a50ffffffff",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 65535,
- "b": 65535
- }
- },
- {
- "packed": "767770797a50ffffee7b",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 65535,
- "b": 31726
- }
- },
- {
- "packed": "767770797a50dc7b0000",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 31708,
- "b": 0
- }
- },
- {
- "packed": "767770797a50dc7bffff",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 31708,
- "b": 65535
- }
- },
- {
- "packed": "767770797a50dc7bee7b",
- "unpacked": {
- "payload": [
- 118,
- 119,
- 112,
- 121,
- 122
- ],
- "crc": 80,
- "a": 31708,
- "b": 31726
- }
- }
- ]
- },
- {
- "packet": "Packet_Struct_Field",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "0003788182",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": [
- 120,
- 129,
- 130
- ]
- }
- }
- },
- {
- "packed": "ff00",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "ff03788182",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": [
- 120,
- 129,
- 130
- ]
- }
- }
- },
- {
- "packed": "7f00",
- "unpacked": {
- "a": {
- "a": 127
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "7f03788182",
- "unpacked": {
- "a": {
- "a": 127
- },
- "b": {
- "array": [
- 120,
- 129,
- 130
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_ConstantSize",
- "tests": [
- {
- "packed": "83848586",
- "unpacked": {
- "array": [
- 131,
- 132,
- 133,
- 134
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0f8780898a8b8c8d8e8f889192939495",
- "unpacked": {
- "array": [
- 135,
- 128,
- 137,
- 138,
- 139,
- 140,
- 141,
- 142,
- 143,
- 136,
- 145,
- 146,
- 147,
- 148,
- 149
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0f969790999a9b9c9d9e9f98a1a2a3a4",
- "unpacked": {
- "array": [
- 150,
- 151,
- 144,
- 153,
- 154,
- 155,
- 156,
- 157,
- 158,
- 159,
- 152,
- 161,
- 162,
- 163,
- 164
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ByteElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "a5a6a7",
- "unpacked": {
- "array": [
- 165,
- 166,
- 167
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_ConstantSize",
- "tests": [
- {
- "packed": "41a553ad65ad77ad",
- "unpacked": {
- "array": [
- 42305,
- 44371,
- 44389,
- 44407
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0e81ad93b5a5b5b7b5c1b5d3bde5bd",
- "unpacked": {
- "array": [
- 44417,
- 46483,
- 46501,
- 46519,
- 46529,
- 48595,
- 48613
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0ff7bd01be13c625c637c641c653ce65ce77ce81ce93d6a5d6b7d6c1d6d3de",
- "unpacked": {
- "array": [
- 48631,
- 48641,
- 50707,
- 50725,
- 50743,
- 50753,
- 52819,
- 52837,
- 52855,
- 52865,
- 54931,
- 54949,
- 54967,
- 54977,
- 57043
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_ScalarElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "e5def7de01df",
- "unpacked": {
- "array": [
- 57061,
- 57079,
- 57089
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_ConstantSize",
- "tests": [
- {
- "packed": "bbaaddccbbaaddcc",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_VariableSize",
- "tests": [
- {
- "packed": "0ebbaaddccbbaaddccbbaaddccbbaa",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_VariableCount",
- "tests": [
- {
- "packed": "0fbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaa",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_EnumElement_UnknownSize",
- "tests": [
- {
- "packed": "bbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddcc",
- "unpacked": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- },
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_ConstantSize",
- "tests": [
- {
- "packed": "00ffe200",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 226
- },
- {
- "a": 0
- }
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f00ffe400ffe500ffe600ffe700ffe0",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 228
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 229
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 230
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 231
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 224
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00ffea00ffeb00ffec00ffed00ffee",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 234
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 235
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 236
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 237
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 238
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_UnknownSize",
- "tests": [
- {
- "packed": "00ffe800fff100fff200fff300fff400fff500fff600fff700fff000fff900ff",
- "unpacked": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 232
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 241
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 242
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 243
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 244
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 245
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 246
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 247
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 240
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 249
- },
- {
- "a": 0
- },
- {
- "a": 255
- }
- ]
- }
- },
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_ConstantSize",
- "tests": [
- {
- "packed": "0003fbfcfd0003fef801",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 251,
- 252,
- 253
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 254,
- 248,
- 1
- ]
- }
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f0003050607000300090a00030b0c0d",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 5,
- 6,
- 7
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 0,
- 9,
- 10
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 11,
- 12,
- 13
- ]
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00031112130003141516000317101900031a1b1c00031d1e1f0003182122000323242500",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 17,
- 18,
- 19
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 20,
- 21,
- 22
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 23,
- 16,
- 25
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 26,
- 27,
- 28
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 29,
- 30,
- 31
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 24,
- 33,
- 34
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 35,
- 36,
- 37
- ]
- },
- {
- "array": []
- }
- ]
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_UnknownSize",
- "tests": [
- {
- "packed": "0003292a2b00032c2d2e00032f283100033233340003353637000330393a00033b3c3d00033e3f3800034142430003444546000347404900034a4b4c00034d4e4f000348515200035354550003565750",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 41,
- 42,
- 43
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 44,
- 45,
- 46
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 47,
- 40,
- 49
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 50,
- 51,
- 52
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 53,
- 54,
- 55
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 48,
- 57,
- 58
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 59,
- 60,
- 61
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 62,
- 63,
- 56
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 65,
- 66,
- 67
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 68,
- 69,
- 70
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 71,
- 64,
- 73
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 74,
- 75,
- 76
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 77,
- 78,
- 79
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 72,
- 81,
- 82
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 83,
- 84,
- 85
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 86,
- 87,
- 80
- ]
- }
- ]
- }
- },
- {
- "packed": "",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_SizeModifier",
- "tests": [
- {
- "packed": "0d00035c5d5e00035f586100",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 92,
- 93,
- 94
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 95,
- 88,
- 97
- ]
- },
- {
- "array": []
- }
- ]
- }
- },
- {
- "packed": "02",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_SizedElement_VariableSize_Padded",
- "tests": [
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "array": []
- }
- },
- {
- "packed": "0e2e6338634a6b5c6b6e6b786b8a730000",
- "unpacked": {
- "array": [
- 25390,
- 25400,
- 27466,
- 27484,
- 27502,
- 27512,
- 29578
- ]
- }
- }
- ]
- },
- {
- "packet": "Packet_Array_Field_UnsizedElement_VariableCount_Padded",
- "tests": [
- {
- "packed": "07000373747500037677700003797a7b00",
- "unpacked": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 115,
- 116,
- 117
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 118,
- 119,
- 112
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 121,
- 122,
- 123
- ]
- },
- {
- "array": []
- }
- ]
- }
- },
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "array": []
- }
- }
- ]
- },
- {
- "packet": "ScalarParent",
- "tests": [
- {
- "packed": "000100",
- "unpacked": {
- "a": 0,
- "b": 0
- },
- "packet": "ScalarChild_A"
- },
- {
- "packed": "0001ff",
- "unpacked": {
- "a": 0,
- "b": 255
- },
- "packet": "ScalarChild_A"
- },
- {
- "packed": "00017f",
- "unpacked": {
- "a": 0,
- "b": 127
- },
- "packet": "ScalarChild_A"
- },
- {
- "packed": "01020000",
- "unpacked": {
- "a": 1,
- "c": 0
- },
- "packet": "ScalarChild_B"
- },
- {
- "packed": "0102ffff",
- "unpacked": {
- "a": 1,
- "c": 65535
- },
- "packet": "ScalarChild_B"
- },
- {
- "packed": "0102017c",
- "unpacked": {
- "a": 1,
- "c": 31745
- },
- "packet": "ScalarChild_B"
- },
- {
- "packed": "020100",
- "unpacked": {
- "a": 2,
- "b": 0
- },
- "packet": "AliasedChild_A"
- },
- {
- "packed": "0201ff",
- "unpacked": {
- "a": 2,
- "b": 255
- },
- "packet": "AliasedChild_A"
- },
- {
- "packed": "020185",
- "unpacked": {
- "a": 2,
- "b": 133
- },
- "packet": "AliasedChild_A"
- },
- {
- "packed": "03020000",
- "unpacked": {
- "a": 3,
- "c": 0
- },
- "packet": "AliasedChild_B"
- },
- {
- "packed": "0302ffff",
- "unpacked": {
- "a": 3,
- "c": 65535
- },
- "packet": "AliasedChild_B"
- },
- {
- "packed": "03023784",
- "unpacked": {
- "a": 3,
- "c": 33847
- },
- "packet": "AliasedChild_B"
- }
- ]
- },
- {
- "packet": "EnumParent",
- "tests": [
- {
- "packed": "bbaa0100",
- "unpacked": {
- "a": 43707,
- "b": 0
- },
- "packet": "EnumChild_A"
- },
- {
- "packed": "bbaa01ff",
- "unpacked": {
- "a": 43707,
- "b": 255
- },
- "packet": "EnumChild_A"
- },
- {
- "packed": "bbaa0182",
- "unpacked": {
- "a": 43707,
- "b": 130
- },
- "packet": "EnumChild_A"
- },
- {
- "packed": "ddcc020000",
- "unpacked": {
- "a": 52445,
- "c": 0
- },
- "packet": "EnumChild_B"
- },
- {
- "packed": "ddcc02ffff",
- "unpacked": {
- "a": 52445,
- "c": 65535
- },
- "packet": "EnumChild_B"
- },
- {
- "packed": "ddcc021c84",
- "unpacked": {
- "a": 52445,
- "c": 33820
- },
- "packet": "EnumChild_B"
- }
- ]
- },
- {
- "packet": "PartialParent5",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": 0,
- "b": 0
- },
- "packet": "PartialChild5_A"
- },
- {
- "packed": "e0ff",
- "unpacked": {
- "a": 0,
- "b": 2047
- },
- "packet": "PartialChild5_A"
- },
- {
- "packed": "0081",
- "unpacked": {
- "a": 0,
- "b": 1032
- },
- "packet": "PartialChild5_A"
- },
- {
- "packed": "01000000",
- "unpacked": {
- "a": 1,
- "c": 0
- },
- "packet": "PartialChild5_B"
- },
- {
- "packed": "e1ffffff",
- "unpacked": {
- "a": 1,
- "c": 134217727
- },
- "packet": "PartialChild5_B"
- },
- {
- "packed": "c1a262a2",
- "unpacked": {
- "a": 1,
- "c": 85136662
- },
- "packet": "PartialChild5_B"
- }
- ]
- },
- {
- "packet": "PartialParent12",
- "tests": [
- {
- "packed": "0200",
- "unpacked": {
- "a": 2,
- "d": 0
- },
- "packet": "PartialChild12_A"
- },
- {
- "packed": "02f0",
- "unpacked": {
- "a": 2,
- "d": 15
- },
- "packet": "PartialChild12_A"
- },
- {
- "packed": "0260",
- "unpacked": {
- "a": 2,
- "d": 6
- },
- "packet": "PartialChild12_A"
- },
- {
- "packed": "03000000",
- "unpacked": {
- "a": 3,
- "e": 0
- },
- "packet": "PartialChild12_B"
- },
- {
- "packed": "03f0ffff",
- "unpacked": {
- "a": 3,
- "e": 1048575
- },
- "packet": "PartialChild12_B"
- },
- {
- "packed": "03d0b191",
- "unpacked": {
- "a": 3,
- "e": 596765
- },
- "packet": "PartialChild12_B"
- }
- ]
- },
- {
- "packet": "Struct_Enum_Field",
- "tests": [
- {
- "packed": "0100000000000000",
- "unpacked": {
- "s": {
- "a": 1,
- "c": 0
- }
- }
- },
- {
- "packed": "81ffffffffffffff",
- "unpacked": {
- "s": {
- "a": 1,
- "c": 144115188075855871
- }
- }
- },
- {
- "packed": "012b29272523218f",
- "unpacked": {
- "s": {
- "a": 1,
- "c": 80574713001038422
- }
- }
- },
- {
- "packed": "0200000000000000",
- "unpacked": {
- "s": {
- "a": 2,
- "c": 0
- }
- }
- },
- {
- "packed": "82ffffffffffffff",
- "unpacked": {
- "s": {
- "a": 2,
- "c": 144115188075855871
- }
- }
- },
- {
- "packed": "022b29272523218f",
- "unpacked": {
- "s": {
- "a": 2,
- "c": 80574713001038422
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Reserved_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "c": 0
- }
- }
- },
- {
- "packed": "00feffffffffffff",
- "unpacked": {
- "s": {
- "a": 0,
- "c": 36028797018963967
- }
- }
- },
- {
- "packed": "003a393735333197",
- "unpacked": {
- "s": {
- "a": 0,
- "c": 21278408744606877
- }
- }
- },
- {
- "packed": "7f00000000000000",
- "unpacked": {
- "s": {
- "a": 127,
- "c": 0
- }
- }
- },
- {
- "packed": "7ffeffffffffffff",
- "unpacked": {
- "s": {
- "a": 127,
- "c": 36028797018963967
- }
- }
- },
- {
- "packed": "7f3a393735333197",
- "unpacked": {
- "s": {
- "a": 127,
- "c": 21278408744606877
- }
- }
- },
- {
- "packed": "4b00000000000000",
- "unpacked": {
- "s": {
- "a": 75,
- "c": 0
- }
- }
- },
- {
- "packed": "4bfeffffffffffff",
- "unpacked": {
- "s": {
- "a": 75,
- "c": 36028797018963967
- }
- }
- },
- {
- "packed": "4b3a393735333197",
- "unpacked": {
- "s": {
- "a": 75,
- "c": 21278408744606877
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Size_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "b": []
- }
- }
- },
- {
- "packed": "0700000000000000a6a7a8a9aaabac",
- "unpacked": {
- "s": {
- "a": 0,
- "b": [
- 166,
- 167,
- 168,
- 169,
- 170,
- 171,
- 172
- ]
- }
- }
- },
- {
- "packed": "f8ffffffffffffff",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": []
- }
- }
- },
- {
- "packed": "ffffffffffffffffa6a7a8a9aaabac",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": [
- 166,
- 167,
- 168,
- 169,
- 170,
- 171,
- 172
- ]
- }
- }
- },
- {
- "packed": "28a4a3a2a1a09f9e",
- "unpacked": {
- "s": {
- "a": 1428753874421052549,
- "b": []
- }
- }
- },
- {
- "packed": "2fa4a3a2a1a09f9ea6a7a8a9aaabac",
- "unpacked": {
- "s": {
- "a": 1428753874421052549,
- "b": [
- 166,
- 167,
- 168,
- 169,
- 170,
- 171,
- 172
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Count_Field",
- "tests": [
- {
- "packed": "0000000000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "b": []
- }
- }
- },
- {
- "packed": "0700000000000000b5b6b7b4b9babb",
- "unpacked": {
- "s": {
- "a": 0,
- "b": [
- 181,
- 182,
- 183,
- 180,
- 185,
- 186,
- 187
- ]
- }
- }
- },
- {
- "packed": "f8ffffffffffffff",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": []
- }
- }
- },
- {
- "packed": "ffffffffffffffffb5b6b7b4b9babb",
- "unpacked": {
- "s": {
- "a": 2305843009213693951,
- "b": [
- 181,
- 182,
- 183,
- 180,
- 185,
- 186,
- 187
- ]
- }
- }
- },
- {
- "packed": "60563616f6d5b5b5",
- "unpacked": {
- "s": {
- "a": 1636700843070114508,
- "b": []
- }
- }
- },
- {
- "packed": "67563616f6d5b5b5b5b6b7b4b9babb",
- "unpacked": {
- "s": {
- "a": 1636700843070114508,
- "b": [
- 181,
- 182,
- 183,
- 180,
- 185,
- 186,
- 187
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_FixedScalar_Field",
- "tests": [
- {
- "packed": "0700000000000000",
- "unpacked": {
- "s": {
- "b": 0
- }
- }
- },
- {
- "packed": "87ffffffffffffff",
- "unpacked": {
- "s": {
- "b": 144115188075855871
- }
- }
- },
- {
- "packed": "070503fffaf6f2ba",
- "unpacked": {
- "s": {
- "b": 105242976510150154
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_FixedEnum_Field",
- "tests": [
- {
- "packed": "0100000000000000",
- "unpacked": {
- "s": {
- "b": 0
- }
- }
- },
- {
- "packed": "81ffffffffffffff",
- "unpacked": {
- "s": {
- "b": 144115188075855871
- }
- }
- },
- {
- "packed": "81443e362e261ec6",
- "unpacked": {
- "s": {
- "b": 111530389443214473
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_ScalarGroup_Field",
- "tests": [
- {
- "packed": "2a00",
- "unpacked": {
- "s": {}
- }
- }
- ]
- },
- {
- "packet": "Struct_EnumGroup_Field",
- "tests": [
- {
- "packed": "bbaa",
- "unpacked": {
- "s": {}
- }
- }
- ]
- },
- {
- "packet": "Struct_Checksum_Field_FromStart",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "s": {
- "a": 0,
- "b": 0,
- "crc": 0
- }
- }
- },
- {
- "packed": "0000fffffe",
- "unpacked": {
- "s": {
- "a": 0,
- "b": 65535,
- "crc": 254
- }
- }
- },
- {
- "packed": "0000cdcc99",
- "unpacked": {
- "s": {
- "a": 0,
- "b": 52429,
- "crc": 153
- }
- }
- },
- {
- "packed": "ffff0000fe",
- "unpacked": {
- "s": {
- "a": 65535,
- "b": 0,
- "crc": 254
- }
- }
- },
- {
- "packed": "fffffffffc",
- "unpacked": {
- "s": {
- "a": 65535,
- "b": 65535,
- "crc": 252
- }
- }
- },
- {
- "packed": "ffffcdcc97",
- "unpacked": {
- "s": {
- "a": 65535,
- "b": 52429,
- "crc": 151
- }
- }
- },
- {
- "packed": "abcc000077",
- "unpacked": {
- "s": {
- "a": 52395,
- "b": 0,
- "crc": 119
- }
- }
- },
- {
- "packed": "abccffff75",
- "unpacked": {
- "s": {
- "a": 52395,
- "b": 65535,
- "crc": 117
- }
- }
- },
- {
- "packed": "abcccdcc10",
- "unpacked": {
- "s": {
- "a": 52395,
- "b": 52429,
- "crc": 16
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Checksum_Field_FromEnd",
- "tests": [
- {
- "packed": "0000000000",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 0
- }
- }
- },
- {
- "packed": "000000ffff",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 65535
- }
- }
- },
- {
- "packed": "00000056dd",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 0,
- "b": 56662
- }
- }
- },
- {
- "packed": "00ffff0000",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 0
- }
- }
- },
- {
- "packed": "00ffffffff",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 65535
- }
- }
- },
- {
- "packed": "00ffff56dd",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 65535,
- "b": 56662
- }
- }
- },
- {
- "packed": "0034dd0000",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 56628,
- "b": 0
- }
- }
- },
- {
- "packed": "0034ddffff",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 56628,
- "b": 65535
- }
- }
- },
- {
- "packed": "0034dd56dd",
- "unpacked": {
- "s": {
- "payload": [],
- "crc": 0,
- "a": 56628,
- "b": 56662
- }
- }
- },
- {
- "packed": "cecfc0d1d20000000000",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 0,
- "b": 0
- }
- }
- },
- {
- "packed": "cecfc0d1d2000000ffff",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 0,
- "b": 65535
- }
- }
- },
- {
- "packed": "cecfc0d1d200000056dd",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 0,
- "b": 56662
- }
- }
- },
- {
- "packed": "cecfc0d1d200ffff0000",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 65535,
- "b": 0
- }
- }
- },
- {
- "packed": "cecfc0d1d200ffffffff",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 65535,
- "b": 65535
- }
- }
- },
- {
- "packed": "cecfc0d1d200ffff56dd",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 65535,
- "b": 56662
- }
- }
- },
- {
- "packed": "cecfc0d1d20034dd0000",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 56628,
- "b": 0
- }
- }
- },
- {
- "packed": "cecfc0d1d20034ddffff",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 56628,
- "b": 65535
- }
- }
- },
- {
- "packed": "cecfc0d1d20034dd56dd",
- "unpacked": {
- "s": {
- "payload": [
- 206,
- 207,
- 192,
- 209,
- 210
- ],
- "crc": 0,
- "a": 56628,
- "b": 56662
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Struct_Field",
- "tests": [
- {
- "packed": "0000",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "0003d8d9da",
- "unpacked": {
- "a": {
- "a": 0
- },
- "b": {
- "array": [
- 216,
- 217,
- 218
- ]
- }
- }
- },
- {
- "packed": "ff00",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "ff03d8d9da",
- "unpacked": {
- "a": {
- "a": 255
- },
- "b": {
- "array": [
- 216,
- 217,
- 218
- ]
- }
- }
- },
- {
- "packed": "d700",
- "unpacked": {
- "a": {
- "a": 215
- },
- "b": {
- "array": []
- }
- }
- },
- {
- "packed": "d703d8d9da",
- "unpacked": {
- "a": {
- "a": 215
- },
- "b": {
- "array": [
- 216,
- 217,
- 218
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_ConstantSize",
- "tests": [
- {
- "packed": "dbdcddde",
- "unpacked": {
- "s": {
- "array": [
- 219,
- 220,
- 221,
- 222
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0fdfd0e1e2e3e4e5e6e7e8e9eaebeced",
- "unpacked": {
- "s": {
- "array": [
- 223,
- 208,
- 225,
- 226,
- 227,
- 228,
- 229,
- 230,
- 231,
- 232,
- 233,
- 234,
- 235,
- 236,
- 237
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0feeefe0f1f2f3f4f5f6f7f8f9fafbfc",
- "unpacked": {
- "s": {
- "array": [
- 238,
- 239,
- 224,
- 241,
- 242,
- 243,
- 244,
- 245,
- 246,
- 247,
- 248,
- 249,
- 250,
- 251,
- 252
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ByteElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "fdfef0",
- "unpacked": {
- "s": {
- "array": [
- 253,
- 254,
- 240
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_ConstantSize",
- "tests": [
- {
- "packed": "1200340056007800",
- "unpacked": {
- "s": {
- "array": [
- 18,
- 52,
- 86,
- 120
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_VariableSize",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0e9a00bc00de00f000121134115611",
- "unpacked": {
- "s": {
- "array": [
- 154,
- 188,
- 222,
- 240,
- 4370,
- 4404,
- 4438
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_VariableCount",
- "tests": [
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0f78119a11bc11de11f01112223422562278229a22bc22de22f02212333433",
- "unpacked": {
- "s": {
- "array": [
- 4472,
- 4506,
- 4540,
- 4574,
- 4592,
- 8722,
- 8756,
- 8790,
- 8824,
- 8858,
- 8892,
- 8926,
- 8944,
- 13074,
- 13108
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_ScalarElement_UnknownSize",
- "tests": [
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "563378339a33",
- "unpacked": {
- "s": {
- "array": [
- 13142,
- 13176,
- 13210
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_ConstantSize",
- "tests": [
- {
- "packed": "bbaaddccbbaaddcc",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_VariableSize",
- "tests": [
- {
- "packed": "0ebbaaddccbbaaddccbbaaddccbbaa",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_VariableCount",
- "tests": [
- {
- "packed": "0fbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaa",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_EnumElement_UnknownSize",
- "tests": [
- {
- "packed": "bbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddccbbaaddcc",
- "unpacked": {
- "s": {
- "array": [
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445,
- 43707,
- 52445
- ]
- }
- }
- },
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_ConstantSize",
- "tests": [
- {
- "packed": "00ff3b00",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 59
- },
- {
- "a": 0
- }
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f00ff3d00ff3e00ff3f00ff3000ff41",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 61
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 62
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 63
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 48
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 65
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f00ff4300ff4400ff4500ff4600ff47",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 67
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 68
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 69
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 70
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 71
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_UnknownSize",
- "tests": [
- {
- "packed": "00ff4900ff4a00ff4b00ff4c00ff4d00ff4e00ff4f00ff4000ff5100ff5200ff",
- "unpacked": {
- "s": {
- "array": [
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 73
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 74
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 75
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 76
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 77
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 78
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 79
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 64
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 81
- },
- {
- "a": 0
- },
- {
- "a": 255
- },
- {
- "a": 82
- },
- {
- "a": 0
- },
- {
- "a": 255
- }
- ]
- }
- }
- },
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_ConstantSize",
- "tests": [
- {
- "packed": "00035455560003575859",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 84,
- 85,
- 86
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 87,
- 88,
- 89
- ]
- }
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_VariableSize",
- "tests": [
- {
- "packed": "0f00035d5e5f00035061620003636465",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 93,
- 94,
- 95
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 80,
- 97,
- 98
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 99,
- 100,
- 101
- ]
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_VariableCount",
- "tests": [
- {
- "packed": "0f0003696a6b00036c6d6e00036f607100037273740003757677000378797a00037b7c7d00",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 105,
- 106,
- 107
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 108,
- 109,
- 110
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 111,
- 96,
- 113
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 114,
- 115,
- 116
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 117,
- 118,
- 119
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 120,
- 121,
- 122
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 123,
- 124,
- 125
- ]
- },
- {
- "array": []
- }
- ]
- }
- }
- },
- {
- "packed": "00",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_UnknownSize",
- "tests": [
- {
- "packed": "00038182830003848586000387888900038a8b8c00038d8e8f0003809192000393949500039697980003999a9b00039c9d9e00039f90a10003a2a3a40003a5a6a70003a8a9aa0003abacad0003aeafa0",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 129,
- 130,
- 131
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 132,
- 133,
- 134
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 135,
- 136,
- 137
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 138,
- 139,
- 140
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 141,
- 142,
- 143
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 128,
- 145,
- 146
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 147,
- 148,
- 149
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 150,
- 151,
- 152
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 153,
- 154,
- 155
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 156,
- 157,
- 158
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 159,
- 144,
- 161
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 162,
- 163,
- 164
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 165,
- 166,
- 167
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 168,
- 169,
- 170
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 171,
- 172,
- 173
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 174,
- 175,
- 160
- ]
- }
- ]
- }
- }
- },
- {
- "packed": "",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_SizeModifier",
- "tests": [
- {
- "packed": "0d0003b4b5b60003b7b8b900",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 180,
- 181,
- 182
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 183,
- 184,
- 185
- ]
- },
- {
- "array": []
- }
- ]
- }
- }
- },
- {
- "packed": "02",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_SizedElement_VariableSize_Padded",
- "tests": [
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- },
- {
- "packed": "0edebbf0bb12cc34cc56cc78cc9acc0000",
- "unpacked": {
- "s": {
- "array": [
- 48094,
- 48112,
- 52242,
- 52276,
- 52310,
- 52344,
- 52378
- ]
- }
- }
- }
- ]
- },
- {
- "packet": "Struct_Array_Field_UnsizedElement_VariableCount_Padded",
- "tests": [
- {
- "packed": "070003cbcccd0003cecfc00003d1d2d300",
- "unpacked": {
- "s": {
- "array": [
- {
- "array": []
- },
- {
- "array": [
- 203,
- 204,
- 205
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 206,
- 207,
- 192
- ]
- },
- {
- "array": []
- },
- {
- "array": [
- 209,
- 210,
- 211
- ]
- },
- {
- "array": []
- }
- ]
- }
- }
- },
- {
- "packed": "0000000000000000000000000000000000",
- "unpacked": {
- "s": {
- "array": []
- }
- }
- }
- ]
- }
-]
\ No newline at end of file
diff --git a/tools/pdl/tests/custom_types.py b/tools/pdl/tests/custom_types.py
deleted file mode 100644
index cac9896244..0000000000
--- a/tools/pdl/tests/custom_types.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from dataclasses import dataclass
-from typing import Tuple
-
-
-@dataclass
-class SizedCustomField:
-
- def __init__(self, value: int = 0):
- self.value = value
-
- def parse(span: bytes) -> Tuple['SizedCustomField', bytes]:
- return (SizedCustomField(span[0]), span[1:])
-
- def parse_all(span: bytes) -> 'SizedCustomField':
- assert (len(span) == 1)
- return SizedCustomField(span[0])
-
- @property
- def size(self) -> int:
- return 1
-
-
-@dataclass
-class UnsizedCustomField:
-
- def __init__(self, value: int = 0):
- self.value = value
-
- def parse(span: bytes) -> Tuple['UnsizedCustomField', bytes]:
- return (UnsizedCustomField(span[0]), span[1:])
-
- def parse_all(span: bytes) -> 'UnsizedCustomField':
- assert (len(span) == 1)
- return UnsizedCustomField(span[0])
-
- @property
- def size(self) -> int:
- return 1
-
-
-def Checksum(span: bytes) -> int:
- return sum(span) % 256
diff --git a/tools/pdl/tests/examples/array-field.pdl b/tools/pdl/tests/examples/array-field.pdl
deleted file mode 100644
index 070a6cc532..0000000000
--- a/tools/pdl/tests/examples/array-field.pdl
+++ /dev/null
@@ -1,39 +0,0 @@
-little_endian_packets
-
-custom_field custom: 1 "custom"
-checksum checksum: 1 "checksum"
-
-enum Enum : 1 {
- tag = 0,
-}
-
-struct Struct {
- a: 1,
-}
-
-packet Packet {
- a: 1,
-}
-
-group Group {
- a: 1,
-}
-
-packet InvalidKind {
- array_0: Group[],
- array_1: Packet[],
- array_2: checksum[],
-}
-
-packet UndeclaredType {
- array: Unknown[],
-}
-
-packet Correct {
- array_0: custom[],
- array_1: Enum[],
- array_2: Struct[],
- array_3: 1[],
- array_4: 1[42],
- array_5: 1[+2],
-}
diff --git a/tools/pdl/tests/examples/checksum-field.pdl b/tools/pdl/tests/examples/checksum-field.pdl
deleted file mode 100644
index 0e1a98b266..0000000000
--- a/tools/pdl/tests/examples/checksum-field.pdl
+++ /dev/null
@@ -1,22 +0,0 @@
-little_endian_packets
-
-checksum crc16: 16 "crc16"
-
-packet Undefined {
- _checksum_start_ (crc16),
-}
-
-packet InvalidType {
- crc16: 16,
- _checksum_start_ (crc16),
-}
-
-packet InvalidOrder {
- _checksum_start_ (crc16),
- crc16: crc16,
-}
-
-packet Correct {
- crc16: crc16,
- _checksum_start_ (crc16),
-}
diff --git a/tools/pdl/tests/examples/count-field.pdl b/tools/pdl/tests/examples/count-field.pdl
deleted file mode 100644
index a88cccdec8..0000000000
--- a/tools/pdl/tests/examples/count-field.pdl
+++ /dev/null
@@ -1,25 +0,0 @@
-little_endian_packets
-
-packet Undefined {
- _count_ (array): 8,
-}
-
-packet InvalidType {
- _count_ (array): 8,
- array: 16,
-}
-
-packet InvalidOrder {
- array: 16[],
- _count_ (array): 8,
-}
-
-packet InvalidSize {
- _count_ (array): 8,
- array: 16[32],
-}
-
-packet Correct {
- _count_ (array): 8,
- array: 16[],
-}
diff --git a/tools/pdl/tests/examples/decl-scope.pdl b/tools/pdl/tests/examples/decl-scope.pdl
deleted file mode 100644
index c1391ab3f2..0000000000
--- a/tools/pdl/tests/examples/decl-scope.pdl
+++ /dev/null
@@ -1,26 +0,0 @@
-
-// Clashes with custom_field, struct, enum
-checksum decl_name: 16 "crc16"
-
-// Clashes with checksum, struct, enum
-custom_field decl_name: 1 "custom"
-
-// Clashes with checksum, custom_field, struct
-enum decl_name : 1 {
- A = 1,
-}
-
-// Clashes with checksum, custom_field, enum
-struct decl_name {
- a: 1,
-}
-
-// OK
-group decl_name {
- a: 1,
-}
-
-// OK
-packet decl_name {
- a: 1,
-}
diff --git a/tools/pdl/tests/examples/example.pdl b/tools/pdl/tests/examples/example.pdl
deleted file mode 100644
index b34d1400db..0000000000
--- a/tools/pdl/tests/examples/example.pdl
+++ /dev/null
@@ -1,78 +0,0 @@
-// line comment
-/* block comment */
-
-little_endian_packets
-
-/* stuff */
-enum FourBits : 4 {
- ONE = 1,
- TWO = 2,
- THREE = 3,
- FIVE = 5,
- TEN = 10,
- LAZY_ME = 15,
-}
-
-/* other stuff */
-enum FourBits : 4 {
- ONE = 1,
- TWO = 2,
- THREE = 3,
- FIVE = 5,
- TEN = 10,
- LAZY_ME = 15
-}
-
-packet Test {
- /* Checksum */
- _checksum_start_ (crc16),
- /* Padding */
- _padding_ [1],
- /* Size */
- _size_ (_payload_) : 1,
- _size_ (_body_) : 1,
- _size_ (id) : 1,
- /* Body */
- _body_,
- /* Payload */
- _payload_,
- _payload_ : [+1],
- /* Fixed */
- _fixed_ = 1:1,
- _fixed_ = id:id,
- /* Reserved */
- _reserved_ : 1,
- /* Array */
- id: 1[+1],
- id: id[+1],
- id: 1[1],
- id: id[1],
- id: 1[],
- id: id[],
- /* Scalar */
- id: 1,
- /* Typedef */
- id : id,
- /* Group */
- id { a=1, b=2 },
- id,
-}
-
-packet TestChild : Test {
-}
-
-packet TestChild (a=1, b=2) {
-}
-
-packet TestChild : Test (a=1, b=2) {
-}
-
-checksum id: 1 "id"
-
-custom_field id : 1 "id"
-custom_field id "id"
-
-test Test {
- "1111",
- "2222",
-}
diff --git a/tools/pdl/tests/examples/fixed-field.pdl b/tools/pdl/tests/examples/fixed-field.pdl
deleted file mode 100644
index e69fc7e37f..0000000000
--- a/tools/pdl/tests/examples/fixed-field.pdl
+++ /dev/null
@@ -1,22 +0,0 @@
-little_endian_packets
-
-enum Enum : 1 {
- tag = 0,
-}
-
-packet InvalidValue {
- _fixed_ = 1: 256,
-}
-
-packet UndeclaredEnum {
- _fixed_ = tag : InvalidEnum,
-}
-
-packet UndeclaredTag {
- _fixed_ = invalid_tag : Enum,
-}
-
-packet Correct {
- _fixed_ = 1: 256,
- _fixed_ = tag: Enum,
-}
diff --git a/tools/pdl/tests/examples/group-constraint.pdl b/tools/pdl/tests/examples/group-constraint.pdl
deleted file mode 100644
index 34ee2ab7ef..0000000000
--- a/tools/pdl/tests/examples/group-constraint.pdl
+++ /dev/null
@@ -1,39 +0,0 @@
-little_endian_packets
-
-custom_field custom_field: 1 "custom"
-checksum checksum: 1 "checksum"
-
-enum Enum : 1 {
- tag = 0,
-}
-
-group Group {
- a: 4,
- b: Enum,
- c: custom_field,
- d: checksum,
-}
-
-struct Undeclared {
- Group { e=1 },
-}
-
-struct Redeclared {
- Group { a=1, a=2 },
-}
-
-struct TypeMismatch {
- Group { a=tag, b=1, c=1, d=1 },
-}
-
-struct InvalidLiteral {
- Group { a=42 },
-}
-
-struct UndeclaredTag {
- Group { b=undeclared_tag },
-}
-
-struct Correct {
- Group { a=1, b=tag },
-}
diff --git a/tools/pdl/tests/examples/packet.pdl b/tools/pdl/tests/examples/packet.pdl
deleted file mode 100644
index 9b9ca201d6..0000000000
--- a/tools/pdl/tests/examples/packet.pdl
+++ /dev/null
@@ -1,52 +0,0 @@
-little_endian_packets
-
-custom_field custom: 1 "custom"
-checksum checksum: 1 "checksum"
-
-enum Enum : 1 {
- tag = 0,
-}
-
-packet Packet {
- a: 4,
- b: Enum,
- c: custom,
- d: checksum,
-}
-
-struct Struct {
- a: 4,
-}
-
-packet RecursivePacket_0 : RecursivePacket_1 {
-}
-
-packet RecursivePacket_1 : RecursivePacket_0 {
-}
-
-packet InvalidParent : Struct {
-}
-
-packet UndeclaredParent : FooBar {
-}
-
-packet UnnecessaryConstraints (a=1) {
-}
-
-packet Undeclared : Packet (c=1) {
-}
-
-packet Redeclared : Packet (a=1, a=2) {
-}
-
-packet TypeMismatch : Packet (a=tag, b=1, c=1, d=1) {
-}
-
-packet InvalidLiteral : Packet (a=42) {
-}
-
-packet UndeclaredTag : Packet (b=undeclared_tag) {
-}
-
-packet Correct : Packet (a=1, b=tag) {
-}
diff --git a/tools/pdl/tests/examples/recurse.pdl b/tools/pdl/tests/examples/recurse.pdl
deleted file mode 100644
index ad3a200981..0000000000
--- a/tools/pdl/tests/examples/recurse.pdl
+++ /dev/null
@@ -1,38 +0,0 @@
-
-struct Struct_0: Struct_1 {
-}
-
-struct Struct_1: Struct_0 {
-}
-
-
-struct Packet_0: Packet_1 {
-}
-
-struct Packet_1: Packet_0 {
-}
-
-
-group Group_0 {
- Group_1
-}
-
-struct Struct_2 {
- Group_0
-}
-
-group Group_1 {
- a: Struct_2
-}
-
-
-struct Struct_3: Struct_4 {
-}
-
-struct Struct_4 {
- Group_2
-}
-
-group Group_2 {
- a: Struct_3
-}
diff --git a/tools/pdl/tests/examples/size-field.pdl b/tools/pdl/tests/examples/size-field.pdl
deleted file mode 100644
index dfa9ad7f5b..0000000000
--- a/tools/pdl/tests/examples/size-field.pdl
+++ /dev/null
@@ -1,58 +0,0 @@
-little_endian_packets
-
-packet Undefined {
- _size_ (array): 8,
-}
-
-packet UndefinedPayloadWithBody {
- _size_ (_payload_): 8,
- _body_,
-}
-
-packet UndefinedPayload {
- _size_ (_payload_): 8,
-}
-
-packet UndefinedBodyWithPayload {
- _size_ (_body_): 8,
- _payload_,
-}
-
-packet UndefinedBody {
- _size_ (_body_): 8,
-}
-
-packet InvalidType {
- _size_ (array): 8,
- array: 16,
-}
-
-packet InvalidArrayOrder {
- array: 16[],
- _size_ (array): 8,
-}
-
-packet InvalidPayloadOrder {
- _payload_,
- _size_ (_payload_): 8,
-}
-
-packet InvalidBodyOrder {
- _body_,
- _size_ (_body_): 8,
-}
-
-packet CorrectArray {
- _size_ (array): 8,
- array: 16[],
-}
-
-packet CorrectPayload {
- _size_ (_payload_): 8,
- _payload_,
-}
-
-packet CorrectBody {
- _size_ (_body_): 8,
- _body_,
-}
diff --git a/tools/pdl/tests/examples/struct.pdl b/tools/pdl/tests/examples/struct.pdl
deleted file mode 100644
index d8ed439a2e..0000000000
--- a/tools/pdl/tests/examples/struct.pdl
+++ /dev/null
@@ -1,52 +0,0 @@
-little_endian_packets
-
-custom_field custom: 1 "custom"
-checksum checksum: 1 "checksum"
-
-enum Enum : 1 {
- tag = 0,
-}
-
-struct Struct {
- a: 4,
- b: Enum,
- c: custom,
- d: checksum,
-}
-
-packet Packet {
- a: 4,
-}
-
-struct RecursiveStruct_0 : RecursiveStruct_1 {
-}
-
-struct RecursiveStruct_1 : RecursiveStruct_0 {
-}
-
-struct InvalidParent : Packet {
-}
-
-struct UndeclaredParent : FooBar {
-}
-
-struct UnnecessaryConstraints (a=1) {
-}
-
-struct Undeclared : Struct (c=1) {
-}
-
-struct Redeclared : Struct (a=1, a=2) {
-}
-
-struct TypeMismatch : Struct (a=tag, b=1, c=1, d=1) {
-}
-
-struct InvalidLiteral : Struct (a=42) {
-}
-
-struct UndeclaredTag : Struct (b=undeclared_tag) {
-}
-
-struct Correct : Struct (a=1, b=tag) {
-}
diff --git a/tools/pdl/tests/examples/typedef-field.pdl b/tools/pdl/tests/examples/typedef-field.pdl
deleted file mode 100644
index 2e56676558..0000000000
--- a/tools/pdl/tests/examples/typedef-field.pdl
+++ /dev/null
@@ -1,36 +0,0 @@
-little_endian_packets
-
-custom_field custom: 1 "custom"
-checksum checksum: 1 "checksum"
-
-enum Enum : 1 {
- tag = 0,
-}
-
-struct Struct {
- a: 1,
-}
-
-packet Packet {
- a: 1,
-}
-
-group Group {
- a: 1,
-}
-
-packet InvalidKind {
- typedef_0: Group,
- typedef_1: Packet,
-}
-
-packet UndeclaredType {
- typedef: Unknown,
-}
-
-packet Correct {
- typedef_0: custom,
- typedef_1: checksum,
- typedef_2: Enum,
- typedef_3: Struct,
-}
diff --git a/tools/pdl/tests/generated/custom_field_declaration_big_endian.rs b/tools/pdl/tests/generated/custom_field_declaration_big_endian.rs
deleted file mode 100644
index 31fa6036c1..0000000000
--- a/tools/pdl/tests/generated/custom_field_declaration_big_endian.rs
+++ /dev/null
@@ -1,85 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(from = "u32", into = "u32"))]
-pub struct ExactSize(u32);
-impl From<&ExactSize> for u32 {
- fn from(value: &ExactSize) -> u32 {
- value.0
- }
-}
-impl From<ExactSize> for u32 {
- fn from(value: ExactSize) -> u32 {
- value.0
- }
-}
-impl From<u32> for ExactSize {
- fn from(value: u32) -> Self {
- ExactSize(value)
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub struct TruncatedSize(u32);
-impl From<&TruncatedSize> for u32 {
- fn from(value: &TruncatedSize) -> u32 {
- value.0
- }
-}
-impl From<TruncatedSize> for u32 {
- fn from(value: TruncatedSize) -> u32 {
- value.0
- }
-}
-impl TryFrom<u32> for TruncatedSize {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- if value > 0xff_ffff { Err(value) } else { Ok(TruncatedSize(value)) }
- }
-}
diff --git a/tools/pdl/tests/generated/custom_field_declaration_little_endian.rs b/tools/pdl/tests/generated/custom_field_declaration_little_endian.rs
deleted file mode 100644
index 31fa6036c1..0000000000
--- a/tools/pdl/tests/generated/custom_field_declaration_little_endian.rs
+++ /dev/null
@@ -1,85 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(from = "u32", into = "u32"))]
-pub struct ExactSize(u32);
-impl From<&ExactSize> for u32 {
- fn from(value: &ExactSize) -> u32 {
- value.0
- }
-}
-impl From<ExactSize> for u32 {
- fn from(value: ExactSize) -> u32 {
- value.0
- }
-}
-impl From<u32> for ExactSize {
- fn from(value: u32) -> Self {
- ExactSize(value)
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub struct TruncatedSize(u32);
-impl From<&TruncatedSize> for u32 {
- fn from(value: &TruncatedSize) -> u32 {
- value.0
- }
-}
-impl From<TruncatedSize> for u32 {
- fn from(value: TruncatedSize) -> u32 {
- value.0
- }
-}
-impl TryFrom<u32> for TruncatedSize {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- if value > 0xff_ffff { Err(value) } else { Ok(TruncatedSize(value)) }
- }
-}
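The file above generated two flavors of custom field wrapper: ExactSize converts infallibly from u32, while TruncatedSize only accepts values that fit in 24 bits. A minimal usage sketch, assuming those types (and the TryFrom/From impls shown above) are in scope; the literal values are illustrative, not taken from the test vectors:

// Illustrative sketch only.
fn custom_field_example() {
    let exact = ExactSize::from(42u32);                         // ExactSize accepts any u32
    let raw: u32 = exact.into();
    assert_eq!(raw, 42);
    assert!(TruncatedSize::try_from(0x00ff_ffffu32).is_ok());   // 24-bit maximum is accepted
    assert!(TruncatedSize::try_from(0x0100_0000u32).is_err());  // larger values are rejected
}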
diff --git a/tools/pdl/tests/generated/enum_declaration_big_endian.rs b/tools/pdl/tests/generated/enum_declaration_big_endian.rs
deleted file mode 100644
index 87b5d0eec5..0000000000
--- a/tools/pdl/tests/generated/enum_declaration_big_endian.rs
+++ /dev/null
@@ -1,400 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncated {
- A = 0x0,
- B = 0x1,
-}
-impl TryFrom<u8> for IncompleteTruncated {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(IncompleteTruncated::A),
- 0x1 => Ok(IncompleteTruncated::B),
- _ => Err(value),
- }
- }
-}
-impl From<&IncompleteTruncated> for u8 {
- fn from(value: &IncompleteTruncated) -> Self {
- match value {
- IncompleteTruncated::A => 0x0,
- IncompleteTruncated::B => 0x1,
- }
- }
-}
-impl From<IncompleteTruncated> for u8 {
- fn from(value: IncompleteTruncated) -> Self {
- (&value).into()
- }
-}
-impl From<IncompleteTruncated> for i8 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for i16 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for i32 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for i64 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for u16 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for u32 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for u64 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncatedWithRange {
- A,
- X,
- Y,
- B(Private<u8>),
-}
-impl TryFrom<u8> for IncompleteTruncatedWithRange {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(IncompleteTruncatedWithRange::A),
- 0x1 => Ok(IncompleteTruncatedWithRange::X),
- 0x2 => Ok(IncompleteTruncatedWithRange::Y),
- 0x1..=0x6 => Ok(IncompleteTruncatedWithRange::B(Private(value))),
- _ => Err(value),
- }
- }
-}
-impl From<&IncompleteTruncatedWithRange> for u8 {
- fn from(value: &IncompleteTruncatedWithRange) -> Self {
- match value {
- IncompleteTruncatedWithRange::A => 0x0,
- IncompleteTruncatedWithRange::X => 0x1,
- IncompleteTruncatedWithRange::Y => 0x2,
- IncompleteTruncatedWithRange::B(Private(value)) => *value,
- }
- }
-}
-impl From<IncompleteTruncatedWithRange> for u8 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- (&value).into()
- }
-}
-impl From<IncompleteTruncatedWithRange> for i8 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for i16 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for i32 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for i64 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for u16 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for u32 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for u64 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum CompleteTruncated {
- A = 0x0,
- B = 0x1,
- C = 0x2,
- D = 0x3,
- E = 0x4,
- F = 0x5,
- G = 0x6,
- H = 0x7,
-}
-impl TryFrom<u8> for CompleteTruncated {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(CompleteTruncated::A),
- 0x1 => Ok(CompleteTruncated::B),
- 0x2 => Ok(CompleteTruncated::C),
- 0x3 => Ok(CompleteTruncated::D),
- 0x4 => Ok(CompleteTruncated::E),
- 0x5 => Ok(CompleteTruncated::F),
- 0x6 => Ok(CompleteTruncated::G),
- 0x7 => Ok(CompleteTruncated::H),
- _ => Err(value),
- }
- }
-}
-impl From<&CompleteTruncated> for u8 {
- fn from(value: &CompleteTruncated) -> Self {
- match value {
- CompleteTruncated::A => 0x0,
- CompleteTruncated::B => 0x1,
- CompleteTruncated::C => 0x2,
- CompleteTruncated::D => 0x3,
- CompleteTruncated::E => 0x4,
- CompleteTruncated::F => 0x5,
- CompleteTruncated::G => 0x6,
- CompleteTruncated::H => 0x7,
- }
- }
-}
-impl From<CompleteTruncated> for u8 {
- fn from(value: CompleteTruncated) -> Self {
- (&value).into()
- }
-}
-impl From<CompleteTruncated> for i8 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for i16 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for i32 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for i64 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for u16 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for u32 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for u64 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum CompleteTruncatedWithRange {
- A,
- X,
- Y,
- B(Private<u8>),
-}
-impl TryFrom<u8> for CompleteTruncatedWithRange {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(CompleteTruncatedWithRange::A),
- 0x1 => Ok(CompleteTruncatedWithRange::X),
- 0x2 => Ok(CompleteTruncatedWithRange::Y),
- 0x1..=0x7 => Ok(CompleteTruncatedWithRange::B(Private(value))),
- _ => Err(value),
- }
- }
-}
-impl From<&CompleteTruncatedWithRange> for u8 {
- fn from(value: &CompleteTruncatedWithRange) -> Self {
- match value {
- CompleteTruncatedWithRange::A => 0x0,
- CompleteTruncatedWithRange::X => 0x1,
- CompleteTruncatedWithRange::Y => 0x2,
- CompleteTruncatedWithRange::B(Private(value)) => *value,
- }
- }
-}
-impl From<CompleteTruncatedWithRange> for u8 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- (&value).into()
- }
-}
-impl From<CompleteTruncatedWithRange> for i8 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for i16 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for i32 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for i64 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for u16 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for u32 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for u64 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum CompleteWithRange {
- A,
- B,
- C(Private<u8>),
-}
-impl TryFrom<u8> for CompleteWithRange {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(CompleteWithRange::A),
- 0x1 => Ok(CompleteWithRange::B),
- 0x2..=0xff => Ok(CompleteWithRange::C(Private(value))),
- }
- }
-}
-impl From<&CompleteWithRange> for u8 {
- fn from(value: &CompleteWithRange) -> Self {
- match value {
- CompleteWithRange::A => 0x0,
- CompleteWithRange::B => 0x1,
- CompleteWithRange::C(Private(value)) => *value,
- }
- }
-}
-impl From<CompleteWithRange> for u8 {
- fn from(value: CompleteWithRange) -> Self {
- (&value).into()
- }
-}
-impl From<CompleteWithRange> for i16 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for i32 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for i64 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for u16 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for u32 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for u64 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
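The enum file above covers both closed enums, whose TryFrom fails on unknown values, and open enums whose range variant carries the raw value in a Private wrapper. A minimal sketch of the conversions, assuming the generated types are in scope (values are illustrative):

// Illustrative sketch only.
fn open_enum_example() {
    assert_eq!(CompleteWithRange::try_from(0x00u8), Ok(CompleteWithRange::A));
    // 0x2..=0xff falls through to the range variant; the raw value is preserved.
    let c = CompleteWithRange::try_from(0x7fu8).unwrap();
    assert_eq!(u8::from(c), 0x7f);
    // Truncated enums reject values outside their declared cases.
    assert!(IncompleteTruncated::try_from(0x2u8).is_err());
}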
diff --git a/tools/pdl/tests/generated/enum_declaration_little_endian.rs b/tools/pdl/tests/generated/enum_declaration_little_endian.rs
deleted file mode 100644
index 87b5d0eec5..0000000000
--- a/tools/pdl/tests/generated/enum_declaration_little_endian.rs
+++ /dev/null
@@ -1,400 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncated {
- A = 0x0,
- B = 0x1,
-}
-impl TryFrom<u8> for IncompleteTruncated {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(IncompleteTruncated::A),
- 0x1 => Ok(IncompleteTruncated::B),
- _ => Err(value),
- }
- }
-}
-impl From<&IncompleteTruncated> for u8 {
- fn from(value: &IncompleteTruncated) -> Self {
- match value {
- IncompleteTruncated::A => 0x0,
- IncompleteTruncated::B => 0x1,
- }
- }
-}
-impl From<IncompleteTruncated> for u8 {
- fn from(value: IncompleteTruncated) -> Self {
- (&value).into()
- }
-}
-impl From<IncompleteTruncated> for i8 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for i16 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for i32 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for i64 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for u16 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for u32 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncated> for u64 {
- fn from(value: IncompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncatedWithRange {
- A,
- X,
- Y,
- B(Private<u8>),
-}
-impl TryFrom<u8> for IncompleteTruncatedWithRange {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(IncompleteTruncatedWithRange::A),
- 0x1 => Ok(IncompleteTruncatedWithRange::X),
- 0x2 => Ok(IncompleteTruncatedWithRange::Y),
- 0x1..=0x6 => Ok(IncompleteTruncatedWithRange::B(Private(value))),
- _ => Err(value),
- }
- }
-}
-impl From<&IncompleteTruncatedWithRange> for u8 {
- fn from(value: &IncompleteTruncatedWithRange) -> Self {
- match value {
- IncompleteTruncatedWithRange::A => 0x0,
- IncompleteTruncatedWithRange::X => 0x1,
- IncompleteTruncatedWithRange::Y => 0x2,
- IncompleteTruncatedWithRange::B(Private(value)) => *value,
- }
- }
-}
-impl From<IncompleteTruncatedWithRange> for u8 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- (&value).into()
- }
-}
-impl From<IncompleteTruncatedWithRange> for i8 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for i16 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for i32 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for i64 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for u16 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for u32 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<IncompleteTruncatedWithRange> for u64 {
- fn from(value: IncompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum CompleteTruncated {
- A = 0x0,
- B = 0x1,
- C = 0x2,
- D = 0x3,
- E = 0x4,
- F = 0x5,
- G = 0x6,
- H = 0x7,
-}
-impl TryFrom<u8> for CompleteTruncated {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(CompleteTruncated::A),
- 0x1 => Ok(CompleteTruncated::B),
- 0x2 => Ok(CompleteTruncated::C),
- 0x3 => Ok(CompleteTruncated::D),
- 0x4 => Ok(CompleteTruncated::E),
- 0x5 => Ok(CompleteTruncated::F),
- 0x6 => Ok(CompleteTruncated::G),
- 0x7 => Ok(CompleteTruncated::H),
- _ => Err(value),
- }
- }
-}
-impl From<&CompleteTruncated> for u8 {
- fn from(value: &CompleteTruncated) -> Self {
- match value {
- CompleteTruncated::A => 0x0,
- CompleteTruncated::B => 0x1,
- CompleteTruncated::C => 0x2,
- CompleteTruncated::D => 0x3,
- CompleteTruncated::E => 0x4,
- CompleteTruncated::F => 0x5,
- CompleteTruncated::G => 0x6,
- CompleteTruncated::H => 0x7,
- }
- }
-}
-impl From<CompleteTruncated> for u8 {
- fn from(value: CompleteTruncated) -> Self {
- (&value).into()
- }
-}
-impl From<CompleteTruncated> for i8 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for i16 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for i32 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for i64 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for u16 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for u32 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncated> for u64 {
- fn from(value: CompleteTruncated) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum CompleteTruncatedWithRange {
- A,
- X,
- Y,
- B(Private<u8>),
-}
-impl TryFrom<u8> for CompleteTruncatedWithRange {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(CompleteTruncatedWithRange::A),
- 0x1 => Ok(CompleteTruncatedWithRange::X),
- 0x2 => Ok(CompleteTruncatedWithRange::Y),
- 0x1..=0x7 => Ok(CompleteTruncatedWithRange::B(Private(value))),
- _ => Err(value),
- }
- }
-}
-impl From<&CompleteTruncatedWithRange> for u8 {
- fn from(value: &CompleteTruncatedWithRange) -> Self {
- match value {
- CompleteTruncatedWithRange::A => 0x0,
- CompleteTruncatedWithRange::X => 0x1,
- CompleteTruncatedWithRange::Y => 0x2,
- CompleteTruncatedWithRange::B(Private(value)) => *value,
- }
- }
-}
-impl From<CompleteTruncatedWithRange> for u8 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- (&value).into()
- }
-}
-impl From<CompleteTruncatedWithRange> for i8 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for i16 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for i32 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for i64 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for u16 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for u32 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteTruncatedWithRange> for u64 {
- fn from(value: CompleteTruncatedWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum CompleteWithRange {
- A,
- B,
- C(Private<u8>),
-}
-impl TryFrom<u8> for CompleteWithRange {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(CompleteWithRange::A),
- 0x1 => Ok(CompleteWithRange::B),
- 0x2..=0xff => Ok(CompleteWithRange::C(Private(value))),
- }
- }
-}
-impl From<&CompleteWithRange> for u8 {
- fn from(value: &CompleteWithRange) -> Self {
- match value {
- CompleteWithRange::A => 0x0,
- CompleteWithRange::B => 0x1,
- CompleteWithRange::C(Private(value)) => *value,
- }
- }
-}
-impl From<CompleteWithRange> for u8 {
- fn from(value: CompleteWithRange) -> Self {
- (&value).into()
- }
-}
-impl From<CompleteWithRange> for i16 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for i32 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for i64 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for u16 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for u32 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<CompleteWithRange> for u64 {
- fn from(value: CompleteWithRange) -> Self {
- u8::from(value) as Self
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_enum_array_big_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_enum_array_big_endian.rs
deleted file mode 100644
index bd352ef126..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_enum_array_big_endian.rs
+++ /dev/null
@@ -1,206 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub enum Foo {
- FooBar = 0x1,
- Baz = 0x2,
-}
-impl TryFrom<u32> for Foo {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::FooBar),
- 0x2 => Ok(Foo::Baz),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u32 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::FooBar => 0x1,
- Foo::Baz => 0x2,
- }
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: [Foo; 5],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: [Foo; 5],
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 15
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 * 3 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 5 * 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..5)
- .map(|_| {
- Foo::try_from(bytes.get_mut().get_uint(3) as u32)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: String::new(),
- value: 0,
- type_: "Foo".to_string(),
- })
- })
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_uint(u32::from(elem) as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 15
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &[Foo; 5] {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
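The generated Bar packet above stores five Foo values, each serialized as a 3-byte big-endian integer. A minimal round-trip sketch using the BarBuilder, Packet::to_vec and Bar::parse items from that file, with illustrative values:

// Illustrative sketch only; assumes the Packet trait is in scope.
fn enum_array_example() {
    let x = [Foo::FooBar, Foo::Baz, Foo::FooBar, Foo::Baz, Foo::FooBar];
    let bar = BarBuilder { x }.build();
    let bytes = bar.to_vec();                      // 5 elements * 3 bytes = 15 bytes
    assert_eq!(&bytes[..3], &[0x00, 0x00, 0x01]);  // Foo::FooBar == 0x1, big-endian
    let parsed = Bar::parse(&bytes).unwrap();
    assert_eq!(parsed.get_x()[1], Foo::Baz);
}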
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_enum_array_little_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_enum_array_little_endian.rs
deleted file mode 100644
index 5027984e5b..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_enum_array_little_endian.rs
+++ /dev/null
@@ -1,206 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub enum Foo {
- FooBar = 0x1,
- Baz = 0x2,
-}
-impl TryFrom<u32> for Foo {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::FooBar),
- 0x2 => Ok(Foo::Baz),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u32 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::FooBar => 0x1,
- Foo::Baz => 0x2,
- }
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: [Foo; 5],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: [Foo; 5],
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 15
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 * 3 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 5 * 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..5)
- .map(|_| {
- Foo::try_from(bytes.get_mut().get_uint_le(3) as u32)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: String::new(),
- value: 0,
- type_: "Foo".to_string(),
- })
- })
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_uint_le(u32::from(elem) as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 15
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &[Foo; 5] {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_enum_big_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_enum_big_endian.rs
deleted file mode 100644
index d2b63bebab..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_enum_big_endian.rs
+++ /dev/null
@@ -1,198 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub enum Foo {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u32> for Foo {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::A),
- 0x2 => Ok(Foo::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u32 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::A => 0x1,
- Foo::B => 0x2,
- }
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Foo,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Foo,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = Foo::try_from(bytes.get_mut().get_uint(3) as u32)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: "x".to_string(),
- value: bytes.get_mut().get_uint(3) as u32 as u64,
- type_: "Foo".to_string(),
- })?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_uint(u32::from(self.x) as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> Foo {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
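The packet above carries a single 24-bit enum field. A minimal parse/serialize sketch, assuming the generated Bar, Foo and Packet items are in scope (the payload is illustrative):

// Illustrative sketch only.
fn enum_field_example() {
    let bytes = [0x00, 0x00, 0x02];
    let bar = Bar::parse(&bytes).unwrap();
    assert_eq!(bar.get_x(), Foo::B);
    assert_eq!(bar.to_vec(), bytes);   // round-trips to the same bytes
}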
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_enum_little_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_enum_little_endian.rs
deleted file mode 100644
index 40f98945b0..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_enum_little_endian.rs
+++ /dev/null
@@ -1,198 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub enum Foo {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u32> for Foo {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::A),
- 0x2 => Ok(Foo::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u32 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::A => 0x1,
- Foo::B => 0x2,
- }
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u32::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Foo,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Foo,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = Foo::try_from(bytes.get_mut().get_uint_le(3) as u32)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: "x".to_string(),
- value: bytes.get_mut().get_uint_le(3) as u32 as u64,
- type_: "Foo".to_string(),
- })?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_uint_le(u32::from(self.x) as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> Foo {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
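The little-endian variant above differs only in byte order on the wire. A one-line sketch of the difference, with an illustrative value:

// Illustrative sketch only.
fn enum_field_le_example() {
    let bar = BarBuilder { x: Foo::B }.build();
    assert_eq!(bar.to_vec(), [0x02, 0x00, 0x00]);  // 0x2 written as 3 bytes, LSB first
}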
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_scalar_array_big_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_scalar_array_big_endian.rs
deleted file mode 100644
index eaa7537905..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_scalar_array_big_endian.rs
+++ /dev/null
@@ -1,152 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: [u32; 5],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: [u32; 5],
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 15
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 * 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5 * 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..5)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_uint(3) as u32))
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_uint(*elem as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 15
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> &[u32; 5] {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
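The packet above stores five 24-bit scalars in a [u32; 5] field. A minimal round-trip sketch using FooBuilder and Foo::parse from that file (values are illustrative):

// Illustrative sketch only.
fn scalar_array_example() {
    let foo = FooBuilder { x: [1, 2, 3, 4, 5] }.build();
    assert_eq!(foo.get_size(), 15);                // 5 elements * 3 bytes each
    let bytes = foo.to_vec();
    assert_eq!(Foo::parse(&bytes).unwrap().get_x(), &[1, 2, 3, 4, 5]);
}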
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_scalar_array_little_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_scalar_array_little_endian.rs
deleted file mode 100644
index f9878462a9..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_scalar_array_little_endian.rs
+++ /dev/null
@@ -1,152 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: [u32; 5],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: [u32; 5],
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 15
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 * 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5 * 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..5)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_uint_le(3) as u32))
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_uint_le(*elem as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 15
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> &[u32; 5] {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_scalar_big_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_scalar_big_endian.rs
deleted file mode 100644
index e5aa0e2692..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_scalar_big_endian.rs
+++ /dev/null
@@ -1,149 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u32,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u32,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_uint(3) as u32;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.x > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "x", self.x, 0xff_ffff);
- }
- buffer.put_uint(self.x as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u32 {
- self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
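The 24-bit scalar packet above exposes the same builder/parse surface; the one subtlety is that `write_to` panics when the value does not fit in 24 bits, so callers are expected to range-check before building. A usage sketch under the same assumptions (hypothetical module name `be_scalar_24`):

use be_scalar_24::{Foo, FooBuilder, Packet};

fn example() -> Result<(), be_scalar_24::Error> {
    // 0x12_3456 fits in 24 bits; any value above 0xff_ffff would panic in write_to.
    let bytes = FooBuilder { x: 0x12_3456 }.build().to_vec();
    assert_eq!(bytes, vec![0x12, 0x34, 0x56]);    // big-endian byte order
    assert_eq!(Foo::parse(&bytes)?.get_x(), 0x12_3456);
    Ok(())
}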
diff --git a/tools/pdl/tests/generated/packet_decl_24bit_scalar_little_endian.rs b/tools/pdl/tests/generated/packet_decl_24bit_scalar_little_endian.rs
deleted file mode 100644
index c9a65ed19b..0000000000
--- a/tools/pdl/tests/generated/packet_decl_24bit_scalar_little_endian.rs
+++ /dev/null
@@ -1,149 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u32,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u32,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_uint_le(3) as u32;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.x > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "x", self.x, 0xff_ffff);
- }
- buffer.put_uint_le(self.x as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u32 {
- self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_enum_array_big_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_enum_array_big_endian.rs
deleted file mode 100644
index 6940901736..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_enum_array_big_endian.rs
+++ /dev/null
@@ -1,191 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u64", into = "u64"))]
-pub enum Foo {
- FooBar = 0x1,
- Baz = 0x2,
-}
-impl TryFrom<u64> for Foo {
- type Error = u64;
- fn try_from(value: u64) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::FooBar),
- 0x2 => Ok(Foo::Baz),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u64 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::FooBar => 0x1,
- Foo::Baz => 0x2,
- }
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: [Foo; 7],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: [Foo; 7],
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 56
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 7 * 8 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 7 * 8,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..7)
- .map(|_| {
- Foo::try_from(bytes.get_mut().get_u64())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: String::new(),
- value: 0,
- type_: "Foo".to_string(),
- })
- })
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u64(u64::from(elem));
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 56
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &[Foo; 7] {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
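The enum-array packet above encodes each of the seven `Foo` values as a full 8-byte big-endian word (56 bytes total) and rejects unknown discriminants at parse time. A sketch, assuming the generated file is built as a module with the hypothetical name `enum_array_64`:

use enum_array_64::{Bar, BarBuilder, Foo, Packet};

fn example() -> Result<(), enum_array_64::Error> {
    let packet = BarBuilder { x: [Foo::FooBar; 7] }.build();
    assert_eq!(packet.get_size(), 56);

    let bytes = packet.to_vec();
    // Any 8-byte word that is not 0x1 or 0x2 makes parse fail with
    // Error::InvalidEnumValueError.
    assert_eq!(Bar::parse(&bytes)?.get_x(), &[Foo::FooBar; 7]);
    Ok(())
}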
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_enum_array_little_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_enum_array_little_endian.rs
deleted file mode 100644
index a7008b37d0..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_enum_array_little_endian.rs
+++ /dev/null
@@ -1,191 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u64", into = "u64"))]
-pub enum Foo {
- FooBar = 0x1,
- Baz = 0x2,
-}
-impl TryFrom<u64> for Foo {
- type Error = u64;
- fn try_from(value: u64) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::FooBar),
- 0x2 => Ok(Foo::Baz),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u64 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::FooBar => 0x1,
- Foo::Baz => 0x2,
- }
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: [Foo; 7],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: [Foo; 7],
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 56
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 7 * 8 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 7 * 8,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..7)
- .map(|_| {
- Foo::try_from(bytes.get_mut().get_u64_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: String::new(),
- value: 0,
- type_: "Foo".to_string(),
- })
- })
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u64_le(u64::from(elem));
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 56
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &[Foo; 7] {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_enum_big_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_enum_big_endian.rs
deleted file mode 100644
index a8f182c32e..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_enum_big_endian.rs
+++ /dev/null
@@ -1,183 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u64", into = "u64"))]
-pub enum Foo {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u64> for Foo {
- type Error = u64;
- fn try_from(value: u64) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::A),
- 0x2 => Ok(Foo::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u64 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::A => 0x1,
- Foo::B => 0x2,
- }
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Foo,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Foo,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let x = Foo::try_from(bytes.get_mut().get_u64())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: "x".to_string(),
- value: bytes.get_mut().get_u64() as u64,
- type_: "Foo".to_string(),
- })?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u64(u64::from(self.x));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> Foo {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
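The single-value enum packet above also exposes the `TryFrom<u64>`/`From<Foo>` conversions directly; note that on conversion failure the generated error path calls `get_u64()` a second time to fill `value`, so the reported value is read past the word that actually failed. A conversion and round-trip sketch (hypothetical module name `enum_64`):

use enum_64::{Bar, BarBuilder, Foo, Packet};
use std::convert::TryFrom;

fn example() -> Result<(), enum_64::Error> {
    assert_eq!(u64::from(Foo::B), 0x2);
    assert!(Foo::try_from(0x3u64).is_err());        // unknown discriminant

    let bytes = BarBuilder { x: Foo::A }.build().to_vec();
    assert_eq!(bytes, vec![0, 0, 0, 0, 0, 0, 0, 1]); // 0x1 as a big-endian u64
    assert_eq!(Bar::parse(&bytes)?.get_x(), Foo::A);
    Ok(())
}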
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_enum_little_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_enum_little_endian.rs
deleted file mode 100644
index 8fa8467f9e..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_enum_little_endian.rs
+++ /dev/null
@@ -1,183 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u64", into = "u64"))]
-pub enum Foo {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u64> for Foo {
- type Error = u64;
- fn try_from(value: u64) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::A),
- 0x2 => Ok(Foo::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u64 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::A => 0x1,
- Foo::B => 0x2,
- }
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Foo,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Foo,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let x = Foo::try_from(bytes.get_mut().get_u64_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: "x".to_string(),
- value: bytes.get_mut().get_u64_le() as u64,
- type_: "Foo".to_string(),
- })?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u64_le(u64::from(self.x));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> Foo {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_scalar_array_big_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_scalar_array_big_endian.rs
deleted file mode 100644
index d162424f3c..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_scalar_array_big_endian.rs
+++ /dev/null
@@ -1,152 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: [u64; 7],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: [u64; 7],
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 56
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 7 * 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 7 * 8,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..7)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u64()))
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u64(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 56
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> &[u64; 7] {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_scalar_array_little_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_scalar_array_little_endian.rs
deleted file mode 100644
index 5d8c77ee74..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_scalar_array_little_endian.rs
+++ /dev/null
@@ -1,152 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: [u64; 7],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: [u64; 7],
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 56
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 7 * 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 7 * 8,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..7)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u64_le()))
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u64_le(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 56
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> &[u64; 7] {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_scalar_big_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_scalar_big_endian.rs
deleted file mode 100644
index 176a4af46d..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_scalar_big_endian.rs
+++ /dev/null
@@ -1,146 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u64,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u64,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u64();
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u64(self.x);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u64 {
- self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
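For a full-width 64-bit scalar there is no range check before writing, unlike the 24-bit case earlier. A sketch under the same assumptions (hypothetical module name `scalar_64`):

use scalar_64::{Foo, FooBuilder, Packet};

fn example() -> Result<(), scalar_64::Error> {
    let bytes = FooBuilder { x: u64::MAX }.build().to_vec();
    assert_eq!(bytes, vec![0xff; 8]);               // 8-byte big-endian encoding
    assert_eq!(Foo::parse(&bytes)?.get_x(), u64::MAX);
    Ok(())
}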
diff --git a/tools/pdl/tests/generated/packet_decl_64bit_scalar_little_endian.rs b/tools/pdl/tests/generated/packet_decl_64bit_scalar_little_endian.rs
deleted file mode 100644
index 3f26f6c3f6..0000000000
--- a/tools/pdl/tests/generated/packet_decl_64bit_scalar_little_endian.rs
+++ /dev/null
@@ -1,146 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u64,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u64,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u64_le();
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u64_le(self.x);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u64 {
- self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_enum_array_big_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_enum_array_big_endian.rs
deleted file mode 100644
index ef47d85b1f..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_enum_array_big_endian.rs
+++ /dev/null
@@ -1,221 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Foo {
- FooBar = 0x1,
- Baz = 0x2,
-}
-impl TryFrom<u8> for Foo {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::FooBar),
- 0x2 => Ok(Foo::Baz),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u8 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::FooBar => 0x1,
- Foo::Baz => 0x2,
- }
- }
-}
-impl From<Foo> for u8 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: [Foo; 3],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: [Foo; 3],
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..3)
- .map(|_| {
- Foo::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: String::new(),
- value: 0,
- type_: "Foo".to_string(),
- })
- })
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u8(u8::from(elem));
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &[Foo; 3] {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
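Because `Foo` here is an 8-bit enum, the backend additionally emits widening `From<Foo>` impls for the larger integer types, and each array element occupies a single byte on the wire. A sketch (hypothetical module name `enum_array_8`):

use enum_array_8::{Bar, BarBuilder, Foo, Packet};

fn example() -> Result<(), enum_array_8::Error> {
    assert_eq!(u32::from(Foo::Baz), 2);             // widening conversion
    assert_eq!(i64::from(Foo::FooBar), 1);

    let bytes = BarBuilder { x: [Foo::FooBar, Foo::Baz, Foo::FooBar] }.build().to_vec();
    assert_eq!(bytes, vec![0x1, 0x2, 0x1]);         // one byte per element
    assert_eq!(Bar::parse(&bytes)?.get_x(), &[Foo::FooBar, Foo::Baz, Foo::FooBar]);
    Ok(())
}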
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_enum_array_little_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_enum_array_little_endian.rs
deleted file mode 100644
index ef47d85b1f..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_enum_array_little_endian.rs
+++ /dev/null
@@ -1,221 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Foo {
- FooBar = 0x1,
- Baz = 0x2,
-}
-impl TryFrom<u8> for Foo {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::FooBar),
- 0x2 => Ok(Foo::Baz),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u8 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::FooBar => 0x1,
- Foo::Baz => 0x2,
- }
- }
-}
-impl From<Foo> for u8 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: [Foo; 3],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: [Foo; 3],
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..3)
- .map(|_| {
- Foo::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: String::new(),
- value: 0,
- type_: "Foo".to_string(),
- })
- })
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u8(u8::from(elem));
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &[Foo; 3] {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_enum_big_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_enum_big_endian.rs
deleted file mode 100644
index b3c497967f..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_enum_big_endian.rs
+++ /dev/null
@@ -1,213 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Foo {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u8> for Foo {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::A),
- 0x2 => Ok(Foo::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u8 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::A => 0x1,
- Foo::B => 0x2,
- }
- }
-}
-impl From<Foo> for u8 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Foo,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Foo,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = Foo::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: "x".to_string(),
- value: bytes.get_mut().get_u8() as u64,
- type_: "Foo".to_string(),
- })?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(u8::from(self.x));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> Foo {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_enum_little_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_enum_little_endian.rs
deleted file mode 100644
index b3c497967f..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_enum_little_endian.rs
+++ /dev/null
@@ -1,213 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Foo {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u8> for Foo {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Foo::A),
- 0x2 => Ok(Foo::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Foo> for u8 {
- fn from(value: &Foo) -> Self {
- match value {
- Foo::A => 0x1,
- Foo::B => 0x2,
- }
- }
-}
-impl From<Foo> for u8 {
- fn from(value: Foo) -> Self {
- (&value).into()
- }
-}
-impl From<Foo> for i16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for i64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u16 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u32 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Foo> for u64 {
- fn from(value: Foo) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Foo,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Foo,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = Foo::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Bar".to_string(),
- field: "x".to_string(),
- value: bytes.get_mut().get_u8() as u64,
- type_: "Foo".to_string(),
- })?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(u8::from(self.x));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> Foo {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_scalar_array_big_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_scalar_array_big_endian.rs
deleted file mode 100644
index a1915a2864..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_scalar_array_big_endian.rs
+++ /dev/null
@@ -1,152 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: [u8; 3],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: [u8; 3],
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..3)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u8()))
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u8(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> &[u8; 3] {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_scalar_array_little_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_scalar_array_little_endian.rs
deleted file mode 100644
index a1915a2864..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_scalar_array_little_endian.rs
+++ /dev/null
@@ -1,152 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: [u8; 3],
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: [u8; 3],
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..3)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u8()))
- .collect::<Result<Vec<_>>>()?
- .try_into()
- .map_err(|_| Error::InvalidPacketError)?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- for elem in &self.x {
- buffer.put_u8(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> &[u8; 3] {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_scalar_big_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_scalar_big_endian.rs
deleted file mode 100644
index ae3515a511..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_scalar_big_endian.rs
+++ /dev/null
@@ -1,146 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u8,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u8();
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.x);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u8 {
- self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_8bit_scalar_little_endian.rs b/tools/pdl/tests/generated/packet_decl_8bit_scalar_little_endian.rs
deleted file mode 100644
index ae3515a511..0000000000
--- a/tools/pdl/tests/generated/packet_decl_8bit_scalar_little_endian.rs
+++ /dev/null
@@ -1,146 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u8,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u8();
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.x);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u8 {
- self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { x: self.x });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_dynamic_count_big_endian.rs b/tools/pdl/tests/generated/packet_decl_array_dynamic_count_big_endian.rs
deleted file mode 100644
index c9a5e221a8..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_dynamic_count_big_endian.rs
+++ /dev/null
@@ -1,178 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- padding: u8,
- x: Vec<u32>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub padding: u8,
- pub x: Vec<u32>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u8();
- let x_count = (chunk & 0x1f) as usize;
- let padding = ((chunk >> 5) & 0x7);
- if bytes.get().remaining() < x_count * 3usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: x_count * 3usize,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..x_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_uint(3) as u32))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { padding, x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.x.len() > 0x1f {
- panic!("Invalid length for {}::{}: {} > {}", "Foo", "x", self.x.len(), 0x1f);
- }
- if self.padding > 0x7 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "padding", self.padding, 0x7
- );
- }
- let value = self.x.len() as u8 | (self.padding << 5);
- buffer.put_u8(value);
- for elem in &self.x {
- buffer.put_uint(*elem as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1 + self.x.len() * 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_padding(&self) -> u8 {
- self.foo.as_ref().padding
- }
- pub fn get_x(&self) -> &Vec<u32> {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- padding: self.padding,
- x: self.x,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_dynamic_count_little_endian.rs b/tools/pdl/tests/generated/packet_decl_array_dynamic_count_little_endian.rs
deleted file mode 100644
index 7a9f27fbf7..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_dynamic_count_little_endian.rs
+++ /dev/null
@@ -1,178 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- padding: u8,
- x: Vec<u32>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub padding: u8,
- pub x: Vec<u32>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u8();
- let x_count = (chunk & 0x1f) as usize;
- let padding = ((chunk >> 5) & 0x7);
- if bytes.get().remaining() < x_count * 3usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: x_count * 3usize,
- got: bytes.get().remaining(),
- });
- }
- let x = (0..x_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_uint_le(3) as u32))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { padding, x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.x.len() > 0x1f {
- panic!("Invalid length for {}::{}: {} > {}", "Foo", "x", self.x.len(), 0x1f);
- }
- if self.padding > 0x7 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "padding", self.padding, 0x7
- );
- }
- let value = self.x.len() as u8 | (self.padding << 5);
- buffer.put_u8(value);
- for elem in &self.x {
- buffer.put_uint_le(*elem as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1 + self.x.len() * 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_padding(&self) -> u8 {
- self.foo.as_ref().padding
- }
- pub fn get_x(&self) -> &Vec<u32> {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- padding: self.padding,
- x: self.x,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_dynamic_size_big_endian.rs b/tools/pdl/tests/generated/packet_decl_array_dynamic_size_big_endian.rs
deleted file mode 100644
index ec487365ea..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_dynamic_size_big_endian.rs
+++ /dev/null
@@ -1,189 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- padding: u8,
- x: Vec<u32>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub padding: u8,
- pub x: Vec<u32>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u8();
- let x_size = (chunk & 0x1f) as usize;
- let padding = ((chunk >> 5) & 0x7);
- if bytes.get().remaining() < x_size {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: x_size,
- got: bytes.get().remaining(),
- });
- }
- if x_size % 3 != 0 {
- return Err(Error::InvalidArraySize {
- array: x_size,
- element: 3,
- });
- }
- let x_count = x_size / 3;
- let mut x = Vec::with_capacity(x_count);
- for _ in 0..x_count {
- x.push(Ok::<_, Error>(bytes.get_mut().get_uint(3) as u32)?);
- }
- Ok(Self { padding, x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if (self.x.len() * 3) > 0x1f {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "x", (self.x.len() * 3),
- 0x1f
- );
- }
- if self.padding > 0x7 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "padding", self.padding, 0x7
- );
- }
- let value = (self.x.len() * 3) as u8 | (self.padding << 5);
- buffer.put_u8(value);
- for elem in &self.x {
- buffer.put_uint(*elem as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1 + self.x.len() * 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_padding(&self) -> u8 {
- self.foo.as_ref().padding
- }
- pub fn get_x(&self) -> &Vec<u32> {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- padding: self.padding,
- x: self.x,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_dynamic_size_little_endian.rs b/tools/pdl/tests/generated/packet_decl_array_dynamic_size_little_endian.rs
deleted file mode 100644
index cec18ee62c..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_dynamic_size_little_endian.rs
+++ /dev/null
@@ -1,189 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- padding: u8,
- x: Vec<u32>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub padding: u8,
- pub x: Vec<u32>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u8();
- let x_size = (chunk & 0x1f) as usize;
- let padding = ((chunk >> 5) & 0x7);
- if bytes.get().remaining() < x_size {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: x_size,
- got: bytes.get().remaining(),
- });
- }
- if x_size % 3 != 0 {
- return Err(Error::InvalidArraySize {
- array: x_size,
- element: 3,
- });
- }
- let x_count = x_size / 3;
- let mut x = Vec::with_capacity(x_count);
- for _ in 0..x_count {
- x.push(Ok::<_, Error>(bytes.get_mut().get_uint_le(3) as u32)?);
- }
- Ok(Self { padding, x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if (self.x.len() * 3) > 0x1f {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "x", (self.x.len() * 3),
- 0x1f
- );
- }
- if self.padding > 0x7 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "padding", self.padding, 0x7
- );
- }
- let value = (self.x.len() * 3) as u8 | (self.padding << 5);
- buffer.put_u8(value);
- for elem in &self.x {
- buffer.put_uint_le(*elem as u64, 3);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1 + self.x.len() * 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_padding(&self) -> u8 {
- self.foo.as_ref().padding
- }
- pub fn get_x(&self) -> &Vec<u32> {
- &self.foo.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- padding: self.padding,
- x: self.x,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_big_endian.rs b/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_big_endian.rs
deleted file mode 100644
index b57eb29665..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_big_endian.rs
+++ /dev/null
@@ -1,212 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: Vec<u16>,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let a_count = bytes.get_mut().get_uint(5) as usize;
- if bytes.get().remaining() < a_count * 2usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: a_count * 2usize,
- got: bytes.get().remaining(),
- });
- }
- let a = (0..a_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u16()))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "a", self.a.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint(self.a.len() as u64, 5);
- for elem in &self.a {
- buffer.put_u16(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.a.len() * 2
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Vec<Foo>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Vec<Foo>,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let x_count = bytes.get_mut().get_uint(5) as usize;
- let x = (0..x_count)
- .map(|_| Foo::parse_inner(bytes))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.x.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Bar", "x", self.x.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint(self.x.len() as u64, 5);
- for elem in &self.x {
- elem.write_to(buffer);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.x.iter().map(|elem| elem.get_size()).sum::<usize>()
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &Vec<Foo> {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
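For reference, a minimal round-trip sketch against the generated count-prefixed API deleted above (illustrative only; assumes the items from that file and its generated Packet trait are in scope):

fn round_trip() -> Result<()> {
    // BarBuilder, Foo and Bar are the items declared in the file above;
    // to_vec comes from the generated Packet trait.
    let bytes = BarBuilder { x: vec![Foo { a: vec![1, 2, 3] }] }.build().to_vec();
    // Wire layout: a 5-byte big-endian element count for `x`, then each Foo,
    // which is itself a 5-byte count followed by big-endian u16 elements.
    let parsed = Bar::parse(&bytes)?;
    assert_eq!(parsed.get_x()[0].a, vec![1, 2, 3]);
    Ok(())
}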
diff --git a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_little_endian.rs b/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_little_endian.rs
deleted file mode 100644
index 3fb7990f4d..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_count_little_endian.rs
+++ /dev/null
@@ -1,212 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: Vec<u16>,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let a_count = bytes.get_mut().get_uint_le(5) as usize;
- if bytes.get().remaining() < a_count * 2usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: a_count * 2usize,
- got: bytes.get().remaining(),
- });
- }
- let a = (0..a_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u16_le()))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "a", self.a.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint_le(self.a.len() as u64, 5);
- for elem in &self.a {
- buffer.put_u16_le(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.a.len() * 2
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Vec<Foo>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Vec<Foo>,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let x_count = bytes.get_mut().get_uint_le(5) as usize;
- let x = (0..x_count)
- .map(|_| Foo::parse_inner(bytes))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.x.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Bar", "x", self.x.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint_le(self.x.len() as u64, 5);
- for elem in &self.x {
- elem.write_to(buffer);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.x.iter().map(|elem| elem.get_size()).sum::<usize>()
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &Vec<Foo> {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_big_endian.rs b/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_big_endian.rs
deleted file mode 100644
index ee4459e597..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_big_endian.rs
+++ /dev/null
@@ -1,224 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: Vec<u16>,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let a_count = bytes.get_mut().get_uint(5) as usize;
- if bytes.get().remaining() < a_count * 2usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: a_count * 2usize,
- got: bytes.get().remaining(),
- });
- }
- let a = (0..a_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u16()))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "a", self.a.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint(self.a.len() as u64, 5);
- for elem in &self.a {
- buffer.put_u16(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.a.len() * 2
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Vec<Foo>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Vec<Foo>,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let x_size = bytes.get_mut().get_uint(5) as usize;
- if bytes.get().remaining() < x_size {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: x_size,
- got: bytes.get().remaining(),
- });
- }
- let (head, tail) = bytes.get().split_at(x_size);
- let mut head = &mut Cell::new(head);
- bytes.replace(tail);
- let mut x = Vec::new();
- while !head.get().is_empty() {
- x.push(Foo::parse_inner(head)?);
- }
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- let x_size = self.x.iter().map(|elem| elem.get_size()).sum::<usize>();
- if x_size > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Bar", "x", x_size,
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint(x_size as u64, 5);
- for elem in &self.x {
- elem.write_to(buffer);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.x.iter().map(|elem| elem.get_size()).sum::<usize>()
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &Vec<Foo> {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
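Worked size example for the size-prefixed variant above (illustrative): a Bar holding a single Foo with two u16 elements serializes that Foo as 5 (count) + 2 * 2 = 9 bytes, so the writer emits a 5-byte x_size field holding 9 followed by those 9 bytes, 14 bytes in total, which matches get_size() = 5 + 9.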
diff --git a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_little_endian.rs b/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_little_endian.rs
deleted file mode 100644
index a0605eb919..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_unknown_element_width_dynamic_size_little_endian.rs
+++ /dev/null
@@ -1,224 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: Vec<u16>,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let a_count = bytes.get_mut().get_uint_le(5) as usize;
- if bytes.get().remaining() < a_count * 2usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: a_count * 2usize,
- got: bytes.get().remaining(),
- });
- }
- let a = (0..a_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u16_le()))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "a", self.a.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint_le(self.a.len() as u64, 5);
- for elem in &self.a {
- buffer.put_u16_le(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.a.len() * 2
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: Vec<Foo>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub x: Vec<Foo>,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let x_size = bytes.get_mut().get_uint_le(5) as usize;
- if bytes.get().remaining() < x_size {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: x_size,
- got: bytes.get().remaining(),
- });
- }
- let (head, tail) = bytes.get().split_at(x_size);
- let mut head = &mut Cell::new(head);
- bytes.replace(tail);
- let mut x = Vec::new();
- while !head.get().is_empty() {
- x.push(Foo::parse_inner(head)?);
- }
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- let x_size = self.x.iter().map(|elem| elem.get_size()).sum::<usize>();
- if x_size > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Bar", "x", x_size,
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint_le(x_size as u64, 5);
- for elem in &self.x {
- elem.write_to(buffer);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.x.iter().map(|elem| elem.get_size()).sum::<usize>()
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_x(&self) -> &Vec<Foo> {
- &self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_array_with_padding_big_endian.rs b/tools/pdl/tests/generated/packet_decl_array_with_padding_big_endian.rs
deleted file mode 100644
index 36b5571218..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_with_padding_big_endian.rs
+++ /dev/null
@@ -1,216 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: Vec<u16>,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let a_count = bytes.get_mut().get_uint(5) as usize;
- if bytes.get().remaining() < a_count * 2usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: a_count * 2usize,
- got: bytes.get().remaining(),
- });
- }
- let a = (0..a_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u16()))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "a", self.a.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint(self.a.len() as u64, 5);
- for elem in &self.a {
- buffer.put_u16(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.a.len() * 2
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- a: Vec<Foo>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub a: Vec<Foo>,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 128
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 128usize {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 128usize,
- got: bytes.get().remaining(),
- });
- }
- let (head, tail) = bytes.get().split_at(128usize);
- let mut head = &mut Cell::new(head);
- bytes.replace(tail);
- let mut a = Vec::new();
- while !head.get().is_empty() {
- a.push(Foo::parse_inner(head)?);
- }
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- let current_size = buffer.len();
- for elem in &self.a {
- elem.write_to(buffer);
- }
- let array_size = buffer.len() - current_size;
- if array_size > 128usize {
- panic!(
- "attempted to serialize an array larger than the enclosing padding size"
- );
- }
- buffer.put_bytes(0, 128usize - array_size);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 128
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_a(&self) -> &Vec<Foo> {
- &self.bar.as_ref().a
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { a: self.a });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
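The padded variant above always produces a fixed 128-byte body: write_to zero-fills whatever the elements do not occupy and panics if they exceed the padding size. A minimal sketch, assuming the items from that file and the generated Packet trait are in scope:

fn padded_size_example() {
    // to_vec comes from the generated Packet trait.
    let bytes = BarBuilder { a: vec![] }.build().to_vec();
    assert_eq!(bytes.len(), 128); // an empty array is zero-padded to the full 128 bytes
}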
diff --git a/tools/pdl/tests/generated/packet_decl_array_with_padding_little_endian.rs b/tools/pdl/tests/generated/packet_decl_array_with_padding_little_endian.rs
deleted file mode 100644
index 53cec92781..0000000000
--- a/tools/pdl/tests/generated/packet_decl_array_with_padding_little_endian.rs
+++ /dev/null
@@ -1,216 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: Vec<u16>,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- let a_count = bytes.get_mut().get_uint_le(5) as usize;
- if bytes.get().remaining() < a_count * 2usize {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: a_count * 2usize,
- got: bytes.get().remaining(),
- });
- }
- let a = (0..a_count)
- .map(|_| Ok::<_, Error>(bytes.get_mut().get_u16_le()))
- .collect::<Result<Vec<_>>>()?;
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a.len() > 0xff_ffff_ffff_usize {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "a", self.a.len(),
- 0xff_ffff_ffff_usize
- );
- }
- buffer.put_uint_le(self.a.len() as u64, 5);
- for elem in &self.a {
- buffer.put_u16_le(*elem);
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5 + self.a.len() * 2
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- a: Vec<Foo>,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub a: Vec<Foo>,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 128
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 128usize {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 128usize,
- got: bytes.get().remaining(),
- });
- }
- let (head, tail) = bytes.get().split_at(128usize);
- let mut head = &mut Cell::new(head);
- bytes.replace(tail);
- let mut a = Vec::new();
- while !head.get().is_empty() {
- a.push(Foo::parse_inner(head)?);
- }
- Ok(Self { a })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- let current_size = buffer.len();
- for elem in &self.a {
- elem.write_to(buffer);
- }
- let array_size = buffer.len() - current_size;
- if array_size > 128usize {
- panic!(
- "attempted to serialize an array larger than the enclosing padding size"
- );
- }
- buffer.put_bytes(0, 128usize - array_size);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 128
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.bar.get_size());
- self.bar.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = BarData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(bar: Arc<BarData>) -> Result<Self> {
- Ok(Self { bar })
- }
- pub fn get_a(&self) -> &Vec<Foo> {
- &self.bar.as_ref().a
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.bar.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { a: self.a });
- Bar::new(bar).unwrap()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_child_packets_big_endian.rs b/tools/pdl/tests/generated/packet_decl_child_packets_big_endian.rs
deleted file mode 100644
index e8468a7a44..0000000000
--- a/tools/pdl/tests/generated/packet_decl_child_packets_big_endian.rs
+++ /dev/null
@@ -1,584 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u16", into = "u16"))]
-pub enum Enum16 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u16> for Enum16 {
- type Error = u16;
- fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum16::A),
- 0x2 => Ok(Enum16::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum16> for u16 {
- fn from(value: &Enum16) -> Self {
- match value {
- Enum16::A => 0x1,
- Enum16::B => 0x2,
- }
- }
-}
-impl From<Enum16> for u16 {
- fn from(value: Enum16) -> Self {
- (&value).into()
- }
-}
-impl From<Enum16> for i32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for i64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Bar(Arc<BarData>),
- Baz(Arc<BazData>),
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Bar(value) => value.get_total_size(),
- FooDataChild::Baz(value) => value.get_total_size(),
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Bar(Bar),
- Baz(Baz),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: Enum16,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: Enum16,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 4
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_u8();
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let b = Enum16::try_from(bytes.get_mut().get_u16())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Foo".to_string(),
- field: "b".to_string(),
- value: bytes.get_mut().get_u16() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let payload_size = bytes.get_mut().get_u8() as usize;
- if bytes.get().remaining() < payload_size {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: payload_size,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..payload_size];
- bytes.get_mut().advance(payload_size);
- let child = match (a, b) {
- (100, _) if BarData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = BarData::parse_inner(&mut cell)?;
- FooDataChild::Bar(Arc::new(child_data))
- }
- (_, Enum16::B) if BazData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = BazData::parse_inner(&mut cell)?;
- FooDataChild::Baz(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, b, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.a);
- buffer.put_u16(u16::from(self.b));
- if self.child.get_total_size() > 0xff {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "_payload_", self.child
- .get_total_size(), 0xff
- );
- }
- buffer.put_u8(self.child.get_total_size() as u8);
- match &self.child {
- FooDataChild::Bar(child) => child.write_to(buffer),
- FooDataChild::Baz(child) => child.write_to(buffer),
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 4 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Bar(_) => FooChild::Bar(Bar::new(self.foo.clone()).unwrap()),
- FooDataChild::Baz(_) => FooChild::Baz(Baz::new(self.foo.clone()).unwrap()),
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Enum16 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub b: Enum16,
- pub x: u8,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u8();
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.x);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl From<Bar> for Foo {
- fn from(packet: Bar) -> Foo {
- Foo::new(packet.foo).unwrap()
- }
-}
-impl TryFrom<Foo> for Bar {
- type Error = Error;
- fn try_from(packet: Foo) -> Result<Bar> {
- Bar::new(packet.foo)
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- let bar = match &foo.child {
- FooDataChild::Bar(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(FooDataChild::Bar),
- actual: format!("{:?}", & foo.child),
- });
- }
- };
- Ok(Self { foo, bar })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Enum16 {
- self.foo.as_ref().b
- }
- pub fn get_x(&self) -> u8 {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- let foo = Arc::new(FooData {
- a: 100,
- b: self.b,
- child: FooDataChild::Bar(bar),
- });
- Bar::new(foo).unwrap()
- }
-}
-impl From<BarBuilder> for Foo {
- fn from(builder: BarBuilder) -> Foo {
- builder.build().into()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BazData {
- y: u16,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Baz {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- baz: Arc<BazData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BazBuilder {
- pub a: u8,
- pub y: u16,
-}
-impl BazData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 2
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Baz".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let y = bytes.get_mut().get_u16();
- Ok(Self { y })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u16(self.y);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 2
- }
-}
-impl Packet for Baz {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Baz> for Bytes {
- fn from(packet: Baz) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Baz> for Vec<u8> {
- fn from(packet: Baz) -> Self {
- packet.to_vec()
- }
-}
-impl From<Baz> for Foo {
- fn from(packet: Baz) -> Foo {
- Foo::new(packet.foo).unwrap()
- }
-}
-impl TryFrom<Foo> for Baz {
- type Error = Error;
- fn try_from(packet: Foo) -> Result<Baz> {
- Baz::new(packet.foo)
- }
-}
-impl Baz {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- let baz = match &foo.child {
- FooDataChild::Baz(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(FooDataChild::Baz),
- actual: format!("{:?}", & foo.child),
- });
- }
- };
- Ok(Self { foo, baz })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Enum16 {
- self.foo.as_ref().b
- }
- pub fn get_y(&self) -> u16 {
- self.baz.as_ref().y
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.baz.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl BazBuilder {
- pub fn build(self) -> Baz {
- let baz = Arc::new(BazData { y: self.y });
- let foo = Arc::new(FooData {
- a: self.a,
- b: Enum16::B,
- child: FooDataChild::Baz(baz),
- });
- Baz::new(foo).unwrap()
- }
-}
-impl From<BazBuilder> for Foo {
- fn from(builder: BazBuilder) -> Foo {
- builder.build().into()
- }
-}
-impl From<BazBuilder> for Baz {
- fn from(builder: BazBuilder) -> Baz {
- builder.build().into()
- }
-}
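For reference, a minimal dispatch sketch over the generated child packets deleted above (illustrative only; assumes the items from that file are in scope):

fn dispatch_example() {
    // From<BarBuilder> for Foo goes through Bar, which fixes the constraint a = 100.
    let foo: Foo = BarBuilder { b: Enum16::A, x: 7 }.into();
    match foo.specialize() {
        FooChild::Bar(bar) => assert_eq!(bar.get_x(), 7),
        FooChild::Baz(baz) => println!("y = {}", baz.get_y()),
        FooChild::Payload(payload) => println!("{} raw payload bytes", payload.len()),
        FooChild::None => {}
    }
}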
diff --git a/tools/pdl/tests/generated/packet_decl_child_packets_little_endian.rs b/tools/pdl/tests/generated/packet_decl_child_packets_little_endian.rs
deleted file mode 100644
index bbbb2617c4..0000000000
--- a/tools/pdl/tests/generated/packet_decl_child_packets_little_endian.rs
+++ /dev/null
@@ -1,584 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u16", into = "u16"))]
-pub enum Enum16 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u16> for Enum16 {
- type Error = u16;
- fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum16::A),
- 0x2 => Ok(Enum16::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum16> for u16 {
- fn from(value: &Enum16) -> Self {
- match value {
- Enum16::A => 0x1,
- Enum16::B => 0x2,
- }
- }
-}
-impl From<Enum16> for u16 {
- fn from(value: Enum16) -> Self {
- (&value).into()
- }
-}
-impl From<Enum16> for i32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for i64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Bar(Arc<BarData>),
- Baz(Arc<BazData>),
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Bar(value) => value.get_total_size(),
- FooDataChild::Baz(value) => value.get_total_size(),
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Bar(Bar),
- Baz(Baz),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: Enum16,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: Enum16,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 4
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_u8();
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let b = Enum16::try_from(bytes.get_mut().get_u16_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Foo".to_string(),
- field: "b".to_string(),
- value: bytes.get_mut().get_u16_le() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let payload_size = bytes.get_mut().get_u8() as usize;
- if bytes.get().remaining() < payload_size {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: payload_size,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..payload_size];
- bytes.get_mut().advance(payload_size);
- let child = match (a, b) {
- (100, _) if BarData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = BarData::parse_inner(&mut cell)?;
- FooDataChild::Bar(Arc::new(child_data))
- }
- (_, Enum16::B) if BazData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = BazData::parse_inner(&mut cell)?;
- FooDataChild::Baz(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, b, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.a);
- buffer.put_u16_le(u16::from(self.b));
- if self.child.get_total_size() > 0xff {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "_payload_", self.child
- .get_total_size(), 0xff
- );
- }
- buffer.put_u8(self.child.get_total_size() as u8);
- match &self.child {
- FooDataChild::Bar(child) => child.write_to(buffer),
- FooDataChild::Baz(child) => child.write_to(buffer),
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 4 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Bar(_) => FooChild::Bar(Bar::new(self.foo.clone()).unwrap()),
- FooDataChild::Baz(_) => FooChild::Baz(Baz::new(self.foo.clone()).unwrap()),
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Enum16 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarData {
- x: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Bar {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- bar: Arc<BarData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BarBuilder {
- pub b: Enum16,
- pub x: u8,
-}
-impl BarData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Bar".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u8();
- Ok(Self { x })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.x);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Bar {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Bar> for Bytes {
- fn from(packet: Bar) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Bar> for Vec<u8> {
- fn from(packet: Bar) -> Self {
- packet.to_vec()
- }
-}
-impl From<Bar> for Foo {
- fn from(packet: Bar) -> Foo {
- Foo::new(packet.foo).unwrap()
- }
-}
-impl TryFrom<Foo> for Bar {
- type Error = Error;
- fn try_from(packet: Foo) -> Result<Bar> {
- Bar::new(packet.foo)
- }
-}
-impl Bar {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- let bar = match &foo.child {
- FooDataChild::Bar(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(FooDataChild::Bar),
- actual: format!("{:?}", & foo.child),
- });
- }
- };
- Ok(Self { foo, bar })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Enum16 {
- self.foo.as_ref().b
- }
- pub fn get_x(&self) -> u8 {
- self.bar.as_ref().x
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.bar.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl BarBuilder {
- pub fn build(self) -> Bar {
- let bar = Arc::new(BarData { x: self.x });
- let foo = Arc::new(FooData {
- a: 100,
- b: self.b,
- child: FooDataChild::Bar(bar),
- });
- Bar::new(foo).unwrap()
- }
-}
-impl From<BarBuilder> for Foo {
- fn from(builder: BarBuilder) -> Foo {
- builder.build().into()
- }
-}
-impl From<BarBuilder> for Bar {
- fn from(builder: BarBuilder) -> Bar {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BazData {
- y: u16,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Baz {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- baz: Arc<BazData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct BazBuilder {
- pub a: u8,
- pub y: u16,
-}
-impl BazData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 2
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Baz".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let y = bytes.get_mut().get_u16_le();
- Ok(Self { y })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u16_le(self.y);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 2
- }
-}
-impl Packet for Baz {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Baz> for Bytes {
- fn from(packet: Baz) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Baz> for Vec<u8> {
- fn from(packet: Baz) -> Self {
- packet.to_vec()
- }
-}
-impl From<Baz> for Foo {
- fn from(packet: Baz) -> Foo {
- Foo::new(packet.foo).unwrap()
- }
-}
-impl TryFrom<Foo> for Baz {
- type Error = Error;
- fn try_from(packet: Foo) -> Result<Baz> {
- Baz::new(packet.foo)
- }
-}
-impl Baz {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- let baz = match &foo.child {
- FooDataChild::Baz(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(FooDataChild::Baz),
- actual: format!("{:?}", & foo.child),
- });
- }
- };
- Ok(Self { foo, baz })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Enum16 {
- self.foo.as_ref().b
- }
- pub fn get_y(&self) -> u16 {
- self.baz.as_ref().y
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.baz.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl BazBuilder {
- pub fn build(self) -> Baz {
- let baz = Arc::new(BazData { y: self.y });
- let foo = Arc::new(FooData {
- a: self.a,
- b: Enum16::B,
- child: FooDataChild::Baz(baz),
- });
- Baz::new(foo).unwrap()
- }
-}
-impl From<BazBuilder> for Foo {
- fn from(builder: BazBuilder) -> Foo {
- builder.build().into()
- }
-}
-impl From<BazBuilder> for Baz {
- fn from(builder: BazBuilder) -> Baz {
- builder.build().into()
- }
-}
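// A minimal usage sketch of the parent/child API generated above; the
// function name and field values are illustrative, not taken from the test
// suite. BarBuilder fixes the parent constraint a = 100, and Foo::specialize
// recovers the concrete child after a parse.
fn example_parent_child_roundtrip() {
    let bar: Bar = BarBuilder { b: Enum16::A, x: 7 }.into();
    let bytes = bar.to_vec(); // [100, 0x01, 0x00, 1, 7]: a, b (LE), size, x
    let foo = Foo::parse(&bytes).unwrap();
    match foo.specialize() {
        FooChild::Bar(bar) => assert_eq!(bar.get_x(), 7),
        other => panic!("expected Bar child, got {:?}", other),
    }
}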
diff --git a/tools/pdl/tests/generated/packet_decl_complex_scalars_big_endian.rs b/tools/pdl/tests/generated/packet_decl_complex_scalars_big_endian.rs
deleted file mode 100644
index 688af4282f..0000000000
--- a/tools/pdl/tests/generated/packet_decl_complex_scalars_big_endian.rs
+++ /dev/null
@@ -1,218 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: u8,
- c: u8,
- d: u32,
- e: u16,
- f: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: u8,
- pub c: u8,
- pub d: u32,
- pub e: u16,
- pub f: u8,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16();
- let a = (chunk & 0x7) as u8;
- let b = (chunk >> 3) as u8;
- let c = ((chunk >> 11) & 0x1f) as u8;
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let d = bytes.get_mut().get_uint(3) as u32;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16();
- let e = (chunk & 0xfff);
- let f = ((chunk >> 12) & 0xf) as u8;
- Ok(Self { a, b, c, d, e, f })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0x7 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0x7);
- }
- if self.c > 0x1f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "c", self.c, 0x1f);
- }
- let value = (self.a as u16) | ((self.b as u16) << 3) | ((self.c as u16) << 11);
- buffer.put_u16(value);
- if self.d > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "d", self.d, 0xff_ffff);
- }
- buffer.put_uint(self.d as u64, 3);
- if self.e > 0xfff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "e", self.e, 0xfff);
- }
- if self.f > 0xf {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "f", self.f, 0xf);
- }
- let value = self.e | ((self.f as u16) << 12);
- buffer.put_u16(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> u8 {
- self.foo.as_ref().b
- }
- pub fn get_c(&self) -> u8 {
- self.foo.as_ref().c
- }
- pub fn get_d(&self) -> u32 {
- self.foo.as_ref().d
- }
- pub fn get_e(&self) -> u16 {
- self.foo.as_ref().e
- }
- pub fn get_f(&self) -> u8 {
- self.foo.as_ref().f
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- c: self.c,
- d: self.d,
- e: self.e,
- f: self.f,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
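// An illustrative packing of the bit fields declared above (big-endian
// variant); the values are arbitrary. a fills the 3 low bits, b the next 8,
// and c the top 5 bits of the first u16; d is a 24-bit field; e fills the
// low 12 and f the top 4 bits of the final u16.
fn example_complex_scalars_packing() {
    let foo = FooBuilder { a: 0x5, b: 0xAB, c: 0x11, d: 0x123456, e: 0xDEF, f: 0x3 }
        .build();
    // first chunk: (0x11 << 11) | (0xAB << 3) | 0x5 = 0x8D5D
    // last chunk:  (0x3 << 12) | 0xDEF = 0x3DEF
    assert_eq!(foo.to_vec(), vec![0x8D, 0x5D, 0x12, 0x34, 0x56, 0x3D, 0xEF]);
}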
diff --git a/tools/pdl/tests/generated/packet_decl_complex_scalars_little_endian.rs b/tools/pdl/tests/generated/packet_decl_complex_scalars_little_endian.rs
deleted file mode 100644
index 83da632a3a..0000000000
--- a/tools/pdl/tests/generated/packet_decl_complex_scalars_little_endian.rs
+++ /dev/null
@@ -1,218 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: u8,
- c: u8,
- d: u32,
- e: u16,
- f: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: u8,
- pub c: u8,
- pub d: u32,
- pub e: u16,
- pub f: u8,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16_le();
- let a = (chunk & 0x7) as u8;
- let b = (chunk >> 3) as u8;
- let c = ((chunk >> 11) & 0x1f) as u8;
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let d = bytes.get_mut().get_uint_le(3) as u32;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16_le();
- let e = (chunk & 0xfff);
- let f = ((chunk >> 12) & 0xf) as u8;
- Ok(Self { a, b, c, d, e, f })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0x7 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0x7);
- }
- if self.c > 0x1f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "c", self.c, 0x1f);
- }
- let value = (self.a as u16) | ((self.b as u16) << 3) | ((self.c as u16) << 11);
- buffer.put_u16_le(value);
- if self.d > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "d", self.d, 0xff_ffff);
- }
- buffer.put_uint_le(self.d as u64, 3);
- if self.e > 0xfff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "e", self.e, 0xfff);
- }
- if self.f > 0xf {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "f", self.f, 0xf);
- }
- let value = self.e | ((self.f as u16) << 12);
- buffer.put_u16_le(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> u8 {
- self.foo.as_ref().b
- }
- pub fn get_c(&self) -> u8 {
- self.foo.as_ref().c
- }
- pub fn get_d(&self) -> u32 {
- self.foo.as_ref().d
- }
- pub fn get_e(&self) -> u16 {
- self.foo.as_ref().e
- }
- pub fn get_f(&self) -> u8 {
- self.foo.as_ref().f
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- c: self.c,
- d: self.d,
- e: self.e,
- f: self.f,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_custom_field_big_endian.rs b/tools/pdl/tests/generated/packet_decl_custom_field_big_endian.rs
deleted file mode 100644
index 72bb2c6fca..0000000000
--- a/tools/pdl/tests/generated/packet_decl_custom_field_big_endian.rs
+++ /dev/null
@@ -1,185 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub struct Bar1(u32);
-impl From<&Bar1> for u32 {
- fn from(value: &Bar1) -> u32 {
- value.0
- }
-}
-impl From<Bar1> for u32 {
- fn from(value: Bar1) -> u32 {
- value.0
- }
-}
-impl TryFrom<u32> for Bar1 {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- if value > 0xff_ffff { Err(value) } else { Ok(Bar1(value)) }
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(from = "u32", into = "u32"))]
-pub struct Bar2(u32);
-impl From<&Bar2> for u32 {
- fn from(value: &Bar2) -> u32 {
- value.0
- }
-}
-impl From<Bar2> for u32 {
- fn from(value: Bar2) -> u32 {
- value.0
- }
-}
-impl From<u32> for Bar2 {
- fn from(value: u32) -> Self {
- Bar2(value)
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: Bar1,
- b: Bar2,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: Bar1,
- pub b: Bar2,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let a = (bytes.get_mut().get_uint(3) as u32).try_into().unwrap();
- let b = bytes.get_mut().get_u32().into();
- Ok(Self { a, b })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_uint(u32::from(self.a) as u64, 3);
- buffer.put_u32(u32::from(self.b));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> Bar1 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Bar2 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { a: self.a, b: self.b });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
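// An illustrative check of the custom field wrappers above: Bar1 is
// range-validated to 24 bits via TryFrom, while Bar2 accepts any u32.
// The constants are arbitrary.
fn example_custom_field_bounds() {
    assert!(Bar1::try_from(0x00ff_ffffu32).is_ok());
    assert!(Bar1::try_from(0x0100_0000u32).is_err());
    let b = Bar2::from(0xdead_beefu32);
    assert_eq!(u32::from(b), 0xdead_beef);
}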
diff --git a/tools/pdl/tests/generated/packet_decl_custom_field_little_endian.rs b/tools/pdl/tests/generated/packet_decl_custom_field_little_endian.rs
deleted file mode 100644
index 600bab6178..0000000000
--- a/tools/pdl/tests/generated/packet_decl_custom_field_little_endian.rs
+++ /dev/null
@@ -1,185 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u32", into = "u32"))]
-pub struct Bar1(u32);
-impl From<&Bar1> for u32 {
- fn from(value: &Bar1) -> u32 {
- value.0
- }
-}
-impl From<Bar1> for u32 {
- fn from(value: Bar1) -> u32 {
- value.0
- }
-}
-impl TryFrom<u32> for Bar1 {
- type Error = u32;
- fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
- if value > 0xff_ffff { Err(value) } else { Ok(Bar1(value)) }
- }
-}
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(from = "u32", into = "u32"))]
-pub struct Bar2(u32);
-impl From<&Bar2> for u32 {
- fn from(value: &Bar2) -> u32 {
- value.0
- }
-}
-impl From<Bar2> for u32 {
- fn from(value: Bar2) -> u32 {
- value.0
- }
-}
-impl From<u32> for Bar2 {
- fn from(value: u32) -> Self {
- Bar2(value)
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: Bar1,
- b: Bar2,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: Bar1,
- pub b: Bar2,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let a = (bytes.get_mut().get_uint_le(3) as u32).try_into().unwrap();
- let b = bytes.get_mut().get_u32_le().into();
- Ok(Self { a, b })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_uint_le(u32::from(self.a) as u64, 3);
- buffer.put_u32_le(u32::from(self.b));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> Bar1 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> Bar2 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { a: self.a, b: self.b });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_empty_big_endian.rs b/tools/pdl/tests/generated/packet_decl_empty_big_endian.rs
deleted file mode 100644
index d05c2ff750..0000000000
--- a/tools/pdl/tests/generated/packet_decl_empty_big_endian.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {});
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_empty_little_endian.rs b/tools/pdl/tests/generated/packet_decl_empty_little_endian.rs
deleted file mode 100644
index d05c2ff750..0000000000
--- a/tools/pdl/tests/generated/packet_decl_empty_little_endian.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {});
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_fixed_enum_field_big_endian.rs b/tools/pdl/tests/generated/packet_decl_fixed_enum_field_big_endian.rs
deleted file mode 100644
index 8683e02959..0000000000
--- a/tools/pdl/tests/generated/packet_decl_fixed_enum_field_big_endian.rs
+++ /dev/null
@@ -1,226 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum7 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u8> for Enum7 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum7::A),
- 0x2 => Ok(Enum7::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum7> for u8 {
- fn from(value: &Enum7) -> Self {
- match value {
- Enum7::A => 0x1,
- Enum7::B => 0x2,
- }
- }
-}
-impl From<Enum7> for u8 {
- fn from(value: Enum7) -> Self {
- (&value).into()
- }
-}
-impl From<Enum7> for i8 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- b: u64,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub b: u64,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u64();
- if (chunk & 0x7f) as u8 != u8::from(Enum7::A) {
- return Err(Error::InvalidFixedValue {
- expected: u8::from(Enum7::A) as u64,
- actual: (chunk & 0x7f) as u8 as u64,
- });
- }
- let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
- Ok(Self { b })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.b > 0x1ff_ffff_ffff_ffff_u64 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "b", self.b,
- 0x1ff_ffff_ffff_ffff_u64
- );
- }
- let value = (u8::from(Enum7::A) as u64) | (self.b << 7);
- buffer.put_u64(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_b(&self) -> u64 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { b: self.b });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
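// An illustrative parse of the fixed enum field above (big-endian variant);
// the byte arrays are arbitrary. The 7 low bits of the u64 chunk must decode
// to the fixed value Enum7::A (0x1), otherwise parsing fails with
// InvalidFixedValue.
fn example_fixed_enum_field() {
    let ok = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x15, 0x01];
    assert_eq!(Foo::parse(&ok).unwrap().get_b(), 0x2a); // b = 0x1501 >> 7
    let bad = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x15, 0x02]; // low bits = Enum7::B
    assert!(Foo::parse(&bad).is_err());
}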
diff --git a/tools/pdl/tests/generated/packet_decl_fixed_enum_field_little_endian.rs b/tools/pdl/tests/generated/packet_decl_fixed_enum_field_little_endian.rs
deleted file mode 100644
index f598d3e719..0000000000
--- a/tools/pdl/tests/generated/packet_decl_fixed_enum_field_little_endian.rs
+++ /dev/null
@@ -1,226 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum7 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u8> for Enum7 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum7::A),
- 0x2 => Ok(Enum7::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum7> for u8 {
- fn from(value: &Enum7) -> Self {
- match value {
- Enum7::A => 0x1,
- Enum7::B => 0x2,
- }
- }
-}
-impl From<Enum7> for u8 {
- fn from(value: Enum7) -> Self {
- (&value).into()
- }
-}
-impl From<Enum7> for i8 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- b: u64,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub b: u64,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u64_le();
- if (chunk & 0x7f) as u8 != u8::from(Enum7::A) {
- return Err(Error::InvalidFixedValue {
- expected: u8::from(Enum7::A) as u64,
- actual: (chunk & 0x7f) as u8 as u64,
- });
- }
- let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
- Ok(Self { b })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.b > 0x1ff_ffff_ffff_ffff_u64 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "b", self.b,
- 0x1ff_ffff_ffff_ffff_u64
- );
- }
- let value = (u8::from(Enum7::A) as u64) | (self.b << 7);
- buffer.put_u64_le(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_b(&self) -> u64 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { b: self.b });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs b/tools/pdl/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs
deleted file mode 100644
index a44732ebce..0000000000
--- a/tools/pdl/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs
+++ /dev/null
@@ -1,160 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- b: u64,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub b: u64,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u64();
- if (chunk & 0x7f) as u8 != 7 {
- return Err(Error::InvalidFixedValue {
- expected: 7,
- actual: (chunk & 0x7f) as u8 as u64,
- });
- }
- let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
- Ok(Self { b })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.b > 0x1ff_ffff_ffff_ffff_u64 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "b", self.b,
- 0x1ff_ffff_ffff_ffff_u64
- );
- }
- let value = (7 as u64) | (self.b << 7);
- buffer.put_u64(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_b(&self) -> u64 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { b: self.b });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
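The big-endian file removed above packs a 7-bit fixed value (7) and the 57-bit scalar b into a single u64 read with get_u64(). A minimal, hypothetical round-trip sketch against that generated API (assuming the file is compiled into a test crate together with the bytes and thiserror dependencies it imports; the function name is illustrative only):

// Hypothetical usage of the generated big-endian Foo API above.
// Wire layout (big-endian u64): low 7 bits = fixed value 7, upper 57 bits = b.
fn foo_big_endian_round_trip() -> Result<()> {
    // 0x87 in the last byte: low 7 bits = 7 (fixed), next bit = b's LSB, so b == 1.
    let wire = [0u8, 0, 0, 0, 0, 0, 0, 0x87];
    let foo = Foo::parse(&wire)?;
    assert_eq!(foo.get_b(), 1);
    // Rebuilding with the same field value reproduces the original bytes.
    assert_eq!(FooBuilder { b: 1 }.build().to_vec(), wire.to_vec());
    Ok(())
}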
diff --git a/tools/pdl/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs b/tools/pdl/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs
deleted file mode 100644
index d2c7985aa1..0000000000
--- a/tools/pdl/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs
+++ /dev/null
@@ -1,160 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- b: u64,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub b: u64,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 8
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 8 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 8,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u64_le();
- if (chunk & 0x7f) as u8 != 7 {
- return Err(Error::InvalidFixedValue {
- expected: 7,
- actual: (chunk & 0x7f) as u8 as u64,
- });
- }
- let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
- Ok(Self { b })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.b > 0x1ff_ffff_ffff_ffff_u64 {
- panic!(
- "Invalid value for {}::{}: {} > {}", "Foo", "b", self.b,
- 0x1ff_ffff_ffff_ffff_u64
- );
- }
- let value = (7 as u64) | (self.b << 7);
- buffer.put_u64_le(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 8
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_b(&self) -> u64 {
- self.foo.as_ref().b
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData { b: self.b });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
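The little-endian variant deleted above differs only in the byte order used for the u64 chunk (get_u64_le / put_u64_le); the fixed value still occupies the low 7 bits of the decoded integer. A hypothetical sketch under the same assumptions as the previous one:

// Hypothetical usage of the generated little-endian Foo API above.
// The same logical value (fixed = 7, b = 1) now puts 0x87 in the first byte.
fn foo_little_endian_round_trip() -> Result<()> {
    let wire = [0x87u8, 0, 0, 0, 0, 0, 0, 0];
    let foo = Foo::parse(&wire)?;
    assert_eq!(foo.get_b(), 1);
    assert_eq!(FooBuilder { b: 1 }.build().to_vec(), wire.to_vec());
    Ok(())
}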
diff --git a/tools/pdl/tests/generated/packet_decl_grand_children_big_endian.rs b/tools/pdl/tests/generated/packet_decl_grand_children_big_endian.rs
deleted file mode 100644
index 33432f3e01..0000000000
--- a/tools/pdl/tests/generated/packet_decl_grand_children_big_endian.rs
+++ /dev/null
@@ -1,993 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u16", into = "u16"))]
-pub enum Enum16 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u16> for Enum16 {
- type Error = u16;
- fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum16::A),
- 0x2 => Ok(Enum16::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum16> for u16 {
- fn from(value: &Enum16) -> Self {
- match value {
- Enum16::A => 0x1,
- Enum16::B => 0x2,
- }
- }
-}
-impl From<Enum16> for u16 {
- fn from(value: Enum16) -> Self {
- (&value).into()
- }
-}
-impl From<Enum16> for i32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for i64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentDataChild {
- Child(Arc<ChildData>),
- Payload(Bytes),
- None,
-}
-impl ParentDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ParentDataChild::Child(value) => value.get_total_size(),
- ParentDataChild::Payload(bytes) => bytes.len(),
- ParentDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentChild {
- Child(Child),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentData {
- foo: Enum16,
- bar: Enum16,
- baz: Enum16,
- child: ParentDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Parent {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentBuilder {
- pub bar: Enum16,
- pub baz: Enum16,
- pub foo: Enum16,
- pub payload: Option<Bytes>,
-}
-impl ParentData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let foo = Enum16::try_from(bytes.get_mut().get_u16())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "foo".to_string(),
- value: bytes.get_mut().get_u16() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let bar = Enum16::try_from(bytes.get_mut().get_u16())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "bar".to_string(),
- value: bytes.get_mut().get_u16() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let baz = Enum16::try_from(bytes.get_mut().get_u16())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "baz".to_string(),
- value: bytes.get_mut().get_u16() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let payload_size = bytes.get_mut().get_u8() as usize;
- if bytes.get().remaining() < payload_size {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: payload_size,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..payload_size];
- bytes.get_mut().advance(payload_size);
- let child = match (foo) {
- (Enum16::A) if ChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = ChildData::parse_inner(&mut cell, bar, baz)?;
- ParentDataChild::Child(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ParentDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ParentDataChild::None,
- };
- Ok(Self { foo, bar, baz, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u16(u16::from(self.foo));
- buffer.put_u16(u16::from(self.bar));
- buffer.put_u16(u16::from(self.baz));
- if self.child.get_total_size() > 0xff {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Parent", "_payload_", self.child
- .get_total_size(), 0xff
- );
- }
- buffer.put_u8(self.child.get_total_size() as u8);
- match &self.child {
- ParentDataChild::Child(child) => child.write_to(buffer),
- ParentDataChild::Payload(payload) => buffer.put_slice(payload),
- ParentDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7 + self.child.get_total_size()
- }
-}
-impl Packet for Parent {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Parent> for Bytes {
- fn from(packet: Parent) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Parent> for Vec<u8> {
- fn from(packet: Parent) -> Self {
- packet.to_vec()
- }
-}
-impl Parent {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ParentChild {
- match &self.parent.child {
- ParentDataChild::Child(_) => {
- ParentChild::Child(Child::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::Payload(payload) => ParentChild::Payload(payload.clone()),
- ParentDataChild::None => ParentChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- Ok(Self { parent })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.parent.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ParentBuilder {
- pub fn build(self) -> Parent {
- let parent = Arc::new(ParentData {
- bar: self.bar,
- baz: self.baz,
- foo: self.foo,
- child: match self.payload {
- None => ParentDataChild::None,
- Some(bytes) => ParentDataChild::Payload(bytes),
- },
- });
- Parent::new(parent).unwrap()
- }
-}
-impl From<ParentBuilder> for Parent {
- fn from(builder: ParentBuilder) -> Parent {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ChildDataChild {
- GrandChild(Arc<GrandChildData>),
- Payload(Bytes),
- None,
-}
-impl ChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ChildDataChild::GrandChild(value) => value.get_total_size(),
- ChildDataChild::Payload(bytes) => bytes.len(),
- ChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ChildChild {
- GrandChild(GrandChild),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildData {
- quux: Enum16,
- child: ChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Child {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildBuilder {
- pub bar: Enum16,
- pub baz: Enum16,
- pub quux: Enum16,
- pub payload: Option<Bytes>,
-}
-impl ChildData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 2
- }
- fn parse(bytes: &[u8], bar: Enum16, baz: Enum16) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell, bar, baz)?;
- Ok(packet)
- }
- fn parse_inner(
- mut bytes: &mut Cell<&[u8]>,
- bar: Enum16,
- baz: Enum16,
- ) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Child".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let quux = Enum16::try_from(bytes.get_mut().get_u16())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Child".to_string(),
- field: "quux".to_string(),
- value: bytes.get_mut().get_u16() as u64,
- type_: "Enum16".to_string(),
- })?;
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (bar, quux) {
- (Enum16::A, Enum16::A) if GrandChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = GrandChildData::parse_inner(&mut cell, baz)?;
- ChildDataChild::GrandChild(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ChildDataChild::None,
- };
- Ok(Self { quux, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u16(u16::from(self.quux));
- match &self.child {
- ChildDataChild::GrandChild(child) => child.write_to(buffer),
- ChildDataChild::Payload(payload) => buffer.put_slice(payload),
- ChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 2 + self.child.get_total_size()
- }
-}
-impl Packet for Child {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Child> for Bytes {
- fn from(packet: Child) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Child> for Vec<u8> {
- fn from(packet: Child) -> Self {
- packet.to_vec()
- }
-}
-impl From<Child> for Parent {
- fn from(packet: Child) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for Child {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<Child> {
- Child::new(packet.parent)
- }
-}
-impl Child {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ChildChild {
- match &self.child.child {
- ChildDataChild::GrandChild(_) => {
- ChildChild::GrandChild(GrandChild::new(self.parent.clone()).unwrap())
- }
- ChildDataChild::Payload(payload) => ChildChild::Payload(payload.clone()),
- ChildDataChild::None => ChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, child })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- pub fn get_quux(&self) -> Enum16 {
- self.child.as_ref().quux
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.child.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ChildBuilder {
- pub fn build(self) -> Child {
- let child = Arc::new(ChildData {
- quux: self.quux,
- child: match self.payload {
- None => ChildDataChild::None,
- Some(bytes) => ChildDataChild::Payload(bytes),
- },
- });
- let parent = Arc::new(ParentData {
- bar: self.bar,
- baz: self.baz,
- foo: Enum16::A,
- child: ParentDataChild::Child(child),
- });
- Child::new(parent).unwrap()
- }
-}
-impl From<ChildBuilder> for Parent {
- fn from(builder: ChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<ChildBuilder> for Child {
- fn from(builder: ChildBuilder) -> Child {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandChildDataChild {
- GrandGrandChild(Arc<GrandGrandChildData>),
- Payload(Bytes),
- None,
-}
-impl GrandChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- GrandChildDataChild::GrandGrandChild(value) => value.get_total_size(),
- GrandChildDataChild::Payload(bytes) => bytes.len(),
- GrandChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandChildChild {
- GrandGrandChild(GrandGrandChild),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandChildData {
- child: GrandChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- grandchild: Arc<GrandChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandChildBuilder {
- pub baz: Enum16,
- pub payload: Option<Bytes>,
-}
-impl GrandChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8], baz: Enum16) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell, baz)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>, baz: Enum16) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (baz) {
- (Enum16::A) if GrandGrandChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = GrandGrandChildData::parse_inner(&mut cell)?;
- GrandChildDataChild::GrandGrandChild(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- GrandChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => GrandChildDataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- GrandChildDataChild::GrandGrandChild(child) => child.write_to(buffer),
- GrandChildDataChild::Payload(payload) => buffer.put_slice(payload),
- GrandChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for GrandChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<GrandChild> for Bytes {
- fn from(packet: GrandChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<GrandChild> for Vec<u8> {
- fn from(packet: GrandChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<GrandChild> for Parent {
- fn from(packet: GrandChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<GrandChild> for Child {
- fn from(packet: GrandChild) -> Child {
- Child::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for GrandChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<GrandChild> {
- GrandChild::new(packet.parent)
- }
-}
-impl GrandChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> GrandChildChild {
- match &self.grandchild.child {
- GrandChildDataChild::GrandGrandChild(_) => {
- GrandChildChild::GrandGrandChild(
- GrandGrandChild::new(self.parent.clone()).unwrap(),
- )
- }
- GrandChildDataChild::Payload(payload) => {
- GrandChildChild::Payload(payload.clone())
- }
- GrandChildDataChild::None => GrandChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let grandchild = match &child.child {
- ChildDataChild::GrandChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ChildDataChild::GrandChild),
- actual: format!("{:?}", & child.child),
- });
- }
- };
- Ok(Self { parent, child, grandchild })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- pub fn get_quux(&self) -> Enum16 {
- self.child.as_ref().quux
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.grandchild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl GrandChildBuilder {
- pub fn build(self) -> GrandChild {
- let grandchild = Arc::new(GrandChildData {
- child: match self.payload {
- None => GrandChildDataChild::None,
- Some(bytes) => GrandChildDataChild::Payload(bytes),
- },
- });
- let child = Arc::new(ChildData {
- quux: Enum16::A,
- child: ChildDataChild::GrandChild(grandchild),
- });
- let parent = Arc::new(ParentData {
- bar: Enum16::A,
- baz: self.baz,
- foo: Enum16::A,
- child: ParentDataChild::Child(child),
- });
- GrandChild::new(parent).unwrap()
- }
-}
-impl From<GrandChildBuilder> for Parent {
- fn from(builder: GrandChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<GrandChildBuilder> for Child {
- fn from(builder: GrandChildBuilder) -> Child {
- builder.build().into()
- }
-}
-impl From<GrandChildBuilder> for GrandChild {
- fn from(builder: GrandChildBuilder) -> GrandChild {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandGrandChildDataChild {
- Payload(Bytes),
- None,
-}
-impl GrandGrandChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- GrandGrandChildDataChild::Payload(bytes) => bytes.len(),
- GrandGrandChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandGrandChildChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandGrandChildData {
- child: GrandGrandChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandGrandChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- grandchild: Arc<GrandChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- grandgrandchild: Arc<GrandGrandChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandGrandChildBuilder {
- pub payload: Option<Bytes>,
-}
-impl GrandGrandChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match () {
- _ if !payload.is_empty() => {
- GrandGrandChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => GrandGrandChildDataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- GrandGrandChildDataChild::Payload(payload) => buffer.put_slice(payload),
- GrandGrandChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for GrandGrandChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<GrandGrandChild> for Bytes {
- fn from(packet: GrandGrandChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<GrandGrandChild> for Vec<u8> {
- fn from(packet: GrandGrandChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<GrandGrandChild> for Parent {
- fn from(packet: GrandGrandChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<GrandGrandChild> for Child {
- fn from(packet: GrandGrandChild) -> Child {
- Child::new(packet.parent).unwrap()
- }
-}
-impl From<GrandGrandChild> for GrandChild {
- fn from(packet: GrandGrandChild) -> GrandChild {
- GrandChild::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for GrandGrandChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<GrandGrandChild> {
- GrandGrandChild::new(packet.parent)
- }
-}
-impl GrandGrandChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> GrandGrandChildChild {
- match &self.grandgrandchild.child {
- GrandGrandChildDataChild::Payload(payload) => {
- GrandGrandChildChild::Payload(payload.clone())
- }
- GrandGrandChildDataChild::None => GrandGrandChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let grandchild = match &child.child {
- ChildDataChild::GrandChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ChildDataChild::GrandChild),
- actual: format!("{:?}", & child.child),
- });
- }
- };
- let grandgrandchild = match &grandchild.child {
- GrandChildDataChild::GrandGrandChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(GrandChildDataChild::GrandGrandChild),
- actual: format!("{:?}", & grandchild.child),
- });
- }
- };
- Ok(Self {
- parent,
- child,
- grandchild,
- grandgrandchild,
- })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- pub fn get_quux(&self) -> Enum16 {
- self.child.as_ref().quux
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.grandgrandchild.child {
- GrandGrandChildDataChild::Payload(bytes) => &bytes,
- GrandGrandChildDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.grandgrandchild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl GrandGrandChildBuilder {
- pub fn build(self) -> GrandGrandChild {
- let grandgrandchild = Arc::new(GrandGrandChildData {
- child: match self.payload {
- None => GrandGrandChildDataChild::None,
- Some(bytes) => GrandGrandChildDataChild::Payload(bytes),
- },
- });
- let grandchild = Arc::new(GrandChildData {
- child: GrandChildDataChild::GrandGrandChild(grandgrandchild),
- });
- let child = Arc::new(ChildData {
- quux: Enum16::A,
- child: ChildDataChild::GrandChild(grandchild),
- });
- let parent = Arc::new(ParentData {
- bar: Enum16::A,
- baz: Enum16::A,
- foo: Enum16::A,
- child: ParentDataChild::Child(child),
- });
- GrandGrandChild::new(parent).unwrap()
- }
-}
-impl From<GrandGrandChildBuilder> for Parent {
- fn from(builder: GrandGrandChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<GrandGrandChildBuilder> for Child {
- fn from(builder: GrandGrandChildBuilder) -> Child {
- builder.build().into()
- }
-}
-impl From<GrandGrandChildBuilder> for GrandChild {
- fn from(builder: GrandGrandChildBuilder) -> GrandChild {
- builder.build().into()
- }
-}
-impl From<GrandGrandChildBuilder> for GrandGrandChild {
- fn from(builder: GrandGrandChildBuilder) -> GrandGrandChild {
- builder.build().into()
- }
-}
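The grand-children test above exercises the generated parent/child chain: every packet keeps an Arc to its ParentData, the builders pin the constrained enum fields (foo, bar and quux are forced to Enum16::A for the deeper levels), and specialize() recovers the next more derived packet from a parsed parent. A hypothetical walk down that chain, assuming the big-endian file above is compiled as-is into a test crate (the test function name is illustrative):

// Hypothetical usage of the generated Parent/Child/GrandChild API above.
fn grand_child_specialization() -> Result<()> {
    // Build a GrandChild carrying one raw payload byte; baz is the only free field.
    let built = GrandChildBuilder {
        baz: Enum16::B,
        payload: Some(Bytes::from_static(&[0xaa])),
    }
    .build();
    // Serialization is driven from the root ParentData: foo/bar/baz header,
    // one payload-size byte (3), quux, then the grandchild payload byte.
    let wire = built.to_vec();
    assert_eq!(wire, vec![0x00, 0x01, 0x00, 0x01, 0x00, 0x02, 0x03, 0x00, 0x01, 0xaa]);
    // Parsing starts at the parent; specialize() walks back down one level at a time.
    let parent = Parent::parse(&wire)?;
    let child = match parent.specialize() {
        ParentChild::Child(child) => child,
        other => panic!("expected Child, got {:?}", other),
    };
    assert_eq!(child.get_quux(), Enum16::A);
    let grandchild = match child.specialize() {
        ChildChild::GrandChild(gc) => gc,
        other => panic!("expected GrandChild, got {:?}", other),
    };
    assert_eq!(grandchild.get_baz(), Enum16::B);
    Ok(())
}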
diff --git a/tools/pdl/tests/generated/packet_decl_grand_children_little_endian.rs b/tools/pdl/tests/generated/packet_decl_grand_children_little_endian.rs
deleted file mode 100644
index c223814d2b..0000000000
--- a/tools/pdl/tests/generated/packet_decl_grand_children_little_endian.rs
+++ /dev/null
@@ -1,993 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u16", into = "u16"))]
-pub enum Enum16 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u16> for Enum16 {
- type Error = u16;
- fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum16::A),
- 0x2 => Ok(Enum16::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum16> for u16 {
- fn from(value: &Enum16) -> Self {
- match value {
- Enum16::A => 0x1,
- Enum16::B => 0x2,
- }
- }
-}
-impl From<Enum16> for u16 {
- fn from(value: Enum16) -> Self {
- (&value).into()
- }
-}
-impl From<Enum16> for i32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for i64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u32 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum16> for u64 {
- fn from(value: Enum16) -> Self {
- u16::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentDataChild {
- Child(Arc<ChildData>),
- Payload(Bytes),
- None,
-}
-impl ParentDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ParentDataChild::Child(value) => value.get_total_size(),
- ParentDataChild::Payload(bytes) => bytes.len(),
- ParentDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentChild {
- Child(Child),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentData {
- foo: Enum16,
- bar: Enum16,
- baz: Enum16,
- child: ParentDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Parent {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentBuilder {
- pub bar: Enum16,
- pub baz: Enum16,
- pub foo: Enum16,
- pub payload: Option<Bytes>,
-}
-impl ParentData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let foo = Enum16::try_from(bytes.get_mut().get_u16_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "foo".to_string(),
- value: bytes.get_mut().get_u16_le() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let bar = Enum16::try_from(bytes.get_mut().get_u16_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "bar".to_string(),
- value: bytes.get_mut().get_u16_le() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let baz = Enum16::try_from(bytes.get_mut().get_u16_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "baz".to_string(),
- value: bytes.get_mut().get_u16_le() as u64,
- type_: "Enum16".to_string(),
- })?;
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let payload_size = bytes.get_mut().get_u8() as usize;
- if bytes.get().remaining() < payload_size {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: payload_size,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..payload_size];
- bytes.get_mut().advance(payload_size);
- let child = match (foo) {
- (Enum16::A) if ChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = ChildData::parse_inner(&mut cell, bar, baz)?;
- ParentDataChild::Child(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ParentDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ParentDataChild::None,
- };
- Ok(Self { foo, bar, baz, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u16_le(u16::from(self.foo));
- buffer.put_u16_le(u16::from(self.bar));
- buffer.put_u16_le(u16::from(self.baz));
- if self.child.get_total_size() > 0xff {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Parent", "_payload_", self.child
- .get_total_size(), 0xff
- );
- }
- buffer.put_u8(self.child.get_total_size() as u8);
- match &self.child {
- ParentDataChild::Child(child) => child.write_to(buffer),
- ParentDataChild::Payload(payload) => buffer.put_slice(payload),
- ParentDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7 + self.child.get_total_size()
- }
-}
-impl Packet for Parent {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Parent> for Bytes {
- fn from(packet: Parent) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Parent> for Vec<u8> {
- fn from(packet: Parent) -> Self {
- packet.to_vec()
- }
-}
-impl Parent {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ParentChild {
- match &self.parent.child {
- ParentDataChild::Child(_) => {
- ParentChild::Child(Child::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::Payload(payload) => ParentChild::Payload(payload.clone()),
- ParentDataChild::None => ParentChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- Ok(Self { parent })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.parent.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ParentBuilder {
- pub fn build(self) -> Parent {
- let parent = Arc::new(ParentData {
- bar: self.bar,
- baz: self.baz,
- foo: self.foo,
- child: match self.payload {
- None => ParentDataChild::None,
- Some(bytes) => ParentDataChild::Payload(bytes),
- },
- });
- Parent::new(parent).unwrap()
- }
-}
-impl From<ParentBuilder> for Parent {
- fn from(builder: ParentBuilder) -> Parent {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ChildDataChild {
- GrandChild(Arc<GrandChildData>),
- Payload(Bytes),
- None,
-}
-impl ChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ChildDataChild::GrandChild(value) => value.get_total_size(),
- ChildDataChild::Payload(bytes) => bytes.len(),
- ChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ChildChild {
- GrandChild(GrandChild),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildData {
- quux: Enum16,
- child: ChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Child {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildBuilder {
- pub bar: Enum16,
- pub baz: Enum16,
- pub quux: Enum16,
- pub payload: Option<Bytes>,
-}
-impl ChildData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 2
- }
- fn parse(bytes: &[u8], bar: Enum16, baz: Enum16) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell, bar, baz)?;
- Ok(packet)
- }
- fn parse_inner(
- mut bytes: &mut Cell<&[u8]>,
- bar: Enum16,
- baz: Enum16,
- ) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Child".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let quux = Enum16::try_from(bytes.get_mut().get_u16_le())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Child".to_string(),
- field: "quux".to_string(),
- value: bytes.get_mut().get_u16_le() as u64,
- type_: "Enum16".to_string(),
- })?;
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (bar, quux) {
- (Enum16::A, Enum16::A) if GrandChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = GrandChildData::parse_inner(&mut cell, baz)?;
- ChildDataChild::GrandChild(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ChildDataChild::None,
- };
- Ok(Self { quux, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u16_le(u16::from(self.quux));
- match &self.child {
- ChildDataChild::GrandChild(child) => child.write_to(buffer),
- ChildDataChild::Payload(payload) => buffer.put_slice(payload),
- ChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 2 + self.child.get_total_size()
- }
-}
-impl Packet for Child {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Child> for Bytes {
- fn from(packet: Child) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Child> for Vec<u8> {
- fn from(packet: Child) -> Self {
- packet.to_vec()
- }
-}
-impl From<Child> for Parent {
- fn from(packet: Child) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for Child {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<Child> {
- Child::new(packet.parent)
- }
-}
-impl Child {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ChildChild {
- match &self.child.child {
- ChildDataChild::GrandChild(_) => {
- ChildChild::GrandChild(GrandChild::new(self.parent.clone()).unwrap())
- }
- ChildDataChild::Payload(payload) => ChildChild::Payload(payload.clone()),
- ChildDataChild::None => ChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, child })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- pub fn get_quux(&self) -> Enum16 {
- self.child.as_ref().quux
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.child.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ChildBuilder {
- pub fn build(self) -> Child {
- let child = Arc::new(ChildData {
- quux: self.quux,
- child: match self.payload {
- None => ChildDataChild::None,
- Some(bytes) => ChildDataChild::Payload(bytes),
- },
- });
- let parent = Arc::new(ParentData {
- bar: self.bar,
- baz: self.baz,
- foo: Enum16::A,
- child: ParentDataChild::Child(child),
- });
- Child::new(parent).unwrap()
- }
-}
-impl From<ChildBuilder> for Parent {
- fn from(builder: ChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<ChildBuilder> for Child {
- fn from(builder: ChildBuilder) -> Child {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandChildDataChild {
- GrandGrandChild(Arc<GrandGrandChildData>),
- Payload(Bytes),
- None,
-}
-impl GrandChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- GrandChildDataChild::GrandGrandChild(value) => value.get_total_size(),
- GrandChildDataChild::Payload(bytes) => bytes.len(),
- GrandChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandChildChild {
- GrandGrandChild(GrandGrandChild),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandChildData {
- child: GrandChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- grandchild: Arc<GrandChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandChildBuilder {
- pub baz: Enum16,
- pub payload: Option<Bytes>,
-}
-impl GrandChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8], baz: Enum16) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell, baz)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>, baz: Enum16) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (baz) {
- (Enum16::A) if GrandGrandChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = GrandGrandChildData::parse_inner(&mut cell)?;
- GrandChildDataChild::GrandGrandChild(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- GrandChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => GrandChildDataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- GrandChildDataChild::GrandGrandChild(child) => child.write_to(buffer),
- GrandChildDataChild::Payload(payload) => buffer.put_slice(payload),
- GrandChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for GrandChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<GrandChild> for Bytes {
- fn from(packet: GrandChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<GrandChild> for Vec<u8> {
- fn from(packet: GrandChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<GrandChild> for Parent {
- fn from(packet: GrandChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<GrandChild> for Child {
- fn from(packet: GrandChild) -> Child {
- Child::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for GrandChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<GrandChild> {
- GrandChild::new(packet.parent)
- }
-}
-impl GrandChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> GrandChildChild {
- match &self.grandchild.child {
- GrandChildDataChild::GrandGrandChild(_) => {
- GrandChildChild::GrandGrandChild(
- GrandGrandChild::new(self.parent.clone()).unwrap(),
- )
- }
- GrandChildDataChild::Payload(payload) => {
- GrandChildChild::Payload(payload.clone())
- }
- GrandChildDataChild::None => GrandChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let grandchild = match &child.child {
- ChildDataChild::GrandChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ChildDataChild::GrandChild),
- actual: format!("{:?}", & child.child),
- });
- }
- };
- Ok(Self { parent, child, grandchild })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- pub fn get_quux(&self) -> Enum16 {
- self.child.as_ref().quux
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.grandchild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl GrandChildBuilder {
- pub fn build(self) -> GrandChild {
- let grandchild = Arc::new(GrandChildData {
- child: match self.payload {
- None => GrandChildDataChild::None,
- Some(bytes) => GrandChildDataChild::Payload(bytes),
- },
- });
- let child = Arc::new(ChildData {
- quux: Enum16::A,
- child: ChildDataChild::GrandChild(grandchild),
- });
- let parent = Arc::new(ParentData {
- bar: Enum16::A,
- baz: self.baz,
- foo: Enum16::A,
- child: ParentDataChild::Child(child),
- });
- GrandChild::new(parent).unwrap()
- }
-}
-impl From<GrandChildBuilder> for Parent {
- fn from(builder: GrandChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<GrandChildBuilder> for Child {
- fn from(builder: GrandChildBuilder) -> Child {
- builder.build().into()
- }
-}
-impl From<GrandChildBuilder> for GrandChild {
- fn from(builder: GrandChildBuilder) -> GrandChild {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandGrandChildDataChild {
- Payload(Bytes),
- None,
-}
-impl GrandGrandChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- GrandGrandChildDataChild::Payload(bytes) => bytes.len(),
- GrandGrandChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum GrandGrandChildChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandGrandChildData {
- child: GrandGrandChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandGrandChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- grandchild: Arc<GrandChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- grandgrandchild: Arc<GrandGrandChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct GrandGrandChildBuilder {
- pub payload: Option<Bytes>,
-}
-impl GrandGrandChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match () {
- _ if !payload.is_empty() => {
- GrandGrandChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => GrandGrandChildDataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- GrandGrandChildDataChild::Payload(payload) => buffer.put_slice(payload),
- GrandGrandChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for GrandGrandChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<GrandGrandChild> for Bytes {
- fn from(packet: GrandGrandChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<GrandGrandChild> for Vec<u8> {
- fn from(packet: GrandGrandChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<GrandGrandChild> for Parent {
- fn from(packet: GrandGrandChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<GrandGrandChild> for Child {
- fn from(packet: GrandGrandChild) -> Child {
- Child::new(packet.parent).unwrap()
- }
-}
-impl From<GrandGrandChild> for GrandChild {
- fn from(packet: GrandGrandChild) -> GrandChild {
- GrandChild::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for GrandGrandChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<GrandGrandChild> {
- GrandGrandChild::new(packet.parent)
- }
-}
-impl GrandGrandChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> GrandGrandChildChild {
- match &self.grandgrandchild.child {
- GrandGrandChildDataChild::Payload(payload) => {
- GrandGrandChildChild::Payload(payload.clone())
- }
- GrandGrandChildDataChild::None => GrandGrandChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let grandchild = match &child.child {
- ChildDataChild::GrandChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ChildDataChild::GrandChild),
- actual: format!("{:?}", & child.child),
- });
- }
- };
- let grandgrandchild = match &grandchild.child {
- GrandChildDataChild::GrandGrandChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(GrandChildDataChild::GrandGrandChild),
- actual: format!("{:?}", & grandchild.child),
- });
- }
- };
- Ok(Self {
- parent,
- child,
- grandchild,
- grandgrandchild,
- })
- }
- pub fn get_bar(&self) -> Enum16 {
- self.parent.as_ref().bar
- }
- pub fn get_baz(&self) -> Enum16 {
- self.parent.as_ref().baz
- }
- pub fn get_foo(&self) -> Enum16 {
- self.parent.as_ref().foo
- }
- pub fn get_quux(&self) -> Enum16 {
- self.child.as_ref().quux
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.grandgrandchild.child {
- GrandGrandChildDataChild::Payload(bytes) => &bytes,
- GrandGrandChildDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.grandgrandchild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl GrandGrandChildBuilder {
- pub fn build(self) -> GrandGrandChild {
- let grandgrandchild = Arc::new(GrandGrandChildData {
- child: match self.payload {
- None => GrandGrandChildDataChild::None,
- Some(bytes) => GrandGrandChildDataChild::Payload(bytes),
- },
- });
- let grandchild = Arc::new(GrandChildData {
- child: GrandChildDataChild::GrandGrandChild(grandgrandchild),
- });
- let child = Arc::new(ChildData {
- quux: Enum16::A,
- child: ChildDataChild::GrandChild(grandchild),
- });
- let parent = Arc::new(ParentData {
- bar: Enum16::A,
- baz: Enum16::A,
- foo: Enum16::A,
- child: ParentDataChild::Child(child),
- });
- GrandGrandChild::new(parent).unwrap()
- }
-}
-impl From<GrandGrandChildBuilder> for Parent {
- fn from(builder: GrandGrandChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<GrandGrandChildBuilder> for Child {
- fn from(builder: GrandGrandChildBuilder) -> Child {
- builder.build().into()
- }
-}
-impl From<GrandGrandChildBuilder> for GrandChild {
- fn from(builder: GrandGrandChildBuilder) -> GrandChild {
- builder.build().into()
- }
-}
-impl From<GrandGrandChildBuilder> for GrandGrandChild {
- fn from(builder: GrandGrandChildBuilder) -> GrandGrandChild {
- builder.build().into()
- }
-}
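The file removed above exercises three levels of packet inheritance: each generated packet shares its ancestors' data through Arc and exposes a builder that fills in the constrained fields. A minimal caller-side sketch, assuming the deleted module and the bytes crate were still compiled into the test crate (the function name is illustrative only):

```rust
use bytes::Bytes;

// Build the deepest packet; build() fixes the constrained ancestor fields
// (bar, foo, quux, ...) to the values required by the declaration.
fn build_grand_grand_child() -> Vec<u8> {
    let packet = GrandGrandChildBuilder {
        payload: Some(Bytes::from_static(&[0x01, 0x02])),
    }
    .build();
    // Accessors reach through the Arc-shared parent/child data.
    assert_eq!(packet.get_payload(), &[0x01u8, 0x02][..]);
    // From<GrandGrandChild> for Vec<u8> serializes through the Parent view.
    packet.into()
}
```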
diff --git a/tools/pdl/tests/generated/packet_decl_mask_scalar_value_big_endian.rs b/tools/pdl/tests/generated/packet_decl_mask_scalar_value_big_endian.rs
deleted file mode 100644
index 581542d44e..0000000000
--- a/tools/pdl/tests/generated/packet_decl_mask_scalar_value_big_endian.rs
+++ /dev/null
@@ -1,173 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: u32,
- c: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: u32,
- pub c: u8,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 4
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 4 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 4,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u32();
- let a = (chunk & 0x3) as u8;
- let b = ((chunk >> 2) & 0xff_ffff);
- let c = ((chunk >> 26) & 0x3f) as u8;
- Ok(Self { a, b, c })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0x3 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0x3);
- }
- if self.b > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "b", self.b, 0xff_ffff);
- }
- if self.c > 0x3f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "c", self.c, 0x3f);
- }
- let value = (self.a as u32) | (self.b << 2) | ((self.c as u32) << 26);
- buffer.put_u32(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 4
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> u32 {
- self.foo.as_ref().b
- }
- pub fn get_c(&self) -> u8 {
- self.foo.as_ref().c
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- c: self.c,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
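The file removed above packs three masked scalars (a: 2 bits, b: 24 bits, c: 6 bits) into a single big-endian u32. A standalone sketch of the same shift-and-mask arithmetic, independent of the generated types:

```rust
// Pack a (2 bits), b (24 bits) and c (6 bits) into one big-endian u32,
// mirroring FooData::write_to / parse_inner above.
fn pack(a: u8, b: u32, c: u8) -> [u8; 4] {
    assert!(a <= 0x3 && b <= 0xff_ffff && c <= 0x3f);
    let value = (a as u32) | (b << 2) | ((c as u32) << 26);
    value.to_be_bytes()
}

fn unpack(bytes: [u8; 4]) -> (u8, u32, u8) {
    let chunk = u32::from_be_bytes(bytes);
    ((chunk & 0x3) as u8, (chunk >> 2) & 0xff_ffff, ((chunk >> 26) & 0x3f) as u8)
}

fn main() {
    let wire = pack(0x2, 0xab_cdef, 0x15);
    assert_eq!(unpack(wire), (0x2u8, 0xab_cdefu32, 0x15u8));
}
```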
diff --git a/tools/pdl/tests/generated/packet_decl_mask_scalar_value_little_endian.rs b/tools/pdl/tests/generated/packet_decl_mask_scalar_value_little_endian.rs
deleted file mode 100644
index 9bce70a052..0000000000
--- a/tools/pdl/tests/generated/packet_decl_mask_scalar_value_little_endian.rs
+++ /dev/null
@@ -1,173 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: u32,
- c: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: u32,
- pub c: u8,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 4
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 4 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 4,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u32_le();
- let a = (chunk & 0x3) as u8;
- let b = ((chunk >> 2) & 0xff_ffff);
- let c = ((chunk >> 26) & 0x3f) as u8;
- Ok(Self { a, b, c })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0x3 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0x3);
- }
- if self.b > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "b", self.b, 0xff_ffff);
- }
- if self.c > 0x3f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "c", self.c, 0x3f);
- }
- let value = (self.a as u32) | (self.b << 2) | ((self.c as u32) << 26);
- buffer.put_u32_le(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 4
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> u32 {
- self.foo.as_ref().b
- }
- pub fn get_c(&self) -> u8 {
- self.foo.as_ref().c
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- c: self.c,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
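The little-endian twin deleted above differs from the big-endian file only in its get_u32_le/put_u32_le calls; the shift-and-mask logic is identical. The byte-order difference in isolation:

```rust
fn main() {
    // The same packed 32-bit value serializes with reversed byte order under LE.
    let value: u32 = 0x56af_37be;
    let be = value.to_be_bytes();
    let le = value.to_le_bytes();
    assert_eq!(le, [be[3], be[2], be[1], be[0]]);
}
```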
diff --git a/tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs b/tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs
deleted file mode 100644
index 87d0ecf28a..0000000000
--- a/tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs
+++ /dev/null
@@ -1,312 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum7 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u8> for Enum7 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum7::A),
- 0x2 => Ok(Enum7::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum7> for u8 {
- fn from(value: &Enum7) -> Self {
- match value {
- Enum7::A => 0x1,
- Enum7::B => 0x2,
- }
- }
-}
-impl From<Enum7> for u8 {
- fn from(value: Enum7) -> Self {
- (&value).into()
- }
-}
-impl From<Enum7> for i8 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u16", into = "u16"))]
-pub enum Enum9 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u16> for Enum9 {
- type Error = u16;
- fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum9::A),
- 0x2 => Ok(Enum9::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum9> for u16 {
- fn from(value: &Enum9) -> Self {
- match value {
- Enum9::A => 0x1,
- Enum9::B => 0x2,
- }
- }
-}
-impl From<Enum9> for u16 {
- fn from(value: Enum9) -> Self {
- (&value).into()
- }
-}
-impl From<Enum9> for i16 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for i32 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for i64 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for u32 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for u64 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: Enum7,
- y: u8,
- z: Enum9,
- w: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub w: u8,
- pub x: Enum7,
- pub y: u8,
- pub z: Enum9,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_uint(3) as u32;
- let x = Enum7::try_from((chunk & 0x7f) as u8)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Foo".to_string(),
- field: "x".to_string(),
- value: (chunk & 0x7f) as u8 as u64,
- type_: "Enum7".to_string(),
- })?;
- let y = ((chunk >> 7) & 0x1f) as u8;
- let z = Enum9::try_from(((chunk >> 12) & 0x1ff) as u16)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Foo".to_string(),
- field: "z".to_string(),
- value: ((chunk >> 12) & 0x1ff) as u16 as u64,
- type_: "Enum9".to_string(),
- })?;
- let w = ((chunk >> 21) & 0x7) as u8;
- Ok(Self { x, y, z, w })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.y > 0x1f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "y", self.y, 0x1f);
- }
- if self.w > 0x7 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "w", self.w, 0x7);
- }
- let value = (u8::from(self.x) as u32) | ((self.y as u32) << 7)
- | ((u16::from(self.z) as u32) << 12) | ((self.w as u32) << 21);
- buffer.put_uint(value as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_w(&self) -> u8 {
- self.foo.as_ref().w
- }
- pub fn get_x(&self) -> Enum7 {
- self.foo.as_ref().x
- }
- pub fn get_y(&self) -> u8 {
- self.foo.as_ref().y
- }
- pub fn get_z(&self) -> Enum9 {
- self.foo.as_ref().z
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- w: self.w,
- x: self.x,
- y: self.y,
- z: self.z,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
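The file deleted above mixes plain scalars with 7- and 9-bit enums inside a 3-byte chunk, decoding each enum through a fallible TryFrom. A standalone sketch of that decode step, with the enum reduced to the essentials and named after the generated one:

```rust
#[derive(Debug, PartialEq)]
enum Enum7 {
    A = 0x1,
    B = 0x2,
}

impl TryFrom<u8> for Enum7 {
    type Error = u8;
    fn try_from(value: u8) -> Result<Self, u8> {
        match value {
            0x1 => Ok(Enum7::A),
            0x2 => Ok(Enum7::B),
            other => Err(other),
        }
    }
}

fn main() {
    // Layout of the 24-bit chunk: x (7 bits), y (5 bits), z (9 bits), w (3 bits).
    let chunk: u32 = 0x2 | (0x1f << 7) | (0x1 << 12) | (0x5 << 21);
    assert_eq!(Enum7::try_from((chunk & 0x7f) as u8), Ok(Enum7::B));
    assert_eq!((chunk >> 7) & 0x1f, 0x1f); // plain scalar field y
    assert!(Enum7::try_from(0x7f).is_err()); // unknown values surface as errors
}
```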
diff --git a/tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs b/tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs
deleted file mode 100644
index 85297cc586..0000000000
--- a/tools/pdl/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs
+++ /dev/null
@@ -1,312 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum7 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u8> for Enum7 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum7::A),
- 0x2 => Ok(Enum7::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum7> for u8 {
- fn from(value: &Enum7) -> Self {
- match value {
- Enum7::A => 0x1,
- Enum7::B => 0x2,
- }
- }
-}
-impl From<Enum7> for u8 {
- fn from(value: Enum7) -> Self {
- (&value).into()
- }
-}
-impl From<Enum7> for i8 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for i64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u16 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u32 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum7> for u64 {
- fn from(value: Enum7) -> Self {
- u8::from(value) as Self
- }
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u16", into = "u16"))]
-pub enum Enum9 {
- A = 0x1,
- B = 0x2,
-}
-impl TryFrom<u16> for Enum9 {
- type Error = u16;
- fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x1 => Ok(Enum9::A),
- 0x2 => Ok(Enum9::B),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum9> for u16 {
- fn from(value: &Enum9) -> Self {
- match value {
- Enum9::A => 0x1,
- Enum9::B => 0x2,
- }
- }
-}
-impl From<Enum9> for u16 {
- fn from(value: Enum9) -> Self {
- (&value).into()
- }
-}
-impl From<Enum9> for i16 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for i32 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for i64 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for u32 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-impl From<Enum9> for u64 {
- fn from(value: Enum9) -> Self {
- u16::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: Enum7,
- y: u8,
- z: Enum9,
- w: u8,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub w: u8,
- pub x: Enum7,
- pub y: u8,
- pub z: Enum9,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_uint_le(3) as u32;
- let x = Enum7::try_from((chunk & 0x7f) as u8)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Foo".to_string(),
- field: "x".to_string(),
- value: (chunk & 0x7f) as u8 as u64,
- type_: "Enum7".to_string(),
- })?;
- let y = ((chunk >> 7) & 0x1f) as u8;
- let z = Enum9::try_from(((chunk >> 12) & 0x1ff) as u16)
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Foo".to_string(),
- field: "z".to_string(),
- value: ((chunk >> 12) & 0x1ff) as u16 as u64,
- type_: "Enum9".to_string(),
- })?;
- let w = ((chunk >> 21) & 0x7) as u8;
- Ok(Self { x, y, z, w })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.y > 0x1f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "y", self.y, 0x1f);
- }
- if self.w > 0x7 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "w", self.w, 0x7);
- }
- let value = (u8::from(self.x) as u32) | ((self.y as u32) << 7)
- | ((u16::from(self.z) as u32) << 12) | ((self.w as u32) << 21);
- buffer.put_uint_le(value as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_w(&self) -> u8 {
- self.foo.as_ref().w
- }
- pub fn get_x(&self) -> Enum7 {
- self.foo.as_ref().x
- }
- pub fn get_y(&self) -> u8 {
- self.foo.as_ref().y
- }
- pub fn get_z(&self) -> Enum9 {
- self.foo.as_ref().z
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- w: self.w,
- x: self.x,
- y: self.y,
- z: self.z,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
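As with the other endian pairs, the little-endian file above only swaps in get_uint_le/put_uint_le; what is notable is that the chunk spans 3 bytes rather than a whole word. A quick illustration of how the bytes crate handles such odd-width integers:

```rust
use bytes::{Buf, BufMut, BytesMut};

fn main() {
    let mut buf = BytesMut::new();
    buf.put_uint_le(0x0001_e2a2, 3); // writes only the three low-order bytes
    assert_eq!(&buf[..], &[0xa2u8, 0xe2, 0x01][..]);

    let mut cursor: &[u8] = &buf;
    assert_eq!(cursor.get_uint_le(3), 0x0001_e2a2);
}
```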
diff --git a/tools/pdl/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs b/tools/pdl/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs
deleted file mode 100644
index a2c69a67af..0000000000
--- a/tools/pdl/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs
+++ /dev/null
@@ -1,945 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum8 {
- A = 0x0,
- B = 0x1,
- C = 0x2,
-}
-impl TryFrom<u8> for Enum8 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(Enum8::A),
- 0x1 => Ok(Enum8::B),
- 0x2 => Ok(Enum8::C),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum8> for u8 {
- fn from(value: &Enum8) -> Self {
- match value {
- Enum8::A => 0x0,
- Enum8::B => 0x1,
- Enum8::C => 0x2,
- }
- }
-}
-impl From<Enum8> for u8 {
- fn from(value: Enum8) -> Self {
- (&value).into()
- }
-}
-impl From<Enum8> for i16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentDataChild {
- AliasChild(Arc<AliasChildData>),
- NormalChild(Arc<NormalChildData>),
- Payload(Bytes),
- None,
-}
-impl ParentDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ParentDataChild::AliasChild(value) => value.get_total_size(),
- ParentDataChild::NormalChild(value) => value.get_total_size(),
- ParentDataChild::Payload(bytes) => bytes.len(),
- ParentDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentChild {
- AliasChild(AliasChild),
- NormalChild(NormalChild),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentData {
- v: Enum8,
- child: ParentDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Parent {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentBuilder {
- pub v: Enum8,
- pub payload: Option<Bytes>,
-}
-impl ParentData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let v = Enum8::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "v".to_string(),
- value: bytes.get_mut().get_u8() as u64,
- type_: "Enum8".to_string(),
- })?;
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (v) {
- (Enum8::B | Enum8::C) if AliasChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = AliasChildData::parse_inner(&mut cell, v)?;
- ParentDataChild::AliasChild(Arc::new(child_data))
- }
- (Enum8::A) if NormalChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = NormalChildData::parse_inner(&mut cell)?;
- ParentDataChild::NormalChild(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ParentDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ParentDataChild::None,
- };
- Ok(Self { v, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(u8::from(self.v));
- match &self.child {
- ParentDataChild::AliasChild(child) => child.write_to(buffer),
- ParentDataChild::NormalChild(child) => child.write_to(buffer),
- ParentDataChild::Payload(payload) => buffer.put_slice(payload),
- ParentDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1 + self.child.get_total_size()
- }
-}
-impl Packet for Parent {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Parent> for Bytes {
- fn from(packet: Parent) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Parent> for Vec<u8> {
- fn from(packet: Parent) -> Self {
- packet.to_vec()
- }
-}
-impl Parent {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ParentChild {
- match &self.parent.child {
- ParentDataChild::AliasChild(_) => {
- ParentChild::AliasChild(AliasChild::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::NormalChild(_) => {
- ParentChild::NormalChild(NormalChild::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::Payload(payload) => ParentChild::Payload(payload.clone()),
- ParentDataChild::None => ParentChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- Ok(Self { parent })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.parent.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ParentBuilder {
- pub fn build(self) -> Parent {
- let parent = Arc::new(ParentData {
- v: self.v,
- child: match self.payload {
- None => ParentDataChild::None,
- Some(bytes) => ParentDataChild::Payload(bytes),
- },
- });
- Parent::new(parent).unwrap()
- }
-}
-impl From<ParentBuilder> for Parent {
- fn from(builder: ParentBuilder) -> Parent {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum AliasChildDataChild {
- NormalGrandChild1(Arc<NormalGrandChild1Data>),
- NormalGrandChild2(Arc<NormalGrandChild2Data>),
- Payload(Bytes),
- None,
-}
-impl AliasChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- AliasChildDataChild::NormalGrandChild1(value) => value.get_total_size(),
- AliasChildDataChild::NormalGrandChild2(value) => value.get_total_size(),
- AliasChildDataChild::Payload(bytes) => bytes.len(),
- AliasChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum AliasChildChild {
- NormalGrandChild1(NormalGrandChild1),
- NormalGrandChild2(NormalGrandChild2),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct AliasChildData {
- child: AliasChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct AliasChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- aliaschild: Arc<AliasChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct AliasChildBuilder {
- pub v: Enum8,
- pub payload: Option<Bytes>,
-}
-impl AliasChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8], v: Enum8) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell, v)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>, v: Enum8) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (v) {
- (Enum8::B) if NormalGrandChild1Data::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = NormalGrandChild1Data::parse_inner(&mut cell)?;
- AliasChildDataChild::NormalGrandChild1(Arc::new(child_data))
- }
- (Enum8::C) if NormalGrandChild2Data::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = NormalGrandChild2Data::parse_inner(&mut cell)?;
- AliasChildDataChild::NormalGrandChild2(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- AliasChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => AliasChildDataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- AliasChildDataChild::NormalGrandChild1(child) => child.write_to(buffer),
- AliasChildDataChild::NormalGrandChild2(child) => child.write_to(buffer),
- AliasChildDataChild::Payload(payload) => buffer.put_slice(payload),
- AliasChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for AliasChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<AliasChild> for Bytes {
- fn from(packet: AliasChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<AliasChild> for Vec<u8> {
- fn from(packet: AliasChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<AliasChild> for Parent {
- fn from(packet: AliasChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for AliasChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<AliasChild> {
- AliasChild::new(packet.parent)
- }
-}
-impl AliasChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> AliasChildChild {
- match &self.aliaschild.child {
- AliasChildDataChild::NormalGrandChild1(_) => {
- AliasChildChild::NormalGrandChild1(
- NormalGrandChild1::new(self.parent.clone()).unwrap(),
- )
- }
- AliasChildDataChild::NormalGrandChild2(_) => {
- AliasChildChild::NormalGrandChild2(
- NormalGrandChild2::new(self.parent.clone()).unwrap(),
- )
- }
- AliasChildDataChild::Payload(payload) => {
- AliasChildChild::Payload(payload.clone())
- }
- AliasChildDataChild::None => AliasChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let aliaschild = match &parent.child {
- ParentDataChild::AliasChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::AliasChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, aliaschild })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.aliaschild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl AliasChildBuilder {
- pub fn build(self) -> AliasChild {
- let aliaschild = Arc::new(AliasChildData {
- child: match self.payload {
- None => AliasChildDataChild::None,
- Some(bytes) => AliasChildDataChild::Payload(bytes),
- },
- });
- let parent = Arc::new(ParentData {
- v: self.v,
- child: ParentDataChild::AliasChild(aliaschild),
- });
- AliasChild::new(parent).unwrap()
- }
-}
-impl From<AliasChildBuilder> for Parent {
- fn from(builder: AliasChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<AliasChildBuilder> for AliasChild {
- fn from(builder: AliasChildBuilder) -> AliasChild {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalChildData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- normalchild: Arc<NormalChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalChildBuilder {}
-impl NormalChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for NormalChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<NormalChild> for Bytes {
- fn from(packet: NormalChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<NormalChild> for Vec<u8> {
- fn from(packet: NormalChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<NormalChild> for Parent {
- fn from(packet: NormalChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for NormalChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<NormalChild> {
- NormalChild::new(packet.parent)
- }
-}
-impl NormalChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let normalchild = match &parent.child {
- ParentDataChild::NormalChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::NormalChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, normalchild })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.normalchild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl NormalChildBuilder {
- pub fn build(self) -> NormalChild {
- let normalchild = Arc::new(NormalChildData {});
- let parent = Arc::new(ParentData {
- v: Enum8::A,
- child: ParentDataChild::NormalChild(normalchild),
- });
- NormalChild::new(parent).unwrap()
- }
-}
-impl From<NormalChildBuilder> for Parent {
- fn from(builder: NormalChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<NormalChildBuilder> for NormalChild {
- fn from(builder: NormalChildBuilder) -> NormalChild {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild1Data {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild1 {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- aliaschild: Arc<AliasChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- normalgrandchild1: Arc<NormalGrandChild1Data>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild1Builder {}
-impl NormalGrandChild1Data {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for NormalGrandChild1 {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<NormalGrandChild1> for Bytes {
- fn from(packet: NormalGrandChild1) -> Self {
- packet.to_bytes()
- }
-}
-impl From<NormalGrandChild1> for Vec<u8> {
- fn from(packet: NormalGrandChild1) -> Self {
- packet.to_vec()
- }
-}
-impl From<NormalGrandChild1> for Parent {
- fn from(packet: NormalGrandChild1) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<NormalGrandChild1> for AliasChild {
- fn from(packet: NormalGrandChild1) -> AliasChild {
- AliasChild::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for NormalGrandChild1 {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<NormalGrandChild1> {
- NormalGrandChild1::new(packet.parent)
- }
-}
-impl NormalGrandChild1 {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let aliaschild = match &parent.child {
- ParentDataChild::AliasChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::AliasChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let normalgrandchild1 = match &aliaschild.child {
- AliasChildDataChild::NormalGrandChild1(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(AliasChildDataChild::NormalGrandChild1),
- actual: format!("{:?}", & aliaschild.child),
- });
- }
- };
- Ok(Self {
- parent,
- aliaschild,
- normalgrandchild1,
- })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.normalgrandchild1.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl NormalGrandChild1Builder {
- pub fn build(self) -> NormalGrandChild1 {
- let normalgrandchild1 = Arc::new(NormalGrandChild1Data {});
- let aliaschild = Arc::new(AliasChildData {
- child: AliasChildDataChild::NormalGrandChild1(normalgrandchild1),
- });
- let parent = Arc::new(ParentData {
- v: Enum8::B,
- child: ParentDataChild::AliasChild(aliaschild),
- });
- NormalGrandChild1::new(parent).unwrap()
- }
-}
-impl From<NormalGrandChild1Builder> for Parent {
- fn from(builder: NormalGrandChild1Builder) -> Parent {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild1Builder> for AliasChild {
- fn from(builder: NormalGrandChild1Builder) -> AliasChild {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild1Builder> for NormalGrandChild1 {
- fn from(builder: NormalGrandChild1Builder) -> NormalGrandChild1 {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum NormalGrandChild2DataChild {
- Payload(Bytes),
- None,
-}
-impl NormalGrandChild2DataChild {
- fn get_total_size(&self) -> usize {
- match self {
- NormalGrandChild2DataChild::Payload(bytes) => bytes.len(),
- NormalGrandChild2DataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum NormalGrandChild2Child {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild2Data {
- child: NormalGrandChild2DataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild2 {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- aliaschild: Arc<AliasChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- normalgrandchild2: Arc<NormalGrandChild2Data>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild2Builder {
- pub payload: Option<Bytes>,
-}
-impl NormalGrandChild2Data {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match () {
- _ if !payload.is_empty() => {
- NormalGrandChild2DataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => NormalGrandChild2DataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- NormalGrandChild2DataChild::Payload(payload) => buffer.put_slice(payload),
- NormalGrandChild2DataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for NormalGrandChild2 {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<NormalGrandChild2> for Bytes {
- fn from(packet: NormalGrandChild2) -> Self {
- packet.to_bytes()
- }
-}
-impl From<NormalGrandChild2> for Vec<u8> {
- fn from(packet: NormalGrandChild2) -> Self {
- packet.to_vec()
- }
-}
-impl From<NormalGrandChild2> for Parent {
- fn from(packet: NormalGrandChild2) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<NormalGrandChild2> for AliasChild {
- fn from(packet: NormalGrandChild2) -> AliasChild {
- AliasChild::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for NormalGrandChild2 {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<NormalGrandChild2> {
- NormalGrandChild2::new(packet.parent)
- }
-}
-impl NormalGrandChild2 {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> NormalGrandChild2Child {
- match &self.normalgrandchild2.child {
- NormalGrandChild2DataChild::Payload(payload) => {
- NormalGrandChild2Child::Payload(payload.clone())
- }
- NormalGrandChild2DataChild::None => NormalGrandChild2Child::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let aliaschild = match &parent.child {
- ParentDataChild::AliasChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::AliasChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let normalgrandchild2 = match &aliaschild.child {
- AliasChildDataChild::NormalGrandChild2(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(AliasChildDataChild::NormalGrandChild2),
- actual: format!("{:?}", & aliaschild.child),
- });
- }
- };
- Ok(Self {
- parent,
- aliaschild,
- normalgrandchild2,
- })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.normalgrandchild2.child {
- NormalGrandChild2DataChild::Payload(bytes) => &bytes,
- NormalGrandChild2DataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.normalgrandchild2.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl NormalGrandChild2Builder {
- pub fn build(self) -> NormalGrandChild2 {
- let normalgrandchild2 = Arc::new(NormalGrandChild2Data {
- child: match self.payload {
- None => NormalGrandChild2DataChild::None,
- Some(bytes) => NormalGrandChild2DataChild::Payload(bytes),
- },
- });
- let aliaschild = Arc::new(AliasChildData {
- child: AliasChildDataChild::NormalGrandChild2(normalgrandchild2),
- });
- let parent = Arc::new(ParentData {
- v: Enum8::C,
- child: ParentDataChild::AliasChild(aliaschild),
- });
- NormalGrandChild2::new(parent).unwrap()
- }
-}
-impl From<NormalGrandChild2Builder> for Parent {
- fn from(builder: NormalGrandChild2Builder) -> Parent {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild2Builder> for AliasChild {
- fn from(builder: NormalGrandChild2Builder) -> AliasChild {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild2Builder> for NormalGrandChild2 {
- fn from(builder: NormalGrandChild2Builder) -> NormalGrandChild2 {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs b/tools/pdl/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs
deleted file mode 100644
index a2c69a67af..0000000000
--- a/tools/pdl/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs
+++ /dev/null
@@ -1,945 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum8 {
- A = 0x0,
- B = 0x1,
- C = 0x2,
-}
-impl TryFrom<u8> for Enum8 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(Enum8::A),
- 0x1 => Ok(Enum8::B),
- 0x2 => Ok(Enum8::C),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum8> for u8 {
- fn from(value: &Enum8) -> Self {
- match value {
- Enum8::A => 0x0,
- Enum8::B => 0x1,
- Enum8::C => 0x2,
- }
- }
-}
-impl From<Enum8> for u8 {
- fn from(value: Enum8) -> Self {
- (&value).into()
- }
-}
-impl From<Enum8> for i16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentDataChild {
- AliasChild(Arc<AliasChildData>),
- NormalChild(Arc<NormalChildData>),
- Payload(Bytes),
- None,
-}
-impl ParentDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ParentDataChild::AliasChild(value) => value.get_total_size(),
- ParentDataChild::NormalChild(value) => value.get_total_size(),
- ParentDataChild::Payload(bytes) => bytes.len(),
- ParentDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentChild {
- AliasChild(AliasChild),
- NormalChild(NormalChild),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentData {
- v: Enum8,
- child: ParentDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Parent {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentBuilder {
- pub v: Enum8,
- pub payload: Option<Bytes>,
-}
-impl ParentData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let v = Enum8::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "v".to_string(),
- value: bytes.get_mut().get_u8() as u64,
- type_: "Enum8".to_string(),
- })?;
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (v) {
- (Enum8::B | Enum8::C) if AliasChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = AliasChildData::parse_inner(&mut cell, v)?;
- ParentDataChild::AliasChild(Arc::new(child_data))
- }
- (Enum8::A) if NormalChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = NormalChildData::parse_inner(&mut cell)?;
- ParentDataChild::NormalChild(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ParentDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ParentDataChild::None,
- };
- Ok(Self { v, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(u8::from(self.v));
- match &self.child {
- ParentDataChild::AliasChild(child) => child.write_to(buffer),
- ParentDataChild::NormalChild(child) => child.write_to(buffer),
- ParentDataChild::Payload(payload) => buffer.put_slice(payload),
- ParentDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1 + self.child.get_total_size()
- }
-}
-impl Packet for Parent {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Parent> for Bytes {
- fn from(packet: Parent) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Parent> for Vec<u8> {
- fn from(packet: Parent) -> Self {
- packet.to_vec()
- }
-}
-impl Parent {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ParentChild {
- match &self.parent.child {
- ParentDataChild::AliasChild(_) => {
- ParentChild::AliasChild(AliasChild::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::NormalChild(_) => {
- ParentChild::NormalChild(NormalChild::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::Payload(payload) => ParentChild::Payload(payload.clone()),
- ParentDataChild::None => ParentChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- Ok(Self { parent })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.parent.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ParentBuilder {
- pub fn build(self) -> Parent {
- let parent = Arc::new(ParentData {
- v: self.v,
- child: match self.payload {
- None => ParentDataChild::None,
- Some(bytes) => ParentDataChild::Payload(bytes),
- },
- });
- Parent::new(parent).unwrap()
- }
-}
-impl From<ParentBuilder> for Parent {
- fn from(builder: ParentBuilder) -> Parent {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum AliasChildDataChild {
- NormalGrandChild1(Arc<NormalGrandChild1Data>),
- NormalGrandChild2(Arc<NormalGrandChild2Data>),
- Payload(Bytes),
- None,
-}
-impl AliasChildDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- AliasChildDataChild::NormalGrandChild1(value) => value.get_total_size(),
- AliasChildDataChild::NormalGrandChild2(value) => value.get_total_size(),
- AliasChildDataChild::Payload(bytes) => bytes.len(),
- AliasChildDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum AliasChildChild {
- NormalGrandChild1(NormalGrandChild1),
- NormalGrandChild2(NormalGrandChild2),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct AliasChildData {
- child: AliasChildDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct AliasChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- aliaschild: Arc<AliasChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct AliasChildBuilder {
- pub v: Enum8,
- pub payload: Option<Bytes>,
-}
-impl AliasChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8], v: Enum8) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell, v)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>, v: Enum8) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match (v) {
- (Enum8::B) if NormalGrandChild1Data::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = NormalGrandChild1Data::parse_inner(&mut cell)?;
- AliasChildDataChild::NormalGrandChild1(Arc::new(child_data))
- }
- (Enum8::C) if NormalGrandChild2Data::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = NormalGrandChild2Data::parse_inner(&mut cell)?;
- AliasChildDataChild::NormalGrandChild2(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- AliasChildDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => AliasChildDataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- AliasChildDataChild::NormalGrandChild1(child) => child.write_to(buffer),
- AliasChildDataChild::NormalGrandChild2(child) => child.write_to(buffer),
- AliasChildDataChild::Payload(payload) => buffer.put_slice(payload),
- AliasChildDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for AliasChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<AliasChild> for Bytes {
- fn from(packet: AliasChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<AliasChild> for Vec<u8> {
- fn from(packet: AliasChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<AliasChild> for Parent {
- fn from(packet: AliasChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for AliasChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<AliasChild> {
- AliasChild::new(packet.parent)
- }
-}
-impl AliasChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> AliasChildChild {
- match &self.aliaschild.child {
- AliasChildDataChild::NormalGrandChild1(_) => {
- AliasChildChild::NormalGrandChild1(
- NormalGrandChild1::new(self.parent.clone()).unwrap(),
- )
- }
- AliasChildDataChild::NormalGrandChild2(_) => {
- AliasChildChild::NormalGrandChild2(
- NormalGrandChild2::new(self.parent.clone()).unwrap(),
- )
- }
- AliasChildDataChild::Payload(payload) => {
- AliasChildChild::Payload(payload.clone())
- }
- AliasChildDataChild::None => AliasChildChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let aliaschild = match &parent.child {
- ParentDataChild::AliasChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::AliasChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, aliaschild })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.aliaschild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl AliasChildBuilder {
- pub fn build(self) -> AliasChild {
- let aliaschild = Arc::new(AliasChildData {
- child: match self.payload {
- None => AliasChildDataChild::None,
- Some(bytes) => AliasChildDataChild::Payload(bytes),
- },
- });
- let parent = Arc::new(ParentData {
- v: self.v,
- child: ParentDataChild::AliasChild(aliaschild),
- });
- AliasChild::new(parent).unwrap()
- }
-}
-impl From<AliasChildBuilder> for Parent {
- fn from(builder: AliasChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<AliasChildBuilder> for AliasChild {
- fn from(builder: AliasChildBuilder) -> AliasChild {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalChildData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalChild {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- normalchild: Arc<NormalChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalChildBuilder {}
-impl NormalChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for NormalChild {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<NormalChild> for Bytes {
- fn from(packet: NormalChild) -> Self {
- packet.to_bytes()
- }
-}
-impl From<NormalChild> for Vec<u8> {
- fn from(packet: NormalChild) -> Self {
- packet.to_vec()
- }
-}
-impl From<NormalChild> for Parent {
- fn from(packet: NormalChild) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for NormalChild {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<NormalChild> {
- NormalChild::new(packet.parent)
- }
-}
-impl NormalChild {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let normalchild = match &parent.child {
- ParentDataChild::NormalChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::NormalChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, normalchild })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.normalchild.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl NormalChildBuilder {
- pub fn build(self) -> NormalChild {
- let normalchild = Arc::new(NormalChildData {});
- let parent = Arc::new(ParentData {
- v: Enum8::A,
- child: ParentDataChild::NormalChild(normalchild),
- });
- NormalChild::new(parent).unwrap()
- }
-}
-impl From<NormalChildBuilder> for Parent {
- fn from(builder: NormalChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<NormalChildBuilder> for NormalChild {
- fn from(builder: NormalChildBuilder) -> NormalChild {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild1Data {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild1 {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- aliaschild: Arc<AliasChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- normalgrandchild1: Arc<NormalGrandChild1Data>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild1Builder {}
-impl NormalGrandChild1Data {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for NormalGrandChild1 {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<NormalGrandChild1> for Bytes {
- fn from(packet: NormalGrandChild1) -> Self {
- packet.to_bytes()
- }
-}
-impl From<NormalGrandChild1> for Vec<u8> {
- fn from(packet: NormalGrandChild1) -> Self {
- packet.to_vec()
- }
-}
-impl From<NormalGrandChild1> for Parent {
- fn from(packet: NormalGrandChild1) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<NormalGrandChild1> for AliasChild {
- fn from(packet: NormalGrandChild1) -> AliasChild {
- AliasChild::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for NormalGrandChild1 {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<NormalGrandChild1> {
- NormalGrandChild1::new(packet.parent)
- }
-}
-impl NormalGrandChild1 {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let aliaschild = match &parent.child {
- ParentDataChild::AliasChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::AliasChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let normalgrandchild1 = match &aliaschild.child {
- AliasChildDataChild::NormalGrandChild1(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(AliasChildDataChild::NormalGrandChild1),
- actual: format!("{:?}", & aliaschild.child),
- });
- }
- };
- Ok(Self {
- parent,
- aliaschild,
- normalgrandchild1,
- })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.normalgrandchild1.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl NormalGrandChild1Builder {
- pub fn build(self) -> NormalGrandChild1 {
- let normalgrandchild1 = Arc::new(NormalGrandChild1Data {});
- let aliaschild = Arc::new(AliasChildData {
- child: AliasChildDataChild::NormalGrandChild1(normalgrandchild1),
- });
- let parent = Arc::new(ParentData {
- v: Enum8::B,
- child: ParentDataChild::AliasChild(aliaschild),
- });
- NormalGrandChild1::new(parent).unwrap()
- }
-}
-impl From<NormalGrandChild1Builder> for Parent {
- fn from(builder: NormalGrandChild1Builder) -> Parent {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild1Builder> for AliasChild {
- fn from(builder: NormalGrandChild1Builder) -> AliasChild {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild1Builder> for NormalGrandChild1 {
- fn from(builder: NormalGrandChild1Builder) -> NormalGrandChild1 {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum NormalGrandChild2DataChild {
- Payload(Bytes),
- None,
-}
-impl NormalGrandChild2DataChild {
- fn get_total_size(&self) -> usize {
- match self {
- NormalGrandChild2DataChild::Payload(bytes) => bytes.len(),
- NormalGrandChild2DataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum NormalGrandChild2Child {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild2Data {
- child: NormalGrandChild2DataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild2 {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- aliaschild: Arc<AliasChildData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- normalgrandchild2: Arc<NormalGrandChild2Data>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct NormalGrandChild2Builder {
- pub payload: Option<Bytes>,
-}
-impl NormalGrandChild2Data {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match () {
- _ if !payload.is_empty() => {
- NormalGrandChild2DataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => NormalGrandChild2DataChild::None,
- };
- Ok(Self { child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- NormalGrandChild2DataChild::Payload(payload) => buffer.put_slice(payload),
- NormalGrandChild2DataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- self.child.get_total_size()
- }
-}
-impl Packet for NormalGrandChild2 {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<NormalGrandChild2> for Bytes {
- fn from(packet: NormalGrandChild2) -> Self {
- packet.to_bytes()
- }
-}
-impl From<NormalGrandChild2> for Vec<u8> {
- fn from(packet: NormalGrandChild2) -> Self {
- packet.to_vec()
- }
-}
-impl From<NormalGrandChild2> for Parent {
- fn from(packet: NormalGrandChild2) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl From<NormalGrandChild2> for AliasChild {
- fn from(packet: NormalGrandChild2) -> AliasChild {
- AliasChild::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for NormalGrandChild2 {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<NormalGrandChild2> {
- NormalGrandChild2::new(packet.parent)
- }
-}
-impl NormalGrandChild2 {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> NormalGrandChild2Child {
- match &self.normalgrandchild2.child {
- NormalGrandChild2DataChild::Payload(payload) => {
- NormalGrandChild2Child::Payload(payload.clone())
- }
- NormalGrandChild2DataChild::None => NormalGrandChild2Child::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let aliaschild = match &parent.child {
- ParentDataChild::AliasChild(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::AliasChild),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- let normalgrandchild2 = match &aliaschild.child {
- AliasChildDataChild::NormalGrandChild2(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(AliasChildDataChild::NormalGrandChild2),
- actual: format!("{:?}", & aliaschild.child),
- });
- }
- };
- Ok(Self {
- parent,
- aliaschild,
- normalgrandchild2,
- })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.normalgrandchild2.child {
- NormalGrandChild2DataChild::Payload(bytes) => &bytes,
- NormalGrandChild2DataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.normalgrandchild2.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl NormalGrandChild2Builder {
- pub fn build(self) -> NormalGrandChild2 {
- let normalgrandchild2 = Arc::new(NormalGrandChild2Data {
- child: match self.payload {
- None => NormalGrandChild2DataChild::None,
- Some(bytes) => NormalGrandChild2DataChild::Payload(bytes),
- },
- });
- let aliaschild = Arc::new(AliasChildData {
- child: AliasChildDataChild::NormalGrandChild2(normalgrandchild2),
- });
- let parent = Arc::new(ParentData {
- v: Enum8::C,
- child: ParentDataChild::AliasChild(aliaschild),
- });
- NormalGrandChild2::new(parent).unwrap()
- }
-}
-impl From<NormalGrandChild2Builder> for Parent {
- fn from(builder: NormalGrandChild2Builder) -> Parent {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild2Builder> for AliasChild {
- fn from(builder: NormalGrandChild2Builder) -> AliasChild {
- builder.build().into()
- }
-}
-impl From<NormalGrandChild2Builder> for NormalGrandChild2 {
- fn from(builder: NormalGrandChild2Builder) -> NormalGrandChild2 {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs b/tools/pdl/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs
deleted file mode 100644
index 35c6daca19..0000000000
--- a/tools/pdl/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs
+++ /dev/null
@@ -1,376 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum8 {
- A = 0x0,
-}
-impl TryFrom<u8> for Enum8 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(Enum8::A),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum8> for u8 {
- fn from(value: &Enum8) -> Self {
- match value {
- Enum8::A => 0x0,
- }
- }
-}
-impl From<Enum8> for u8 {
- fn from(value: Enum8) -> Self {
- (&value).into()
- }
-}
-impl From<Enum8> for i16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentDataChild {
- Child(Arc<ChildData>),
- Payload(Bytes),
- None,
-}
-impl ParentDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ParentDataChild::Child(value) => value.get_total_size(),
- ParentDataChild::Payload(bytes) => bytes.len(),
- ParentDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentChild {
- Child(Child),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentData {
- v: Enum8,
- child: ParentDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Parent {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentBuilder {
- pub v: Enum8,
- pub payload: Option<Bytes>,
-}
-impl ParentData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let v = Enum8::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "v".to_string(),
- value: bytes.get_mut().get_u8() as u64,
- type_: "Enum8".to_string(),
- })?;
- let payload: &[u8] = &[];
- let child = match (v) {
- (Enum8::A) if ChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = ChildData::parse_inner(&mut cell)?;
- ParentDataChild::Child(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ParentDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ParentDataChild::None,
- };
- Ok(Self { v, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(u8::from(self.v));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Parent {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Parent> for Bytes {
- fn from(packet: Parent) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Parent> for Vec<u8> {
- fn from(packet: Parent) -> Self {
- packet.to_vec()
- }
-}
-impl Parent {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ParentChild {
- match &self.parent.child {
- ParentDataChild::Child(_) => {
- ParentChild::Child(Child::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::Payload(payload) => ParentChild::Payload(payload.clone()),
- ParentDataChild::None => ParentChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- Ok(Self { parent })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.parent.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ParentBuilder {
- pub fn build(self) -> Parent {
- let parent = Arc::new(ParentData {
- v: self.v,
- child: ParentDataChild::None,
- });
- Parent::new(parent).unwrap()
- }
-}
-impl From<ParentBuilder> for Parent {
- fn from(builder: ParentBuilder) -> Parent {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Child {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildBuilder {}
-impl ChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for Child {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Child> for Bytes {
- fn from(packet: Child) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Child> for Vec<u8> {
- fn from(packet: Child) -> Self {
- packet.to_vec()
- }
-}
-impl From<Child> for Parent {
- fn from(packet: Child) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for Child {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<Child> {
- Child::new(packet.parent)
- }
-}
-impl Child {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, child })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.child.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ChildBuilder {
- pub fn build(self) -> Child {
- let child = Arc::new(ChildData {});
- let parent = Arc::new(ParentData {
- v: Enum8::A,
- child: ParentDataChild::None,
- });
- Child::new(parent).unwrap()
- }
-}
-impl From<ChildBuilder> for Parent {
- fn from(builder: ChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<ChildBuilder> for Child {
- fn from(builder: ChildBuilder) -> Child {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs b/tools/pdl/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs
deleted file mode 100644
index 35c6daca19..0000000000
--- a/tools/pdl/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs
+++ /dev/null
@@ -1,376 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[repr(u64)]
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum Enum8 {
- A = 0x0,
-}
-impl TryFrom<u8> for Enum8 {
- type Error = u8;
- fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
- match value {
- 0x0 => Ok(Enum8::A),
- _ => Err(value),
- }
- }
-}
-impl From<&Enum8> for u8 {
- fn from(value: &Enum8) -> Self {
- match value {
- Enum8::A => 0x0,
- }
- }
-}
-impl From<Enum8> for u8 {
- fn from(value: Enum8) -> Self {
- (&value).into()
- }
-}
-impl From<Enum8> for i16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for i64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u16 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u32 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-impl From<Enum8> for u64 {
- fn from(value: Enum8) -> Self {
- u8::from(value) as Self
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentDataChild {
- Child(Arc<ChildData>),
- Payload(Bytes),
- None,
-}
-impl ParentDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- ParentDataChild::Child(value) => value.get_total_size(),
- ParentDataChild::Payload(bytes) => bytes.len(),
- ParentDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum ParentChild {
- Child(Child),
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentData {
- v: Enum8,
- child: ParentDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Parent {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ParentBuilder {
- pub v: Enum8,
- pub payload: Option<Bytes>,
-}
-impl ParentData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 1
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Parent".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let v = Enum8::try_from(bytes.get_mut().get_u8())
- .map_err(|_| Error::InvalidEnumValueError {
- obj: "Parent".to_string(),
- field: "v".to_string(),
- value: bytes.get_mut().get_u8() as u64,
- type_: "Enum8".to_string(),
- })?;
- let payload: &[u8] = &[];
- let child = match (v) {
- (Enum8::A) if ChildData::conforms(&payload) => {
- let mut cell = Cell::new(payload);
- let child_data = ChildData::parse_inner(&mut cell)?;
- ParentDataChild::Child(Arc::new(child_data))
- }
- _ if !payload.is_empty() => {
- ParentDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => ParentDataChild::None,
- };
- Ok(Self { v, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(u8::from(self.v));
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 1
- }
-}
-impl Packet for Parent {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Parent> for Bytes {
- fn from(packet: Parent) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Parent> for Vec<u8> {
- fn from(packet: Parent) -> Self {
- packet.to_vec()
- }
-}
-impl Parent {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> ParentChild {
- match &self.parent.child {
- ParentDataChild::Child(_) => {
- ParentChild::Child(Child::new(self.parent.clone()).unwrap())
- }
- ParentDataChild::Payload(payload) => ParentChild::Payload(payload.clone()),
- ParentDataChild::None => ParentChild::None,
- }
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- Ok(Self { parent })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.parent.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ParentBuilder {
- pub fn build(self) -> Parent {
- let parent = Arc::new(ParentData {
- v: self.v,
- child: ParentDataChild::None,
- });
- Parent::new(parent).unwrap()
- }
-}
-impl From<ParentBuilder> for Parent {
- fn from(builder: ParentBuilder) -> Parent {
- builder.build().into()
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Child {
- #[cfg_attr(feature = "serde", serde(flatten))]
- parent: Arc<ParentData>,
- #[cfg_attr(feature = "serde", serde(flatten))]
- child: Arc<ChildData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct ChildBuilder {}
-impl ChildData {
- fn conforms(bytes: &[u8]) -> bool {
- true
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {}
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 0
- }
-}
-impl Packet for Child {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.parent.get_size());
- self.parent.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Child> for Bytes {
- fn from(packet: Child) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Child> for Vec<u8> {
- fn from(packet: Child) -> Self {
- packet.to_vec()
- }
-}
-impl From<Child> for Parent {
- fn from(packet: Child) -> Parent {
- Parent::new(packet.parent).unwrap()
- }
-}
-impl TryFrom<Parent> for Child {
- type Error = Error;
- fn try_from(packet: Parent) -> Result<Child> {
- Child::new(packet.parent)
- }
-}
-impl Child {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = ParentData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(parent: Arc<ParentData>) -> Result<Self> {
- let child = match &parent.child {
- ParentDataChild::Child(value) => value.clone(),
- _ => {
- return Err(Error::InvalidChildError {
- expected: stringify!(ParentDataChild::Child),
- actual: format!("{:?}", & parent.child),
- });
- }
- };
- Ok(Self { parent, child })
- }
- pub fn get_v(&self) -> Enum8 {
- self.parent.as_ref().v
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.child.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.parent.get_size()
- }
-}
-impl ChildBuilder {
- pub fn build(self) -> Child {
- let child = Arc::new(ChildData {});
- let parent = Arc::new(ParentData {
- v: Enum8::A,
- child: ParentDataChild::None,
- });
- Child::new(parent).unwrap()
- }
-}
-impl From<ChildBuilder> for Parent {
- fn from(builder: ChildBuilder) -> Parent {
- builder.build().into()
- }
-}
-impl From<ChildBuilder> for Child {
- fn from(builder: ChildBuilder) -> Child {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_big_endian.rs b/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_big_endian.rs
deleted file mode 100644
index 452c365765..0000000000
--- a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_big_endian.rs
+++ /dev/null
@@ -1,201 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u32,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u32,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_uint(3) as u32;
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match () {
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0xff_ffff);
- }
- buffer.put_uint(self.a as u64, 3);
- match &self.child {
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u32 {
- self.foo.as_ref().a
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.foo.child {
- FooDataChild::Payload(bytes) => &bytes,
- FooDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_little_endian.rs b/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_little_endian.rs
deleted file mode 100644
index 99c06b9ccf..0000000000
--- a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_little_endian.rs
+++ /dev/null
@@ -1,201 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u32,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u32,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_uint_le(3) as u32;
- let payload = bytes.get();
- bytes.get_mut().advance(payload.len());
- let child = match () {
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0xff_ffff);
- }
- buffer.put_uint_le(self.a as u64, 3);
- match &self.child {
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u32 {
- self.foo.as_ref().a
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.foo.child {
- FooDataChild::Payload(bytes) => &bytes,
- FooDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_big_endian.rs b/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_big_endian.rs
deleted file mode 100644
index f54477a12c..0000000000
--- a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_big_endian.rs
+++ /dev/null
@@ -1,208 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u32,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u32,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..bytes.get().len() - 3];
- bytes.get_mut().advance(payload.len());
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_uint(3) as u32;
- let child = match () {
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- if self.a > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0xff_ffff);
- }
- buffer.put_uint(self.a as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u32 {
- self.foo.as_ref().a
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.foo.child {
- FooDataChild::Payload(bytes) => &bytes,
- FooDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_little_endian.rs b/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_little_endian.rs
deleted file mode 100644
index 616c87f212..0000000000
--- a/tools/pdl/tests/generated/packet_decl_payload_field_unknown_size_terminal_little_endian.rs
+++ /dev/null
@@ -1,208 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u32,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u32,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 3
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..bytes.get().len() - 3];
- bytes.get_mut().advance(payload.len());
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_uint_le(3) as u32;
- let child = match () {
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- match &self.child {
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- if self.a > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0xff_ffff);
- }
- buffer.put_uint_le(self.a as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 3 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u32 {
- self.foo.as_ref().a
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.foo.child {
- FooDataChild::Payload(bytes) => &bytes,
- FooDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_payload_field_variable_size_big_endian.rs b/tools/pdl/tests/generated/packet_decl_payload_field_variable_size_big_endian.rs
deleted file mode 100644
index fd2a3c1891..0000000000
--- a/tools/pdl/tests/generated/packet_decl_payload_field_variable_size_big_endian.rs
+++ /dev/null
@@ -1,235 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: u16,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: u16,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 4
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_u8();
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let payload_size = bytes.get_mut().get_u8() as usize;
- if bytes.get().remaining() < payload_size {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: payload_size,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..payload_size];
- bytes.get_mut().advance(payload_size);
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let b = bytes.get_mut().get_u16();
- let child = match () {
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, b, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.a);
- if self.child.get_total_size() > 0xff {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "_payload_", self.child
- .get_total_size(), 0xff
- );
- }
- buffer.put_u8(self.child.get_total_size() as u8);
- match &self.child {
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- buffer.put_u16(self.b);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 4 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> u16 {
- self.foo.as_ref().b
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.foo.child {
- FooDataChild::Payload(bytes) => &bytes,
- FooDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_payload_field_variable_size_little_endian.rs b/tools/pdl/tests/generated/packet_decl_payload_field_variable_size_little_endian.rs
deleted file mode 100644
index 65f18097ee..0000000000
--- a/tools/pdl/tests/generated/packet_decl_payload_field_variable_size_little_endian.rs
+++ /dev/null
@@ -1,235 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooDataChild {
- Payload(Bytes),
- None,
-}
-impl FooDataChild {
- fn get_total_size(&self) -> usize {
- match self {
- FooDataChild::Payload(bytes) => bytes.len(),
- FooDataChild::None => 0,
- }
- }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub enum FooChild {
- Payload(Bytes),
- None,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- a: u8,
- b: u16,
- child: FooDataChild,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub a: u8,
- pub b: u16,
- pub payload: Option<Bytes>,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 4
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let a = bytes.get_mut().get_u8();
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let payload_size = bytes.get_mut().get_u8() as usize;
- if bytes.get().remaining() < payload_size {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: payload_size,
- got: bytes.get().remaining(),
- });
- }
- let payload = &bytes.get()[..payload_size];
- bytes.get_mut().advance(payload_size);
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let b = bytes.get_mut().get_u16_le();
- let child = match () {
- _ if !payload.is_empty() => {
- FooDataChild::Payload(Bytes::copy_from_slice(payload))
- }
- _ => FooDataChild::None,
- };
- Ok(Self { a, b, child })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.a);
- if self.child.get_total_size() > 0xff {
- panic!(
- "Invalid length for {}::{}: {} > {}", "Foo", "_payload_", self.child
- .get_total_size(), 0xff
- );
- }
- buffer.put_u8(self.child.get_total_size() as u8);
- match &self.child {
- FooDataChild::Payload(payload) => buffer.put_slice(payload),
- FooDataChild::None => {}
- }
- buffer.put_u16_le(self.b);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 4 + self.child.get_total_size()
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- pub fn specialize(&self) -> FooChild {
- match &self.foo.child {
- FooDataChild::Payload(payload) => FooChild::Payload(payload.clone()),
- FooDataChild::None => FooChild::None,
- }
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_a(&self) -> u8 {
- self.foo.as_ref().a
- }
- pub fn get_b(&self) -> u16 {
- self.foo.as_ref().b
- }
- pub fn get_payload(&self) -> &[u8] {
- match &self.foo.child {
- FooDataChild::Payload(bytes) => &bytes,
- FooDataChild::None => &[],
- }
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- a: self.a,
- b: self.b,
- child: match self.payload {
- None => FooDataChild::None,
- Some(bytes) => FooDataChild::Payload(bytes),
- },
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_reserved_field_big_endian.rs b/tools/pdl/tests/generated/packet_decl_reserved_field_big_endian.rs
deleted file mode 100644
index f03c7bfcd1..0000000000
--- a/tools/pdl/tests/generated/packet_decl_reserved_field_big_endian.rs
+++ /dev/null
@@ -1,139 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- bytes.get_mut().advance(5);
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_bytes(0, 5);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {});
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_reserved_field_little_endian.rs b/tools/pdl/tests/generated/packet_decl_reserved_field_little_endian.rs
deleted file mode 100644
index f03c7bfcd1..0000000000
--- a/tools/pdl/tests/generated/packet_decl_reserved_field_little_endian.rs
+++ /dev/null
@@ -1,139 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 5
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 5 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 5,
- got: bytes.get().remaining(),
- });
- }
- bytes.get_mut().advance(5);
- Ok(Self {})
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_bytes(0, 5);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 5
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {});
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_simple_scalars_big_endian.rs b/tools/pdl/tests/generated/packet_decl_simple_scalars_big_endian.rs
deleted file mode 100644
index 303a7855e9..0000000000
--- a/tools/pdl/tests/generated/packet_decl_simple_scalars_big_endian.rs
+++ /dev/null
@@ -1,181 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u8,
- y: u16,
- z: u32,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u8,
- pub y: u16,
- pub z: u32,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 6
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u8();
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let y = bytes.get_mut().get_u16();
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let z = bytes.get_mut().get_uint(3) as u32;
- Ok(Self { x, y, z })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.x);
- buffer.put_u16(self.y);
- if self.z > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "z", self.z, 0xff_ffff);
- }
- buffer.put_uint(self.z as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 6
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u8 {
- self.foo.as_ref().x
- }
- pub fn get_y(&self) -> u16 {
- self.foo.as_ref().y
- }
- pub fn get_z(&self) -> u32 {
- self.foo.as_ref().z
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- x: self.x,
- y: self.y,
- z: self.z,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/packet_decl_simple_scalars_little_endian.rs b/tools/pdl/tests/generated/packet_decl_simple_scalars_little_endian.rs
deleted file mode 100644
index 043a72f113..0000000000
--- a/tools/pdl/tests/generated/packet_decl_simple_scalars_little_endian.rs
+++ /dev/null
@@ -1,181 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooData {
- x: u8,
- y: u16,
- z: u32,
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- #[cfg_attr(feature = "serde", serde(flatten))]
- foo: Arc<FooData>,
-}
-#[derive(Debug)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct FooBuilder {
- pub x: u8,
- pub y: u16,
- pub z: u32,
-}
-impl FooData {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 6
- }
- fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 1 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 1,
- got: bytes.get().remaining(),
- });
- }
- let x = bytes.get_mut().get_u8();
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let y = bytes.get_mut().get_u16_le();
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let z = bytes.get_mut().get_uint_le(3) as u32;
- Ok(Self { x, y, z })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- buffer.put_u8(self.x);
- buffer.put_u16_le(self.y);
- if self.z > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "z", self.z, 0xff_ffff);
- }
- buffer.put_uint_le(self.z as u64, 3);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 6
- }
-}
-impl Packet for Foo {
- fn to_bytes(self) -> Bytes {
- let mut buffer = BytesMut::with_capacity(self.foo.get_size());
- self.foo.write_to(&mut buffer);
- buffer.freeze()
- }
- fn to_vec(self) -> Vec<u8> {
- self.to_bytes().to_vec()
- }
-}
-impl From<Foo> for Bytes {
- fn from(packet: Foo) -> Self {
- packet.to_bytes()
- }
-}
-impl From<Foo> for Vec<u8> {
- fn from(packet: Foo) -> Self {
- packet.to_vec()
- }
-}
-impl Foo {
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- let data = FooData::parse_inner(&mut bytes)?;
- Self::new(Arc::new(data))
- }
- fn new(foo: Arc<FooData>) -> Result<Self> {
- Ok(Self { foo })
- }
- pub fn get_x(&self) -> u8 {
- self.foo.as_ref().x
- }
- pub fn get_y(&self) -> u16 {
- self.foo.as_ref().y
- }
- pub fn get_z(&self) -> u32 {
- self.foo.as_ref().z
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- self.foo.write_to(buffer)
- }
- pub fn get_size(&self) -> usize {
- self.foo.get_size()
- }
-}
-impl FooBuilder {
- pub fn build(self) -> Foo {
- let foo = Arc::new(FooData {
- x: self.x,
- y: self.y,
- z: self.z,
- });
- Foo::new(foo).unwrap()
- }
-}
-impl From<FooBuilder> for Foo {
- fn from(builder: FooBuilder) -> Foo {
- builder.build().into()
- }
-}
diff --git a/tools/pdl/tests/generated/preamble.rs b/tools/pdl/tests/generated/preamble.rs
deleted file mode 100644
index e200e6d7b1..0000000000
--- a/tools/pdl/tests/generated/preamble.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from foo.pdl.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
diff --git a/tools/pdl/tests/generated/struct_decl_complex_scalars_big_endian.rs b/tools/pdl/tests/generated/struct_decl_complex_scalars_big_endian.rs
deleted file mode 100644
index 8ea2bb92ba..0000000000
--- a/tools/pdl/tests/generated/struct_decl_complex_scalars_big_endian.rs
+++ /dev/null
@@ -1,126 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: u8,
- pub b: u8,
- pub c: u8,
- pub d: u32,
- pub e: u16,
- pub f: u8,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16();
- let a = (chunk & 0x7) as u8;
- let b = (chunk >> 3) as u8;
- let c = ((chunk >> 11) & 0x1f) as u8;
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let d = bytes.get_mut().get_uint(3) as u32;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16();
- let e = (chunk & 0xfff);
- let f = ((chunk >> 12) & 0xf) as u8;
- Ok(Self { a, b, c, d, e, f })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0x7 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0x7);
- }
- if self.c > 0x1f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "c", self.c, 0x1f);
- }
- let value = (self.a as u16) | ((self.b as u16) << 3) | ((self.c as u16) << 11);
- buffer.put_u16(value);
- if self.d > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "d", self.d, 0xff_ffff);
- }
- buffer.put_uint(self.d as u64, 3);
- if self.e > 0xfff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "e", self.e, 0xfff);
- }
- if self.f > 0xf {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "f", self.f, 0xf);
- }
- let value = self.e | ((self.f as u16) << 12);
- buffer.put_u16(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7
- }
-}
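Editorial note: the deleted parser above decodes three bit-packed chunks: one big-endian u16 holding a (3 bits), b (8 bits) and c (5 bits); a 24-bit scalar d; and a second u16 holding e (12 bits) and f (4 bits), for a fixed total of 7 bytes. The standalone sketch below (illustrative, not generated output) reproduces the same pack/unpack arithmetic with the bytes crate the generated code already uses.

// Pack and unpack the same layout as Foo::write_to / Foo::parse_inner above.
use bytes::{Buf, BufMut, BytesMut};

fn main() {
    let (a, b, c) = (0x5u16, 0xabu16, 0x11u16);
    let d = 0x00dead_u32;
    let (e, f) = (0x123u16, 0x9u16);

    // Pack, mirroring Foo::write_to in the big-endian file.
    let mut buffer = BytesMut::new();
    buffer.put_u16(a | (b << 3) | (c << 11));
    buffer.put_uint(d as u64, 3);
    buffer.put_u16(e | (f << 12));
    assert_eq!(buffer.len(), 7);

    // Unpack, mirroring Foo::parse_inner.
    let mut bytes = &buffer[..];
    let chunk = bytes.get_u16();
    assert_eq!(chunk & 0x7, a);              // a: 3 bits
    assert_eq!((chunk >> 3) as u8, b as u8); // b: 8 bits
    assert_eq!((chunk >> 11) & 0x1f, c);     // c: 5 bits
    assert_eq!(bytes.get_uint(3) as u32, d); // d: 24-bit scalar
    let chunk = bytes.get_u16();
    assert_eq!(chunk & 0xfff, e);            // e: 12 bits
    assert_eq!(chunk >> 12, f);              // f: 4 bits
}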
diff --git a/tools/pdl/tests/generated/struct_decl_complex_scalars_little_endian.rs b/tools/pdl/tests/generated/struct_decl_complex_scalars_little_endian.rs
deleted file mode 100644
index 0ec2c384b4..0000000000
--- a/tools/pdl/tests/generated/struct_decl_complex_scalars_little_endian.rs
+++ /dev/null
@@ -1,126 +0,0 @@
-#![rustfmt::skip]
-/// @generated rust packets from test.
-use bytes::{Buf, BufMut, Bytes, BytesMut};
-use std::convert::{TryFrom, TryInto};
-use std::cell::Cell;
-use std::fmt;
-use std::sync::Arc;
-use thiserror::Error;
-type Result<T> = std::result::Result<T, Error>;
-/// Private prevents users from creating arbitrary scalar values
-/// in situations where the value needs to be validated.
-/// Users can freely deref the value, but only the backend
-/// may create it.
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Private<T>(T);
-impl<T> std::ops::Deref for Private<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("Packet parsing failed")]
- InvalidPacketError,
- #[error("{field} was {value:x}, which is not known")]
- ConstraintOutOfBounds { field: String, value: u64 },
- #[error("Got {actual:x}, expected {expected:x}")]
- InvalidFixedValue { expected: u64, actual: u64 },
- #[error("when parsing {obj} needed length of {wanted} but got {got}")]
- InvalidLengthError { obj: String, wanted: usize, got: usize },
- #[error(
- "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
- )]
- InvalidArraySize { array: usize, element: usize },
- #[error("Due to size restrictions a struct could not be parsed.")]
- ImpossibleStructError,
- #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
- InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
- #[error("expected child {expected}, got {actual}")]
- InvalidChildError { expected: &'static str, actual: String },
-}
-pub trait Packet {
- fn to_bytes(self) -> Bytes;
- fn to_vec(self) -> Vec<u8>;
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-pub struct Foo {
- pub a: u8,
- pub b: u8,
- pub c: u8,
- pub d: u32,
- pub e: u16,
- pub f: u8,
-}
-impl Foo {
- fn conforms(bytes: &[u8]) -> bool {
- bytes.len() >= 7
- }
- pub fn parse(bytes: &[u8]) -> Result<Self> {
- let mut cell = Cell::new(bytes);
- let packet = Self::parse_inner(&mut cell)?;
- Ok(packet)
- }
- fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16_le();
- let a = (chunk & 0x7) as u8;
- let b = (chunk >> 3) as u8;
- let c = ((chunk >> 11) & 0x1f) as u8;
- if bytes.get().remaining() < 3 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 3,
- got: bytes.get().remaining(),
- });
- }
- let d = bytes.get_mut().get_uint_le(3) as u32;
- if bytes.get().remaining() < 2 {
- return Err(Error::InvalidLengthError {
- obj: "Foo".to_string(),
- wanted: 2,
- got: bytes.get().remaining(),
- });
- }
- let chunk = bytes.get_mut().get_u16_le();
- let e = (chunk & 0xfff);
- let f = ((chunk >> 12) & 0xf) as u8;
- Ok(Self { a, b, c, d, e, f })
- }
- fn write_to(&self, buffer: &mut BytesMut) {
- if self.a > 0x7 {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "a", self.a, 0x7);
- }
- if self.c > 0x1f {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "c", self.c, 0x1f);
- }
- let value = (self.a as u16) | ((self.b as u16) << 3) | ((self.c as u16) << 11);
- buffer.put_u16_le(value);
- if self.d > 0xff_ffff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "d", self.d, 0xff_ffff);
- }
- buffer.put_uint_le(self.d as u64, 3);
- if self.e > 0xfff {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "e", self.e, 0xfff);
- }
- if self.f > 0xf {
- panic!("Invalid value for {}::{}: {} > {}", "Foo", "f", self.f, 0xf);
- }
- let value = self.e | ((self.f as u16) << 12);
- buffer.put_u16_le(value);
- }
- fn get_total_size(&self) -> usize {
- self.get_size()
- }
- fn get_size(&self) -> usize {
- 7
- }
-}
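Editorial note: the little-endian file above is identical to the big-endian one except that every bytes-crate accessor gains an _le suffix (get_u16_le/put_u16_le, get_uint_le/put_uint_le). The small sketch below, which is illustrative and not part of the change, shows the resulting byte-order difference for one packed u16.

// Same field values, different wire order depending on the chosen endianness.
use bytes::{BufMut, BytesMut};

fn main() {
    let value: u16 = 0x9123; // e = 0x123, f = 0x9, packed as in Foo::write_to

    let mut be = BytesMut::new();
    be.put_u16(value); // big-endian file: most significant byte first
    assert_eq!(&be[..], &[0x91, 0x23]);

    let mut le = BytesMut::new();
    le.put_u16_le(value); // little-endian file: least significant byte first
    assert_eq!(&le[..], &[0x23, 0x91]);
}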
diff --git a/tools/pdl/tests/generated_files_compile.sh b/tools/pdl/tests/generated_files_compile.sh
deleted file mode 100755
index 89d12ba214..0000000000
--- a/tools/pdl/tests/generated_files_compile.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Run this script with a number of Rust files as input. It will combine them to
-# a single file which you can compile to check the validity of the inputs.
-#
-# For a Cargo based workflow, you can run
-#
-# ./generated_files_compile.sh generated/*.rs > generated_files.rs
-#
-# followed by cargo test.
-
-for input_path in "$@"; do
- echo "mod $(basename -s .rs "$input_path") {"
- # The inner (module) attribute needs to be removed to produce a
- # valid file.
- grep -v '#!\[rustfmt::skip\]' "$input_path"
- echo "}"
-done
-
-cat <<EOF
-#[test]
-fn generated_files_compile() {
- // Empty test, we only want to see that things compile.
-}
-EOF
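Editorial note: the removed helper script wraps each generated file in a mod named after the file, drops the inner #![rustfmt::skip] attribute (an inner attribute is not valid inside a module body built this way), and appends one empty test so that cargo test only has to prove everything compiles. The combined output has roughly the following shape; the module names shown are the two files deleted above.

// Illustrative shape of the file emitted by generated_files_compile.sh.
mod struct_decl_complex_scalars_big_endian {
    // ... contents of generated/struct_decl_complex_scalars_big_endian.rs,
    // minus the #![rustfmt::skip] line ...
}

mod struct_decl_complex_scalars_little_endian {
    // ... contents of generated/struct_decl_complex_scalars_little_endian.rs ...
}

#[test]
fn generated_files_compile() {
    // Empty test, we only want to see that things compile.
}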
diff --git a/tools/pdl/tests/python_generator_test.py b/tools/pdl/tests/python_generator_test.py
deleted file mode 100644
index dbd0c5b098..0000000000
--- a/tools/pdl/tests/python_generator_test.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Tests the generated python backend against standard PDL
-# constructs, with matching input vectors.
-
-import dataclasses
-import enum
-import json
-import typing
-import typing_extensions
-import unittest
-from importlib import resources
-
-# (le|be)_pdl_test are the names of the modules generated from the canonical
-# little endian and big endian test grammars. The purpose of this module
-# is to validate the generated parsers against the set of pre-generated
-# test vectors in canonical/(le|be)_test_vectors.json.
-import le_pdl_test
-import be_pdl_test
-
-
-def match_object(self, left, right):
- """Recursively match a python class object against a reference
- json object."""
- if isinstance(right, int):
- self.assertEqual(left, right)
- elif isinstance(right, list):
- self.assertEqual(len(left), len(right))
- for n in range(len(right)):
- match_object(self, left[n], right[n])
- elif isinstance(right, dict):
- for (k, v) in right.items():
- self.assertTrue(hasattr(left, k))
- match_object(self, getattr(left, k), v)
-
-
-def create_object(typ, value):
- """Build an object of the selected type using the input value."""
- if dataclasses.is_dataclass(typ):
- field_types = dict([(f.name, f.type) for f in dataclasses.fields(typ)])
- values = dict()
- for (f, v) in value.items():
- field_type = field_types[f]
- values[f] = create_object(field_type, v)
- return typ(**values)
- elif typing_extensions.get_origin(typ) is list:
- typ = typing_extensions.get_args(typ)[0]
- return [create_object(typ, v) for v in value]
- elif typing_extensions.get_origin(typ) is typing.Union:
- # typing.Optional[int] expands to typing.Union[int, None]
- typ = typing_extensions.get_args(typ)[0]
- return create_object(typ, value) if value else None
- elif typ is bytes:
- return bytes(value)
- elif typ is bytearray:
- return bytearray(value)
- elif issubclass(typ, enum.Enum):
- return typ(value)
- elif typ is int:
- return value
- else:
- raise Exception(f"unsupported type annotation {typ}")
-
-
-class PacketParserTest(unittest.TestCase):
- """Validate the generated parser against pre-generated test
- vectors in canonical/(le|be)_test_vectors.json"""
-
- def testLittleEndian(self):
- with resources.files('tests.canonical').joinpath('le_test_vectors.json').open('r') as f:
- reference = json.load(f)
-
- for item in reference:
- # 'packet' is the name of the packet being tested,
- # 'tests' lists input vectors that must match the
- # selected packet.
- packet = item['packet']
- tests = item['tests']
- with self.subTest(packet=packet):
- # Retrieve the class object from the generated
- # module, in order to invoke the proper parse
- # method for this test.
- cls = getattr(le_pdl_test, packet)
- for test in tests:
- result = cls.parse_all(bytes.fromhex(test['packed']))
- match_object(self, result, test['unpacked'])
-
- def testBigEndian(self):
- with resources.files('tests.canonical').joinpath('be_test_vectors.json').open('r') as f:
- reference = json.load(f)
-
- for item in reference:
- # 'packet' is the name of the packet being tested,
- # 'tests' lists input vectors that must match the
- # selected packet.
- packet = item['packet']
- tests = item['tests']
- with self.subTest(packet=packet):
- # Retrieve the class object from the generated
- # module, in order to invoke the proper constructor
- # method for this test.
- cls = getattr(be_pdl_test, packet)
- for test in tests:
- result = cls.parse_all(bytes.fromhex(test['packed']))
- match_object(self, result, test['unpacked'])
-
-
-class PacketSerializerTest(unittest.TestCase):
- """Validate the generated serializer against pre-generated test
- vectors in canonical/(le|be)_test_vectors.json"""
-
- def testLittleEndian(self):
- with resources.files('tests.canonical').joinpath('le_test_vectors.json').open('r') as f:
- reference = json.load(f)
-
- for item in reference:
- # 'packet' is the name of the packet being tested,
- # 'tests' lists input vectors that must match the
- # selected packet.
- packet = item['packet']
- tests = item['tests']
- with self.subTest(packet=packet):
- # Retrieve the class object from the generated
- # module, in order to invoke the proper constructor
- # method for this test.
- for test in tests:
- cls = getattr(le_pdl_test, test.get('packet', packet))
- obj = create_object(cls, test['unpacked'])
- result = obj.serialize()
- self.assertEqual(result, bytes.fromhex(test['packed']))
-
- def testBigEndian(self):
- with resources.files('tests.canonical').joinpath('be_test_vectors.json').open('r') as f:
- reference = json.load(f)
-
- for item in reference:
- # 'packet' is the name of the packet being tested,
- # 'tests' lists input vectors that must match the
- # selected packet.
- packet = item['packet']
- tests = item['tests']
- with self.subTest(packet=packet):
- # Retrieve the class object from the generated
- # module, in order to invoke the proper parse
- # method for this test.
- for test in tests:
- cls = getattr(be_pdl_test, test.get('packet', packet))
- obj = create_object(cls, test['unpacked'])
- result = obj.serialize()
- self.assertEqual(result, bytes.fromhex(test['packed']))
-
-
-class CustomPacketParserTest(unittest.TestCase):
- """Manual testing for custom fields."""
-
- def testCustomField(self):
- result = le_pdl_test.Packet_Custom_Field_ConstantSize.parse_all([1])
- self.assertEqual(result.a.value, 1)
-
- result = le_pdl_test.Packet_Custom_Field_VariableSize.parse_all([1])
- self.assertEqual(result.a.value, 1)
-
- result = le_pdl_test.Struct_Custom_Field_ConstantSize.parse_all([1])
- self.assertEqual(result.s.a.value, 1)
-
- result = le_pdl_test.Struct_Custom_Field_VariableSize.parse_all([1])
- self.assertEqual(result.s.a.value, 1)
-
- result = be_pdl_test.Packet_Custom_Field_ConstantSize.parse_all([1])
- self.assertEqual(result.a.value, 1)
-
- result = be_pdl_test.Packet_Custom_Field_VariableSize.parse_all([1])
- self.assertEqual(result.a.value, 1)
-
- result = be_pdl_test.Struct_Custom_Field_ConstantSize.parse_all([1])
- self.assertEqual(result.s.a.value, 1)
-
- result = be_pdl_test.Struct_Custom_Field_VariableSize.parse_all([1])
- self.assertEqual(result.s.a.value, 1)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=3)
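Editorial note: the removed Python tests are driven by the canonical test-vector files, where each entry names a packet and lists packed (hex string) / unpacked (field values) pairs. The sketch below reads that layout from Rust; it is illustrative only, assumes serde/serde_json with the derive feature, and the comment marks where a real test would call the generated parser for the named packet.

// Read the canonical test vectors and decode each packed payload.
use serde::Deserialize;

#[derive(Deserialize)]
struct TestVector {
    packet: String,      // name of the packet declaration under test
    tests: Vec<TestCase>,
}

#[derive(Deserialize)]
struct TestCase {
    packed: String,              // hex-encoded serialized bytes
    unpacked: serde_json::Value, // expected field values
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = std::fs::read_to_string("tests/canonical/le_test_vectors.json")?;
    let vectors: Vec<TestVector> = serde_json::from_str(&data)?;
    for vector in &vectors {
        for case in &vector.tests {
            // Decode the hex payload; a real test would now call
            // <Packet>::parse(&bytes) and compare the result with `unpacked`.
            let bytes: Vec<u8> = (0..case.packed.len())
                .step_by(2)
                .map(|i| u8::from_str_radix(&case.packed[i..i + 2], 16))
                .collect::<Result<_, _>>()?;
            println!("{}: {} bytes, expecting {}", vector.packet, bytes.len(), case.unpacked);
        }
    }
    Ok(())
}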
diff --git a/tools/rootcanal/Android.bp b/tools/rootcanal/Android.bp
index 0b5e9d4cce..11eef55e81 100644
--- a/tools/rootcanal/Android.bp
+++ b/tools/rootcanal/Android.bp
@@ -189,7 +189,7 @@ cc_library_host_shared {
genrule {
name: "link_layer_packets_python3_gen",
defaults: ["pdl_python_generator_defaults"],
- cmd: "$(location :pdl) $(in) |" +
+ cmd: "$(location :pdlc) $(in) |" +
" $(location :pdl_python_generator)" +
" --output $(out) --custom-type-location py.bluetooth",
srcs: [
@@ -205,7 +205,7 @@ genrule {
genrule {
name: "hci_packets_python3_gen",
defaults: ["pdl_python_generator_defaults"],
- cmd: "$(location :pdl) $(in) |" +
+ cmd: "$(location :pdlc) $(in) |" +
" $(location :pdl_python_generator)" +
" --output $(out) --custom-type-location py.bluetooth",
srcs: [
diff --git a/tools/rootcanal/CMakeLists.txt b/tools/rootcanal/CMakeLists.txt
index 15d596ea5f..5ce799c09e 100644
--- a/tools/rootcanal/CMakeLists.txt
+++ b/tools/rootcanal/CMakeLists.txt
@@ -1,24 +1,24 @@
set(BT_ROOT ${AOSP_ROOT}/packages/modules/Bluetooth/system)
set(ROOTCANAL_ROOT ${AOSP_ROOT}/packages/modules/Bluetooth/tools/rootcanal)
-set(PDL_ROOT ${AOSP_ROOT}/packages/modules/Bluetooth/tools/pdl)
+set(PDL_ROOT ${AOSP_ROOT}/external/rust/crates/pdl-compiler)
corrosion_import_crate(
MANIFEST_PATH ${PDL_ROOT}/Cargo.toml
FLAGS --offline --verbose --verbose)
corrosion_set_env_vars(generate-canonical-tests CARGO_HOME=${Rust_CARGO_HOME})
-corrosion_set_env_vars(pdl CARGO_HOME=${Rust_CARGO_HOME})
-corrosion_set_hostbuild(pdl)
+corrosion_set_env_vars(pdlc CARGO_HOME=${Rust_CARGO_HOME})
+corrosion_set_hostbuild(pdlc)
-get_property(pdl_EXECUTABLE TARGET pdl PROPERTY EXECUTABLE_PATH)
+get_property(pdlc_EXECUTABLE TARGET pdlc PROPERTY EXECUTABLE_PATH)
# These tests depend on the tempfile crate which was not imported because
# the crate remove_dir_all does not have a compatible version.
-set_tests_properties(cargo-test_pdl PROPERTIES DISABLED True)
+set_tests_properties(cargo-test_pdlc PROPERTIES DISABLED True)
set_tests_properties(cargo-test_generate-canonical-tests PROPERTIES DISABLED True)
android_license(
- TARGET pdl
+ TARGET pdlc
LIBNAME None
SPDX None
LICENSE None
@@ -58,13 +58,13 @@ function(pdl_gen)
add_custom_command(
OUTPUT "${pdl_OUTPUT_ABSOLUTE}"
COMMAND
- ${pdl_EXECUTABLE}
+ ${pdlc_EXECUTABLE}
--output-format rust
"${pdl_INPUT_ABSOLUTE}"
> "${pdl_OUTPUT_ABSOLUTE}"
COMMENT "Generating rust module from ${pdl_INPUT}"
VERBATIM
- DEPENDS pdl ${pdl_INPUT_ABSOLUTE})
+ DEPENDS pdlc ${pdl_INPUT_ABSOLUTE})
endif()
endfunction()