From 7dfdcf2102aced00a8e8df9c6610f39be7272545 Mon Sep 17 00:00:00 2001 From: Santiago Carmuega Date: Wed, 6 Nov 2024 17:28:49 -0300 Subject: [PATCH] explore: codegen primitives from cddl --- Cargo.toml | 2 +- deny.toml | 239 +++++++ pallas-ledger/Cargo.toml | 15 + pallas-ledger/build.rs | 106 ++++ pallas-ledger/cddls/conway.cddl | 674 ++++++++++++++++++++ pallas-ledger/cddls/example.ast | 1020 ++++++++++++++++++++++++++++++ pallas-ledger/cddls/example.cddl | 15 + pallas-ledger/generated.rs | 13 + pallas-ledger/src/lib.rs | 1 + 9 files changed, 2084 insertions(+), 1 deletion(-) create mode 100644 deny.toml create mode 100644 pallas-ledger/Cargo.toml create mode 100644 pallas-ledger/build.rs create mode 100644 pallas-ledger/cddls/conway.cddl create mode 100644 pallas-ledger/cddls/example.ast create mode 100644 pallas-ledger/cddls/example.cddl create mode 100644 pallas-ledger/generated.rs create mode 100644 pallas-ledger/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index d0b16308..86093f2b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,5 +19,5 @@ members = [ "examples/block-decode", "examples/crawler", "examples/n2n-miniprotocols", - "examples/n2c-miniprotocols", + "examples/n2c-miniprotocols", "pallas-ledger", ] diff --git a/deny.toml b/deny.toml new file mode 100644 index 00000000..afec4d80 --- /dev/null +++ b/deny.toml @@ -0,0 +1,239 @@ +# This template contains all of the possible sections and their default values + +# Note that all fields that take a lint level have these possible values: +# * deny - An error will be produced and the check will fail +# * warn - A warning will be produced, but the check will not fail +# * allow - No warning or error will be produced, though in some cases a note +# will be + +# The values provided in this template are the default values that will be used +# when any section or field is not specified in your own configuration + +# Root options + +# The graph table configures how the dependency graph is constructed and thus +# 
which crates the checks are performed against +[graph] +# If 1 or more target triples (and optionally, target_features) are specified, +# only the specified targets will be checked when running `cargo deny check`. +# This means, if a particular package is only ever used as a target specific +# dependency, such as, for example, the `nix` crate only being used via the +# `target_family = "unix"` configuration, that only having windows targets in +# this list would mean the nix crate, as well as any of its exclusive +# dependencies not shared by any other crates, would be ignored, as the target +# list here is effectively saying which targets you are building for. +targets = [ + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + #"x86_64-unknown-linux-musl", + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. + #{ triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +#exclude = [] +# If true, metadata will be collected with `--all-features`. 
Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = false +# If true, metadata will be collected with `--no-default-features`. The same +# caveat with `all-features` applies +no-default-features = false +# If set, these feature will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +#features = [] + +# The output table provides options for how/if diagnostics are outputted +[output] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. +# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +# The path where the advisory databases are cloned/fetched into +#db-path = "$CARGO_HOME/advisory-dbs" +# The url(s) of the advisory databases to use +#db-urls = ["https://github.com/rustsec/advisory-db"] +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. +ignore = [ + #"RUSTSEC-0000-0000", + #{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, + #"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish + #{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, +] +# If this is true, then cargo deny will use the git executable to fetch advisory database. 
+# If this is false, then it uses a built-in git library. +# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +#git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. +allow = [ + "MIT", + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "BlueOak-1.0.0", + "BSD-3-Clause", + "CC0-1.0", + "Unicode-DFS-2016", +] +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. +confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + # Each entry is the crate and version constraint, and its specific allow + # list + #{ allow = ["Zlib"], crate = "adler32" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +#[[licenses.clarify]] +# The package spec the clarification applies to +#crate = "ring" +# The SPDX expression for the license requirements of the crate +#expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. 
If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +#license-files = [ +# Each entry is a crate relative path, and the (opaque) hash of its contents +#{ path = "LICENSE", hash = 0xbd0eed23 } +#] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. +ignore = false +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + #"https://sekretz.com/registry +] + +# This section is considered when running `cargo deny check bans`. +# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "warn" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overridden by allowing/denying +# `default` on a crate-by-crate basis if desired. 
+workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overridden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! +allow = [ + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, +] +# List of crates to deny +deny = [ + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + #{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, +] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +#[[bans.features]] +#crate = "reqwest" +# Features to not allow +#deny = ["json"] +# Features to allow +#allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +#] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +#exact = true + +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. 
+skip-tree = [ + #"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies + #{ crate = "ansi_term@0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. +# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "warn" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "warn" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. +allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# github.com organizations to allow git sources for +github = [] +# gitlab.com organizations to allow git sources for +gitlab = [] +# bitbucket.org organizations to allow git sources for +bitbucket = [] diff --git a/pallas-ledger/Cargo.toml b/pallas-ledger/Cargo.toml new file mode 100644 index 00000000..358f5757 --- /dev/null +++ b/pallas-ledger/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "pallas-ledger" +version = "0.1.0" +edition = "2021" + +[dependencies] +cddl = "0.9.4" + +[build-dependencies] +cddl = "0.9.4" +convert_case = "0.6.0" +prettyplease = "0.2.25" +proc-macro2 = "1.0.89" +quote = "1.0.37" +syn = "2.0.87" diff --git a/pallas-ledger/build.rs b/pallas-ledger/build.rs new file mode 100644 index 00000000..c0716973 --- /dev/null +++ b/pallas-ledger/build.rs @@ -0,0 +1,106 @@ +use cddl::ast::CDDL; +use cddl::parser::cddl_from_str; +use convert_case::Case; +use convert_case::Casing as _; +use proc_macro2::TokenStream; +use quote::format_ident; +use std::env; +use std::fs; +use std::path::Path; + 
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    // Tell cargo to rerun this script if the CDDL file changes
+    println!("cargo:rerun-if-changed=schema.cddl");
+
+    // Read the CDDL file
+    let cddl_content = fs::read_to_string("cddls/example.cddl")?;
+
+    // Parse the CDDL content into an AST
+    let ast: CDDL = cddl_from_str(&cddl_content, true)?;
+
+    // Get the output directory from cargo
+    let out_dir = env::var("OUT_DIR")?;
+    //let dest_path = Path::new(&out_dir).join("generated.rs");
+    let dest_path = Path::new("generated.rs");
+
+    // Generate your Rust code here based on the AST
+    let generated_code = generate_code_from_ast(&ast);
+
+    // Write the generated code to a file
+    fs::write(dest_path, generated_code)?;
+
+    Ok(())
+}
+
+fn generate_field(ast: &cddl::ast::GroupEntry) -> TokenStream {
+    match ast {
+        cddl::ast::GroupEntry::ValueMemberKey { ge, .. } => {
+            let field_name = ge
+                .member_key
+                .as_ref()
+                .and_then(|key| {
+                    if let cddl::ast::MemberKey::Bareword { ident, .. } = key {
+                        Some(format_ident!("{}", ident.ident.to_case(Case::Snake)))
+                    } else {
+                        None
+                    }
+                })
+                .unwrap();
+
+            // Get field type (simplified - you'll want to expand this)
+            let field_type = match &ge.entry_type.type_choices.get(0).unwrap().type1.type2 {
+                cddl::ast::Type2::Typename { ident, .. } if ident.ident == "uint" => {
+                    quote::quote!(u64)
+                }
+                // Add more type mappings as needed
+                _ => quote::quote!(()), // Default case
+            };
+
+            quote::quote! {
+                pub #field_name: #field_type
+            }
+        }
+        _ => todo!(),
+    }
+}
+
+fn generate_struct_from_rule(rule: &cddl::ast::TypeRule) -> TokenStream {
+    // Get the rule name from the AST
+    let rule_name = rule.name.ident.to_case(Case::Pascal);
+    let struct_name = format_ident!("{}", rule_name);
+
+    // Extract fields from the group entries
+    let type2 = &rule.value.type_choices.get(0).unwrap().type1.type2;
+
+    let fields: Vec<_> = match type2 {
+        cddl::ast::Type2::Array { group, ..
} => group.group_choices[0]
+            .group_entries
+            .iter()
+            .map(|(entry, _comma)| generate_field(entry)),
+        _ => todo!(),
+    }
+    .collect();
+
+    quote::quote! {
+        #[derive(Debug, Clone, PartialEq)]
+        pub struct #struct_name {
+            #(#fields,)*
+        }
+    }
+}
+
+fn generate_code_from_ast(ast: &CDDL) -> String {
+    let mut output = TokenStream::new();
+
+    for rule in &ast.rules {
+        let generated = match rule {
+            cddl::ast::Rule::Type { rule, .. } => generate_struct_from_rule(rule),
+            cddl::ast::Rule::Group { rule, .. } => todo!(),
+        };
+
+        output.extend(generated);
+    }
+
+    let syntax_tree = syn::parse_file(&output.to_string()).unwrap();
+    prettyplease::unparse(&syntax_tree)
+}
diff --git a/pallas-ledger/cddls/conway.cddl b/pallas-ledger/cddls/conway.cddl
new file mode 100644
index 00000000..8ff25858
--- /dev/null
+++ b/pallas-ledger/cddls/conway.cddl
@@ -0,0 +1,674 @@
+; Conway era introduces an optional 258 tag for sets, which will become mandatory in the
+; second era after Conway. We recommend all the tooling to account for this future breaking
+; change sooner rather than later, in order to provide a smooth transition for their users.
+
+; This is an unordered set. Duplicate elements are not allowed and the order of elements is implementation specific.
+set<a> = #6.258([* a]) / [* a]
+
+; Just like `set`, but must contain at least one element.
+nonempty_set<a> = #6.258([+ a]) / [+ a]
+
+; This is a non-empty ordered set. Duplicate elements are not allowed and the order of elements will be preserved.
+nonempty_oset<a> = #6.258([+ a]) / [+ a]
+
+positive_int = 1 .. 18446744073709551615
+
+unit_interval = #6.30([1, 2])
+    ; unit_interval = #6.30([uint, uint])
+    ;
+    ; Comment above depicts the actual definition for `unit_interval`.
+ ; + ; Unit interval is a number in the range between 0 and 1, which + ; means there are two extra constraints: + ; * numerator <= denominator + ; * denominator > 0 + ; + ; Relation between numerator and denominator cannot be expressed in CDDL, which + ; poses a problem for testing. We need to be able to generate random valid data + ; for testing implementation of our encoders/decoders. Which means we cannot use + ; the actual definition here and we hard code the value to 1/2 + + +nonnegative_interval = #6.30([uint, positive_int]) + + +address = + h'001000000000000000000000000000000000000000000000000000000011000000000000000000000000000000000000000000000000000000' / + h'102000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000' / + h'203000000000000000000000000000000000000000000000000000000033000000000000000000000000000000000000000000000000000000' / + h'304000000000000000000000000000000000000000000000000000000044000000000000000000000000000000000000000000000000000000' / + h'405000000000000000000000000000000000000000000000000000000087680203' / + h'506000000000000000000000000000000000000000000000000000000087680203' / + h'6070000000000000000000000000000000000000000000000000000000' / + h'7080000000000000000000000000000000000000000000000000000000' + +reward_account = + h'E090000000000000000000000000000000000000000000000000000000' / + h'F0A0000000000000000000000000000000000000000000000000000000' + +bounded_bytes = bytes .size (0..64) + ; the real bounded_bytes does not have this limit. it instead has a different + ; limit which cannot be expressed in CDDL. 
+ ; The limit is as follows: + ; - bytes with a definite-length encoding are limited to size 0..64 + ; - for bytes with an indefinite-length CBOR encoding, each chunk is + ; limited to size 0..64 + ; ( reminder: in CBOR, the indefinite-length encoding of bytestrings + ; consists of a token #2.31 followed by a sequence of definite-length + ; encoded bytestrings and a stop code ) + +; a type for distinct values. +; The type parameter must support .size, for example: bytes or uint +distinct = a .size 8 / a .size 16 / a .size 20 / a .size 24 / a .size 30 / a .size 32 + + +; fetched 19 mar 2024 + +block = + [ header + , transaction_bodies : [* transaction_body] + , transaction_witness_sets : [* transaction_witness_set] + , auxiliary_data_set : {* transaction_index => auxiliary_data } + , invalid_transactions : [* transaction_index ] + ]; Valid blocks must also satisfy the following two constraints: + ; 1) the length of transaction_bodies and transaction_witness_sets + ; must be the same + ; 2) every transaction_index must be strictly smaller than the + ; length of transaction_bodies + +transaction = + [ transaction_body + , transaction_witness_set + , bool + , auxiliary_data / null + ] + +transaction_index = uint .size 2 + +header = + [ header_body + , body_signature : $kes_signature + ] + +header_body = + [ block_number : uint + , slot : uint + , prev_hash : $hash32 / null + , issuer_vkey : $vkey + , vrf_vkey : $vrf_vkey + , vrf_result : $vrf_cert ; replaces nonce_vrf and leader_vrf + , block_body_size : uint + , block_body_hash : $hash32 ; merkle triple root + , operational_cert + , protocol_version + ] + +operational_cert = + [ hot_vkey : $kes_vkey + , sequence_number : uint + , kes_period : uint + , sigma : $signature + ] + +next_major_protocol_version = 10 + +major_protocol_version = 1..next_major_protocol_version + +protocol_version = [(major_protocol_version, uint)] + +transaction_body = + { 0 : set ; inputs + , 1 : [* transaction_output] + , 2 : coin ; fee + , ? 
3 : uint ; time to live + , ? 4 : certificates + , ? 5 : withdrawals + , ? 7 : auxiliary_data_hash + , ? 8 : uint ; validity interval start + , ? 9 : mint + , ? 11 : script_data_hash + , ? 13 : nonempty_set ; collateral inputs + , ? 14 : required_signers + , ? 15 : network_id + , ? 16 : transaction_output ; collateral return + , ? 17 : coin ; total collateral + , ? 18 : nonempty_set ; reference inputs + , ? 19 : voting_procedures ; New; Voting procedures + , ? 20 : proposal_procedures ; New; Proposal procedures + , ? 21 : coin ; New; current treasury value + , ? 22 : positive_coin ; New; donation + } + +voting_procedures = { + voter => { + gov_action_id => voting_procedure } } + +voting_procedure = + [ vote + , anchor / null + ] + +proposal_procedure = + [ deposit : coin + , reward_account + , gov_action + , anchor + ] + +proposal_procedures = nonempty_oset + +certificates = nonempty_oset + +gov_action = + [ parameter_change_action + // hard_fork_initiation_action + // treasury_withdrawals_action + // no_confidence + // update_committee + // new_constitution + // info_action + ] + +policy_hash = scripthash + +parameter_change_action = (0, gov_action_id / null, protocol_param_update, policy_hash / null) + +hard_fork_initiation_action = (1, gov_action_id / null, protocol_version) + +treasury_withdrawals_action = (2, { reward_account => coin }, policy_hash / null) + +no_confidence = (3, gov_action_id / null) + +update_committee = (4, gov_action_id / null, set, { committee_cold_credential => epoch }, unit_interval) + +new_constitution = (5, gov_action_id / null, constitution) + +constitution = + [ anchor + , scripthash / null + ] + +info_action = 6 + +; Constitutional Committee Hot KeyHash: 0 +; Constitutional Committee Hot ScriptHash: 1 +; DRep KeyHash: 2 +; DRep ScriptHash: 3 +; StakingPool KeyHash: 4 +voter = + [ 0, addr_keyhash + // 1, scripthash + // 2, addr_keyhash + // 3, scripthash + // 4, addr_keyhash + ] + +anchor = + [ anchor_url : url + , anchor_data_hash : 
$hash32 + ] + +; no - 0 +; yes - 1 +; abstain - 2 +vote = 0 .. 2 + +gov_action_id = + [ transaction_id : $hash32 + , gov_action_index : uint + ] + +required_signers = nonempty_set + +transaction_input = [ transaction_id : $hash32 + , index : uint + ] + +; Both of the Alonzo and Babbage style TxOut formats are equally valid +; and can be used interchangeably +transaction_output = pre_babbage_transaction_output / post_alonzo_transaction_output + +pre_babbage_transaction_output = + [ address + , amount : value + , ? datum_hash : $hash32 + ] + +post_alonzo_transaction_output = + { 0 : address + , 1 : value + , ? 2 : datum_option ; datum option + , ? 3 : script_ref ; script reference + } + +script_data_hash = $hash32 +; This is a hash of data which may affect evaluation of a script. +; This data consists of: +; - The redeemers from the transaction_witness_set (the value of field 5). +; - The datums from the transaction_witness_set (the value of field 4). +; - The value in the costmdls map corresponding to the script's language +; (in field 18 of protocol_param_update.) +; (In the future it may contain additional protocol parameters.) +; +; Since this data does not exist in contiguous form inside a transaction, it needs +; to be independently constructed by each recipient. +; +; The bytestring which is hashed is the concatenation of three things: +; redeemers || datums || language views +; The redeemers are exactly the data present in the transaction witness set. +; Similarly for the datums, if present. If no datums are provided, the middle +; field is omitted (i.e. it is the empty/null bytestring). +; +; language views CDDL: +; { * language => script_integrity_data } +; +; This must be encoded canonically, using the same scheme as in +; RFC7049 section 3.9: +; - Maps, strings, and bytestrings must use a definite-length encoding +; - Integers must be as small as possible. 
+; - The expressions for map length, string length, and bytestring length +; must be as short as possible. +; - The keys in the map must be sorted as follows: +; - If two keys have different lengths, the shorter one sorts earlier. +; - If two keys have the same length, the one with the lower value +; in (byte-wise) lexical order sorts earlier. +; +; For PlutusV1 (language id 0), the language view is the following: +; - the value of costmdls map at key 0 (in other words, the script_integrity_data) +; is encoded as an indefinite length list and the result is encoded as a bytestring. +; (our apologies) +; For example, the script_integrity_data corresponding to the all zero costmodel for V1 +; would be encoded as (in hex): +; 58a89f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ff +; - the language ID tag is also encoded twice. first as a uint then as +; a bytestring. (our apologies) +; Concretely, this means that the language version for V1 is encoded as +; 4100 in hex. +; For PlutusV2 (language id 1), the language view is the following: +; - the value of costmdls map at key 1 is encoded as an definite length list. +; For example, the script_integrity_data corresponding to the all zero costmodel for V2 +; would be encoded as (in hex): +; 98af0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 +; - the language ID tag is encoded as expected. +; Concretely, this means that the language version for V2 is encoded as +; 01 in hex. 
+; For PlutusV3 (language id 2), the language view is the following: +; - the value of costmdls map at key 2 is encoded as a definite length list. +; +; Note that each Plutus language represented inside a transaction must have +; a cost model in the costmdls protocol parameter in order to execute, +; regardless of what the script integrity data is. +; +; Finally, note that in the case that a transaction includes datums but does not +; include the redeemers field, the script data format becomes (in hex): +; [ 80 | datums | A0 ] +; corresponding to a CBOR empty list and an empty map. +; Note that a transaction might include the redeemers field and it to the +; empty map, in which case the user supplied encoding of the empty map is used. + +; address = bytes +; reward_account = bytes + +; address format: +; [ 8 bit header | payload ]; +; +; shelley payment addresses: +; bit 7: 0 +; bit 6: base/other +; bit 5: pointer/enterprise [for base: stake cred is keyhash/scripthash] +; bit 4: payment cred is keyhash/scripthash +; bits 3-0: network id +; +; reward addresses: +; bits 7-5: 111 +; bit 4: credential is keyhash/scripthash +; bits 3-0: network id +; +; byron addresses: +; bits 7-4: 1000 + +; 0000: base address: keyhash28,keyhash28 +; 0001: base address: scripthash28,keyhash28 +; 0010: base address: keyhash28,scripthash28 +; 0011: base address: scripthash28,scripthash28 +; 0100: pointer address: keyhash28, 3 variable length uint +; 0101: pointer address: scripthash28, 3 variable length uint +; 0110: enterprise address: keyhash28 +; 0111: enterprise address: scripthash28 +; 1000: byron address +; 1110: reward account: keyhash28 +; 1111: reward account: scripthash28 +; 1001 - 1101: future formats + +certificate = + [ stake_registration + // stake_deregistration + // stake_delegation + // pool_registration + // pool_retirement + // reg_cert + // unreg_cert + // vote_deleg_cert + // stake_vote_deleg_cert + // stake_reg_deleg_cert + // vote_reg_deleg_cert + // 
stake_vote_reg_deleg_cert + // auth_committee_hot_cert + // resign_committee_cold_cert + // reg_drep_cert + // unreg_drep_cert + // update_drep_cert + ] + +stake_registration = (0, stake_credential) ; to be deprecated in era after Conway +stake_deregistration = (1, stake_credential) ; to be deprecated in era after Conway +stake_delegation = (2, stake_credential, pool_keyhash) + +; POOL +pool_registration = (3, pool_params) +pool_retirement = (4, pool_keyhash, epoch) + +; numbers 5 and 6 used to be the Genesis and MIR certificates respectively, +; which were deprecated in Conway + +; DELEG +reg_cert = (7, stake_credential, coin) +unreg_cert = (8, stake_credential, coin) +vote_deleg_cert = (9, stake_credential, drep) +stake_vote_deleg_cert = (10, stake_credential, pool_keyhash, drep) +stake_reg_deleg_cert = (11, stake_credential, pool_keyhash, coin) +vote_reg_deleg_cert = (12, stake_credential, drep, coin) +stake_vote_reg_deleg_cert = (13, stake_credential, pool_keyhash, drep, coin) + +; GOVCERT +auth_committee_hot_cert = (14, committee_cold_credential, committee_hot_credential) +resign_committee_cold_cert = (15, committee_cold_credential, anchor / null) +reg_drep_cert = (16, drep_credential, coin, anchor / null) +unreg_drep_cert = (17, drep_credential, coin) +update_drep_cert = (18, drep_credential, anchor / null) + + +delta_coin = int + +credential = + [ 0, addr_keyhash + // 1, scripthash + ] + +drep = + [ 0, addr_keyhash + // 1, scripthash + // 2 ; always abstain + // 3 ; always no confidence + ] + +stake_credential = credential +drep_credential = credential +committee_cold_credential = credential +committee_hot_credential = credential + +pool_params = ( operator: pool_keyhash + , vrf_keyhash: vrf_keyhash + , pledge: coin + , cost: coin + , margin: unit_interval + , reward_account: reward_account + , pool_owners: set + , relays: [* relay] + , pool_metadata: pool_metadata / null + ) + +port = uint .le 65535 +ipv4 = bytes .size 4 +ipv6 = bytes .size 16 +dns_name = 
tstr .size (0..128) + +single_host_addr = ( 0 + , port / null + , ipv4 / null + , ipv6 / null + ) +single_host_name = ( 1 + , port / null + , dns_name ; An A or AAAA DNS record + ) +multi_host_name = ( 2 + , dns_name ; A SRV DNS record + ) +relay = + [ single_host_addr + // single_host_name + // multi_host_name + ] + +pool_metadata = [url, pool_metadata_hash] +url = tstr .size (0..128) + +withdrawals = { + reward_account => coin } + +protocol_param_update = + { ? 0: coin ; minfee A + , ? 1: coin ; minfee B + , ? 2: uint ; max block body size + , ? 3: uint ; max transaction size + , ? 4: uint ; max block header size + , ? 5: coin ; key deposit + , ? 6: coin ; pool deposit + , ? 7: epoch ; maximum epoch + , ? 8: uint ; n_opt: desired number of stake pools + , ? 9: nonnegative_interval ; pool pledge influence + , ? 10: unit_interval ; expansion rate + , ? 11: unit_interval ; treasury growth rate + , ? 16: coin ; min pool cost + , ? 17: coin ; ada per utxo byte + , ? 18: costmdls ; cost models for script languages + , ? 19: ex_unit_prices ; execution costs + , ? 20: ex_units ; max tx ex units + , ? 21: ex_units ; max block ex units + , ? 22: uint ; max value size + , ? 23: uint ; collateral percentage + , ? 24: uint ; max collateral inputs + , ? 25: pool_voting_thresholds ; pool voting thresholds + , ? 26: drep_voting_thresholds ; DRep voting thresholds + , ? 27: uint ; min committee size + , ? 28: epoch ; committee term limit + , ? 29: epoch ; governance action validity period + , ? 30: coin ; governance action deposit + , ? 31: coin ; DRep deposit + , ? 32: epoch ; DRep inactivity period + , ? 
33: nonnegative_interval ; MinFee RefScriptCostPerByte + } + +pool_voting_thresholds = + [ unit_interval ; motion no confidence + , unit_interval ; committee normal + , unit_interval ; committee no confidence + , unit_interval ; hard fork initiation + , unit_interval ; security relevant parameter voting threshold + ] + +drep_voting_thresholds = + [ unit_interval ; motion no confidence + , unit_interval ; committee normal + , unit_interval ; committee no confidence + , unit_interval ; update constitution + , unit_interval ; hard fork initiation + , unit_interval ; PP network group + , unit_interval ; PP economic group + , unit_interval ; PP technical group + , unit_interval ; PP governance group + , unit_interval ; treasury withdrawal + ] + +transaction_witness_set = + { ? 0: nonempty_set + , ? 1: nonempty_set + , ? 2: nonempty_set + , ? 3: nonempty_set + , ? 4: nonempty_set + , ? 5: redeemers + , ? 6: nonempty_set + , ? 7: nonempty_set + } + +; The real type of plutus_v1_script, plutus_v2_script and plutus_v3_script is bytes. +; However, because we enforce uniqueness when many scripts are supplied, +; we need to hack around for tests in order to avoid generating duplicates, +; since the cddl tool we use for roundtrip testing doesn't generate distinct collections. +plutus_v1_script = distinct +plutus_v2_script = distinct +plutus_v3_script = distinct + +plutus_data = + constr + / { * plutus_data => plutus_data } + / [ * plutus_data ] + / big_int + / bounded_bytes + +big_int = int / big_uint / big_nint +big_uint = #6.2(bounded_bytes) +big_nint = #6.3(bounded_bytes) + +constr = + #6.121([* a]) + / #6.122([* a]) + / #6.123([* a]) + / #6.124([* a]) + / #6.125([* a]) + / #6.126([* a]) + / #6.127([* a]) + ; similarly for tag range: 6.1280 .. 6.1400 inclusive + / #6.102([uint, [* a]]) + +; Flat Array support is included for backwards compatibility and will be removed in the next era. +; It is recommended for tools to adopt using a Map instead of Array going forward. 
+redeemers = + [ + [ tag: redeemer_tag, index: uint, data: plutus_data, ex_units: ex_units ] ] + / { + [ tag: redeemer_tag, index: uint ] => [ data: plutus_data, ex_units: ex_units ] } + +redeemer_tag = + 0 ; Spending + / 1 ; Minting + / 2 ; Certifying + / 3 ; Rewarding + / 4 ; Voting + / 5 ; Proposing + +ex_units = [mem: uint, steps: uint] + +ex_unit_prices = + [ mem_price: nonnegative_interval, step_price: nonnegative_interval ] + +language = 0 ; Plutus v1 + / 1 ; Plutus v2 + / 2 ; Plutus v3 + +potential_languages = 0 .. 255 + +; The format for costmdls is flexible enough to allow adding Plutus built-ins and language +; versions in the future. +; +costmdls = + { ? 0 : [ 166* int ] ; Plutus v1, only 166 integers are used, but more are accepted (and ignored) + , ? 1 : [ 175* int ] ; Plutus v2, only 175 integers are used, but more are accepted (and ignored) + , ? 2 : [ 233* int ] ; Plutus v3, only 233 integers are used, but more are accepted (and ignored) + , ? 3 : [ int ] ; Any 8-bit unsigned number can be used as a key. + } + +transaction_metadatum = + { * transaction_metadatum => transaction_metadatum } + / [ * transaction_metadatum ] + / int + / bytes .size (0..64) + / text .size (0..64) + +transaction_metadatum_label = uint +metadata = { * transaction_metadatum_label => transaction_metadatum } + +auxiliary_data = + metadata ; Shelley + / [ transaction_metadata: metadata ; Shelley-ma + , auxiliary_scripts: [ * native_script ] + ] + / #6.259({ ? 0 => metadata ; Alonzo and beyond + , ? 1 => [ * native_script ] + , ? 2 => [ * plutus_v1_script ] + , ? 3 => [ * plutus_v2_script ] + , ? 4 => [ * plutus_v3_script ] + }) + +vkeywitness = [ $vkey, $signature ] + +bootstrap_witness = + [ public_key : $vkey + , signature : $signature + , chain_code : bytes .size 32 + , attributes : bytes + ] + +native_script = + [ script_pubkey + // script_all + // script_any + // script_n_of_k + // invalid_before + ; Timelock validity intervals are half-open intervals [a, b). 
+ ; This field specifies the left (included) endpoint a. + // invalid_hereafter + ; Timelock validity intervals are half-open intervals [a, b). + ; This field specifies the right (excluded) endpoint b. + ] + +script_pubkey = (0, addr_keyhash) +script_all = (1, [ * native_script ]) +script_any = (2, [ * native_script ]) +script_n_of_k = (3, n: uint, [ * native_script ]) +invalid_before = (4, uint) +invalid_hereafter = (5, uint) + +coin = uint + +multiasset = { + policy_id => { + asset_name => a } } +policy_id = scripthash +asset_name = bytes .size (0..32) + +negInt64 = -9223372036854775808 .. -1 +posInt64 = 1 .. 9223372036854775807 +nonZeroInt64 = negInt64 / posInt64 ; this is the same as the current int64 definition but without zero + +positive_coin = 1 .. 18446744073709551615 + +value = coin / [coin, multiasset] + +mint = multiasset + +int64 = -9223372036854775808 .. 9223372036854775807 + +network_id = 0 / 1 + +epoch = uint + +addr_keyhash = $hash28 +pool_keyhash = $hash28 + +vrf_keyhash = $hash32 +auxiliary_data_hash = $hash32 +pool_metadata_hash = $hash32 + +; To compute a script hash, note that you must prepend +; a tag to the bytes of the script before hashing. +; The tag is determined by the language. 
+; The tags in the Conway era are: +; "\x00" for multisig scripts +; "\x01" for Plutus V1 scripts +; "\x02" for Plutus V2 scripts +; "\x03" for Plutus V3 scripts +scripthash = $hash28 + +datum_hash = $hash32 +data = #6.24(bytes .cbor plutus_data) + +datum_option = [ 0, $hash32 // 1, data ] + +script_ref = #6.24(bytes .cbor script) + +script = [ 0, native_script // 1, plutus_v1_script // 2, plutus_v2_script // 3, plutus_v3_script ] \ No newline at end of file diff --git a/pallas-ledger/cddls/example.ast b/pallas-ledger/cddls/example.ast new file mode 100644 index 00000000..a839941b --- /dev/null +++ b/pallas-ledger/cddls/example.ast @@ -0,0 +1,1020 @@ + }, + operator: None, + span: ( + 22893, + 22909, + 674, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 22893, + 22909, + 674, + ), + }, + }, + span: ( + 22893, + 22909, + 674, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: false, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ], + span: ( + 22890, + 22909, + 674, + ), + comments_before_grpchoice: None, + }, + ], + span: ( + 22824, + 22909, + 674, + ), + }, + span: ( + 22822, + 22911, + 674, + ), + comments_before_group: None, + comments_after_group: None, + }, + operator: None, + span: ( + 22822, + 22911, + 674, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 22822, + 22911, + 674, + ), + }, + comments_before_assignt: None, + comments_after_assignt: None, + }, + span: ( + 22813, + 22911, + 674, + ), + comments_after_rule: None, + }, + ], + comments: Some( + Comments( + [ + " Conway era introduces an optional 258 tag for sets, which will become mandatory in the", + " second era after Conway. 
We recommend all the tooling to account for this future breaking", + " change sooner rather than later, in order to provide a smooth transition for their users.", + " This is an unordered set. Duplicate elements are not allowed and the order of elements is implementation specific.", + ], + ), + ), +} +➜ pallas-ledger git:(main) ✗ cargo run + Blocking waiting for file lock on build directory +warning: unused import: `convert_case::Case` + --> pallas-ledger/build.rs:3:5 + | +3 | use convert_case::Case; + | ^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default + +warning: unused import: `convert_case::Casing as _` + --> pallas-ledger/build.rs:4:5 + | +4 | use convert_case::Casing as _; + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + +warning: unused variable: `out_dir` + --> pallas-ledger/build.rs:22:9 + | +22 | let out_dir = env::var("OUT_DIR")?; + | ^^^^^^^ help: if this is intentional, prefix it with an underscore: `_out_dir` + | + = note: `#[warn(unused_variables)]` on by default + +warning: `pallas-ledger` (build script) generated 3 warnings + Compiling pallas-ledger v0.1.0 (/Users/santiago_ho/Code/txpipe/pallas/pallas-ledger) + Finished `dev` profile [unoptimized + debuginfo] target(s) in 1.69s + Running `/Users/santiago_ho/Code/txpipe/pallas/target/debug/pallas-ledger` +error: parser errors + ┌─ input:11:5 + │ +11 │ , operational_cert + │ ^^^^^^^^^^^^^^^^ missing definition for rule operational_cert +12 │ , protocol_version + │ ^^^^^^^^^^^^^^^^ missing definition for rule protocol_version + +Error: "incremental parsing error" +➜ pallas-ledger git:(main) ✗ cargo run +warning: unused import: `convert_case::Case` + --> pallas-ledger/build.rs:3:5 + | +3 | use convert_case::Case; + | ^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default + +warning: unused import: `convert_case::Casing as _` + --> pallas-ledger/build.rs:4:5 + | +4 | use convert_case::Casing as _; + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + +warning: unused variable: `out_dir` + --> 
pallas-ledger/build.rs:22:9 + | +22 | let out_dir = env::var("OUT_DIR")?; + | ^^^^^^^ help: if this is intentional, prefix it with an underscore: `_out_dir` + | + = note: `#[warn(unused_variables)]` on by default + +warning: `pallas-ledger` (build script) generated 3 warnings + Compiling pallas-ledger v0.1.0 (/Users/santiago_ho/Code/txpipe/pallas/pallas-ledger) + Finished `dev` profile [unoptimized + debuginfo] target(s) in 1.61s + Running `/Users/santiago_ho/Code/txpipe/pallas/target/debug/pallas-ledger` +[pallas-ledger/src/main.rs:13:5] ast = CDDL { + rules: [ + Type { + rule: TypeRule { + name: Identifier { + ident: "header_body", + socket: None, + span: ( + 1, + 12, + 2, + ), + }, + generic_params: None, + is_type_choice_alternate: false, + value: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Array { + group: Group { + group_choices: [ + GroupChoice { + group_entries: [ + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "block_number", + socket: None, + span: ( + 19, + 31, + 3, + ), + }, + span: ( + 19, + 37, + 3, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 38, + 42, + 3, + ), + }, + generic_args: None, + span: ( + 38, + 42, + 3, + ), + }, + operator: None, + span: ( + 38, + 42, + 3, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 38, + 42, + 3, + ), + }, + }, + span: ( + 19, + 46, + 3, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "slot", + socket: None, + span: ( + 47, + 
51, + 4, + ), + }, + span: ( + 47, + 65, + 4, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 66, + 70, + 4, + ), + }, + generic_args: None, + span: ( + 66, + 70, + 4, + ), + }, + operator: None, + span: ( + 66, + 70, + 4, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 66, + 70, + 4, + ), + }, + }, + span: ( + 47, + 74, + 4, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "prev_hash", + socket: None, + span: ( + 75, + 84, + 5, + ), + }, + span: ( + 75, + 93, + 5, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 94, + 98, + 5, + ), + }, + generic_args: None, + span: ( + 94, + 98, + 5, + ), + }, + operator: None, + span: ( + 94, + 98, + 5, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 94, + 98, + 5, + ), + }, + }, + span: ( + 75, + 102, + 5, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "issuer_vkey", + socket: None, + span: ( + 103, + 114, + 6, + ), + }, + span: ( + 103, + 121, + 6, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice 
{ + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 122, + 126, + 6, + ), + }, + generic_args: None, + span: ( + 122, + 126, + 6, + ), + }, + operator: None, + span: ( + 122, + 126, + 6, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 122, + 126, + 6, + ), + }, + }, + span: ( + 103, + 130, + 6, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "vrf_vkey", + socket: None, + span: ( + 131, + 139, + 7, + ), + }, + span: ( + 131, + 149, + 7, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 150, + 154, + 7, + ), + }, + generic_args: None, + span: ( + 150, + 154, + 7, + ), + }, + operator: None, + span: ( + 150, + 154, + 7, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 150, + 154, + 7, + ), + }, + }, + span: ( + 131, + 158, + 7, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "vrf_result", + socket: None, + span: ( + 159, + 169, + 8, + ), + }, + span: ( + 159, + 177, + 8, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 178, + 182, + 8, + ), + }, + 
generic_args: None, + span: ( + 178, + 182, + 8, + ), + }, + operator: None, + span: ( + 178, + 182, + 8, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 178, + 182, + 8, + ), + }, + }, + span: ( + 159, + 186, + 8, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "block_body_size", + socket: None, + span: ( + 187, + 202, + 9, + ), + }, + span: ( + 187, + 205, + 9, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 206, + 210, + 9, + ), + }, + generic_args: None, + span: ( + 206, + 210, + 9, + ), + }, + operator: None, + span: ( + 206, + 210, + 9, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 206, + 210, + 9, + ), + }, + }, + span: ( + 187, + 214, + 9, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "block_body_hash", + socket: None, + span: ( + 215, + 230, + 10, + ), + }, + span: ( + 215, + 233, + 10, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 234, + 238, + 10, + ), + }, + generic_args: None, + span: ( + 234, + 238, + 10, + ), + }, + operator: None, + span: ( + 234, + 238, + 10, + ), + 
comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 234, + 238, + 10, + ), + }, + }, + span: ( + 215, + 242, + 10, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "operational_cert", + socket: None, + span: ( + 243, + 259, + 11, + ), + }, + span: ( + 243, + 261, + 11, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 262, + 266, + 11, + ), + }, + generic_args: None, + span: ( + 262, + 266, + 11, + ), + }, + operator: None, + span: ( + 262, + 266, + 11, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 262, + 266, + 11, + ), + }, + }, + span: ( + 243, + 270, + 11, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: true, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ( + ValueMemberKey { + ge: ValueMemberKeyEntry { + occur: None, + member_key: Some( + Bareword { + ident: Identifier { + ident: "protocol_version", + socket: None, + span: ( + 271, + 287, + 12, + ), + }, + span: ( + 271, + 289, + 12, + ), + comments: None, + comments_after_colon: None, + }, + ), + entry_type: Type { + type_choices: [ + TypeChoice { + type1: Type1 { + type2: Typename { + ident: Identifier { + ident: "uint", + socket: None, + span: ( + 290, + 294, + 12, + ), + }, + generic_args: None, + span: ( + 290, + 294, + 12, + ), + }, + operator: None, + span: ( + 290, + 294, + 12, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( 
+ 290, + 294, + 12, + ), + }, + }, + span: ( + 271, + 294, + 12, + ), + leading_comments: None, + trailing_comments: None, + }, + OptionalComma { + optional_comma: false, + trailing_comments: None, + _a: PhantomData<&()>, + }, + ), + ], + span: ( + 19, + 294, + 3, + ), + comments_before_grpchoice: None, + }, + ], + span: ( + 19, + 294, + 3, + ), + }, + span: ( + 17, + 298, + 3, + ), + comments_before_group: None, + comments_after_group: None, + }, + operator: None, + span: ( + 17, + 298, + 3, + ), + comments_after_type: None, + }, + comments_before_type: None, + comments_after_type: None, + }, + ], + span: ( + 17, + 298, + 3, + ), + }, + comments_before_assignt: None, + comments_after_assignt: None, + }, + span: ( + 1, + 298, + 2, + ), + comments_after_rule: None, + }, + ], + comments: None, +} +➜ pallas-ledger git:(main) ✗ \ No newline at end of file diff --git a/pallas-ledger/cddls/example.cddl b/pallas-ledger/cddls/example.cddl new file mode 100644 index 00000000..57e09386 --- /dev/null +++ b/pallas-ledger/cddls/example.cddl @@ -0,0 +1,15 @@ + +header_body = + [ block_number : uint + , slot : uint + , prev_hash : uint + , issuer_vkey : uint + , vrf_vkey : uint + , vrf_result : uint + , block_body_size : uint + , block_body_hash : uint + , operational_cert : uint + , protocol_version : uint + ] + + diff --git a/pallas-ledger/generated.rs b/pallas-ledger/generated.rs new file mode 100644 index 00000000..7b007f45 --- /dev/null +++ b/pallas-ledger/generated.rs @@ -0,0 +1,13 @@ +#[derive(Debug, Clone, PartialEq)] +pub struct HeaderBody { + pub block_number: u64, + pub slot: u64, + pub prev_hash: u64, + pub issuer_vkey: u64, + pub vrf_vkey: u64, + pub vrf_result: u64, + pub block_body_size: u64, + pub block_body_hash: u64, + pub operational_cert: u64, + pub protocol_version: u64, +} diff --git a/pallas-ledger/src/lib.rs b/pallas-ledger/src/lib.rs new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/pallas-ledger/src/lib.rs @@ -0,0 +1 @@ +