From a21236e7e0d1757ac20a08d70ca584f15485b905 Mon Sep 17 00:00:00 2001 From: Julien R Date: Mon, 18 Sep 2023 11:45:09 -0400 Subject: [PATCH] Docs update (#308) * Documentation update * Updated protobuf dev url --- docs/concepts-and-fundamentals/benefits.md | 38 +++++++++---------- .../concepts-and-fundamentals/fundamentals.md | 22 +++++------ .../cookbook/advanced-params.md | 5 ++- .../creating-protobuf-schemas.md | 22 +++++++---- docs/developers-guide/parallel-execution.md | 2 +- .../substreams-powered-subgraph.md | 2 +- .../sink-targets/substreams-sink-files.md | 12 +++--- docs/reference-and-specs/manifests.md | 4 +- .../map_block_meta_module.md | 7 ++-- docs/tutorials/rust/result.md | 18 +++++---- 10 files changed, 73 insertions(+), 59 deletions(-) diff --git a/docs/concepts-and-fundamentals/benefits.md b/docs/concepts-and-fundamentals/benefits.md index 326e9a193..a98dcf3c3 100644 --- a/docs/concepts-and-fundamentals/benefits.md +++ b/docs/concepts-and-fundamentals/benefits.md @@ -6,24 +6,24 @@ description: StreamingFast Substreams benefits and comparisons ## Important Substreams facts include: -* It provides a streaming-first system based on gRPC, protobuf, and the StreamingFast Firehose. -* It supports a highly cacheable and parallelizable remote code execution framework. -* It enables the community to build higher-order modules that are composable down to individual modules. -* Deterministic blockchain data is fed to Substreams, **making it deterministic**. -* It is **not** a relational database. -* It is **not** a REST service. -* It is **not** concerned directly about how data is queried. -* It is **not** a general-purpose non-deterministic event stream processor. +- It provides a streaming-first system based on gRPC, protobuf, and the StreamingFast Firehose. +- It supports a highly cacheable and parallelizable remote code execution framework. +- It enables the community to build higher-order modules that are composable down to individual modules. 
+- Deterministic blockchain data is fed to Substreams, **making it deterministic**. +- It is **not** a relational database. +- It is **not** a REST service. +- It is **not** concerned directly about how data is queried. +- It is **not** a general-purpose non-deterministic event stream processor. ### Substreams offers several benefits including: -* The ability to store and process blockchain data using advanced parallelization techniques, making the processed data available for various types of data stores or real-time systems. -* A streaming-first approach that inherits low latency extraction from [StreamingFast Firehose](https://firehose.streamingfast.io/). -* The ability to save time and money by horizontally scaling and increasing efficiency by reducing processing time and wait time. -* The ability for communities to [combine Substreams modules](../developers-guide/modules/) to form compounding levels of data richness and refinement. -* The use of [protobufs for data modeling and integration](../developers-guide/creating-protobuf-schemas.md) in a variety of programming languages. -* The use of the Rust programming language and a wide array of third-party libraries compilable with WASM to manipulate blockchain data on-the-fly. -* Inspiration from conventional large-scale data systems fused into the novelties of blockchain technology. +- The ability to store and process blockchain data using advanced parallelization techniques, making the processed data available for various types of data stores or real-time systems. +- A streaming-first approach that inherits low latency extraction from [StreamingFast Firehose](https://firehose.streamingfast.io/). +- The ability to save time and money by horizontally scaling and increasing efficiency by reducing processing time and wait time. +- The ability for communities to [combine Substreams modules](../developers-guide/modules/) to form compounding levels of data richness and refinement. 
+- The use of [protobufs for data modeling and integration](../developers-guide/creating-protobuf-schemas.md) in a variety of programming languages. +- The use of the Rust programming language and a wide array of third-party libraries compilable with WASM to manipulate blockchain data on-the-fly. +- Inspiration from conventional large-scale data systems fused into the novelties of blockchain technology. ### **Other features** @@ -49,16 +49,16 @@ Substreams is a streaming engine similar to [Fluvio](https://www.fluvio.io/), [K #### Substreams & Subgraphs -A lot of questions arise around Substreams and Subgraphs as they are both part of The Graph ecosystem. Substreams has been created by StreamingFast team, the first core developers teams outside of Edge & Node, the founding team of The Graph. It was created in response to different use cases especially around analytics and big data that couldn't be served by Subgraph due to its current programming model. Here some of the key points for which Substreams were created: +A lot of questions arise around Substreams and Subgraphs as they are both part of The Graph ecosystem. Substreams has been created by the StreamingFast team, the first core developer teams outside of Edge & Node, the founding team of The Graph. It was created in response to different use cases especially around analytics and big data that couldn't be served by Subgraph due to its current programming model. Here are some of the key points for which Substreams were created: - Offer a streaming-first approach to consuming/transforming blockchain's data - Offer a highly parallelizable yet simple model to consume/transform blockchain's data - Offer a composable system where you can depend on building blocks offered by the community - Offer rich block model -While they share similar ideas around blockchain's transformation/processing and they are both part of The Graph ecosystem, both can be viewed as independent technology that are unrelated to each other. 
One cannot take a Subgraph's code and run it on Substreams engine, they are incompatible. Here some of key differences: +While they share similar ideas around blockchain's transformation/processing and they are both part of The Graph ecosystem, both can be viewed as independent technologies that are unrelated to each other. One cannot take a Subgraph's code and run it on the Substreams engine, they are incompatible. Here are some of the key differences: -- You write your Substreams in Rust while Subgraph are written in AssemblyScript +- You write your Substreams in Rust while Subgraphs are written in AssemblyScript - Substreams are "stateless" request through gRPC while Subgraphs are persistent deployment - Substreams offers you the chain's specific full block while in Subgraph, you define "triggers" that will invoke your code - Substreams are consumed through a gRPC connection where you control the actual output message while Subgraphs are consumed through GraphQL @@ -67,4 +67,4 @@ While they share similar ideas around blockchain's transformation/processing and Substreams offer quite a different model when compared to Subgraph, just Rust alone is a big shift for someone used to write Subgraphs in AssemblyScript. Substreams is working a lot also with Protobuf models also. -One of the benefits of Substreams is that the persistent storage solution is not part of the technology directly, so you are free to use the database of your choice which enable a lot of analytics use cases that was not possible (or harder to implement) today using Subgraphs like persistent your transformed data to BigQuery or Clickhouse, Kafka, etc. Also, the live streaming feature of Substreams enables further use cases and super quick reactivity that will benefits a lot of user. \ No newline at end of file +One of the benefits of Substreams is that the persistent storage solution is not part of the technology directly, so you are free to use the database of your choice. 
This enables a lot of analytics use cases that were not possible (or harder to implement) today using Subgraphs, like persisting your transformed data to BigQuery or Clickhouse, Kafka, etc. Also, the live streaming feature of Substreams enables further use cases and super quick reactivity that will benefit a lot of users. diff --git a/docs/concepts-and-fundamentals/fundamentals.md b/docs/concepts-and-fundamentals/fundamentals.md index 95eb3d7a5..27415fb3d 100644 --- a/docs/concepts-and-fundamentals/fundamentals.md +++ b/docs/concepts-and-fundamentals/fundamentals.md @@ -12,13 +12,13 @@ Substreams development involves using several different pieces of technology, in ### The process to use Substreams includes: -* Choose the blockchain to capture and process data. -* Identify interesting smart contract addresses (like DEXs or interesting wallet addresses). -* Identify the data and defining and creating protobufs. -* Find already-built Substreams modules and consume their streams, or: -* Write Rust Substreams module handler functions. -* Update the Substreams manifest to reference the protobufs and module handlers. -* Use the [`substreams` CLI](../reference-and-specs/command-line-interface.md) to send commands and view results. +- Choose the blockchain to capture and process data. +- Identify interesting smart contract addresses (like DEXs or interesting wallet addresses). +- Identify the data and define and create protobufs. +- Find already-built Substreams modules and consume their streams, or: +- Write Rust Substreams module handler functions. +- Update the Substreams manifest to reference the protobufs and module handlers. +- Use the [`substreams` CLI](../reference-and-specs/command-line-interface.md) to send commands and view results. 
### **The Substreams engine** @@ -42,7 +42,7 @@ The data flow is [defined in the Substreams manifest](../reference-and-specs/man ### **Substreams DAG** -Substreams modules are composed through a [directed acyclic graph](https://en.wikipedia.org/wiki/Directed\_acyclic\_graph) (DAG). +Substreams modules are composed through a [directed acyclic graph](https://en.wikipedia.org/wiki/Directed_acyclic_graph) (DAG). {% hint style="info" %} **Note**: In DAGs, data flows from one module to another in a one-directional manner, with no cycle, similar to Git's model of commits and branches. @@ -60,8 +60,8 @@ The Substreams engine creates the "_compute graph_" or "_dependency graph_" at r [Protocol buffers or protobufs](https://developers.google.com/protocol-buffers) are the data models operated on by the[ Rust-based module handler functions](../developers-guide/modules/writing-module-handlers.md). They define and outline the data models in the protobufs. -* View the [`erc721.proto`](https://github.com/streamingfast/substreams-template/blob/develop/proto/erc721.proto) protobuf file in the [Substreams Template repository](https://github.com/streamingfast/substreams-template). -* View the Rust module handlers in the [`lib.rs`](https://github.com/streamingfast/substreams-template/blob/develop/src/lib.rs) file in the [Substreams Template repository](https://github.com/streamingfast/substreams-template). +- View the [`erc721.proto`](https://github.com/streamingfast/substreams-template/blob/develop/proto/erc721.proto) protobuf file in the [Substreams Template repository](https://github.com/streamingfast/substreams-template). +- View the Rust module handlers in the [`lib.rs`](https://github.com/streamingfast/substreams-template/blob/develop/src/lib.rs) file in the [Substreams Template repository](https://github.com/streamingfast/substreams-template). {% hint style="info" %} **Note**: Protobufs include the names of the data objects and the fields contained and accessible within them. 
@@ -69,7 +69,7 @@ The Substreams engine creates the "_compute graph_" or "_dependency graph_" at r Many protobuf definitions have already been created, such as [the erc721 token model](https://github.com/streamingfast/substreams-template/blob/develop/proto/erc721.proto), for use by developers creating Substreams data transformation strategies. -Custom smart contracts, [like UniSwap](https://github.com/streamingfast/substreams-uniswap-v3/blob/e4b0fb016210870a385484f29bb5116931ea9a50/proto/uniswap/v1/uniswap.proto), also have protobuf definitions that are referenced in the Substreams manifest and made available to module handler functions. Protobufs provide an API to the data for smart contract addresses. +Custom smart contracts, like [UniSwap](https://github.com/streamingfast/substreams-uniswap-v3/blob/e4b0fb016210870a385484f29bb5116931ea9a50/proto/uniswap/v1/uniswap.proto), also have protobuf definitions that are referenced in the Substreams manifest and made available to module handler functions. Protobufs provide an API to the data for smart contract addresses. In object-oriented programming terminology, protobufs are the objects or object models. In front-end web development, they are similar to REST or other data APIs. diff --git a/docs/developers-guide/cookbook/advanced-params.md b/docs/developers-guide/cookbook/advanced-params.md index 72faf6d13..f936fc52d 100644 --- a/docs/developers-guide/cookbook/advanced-params.md +++ b/docs/developers-guide/cookbook/advanced-params.md @@ -63,7 +63,7 @@ pub fn map_whale_transfers(params: String, block: Block) -> Result Result`](https://doc.rust-lang.org/rust-by-example/std/option.html). {% endhint %} -The Rust [`match`](https://doc.rust-lang.org/rust-by-example/flow\_control/match.html) keyword is used to compare the value of an [`Option`](https://doc.rust-lang.org/rust-by-example/std/option.html) to a [`Some`](https://doc.rust-lang.org/std/option/) or [`None`](https://doc.rust-lang.org/std/option/) variant. 
Handle a type wrapped [`Option`](https://doc.rust-lang.org/rust-by-example/std/option.html) in Rust by using: +The Rust [`match`](https://doc.rust-lang.org/rust-by-example/flow_control/match.html) keyword is used to compare the value of an [`Option`](https://doc.rust-lang.org/rust-by-example/std/option.html) to a [`Some`](https://doc.rust-lang.org/std/option/) or [`None`](https://doc.rust-lang.org/std/option/) variant. Handle a type wrapped [`Option`](https://doc.rust-lang.org/rust-by-example/std/option.html) in Rust by using: ```rust match person.Location { @@ -122,7 +128,7 @@ match person.Location { } ``` -If you are only interested in finding the presence of a value, use the [`if let`](https://doc.rust-lang.org/rust-by-example/flow\_control/if\_let.html) statement to handle the [`Some(x)`](https://doc.rust-lang.org/std/option/) arm of the [`match`](https://doc.rust-lang.org/rust-by-example/flow\_control/match.html) code. +If you are only interested in finding the presence of a value, use the [`if let`](https://doc.rust-lang.org/rust-by-example/flow_control/if_let.html) statement to handle the [`Some(x)`](https://doc.rust-lang.org/std/option/) arm of the [`match`](https://doc.rust-lang.org/rust-by-example/flow_control/match.html) code. ```rust if let Some(location) = person.location { @@ -130,7 +136,7 @@ if let Some(location) = person.location { } ``` -If a value is present, use the [`.unwrap()`](https://doc.rust-lang.org/rust-by-example/error/option\_unwrap.html) call on the [`Option`](https://doc.rust-lang.org/rust-by-example/std/option.html) to obtain the wrapped data. You'll need to account for these types of scenarios if you control the creation of the messages yourself or if the field is documented as always being present. +If a value is present, use the [`.unwrap()`](https://doc.rust-lang.org/rust-by-example/error/option_unwrap.html) call on the [`Option`](https://doc.rust-lang.org/rust-by-example/std/option.html) to obtain the wrapped data. 
You'll need to account for these types of scenarios if you control the creation of the messages yourself or if the field is documented as always being present. {% hint style="info" %} **Note**: You need to be **absolutely sure** **the field is always defined**, otherwise Substreams panics and never completes, getting stuck on a block indefinitely. diff --git a/docs/developers-guide/parallel-execution.md b/docs/developers-guide/parallel-execution.md index c0b120302..3c4630050 100644 --- a/docs/developers-guide/parallel-execution.md +++ b/docs/developers-guide/parallel-execution.md @@ -12,7 +12,7 @@ Parallel execution addresses the problem of the slow single linear execution of The server will define an execution schedule and take the module's dependencies into consideration. The server's execution schedule is a list of pairs of (`module, range`), where range contains `N` blocks. This is a configurable value set to 25K blocks, on the server. -The single map_transfer module will fulfill a request from 0 - 75,000. The server's execution plan returns the results of `[(map_transfer, 0 -> 24,999), (map_transfer, 25,000 -> 74,999), (map_transfer, 50,000 -> 74,999)]`. +The single map_transfer module will fulfill a request from 0 - 75,000. The server's execution plan returns the results of `[(map_transfer, 0 -> 24,999), (map_transfer, 25,000 -> 49,999), (map_transfer, 50,000 -> 74,999)]`. The three pairs will be simultaneously executed by the server handling caching of the output of the store. For stores, an additional step will combine the store keys across multiple segments producing a unified and linear view of the store's state. 
diff --git a/docs/developers-guide/sink-targets/substreams-powered-subgraph.md b/docs/developers-guide/sink-targets/substreams-powered-subgraph.md index 3fbee9001..e7127b592 100644 --- a/docs/developers-guide/sink-targets/substreams-powered-subgraph.md +++ b/docs/developers-guide/sink-targets/substreams-powered-subgraph.md @@ -2,6 +2,6 @@ Substreams-powered subgraph are the prime candidate for Substreams output. -See The Graph's documentation to roll out your: +See The Graph's documentation to roll out yours: [https://thegraph.com/docs/en/cookbook/substreams-powered-subgraphs/](https://thegraph.com/docs/en/cookbook/substreams-powered-subgraphs/) diff --git a/docs/developers-guide/sink-targets/substreams-sink-files.md b/docs/developers-guide/sink-targets/substreams-sink-files.md index 5165f90dd..b7b96c3b6 100644 --- a/docs/developers-guide/sink-targets/substreams-sink-files.md +++ b/docs/developers-guide/sink-targets/substreams-sink-files.md @@ -36,7 +36,7 @@ Extract `substreams-sink-files` into a folder and ensure this folder is referenc ### Accompanying code example -The accompanying code example for this tutorial is available in the `substreams-sink-files` repository. The Substreams project for the tutorial is located in the [docs/tutorial](https://github.com/streamingfast/substreams-sink-files/blob/develop/docs/tutorial) directory. +The accompanying code example for this tutorial is available in the `substreams-sink-files` repository. The Substreams project for the tutorial is located in the [docs/tutorial](https://github.com/streamingfast/substreams-sink-files/tree/master/docs/tutorial) directory. Run the included `make protogen` command to create the required Protobuf files. @@ -100,7 +100,7 @@ fn jsonl_out(block: eth::Block) -> Result { } ``` -This module handler uses `JSONL` for the output type, any other plain-text line-based format can be supported, `CSV` for example. 
The [`json!`](https://docs.rs/serde\_json/latest/serde\_json/macro.json.html) macro is used to write the block data to the Rust `Vec` type by using the Rust [`vec!`](https://doc.rust-lang.org/std/macro.vec.html) macro. +This module handler uses `JSONL` for the output type, any other plain-text line-based format can be supported, `CSV` for example. The [`json!`](https://docs.rs/serde_json/latest/serde_json/macro.json.html) macro is used to write the block data to the Rust `Vec` type by using the Rust [`vec!`](https://doc.rust-lang.org/std/macro.vec.html) macro. The example code is intentionally very basic. StreamingFast [provides a more robust and full example](https://github.com/streamingfast/substreams-eth-token-transfers/blob/develop/src/lib.rs#L24) demonstrating how to extract data related to transfers from Ethereum. A crucial aspect of working with Substreams and sinks is a significant amount of data can be extracted from a Block object. The data is extracted and packed into a row. The row is represented by the JSONL or CSV based Protobuf you're responsible for designing for your sink. @@ -108,9 +108,9 @@ The output type for sink is a list of lines. The line content can be any type an ### Core steps for Substreams sink modules -* Import sink `.spkg` files, re-generate Protobufs and create and add a mod.rs file. -* Create a map module outputting sf.substreams.sink.files.v1 format. This module extracts the entity to be written, one per block from the block or another module's dependencies. Each line will be in JSON format. You can use the json! macro from the [`serde_json`](https://docs.rs/serde\_json/latest/serde\_json) crate to assist creating your structure, one per line. -* Add the correct module definition to the Substreams manifest `substreams.yaml`. +- Import sink `.spkg` files, re-generate Protobufs and create and add a mod.rs file. +- Create a map module outputting sf.substreams.sink.files.v1 format. 
This module extracts the entity to be written, one per block from the block or another module's dependencies. Each line will be in JSON format. You can use the json! macro from the [`serde_json`](https://docs.rs/serde_json/latest/serde_json) crate to assist creating your structure, one per line. +- Add the correct module definition to the Substreams manifest `substreams.yaml`. ```yaml imports: @@ -137,9 +137,11 @@ modules: The command to start and run the `substreams-sink-files` tool for the accompanying Substreams project will resemble: {% code overflow="wrap" %} + ```bash substreams-sink-files run --encoder=lines --state-store=./output/state.yaml mainnet.eth.streamingfast.io:443 substreams.yaml jsonl_out ./output/files ``` + {% endcode %} ## Verify output from tool diff --git a/docs/reference-and-specs/manifests.md b/docs/reference-and-specs/manifests.md index 8875225f5..df33945aa 100644 --- a/docs/reference-and-specs/manifests.md +++ b/docs/reference-and-specs/manifests.md @@ -118,7 +118,7 @@ The Substreams packager loads files in any of the listed `importPaths`. Protobufs and modules are packaged together to help Substreams clients decode the incoming streams. Protobufs are not sent to the Substreams server in network requests. -[Learn more about Google Protocol Buffers](https://developers.google.com/protocol-buffers) in the official documentation provided by Google. +[Learn more about Google Protocol Buffers](https://protobuf.dev/) in the official documentation provided by Google. ### `binaries` @@ -186,7 +186,7 @@ The identifier for the module, prefixed by a letter, followed by a maximum of 64 The module `name` is the reference identifier used on the command line for the `substreams` [`run`](https://substreams.streamingfast.io/reference-and-specs/command-line-interface#run) command. The module `name` is also used in the [`inputs`](manifests.md#modules-.inputs) defined in the Substreams manifest. 
-The module `name` also corresponds to the **name of the Rust function** invoked on the compiled WASM code upon execution. The module `name` is the same `#[substreams::handlers::map]` as defined in the Rust \_\_ code\_.\_ Maps and stores both work in the same fashion. +The module `name` also corresponds to the **name of the Rust function** invoked on the compiled WASM code upon execution. The module `name` is the same `#[substreams::handlers::map]` as defined in the Rust code. Maps and stores both work in the same fashion. {% hint style="warning" %} **Important**_:_ When importing another package, all module names are prefixed by the package's name and a colon. Prefixing ensures there are no name clashes across multiple imported packages and almost any name can be safely used for a module `name`. diff --git a/docs/tutorials/ethereum/exploring-ethereum/map_block_meta_module.md b/docs/tutorials/ethereum/exploring-ethereum/map_block_meta_module.md index 78fe058d5..ab6e8133b 100644 --- a/docs/tutorials/ethereum/exploring-ethereum/map_block_meta_module.md +++ b/docs/tutorials/ethereum/exploring-ethereum/map_block_meta_module.md @@ -27,13 +27,14 @@ $ substreams run -e mainnet.eth.streamingfast.io:443 substreams.yaml map_block_m ``` Let's break down the command into pieces: -- `mainnet.eth.streamingfast.io:443`: is the StreamingFast Ethereum Mainnet endpoint where you are sending your Substreams for execution. + +- `mainnet.eth.streamingfast.io:443`: is the StreamingFast Ethereum Mainnet endpoint where you are sending your Substreams for execution. - `substreams.yaml`: specifies the Substreams manifest. - `map_block_meta`: specifies the module to execute. Since the Ethereum Explorer application contains several modules, it is necessary to specify which one you want to execute. - `--start-block 17712040`: specifies the starting block (i.e. the block where Substreams will start streaming). 
- `--stop-block +1`: specifies how many blocks after the starting block should be considered. In this example, `+1` means that the streaming will start at `17712040` and finish at `17712041` (just one block). -The output of the command should similar to: +The output of the command should be similar to: ```bash ...output omitted... @@ -126,4 +127,4 @@ Ok(BlockMeta { hash: Hex::encode(&blk.hash), parent_hash: Hex::encode(&header.parent_hash), }) -``` \ No newline at end of file +``` diff --git a/docs/tutorials/rust/result.md b/docs/tutorials/rust/result.md index aa05ec290..ba47c248b 100644 --- a/docs/tutorials/rust/result.md +++ b/docs/tutorials/rust/result.md @@ -3,6 +3,7 @@ In Rust, the `Result` struct is used to abstract both a successful response (if it exists) and an error (if it occurs). Let's better understand through an example. ## Basic Usage + Consider that you have a function `divide(num1, num2)`, which executes the division between two numbers. As you already know, dividing by 0 is undefined, and generates an error in Rust. You can use `Result` to return a controlled error. ```rust @@ -35,14 +36,15 @@ fn divide(num1: u32, num2: u32) -> Result { // 1. return Ok(num1 / num2); // 3. } ``` + 1. Declaration of the function. Two unsigned numbers of 32-bit length are passed as parameters. -The return type is `Result`: the first type (`u32`) is for the successful response, and the second type (`String`) is for the error response. + The return type is `Result`: the first type (`u32`) is for the successful response, and the second type (`String`) is for the error response. 2. If dividing by 0, you return an error String. 3. If not, you return the result of the division (`u32`). -The `Result` is really an enum that can take two values: `Ok(T)` (success) and `Err(E)` (error). +The `Result` is really an enum that can take two values: `Ok(T)` (success) and `Err(E)` (error). -In the previous code, when you return `Err(String)`, the success part is automatically empty. 
At the same time, when you return `Ok(u32)`, the error part is empty. +In the previous code, when you return `Err(String)`, the success part is automatically empty. At the same time, when you return `Ok(u32)`, the error part is empty. Now, let's see how you can interact with this result. @@ -56,6 +58,7 @@ fn main() { } } ``` + 1. You invoke the function and store the `Result` enum in a variable. 2. If the result _is ok_ (i.e. the happy path has been returned), you can take its value by using the `result.unwrap()` method. 3. If the error has been returned, you can return the error string by using the `result.err().unwrap()` method. @@ -87,7 +90,7 @@ fn divide(num1: u32, num2: u32) -> Result { return Ok(num1 / num2); } -fn compute() -> Result { +fn computations() -> Result { let result = divide(6, 0); // Performing the division if result.is_err() { // If the division returns an error, then you return an error. @@ -120,7 +123,7 @@ fn divide(num1: u32, num2: u32) -> Result { return Ok(num1 / num2); } -fn compute() -> Result { +fn computations() -> Result { let division_result = divide(6, 0)?; return Ok(division_result + 5); @@ -137,9 +140,10 @@ fn main() { ``` The `?` symbol after a `Result` enum does two things: + 1. If successful, it unwraps the result (in this case, a `u32` number), and stores it in a variable -`let division_result = divide(6, 0)?;` -2. If an error, it returns the error directly. In this example, the error type of the `divide` and the `compute` function is the same (a `String`). + `let division_result = divide(6, 0)?;` +2. If an error occurs, it returns the error directly. In this example, the error type of the `divide` and the `computations` function is the same (a `String`). ## In Substreams