Initial commit

This commit is contained in:
2026-04-14 03:44:59 +02:00
commit e92ea08a05
32 changed files with 589 additions and 0 deletions
+20
View File
@@ -0,0 +1,20 @@
# Created by https://www.toptal.com/developers/gitignore/api/rust
# Edit at https://www.toptal.com/developers/gitignore?templates=rust
### Rust ###
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# End of https://www.toptal.com/developers/gitignore/api/rust
+6
View File
@@ -0,0 +1,6 @@
[package]
name = "demo"
version = "0.1.0"
edition = "2024"
[dependencies]
+31
View File
@@ -0,0 +1,31 @@
// Declarative macros
// macro_rules! custom_vec {
// ( $( $x:expr ),* ) => {
// {
// let mut temp_vec = Vec::new();
// $(
// temp_vec.push($x);
// )*
// temp_vec
// }
// };
// // ( $( $x:expr ),*; mult = $mult:expr ) => {
// // custom_vec![$( $x * $mult ),*]
// // };
// }
fn main() {
    // Build the vector by hand, one element at a time — the manual version of
    // what `vec!` (or the custom declarative macro above) expands to.
    let mut a = Vec::new();
    for value in [1, 2, 3] {
        a.push(value);
    }

    // let a = vec![1, 2, 3];
    // let a = custom_vec!(1, 2, 3);
    // let a = custom_vec!(1, 2, 3; mult = 2);

    println!("{a:?}");
}
+10
View File
@@ -0,0 +1,10 @@
[package]
name = "demo"
version = "0.1.0"
edition = "2024"
[workspace]
members = ["demo_macro"]
[dependencies]
demo_macro = { path = "demo_macro" }
+12
View File
@@ -0,0 +1,12 @@
[package]
name = "demo_macro"
version = "0.1.0"
edition = "2024"
[dependencies]
proc-macro2 = "1.0.106"
quote = "1.0.45"
syn = "2.0.117"
[lib]
proc-macro = true
+70
View File
@@ -0,0 +1,70 @@
use proc_macro::TokenStream;
#[proc_macro]
pub fn hello_world(_item: TokenStream) -> TokenStream {
    // Ignore the input tokens and emit a `println!` call at the invocation
    // site. A raw string literal avoids escaping the inner quotes.
    let generated = r#"println!("Hello, World")"#;
    generated.parse().unwrap()
}
// #[proc_macro]
// pub fn make_answer(_item: TokenStream) -> TokenStream {
// "fn answer() -> u32 { 42 }".parse().unwrap()
// }
// use quote::quote;
//
// #[proc_macro]
// pub fn make_answer_quote(_item: TokenStream) -> TokenStream {
// let value: u32 = 43;
//
// quote! {
// fn answer_quote() -> u32 {
// #value
// }
// }
// .into()
// }
// use quote::ToTokens;
// use syn::{
// Error, LitInt, Token,
// parse::{Parse, ParseStream},
// parse_macro_input,
// };
//
// struct Custom {
// value: LitInt,
// }
//
// impl Parse for Custom {
// // answer = <value>
// fn parse(input: ParseStream) -> syn::Result<Self> {
// let ident: syn::Ident = input.parse()?;
// if ident != "answer" {
// return Err(Error::new(ident.span(), "expected 'answer'"));
// }
//
// input.parse::<Token![=]>()?;
//
// let value = input.parse()?;
//
// Ok(Custom { value })
// }
// }
//
// impl ToTokens for Custom {
// fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
// self.value.to_tokens(tokens);
// }
// }
//
// #[proc_macro]
// pub fn make_answer_custom(input: TokenStream) -> TokenStream {
// let custom = parse_macro_input!(input as Custom);
//
// quote! {
// fn answer_custom() -> u32 {
// #custom
// }
// }
// .into()
// }
+23
View File
@@ -0,0 +1,23 @@
use demo_macro::hello_world;
// use demo_macro::make_answer;
// make_answer!();
// use demo_macro::make_answer_quote;
// make_answer_quote!();
// use demo_macro::make_answer_custom;
// make_answer_custom!(answer = 33);
fn main() {
    // Expands at compile time into `println!("Hello, World")`.
    hello_world!();
    // let a = answer();
    // println!("{a}");
    // let b = answer_quote();
    // println!("{b}");
    // let c = answer_custom();
    // println!("{c}");
}
+6
View File
@@ -0,0 +1,6 @@
[package]
name = "demo"
version = "0.1.0"
edition = "2024"
[dependencies]
+26
View File
@@ -0,0 +1,26 @@
// Procedural macro
// Derive macro
// Example type for the derive-macro demo: `Debug` is derived, while `Display`
// is implemented by hand below.
#[derive(Debug)]
struct User {
    first_name: String,
    last_name: String,
    // Shown only in the derived `Debug` output; the `Display` impl omits it.
    email: String,
}
impl std::fmt::Display for User {
    // Human-readable form: "<first_name> <last_name>" (the email is not shown).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { first_name, last_name, .. } = self;
        write!(f, "{first_name} {last_name}")
    }
}
fn main() {
    // Build a sample user, then print it via `Display` and via derived `Debug`.
    let tim = User {
        first_name: String::from("Tim"),
        last_name: String::from("Huizinga"),
        email: String::from("tim@huizinga.dev"),
    };

    println!("User: {}", tim);
    println!("User: {:?}", tim);
}
+10
View File
@@ -0,0 +1,10 @@
[package]
name = "demo"
version = "0.1.0"
edition = "2024"
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
serde-xml-rs = "0.8.2"
serde_json = "1.0.149"
serde_yaml = "0.9.34"
+45
View File
@@ -0,0 +1,45 @@
use serde::{Deserialize, Serialize};
// Access level for a `User`. With serde's default (externally tagged)
// representation, the unit variants serialize as plain strings
// (e.g. `"Normal"`, as in user.json), while `Guest` serializes as an object
// wrapping its data (e.g. `{"Guest":{"vip":true}}`).
#[derive(Debug, Serialize, Deserialize)]
// #[serde(rename_all = "camelCase")]
// #[serde(rename_all = "PascalCase")]
enum Access {
    Guest { vip: bool },
    Normal,
    Admin,
}
#[derive(Debug, Serialize, Deserialize)]
// #[serde(rename_all = "camelCase")]
// #[serde(rename_all = "PascalCase")]
struct User {
    // The commented `@`-prefixed renames below are for the XML demo —
    // presumably serde-xml-rs treats `@name` as an attribute rather than a
    // child element (confirm against the serde-xml-rs docs).
    // #[serde(rename = "@firstName")]
    first_name: String,
    // #[serde(rename = "@lastName")]
    last_name: String,
    // #[serde(skip)]
    // #[serde(rename = "@email")]
    email: String,
    access: Access,
}
fn main() {
    // Deserialize a user from JSON on disk; panics (demo-style) if the file is
    // missing or the JSON does not match the `User` shape.
    let content = std::fs::read_to_string("user.json").unwrap();
    let user: User = serde_json::from_str(&content).unwrap();
    println!("{user:#?}");

    // let tim = User {
    //     first_name: "Tim".into(),
    //     last_name: "Huizinga".into(),
    //     email: "tim@huizinga.dev".into(),
    //     access: Access::Normal,
    //     // access: Access::Guest { vip: true },
    // };
    //
    // println!("{}", serde_json::to_string_pretty(&tim).unwrap());
    //
    // println!("{}", serde_yaml::to_string(&tim).unwrap());
    //
    // println!("{}", serde_xml_rs::to_string(&tim).unwrap());
}
+6
View File
@@ -0,0 +1,6 @@
{
"first_name": "Sebastiano",
"last_name": "Tronto",
"email": "sebastiano@tronto.net",
"access": "Normal"
}
+1
View File
@@ -0,0 +1 @@
export DATABASE_URL=postgres://postgres:rustiscool@localhost/postgres
+8
View File
@@ -0,0 +1,8 @@
[package]
name = "demo"
version = "0.1.0"
edition = "2024"
[dependencies]
sqlx = { version = "0.8.6", features = ["macros", "postgres", "runtime-tokio"] }
tokio = { version = "1.51.1", features = ["full"] }
+15
View File
@@ -0,0 +1,15 @@
services:
database:
image: postgres:18
ports:
- 5432:5432
environment:
- TZ=Europe/Amsterdam
- PGTZ=Europe/Amsterdam
- POSTGRES_PASSWORD=rustiscool
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d postgres"]
interval: 10s
retries: 5
start_period: 10s
timeout: 10s
@@ -0,0 +1,8 @@
-- Initial schema for the sqlx demo: a minimal users table plus two seed rows.
CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    first_name TEXT NOT NULL,
    last_name TEXT NOT NULL
);

INSERT INTO users (first_name, last_name) VALUES ('Tim', 'Huizinga');
INSERT INTO users (first_name, last_name) VALUES ('Sebastiano', 'Tronto');
+1
View File
@@ -0,0 +1 @@
SELECT (first_name) FROM users;
+23
View File
@@ -0,0 +1,23 @@
use sqlx::{Connection, PgConnection};
#[tokio::main]
async fn main() {
    // DATABASE_URL must be set at runtime AND at compile time: the
    // `sqlx::query!` macro connects to the database during compilation to
    // type-check the query and generate the anonymous record type.
    let url = std::env::var("DATABASE_URL").unwrap();
    let mut conn = PgConnection::connect(&url).await.unwrap();

    // NOTE(review): if re-enabled, this needs `&mut conn` and explicit error
    // handling — `?` won't compile in a `main` that returns `()`.
    // sqlx::migrate!("migrations").run(&conn).await?;

    let users = sqlx::query!("SELECT * FROM users")
        .fetch_all(&mut conn)
        .await
        .unwrap();
    // let users = sqlx::query_file!("src/get_users.sql")
    //     .fetch_all(&mut conn)
    //     .await
    //     .unwrap();
    println!("{users:#?}");
    // users[0].first_name;
}
+12
View File
@@ -0,0 +1,12 @@
[package]
name = "workshop"
version = "0.1.0"
edition = "2024"
[workspace]
members = ["workshop_macro"]
[dependencies]
workshop_macro = { path = "./workshop_macro" }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
+4
View File
@@ -0,0 +1,4 @@
fn main() {
    // Trigger a rebuild if the schema file has changed
    // NOTE(review): the proc macro fetches the schema from a URL; watching the
    // local schema.json presumably forces re-expansion when the local copy is
    // edited — confirm this matches the intended workflow.
    println!("cargo::rerun-if-changed=schema.json");
}
+11
View File
@@ -0,0 +1,11 @@
use workshop_macro::from_schema;
// Generates the `User` struct at compile time from the remote schema.
from_schema!("https://download.huizinga.dev/workshop/schema.json");

fn main() {
    // Deserialize the sample data into the generated struct and pretty-print it.
    let content = std::fs::read_to_string("./users.json").unwrap();
    let users: Vec<User> = serde_json::from_str(&content).unwrap();
    println!("{users:#?}");
}
+8
View File
@@ -0,0 +1,8 @@
[
{
"first_name": "Tim",
"last_name": "Huizinga",
"age": 28,
"admin": true
}
]
+15
View File
@@ -0,0 +1,15 @@
[package]
name = "workshop_macro"
version = "0.1.0"
edition = "2024"
[dependencies]
proc-macro2 = "1.0.106"
quote = "1.0.45"
reqwest = { version = "0.13.2", features = ["blocking"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
syn = "2.0.117"
[lib]
proc-macro = true
+34
View File
@@ -0,0 +1,34 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{LitStr, parse_macro_input};
// HINT: Maybe start by creating some structs/enums that match the format of schema.json?
// HINT: The word `type` is reserved in rust but can still be used if you write `r#type`
// HINT: Maybe implementing ToTokens can be useful?
// HINT: You can't access fields directly in the quote macro (e.g. `#schema.name` won't work),
// so instead first bind the field to a local variable (e.g. `let name = &schema.name`).
// HINT: If you use a string variable inside of the quote macro it will be placed as a
// string (e.g. "User"). You need to first create a syn::Ident
// (see: https://docs.rs/syn/latest/syn/struct.Ident.html)
// HINT: If you have an iterable of quote!s you can expand it in another quote macro
// with `#(#fields),*`, similar to how it works with declarative macros.
// HINT: Since the macro will be placed in the context of the "callsite" it is best to use the
// fully qualified path (e.g. `serde::Deserialize`), otherwise the user of the macro needs to
// manually add `use serde::Deserialize` to their file.
// Workshop starting point: fetch the schema during macro expansion and emit
// nothing (yet). The goal is to generate a struct definition from the schema.
#[proc_macro]
pub fn from_schema(input: TokenStream) -> TokenStream {
    // The macro argument is a single string literal containing the schema URL.
    let url = parse_macro_input!(input as LitStr).value();
    // A blocking HTTP fetch is fine here: proc macros run at compile time.
    let schema = reqwest::blocking::get(url).unwrap().text().unwrap();

    // TODO: Print out the received schema text during compilation
    println!("{schema:#?}");

    quote! {}.into()
}
+12
View File
@@ -0,0 +1,12 @@
[package]
name = "workshop"
version = "0.1.0"
edition = "2024"
[workspace]
members = ["workshop_macro"]
[dependencies]
workshop_macro = { path = "./workshop_macro" }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
+4
View File
@@ -0,0 +1,4 @@
fn main() {
    // Trigger a rebuild if the schema file has changed
    // NOTE(review): the proc macro fetches the schema from a URL; watching the
    // local schema.json presumably forces re-expansion when the local copy is
    // edited — confirm this matches the intended workflow.
    println!("cargo::rerun-if-changed=schema.json");
}
+21
View File
@@ -0,0 +1,21 @@
{
"name": "User",
"fields": [
{
"name": "first_name",
"type": "string"
},
{
"name": "last_name",
"type": "string"
},
{
"name": "age",
"type": "number"
},
{
"name": "admin",
"type": "bool"
}
]
}
+11
View File
@@ -0,0 +1,11 @@
use workshop_macro::from_schema;
// Generates the `User` struct at compile time from the remote schema.
from_schema!("https://download.huizinga.dev/workshop/schema.json");

fn main() {
    // Deserialize the sample data into the generated struct and pretty-print it.
    let content = std::fs::read_to_string("./users.json").unwrap();
    let users: Vec<User> = serde_json::from_str(&content).unwrap();
    println!("{users:#?}");
}
+8
View File
@@ -0,0 +1,8 @@
[
{
"first_name": "Tim",
"last_name": "Huizinga",
"age": 28,
"admin": true
}
]
@@ -0,0 +1,15 @@
[package]
name = "workshop_macro"
version = "0.1.0"
edition = "2024"
[dependencies]
proc-macro2 = "1.0.106"
quote = "1.0.45"
reqwest = { version = "0.13.2", features = ["blocking"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
syn = "2.0.117"
[lib]
proc-macro = true
@@ -0,0 +1,96 @@
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::{ToTokens, quote};
use serde::Deserialize;
use syn::{Ident, LitStr, parse_macro_input};
// HINT: Maybe start by creating some structs/enums that match the format of schema.json?
// HINT: The word `type` is reserved in rust but can still be used if you write `r#type`
// HINT: Maybe implementing ToTokens can be useful?
// HINT: You can't access fields directly in the quote macro (e.g. `#schema.name` won't work),
// so instead first bind the field to a local variable (e.g. `let name = &schema.name`).
// HINT: If you use a string variable inside of the quote macro it will be placed as a
// string (e.g. "User"). You need to first create a syn::Ident
// (see: https://docs.rs/syn/latest/syn/struct.Ident.html)
// HINT: If you have an iterable of quote!s you can expand it in another quote macro
// with `#(#fields),*`, similar to how it works with declarative macros.
// HINT: Since the macro will be placed in the context of the "callsite" it is best to use the
// fully qualified path (e.g. `serde::Deserialize`), otherwise the user of the macro needs to
// manually add `use serde::Deserialize` to their file.
// A field type supported by the schema. The camelCase rename makes the JSON
// spellings "string", "number" and "bool" map onto these variants.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
enum Type {
    String,
    Number,
    Bool,
}
impl ToTokens for Type {
    // Emit the Rust type used for this schema type in the generated struct.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        tokens.extend(match self {
            Self::String => quote! { String },
            Self::Number => quote! { i64 },
            Self::Bool => quote! { bool },
        });
    }
}
// A single field entry from schema.json.
#[derive(Debug, Deserialize)]
struct Field {
    // Field name as written in the schema (becomes the struct field name).
    name: String,
    // `type` is a reserved word in Rust; the raw identifier `r#type` still
    // matches the JSON key "type" (serde strips the `r#` prefix).
    r#type: Type,
}
impl ToTokens for Field {
    // Emits `name: Type` for use inside the generated struct body.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        // NOTE(review): `Ident::new` panics if the schema field name is not a
        // valid Rust identifier — fine for the workshop, but a spanned compile
        // error would be friendlier in real code.
        let name = Ident::new(&self.name, Span::call_site());
        let r#type = &self.r#type;
        tokens.extend(quote! {
            #name: #r#type
        });
    }
}
// Top-level shape of schema.json: a struct name plus its list of fields.
#[derive(Debug, Deserialize)]
struct Schema {
    name: String,
    fields: Vec<Field>,
}
impl ToTokens for Schema {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let name = Ident::new(&self.name, Span::call_site());
let fields = &self.fields;
tokens.extend(quote! {
#[derive(Debug, serde::Deserialize)]
struct #name {
#(#fields),*
}
});
}
}
#[proc_macro]
pub fn from_schema(input: TokenStream) -> TokenStream {
let url = parse_macro_input!(input as LitStr).value();
let schema = reqwest::blocking::get(url).unwrap().text().unwrap();
let schema: Schema = serde_json::from_str(&schema).unwrap();
// HINT: There are two different TokenStreams, proc_macro::TokenStream (which the macro should
// return) and proc_macro2::TokenStream (which syn uses). This can be a bit confusing, so you
// will likely need to call `.into()` on your final TokenStream to convert it to the correct
// type.
schema.to_token_stream().into()
}
+17
View File
@@ -0,0 +1,17 @@
# Knowledge Group Advanced Software: Rust macros
This repo contains the examples and short workshop (and possible solution) from the talk.
## Notes:
Example 05 requires that you are running a postgres database, this can be started by running `docker compose up` in the example directory.
It also requires that the `DATABASE_URL` environment variable is set, this can be achieved by sourcing the `.env` file.
To set up the schema and data in the database you can use `sqlx-cli` (install: `cargo install sqlx-cli`); to apply the migrations, run:
```bash
sqlx migrate run
```
For the workshop in 06, several hints are given about things that might cause trouble.
To help debug the macro you can install `cargo expand`: `cargo install cargo-expand`.