Initial commit

This commit is contained in:
2026-04-14 03:44:59 +02:00
commit a6eb642c76
32 changed files with 589 additions and 0 deletions
+12
View File
@@ -0,0 +1,12 @@
[package]
name = "workshop"
version = "0.1.0"
edition = "2024"
[workspace]
members = ["workshop_macro"]
[dependencies]
workshop_macro = { path = "./workshop_macro" }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
+4
View File
@@ -0,0 +1,4 @@
/// Build script: tells Cargo to rebuild this crate whenever the
/// downloaded schema file on disk changes.
fn main() {
    // Trigger a rebuild if the schema file has changed
    let directive = "cargo::rerun-if-changed=schema.json";
    println!("{directive}");
}
+21
View File
@@ -0,0 +1,21 @@
{
"name": "User",
"fields": [
{
"name": "first_name",
"type": "string"
},
{
"name": "last_name",
"type": "string"
},
{
"name": "age",
"type": "number"
},
{
"name": "admin",
"type": "bool"
}
]
}
+11
View File
@@ -0,0 +1,11 @@
use workshop_macro::from_schema;
from_schema!("https://download.huizinga.dev/workshop/schema.json");
/// Read the local `users.json` file and deserialize it into the `User`
/// struct generated at compile time by the `from_schema!` macro above.
fn main() {
    // Panics with the underlying I/O error if the file is missing/unreadable.
    let raw = std::fs::read_to_string("./users.json").unwrap();
    // Turbofish instead of a type annotation; same deserialization target.
    let users = serde_json::from_str::<Vec<User>>(&raw).unwrap();
    println!("{users:#?}");
}
+8
View File
@@ -0,0 +1,8 @@
[
{
"first_name": "Tim",
"last_name": "Huizinga",
"age": 28,
"admin": true
}
]
@@ -0,0 +1,15 @@
[package]
name = "workshop_macro"
version = "0.1.0"
edition = "2024"
[dependencies]
proc-macro2 = "1.0.106"
quote = "1.0.45"
reqwest = { version = "0.13.2", features = ["blocking"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
syn = "2.0.117"
[lib]
proc-macro = true
@@ -0,0 +1,96 @@
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::{ToTokens, quote};
use serde::Deserialize;
use syn::{Ident, LitStr, parse_macro_input};
// HINT: Maybe with creating some structs/enums that match with the format of schema.json?
// HINT: The word `type` is reserved in rust but can still be used if you write `r#type`
// HINT: Maybe implementing ToTokens can be useful?
// HINT: You can access fields in the quote macro (e.g. `#schema.name`) so instead first bind it
// to a local variable (e.g. `let name = &schema.name`).
// HINT: If you use a string variable inside of the quote macro it will place it as a
// string (e.g. "User"). You need to first create a syn::Ident
// (see: https://docs.rs/syn/latest/syn/struct.Ident.html)
// HINT: If you have an iterable of quote!s you can expand it in another quote macro
// with `#(#fields),*`, similar to how it works with declarative macros.
// HINT: Since the macro will be placed in the context of the "callsite" it is best to use the
// fully qualified path (e.g. `serde::Deserialize`), otherwise the user of the macro needs to
// manually add `use serde::Deserialize` to their file.
/// The primitive value types a schema field may declare.
///
/// `rename_all = "camelCase"` maps each variant onto the lowercase name
/// used in `schema.json` ("string", "number", "bool").
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
enum Type {
    String,
    Number,
    Bool,
}
impl ToTokens for Type {
    /// Emit the Rust type backing each schema type:
    /// `string` -> `String`, `number` -> `i64`, `bool` -> `bool`.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        // Feed the match result straight into the output stream.
        tokens.extend(match self {
            Self::String => quote! { String },
            Self::Number => quote! { i64 },
            Self::Bool => quote! { bool },
        });
    }
}
/// One field entry from the schema: its name and its declared [`Type`].
///
/// `type` is a reserved word in Rust, hence the raw identifier `r#type`;
/// serde still (de)serializes it as the plain key `"type"`.
#[derive(Debug, Deserialize)]
struct Field {
    name: String,
    r#type: Type,
}
impl ToTokens for Field {
    /// Emit a single struct field declaration: `name: Type`.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        // NOTE(review): Ident::new panics if the schema's field name is not a
        // valid Rust identifier — assumes the downloaded schema is well-formed.
        let ident = Ident::new(&self.name, Span::call_site());
        // Bind under a plain name so the quote interpolation below does not
        // have to deal with the raw identifier `r#type`.
        let ty = &self.r#type;
        tokens.extend(quote!(#ident: #ty));
    }
}
/// Top-level shape of `schema.json`: the struct name to generate plus
/// the list of fields it should contain.
#[derive(Debug, Deserialize)]
struct Schema {
    name: String,
    fields: Vec<Field>,
}
impl ToTokens for Schema {
    /// Emit the complete generated struct definition:
    /// `#[derive(Debug, serde::Deserialize)] struct <name> { <fields>, ... }`.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let ident = Ident::new(&self.name, Span::call_site());
        let fields = self.fields.iter();
        tokens.extend(quote! {
            // Fully qualified path so callers need no `use serde::Deserialize`.
            #[derive(Debug, serde::Deserialize)]
            struct #ident {
                #(#fields),*
            }
        });
    }
}
#[proc_macro]
pub fn from_schema(input: TokenStream) -> TokenStream {
let url = parse_macro_input!(input as LitStr).value();
let schema = reqwest::blocking::get(url).unwrap().text().unwrap();
let schema: Schema = serde_json::from_str(&schema).unwrap();
// HINT: There are two different TokenStreams, proc_macro::TokenStream (which the macro should
// return) and proc_macro2::TokenStream (which syn uses). This can be a bit confusing, so you
// will likely need to call `.into()` on your final TokenStream to convert it to the correct
// type.
schema.to_token_stream().into()
}