use std::{
    collections::BTreeMap,
    io::{self, BufWriter, Write},
    path::{Path, PathBuf},
    process::Command,
};

pub use abbau_types::KsySchema;
use ctx::NamingContext;
use proc_macro2::{Ident, TokenStream};
use quote::{format_ident, quote};
use r#type::{Enum, Type};

mod ctx;
mod doc;
mod enums;
mod parser;
mod rt;
mod structs;
mod r#type;

pub use rt::codegen_rt;

/// A generated module, returned by [`Context::codegen`] and reusable as an import by dependent schemas
pub struct Module {
    /// Module identifier, derived from the schema's `meta.id`
    pub id: Ident,
    /// Import path of the module (parent path plus id), e.g. `crate::foo`
    pub import: TokenStream,
    /// Path of the generated `.rs` file
    pub out_path: PathBuf,
    /// Types defined by the schema, keyed by name
    pub types: BTreeMap<String, Type>,
}

/// Context for transpiling a module / schema file
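///
/// A rough usage sketch (how the schema is loaded and the `out` directory are
/// illustrative, not part of this crate's API):
///
/// ```ignore
/// use std::{collections::BTreeMap, path::Path};
/// use quote::quote;
///
/// // `schema` is assumed to be a `KsySchema` parsed elsewhere.
/// let imports = BTreeMap::new();
/// let ctx = Context {
///     parent: quote!(crate),
///     available_imports: &imports,
///     schema: &schema,
///     out_dir: Path::new("out"),
/// };
/// // Writes `out/<meta.id>.rs` and returns the generated `Module`,
/// // which dependent schemas can receive through `available_imports`.
/// let module = ctx.codegen(None).expect("codegen failed");
/// ```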
pub struct Context<'a> {
    /// Identifier for the parent module, e.g. `quote!(crate)`
    pub parent: TokenStream,
    /// Modules already generated for the schemas listed in `meta.imports`, keyed by import name
    pub available_imports: &'a BTreeMap<String, Module>,
    /// The schema to transpile
    pub schema: &'a KsySchema,
    /// Directory the generated `.rs` file is written into
    pub out_dir: &'a Path,
}

impl Context<'_> {
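    /// Generates Rust source for the schema, writes it to
    /// `<out_dir>/<file_id or meta.id>.rs`, and returns the resulting [`Module`].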
    pub fn codegen(&self, file_id: Option<&str>) -> Result<Module, io::Error> {
        let schema = &self.schema;
        let out_dir = &self.out_dir;
        let p = &self.parent;
        let id = schema.meta.id.0.as_str();
        let sid = format_ident!("{}", id);
        let import = quote!(#p::#sid);

        let out_file = format!("{}.rs", file_id.unwrap_or(&schema.meta.id.0));
        let out_path = out_dir.join(out_file);

        let mut nc = NamingContext::new();
        let mut structs = vec![];

        let mut q_imports = vec![];
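        // Emit a `use` item for each schema import and make the previously
        // generated module known to the naming context.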
        for imp in &schema.meta.imports {
            let module = self.available_imports.get(imp)
                .unwrap_or_else(|| panic!("Missing import: {}", imp));
            let mod_id = &module.import;
            q_imports.push(quote!(use #mod_id;));
            nc.import_module(module);
        }

        for (key, spec) in &schema.enums {
            let r#enum = Enum::new(key, spec);
            nc.add_enum(key, r#enum);
        }

        // First-stage analysis: register the root type and every named type
        let root_ty = Type::new_root(schema);
        let root_endian = root_ty.endian;
        nc.set_root(id, root_ty);
        for (key, spec) in &schema.types {
            let st = Type::new(key, spec, root_endian);
            nc.add(key, st);
        }

        nc.process_dependencies();

        // Struct Codegen
        for (key, spec) in &schema.types {
            let doc = spec.doc.as_deref();
            let doc_ref = spec.doc_ref.as_ref();
            let struct_ty = nc.resolve(key).unwrap();
            let st = structs::codegen_struct(&nc, doc, doc_ref, &spec.seq, struct_ty).unwrap();
            structs.push(st);
        }

        let enums = schema
            .enums
            .iter()
            .map(|(name, spec)| enums::codegen_enum(name, spec));

        let mod_doc = schema.doc.as_deref().map(|d| quote!(#![doc = #d]));

        if !schema.seq.is_empty() {
            let root_ty = nc.get_root().unwrap();
            let root_struct = structs::codegen_struct(
                &nc,
                schema.doc.as_deref(),
                schema.doc_ref.as_ref(),
                &schema.seq,
                root_ty,
            )
            .unwrap();
            structs.push(root_struct);
        }

        let file = quote! {
            #mod_doc

            #(#q_imports)*
            #(#structs)*
            #(#enums)*
        };

        write_file(&out_path, file)?;

        Ok(Module {
            id: sid,
            out_path,
            types: nc.into_types(),
            import,
        })
    }
}

fn write_file(out_path: &Path, file: TokenStream) -> Result<(), io::Error> {
    let writer = std::fs::File::create(out_path)?;
    let mut writer = BufWriter::new(writer);
    write!(writer, "{}", file)?;
    // Flush the buffered output before invoking rustfmt, otherwise the
    // formatter may see a partially written file.
    writer.flush()?;
    // Wait for rustfmt to finish (and propagate a failure to launch it)
    // instead of spawning it in the background and unwrapping.
    Command::new("rustfmt").arg(out_path).status()?;
    Ok(())
}