// Copyright (C) 2023-2099 The crate authors.
//
// This program is free software: you can redistribute it and/or modify it
// under the terms of the GNU Affero General Public License as published by the
// Free Software Foundation, either version 3 of the License, or (at your
// option) any later version.
//
// This program is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
// for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

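// Build script for the scansion test suite: reads `.scs` spec files from
// `./specs`, generates one Rust test file per spec into `./src/tests/scansion`
// (e.g. a hypothetical `foo.scs` becomes `foo.rs`), and writes a `mod.rs`
// declaring the generated test modules.
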
use std::ffi::OsString;
use std::fmt::Display;
use std::fs::File;
use std::io::{self, BufWriter, Read, Write};
use std::path::{Path, PathBuf};

use minidom::Error as MinidomError;
use quote::{format_ident, quote, TokenStreamExt};
use scansion::{read_actions_component, read_spec, Spec};

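/// Turns a parsed [`Spec`] into the token stream of a complete test file: a
/// module-level doc string built from the spec metadata, the required imports,
/// and a single `#[tokio::test]` that replays the spec's stanzas against a
/// `TestComponent`.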
fn generate_spec_tokens(spec: Spec) -> Result<impl TokenStreamExt + Display, MinidomError> {
    println!("FOO0");

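    // Build the doc string for the generated test module from the spec
    // metadata (title, description and tags), if any.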
    let docstr = {
        let mut tmp = String::new();
        if let Some(ref meta) = spec.metadata {
            tmp.push_str(meta.title.as_str());
            tmp.push('\n');

            if let Some(ref desc) = meta.description {
                tmp.push_str(desc.as_str());
                tmp.push('\n');
            }

            for tag in &meta.tags {
                tmp.push_str("tag: ");
                tmp.push_str(tag.as_str());
                tmp.push('\n');
            }
        } else {
            tmp.push_str("No metadata");
        }

        tmp
    };

    let context = spec.context.clone();
    let actions = read_actions_component(spec, &context).unwrap();

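    // Each inbound stanza becomes a token snippet that re-parses the element
    // and wraps it in a `ScanElement` inside the generated test.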
    let stanzas_in = actions
        .inbound
        .into_iter()
        .map(|elem| {
            // TODO: Prevent having to parse elements again.
            // We do need some kind of structure already though to be able to verify the presence
            // of attrs and all before the generation.
            let elem_str = String::from(&elem);
            quote! { ScanElement::new(#elem_str.parse::<Element>().unwrap()).apply_context(&context) }
        })
        .collect::<Vec<_>>();

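    // Each outbound stanza becomes a `component.expect(...)` call in the
    // generated test.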
    let stanzas_out = actions
        .outbound
        .into_iter()
        .map(|elem| {
            let elem_str = String::from(&elem);
            quote! {
                component.expect(
                    #elem_str.parse::<ScanElement>().unwrap().apply_context(&context)
                );
            }
        })
        .collect::<Vec<_>>();

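    // Split the spec context into parallel key/value token vectors so the
    // context map can be rebuilt inside the generated test.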
    let (keys, values) =
        context
            .clone()
            .into_iter()
            .fold((vec![], vec![]), |(mut keys, mut values), (k, v)| {
                keys.push(quote! { #k });
                values.push(quote! { #v });
                (keys, values)
            });

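    // Assemble the complete generated test file from the pieces above.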
    Ok(quote! {
        #![doc = #docstr]
        use crate::component::TestComponent;
        use crate::handlers::handle_stanza;
        use crate::room::Room;

        use ::std::collections::HashMap;
        use ::xmpp_parsers::{Jid, BareJid, FullJid, Element};
        use ::scansion::{ScanElement, Entity, Client};

        #[tokio::test]
        async fn spec() {
            let context: ::scansion::Context = {
                let mut tmp = ::std::collections::HashMap::new();
                let (keys, values) = (vec![#(#keys),*], vec![#(#values),*]);
                for (k, v) in keys.iter().zip(values.iter()) {
                    tmp.insert(String::from(*k), v.clone());
                }
                tmp
            };

            let stanzas_in = vec![#(#stanzas_in),*];
            let mut component = TestComponent::new_scan(stanzas_in);
            let mut rooms: HashMap<BareJid, Room> = HashMap::new();

            #(#stanzas_out)*

            println!("FOO: {component:?}");
            handle_stanza(&mut component, &mut rooms).await.unwrap();
        }
    })
}

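/// Builds the contents of the generated `mod.rs`: one `#[cfg(test)] mod ...;`
/// declaration per generated test file.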
fn generate_mod_tokens(modfiles: Vec<OsString>) -> impl TokenStreamExt + Display {
    let modfiles: Vec<_> = modfiles
        .into_iter()
        .map(|s| format_ident!("{}", s.into_string().unwrap().strip_suffix(".rs").unwrap()))
        .collect();
    quote! {
        /// Scansion tests module.
        /// These tests are generated by the build script, DO NOT EDIT.

        #(#[cfg(test)] mod #modfiles;)*
    }
}

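/// Reads every `.scs` spec in `indir`, writes the generated `.rs` test file
/// for each valid spec into `outdir`, and returns the file names for which
/// generation succeeded (used to build `mod.rs`).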
fn read_input_dir(indir: PathBuf, outdir: PathBuf) -> io::Result<Vec<OsString>> {
    // Will be used to generate mod.rs
    let mut modfiles: Vec<OsString> = Vec::new();

    for entry in indir.read_dir()? {
        let mut p = entry?.path();
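        // Only consider `.scs` spec files; skip everything else.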
        match p.extension() {
            Some(ext) if ext == "scs" => (),
            _ => continue,
        }

        let mut infile = File::open(p.clone())?;
        let mut contents = String::new();
        infile.read_to_string(&mut contents)?;

        let spec = read_spec(&contents);
        match spec {
            Ok(_) => println!("Path: {p:?}: \x1b[32m OK\x1b[0m"),
            Err(err) => {
                println!("Path: {p:?}: \x1b[31mERR\x1b[0m\n{err:?}");
                continue;
            }
        }

        // Path is now only going to be used as .rs
        p.set_extension("rs");

        let filename = OsString::from(p.file_name().unwrap());
        let outpath = outdir.join(filename.clone());
        println!("Outpath: {outpath:?}");

        let tokens = generate_spec_tokens(spec.unwrap()).unwrap();
        let mut output = BufWriter::new(File::create(&outpath)?);
        write!(output, "{}", tokens)?;

        // Add to the set of files for which generation succeeded to then generate mod.rs
        modfiles.push(filename);
    }

    Ok(modfiles)
}

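/// Entry point: validates the input and output directories, generates the test
/// files, and writes the `mod.rs` that ties them together.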
fn main() -> io::Result<()> {
    let indir = Path::new("./specs");
    let outdir = Path::new("./src/tests/scansion");

    if !indir.is_dir() || !outdir.is_dir() {
        return Err(io::Error::new(
            io::ErrorKind::Other,
            "Input and output paths must be directories.",
        ));
    }

    let modfiles = read_input_dir(indir.to_path_buf(), outdir.to_path_buf())?;

    let mut modout = BufWriter::new(File::create(outdir.join("mod.rs"))?);
    let tokens = generate_mod_tokens(modfiles);
    write!(modout, "{}", tokens)?;

    Ok(())
}