diff --git a/a2lfile/src/a2ml.rs b/a2lfile/src/a2ml.rs
index 0065dae..e2f2a36 100644
--- a/a2lfile/src/a2ml.rs
+++ b/a2lfile/src/a2ml.rs
@@ -1,5 +1,8 @@
-use super::writer::{TaggedItemInfo, Writer};
-use super::{loader, tokenizer};
+use crate::{
+    loader, tokenizer,
+    writer::{TaggedItemInfo, Writer},
+    Filename,
+};
 use std::borrow::Cow;
 use std::collections::HashMap;
 use std::path::Path;
@@ -121,7 +124,7 @@ pub enum GenericIfData {
 
 // tokenize()
 // Tokenize the text of the a2ml section
-fn tokenize_a2ml(filename: &str, input: &str) -> Result<(Vec<TokenType>, String), String> {
+fn tokenize_a2ml(filename: &Filename, input: &str) -> Result<(Vec<TokenType>, String), String> {
     let mut amltokens = Vec::<TokenType>::new();
     let input_bytes = input.as_bytes();
     let datalen = input_bytes.len();
@@ -247,7 +250,7 @@ fn tokenize_tag(input: &str, bytepos: &mut usize) -> Result<TokenType, String> {
 }
 
 fn tokenize_include(
-    filename: &str,
+    filename: &Filename,
     input: &str,
     bytepos: &mut usize,
 ) -> Result<(Vec<TokenType>, String), String> {
@@ -301,13 +304,13 @@ fn tokenize_include(
     }
     let incname = &input[fname_idx_start..fname_idx_end];
 
-    let incfilename = loader::make_include_filename(incname, filename);
+    let incfilename = loader::make_include_filename(incname, &filename.full);
 
     // check if incname is an accessible file
     let incpathref = Path::new(&incfilename);
     let loadresult = loader::load(incpathref);
     if let Ok(incfiledata) = loadresult {
-        tokenize_a2ml(incpathref.to_string_lossy().as_ref(), &incfiledata)
+        tokenize_a2ml(&Filename::from(incpathref), &incfiledata)
     } else {
         Err(format!("failed reading {}", incpathref.display()))
     }
@@ -389,7 +392,10 @@ fn make_errtxt(pos: usize, input_bytes: &[u8]) -> Cow<str> {
 // parse an a2ml fragment in an a2l file
 // The target data structure is the parsing definition used by the a2l parser, so that the
 // a2ml can control the parsing of IF_DATA blocks
-pub(crate) fn parse_a2ml(filename: &str, input: &str) -> Result<(A2mlTypeSpec, String), String> {
+pub(crate) fn parse_a2ml(
+    filename: &Filename,
+    input: &str,
+) -> Result<(A2mlTypeSpec, String), String> {
     let (tok_result, complete_string) = tokenize_a2ml(filename, input)?;
     let mut tok_iter = tok_result.iter().peekable();
 
@@ -1335,42 +1341,43 @@ impl PartialEq for GenericIfDataTaggedItem {
 #[cfg(test)]
 mod test {
     use super::*;
+    use std::io::Write;
     use tempfile::tempdir;
 
     #[test]
     fn tokenize() {
-        let (tokenvec, _) = tokenize_a2ml("test", " ").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), " ").unwrap();
         assert!(tokenvec.is_empty());
 
-        let (tokenvec, _) = tokenize_a2ml("test", "/* // */").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "/* // */").unwrap();
         assert!(tokenvec.is_empty());
-        let (tokenvec, _) = tokenize_a2ml("test", "/*/*/").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "/*/*/").unwrap();
         assert!(tokenvec.is_empty());
-        let (tokenvec, _) = tokenize_a2ml("test", "/***/").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "/***/").unwrap();
         assert!(tokenvec.is_empty());
-        let tokenvec_err = tokenize_a2ml("test", "/* ");
+        let tokenvec_err = tokenize_a2ml(&Filename::from("test"), "/* ");
         assert!(tokenvec_err.is_err());
-        let (tokenvec, _) = tokenize_a2ml("test", "//*/").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "//*/").unwrap();
         assert!(tokenvec.is_empty());
-        let (tokenvec, _) = tokenize_a2ml("test", r#""TAG""#).unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), r#""TAG""#).unwrap();
         assert_eq!(tokenvec.len(), 1);
         let _tag = TokenType::Tag("TAG".to_string());
         assert!(matches!(&tokenvec[0], _tag));
-        let (tokenvec, _) = tokenize_a2ml("test", ";").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), ";").unwrap();
         assert_eq!(tokenvec.len(), 1);
        assert!(matches!(tokenvec[0], TokenType::Semicolon));
-        let (tokenvec, _) = tokenize_a2ml("test", "0").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "0").unwrap();
         assert_eq!(tokenvec.len(), 1);
         assert!(matches!(tokenvec[0], TokenType::Constant(0)));
-        let (tokenvec, _) = tokenize_a2ml("test", "0x03").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "0x03").unwrap();
         assert_eq!(tokenvec.len(), 1);
         assert!(matches!(tokenvec[0], TokenType::Constant(3)));
-        let (tokenvec, _) = tokenize_a2ml("test", "123456").unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), "123456").unwrap();
         assert_eq!(tokenvec.len(), 1);
         assert!(matches!(tokenvec[0], TokenType::Constant(123456)));
 
@@ -1381,19 +1388,24 @@ mod test {
         // create the empty "testfile" so that it can be included
         std::fs::File::create_new("testfile").unwrap();
 
-        let (tokenvec, _) = tokenize_a2ml("test", r#"/include "testfile""#).unwrap();
+        let (tokenvec, _) =
+            tokenize_a2ml(&Filename::from("test"), r#"/include "testfile""#).unwrap();
         assert_eq!(tokenvec.len(), 0);
 
-        let (tokenvec, _) = tokenize_a2ml("test", r#"/include"testfile""#).unwrap();
+        let (tokenvec, _) =
+            tokenize_a2ml(&Filename::from("test"), r#"/include"testfile""#).unwrap();
         assert_eq!(tokenvec.len(), 0);
 
-        let (tokenvec, _) = tokenize_a2ml("test", r#"/include testfile"#).unwrap();
+        let (tokenvec, _) = tokenize_a2ml(&Filename::from("test"), r#"/include testfile"#).unwrap();
         assert_eq!(tokenvec.len(), 0);
 
-        let err_result = tokenize_a2ml("test", r#"/include "testfile_unclosed_quote"#);
+        let err_result = tokenize_a2ml(
+            &Filename::from("test"),
+            r#"/include "testfile_unclosed_quote"#,
+        );
         assert!(err_result.is_err());
 
-        let err_result = tokenize_a2ml("test", r#" "unclosed "#);
+        let err_result = tokenize_a2ml(&Filename::from("test"), r#" "unclosed "#);
         assert!(err_result.is_err());
     }
 
@@ -1512,10 +1524,39 @@ mod test {
             A2mlTypeSpec::TaggedStruct(taggedstruct_hashmap),
         ]);
 
-        let parse_result = parse_a2ml("test", TEST_INPUT);
+        let parse_result = parse_a2ml(&Filename::from("test"), TEST_INPUT);
         assert!(parse_result.is_ok());
         let (a2ml_spec, _complete_string) = parse_result.unwrap();
         println!("{:?}", a2ml_spec);
         assert_eq!(a2ml_spec, expected_parse_result);
     }
+
+    #[test]
+    fn included_files() {
+        let dir = tempdir().unwrap();
+
+        // base file at <dir>/base
+        let base_filename = dir.path().join("base");
+        let mut basefile = std::fs::File::create_new(&base_filename).unwrap();
+        basefile.write(br#"/include "abc/include1""#).unwrap();
+
+        // include file 1 at <dir>/abc/include1
+        let subdir = dir.path().join("abc");
+        let inc1name = subdir.join("include1");
+        std::fs::create_dir(&subdir).unwrap();
+        let mut incfile1 = std::fs::File::create_new(&inc1name).unwrap();
+        incfile1.write(br#"/include "def/include2""#).unwrap();
+
+        // include file 2 at <dir>/abc/def/include2
+        let subdir2 = subdir.join("def");
+        std::fs::create_dir(&subdir2).unwrap();
+        let _incfile2 = std::fs::File::create_new(subdir2.join("include2")).unwrap();
+
+        // run the a2ml tokenizer. It should not return an error from the includes
+        let filetext = loader::load(&base_filename).unwrap();
+        let (tokens, fulltext) =
+            tokenize_a2ml(&Filename::from(base_filename.as_path()), &filetext).unwrap();
+        assert_eq!(tokens.len(), 0);
+        assert!(fulltext.trim().is_empty());
+    }
 }
diff --git a/a2lfile/src/ifdata.rs b/a2lfile/src/ifdata.rs
index 3dd3258..932e373 100644
--- a/a2lfile/src/ifdata.rs
+++ b/a2lfile/src/ifdata.rs
@@ -250,7 +250,7 @@ fn parse_ifdata_taggeditem(
         let endtag = parser.get_token_text(endident);
         if endtag != tag {
             return Err(ParserError::IncorrectEndTag {
-                filename: parser.filenames[context.fileid].clone(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 tag: endtag.to_owned(),
                 block: newcontext.element.clone(),
@@ -437,7 +437,7 @@ fn parse_unknown_taggedstruct(
         let endtag = parser.get_token_text(endident);
         if endtag != tag {
             return Err(ParserError::IncorrectEndTag {
-                filename: parser.filenames[newcontext.fileid].clone(),
+                filename: parser.filenames[newcontext.fileid].to_string(),
                 error_line: parser.last_token_position,
                 tag: endtag.to_owned(),
                 block: newcontext.element.clone(),
@@ -470,7 +470,7 @@ fn parse_unknown_taggedstruct(
     }) = parser.peek_token()
     {
         return Err(ParserError::InvalidBegin {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             block: context.element.clone(),
         });
@@ -539,7 +539,8 @@ fn remove_unknown_ifdata_from_list(ifdata_list: &mut Vec<IfData>) {
 
 #[cfg(test)]
 mod ifdata_test {
-    use crate::{self as a2lfile, IfData};
+    use super::*;
+    use crate::{self as a2lfile, Filename, IfData};
 
     crate::a2ml_specification! {
@@ -739,21 +740,24 @@ mod ifdata_test {
         assert!(decoded_ifdata.none.is_some());
     }
 
-    fn parse_helper(
-        ifdata: &str,
-    ) -> Result<(Option<GenericIfData>, bool), super::ParserError> {
-        let token_result = a2lfile::tokenizer::tokenize("".to_string(), 0, ifdata).unwrap();
+    fn parse_helper(ifdata: &str) -> Result<(Option<GenericIfData>, bool), ParserError> {
+        let token_result =
+            a2lfile::tokenizer::tokenize(&Filename::from("test"), 0, ifdata).unwrap();
         let mut log_msgs = Vec::new();
         let ifdatas = [ifdata.to_string()];
-        let filenames = ["".to_string()];
-        let mut parser = super::ParserState::new_internal(
+        let filenames = [Filename::from("test")];
+        let mut parser = ParserState::new_internal(
            &token_result.tokens,
            &ifdatas,
            &filenames,
            &mut log_msgs,
            false,
        );
-        parser.builtin_a2mlspec = Some(a2lfile::a2ml::parse_a2ml("test", A2MLTEST_TEXT).unwrap().0);
+        parser.builtin_a2mlspec = Some(
+            a2lfile::a2ml::parse_a2ml(&Filename::from("test"), A2MLTEST_TEXT)
+                .unwrap()
+                .0,
+        );
         super::parse_ifdata(
             &mut parser,
             &a2lfile::ParseContext {
@@ -765,9 +769,7 @@ mod ifdata_test {
         )
     }
 
-    fn check_and_decode(
-        result: Result<(Option<GenericIfData>, bool), super::ParserError>,
-    ) -> A2mlTest {
+    fn check_and_decode(result: Result<(Option<GenericIfData>, bool), ParserError>) -> A2mlTest {
         let (data, valid) = result.unwrap();
         assert!(data.is_some());
         assert_eq!(valid, true);
diff --git a/a2lfile/src/lib.rs b/a2lfile/src/lib.rs
index 279c416..28f0cde 100644
--- a/a2lfile/src/lib.rs
+++ b/a2lfile/src/lib.rs
@@ -18,6 +18,8 @@ mod writer;
 pub use namemap::{ModuleNameMap, NameMapCompuTab, NameMapObject, NameMapTypedef};
 pub use parser::ParserError;
 use std::convert::AsRef;
+use std::ffi::OsString;
+use std::fmt::Display;
 use std::path::Path;
 use std::path::PathBuf;
 use thiserror::Error;
@@ -191,7 +193,7 @@ fn load_impl(
     a2ml_spec: Option<String>,
 ) -> Result<A2lFile, A2lError> {
     // tokenize the input data
-    let tokenresult = tokenizer::tokenize(path.to_string_lossy().to_string(), 0, filedata)
+    let tokenresult = tokenizer::tokenize(&Filename::from(path), 0, filedata)
         .map_err(|tokenizer_error| A2lError::TokenizerError { tokenizer_error })?;
 
     if tokenresult.tokens.is_empty() {
@@ -206,7 +208,7 @@ fn load_impl(
     // if a built-in A2ml specification was passed as a string, then it is parsed here
     if let Some(spec) = a2ml_spec {
         parser.builtin_a2mlspec = Some(
-            a2ml::parse_a2ml(path.to_string_lossy().as_ref(), &spec)
+            a2ml::parse_a2ml(&Filename::from(path), &spec)
                 .map_err(|parse_err| A2lError::InvalidBuiltinA2mlSpec { parse_err })?
                 .0,
         );
@@ -231,7 +233,7 @@ fn load_impl(
 pub fn load_fragment(a2ldata: &str) -> Result<Module, A2lError> {
     let fixed_a2ldata = format!(r#"fragment "" {a2ldata} /end MODULE"#);
     // tokenize the input data
-    let tokenresult = tokenizer::tokenize("(fragment)".to_string(), 0, &fixed_a2ldata)
+    let tokenresult = tokenizer::tokenize(&Filename::from("(fragment)"), 0, &fixed_a2ldata)
         .map_err(|tokenizer_error| A2lError::TokenizerError { tokenizer_error })?;
 
     let firstline = tokenresult.tokens.first().map_or(1, |tok| tok.line);
     let context = ParseContext {
@@ -369,6 +371,56 @@ impl Module {
     }
 }
 
+#[derive(Debug, Clone)]
+struct Filename {
+    // the full filename, which has been extended with a base path relative to the working directory
+    full: OsString,
+    // the "display" name, i.e. the name that appears in an /include directive or an error message
+    display: String,
+}
+
+impl Filename {
+    pub(crate) fn new(full: OsString, display: &str) -> Self {
+        Self {
+            full,
+            display: display.to_string(),
+        }
+    }
+}
+
+impl From<&str> for Filename {
+    fn from(value: &str) -> Self {
+        Self {
+            full: OsString::from(value),
+            display: String::from(value),
+        }
+    }
+}
+
+impl From<&Path> for Filename {
+    fn from(value: &Path) -> Self {
+        Self {
+            display: value.to_string_lossy().to_string(),
+            full: OsString::from(value),
+        }
+    }
+}
+
+impl From<OsString> for Filename {
+    fn from(value: OsString) -> Self {
+        Self {
+            display: value.to_string_lossy().to_string(),
+            full: value,
+        }
+    }
+}
+
+impl Display for Filename {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str(&self.display)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
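Note: a sketch of a unit test that could sit in the lib.rs test module to pin down the full/display split. The assertions follow directly from the impls above, but the test itself is not part of this patch:

#[test]
fn filename_full_vs_display() {
    // From<&str>: both halves are the same
    let f = Filename::from("test");
    assert_eq!(f.to_string(), "test");
    assert_eq!(f.full, OsString::from("test"));

    // new(): full carries the resolved path for file access,
    // display keeps the short name for error messages
    let f = Filename::new(OsString::from("/proj/abc/include1"), "abc/include1");
    assert_eq!(f.to_string(), "abc/include1");
    assert_eq!(f.full, OsString::from("/proj/abc/include1"));
}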
diff --git a/a2lfile/src/loader.rs b/a2lfile/src/loader.rs
index 007bca3..d80c1af 100644
--- a/a2lfile/src/loader.rs
+++ b/a2lfile/src/loader.rs
@@ -1,10 +1,10 @@
 use crate::A2lError;
-use std::ffi::OsString;
+use std::ffi::{OsStr, OsString};
 use std::fs::File;
 use std::io::Read;
 use std::path::Path;
 
-pub(crate) fn make_include_filename(incname: &str, base_filename: &str) -> OsString {
+pub(crate) fn make_include_filename(incname: &str, base_filename: &OsStr) -> OsString {
     let base = std::path::Path::new(base_filename);
     if let Some(basedir) = base.parent() {
         let joined = basedir.join(incname);
@@ -146,6 +146,8 @@ fn decode_raw_bytes(filedata: &[u8]) -> String {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::io::Write;
+    use tempfile::tempdir;
 
     #[test]
     fn load_nonexistent_file() {
@@ -204,4 +206,44 @@ mod tests {
         let data: Vec<u8> = vec![0xa9]; // "©"
         assert_eq!(decode_raw_bytes(&data), String::from("\u{00a9}"));
     }
+
+    #[test]
+    fn included_files() {
+        let dir = tempdir().unwrap();
+
+        // base file at <dir>/base
+        let base_filename = dir.path().join("base");
+        let mut basefile = std::fs::File::create_new(&base_filename).unwrap();
+        basefile.write(br#"/include "abc/include1""#).unwrap();
+
+        // include file 1 at <dir>/abc/include1
+        let subdir = dir.path().join("abc");
+        let inc1name = subdir.join("include1");
+        std::fs::create_dir(&subdir).unwrap();
+        let mut incfile1 = std::fs::File::create_new(&inc1name).unwrap();
+        incfile1.write(br#"/include "def/include2""#).unwrap();
+
+        // include file 2 at <dir>/abc/def/include2
+        let subdir2 = subdir.join("def");
+        std::fs::create_dir(&subdir2).unwrap();
+        let _incfile2 = std::fs::File::create_new(subdir2.join("include2")).unwrap();
+
+        // verify include 1
+        let out = make_include_filename(r#"abc/include1"#, base_filename.as_os_str())
+            .into_string()
+            .unwrap();
+        // canonicalize both out and expected - this fixes issues with "/" and "\" so that the test passes on windows and linux
+        let out_path = Path::new(&out).canonicalize().unwrap();
+        let expected = dir.path().join("abc/include1").canonicalize().unwrap();
+        assert_eq!(out_path.to_string_lossy(), expected.to_string_lossy());
+
+        // verify include 2
+        let out = make_include_filename(r#"def/include2"#, inc1name.as_os_str())
+            .into_string()
+            .unwrap();
+        // canonicalize both out and expected - this fixes issues with "/" and "\" so that the test passes on windows and linux
+        let out_path = Path::new(&out).canonicalize().unwrap();
+        let expected = subdir.join("def/include2").canonicalize().unwrap();
+        assert_eq!(out_path.to_string_lossy(), expected.to_string_lossy());
+    }
 }
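Note: the duplicated canonicalize-and-compare step in the test above could be pulled into a helper. A sketch, assuming (as the test does) that both paths already exist on disk:

use std::path::Path;

// canonicalize() normalizes "/" vs "\" and resolves symlinks, so the
// comparison is stable across Windows and Linux
fn assert_same_file(actual: &Path, expected: &Path) {
    let actual = actual.canonicalize().unwrap();
    let expected = expected.canonicalize().unwrap();
    assert_eq!(actual.to_string_lossy(), expected.to_string_lossy());
}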
diff --git a/a2lfile/src/parser.rs b/a2lfile/src/parser.rs
index 9a5d86f..d168128 100644
--- a/a2lfile/src/parser.rs
+++ b/a2lfile/src/parser.rs
@@ -3,7 +3,7 @@ use thiserror::Error;
 
 use crate::a2ml::A2mlTypeSpec;
 use crate::tokenizer::{A2lToken, A2lTokenType, TokenResult};
-use crate::A2lError;
+use crate::{A2lError, Filename};
 
 const MAX_IDENT: usize = 1024;
@@ -25,7 +25,7 @@ pub enum A2lVersion {
 pub struct ParserState<'a> {
     token_cursor: TokenIter<'a>,
     filedata: &'a [String],
-    pub(crate) filenames: &'a [String],
+    pub(crate) filenames: &'a [Filename],
     pub(crate) last_token_position: u32,
     sequential_id: u32,
     pub(crate) log_msgs: &'a mut Vec<A2lError>,
@@ -270,7 +270,7 @@ impl<'a> ParserState<'a> {
     pub(crate) fn new_internal<'b>(
         tokens: &'b [A2lToken],
         filedata: &'b [String],
-        filenames: &'b [String],
+        filenames: &'b [Filename],
         log_msgs: &'b mut Vec<A2lError>,
         strict: bool,
     ) -> ParserState<'b> {
@@ -308,7 +308,7 @@ impl<'a> ParserState<'a> {
         // make sure this is the end of the input, i.e. no additional data after the parsed data
         if let Some(token) = self.peek_token() {
             self.error_or_log(ParserError::AdditionalTokensError {
-                filename: self.filenames[token.fileid].clone(),
+                filename: self.filenames[token.fileid].to_string(),
                 error_line: self.last_token_position,
                 text: self.get_token_text(token).to_owned(),
             })?;
@@ -422,7 +422,7 @@ impl<'a> ParserState<'a> {
         if fileid == 0 || fileid >= self.filenames.len() {
             None
         } else {
-            Some(self.filenames[fileid].clone())
+            Some(self.filenames[fileid].to_string())
         }
     }
@@ -438,7 +438,7 @@ impl<'a> ParserState<'a> {
     ) -> Result<(), ParserError> {
         if self.file_ver < min_ver {
             self.error_or_log(ParserError::BlockRefTooNew {
-                filename: self.filenames[context.fileid].clone(),
+                filename: self.filenames[context.fileid].to_string(),
                 error_line: self.last_token_position,
                 block: context.element.clone(),
                 tag: tag.to_string(),
@@ -457,7 +457,7 @@ impl<'a> ParserState<'a> {
     ) {
         if self.file_ver > max_ver {
             self.log_warning(ParserError::BlockRefDeprecated {
-                filename: self.filenames[context.fileid].clone(),
+                filename: self.filenames[context.fileid].to_string(),
                 error_line: self.last_token_position,
                 block: context.element.clone(),
                 tag: tag.to_string(),
@@ -475,7 +475,7 @@ impl<'a> ParserState<'a> {
     ) -> Result<(), ParserError> {
         if self.file_ver < min_ver {
             self.error_or_log(ParserError::EnumRefTooNew {
-                filename: self.filenames[context.fileid].clone(),
+                filename: self.filenames[context.fileid].to_string(),
                 error_line: self.last_token_position,
                 block: context.element.clone(),
                 tag: tag.to_string(),
@@ -494,7 +494,7 @@ impl<'a> ParserState<'a> {
     ) {
         if self.file_ver > max_ver {
             self.log_warning(ParserError::EnumRefDeprecated {
-                filename: self.filenames[context.fileid].clone(),
+                filename: self.filenames[context.fileid].to_string(),
                 error_line: self.last_token_position,
                 block: context.element.clone(),
                 tag: tag.to_string(),
@@ -565,7 +565,7 @@ impl<'a> ParserState<'a> {
         let text = self.get_string(context)?;
         if text.len() > maxlen {
             self.error_or_log(ParserError::StringTooLong {
-                filename: self.filenames[context.fileid].clone(),
+                filename: self.filenames[context.fileid].to_string(),
                 error_line: self.last_token_position,
                 block: context.element.clone(),
                 text: text.clone(),
@@ -583,7 +583,7 @@ impl<'a> ParserState<'a> {
         let text = self.get_token_text(token);
         if text.as_bytes()[0].is_ascii_digit() || text.len() > MAX_IDENT {
             self.error_or_log(ParserError::InvalidIdentifier {
-                filename: self.filenames[context.fileid].clone(),
+                filename: self.filenames[context.fileid].to_string(),
                 error_line: self.last_token_position,
                 block: context.element.clone(),
                 ident: text.to_owned(),
@@ -940,7 +940,7 @@ impl ParserError {
         expected_ttype: A2lTokenType,
     ) -> Self {
         Self::UnexpectedTokenType {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             block_line: context.line,
             element: context.element.clone(),
@@ -956,7 +956,7 @@ impl ParserError {
         numstr: &str,
     ) -> Self {
         Self::MalformedNumber {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             numstr: numstr.to_owned(),
         }
@@ -968,7 +968,7 @@ impl ParserError {
         enumitem: &str,
     ) -> Self {
         Self::InvalidEnumValue {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             enumtxt: enumitem.to_owned(),
             block: context.element.clone(),
@@ -982,7 +982,7 @@ impl ParserError {
         tag: &str,
     ) -> Self {
         Self::InvalidMultiplicityTooMany {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             tag: tag.to_string(),
             block: context.element.clone(),
@@ -996,7 +996,7 @@ impl ParserError {
         tag: &str,
     ) -> Self {
         Self::IncorrectEndTag {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             tag: tag.to_owned(),
             block: context.element.clone(),
@@ -1010,7 +1010,7 @@ impl ParserError {
         tag: &str,
     ) -> Self {
         Self::UnknownSubBlock {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             tag: tag.to_owned(),
             block: context.element.clone(),
@@ -1020,7 +1020,7 @@ impl ParserError {
 
     pub(crate) fn unexpected_eof(parser: &ParserState, context: &ParseContext) -> Self {
         Self::UnexpectedEOF {
-            filename: parser.filenames[context.fileid].clone(),
+            filename: parser.filenames[context.fileid].to_string(),
             error_line: parser.last_token_position,
             block: context.element.clone(),
             block_line: context.line,
@@ -1102,12 +1102,12 @@ impl Display for A2lVersion {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::load_from_string;
+    use crate::{load_from_string, tokenizer, Filename};
 
     #[test]
     fn parsing_numbers_test() {
         let input_text = r##"0 0x1 1.0e+2 1000 0 0.1 0x11 1.0e+2"##;
-        let tokenresult = super::super::tokenizer::tokenize("test_input".to_owned(), 0, input_text);
+        let tokenresult = tokenizer::tokenize(&Filename::from("test_input"), 0, input_text);
 
         assert!(tokenresult.is_ok());
         let tokenresult = tokenresult.unwrap();
@@ -1194,7 +1194,7 @@ mod tests {
     #[test]
     fn parsing_identifiers_test() {
         let input_text = r##"ident 0ident 123"##;
-        let tokenresult = super::super::tokenizer::tokenize("test_input".to_owned(), 0, input_text);
+        let tokenresult = tokenizer::tokenize(&Filename::from("test_input"), 0, input_text);
 
         assert!(tokenresult.is_ok());
         let tokenresult = tokenresult.unwrap();
@@ -1224,8 +1224,7 @@ mod tests {
     #[test]
     fn test_check_version() {
-        let tokenresult =
-            super::super::tokenizer::tokenize("test_input".to_owned(), 0, "").unwrap();
+        let tokenresult = tokenizer::tokenize(&Filename::from("test_input"), 0, "").unwrap();
         let mut log_msgs = Vec::<A2lError>::new();
         let mut parser = ParserState::new(&tokenresult, &mut log_msgs, true);
         let context = ParseContext {
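Note: the many `.clone()`/`.to_owned()` → `.to_string()` conversions in parser.rs and below work because the error variants keep plain String fields while `filenames` now holds `Filename` values, and `.to_string()` goes through the Display impl, yielding the display name. The pattern, reduced to a self-contained sketch over any Display type:

use std::fmt::Display;

fn error_filename<T: Display>(filenames: &[T], fileid: usize) -> String {
    // with Filename, this returns the short /include name, not the resolved path
    filenames[fileid].to_string()
}

fn main() {
    let filenames = ["base.a2l", "abc/include1"];
    assert_eq!(error_filename(&filenames, 1), "abc/include1");
}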
diff --git a/a2lfile/src/specification.rs b/a2lfile/src/specification.rs
index 0496667..9f4c084 100644
--- a/a2lfile/src/specification.rs
+++ b/a2lfile/src/specification.rs
@@ -186,7 +186,7 @@ impl A2lFile {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -194,7 +194,7 @@ impl A2lFile {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -208,7 +208,7 @@ impl A2lFile {
             value
         } else {
             return Err(ParserError::InvalidMultiplicityNotPresent {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 tag: "PROJECT".to_string(),
                 block: context.element.clone(),
@@ -523,7 +523,7 @@ impl AddrType {
             }
             "DIRECT" => Ok(Self::Direct),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -1467,7 +1467,7 @@ impl Annotation {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -1475,7 +1475,7 @@ impl Annotation {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -1978,7 +1978,7 @@ impl ArComponent {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -1986,7 +1986,7 @@ impl ArComponent {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -2724,7 +2724,7 @@ impl AxisDescr {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -2732,7 +2732,7 @@ impl AxisDescr {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -3043,7 +3043,7 @@ impl AxisDescrAttribute {
             "RES_AXIS" => Ok(Self::ResAxis),
             "STD_AXIS" => Ok(Self::StdAxis),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -3604,7 +3604,7 @@ impl AxisPts {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -3612,7 +3612,7 @@ impl AxisPts {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -4603,7 +4603,7 @@ impl BitOperation {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -4611,7 +4611,7 @@ impl BitOperation {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -4960,7 +4960,7 @@ impl Blob {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -4968,7 +4968,7 @@ impl Blob {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -5304,7 +5304,7 @@ impl ByteOrderEnum {
                 Ok(Self::MsbLastMswFirst)
             }
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -5444,7 +5444,7 @@ impl CalibrationAccessEnum {
             "NOT_IN_MCD_SYSTEM" => Ok(Self::NotInMcdSystem),
             "OFFLINE_CALIBRATION" => Ok(Self::OfflineCalibration),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -5602,7 +5602,7 @@ impl CalibrationHandle {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -5610,7 +5610,7 @@ impl CalibrationHandle {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -5896,7 +5896,7 @@ impl CalibrationMethod {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -5904,7 +5904,7 @@ impl CalibrationMethod {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -5983,7 +5983,7 @@ impl CharacterEncoding {
             "UTF16" => Ok(Self::Utf16),
             "UTF32" => Ok(Self::Utf32),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -6617,7 +6617,7 @@ impl Characteristic {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -6625,7 +6625,7 @@ impl Characteristic {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -7137,7 +7137,7 @@ impl CharacteristicType {
             "VAL_BLK" => Ok(Self::ValBlk),
             "VALUE" => Ok(Self::Value),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -7851,7 +7851,7 @@ impl CompuMethod {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -7859,7 +7859,7 @@ impl CompuMethod {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -8199,7 +8199,7 @@ impl CompuTab {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -8207,7 +8207,7 @@ impl CompuTab {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -8563,7 +8563,7 @@ impl CompuVtab {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -8571,7 +8571,7 @@ impl CompuVtab {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -8806,7 +8806,7 @@ impl CompuVtabRange {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -8814,7 +8814,7 @@ impl CompuVtabRange {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -9099,7 +9099,7 @@ impl ConversionType {
             "TAB_NOINTP" => Ok(Self::TabNointp),
             "TAB_VERB" => Ok(Self::TabVerb),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -9637,7 +9637,7 @@ impl DataType {
             "FLOAT32_IEEE" => Ok(Self::Float32Ieee),
             "FLOAT64_IEEE" => Ok(Self::Float64Ieee),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -9686,7 +9686,7 @@ impl DataTypeSize {
             "WORD" => Ok(Self::Word),
             "LONG" => Ok(Self::Long),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -10250,7 +10250,7 @@ impl DepositMode {
             "ABSOLUTE" => Ok(Self::Absolute),
             "DIFFERENCE" => Ok(Self::Difference),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -12152,7 +12152,7 @@ impl Formula {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -12160,7 +12160,7 @@ impl Formula {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -12460,7 +12460,7 @@ impl Frame {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -12468,7 +12468,7 @@ impl Frame {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -12936,7 +12936,7 @@ impl Function {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -12944,7 +12944,7 @@ impl Function {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -13563,7 +13563,7 @@ impl Group {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -13571,7 +13571,7 @@ impl Group {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -13922,7 +13922,7 @@ impl Header {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -13930,7 +13930,7 @@ impl Header {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -14254,7 +14254,7 @@ impl IndexMode {
             "COLUMN_DIR" => Ok(Self::ColumnDir),
             "ROW_DIR" => Ok(Self::RowDir),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -14295,7 +14295,7 @@ impl IndexOrder {
             "INDEX_INCR" => Ok(Self::IndexIncr),
             "INDEX_DECR" => Ok(Self::IndexDecr),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -14746,7 +14746,7 @@ impl Instance {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -14754,7 +14754,7 @@ impl Instance {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -16400,7 +16400,7 @@ impl Measurement {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -16408,7 +16408,7 @@ impl Measurement {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -16839,7 +16839,7 @@ impl MemoryAttribute {
             "INTERN" => Ok(Self::Intern),
             "EXTERN" => Ok(Self::Extern),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -17043,7 +17043,7 @@ impl MemoryLayout {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -17051,7 +17051,7 @@ impl MemoryLayout {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -17409,7 +17409,7 @@ impl MemorySegment {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -17417,7 +17417,7 @@ impl MemorySegment {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -17547,7 +17547,7 @@ impl MemoryType {
                 Ok(Self::NotInEcu)
             }
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -17855,7 +17855,7 @@ impl ModCommon {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -17863,7 +17863,7 @@ impl ModCommon {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -18403,7 +18403,7 @@ impl ModPar {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -18411,7 +18411,7 @@ impl ModPar {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -19292,7 +19292,7 @@ impl Module {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -19300,7 +19300,7 @@ impl Module {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -19863,7 +19863,7 @@ impl MonotonyType {
                 Ok(Self::NotMon)
             }
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -20747,7 +20747,7 @@ impl Overwrite {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -20755,7 +20755,7 @@ impl Overwrite {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -21120,7 +21120,7 @@ impl PrgType {
             "SERAM" => Ok(Self::Seram),
             "VARIABLES" => Ok(Self::Variables),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -21164,7 +21164,7 @@ impl ProgType {
             "PRG_DATA" => Ok(Self::PrgData),
             "PRG_RESERVED" => Ok(Self::PrgReserved),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -21320,7 +21320,7 @@ impl Project {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -21328,7 +21328,7 @@ impl Project {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -21340,7 +21340,7 @@ impl Project {
         }
         if module.is_empty() {
             parser.error_or_log(ParserError::InvalidMultiplicityNotPresent {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 tag: "MODULE".to_string(),
                 block: context.element.clone(),
@@ -22746,7 +22746,7 @@ impl RecordLayout {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -22754,7 +22754,7 @@ impl RecordLayout {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -25862,7 +25862,7 @@ impl StructureComponent {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -25870,7 +25870,7 @@ impl StructureComponent {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -26909,7 +26909,7 @@ impl Transformer {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -26917,7 +26917,7 @@ impl Transformer {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -27269,7 +27269,7 @@ impl TransformerTrigger {
             "ON_USER_REQUEST" => Ok(Self::OnUserRequest),
             "ON_CHANGE" => Ok(Self::OnChange),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -27582,7 +27582,7 @@ impl TypedefAxis {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -27590,7 +27590,7 @@ impl TypedefAxis {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -27901,7 +27901,7 @@ impl TypedefBlob {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -27909,7 +27909,7 @@ impl TypedefBlob {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -28302,7 +28302,7 @@ impl TypedefCharacteristic {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -28310,7 +28310,7 @@ impl TypedefCharacteristic {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -28860,7 +28860,7 @@ impl TypedefMeasurement {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -28868,7 +28868,7 @@ impl TypedefMeasurement {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -29269,7 +29269,7 @@ impl TypedefStructure {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -29277,7 +29277,7 @@ impl TypedefStructure {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -29560,7 +29560,7 @@ impl Unit {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -29568,7 +29568,7 @@ impl Unit {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -29784,7 +29784,7 @@ impl UnitType {
             "DERIVED" => Ok(Self::Derived),
             "EXTENDED_SI" => Ok(Self::ExtendedSi),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -30018,7 +30018,7 @@ impl UserRights {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -30026,7 +30026,7 @@ impl UserRights {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -30583,7 +30583,7 @@ impl VarCharacteristic {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -30591,7 +30591,7 @@ impl VarCharacteristic {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -30823,7 +30823,7 @@ impl VarCriterion {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -30831,7 +30831,7 @@ impl VarCriterion {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -31237,7 +31237,7 @@ impl VarNamingTag {
         match &*enumname {
             "NUMERIC" => Ok(Self::Numeric),
             _ => Err(ParserError::InvalidEnumValue {
-                filename: parser.filenames[context.fileid].to_owned(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 enumtxt: enumname,
                 block: context.element.to_owned(),
@@ -31607,7 +31607,7 @@ impl VariantCoding {
         if expect_block != is_block {
             if expect_block {
                 parser.error_or_log(ParserError::IncorrectBlockError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -31615,7 +31615,7 @@ impl VariantCoding {
                 })?;
             } else {
                 parser.error_or_log(ParserError::IncorrectKeywordError {
-                    filename: parser.filenames[context.fileid].to_owned(),
+                    filename: parser.filenames[context.fileid].to_string(),
                     error_line: parser.last_token_position,
                     tag: tag.to_string(),
                     block: context.element.clone(),
@@ -32135,7 +32135,7 @@ impl A2ml {
         let ident = parser.get_identifier(context)?;
         if ident != "A2ML" {
             parser.error_or_log(ParserError::IncorrectEndTag {
-                filename: parser.filenames[context.fileid].clone(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 tag: ident.clone(),
                 block: context.element.clone(),
@@ -32241,7 +32241,7 @@ impl IfData {
         let ident = parser.get_identifier(context)?;
         if ident != "IF_DATA" {
             parser.error_or_log(ParserError::IncorrectEndTag {
-                filename: parser.filenames[context.fileid].clone(),
+                filename: parser.filenames[context.fileid].to_string(),
                 error_line: parser.last_token_position,
                 tag: ident.clone(),
                 block: context.element.clone(),
+use crate::loader;
+use crate::Filename;
 use std::path::Path;
 use thiserror::Error;
 
-use super::loader;
-
 #[derive(Debug, Error)]
 #[non_exhaustive]
 pub enum TokenizerError {
@@ -63,22 +63,22 @@ pub struct A2lToken {
 pub(crate) struct TokenResult {
     pub(crate) tokens: Vec<A2lToken>,
     pub(crate) filedata: Vec<String>,
-    pub(crate) filenames: Vec<String>,
+    pub(crate) filenames: Vec<Filename>,
 }
 
 // tokenize()
 // Runs the actual tokenizer, then ensures that any /include directives are resolved
 pub(crate) fn tokenize(
-    filename: String,
+    filename: &Filename,
     fileid: usize,
     filetext: &str,
 ) -> Result<TokenResult, TokenizerError> {
-    let mut filenames: Vec<String> = vec![filename.clone()];
+    let mut filenames: Vec<Filename> = vec![filename.clone()];
     let mut filedatas: Vec<String> = vec![filetext.to_owned()];
     let filebytes = filetext.as_bytes();
     let mut next_fileid = fileid + 1;
 
-    let input_tokens = tokenize_core(filename.clone(), fileid, filetext)?;
+    let input_tokens = tokenize_core(filename.display.clone(), fileid, filetext)?;
     let mut include_directives: Vec<usize> = input_tokens
         .iter()
         .enumerate()
@@ -117,13 +117,17 @@ pub(crate) fn tokenize(
             }
             // incname is the include filename from the filetext without the surrounding quotes
            let incname = &filetext[filename_start..filename_end];
-            let incfilename = loader::make_include_filename(incname, &filenames[0]);
+            let incfilename = loader::make_include_filename(incname, &filename.full);
 
             // check if incname is an accessible file
             let incpathref = Path::new(&incfilename);
             let loadresult = loader::load(incpathref);
             if let Ok(incfiledata) = loadresult {
-                let mut tokresult = tokenize(incname.to_owned(), next_fileid, &incfiledata)?;
+                let mut tokresult = tokenize(
+                    &Filename::new(incfilename, incname),
+                    next_fileid,
+                    &incfiledata,
+                )?;
 
                 next_fileid += tokresult.filenames.len();
 
@@ -135,7 +139,7 @@ pub(crate) fn tokenize(
                 filedatas.append(&mut tokresult.filedata);
             } else {
                 return Err(TokenizerError::IncludeFileError {
-                    filename,
+                    filename: filename.to_string(),
                     line: token_subseq[0].line,
                     incname: incname.to_owned(),
                 });
@@ -145,7 +149,10 @@ pub(crate) fn tokenize(
             tokens.extend_from_slice(&token_subseq[1..]);
         } else {
             let line = input_tokens[include_directives[idx - 1]].line;
-            return Err(TokenizerError::IncompleteIncludeError { filename, line });
+            return Err(TokenizerError::IncompleteIncludeError {
+                filename: filename.to_string(),
+                line,
+            });
         }
     }
     tokens
@@ -570,41 +577,43 @@ fn is_numchar(c: u8) -> bool {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::io::Write;
+    use tempfile::tempdir;
 
     #[test]
     fn tokenize_a2l_comment() {
         let data = String::from("/**/");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
         //assert_eq!(tok[0].ttype, A2lTokenType::BlockComment);
 
         let data = String::from("/*/*/");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
         //assert_eq!(tok[0].ttype, A2lTokenType::BlockComment);
 
         let data = String::from("/***********/");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
         //assert_eq!(tok[0].ttype, A2lTokenType::BlockComment);
 
         let data = String::from("/***********/ abcdef");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         //assert_eq!(tok[0].ttype, A2lTokenType::BlockComment);
 
         let data = String::from("//");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
         //assert_eq!(tok[0].ttype, A2lTokenType::LineComment);
 
         let data = String::from("// abcdef");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
         //assert_eq!(tok[0].ttype, A2lTokenType::LineComment);
 
         let data = String::from("// abcdef\nabcde");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         //assert_eq!(tok[0].ttype, A2lTokenType::LineComment);
         //assert_eq!(tok[1].data.line, 2);
@@ -613,17 +622,17 @@ mod tests {
     #[test]
     fn tokenize_a2l_command() {
         let data = String::from("/begin");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::Begin);
 
         let data = String::from("/end");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::End);
 
         let data = String::from("/include");
-        let tokresult = tokenize_core("test".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize_core(String::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.len(), 1);
         assert_eq!(tokresult[0].ttype, A2lTokenType::Include);
     }
@@ -632,43 +641,43 @@ mod tests {
     fn tokenize_a2l_string() {
         /* empty string */
         let data = String::from(r#" "" "#);
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
 
         /* string containing a single double quote escaped as two double quotes */
         let data = String::from(r#" """" "#);
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
 
         /* string containing two instances of a single double quote escaped as two double quotes */
         let data = String::from(r#"" ""x"" ""#);
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
 
         /* string containing a single double quote escaped with a backslash */
         let data = String::from(r#" "\"" "#);
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
 
         /* string containing two instances of a single double quote escaped with a backslash */
         let data = String::from(r#"" \"x\" ""#);
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
 
         /* a string containing text */
         let data = String::from("\"sdf sdf sdf\"");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
 
         /* a string containing unicode characters */
         let data = String::from("\"\u{1234}\u{2345}\"");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::String);
     }
@@ -676,7 +685,7 @@ mod tests {
     #[test]
     fn tokenize_a2l_item() {
         let data = String::from("foo_bar");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::Identifier);
     }
@@ -684,12 +693,12 @@ mod tests {
     #[test]
     fn tokenize_a2l_number() {
         let data = String::from("0xabc1234");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::Number);
 
         let data = String::from("0ident");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 1);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::Identifier);
     }
@@ -697,22 +706,22 @@ mod tests {
     #[test]
     fn tokenize_a2l_skip_whitespace() {
         let data = String::from("");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
 
         let data = String::from(" ");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
 
         let data = String::from("\n\n ");
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 0);
     }
 
     #[test]
     fn tokenize_string_with_backslash() {
         let data = String::from(r#" ident "\\" 0 "#);
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         assert_eq!(tokresult.tokens.len(), 3);
     }
 
@@ -734,7 +743,7 @@ ASAP2_VERSION 1 60
 /end PROJECT
 "##,
         );
-        let tokresult = tokenize("testcase".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize(&Filename::from("testcase"), 0, &data).expect("Error");
         println!("token count: {}", tokresult.tokens.len());
         assert_eq!(tokresult.tokens.len(), 20);
         assert_eq!(tokresult.tokens[0].ttype, A2lTokenType::Identifier);
@@ -752,7 +761,7 @@ ASAP2_VERSION 1 60
 "##,
         );
 
-        let tokresult = tokenize_core("test".to_string(), 0, &data).expect("Error");
+        let tokresult = tokenize_core(String::from("test"), 0, &data).expect("Error");
         assert_eq!(tokresult.len(), 8);
         println!("{:?}", tokresult);
         assert_eq!(tokresult[0].ttype, A2lTokenType::Include);
@@ -764,4 +773,31 @@ ASAP2_VERSION 1 60
         assert_eq!(tokresult[6].ttype, A2lTokenType::Include);
         assert_eq!(tokresult[7].ttype, A2lTokenType::String);
     }
+
+    #[test]
+    fn included_files() {
+        let dir = tempdir().unwrap();
+
+        // base file at <dir>/base
+        let base_filename = dir.path().join("base");
+        let mut basefile = std::fs::File::create_new(&base_filename).unwrap();
+        basefile.write(br#"/include "abc/include1""#).unwrap();
+
+        // include file 1 at <dir>/abc/include1
+        let subdir = dir.path().join("abc");
+        let inc1name = subdir.join("include1");
+        std::fs::create_dir(&subdir).unwrap();
+        let mut incfile1 = std::fs::File::create_new(&inc1name).unwrap();
+        incfile1.write(br#"/include "def/include2""#).unwrap();
+
+        // include file 2 at <dir>/abc/def/include2
+        let subdir2 = subdir.join("def");
+        std::fs::create_dir(&subdir2).unwrap();
+        let _incfile2 = std::fs::File::create_new(subdir2.join("include2")).unwrap();
+
+        // run the a2l tokenizer. It should not return an error from the includes
+        let filetext = loader::load(&base_filename).unwrap();
+        let tokresult = tokenize(&Filename::from(base_filename.as_path()), 0, &filetext).unwrap();
+        assert_eq!(tokresult.filenames.len(), 3);
+    }
 }
diff --git a/a2lmacros/src/a2mlspec.rs b/a2lmacros/src/a2mlspec.rs
index a1956cb..6a19303 100644
--- a/a2lmacros/src/a2mlspec.rs
+++ b/a2lmacros/src/a2mlspec.rs
@@ -1,6 +1,8 @@
 use std::collections::{HashMap, HashSet};
 
 use crate::codegenerator;
+use crate::codegenerator::{BaseType, DataItem, EnumItem, TaggedItem};
+use crate::util::*;
 use proc_macro2::Delimiter;
 use proc_macro2::Ident;
 use proc_macro2::TokenStream;
@@ -8,9 +10,6 @@ use proc_macro2::TokenTree;
 use quote::format_ident;
 use quote::quote;
 
-use super::codegenerator::{BaseType, DataItem, EnumItem, TaggedItem};
-use super::util::*;
-
 #[derive(Debug)]
 struct A2mlSpec {
     name: String,
diff --git a/a2lmacros/src/codegenerator.rs b/a2lmacros/src/codegenerator.rs
index af45a84..f932c11 100644
--- a/a2lmacros/src/codegenerator.rs
+++ b/a2lmacros/src/codegenerator.rs
@@ -1,9 +1,9 @@
-use std::str::FromStr;
-
+use crate::util::{make_varname, ucname_to_typename};
 use proc_macro2::TokenStream;
 use quote::format_ident;
 use quote::quote;
 use quote::ToTokens;
+use std::str::FromStr;
 
 pub(crate) mod data_structure;
 pub(crate) mod ifdata_parser;
@@ -11,8 +11,6 @@ pub(crate) mod ifdata_writer;
 pub(crate) mod parser;
 pub(crate) mod writer;
 
-use super::util::{make_varname, ucname_to_typename};
-
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub(crate) enum A2lVersion {
     V1_5_0,
diff --git a/a2lmacros/src/codegenerator/ifdata_parser.rs b/a2lmacros/src/codegenerator/ifdata_parser.rs
index b5705bd..114ae79 100644
--- a/a2lmacros/src/codegenerator/ifdata_parser.rs
+++ b/a2lmacros/src/codegenerator/ifdata_parser.rs
@@ -2,7 +2,7 @@ use proc_macro2::TokenStream;
 use quote::format_ident;
 use quote::quote;
 
-use super::{BaseType, DataItem, EnumItem};
+use crate::codegenerator::{BaseType, DataItem, EnumItem};
 use crate::util::{make_varname, ucname_to_typename};
 
 //-----------------------------------------------------------------------------
diff --git a/a2lmacros/src/codegenerator/ifdata_writer.rs b/a2lmacros/src/codegenerator/ifdata_writer.rs
index 0e2f204..e1f3d7a 100644
--- a/a2lmacros/src/codegenerator/ifdata_writer.rs
+++ b/a2lmacros/src/codegenerator/ifdata_writer.rs
@@ -3,7 +3,7 @@ use proc_macro2::TokenStream;
 use quote::format_ident;
 use quote::quote;
 
-use super::{BaseType, DataItem, EnumItem, TaggedItem};
+use crate::codegenerator::{BaseType, DataItem, EnumItem, TaggedItem};
 use crate::util::{make_varname, ucname_to_typename};
 
 pub(crate) fn generate(typename: &str, dataitem: &DataItem) -> TokenStream {
diff --git a/a2lmacros/src/codegenerator/parser.rs b/a2lmacros/src/codegenerator/parser.rs
index 81a2252..b317645 100644
--- a/a2lmacros/src/codegenerator/parser.rs
+++ b/a2lmacros/src/codegenerator/parser.rs
@@ -4,7 +4,7 @@ use quote::format_ident;
 use quote::quote;
 use quote::TokenStreamExt;
 
-use super::{generate_bare_typename, BaseType, DataItem, EnumItem, TaggedItem};
+use crate::codegenerator::{generate_bare_typename, BaseType, DataItem, EnumItem, TaggedItem};
 use crate::util::{make_varname, ucname_to_typename};
 
 // generate
diff --git a/a2lmacros/src/codegenerator/writer.rs b/a2lmacros/src/codegenerator/writer.rs
index 1821440..8c40da9 100644
--- a/a2lmacros/src/codegenerator/writer.rs
+++ b/a2lmacros/src/codegenerator/writer.rs
@@ -2,7 +2,7 @@ use proc_macro2::{Literal, TokenStream};
 use quote::format_ident;
 use quote::quote;
 
-use super::{make_varname, ucname_to_typename, BaseType, DataItem, EnumItem};
+use crate::codegenerator::{make_varname, ucname_to_typename, BaseType, DataItem, EnumItem};
 
 // generate()
 // Generate a pub fn stringify() function for all data types in the specification
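
Reviewer note: the `Filename` type that this patch threads through the tokenizer is defined elsewhere in the PR and is not visible in this diff. Inferred from its usage above (`Filename::new(incfilename, incname)`, `filename.full`, `filename.display`, `Filename::from(&str)`, `Filename::from(&Path)`, and `filename.to_string()`), a minimal sketch could look like the following; the field types, visibility, and trait bounds are assumptions for illustration, not the PR's actual definition:

// Hypothetical sketch of Filename, inferred from usage in this diff.
use std::fmt;
use std::path::Path;

#[derive(Debug, Clone)]
pub struct Filename {
    // full path, used to resolve /include directives relative to this file
    pub(crate) full: String,
    // name as written in the source, used in error messages and diagnostics
    pub(crate) display: String,
}

impl Filename {
    pub(crate) fn new<S: Into<String>>(full: S, display: &str) -> Self {
        Self {
            full: full.into(),
            display: display.to_string(),
        }
    }
}

impl From<&str> for Filename {
    fn from(name: &str) -> Self {
        Self::new(name, name)
    }
}

impl From<&Path> for Filename {
    fn from(path: &Path) -> Self {
        let name = path.to_string_lossy().into_owned();
        Self::new(name.clone(), &name)
    }
}

impl fmt::Display for Filename {
    // errors report the short display name, which is why the ParserError and
    // TokenizerError fields switch from .clone()/.to_owned() to .to_string()
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.display)
    }
}

This full/display split explains the mechanical changes above: `full` feeds `loader::make_include_filename` so that nested includes resolve relative to the including file (the behavior the new `included_files` test exercises), while `display` keeps error messages and `tokenize_core` diagnostics short.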