
Commit 324be49

Bring back the relative position calculation to have reasonable error messages (#6)
1 parent: a672516


52 files changed: +214, -195 lines
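Every file below carries the same mechanical change: Lexer::new no longer takes the raw source text plus an optional file handle, it takes the whole FullFileSource (source text, originating file, and the spans produced when dependencies are flattened). Those spans are what let the lexer translate absolute offsets in the flattened source back into positions in the original files — presumably the "relative position calculation" the commit title refers to. A minimal sketch of the new call shape, using the item names from the diffs; the huff_* crate paths are assumptions, not shown in this commit:

    // Sketch only: crate paths are assumed, item names are taken from the diffs.
    use huff_lexer::Lexer;
    use huff_utils::prelude::{FullFileSource, Token};

    fn lex(source: &str) {
        // Before: Lexer::new(full_source.source, Some(file.clone()))
        // After: the lexer receives the whole FullFileSource, spans included.
        let full_source = FullFileSource { source, file: None, spans: vec![] };
        let lexer = Lexer::new(full_source);
        let _tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
    }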

crates/codegen/src/irgen/statements.rs

Lines changed: 1 addition & 1 deletion
@@ -165,7 +165,7 @@ pub fn statement_gen<'a>(
      // Special case:
      // If the macro provided to __codesize is the current macro, we need to avoid a
      // circular reference If this is the case we will store a
-     // place holder inside the bytecode and fill it in later when
+     // placeholder inside the bytecode and fill it in later when
      // we have adequate information about the macros eventual size.
      // We also need to avoid if the codesize arg is any of the previous macros to
      // avoid a circular reference

crates/core/src/lib.rs

Lines changed: 2 additions & 2 deletions
@@ -312,7 +312,7 @@ impl<'a, 'l> Compiler<'a, 'l> {

      // Perform Lexical Analysis
      // Create a new lexer from the FileSource, flattening dependencies
-     let lexer = Lexer::new(full_source.source, Some(file.clone()));
+     let lexer = Lexer::new(full_source);

      // Grab the tokens from the lexer
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();

@@ -346,7 +346,7 @@ impl<'a, 'l> Compiler<'a, 'l> {

      // Perform Lexical Analysis
      // Create a new lexer from the FileSource, flattening dependencies
-     let lexer = Lexer::new(full_source.source, Some(file.clone()));
+     let lexer = Lexer::new(full_source);

      // Grab the tokens from the lexer
      let mut tokens = Vec::new();
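In the compiler itself, the FullFileSource handed to the lexer is built from the flattened dependency tree, so file and spans are populated rather than empty; the old call discarded the spans by passing only full_source.source. A sketch of that path, pieced together from the erc20/erc721 test diffs further down (the huff_* crate paths and the helper's shape are assumptions; the individual calls appear verbatim in those diffs):

    use std::sync::Arc;
    // Crate paths assumed; FileSource, FullFileSource, fully_flatten and Lexer::new
    // are the item names shown in the diffs.
    use huff_lexer::Lexer;
    use huff_utils::prelude::{FileSource, FullFileSource, Token};

    fn lex_with_spans(file_source: &Arc<FileSource>, recursed: &Arc<FileSource>) {
        // Flatten the dependency tree into one source string plus the spans that
        // record which original file each slice came from.
        let flattened = FileSource::fully_flatten(Arc::clone(recursed));
        let full_source = FullFileSource {
            source: &flattened.0,
            file: Some(Arc::clone(file_source)),
            spans: flattened.1,
        };
        // With the spans on board, error offsets can be mapped back to per-file
        // positions instead of offsets into the flattened blob.
        let lexer = Lexer::new(full_source);
        let _tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
    }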

crates/core/tests/alternative_constructor_macro.rs

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ fn test_alternative_constructor_macro_provided() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();
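The same one-line edit repeats in every test file below (alternative_main_macro, breaking_jumptable, builtins, codegen_errors, compiling, erc20, erc721). For tests that use an inline source string, the updated boilerplate amounts to a helper along these lines (a hypothetical helper with assumed crate paths; the individual statements are exactly the ones in the diffs):

    use huff_lexer::Lexer;
    use huff_parser::Parser;
    use huff_utils::prelude::{FullFileSource, Token};

    // Hypothetical shared helper mirroring the updated test boilerplate.
    fn lex_and_parse(source: &str) {
        // Inline test sources have no backing file and no flattening spans.
        let full_source = FullFileSource { source, file: None, spans: vec![] };
        let lexer = Lexer::new(full_source);
        let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
        let mut parser = Parser::new(tokens, Some("".to_string()));
        let _contract = parser.parse().unwrap();
    }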

crates/core/tests/alternative_main_macro.rs

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ fn test_alternative_main_macro_provided() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();

crates/core/tests/breaking_jumptable.rs

Lines changed: 1 addition & 1 deletion
@@ -133,7 +133,7 @@ fn test_breaking_jump_table() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

crates/core/tests/builtins.rs

Lines changed: 11 additions & 11 deletions
@@ -24,7 +24,7 @@ fn test_codesize_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -59,7 +59,7 @@ fn test_dyn_constructor_arg_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -149,7 +149,7 @@ fn test_tablesize_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -213,7 +213,7 @@ fn test_tablestart_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -277,7 +277,7 @@ fn test_jump_table_exhaustive_usage() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -337,7 +337,7 @@ fn test_jump_table_packed_exhaustive_usage() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -404,7 +404,7 @@ fn test_label_clashing() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -448,7 +448,7 @@ fn test_func_sig_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -488,7 +488,7 @@ fn test_event_hash_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -559,7 +559,7 @@ fn test_error_selector_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -599,7 +599,7 @@ fn test_rightpad_builtin() {

      // Parse tokens
      let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

crates/core/tests/codegen_errors.rs

Lines changed: 7 additions & 7 deletions
@@ -33,7 +33,7 @@ fn test_storage_pointers_not_derived() {
      // let const_end = const_start + "UNKNOWN_CONSTANT_DEFINITION".len();

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let contract = parser.parse().unwrap();

@@ -92,7 +92,7 @@ fn test_invalid_constant_definition() {
      let const_end = const_start + "UNKNOWN_CONSTANT_DEFINITION".len() - 1;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();

@@ -133,7 +133,7 @@ fn test_missing_constructor() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();

@@ -166,7 +166,7 @@ fn test_missing_main() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();

@@ -199,7 +199,7 @@ fn test_missing_when_alternative_main_provided() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();

@@ -242,7 +242,7 @@ fn test_unknown_macro_definition() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();

@@ -289,7 +289,7 @@ fn test_unmatched_jump_label() {
      "#;

      let full_source = FullFileSource { source, file: None, spans: vec![] };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("".to_string()));
      let mut contract = parser.parse().unwrap();
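These error-path tests are the consumers of the restored relative-position calculation: they derive an expected span from the source text and compare it with the span the compiler reports. The diff shows the const_end expression but not how const_start is obtained; a hedged sketch of that shape, assuming const_start comes from a plain substring search over the same source the lexer sees:

    // Illustrative only: the test's actual source and const_start are not shown
    // in this diff.
    fn expected_span(source: &str) -> Option<(usize, usize)> {
        // Assumption: locate the offending identifier with a substring search.
        let const_start = source.find("UNKNOWN_CONSTANT_DEFINITION")?;
        // Matches the const_end expression visible in the hunk above (inclusive end).
        let const_end = const_start + "UNKNOWN_CONSTANT_DEFINITION".len() - 1;
        Some((const_start, const_end))
    }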

crates/core/tests/compiling.rs

Lines changed: 2 additions & 2 deletions
@@ -40,7 +40,7 @@ const SOURCE: &str = r#"
      fn compiles_constructor_bytecode() {
      // Lex and Parse the source code
      let flattened_source = FullFileSource { source: SOURCE, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

@@ -66,7 +66,7 @@ fn compiles_constructor_bytecode() {
      fn compiles_runtime_bytecode() {
      // Lex and Parse the source code
      let flattened_source = FullFileSource { source: SOURCE, file: None, spans: vec![] };
-     let lexer = Lexer::new(flattened_source.source, None);
+     let lexer = Lexer::new(flattened_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, None);

crates/core/tests/erc20.rs

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ fn test_erc20_compile() {
      let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider).unwrap();
      let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
      let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("../../huff-examples/erc20/contracts".to_string()));
      let mut contract = parser.parse().unwrap();

crates/core/tests/erc721.rs

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ fn test_erc721_compile() {
      let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider).unwrap();
      let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
      let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };
-     let lexer = Lexer::new(full_source.source, None);
+     let lexer = Lexer::new(full_source);
      let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
      let mut parser = Parser::new(tokens, Some("../../huff-examples/erc20/contracts".to_string()));
      let mut contract = parser.parse().unwrap();
