@@ -12,9 +12,9 @@ fn empty() {
 	let allocator = Allocator::default();
 	let mut lex = Lexer::new(&allocator, "");
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 0);
 }

@@ -23,11 +23,11 @@ fn tokenizes_tilde_as_ddelim() {
 	let allocator = Allocator::default();
 	let mut lex = Lexer::new(&allocator, "~");
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Delim('~'));
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Delim('~'));
 	assert_eq!(lex.pos(), 1);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 1);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 1);
 }

@@ -36,11 +36,11 @@ fn tokenizes_newlines_as_whitespace() {
 	let allocator = Allocator::default();
 	let mut lex = Lexer::new(&allocator, "\r\n");
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Whitespace);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
 	assert_eq!(lex.pos(), 2);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 2);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 2);
 }

@@ -49,11 +49,11 @@ fn tokenizes_multiple_newlines_as_whitespace() {
 	let allocator = Allocator::default();
 	let mut lex = Lexer::new(&allocator, "\r\n");
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Whitespace);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
 	assert_eq!(lex.pos(), 2);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 2);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 2);
 }

@@ -62,39 +62,62 @@ fn tokenizes_multiple_whitespace_as_whitespace() {
 	let allocator = Allocator::default();
 	let mut lex = Lexer::new(&allocator, "\t \t \t");
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Whitespace);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
 	assert_eq!(lex.pos(), 5);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 5);
-	assert_eq!(lex.next_token(), Token::Eof);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
 	assert_eq!(lex.pos(), 5);
 }

 #[test]
 fn tokenizes_trivial_css_file() {
 	let allocator = Allocator::default();
-	let mut lex = Lexer::new(&allocator, "body { color: black }");
+	let mut lex = Lexer::new(&allocator, "body { color: black }/* fin */");
 	assert_eq!(lex.pos(), 0);
-	assert_eq!(lex.next_token(), Token::Ident(atom!("body")));
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Ident(atom!("body")));
 	assert_eq!(lex.pos(), 4);
-	assert_eq!(lex.next_token(), Token::Whitespace);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
 	assert_eq!(lex.pos(), 5);
-	assert_eq!(lex.next_token(), Token::LeftCurly);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::LeftCurly);
 	assert_eq!(lex.pos(), 6);
-	assert_eq!(lex.next_token(), Token::Whitespace);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
 	assert_eq!(lex.pos(), 7);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Ident(atom!("color")));
+	assert_eq!(lex.pos(), 12);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Colon);
+	assert_eq!(lex.pos(), 13);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
+	assert_eq!(lex.pos(), 14);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Ident(atom!("black")));
+	assert_eq!(lex.pos(), 19);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Whitespace);
+	assert_eq!(lex.pos(), 20);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::RightCurly);
+	assert_eq!(lex.pos(), 21);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Comment);
+	assert_eq!(lex.pos(), 30);
+	assert_eq!(lex.next_including_whitespace_and_comments(), Token::Eof);
+	assert_eq!(lex.pos(), 30);
+}
+
+#[test]
+fn skips_whitespace_and_comments_with_next() {
+	let allocator = Allocator::default();
+	let mut lex = Lexer::new(&allocator, "body { color: black }/* fin */");
+	assert_eq!(lex.pos(), 0);
+	assert_eq!(lex.next_token(), Token::Ident(atom!("body")));
+	assert_eq!(lex.pos(), 4);
+	assert_eq!(lex.next_token(), Token::LeftCurly);
+	assert_eq!(lex.pos(), 6);
 	assert_eq!(lex.next_token(), Token::Ident(atom!("color")));
 	assert_eq!(lex.pos(), 12);
 	assert_eq!(lex.next_token(), Token::Colon);
 	assert_eq!(lex.pos(), 13);
-	assert_eq!(lex.next_token(), Token::Whitespace);
-	assert_eq!(lex.pos(), 14);
 	assert_eq!(lex.next_token(), Token::Ident(atom!("black")));
 	assert_eq!(lex.pos(), 19);
-	assert_eq!(lex.next_token(), Token::Whitespace);
-	assert_eq!(lex.pos(), 20);
 	assert_eq!(lex.next_token(), Token::RightCurly);
 	assert_eq!(lex.pos(), 21);
 	assert_eq!(lex.next_token(), Token::Eof);
-	assert_eq!(lex.pos(), 21);
+	assert_eq!(lex.pos(), 30);
 }
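
For context on what the renamed calls exercise: the tests now drive `next_including_whitespace_and_comments()` when they want every token, and the new `skips_whitespace_and_comments_with_next` test checks that `next_token()` silently consumes whitespace and comments. Below is a minimal sketch of that relationship, assuming `next_token()` is a thin skipping wrapper over the raw stream; the `Lexer` and `Token` types here are simplified stand-ins for illustration, not this crate's actual definitions (the real lexer scans source text and tracks byte positions).

```rust
use std::collections::VecDeque;

// Hypothetical, simplified stand-ins: just enough to show the skipping loop.
#[derive(Debug, PartialEq)]
enum Token {
	Ident(String),
	Delim(char),
	Whitespace,
	Comment,
	Eof,
}

struct Lexer {
	tokens: VecDeque<Token>, // pretend these were already scanned from source
}

impl Lexer {
	// Raw stream: yields every token, including trivia.
	fn next_including_whitespace_and_comments(&mut self) -> Token {
		self.tokens.pop_front().unwrap_or(Token::Eof)
	}

	// Convenience stream: loop past Whitespace/Comment and return the next
	// significant token (or Eof).
	fn next_token(&mut self) -> Token {
		loop {
			match self.next_including_whitespace_and_comments() {
				Token::Whitespace | Token::Comment => continue,
				token => return token,
			}
		}
	}
}

fn main() {
	let mut lex = Lexer {
		tokens: VecDeque::from(vec![
			Token::Ident("body".into()),
			Token::Whitespace,
			Token::Comment,
			Token::Delim('~'),
		]),
	};
	assert_eq!(lex.next_token(), Token::Ident("body".into()));
	assert_eq!(lex.next_token(), Token::Delim('~')); // trivia skipped
	assert_eq!(lex.next_token(), Token::Eof);
}
```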