// crates/rebel-parse/src/grammar/tokenize.rs

use crate::token::*;

pub use rules::*;

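/// Reserved words; `ident_or_keyword` lexes these as `Token::Keyword` rather
/// than `Token::Ident`.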
static KEYWORDS: phf::Map<&'static str, Keyword> = phf::phf_map! {
	"else" => Keyword::Else,
	"false" => Keyword::False,
	"fetch" => Keyword::Fetch,
	"fn" => Keyword::Fn,
	"for" => Keyword::For,
	"if" => Keyword::If,
	"let" => Keyword::Let,
	"map" => Keyword::Map,
	"mut" => Keyword::Mut,
	"none" => Keyword::None,
	"recipe" => Keyword::Recipe,
	"set" =>  Keyword::Set,
	"task" => Keyword::Task,
	"true" => Keyword::True,
};

peg::parser! {
	pub grammar rules() for str {
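		/// Tokenizes a whole input into a `TokenStream`, allowing whitespace and
		/// comments before, between, and after the tokens.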
		pub rule token_stream() -> TokenStream<'input>
			= _ tokens:(token() ** _) _ { TokenStream(tokens) }

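		/// A single token: number, string, identifier or keyword, or punctuation.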
		pub rule token() -> Token<'input>
			= number:number() { Token::Number(number) }
			/ string:string() { Token::Str(string) }
			/ token:ident_or_keyword() { token }
			/ punct:punct() { Token::Punct(punct) }

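		/// An identifier of the form `[A-Za-z_][A-Za-z0-9_]*`; names listed in
		/// `KEYWORDS` become keyword tokens instead.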
		rule ident_or_keyword() -> Token<'input>
			= s:$(
				['a'..='z' | 'A'..='Z' | '_']
				['a'..='z' | 'A'..='Z' | '_' | '0'..='9']*
			) {
				if let Some(kw) = KEYWORDS.get(s) {
					Token::Keyword(*kw)
				} else {
					Token::Ident(s)
				}
			}

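		/// A punctuation character together with its spacing relative to the
		/// character that follows it.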
		rule punct() -> Punct
			= ch:punct_char() spacing:spacing() { Punct(ch, spacing) }

		rule punct_char() -> char
			= !comment_start() ch:[
				| '~' | '!' | '@' | '#' | '$' | '%' | '^' | '&'
				| '*' | '-' | '=' | '+' | '|' | ';' | ':' | ','
				| '<' | '.' | '>' | '/' | '\'' | '?' | '(' | ')'
				| '[' | ']' | '{' | '}'
			] { ch }

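		/// `Joint` if the next character is also a punctuation character, `Alone`
		/// otherwise (the same convention as proc-macro `Spacing`).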
		rule spacing() -> Spacing
			= &punct_char() { Spacing::Joint }
			/ { Spacing::Alone }

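		/// A numeric literal: a leading digit followed by any alphanumerics or
		/// underscores. No radix, digit, or suffix validation happens here.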
		rule number() -> &'input str
			= $(['0'..='9'] ['0'..='9' | 'a'..='z' | 'A'..='Z' | '_']*)

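		/// String literals in three flavours: regular (`"..."`, with escapes and
		/// interpolation), raw (`r"..."`), and script strings fenced by triple
		/// backticks.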
		rule string() -> Str<'input>
			= "\"" pieces:string_piece()* "\"" {
				Str {
					pieces,
					kind: StrKind::Regular,
				}
			}
			/ "r\"" chars:$([^'"']*) "\"" {
				Str {
					pieces: vec![StrPiece::Chars(chars)],
					kind: StrKind::Raw,
				}
			}
			/ "```" newline() pieces:script_string_piece()* "```" {
				Str {
					pieces,
					kind: StrKind::Script,
				}
			}

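		/// One piece of a regular string: literal characters, a backslash escape,
		/// or a `{{ ... }}` interpolation.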
		rule string_piece() -> StrPiece<'input>
			= chars:$((!"{{" [^'"' | '\\'])+) { StrPiece::Chars(chars) }
			/ "\\" escape:string_escape() { StrPiece::Escape(escape) }
			/ string_interp()

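		/// Escape sequences accepted after a backslash in regular strings.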
		rule string_escape() -> char
			= "n" { '\n' }
			/ "r" { '\r' }
			/ "t" { '\t' }
			/ "\\" { '\\' }
			/ "\"" { '"' }
			/ "{" { '{' }
			/ "0" { '\0' }
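			// \xNN: the first digit is limited to 0-7 so the value stays in the ASCII range.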
			/ "x" digits:$(['0'..='7'] hex_digit()) {
				u8::from_str_radix(digits, 16).unwrap().into()
			}
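			// \u{...}: one to six hex digits; the fallible action rejects values that
			// are not valid `char`s (e.g. surrogates).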
			/ "u{" digits:$(hex_digit()*<1,6>) "}" {?
				u32::from_str_radix(digits, 16).unwrap().try_into().or(Err("Invalid unicode escape"))
			}

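		/// One piece of a script string: raw characters up to the next interpolation
		/// or closing fence, or a `{{ ... }}` interpolation.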
		rule script_string_piece() -> StrPiece<'input>
			= chars:$((!"{{" !"```" [_])+) { StrPiece::Chars(chars) }
			/ string_interp()

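		/// A `{{ ... }}` interpolation holding one or more nested tokens.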
		rule string_interp() -> StrPiece<'input>
			= "{{" _ tokens:(subtoken() ++ _) _ "}}" {
				StrPiece::Interp(TokenStream(tokens))
			}

		rule subtoken() -> Token<'input>
			= !"}}" token:token() { token }

		rule hex_digit()
			= ['0'..='9' | 'a'..='f' | 'A'..='F']

		/// Mandatory whitespace
		rule __
			= ([' ' | '\t'] / quiet!{newline()} / quiet!{comment()})+

		/// Optional whitespace
		rule _
			= quiet!{__?}

		rule comment_start()
			= "//"
			/ "/*"

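		/// Line comments run to the end of the line or input; block comments do not nest.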
		rule comment()
			= "//" (!newline() [_])* (newline() / ![_])
			/ "/*" (!"*/" [_])* "*/"

		rule newline()
			= ['\n' | '\r']
	}
}
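
// Smoke-test sketch: the input string and the expectations below are illustrative.
// It relies on `TokenStream` exposing its token list as field `.0`, which the
// grammar above already assumes when it constructs `TokenStream(tokens)`.
#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn tokenizes_keywords_idents_numbers_and_punct() {
		let stream = token_stream("let answer = 42; // trailing comment")
			.expect("tokenizing should succeed");
		// Expected tokens: `let` (keyword), `answer` (ident), `=` (punct),
		// `42` (number), `;` (punct); the line comment is skipped as whitespace.
		assert_eq!(stream.0.len(), 5);
	}
}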