:- module(st_tokens, [
    st_tokens/3 % +Codes, +Options, -Tokens
]).

% DCG helpers: whites//0, string//1.
:- use_module(library(dcg/basics)).
% must_be/2 for validating the frontend option.
:- use_module(library(error)).
% option/2 for reading the options list.
:- use_module(library(option)).
%! st_tokens(+Codes, +Options, -Tokens) is det.
%
% Throws error(invalid_input(String))
% when the input in out/block instruction cannot
% be parsed into a Prolog term.
st_tokens(Codes, Options, Tokens):-
    option(frontend(Frontend), Options),
    ((  % The frontend option may carry a full syntax
        % specification with explicit start/end tokens ...
        Frontend = syntax_tokens(
            comment(_, _),
            out(_, _),
            out_unescaped(_, _),
            statement(_, _)),
        SyntaxTokens = Frontend
    ) ; (
        % ... otherwise it must name a built-in frontend.
        must_be(oneof([simple, semblance]), Frontend),
        named_syntax_tokens(Frontend, SyntaxTokens)
    )),
    phrase(tokens(Tmp1, SyntaxTokens), Codes),
    % Merge runs of plain character codes into text tokens.
    phrase(collapse(Tmp2), Tmp1), !,
    Tokens = Tmp2.

% Token syntaxes of the built-in frontends.

named_syntax_tokens(simple, syntax_tokens(
    comment("{{%", "}}"),
    out("{{=", "}}"),
    out_unescaped("{{-", "}}"),
    statement("{{", "}}"))).

named_syntax_tokens(semblance, syntax_tokens(
    comment("{#", "#}"),
    out("{{", "}}"),
    out_unescaped(keyword_unescape_start("{%"), "%}"),
    statement("{%", "%}"))).

% Start of a semblance unescaped-output instruction:
% the statement start token followed by the "unescape"
% keyword.
% NOTE(review): the StartToken terminal below was dropped
% by extraction and has been reconstructed from the head.

keyword_unescape_start(StartToken) -->
    StartToken, whites, "unescape".

% Token loop: comments are consumed and dropped,
% all other tokens are collected.

tokens(Tokens, Frontend) -->
    comment(Frontend), !,
    tokens(Tokens, Frontend).

tokens([Token|Tokens], Frontend) -->
    token(Token, Frontend), !,
    tokens(Tokens, Frontend).

tokens([], _) --> "".

% A comment: start token, any content, end token.
% NOTE(review): Start/End terminals reconstructed (see above).

comment(syntax_tokens(comment(Start, End), out(_,_), out_unescaped(_,_), statement(_,_))) -->
    Start, string(_), End, !.

% Escaped output instruction, produces out(Term).

out(syntax_tokens(comment(_,_), out(Start, End), out_unescaped(_,_), statement(_,_)), out(Term)) -->
    { string_codes(End, EndCodes) },
    Start, whites, term_to_token(Term, EndCodes), !.

% Unescaped output instruction, produces out_unescaped(Term).

out_unescaped(syntax_tokens(comment(_,_), out(_,_), out_unescaped(Start, End), statement(_,_)), out_unescaped(Term)) -->
    { string_codes(End, EndCodes) },
    Start, whites, term_to_token(Term, EndCodes), !.

token(Term, Frontend) -->
    out_unescaped(Frontend, Term), !.

token(Term, Frontend) -->
    out(Frontend, Term), !.

token(end, Frontend) -->
    start(Frontend), whites, "end", whites, end(Frontend), !.

token(else, Frontend) -->
    start(Frontend), whites, "else", whites, end(Frontend), !.
token(Token, Frontend) -->
    start(Frontend), whites, "include ", whites, term(Term, Frontend), !,
    { token_term_include(Term, Token) }.

token(Token, Frontend) -->
    start(Frontend), whites, "dynamic_include ", whites, term(Term, Frontend), !,
    { token_term_dyn_include(Term, Token) }.

token(Token, Frontend) -->
    start(Frontend), whites, "block", whites, term(Term, Frontend), !,
    { token_term_block(Term, Token) }.

token(slot, Frontend) -->
    start(Frontend), whites, "slot", whites, end(Frontend), !.

token(if(Cond), Frontend) -->
    start(Frontend), whites, "if ", whites, term(Cond, Frontend), !.

token(else_if(Cond), Frontend) -->
    start(Frontend), whites, "else ", whites, "if ", whites, term(Cond, Frontend), !.

token(Token, Frontend) -->
    start(Frontend), whites, "each ", whites, term(Term, Frontend), !,
    { token_term_each(Term, Token) }.

% A statement start token followed by no recognizable
% instruction: report it as invalid (throws).

token(_, Frontend) -->
    invalid(Frontend).

% Any other single code passes through; collapse//1
% later merges consecutive codes into text tokens.

token(Code, _) -->
    [Code].

% Statement start/end tokens of the frontend.
% NOTE(review): the Start/End terminals below were dropped
% by extraction and have been reconstructed from the heads.

start(syntax_tokens(comment(_,_), out(_,_), out_unescaped(_,_), statement(Start, _))) -->
    Start, !.

end(syntax_tokens(comment(_,_), out(_,_), out_unescaped(_,_), statement(_, End))) -->
    End, !.

% Collapses codes into text tokens.

collapse([Token|Tokens]) -->
    text(Token), !,
    collapse(Tokens).

collapse([Token|Tokens]) -->
    [Token], collapse(Tokens).

collapse([]) --> [].

% A maximal run of character codes as a text(Codes) token.

text(text(Codes)) -->
    text_codes(Codes).

text_codes([Code|Codes]) -->
    text_code(Code),
    text_codes(Codes).

text_codes([Code]) -->
    text_code(Code).

% A single plain character code (non-code tokens are
% compound terms, so number/1 distinguishes them).

text_code(Code) -->
    [Code], { number(Code) }.

% Turns term into an include token.

% FIXME validate path spec.

token_term_include(Term, Token):-
    (   Term =.. [',', File, Var]
    ->  Token = include(File, Var)
    ;   Token = include(Term)).

% Turns term into a dynamic include token.
% Turns term into a dynamic include token.

token_term_dyn_include(Term, Token):-
    (   Term = ','(File, Var)
    ->  Token = dynamic_include(File, Var)
    ;   Token = dynamic_include(Term)).

% Turns term into an each token. Accepts 2-4
% comma-joined arguments; anything else throws
% error(invalid_each(Term)).

token_term_each(Term, Token):-
    (   Term = ','(Items, ','(Item, ','(Index, Len)))
    ->  Token = each(Items, Item, Index, Len)
    ;   (   Term = ','(Items, ','(Item, Index))
        ->  Token = each(Items, Item, Index)
        ;   (   Term = ','(Items, Item)
            ->  Token = each(Items, Item)
            ;   throw(error(invalid_each(Term)))))).

% Turns term into a block token.

token_term_block(Term, Token):-
    (   Term =.. [',', File, Var]
    ->  Token = block(File, Var)
    ;   Token = block(Term)).

% Helper to report invalid instructions.
% Includes the first 5 codes of the instruction in
% the thrown error.

invalid(Frontend) -->
    start(Frontend), whites, [C1,C2,C3,C4,C5],
    {
        atom_codes(Atom, [C1,C2,C3,C4,C5]),
        atom_concat(Atom, '...', At),
        throw(error(invalid_instruction(At)))
    }.

% Extracts term from input.

term(Term, syntax_tokens(comment(_,_), out(_,_), out_unescaped(_,_), statement(_, End))) -->
    { string_codes(End, EndCodes) },
    term_to_token(Term, EndCodes), !.

% Reads codes up to the delimiter and parses them as a
% ground Prolog term. Throws error(non_ground_expression(T))
% for non-ground terms and error(invalid_input(String)) for
% unparsable input.

term_to_token(Term, Delimiter) -->
    codes_delimiter(Delimiter, Codes),
    {
        (   read_term_from_codes(Codes, Term, [])
        ->  (   ground(Term)
            ->  true
            ;   throw(error(non_ground_expression(Term))))
        ;   string_codes(String, Codes),
            throw(error(invalid_input(String))))
    }.

% Takes input up to and including the
% given delimiter.

codes_delimiter(Delimiter, []) -->
    match_codes(Delimiter), !.

codes_delimiter(Until, [Code|Codes]) -->
    [Code], codes_delimiter(Until, Codes).

% Matches the given list of codes literally.
% FIXED: the final clause was missing its terminating
% period in the extracted source.

match_codes([Code|Codes]) --> [Code], match_codes(Codes).
match_codes([]) --> "".
/** <module> Template tokenizer

Recognizes tokens from symbol codes. */