73 } |
73 } |
74 } |
74 } |
75 |
75 |
/** Outer syntax of the prover: keyword table plus scanning lexicons.
  *
  * NOTE(review): this region was an interleaved old/new revision dump; the
  * newer revision is kept, in which the single `lexicon` was split into a
  * `minor` and a `major` keyword lexicon (the rest of this chunk
  * consistently references `minor`/`major`).
  */
final class Outer_Syntax private(
  keywords: Map[String, (String, List[String])] = Map.empty,  // keyword name -> (kind, tags)
  minor: Scan.Lexicon = Scan.Lexicon.empty,  // lexicon of minor keywords
  major: Scan.Lexicon = Scan.Lexicon.empty,  // lexicon of major (command) keywords
  val completion: Completion = Completion.empty,
  val language_context: Completion.Language_Context = Completion.Language_Context.outer,
  val has_tokens: Boolean = true) extends Prover.Syntax
{
  /** syntax content **/
  /* add keywords */

127 def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax = |
128 def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax = |
128 { |
129 { |
129 val keywords1 = keywords + (name -> kind) |
130 val keywords1 = keywords + (name -> kind) |
130 val lexicon1 = lexicon + name |
131 val (minor1, major1) = |
|
132 if (kind._1 == Keyword.MINOR) (minor + name, major) else (minor, major + name) |
131 val completion1 = |
133 val completion1 = |
132 if (replace == Some("")) completion |
134 if (replace == Some("")) completion |
133 else completion + (name, replace getOrElse name) |
135 else completion + (name, replace getOrElse name) |
134 new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true) |
136 new Outer_Syntax(keywords1, minor1, major1, completion1, language_context, true) |
135 } |
137 } |
136 |
138 |
137 def + (name: String, kind: (String, List[String])): Outer_Syntax = |
139 def + (name: String, kind: (String, List[String])): Outer_Syntax = |
138 this + (name, kind, Some(name)) |
140 this + (name, kind, Some(name)) |
139 def + (name: String, kind: String): Outer_Syntax = |
141 def + (name: String, kind: String): Outer_Syntax = |


  /* language context */

160 def set_language_context(context: Completion.Language_Context): Outer_Syntax = |
162 def set_language_context(context: Completion.Language_Context): Outer_Syntax = |
161 new Outer_Syntax(keywords, lexicon, completion, context, has_tokens) |
163 new Outer_Syntax(keywords, minor, major, completion, context, has_tokens) |
162 |
164 |
163 def no_tokens: Outer_Syntax = |
165 def no_tokens: Outer_Syntax = |
164 { |
166 { |
165 require(keywords.isEmpty && lexicon.isEmpty) |
167 require(keywords.isEmpty && minor.isEmpty && major.isEmpty) |
166 new Outer_Syntax( |
168 new Outer_Syntax( |
167 completion = completion, |
169 completion = completion, |
168 language_context = language_context, |
170 language_context = language_context, |
169 has_tokens = false) |
171 has_tokens = false) |
170 } |
172 } |
  /* token language */

209 def scan(input: CharSequence): List[Token] = |
211 def scan(input: CharSequence): List[Token] = |
210 { |
212 { |
211 val in: Reader[Char] = new CharSequenceReader(input) |
213 val in: Reader[Char] = new CharSequenceReader(input) |
212 Token.Parsers.parseAll( |
214 Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(minor, major)), in) match { |
213 Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match { |
|
214 case Token.Parsers.Success(tokens, _) => tokens |
215 case Token.Parsers.Success(tokens, _) => tokens |
215 case _ => error("Unexpected failure of tokenizing input:\n" + input.toString) |
216 case _ => error("Unexpected failure of tokenizing input:\n" + input.toString) |
216 } |
217 } |
217 } |
218 } |
218 |
219 |
220 { |
221 { |
221 var in: Reader[Char] = new CharSequenceReader(input) |
222 var in: Reader[Char] = new CharSequenceReader(input) |
222 val toks = new mutable.ListBuffer[Token] |
223 val toks = new mutable.ListBuffer[Token] |
223 var ctxt = context |
224 var ctxt = context |
224 while (!in.atEnd) { |
225 while (!in.atEnd) { |
225 Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match { |
226 Token.Parsers.parse(Token.Parsers.token_line(minor, major, ctxt), in) match { |
226 case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest } |
227 case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest } |
227 case Token.Parsers.NoSuccess(_, rest) => |
228 case Token.Parsers.NoSuccess(_, rest) => |
228 error("Unexpected failure of tokenizing input:\n" + rest.source.toString) |
229 error("Unexpected failure of tokenizing input:\n" + rest.source.toString) |
229 } |
230 } |
230 } |
231 } |