}


/* line context */

/* jEdit line context parameterized over an arbitrary payload type C.
   Carries an optional scanner context plus a nesting depth.  Equality and
   hashCode are defined over the (context, depth) pair, so jEdit can detect
   when a line's context is unchanged and avoid re-tokenizing. */
class Generic_Line_Context[C](
    rules: ParserRuleSet,
    val context: Option[C],
    val depth: Int)
  extends TokenMarker.LineContext(rules, null)
{
  // hashCode and equals are kept consistent: both use (context, depth)
  override def hashCode: Int = (context, depth).hashCode
  override def equals(that: Any): Boolean =
    that match {
      // type argument C is erased at runtime, so only the values are compared
      case other: Generic_Line_Context[_] =>
        context == other.context && depth == other.depth
      case _ => false
    }
}
188 |
192 |
/* Retrieve the Generic_Line_Context stored for the given line of a jEdit
   buffer, if any.  Uses untyped reflective access to the private "lineMgr"
   field, since JEditBuffer does not expose its LineManager publicly. */
def buffer_line_context[C](buffer: JEditBuffer, line: Int): Option[Generic_Line_Context[C]] =
  Untyped.get(buffer, "lineMgr") match {
    case line_mgr: LineManager =>
      line_mgr.getLineContext(line) match {
        // unchecked match: C is erased, caller is responsible for the payload type
        case c: Generic_Line_Context[C] => Some(c)
        case _ => None
      }
    case _ => None
  }
198 |
202 |
|
/* Nesting depth recorded for the given buffer line; 0 when no line context
   is available (e.g. line not yet tokenized). */
def buffer_line_depth(buffer: JEditBuffer, line: Int): Int =
  buffer_line_context(buffer, line).map(_.depth).getOrElse(0)
|


/* token marker */

// Rule set used only to tag line contexts — jEdit's LineContext requires one,
// while actual tokenization is done by Isabelle's own scanners (see markTokens).
private val context_rules = new ParserRuleSet("isabelle", "MAIN")
201 |
210 |
// Concrete line context instantiated with Isabelle's Scan.Line_Context payload,
// threading both the scanner state and the nesting depth between lines.
private class Line_Context(context: Option[Scan.Line_Context], depth: Int)
  extends Generic_Line_Context[Scan.Line_Context](context_rules, context, depth)
213 |
208 class Marker(mode: String) extends TokenMarker |
214 class Marker(mode: String) extends TokenMarker |
209 { |
215 { |
210 override def markTokens(context: TokenMarker.LineContext, |
216 override def markTokens(context: TokenMarker.LineContext, |
211 handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext = |
217 handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext = |
212 { |
218 { |
213 val line_ctxt = |
219 val (opt_ctxt, depth) = |
214 context match { |
220 context match { |
215 case c: Line_Context => c.context |
221 case c: Line_Context => (c.context, c.depth) |
216 case _ => Some(Scan.Finished) |
222 case _ => (Some(Scan.Finished), 0) |
217 } |
223 } |
218 val line = if (raw_line == null) new Segment else raw_line |
224 val line = if (raw_line == null) new Segment else raw_line |
219 |
225 |
220 val context1 = |
226 val context1 = |
221 { |
227 { |
222 def no_context = |
|
223 { |
|
224 val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString) |
|
225 (List(styled_token), new Line_Context(None)) |
|
226 } |
|
227 val (styled_tokens, context1) = |
228 val (styled_tokens, context1) = |
228 if (mode == "isabelle-ml" || mode == "sml") { |
229 (opt_ctxt, Isabelle.mode_syntax(mode)) match { |
229 if (line_ctxt.isDefined) { |
230 case (Some(ctxt), _) if mode == "isabelle-ml" || mode == "sml" => |
230 val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, line_ctxt.get) |
231 val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, ctxt) |
231 val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source)) |
232 val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source)) |
232 (styled_tokens, new Line_Context(Some(ctxt1))) |
233 (styled_tokens, new Line_Context(Some(ctxt1), depth)) |
233 } |
234 |
234 else no_context |
235 case (Some(ctxt), Some(syntax)) if syntax.has_tokens => |
235 } |
236 val (tokens, ctxt1, depth1) = syntax.scan_line(line, ctxt, depth) |
236 else { |
237 val styled_tokens = |
237 Isabelle.mode_syntax(mode) match { |
238 tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source)) |
238 case Some(syntax) if syntax.has_tokens && line_ctxt.isDefined => |
239 (styled_tokens, new Line_Context(Some(ctxt1), depth1)) |
239 val (tokens, ctxt1) = syntax.scan_line(line, line_ctxt.get) |
240 |
240 val styled_tokens = |
241 case _ => |
241 tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source)) |
242 val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString) |
242 (styled_tokens, new Line_Context(Some(ctxt1))) |
243 (List(styled_token), new Line_Context(None, 0)) |
243 case _ => no_context |
|
244 } |
|
245 } |
244 } |
246 |
245 |
247 val extended = extended_styles(line) |
246 val extended = extended_styles(line) |
248 |
247 |
249 var offset = 0 |
248 var offset = 0 |