src/Pure/Isar/outer_syntax.scala
changeset 55492 28d4db6c6e79
parent 54513 5545aff878b1
child 55494 009b71c1ed23
--- a/src/Pure/Isar/outer_syntax.scala
+++ b/src/Pure/Isar/outer_syntax.scala
@@ -126,32 +126,28 @@
     new Outer_Syntax(completion = completion, has_tokens = false)
   }
 
   def scan(input: Reader[Char]): List[Token] =
   {
-    import lexicon._
-
-    parseAll(rep(token(is_command)), input) match {
-      case Success(tokens, _) => tokens
+    Scan.Parsers.parseAll(Scan.Parsers.rep(Scan.Parsers.token(lexicon, is_command)), input) match {
+      case Scan.Parsers.Success(tokens, _) => tokens
       case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
     }
   }
 
   def scan(input: CharSequence): List[Token] =
     scan(new CharSequenceReader(input))
 
   def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
   {
-    import lexicon._
-
     var in: Reader[Char] = new CharSequenceReader(input)
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      parse(token_context(is_command, ctxt), in) match {
-        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-        case NoSuccess(_, rest) =>
+      Scan.Parsers.parse(Scan.Parsers.token_context(lexicon, is_command, ctxt), in) match {
+        case Scan.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+        case Scan.Parsers.NoSuccess(_, rest) =>
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
       }
     }
     (toks.toList, ctxt)
  }
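
The substance of this changeset: both scanners previously reached the parser combinators through the per-instance lexicon via `import lexicon._`; now they call the stateless `Scan.Parsers` object and pass the lexicon as an explicit argument. The sketch below illustrates that pattern only, not Isabelle's actual `Scan` module: `Tokenizer`, `MyLexicon` and `MyToken` are hypothetical stand-ins, built on the same scala-parser-combinators library the original code imports `Reader` and `CharSequenceReader` from.

// Minimal sketch of the refactoring pattern in this changeset: a stateless
// parser object that receives the lexicon as a parameter, instead of parser
// methods living on the lexicon instance. All names here are hypothetical.
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.input.CharSequenceReader

final case class MyToken(is_command: Boolean, source: String)

// Stand-in for Scan.Lexicon: just a set of known keywords.
final case class MyLexicon(keywords: Set[String])

// Stateless singleton, analogous to Scan.Parsers after the change:
// it owns no lexicon, so one object serves every Outer_Syntax instance.
object Tokenizer extends Parsers {
  type Elem = Char

  // A "word" is a maximal run of non-whitespace characters.
  private val word: Parser[String] =
    rep1(elem("letter", (c: Char) => !c.isWhitespace)).map(_.mkString)

  private val space: Parser[String] =
    rep(elem("space", (c: Char) => c.isWhitespace)).map(_.mkString)

  // The lexicon is passed in explicitly, mirroring
  // Scan.Parsers.token(lexicon, is_command). (The real lexicon drives
  // longest-match keyword scanning; here it merely marks known keywords.)
  def token(lexicon: MyLexicon, is_command: String => Boolean): Parser[MyToken] =
    space ~> word.map(w => MyToken(lexicon.keywords(w) && is_command(w), w))

  def scan(lexicon: MyLexicon, is_command: String => Boolean,
      input: CharSequence): List[MyToken] =
    phrase(rep(token(lexicon, is_command)) <~ space)(
        new CharSequenceReader(input)) match {
      case Success(tokens, _) => tokens
      case NoSuccess(msg, _) => sys.error("tokenizing failed: " + msg)
    }
}

object TokenizerDemo {
  def main(args: Array[String]): Unit = {
    val lexicon = MyLexicon(Set("theory", "lemma", "by"))
    val commands = Set("theory", "lemma")
    Tokenizer.scan(lexicon, commands, "theory Foo lemma bar by auto")
      .foreach(println)
    // MyToken(true,theory), MyToken(false,Foo), MyToken(true,lemma),
    // MyToken(false,bar), MyToken(false,by), MyToken(false,auto)
  }
}

Making the parser object stateless is the point of the change: the combinators no longer close over a particular lexicon, so they need not be rebuilt when a new Outer_Syntax (with a new lexicon) is created.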
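
`scan_context` exists so callers can tokenize input piecewise: it threads a `Scan.Context` through the parse loop, so a construct that spans a chunk boundary (e.g. an unclosed comment) carries over into the next call. A toy illustration of that threading, with `MyContext` and `scanLine` as hypothetical stand-ins for `Scan.Context` and `scan_context`:

// Toy scanner: "(*" opens a comment, "*)" closes it; tokens are words.
// The returned context says whether a comment is still open at the end
// of the chunk, and is fed back in for the next chunk.
sealed trait MyContext
case object Finished extends MyContext
case object InsideComment extends MyContext

object ContextDemo {
  def scanLine(line: String, ctxt: MyContext): (List[String], MyContext) = {
    var c = ctxt
    val toks = List.newBuilder[String]
    for (w <- line.split("\\s+").filter(_.nonEmpty)) c match {
      case Finished if w == "(*" => c = InsideComment
      case Finished => toks += w
      case InsideComment if w == "*)" => c = Finished
      case InsideComment => // comment text: dropped
    }
    (toks.result(), c)
  }

  def main(args: Array[String]): Unit = {
    val lines = List("lemma foo (* a", "comment *) by auto")
    var ctxt: MyContext = Finished
    for (line <- lines) {
      val (toks, c) = scanLine(line, ctxt)
      println(s"$line  ->  tokens: $toks, context: $c")
      ctxt = c
    }
    // First line yields List(lemma, foo) and InsideComment is carried over;
    // the second line yields List(by, auto) and returns to Finished.
  }
}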