/*  Mercurial annotate metadata (reconstructed from page residue):
    author:      paulson, Thu, 16 Sep 2010 15:33:42 +0100
    changeset:   39459 (7753083c00e6)
    parent:      38471 (0924654b8163)
    child:       40454 (2516ea25a54b)
    permissions: -rw-r--r-
*/
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}

class Outer_Syntax(symbols: Symbol.Interpretation)
{
  /* keyword tables: maps each keyword name to its kind (see Keyword);
     the lexicon drives the scanner, the completion supports the editor */

  protected val keywords: Map[String, String] = Map(";" -> Keyword.DIAG)
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

  // the registered kind of a keyword, if any
  def keyword_kind(name: String): Option[String] = keywords.get(name)

  // extend the syntax by a keyword of the given kind -- functional update:
  // yields a fresh Outer_Syntax value, leaving this one unchanged
  def + (name: String, kind: String): Outer_Syntax =
  {
    val keywords1 = keywords + (name -> kind)
    val lexicon1 = lexicon + name
    val completion1 = completion + name
    new Outer_Syntax(symbols) {
      override val lexicon = lexicon1
      override val keywords = keywords1
      override lazy val completion = completion1
    }
  }

  // plain keyword: minor kind by default
  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

  // a name denotes a command iff it is registered with a non-minor kind
  def is_command(name: String): Boolean =
    keyword_kind(name).exists(_ != Keyword.MINOR)


  /* tokenize */

  // scan a character stream into outer tokens; total tokenization is
  // expected to succeed, so any parser failure is a hard error
  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(toks, _) => toks
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  // convenience: scan a plain character sequence
  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
}