renamed class Outer_Keyword to Outer_Syntax;
author wenzelm
date Tue, 22 Dec 2009 19:38:06 +0100
changeset 34166 446a33b874b3
parent 34165 557b1c60f27f
child 34167 0a5e2c5195d5
renamed class Outer_Keyword to Outer_Syntax; renamed tokenize to scan (cf. ML version);
src/Pure/IsaMakefile
src/Pure/Isar/outer_keyword.scala
src/Pure/Isar/outer_syntax.scala
--- a/src/Pure/IsaMakefile	Tue Dec 22 18:36:01 2009 +0100
+++ b/src/Pure/IsaMakefile	Tue Dec 22 19:38:06 2009 +0100
@@ -126,7 +126,7 @@
   General/scan.scala General/swing_thread.scala General/symbol.scala	\
   General/xml.scala General/yxml.scala Isar/isar_document.scala		\
   Isar/outer_keyword.scala Isar/outer_lex.scala Isar/outer_parse.scala	\
-  System/cygwin.scala System/gui_setup.scala				\
+  Isar/outer_syntax.scala System/cygwin.scala System/gui_setup.scala	\
   System/isabelle_process.scala System/isabelle_syntax.scala		\
   System/isabelle_system.scala System/platform.scala			\
   System/session_manager.scala Thy/completion.scala Thy/html.scala	\
--- a/src/Pure/Isar/outer_keyword.scala	Tue Dec 22 18:36:01 2009 +0100
+++ b/src/Pure/Isar/outer_keyword.scala	Tue Dec 22 19:38:06 2009 +0100
@@ -7,9 +7,6 @@
 package isabelle
 
 
-import scala.util.parsing.input.{Reader, CharSequenceReader}
-
-
 object Outer_Keyword
 {
   val MINOR = "minor"
@@ -48,46 +45,3 @@
   val improper = Set(THY_SCRIPT, PRF_SCRIPT)
 }
 
-
-class Outer_Keyword(symbols: Symbol.Interpretation)
-{
-  protected val keywords: Map[String, String] = Map((";" -> Outer_Keyword.DIAG))
-  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
-  lazy val completion: Completion = new Completion + symbols
-
-  def + (name: String, kind: String): Outer_Keyword =
-  {
-    val new_keywords = keywords + (name -> kind)
-    val new_lexicon = lexicon + name
-    val new_completion = completion + name
-    new Outer_Keyword(symbols) {
-      override val lexicon = new_lexicon
-      override val keywords = new_keywords
-      override lazy val completion = new_completion
-    }
-  }
-
-  def + (name: String): Outer_Keyword = this + (name, Outer_Keyword.MINOR)
-
-  def is_command(name: String): Boolean =
-    keywords.get(name) match {
-      case Some(kind) => kind != Outer_Keyword.MINOR
-      case None => false
-    }
-
-
-  /* tokenize */
-
-  def tokenize(input: Reader[Char]): List[Outer_Lex.Token] =
-  {
-    import lexicon._
-
-    parseAll(rep(token(symbols, is_command)), input) match {
-      case Success(tokens, _) => tokens
-      case _ => error("Failed to tokenize input:\n" + input.source.toString)
-    }
-  }
-
-  def tokenize(input: CharSequence): List[Outer_Lex.Token] =
-    tokenize(new CharSequenceReader(input))
-}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Isar/outer_syntax.scala	Tue Dec 22 19:38:06 2009 +0100
@@ -0,0 +1,54 @@
+/*  Title:      Pure/Isar/outer_syntax.scala
+    Author:     Makarius
+
+Isabelle/Isar outer syntax.
+*/
+
+package isabelle
+
+
+import scala.util.parsing.input.{Reader, CharSequenceReader}
+
+
+class Outer_Syntax(symbols: Symbol.Interpretation)
+{
+  protected val keywords: Map[String, String] = Map((";" -> Outer_Keyword.DIAG))
+  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
+  lazy val completion: Completion = new Completion + symbols  // FIXME !?
+
+  def + (name: String, kind: String): Outer_Syntax =
+  {
+    val new_keywords = keywords + (name -> kind)
+    val new_lexicon = lexicon + name
+    val new_completion = completion + name
+    new Outer_Syntax(symbols) {
+      override val lexicon = new_lexicon
+      override val keywords = new_keywords
+      override lazy val completion = new_completion
+    }
+  }
+
+  def + (name: String): Outer_Syntax = this + (name, Outer_Keyword.MINOR)
+
+  def is_command(name: String): Boolean =
+    keywords.get(name) match {
+      case Some(kind) => kind != Outer_Keyword.MINOR
+      case None => false
+    }
+
+
+  /* scan */
+
+  def scan(input: Reader[Char]): List[Outer_Lex.Token] =
+  {
+    import lexicon._
+
+    parseAll(rep(token(symbols, is_command)), input) match {
+      case Success(tokens, _) => tokens
+      case _ => error("Failed to tokenize input:\n" + input.source.toString)
+    }
+  }
+
+  def scan(input: CharSequence): List[Outer_Lex.Token] =
+    scan(new CharSequenceReader(input))
+}
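
A minimal usage sketch of the renamed API follows; it is not part of the patch. It builds an Outer_Syntax table via the "+" operators and calls scan where callers previously called tokenize. The keyword names print_state and where are illustrative only, and the symbols argument is assumed to be obtained elsewhere, since the constructor of Symbol.Interpretation lies outside this changeset.

    // Hypothetical demo, not part of changeset 34166: exercises the renamed
    // Outer_Syntax API. Keyword names are invented for illustration and
    // `symbols` is assumed to be supplied by the surrounding system.
    package isabelle

    object Outer_Syntax_Demo
    {
      def demo(symbols: Symbol.Interpretation): List[Outer_Lex.Token] =
      {
        // each "+" returns a fresh Outer_Syntax; the tables are immutable
        val syntax =
          new Outer_Syntax(symbols) +
            ("print_state", Outer_Keyword.DIAG) +  // command keyword (kind != MINOR)
            "where"                                // plain keyword (Outer_Keyword.MINOR)

        require(syntax.is_command("print_state"))  // DIAG is not MINOR
        require(!syntax.is_command("where"))       // MINOR keywords are not commands

        // formerly: syntax.tokenize(...)
        syntax.scan("print_state where x")
      }
    }

Note the design choice visible in the patch: "+" never mutates; it allocates an anonymous subclass overriding lexicon, keywords and completion, so previously obtained syntax values remain valid snapshots.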