src/Tools/jEdit/src/proofdocument/ProofDocument.scala
changeset 34759 bfea7839d9e1
parent 34758 710e3a9a4c95
child 34760 dc7f5e0d9d27
     1.1 --- a/src/Tools/jEdit/src/proofdocument/ProofDocument.scala	Tue Dec 08 14:29:29 2009 +0100
     1.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.3 @@ -1,277 +0,0 @@
     1.4 -/*
      1.5 - * Document as a list of commands, each consisting of a list of tokens
     1.6 - *
     1.7 - * @author Johannes Hölzl, TU Munich
     1.8 - * @author Fabian Immler, TU Munich
     1.9 - * @author Makarius
    1.10 - */
    1.11 -
    1.12 -package isabelle.proofdocument
    1.13 -
    1.14 -import scala.actors.Actor, Actor._
    1.15 -
    1.16 -import java.util.regex.Pattern
    1.17 -
    1.18 -import isabelle.prover.{Prover, Command, Command_State}
    1.19 -
    1.20 -
    1.21 -object ProofDocument
    1.22 -{
     1.23 -  // Be careful when changing this regex: not only must it handle the
     1.24 -  // spurious end of a token, it must also avoid triggering
     1.25 -  // Bug ID 5050507: Pattern.matches throws StackOverflowError
     1.26 -  // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5050507
    1.27 -  
    1.28 -  val token_pattern = 
    1.29 -    Pattern.compile(
    1.30 -      "\\{\\*([^*]|\\*[^}]|\\*\\z)*(\\z|\\*\\})|" +
    1.31 -      "\\(\\*([^*]|\\*[^)]|\\*\\z)*(\\z|\\*\\))|" +
    1.32 -      "(\\?'?|')[A-Za-z_0-9.]*|" + 
    1.33 -      "[A-Za-z_0-9.]+|" + 
    1.34 -      "[!#$%&*+-/<=>?@^_|~]+|" +
    1.35 -      "\"([^\\\\\"]?(\\\\(.|\\z))?)*+(\"|\\z)|" +
    1.36 -      "`([^\\\\`]?(\\\\(.|\\z))?)*+(`|\\z)|" +
    1.37 -      "[()\\[\\]{}:;]", Pattern.MULTILINE)
    1.38 -
    1.39 -  val empty =
    1.40 -    new ProofDocument(isabelle.jedit.Isabelle.system.id(),
    1.41 -      Linear_Set(), Map(), Linear_Set(), Map(), _ => false)
    1.42 -
    1.43 -  type StructureChange = List[(Option[Command], Option[Command])]
    1.44 -
    1.45 -}
    1.46 -
    1.47 -class ProofDocument(
    1.48 -  val id: String,
    1.49 -  val tokens: Linear_Set[Token],
    1.50 -  val token_start: Map[Token, Int],
    1.51 -  val commands: Linear_Set[Command],
    1.52 -  var states: Map[Command, Command_State],   // FIXME immutable
    1.53 -  is_command_keyword: String => Boolean)
    1.54 -{
    1.55 -  import ProofDocument.StructureChange
    1.56 -
    1.57 -  def set_command_keyword(f: String => Boolean): ProofDocument =
    1.58 -    new ProofDocument(id, tokens, token_start, commands, states, f)
    1.59 -
    1.60 -  def content = Token.string_from_tokens(Nil ++ tokens, token_start)
    1.61 -
    1.62 -
    1.63 -  
    1.64 -  /** token view **/
    1.65 -
    1.66 -  def text_changed(change: Change): (ProofDocument, StructureChange) =
    1.67 -  {
    1.68 -    def edit_doc(doc_chgs: (ProofDocument, StructureChange), edit: Edit) = {
    1.69 -      val (doc, chgs) = doc_chgs
    1.70 -      val (new_doc, chg) = doc.text_edit(edit, change.id)
    1.71 -      (new_doc, chgs ++ chg)
    1.72 -    }
    1.73 -    ((this, Nil: StructureChange) /: change.edits)(edit_doc)
    1.74 -  }
    1.75 -
    1.76 -  def text_edit(e: Edit, id: String): (ProofDocument, StructureChange) =
    1.77 -  {
    1.78 -    case class TextChange(start: Int, added: String, removed: String)
    1.79 -    val change = e match {
    1.80 -      case Insert(s, a) => TextChange(s, a, "")
    1.81 -      case Remove(s, r) => TextChange(s, "", r)
    1.82 -    }
     1.83 -    // indices of tokens
    1.84 -    var start: Map[Token, Int] = token_start
    1.85 -    def stop(t: Token) = start(t) + t.length
    1.86 -    // split old token lists
    1.87 -    val tokens = Nil ++ this.tokens
    1.88 -    val (begin, remaining) = tokens.span(stop(_) < change.start)
    1.89 -    val (removed, end) = remaining.span(token_start(_) <= change.start + change.removed.length)
    1.90 -    // update indices
    1.91 -    start = end.foldLeft(start)((s, t) =>
    1.92 -      s + (t -> (s(t) + change.added.length - change.removed.length)))
    1.93 -
    1.94 -    val split_begin = removed.takeWhile(start(_) < change.start).
    1.95 -      map (t => {
    1.96 -          val split_tok = new Token(t.content.substring(0, change.start - start(t)), t.kind)
    1.97 -          start += (split_tok -> start(t))
    1.98 -          split_tok
    1.99 -        })
   1.100 -
   1.101 -    val split_end = removed.dropWhile(stop(_) < change.start + change.removed.length).
   1.102 -      map (t => {
   1.103 -          val split_tok =
   1.104 -            new Token(t.content.substring(change.start + change.removed.length - start(t)), t.kind)
   1.105 -          start += (split_tok -> start(t))
   1.106 -          split_tok
   1.107 -        })
   1.108 -    // update indices
   1.109 -    start = removed.foldLeft (start) ((s, t) => s - t)
   1.110 -    start = split_end.foldLeft (start) ((s, t) =>
    1.111 -      s + (t -> (change.start + change.added.length)))
   1.112 -
   1.113 -    val ins = new Token(change.added, Token.Kind.OTHER)
   1.114 -    start += (ins -> change.start)
   1.115 -    
   1.116 -    var invalid_tokens = split_begin ::: ins :: split_end ::: end
   1.117 -    var new_tokens: List[Token] = Nil
   1.118 -    var old_suffix: List[Token] = Nil
   1.119 -
   1.120 -    val match_start = invalid_tokens.firstOption.map(start(_)).getOrElse(0)
   1.121 -    val matcher =
   1.122 -      ProofDocument.token_pattern.matcher(Token.string_from_tokens(invalid_tokens, start))
   1.123 -
   1.124 -    while (matcher.find() && invalid_tokens != Nil) {
    1.125 -      val kind =
   1.126 -        if (is_command_keyword(matcher.group))
   1.127 -          Token.Kind.COMMAND_START
   1.128 -        else if (matcher.end - matcher.start > 2 && matcher.group.substring(0, 2) == "(*")
   1.129 -          Token.Kind.COMMENT
   1.130 -        else
   1.131 -          Token.Kind.OTHER
   1.132 -      val new_token = new Token(matcher.group, kind)
   1.133 -      start += (new_token -> (match_start + matcher.start))
   1.134 -      new_tokens ::= new_token
   1.135 -
   1.136 -      invalid_tokens = invalid_tokens dropWhile (stop(_) < stop(new_token))
   1.137 -      invalid_tokens match {
   1.138 -        case t :: ts =>
   1.139 -          if (start(t) == start(new_token) &&
   1.140 -              start(t) > change.start + change.added.length) {
    1.141 -            old_suffix = t :: ts
    1.142 -            new_tokens = new_tokens.tail
    1.143 -            invalid_tokens = Nil
    1.144 -          }
   1.145 -        case _ =>
   1.146 -      }
   1.147 -    }
   1.148 -    val insert = new_tokens.reverse
   1.149 -    val new_token_list = begin ::: insert ::: old_suffix
   1.150 -    token_changed(id, begin.lastOption, insert,
   1.151 -      old_suffix.firstOption, new_token_list, start)
   1.152 -  }
   1.153 -
   1.154 -  
   1.155 -  /** command view **/
   1.156 -
   1.157 -  private def token_changed(
   1.158 -    new_id: String,
   1.159 -    before_change: Option[Token],
   1.160 -    inserted_tokens: List[Token],
   1.161 -    after_change: Option[Token],
   1.162 -    new_tokens: List[Token],
   1.163 -    new_token_start: Map[Token, Int]):
   1.164 -  (ProofDocument, StructureChange) =
   1.165 -  {
   1.166 -    val new_tokenset = Linear_Set[Token]() ++ new_tokens
   1.167 -    val cmd_before_change = before_change match {
   1.168 -      case None => None
   1.169 -      case Some(bc) =>
   1.170 -        val cmd_with_bc = commands.find(_.contains(bc)).get
   1.171 -        if (cmd_with_bc.tokens.last == bc) {
   1.172 -          if (new_tokenset.next(bc).map(_.is_start).getOrElse(true))
   1.173 -            Some(cmd_with_bc)
   1.174 -          else commands.prev(cmd_with_bc)
   1.175 -        }
   1.176 -        else commands.prev(cmd_with_bc)
   1.177 -    }
   1.178 -
   1.179 -    val cmd_after_change = after_change match {
   1.180 -      case None => None
   1.181 -      case Some(ac) =>
   1.182 -        val cmd_with_ac = commands.find(_.contains(ac)).get
   1.183 -        if (ac.is_start)
   1.184 -          Some(cmd_with_ac)
   1.185 -        else
   1.186 -          commands.next(cmd_with_ac)
   1.187 -    }
   1.188 -
   1.189 -    val removed_commands = commands.dropWhile(Some(_) != cmd_before_change).drop(1).
   1.190 -      takeWhile(Some(_) != cmd_after_change)
   1.191 -
   1.192 -    // calculate inserted commands
    1.193 -    def tokens_to_commands(tokens: List[Token]): List[Command] = {
   1.194 -      tokens match {
   1.195 -        case Nil => Nil
   1.196 -        case t :: ts =>
   1.197 -          val (cmd, rest) =
   1.198 -            ts.span(t => t.kind != Token.Kind.COMMAND_START && t.kind != Token.Kind.COMMENT)
   1.199 -          new Command(t :: cmd, new_token_start) :: tokens_to_commands(rest)
   1.200 -      }
   1.201 -    }
   1.202 -
   1.203 -    val split_begin =
   1.204 -      if (before_change.isDefined) {
   1.205 -        val changed =
   1.206 -          if (cmd_before_change.isDefined)
   1.207 -            new_tokens.dropWhile(_ != cmd_before_change.get.tokens.last).drop(1)
   1.208 -          else new_tokenset
   1.209 -        if (changed.exists(_ == before_change.get))
   1.210 -          changed.takeWhile(_ != before_change.get).toList :::
   1.211 -            List(before_change.get)
   1.212 -        else Nil
   1.213 -      } else Nil
   1.214 -
   1.215 -    val split_end =
   1.216 -      if (after_change.isDefined) {
   1.217 -        val unchanged = new_tokens.dropWhile(_ != after_change.get)
    1.218 -        if (cmd_after_change.isDefined) {
   1.219 -          if (unchanged.exists(_ == cmd_after_change.get.tokens.first))
   1.220 -            unchanged.takeWhile(_ != cmd_after_change.get.tokens.first).toList
   1.221 -          else Nil
   1.222 -        } else {
   1.223 -          unchanged
   1.224 -        }
   1.225 -      } else Nil
   1.226 -
   1.227 -    val rescan_begin =
   1.228 -      split_begin :::
   1.229 -        before_change.map(bc => new_tokens.dropWhile(_ != bc).drop(1)).getOrElse(new_tokens)
   1.230 -    val rescanning_tokens =
   1.231 -      after_change.map(ac => rescan_begin.takeWhile(_ != ac)).getOrElse(rescan_begin) :::
   1.232 -        split_end
   1.233 -    val inserted_commands = tokens_to_commands(rescanning_tokens.toList)
   1.234 -
   1.235 -    // build new document
   1.236 -    val new_commandset = commands.
   1.237 -      delete_between(cmd_before_change, cmd_after_change).
   1.238 -      append_after(cmd_before_change, inserted_commands)
   1.239 -
   1.240 -
   1.241 -    val doc =
   1.242 -      new ProofDocument(new_id, new_tokenset, new_token_start, new_commandset,
   1.243 -        states -- removed_commands, is_command_keyword)
   1.244 -
   1.245 -    val removes =
   1.246 -      for (cmd <- removed_commands) yield (cmd_before_change -> None)
   1.247 -    val inserts =
   1.248 -      for (cmd <- inserted_commands) yield (doc.commands.prev(cmd) -> Some(cmd))
   1.249 -
   1.250 -    return (doc, removes.toList ++ inserts)
   1.251 -  }
   1.252 -
   1.253 -  val commands_offsets = {
   1.254 -    var last_stop = 0
   1.255 -    (for (c <- commands) yield {
    1.256 -      val r = c -> (last_stop, c.stop(this))
   1.257 -      last_stop = c.stop(this)
   1.258 -      r
   1.259 -    }).toArray
   1.260 -  }
   1.261 -
   1.262 -  def command_at(pos: Int): Option[Command] =
   1.263 -    find_command(pos, 0, commands_offsets.length)
   1.264 -
   1.265 -  // use a binary search to find commands for a given offset
   1.266 -  private def find_command(pos: Int, array_start: Int, array_stop: Int): Option[Command] =
   1.267 -  {
   1.268 -    val middle_index = (array_start + array_stop) / 2
   1.269 -    if (middle_index >= commands_offsets.length) return None
   1.270 -    val (middle, (start, stop)) = commands_offsets(middle_index)
   1.271 -    // does middle contain pos?
   1.272 -    if (start <= pos && pos < stop)
   1.273 -      Some(middle)
   1.274 -    else if (start > pos)
   1.275 -      find_command(pos, array_start, middle_index)
   1.276 -    else if (stop <= pos)
   1.277 -      find_command(pos, middle_index + 1, array_stop)
   1.278 -    else error("impossible")
   1.279 -  }
   1.280 -}