/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.collection.mutable


object Outer_Syntax
{
  /* syntax */

  val empty: Outer_Syntax = new Outer_Syntax()

  def merge(syns: List[Outer_Syntax]): Outer_Syntax = (empty /: syns)(_ ++ _)


  /* string literals */

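  // Produce a quoted string token: the result is wrapped in double quotes, and control
  // characters (other than the YXML markers), backslash, and the double quote itself are
  // escaped as a backslash followed by a three-digit decimal character code; multi-character
  // Isabelle symbols pass through unchanged.  E.g. quote_string("a\"b") produces the
  // 8-character result "a\034b" (the double quotes are part of the result).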
  def quote_string(str: String): String =
  {
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= c.asInstanceOf[Int].toString
        }
        else result += c
      }
      else result ++= s
    }
    result += '"'
    result.toString
  }
}

final class Outer_Syntax private(
  val keywords: Keyword.Keywords = Keyword.Keywords.empty,
  val rev_abbrevs: Thy_Header.Abbrevs = Nil,
  val language_context: Completion.Language_Context = Completion.Language_Context.outer,
  val has_tokens: Boolean = true)
{
  /** syntax content **/

  override def toString: String = keywords.toString


  /* keywords */

  def + (name: String, kind: String = "", exts: List[String] = Nil): Outer_Syntax =
    new Outer_Syntax(
      keywords + (name, kind, exts), rev_abbrevs, language_context, has_tokens = true)

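  // Header keywords are registered under both their decoded (Unicode) and encoded
  // ("\<...>" notation) spelling, so that keyword lookup is independent of how the
  // source text represents Isabelle symbols.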
  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
    (this /: keywords) {
      case (syntax, (name, spec)) =>
        syntax +
          (Symbol.decode(name), spec.kind, spec.exts) +
          (Symbol.encode(name), spec.kind, spec.exts)
    }


  /* abbrevs */

  def abbrevs: Thy_Header.Abbrevs = rev_abbrevs.reverse

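  // Abbreviations are stored in reverse order of addition; duplicates within the new
  // batch are dropped via Library.distinct, existing entries are kept as they are.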
  def add_abbrevs(new_abbrevs: Thy_Header.Abbrevs): Outer_Syntax =
    if (new_abbrevs.isEmpty) this
    else {
      val rev_abbrevs1 = Library.distinct(new_abbrevs) reverse_::: rev_abbrevs
      new Outer_Syntax(keywords, rev_abbrevs1, language_context, has_tokens)
    }


  /* completion */

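  // Completion table derived from this syntax: every keyword that is a proper word
  // completes to itself, keywords of theory-block kind additionally expand to a
  // "<name> ... begin ... end" template (the \u0007 character apparently marks the
  // caret position for Completion), and declared abbreviations are entered under both
  // their decoded and encoded spelling.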
  private lazy val completion: Completion =
  {
    val completion_keywords = (keywords.minor.iterator ++ keywords.major.iterator).toList
    val completion_abbrevs =
      completion_keywords.flatMap(name =>
        (if (Keyword.theory_block.contains(keywords.kinds(name)))
          List((name, name + "\nbegin\n\u0007\nend"))
         else Nil) :::
        (if (Completion.Word_Parsers.is_word(name)) List((name, name)) else Nil)) :::
      abbrevs.flatMap(
        { case (a, b) =>
            val a1 = Symbol.decode(a)
            val a2 = Symbol.encode(a)
            val b1 = Symbol.decode(b)
            List((a1, b1), (a2, b1))
        })
    Completion.make(completion_keywords, completion_abbrevs)
  }

  def complete(
    history: Completion.History,
    unicode: Boolean,
    explicit: Boolean,
    start: Text.Offset,
    text: CharSequence,
    caret: Int,
    context: Completion.Language_Context): Option[Completion.Result] =
  {
    completion.complete(history, unicode, explicit, start, text, caret, context)
  }


  /* build */

  def + (header: Document.Node.Header): Outer_Syntax =
    add_keywords(header.keywords).add_abbrevs(header.abbrevs)

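  // Merging short-circuits on reference equality, so an unchanged syntax value is
  // re-used instead of being re-allocated.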
  def ++ (other: Outer_Syntax): Outer_Syntax =
    if (this eq other) this
    else if (this eq Outer_Syntax.empty) other
    else {
      val keywords1 = keywords ++ other.keywords
      val rev_abbrevs1 = Library.merge(rev_abbrevs, other.rev_abbrevs)
      if ((keywords eq keywords1) && (rev_abbrevs eq rev_abbrevs1)) this
      else new Outer_Syntax(keywords1, rev_abbrevs1, language_context, has_tokens)
    }


  /* load commands */

  def load_command(name: String): Option[List[String]] = keywords.load_commands.get(name)
  def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)


  /* language context */

  def set_language_context(context: Completion.Language_Context): Outer_Syntax =
    new Outer_Syntax(keywords, rev_abbrevs, context, has_tokens)

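  // Degenerate variant that keeps abbreviations and language context but is marked as
  // having no tokens; only admissible when no keywords have been declared.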
  def no_tokens: Outer_Syntax =
  {
    require(keywords.is_empty)
    new Outer_Syntax(
      rev_abbrevs = rev_abbrevs,
      language_context = language_context,
      has_tokens = false)
  }



  /** parsing **/

  /* command spans */

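  // Partition a token list into command spans.  Each span is classified as Ignored_Span
  // (whitespace/comments only), Malformed_Span (it contains an error token or lacks a
  // command keyword), or Command_Span, which records the command name together with the
  // position of the command keyword within the span.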
  def parse_spans(toks: List[Token]): List[Command_Span.Span] =
  {
    val result = new mutable.ListBuffer[Command_Span.Span]
    val content = new mutable.ListBuffer[Token]
    val ignored = new mutable.ListBuffer[Token]

    def ship(span: List[Token])
    {
      val kind =
        if (span.forall(_.is_ignored)) Command_Span.Ignored_Span
        else if (span.exists(_.is_error)) Command_Span.Malformed_Span
        else
          span.find(_.is_command) match {
            case None => Command_Span.Malformed_Span
            case Some(cmd) =>
              val name = cmd.source
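              // symbol offset of the command keyword within the span; the "+ 1" below
              // shifts the range to the 1-based symbol offsets used in Isabelle positions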
              val offset =
                (0 /: span.takeWhile(_ != cmd)) {
                  case (i, tok) => i + Symbol.length(tok.source) }
              val end_offset = offset + Symbol.length(name)
              val pos = Position.Range(Text.Range(offset, end_offset) + 1)
              Command_Span.Command_Span(name, pos)
          }
      result += Command_Span.Span(kind, span)
    }

    def flush()
    {
      if (content.nonEmpty) { ship(content.toList); content.clear }
      if (ignored.nonEmpty) { ship(ignored.toList); ignored.clear }
    }

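    // Ignored tokens (whitespace, comments) are buffered separately, so that runs of them
    // between commands become their own Ignored_Span.  A keyword that may precede a command
    // always starts a new span; a command token also starts one, unless the pending content
    // is such a preceding keyword that is still waiting for its command.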
    for (tok <- toks) {
      if (tok.is_ignored) ignored += tok
      else if (keywords.is_before_command(tok) ||
        tok.is_command &&
          (!content.exists(keywords.is_before_command(_)) || content.exists(_.is_command)))
      { flush(); content += tok }
      else { content ++= ignored; ignored.clear; content += tok }
    }
    flush()

    result.toList
  }

  def parse_spans(input: CharSequence): List[Command_Span.Span] =
    parse_spans(Token.explode(keywords, input))
}