1/*  Title:      Pure/PIDE/resources.scala
2    Author:     Makarius
3
4Resources for theories and auxiliary files.
5*/
6
7package isabelle
8
9
10import scala.annotation.tailrec
11import scala.util.parsing.input.Reader
12
13import java.io.{File => JFile}
14
15
class Resources(
  val session_base: Sessions.Base,
  val log: Logger = No_Logger)
{
  resources =>  /* self reference: passed to helpers such as Thy_Syntax.parse_change */

  /* standard file extension for theory sources */
  def thy_path(path: Path): Path = path.ext("thy")


  /* file-system operations */

  /* append source_path to dir, normalizing to an expanded, imploded path */
  def append(dir: String, source_path: Path): String =
    (Path.explode(dir) + source_path).expand.implode

  /* resolve source_path relative to the master directory of a theory node */
  def append(node_name: Document.Node.Name, source_path: Path): String =
    append(node_name.master_dir, source_path)


  /* source files of Isabelle/ML bootstrap */

  /*
    Locate a bootstrap ML source file:
      - names that are not well-formed paths are passed through unchanged
        (presumably literal platform paths — NOTE(review): confirm with callers);
      - well-formed but invalid names yield None;
      - absolute or current-dir paths are checked directly; other relative
        names are tried against ~~/src/Pure first, then $ML_SOURCES
        (only if that environment variable is non-empty).
    On success the result is a platform path; if no existing file is found,
    the original (exploded) path is rendered instead.
  */
  def source_file(raw_name: String): Option[String] =
  {
    if (Path.is_wellformed(raw_name)) {
      if (Path.is_valid(raw_name)) {
        // keep only paths that denote existing files
        def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None

        val path = Path.explode(raw_name)
        val path1 =
          if (path.is_absolute || path.is_current) check(path)
          else {
            check(Path.explode("~~/src/Pure") + path) orElse
              (if (Isabelle_System.getenv("ML_SOURCES") == "") None
               else check(Path.explode("$ML_SOURCES") + path))
          }
        Some(File.platform_path(path1 getOrElse path))
      }
      else None
    }
    else Some(raw_name)
  }


  /* theory files */

  /*
    Determine the auxiliary files loaded by a theory.  The theory source is
    read eagerly here, but the (potentially expensive) parsing is deferred:
    the returned closure scans the raw text for load commands and, only if
    some are present, decodes the text, parses it into command spans, and
    resolves the referenced file names against the theory's master directory.
  */
  def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name): () => List[Path] =
  {
    val (is_utf8, raw_text) =
      with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
    () => {
      if (syntax.load_commands_in(raw_text)) {
        // full symbol/UTF-8 decoding happens only when load commands exist
        val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
        val spans = syntax.parse_spans(text)
        val dir = Path.explode(name.master_dir)
        spans.iterator.map(Command.span_files(syntax, _)._1).flatten.
          map(a => (dir + Path.explode(a)).expand).toList
      }
      else Nil
    }
  }

  /* ML bootstrap root files (Thy_Header.ml_roots) within dir, followed by
     the auxiliary files each of them loads */
  def pure_files(syntax: Outer_Syntax, dir: Path): List[Path] =
  {
    val roots =
      for { (name, _) <- Thy_Header.ml_roots }
      yield (dir + Path.explode(name)).expand
    val files =
      for {
        // roots and Thy_Header.ml_roots are index-aligned by construction
        (path, (_, theory)) <- roots zip Thy_Header.ml_roots
        file <- loaded_files(syntax, Document.Node.Name(path.implode, path.dir.implode, theory))()
      } yield file
    roots ::: files
  }

  /* qualify a theory name with the given session qualifier, unless it is
     already long-qualified or registered as a global theory */
  def theory_name(qualifier: String, theory: String): String =
    if (Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory))
      theory
    else Long_Name.qualify(qualifier, theory)

  /*
    Resolve a theory import string s (relative to dir) to a document node
    name: theories already loaded or known in session_base take precedence;
    otherwise the node is located in the file system via its .thy path, with
    the master directory derived from dir and the path's directory part.
  */
  def import_name(qualifier: String, dir: String, s: String): Document.Node.Name =
  {
    val theory = theory_name(qualifier, Thy_Header.import_name(s))
    if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
    else {
      session_base.known_theory(theory) match {
        case Some(node_name) => node_name
        case None =>
          // a qualified base name that is unknown is treated as loaded,
          // rather than resolved in the file system
          if (Thy_Header.is_base_name(s) && Long_Name.is_qualified(s))
            Document.Node.Name.loaded_theory(theory)
          else {
            val path = Path.explode(s)
            val node = append(dir, thy_path(path))
            val master_dir = append(dir, path.dir)
            Document.Node.Name(node, master_dir, theory)
          }
      }
    }
  }

  /* resolve an import relative to the importing node's own qualifier and
     master directory */
  def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
    import_name(session_base.theory_qualifier(name), name.master_dir, s)

  /*
    Produce a standardized form s1 of import spec s: the plain theory name
    for loaded theories, the full theory name for files belonging to a
    different session qualifier (per base), or the base name within the same
    session.  The result is accepted only if it still resolves to the same
    node as s; otherwise the original spec is kept.
  */
  def standard_import(base: Sessions.Base, qualifier: String, dir: String, s: String): String =
  {
    val name = import_name(qualifier, dir, s)
    val s1 =
      if (session_base.loaded_theory(name)) name.theory
      else {
        // name.path may fail for loaded-theory pseudo nodes — fall back to s
        (try { Some(name.path) } catch { case ERROR(_) => None }) match {
          case None => s
          case Some(path) =>
            session_base.known.get_file(path.file) match {
              case Some(name1) if base.theory_qualifier(name1) != qualifier =>
                name1.theory
              case Some(name1) if Thy_Header.is_base_name(s) =>
                name1.theory_base_name
              case _ => s
            }
        }
      }
    val name2 = import_name(qualifier, dir, s1)
    if (name.node == name2.node) s1 else s
  }

  /* apply f to a byte reader over the theory file, closing the reader
     afterwards; error if the file does not exist */
  def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A =
  {
    val path = name.path
    if (path.is_file) {
      val reader = Scan.byte_reader(path.file)
      try { f(reader) } finally { reader.close }
    }
    // node == theory indicates a pseudo node without a real file
    else if (name.node == name.theory)
      error("Cannot load theory " + quote(name.theory))
    else error ("Cannot load theory file " + path)
  }

  /*
    Parse and check the theory header from reader:
      - the declared theory name must match the file base name;
      - each import is resolved via import_name and checked against
        Sessions.exclude_theory.
    Any failure (including parse errors) yields a bad_header placeholder
    instead of being propagated; non-theory nodes or empty sources yield
    no_header.
  */
  def check_thy_reader(node_name: Document.Node.Name, reader: Reader[Char],
    start: Token.Pos = Token.Pos.command, strict: Boolean = true): Document.Node.Header =
  {
    if (node_name.is_theory && reader.source.length > 0) {
      try {
        val header = Thy_Header.read(reader, start, strict)

        val base_name = node_name.theory_base_name
        val (name, pos) = header.name
        if (base_name != name)
          error("Bad theory name " + quote(name) +
            " for file " + thy_path(Path.basic(base_name)) + Position.here(pos) +
            Completion.report_theories(pos, List(base_name)))

        val imports =
          header.imports.map({ case (s, pos) =>
            val name = import_name(node_name, s)
            if (Sessions.exclude_theory(name.theory_base_name))
              error("Bad theory name " + quote(name.theory_base_name) + Position.here(pos))
            (name, pos)
          })
        Document.Node.Header(imports, header.keywords, header.abbrevs)
      }
      // deliberately broad: any error becomes part of the header value
      catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
    }
    else Document.Node.no_header
  }

  /* read and check the header of the theory file behind name */
  def check_thy(name: Document.Node.Name, start: Token.Pos = Token.Pos.command,
      strict: Boolean = true): Document.Node.Header =
    with_thy_reader(name, check_thy_reader(name, _, start, strict))


  /* special header */

  /* implicit headers of distinguished theories: session ROOT files import
     the Sessions theory, ML roots import ML_BOOTSTRAP, bootstrap theories
     import PURE; all other nodes have none */
  def special_header(name: Document.Node.Name): Option[Document.Node.Header] =
  {
    val imports =
      if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name))
      else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP))
      else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE))
      else Nil
    if (imports.isEmpty) None
    else Some(Document.Node.Header(imports.map((_, Position.none))))
  }

  /* nodes that should not be presented to the user as regular theories */
  def is_hidden(name: Document.Node.Name): Boolean =
    !name.is_theory || name.theory == Sessions.root_name || name.is_bibtex_theory


  /* blobs */

  /* blobs referenced by load commands of non-loaded theories that have no
     defined content yet */
  def undefined_blobs(nodes: Document.Nodes): List[Document.Node.Name] =
    (for {
      (node_name, node) <- nodes.iterator
      if !session_base.loaded_theory(node_name)
      cmd <- node.load_commands.iterator
      name <- cmd.blobs_undefined.iterator
    } yield name).toList


  /* document changes */

  /* delegate document-edit parsing to Thy_Syntax, with this Resources
     instance providing name resolution */
  def parse_change(
      reparse_limit: Int,
      previous: Document.Version,
      doc_blobs: Document.Blobs,
      edits: List[Document.Edit_Text],
      consolidate: List[Document.Node.Name]): Session.Change =
    Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate)

  /* hook invoked after a change is applied: no-op here, intended to be
     overridden by subclasses */
  def commit(change: Session.Change) { }


  /* theory and file dependencies */

  object Dependencies
  {
    val empty = new Dependencies(Nil, Set.empty)
  }

  /*
    Accumulated dependency information: entries in reverse order of
    discovery, plus the set of node names already visited (which may be a
    superset of the entries, e.g. for loaded theories).
  */
  final class Dependencies private(
    rev_entries: List[Document.Node.Entry],
    val seen: Set[Document.Node.Name])
  {
    /* prepend an entry (entries field is kept reversed) */
    def :: (entry: Document.Node.Entry): Dependencies =
      new Dependencies(entry :: rev_entries, seen)

    /* mark a node name as visited without recording an entry */
    def + (name: Document.Node.Name): Dependencies =
      new Dependencies(rev_entries, seen + name)

    def entries: List[Document.Node.Entry] = rev_entries.reverse
    def theories: List[Document.Node.Name] = entries.map(_.name)

    def errors: List[String] = entries.flatMap(_.header.errors)

    /* fail with all accumulated header errors, or return this unchanged */
    def check_errors: Dependencies =
      errors match {
        case Nil => this
        case errs => error(cat_lines(errs))
      }

    /*
      Theory graph with per-node outer syntax: starting from the session
      base, each entry adds its theory and import edges, then receives the
      merged syntax of its imports (plus the bootstrap syntax for PURE)
      extended by its own header declarations.
    */
    lazy val loaded_theories: Graph[String, Outer_Syntax] =
      (session_base.loaded_theories /: entries)({ case (graph, entry) =>
        val name = entry.name.theory
        val imports = entry.header.imports.map(p => p._1.theory)

        val graph1 = (graph /: (name :: imports))(_.default_node(_, Outer_Syntax.empty))
        val graph2 = (graph1 /: imports)(_.add_edge(_, name))

        val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil
        val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node(_))
        val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header

        graph2.map_node(name, _ => syntax)
      })

    /* auxiliary files loaded by each theory, computed in parallel */
    def loaded_files: List[(String, List[Path])] =
    {
      theories.map(_.theory) zip
        Par_List.map((e: () => List[Path]) => e(),
          theories.map(name => resources.loaded_files(loaded_theories.get_node(name.theory), name)))
    }

    /* source and loaded files of base-session theories that these theories
       (transitively) depend on */
    def imported_files: List[Path] =
    {
      val base_theories =
        loaded_theories.all_preds(theories.map(_.theory)).
          filter(session_base.loaded_theories.defined(_))

      base_theories.map(theory => session_base.known.theories(theory).name.path) :::
      base_theories.flatMap(session_base.known.loaded_files(_))
    }

    /* merged syntax of all maximal nodes of the theory graph */
    lazy val overall_syntax: Outer_Syntax =
      Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node(_)))

    override def toString: String = entries.toString
  }

  private def show_path(names: List[Document.Node.Name]): String =
    names.map(name => quote(name.theory)).mkString(" via ")

  private def cycle_msg(names: List[Document.Node.Name]): String =
    "Cyclic dependency of " + show_path(names)

  private def required_by(initiators: List[Document.Node.Name]): String =
    if (initiators.isEmpty) ""
    else "\n(required by " + show_path(initiators.reverse) + ")"

  /*
    Depth-first exploration of the import graph for one theory: initiators
    is the chain of theories currently being processed (used for cycle
    detection and error context).  Failures are recorded as bad_header
    entries rather than thrown, so exploration always completes.
  */
  private def require_thy(
    progress: Progress,
    initiators: List[Document.Node.Name],
    dependencies: Dependencies,
    thy: (Document.Node.Name, Position.T)): Dependencies =
  {
    val (name, pos) = thy

    def message: String =
      "The error(s) above occurred for theory " + quote(name.theory) +
        required_by(initiators) + Position.here(pos)

    if (dependencies.seen(name)) dependencies
    else {
      val dependencies1 = dependencies + name
      if (session_base.loaded_theory(name)) dependencies1
      else {
        try {
          if (initiators.contains(name)) error(cycle_msg(initiators))

          progress.expose_interrupt()
          val header =
            try { check_thy(name, Token.Pos.file(name.node)).cat_errors(message) }
            catch { case ERROR(msg) => cat_error(msg, message) }
          // imports are processed before the entry is prepended, so entries
          // end up in dependency order
          Document.Node.Entry(name, header) ::
            require_thys(progress, name :: initiators, dependencies1, header.imports)
        }
        catch {
          // deliberately broad: the failure becomes part of the entry
          case e: Throwable =>
            Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))) :: dependencies1
        }
      }
    }
  }

  /* fold require_thy over a list of theory imports */
  private def require_thys(
      progress: Progress,
      initiators: List[Document.Node.Name],
      dependencies: Dependencies,
      thys: List[(Document.Node.Name, Position.T)]): Dependencies =
    (dependencies /: thys)(require_thy(progress, initiators, _, _))

  /* compute the dependencies of the given theories, starting from scratch */
  def dependencies(
      thys: List[(Document.Node.Name, Position.T)],
      progress: Progress = No_Progress): Dependencies =
    require_thys(progress, Nil, Dependencies.empty, thys)
}
348