/*  Title:      Pure/Thy/sessions.scala
    Author:     Makarius

Cumulative session information.
*/

package isabelle

import java.io.{File => JFile}
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.StandardOpenOption

import scala.collection.SortedSet
import scala.collection.mutable


object Sessions
{
  /* session and theory names */

  val root_name: String = "ROOT"
  val theory_name: String = "Pure.Sessions"

  val DRAFT = "Draft"

  def is_pure(name: String): Boolean = name == Thy_Header.PURE


  def exclude_session(name: String): Boolean = name == "" || name == DRAFT

  def exclude_theory(name: String): Boolean =
    name == root_name || name == "README" || name == "index" || name == "bib"


  /* base info and source dependencies */

  object Known
  {
    val empty: Known = Known()

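    /* make a Known by joining the tables of the given bases with additional
       sessions/theories/files; "local" theories are those below local_dir */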
    def make(local_dir: Path, bases: List[Base],
      sessions: List[(String, Position.T)] = Nil,
      theories: List[Document.Node.Entry] = Nil,
      loaded_files: List[(String, List[Path])] = Nil): Known =
    {
      def bases_iterator(local: Boolean) =
        for {
          base <- bases.iterator
          (_, entry) <- (if (local) base.known.theories_local else base.known.theories).iterator
        } yield entry

      def local_theories_iterator =
      {
        val local_path = local_dir.canonical_file.toPath
        theories.iterator.filter(entry =>
          entry.name.path.canonical_file.toPath.startsWith(local_path))
      }

      val known_sessions =
        (sessions.toMap /: bases)({ case (known, base) => known ++ base.known.sessions })

      val known_theories =
        (Map.empty[String, Document.Node.Entry] /: (bases_iterator(false) ++ theories.iterator))({
          case (known, entry) =>
            known.get(entry.name.theory) match {
              case Some(entry1) if entry.name != entry1.name =>
                error("Duplicate theory " + quote(entry.name.node) + " vs. " +
                  quote(entry1.name.node))
              case _ => known + (entry.name.theory -> entry)
            }
          })
      val known_theories_local =
        (Map.empty[String, Document.Node.Entry] /:
            (bases_iterator(true) ++ local_theories_iterator))({
          case (known, entry) => known + (entry.name.theory -> entry)
        })
      val known_files =
        (Map.empty[JFile, List[Document.Node.Name]] /:
            (bases_iterator(true) ++ bases_iterator(false) ++ theories.iterator))({
          case (known, entry) =>
            val name = entry.name
            val file = name.path.canonical_file
            val theories1 = known.getOrElse(file, Nil)
            if (theories1.exists(name1 => name.node == name1.node && name.theory == name1.theory))
              known
            else known + (file -> (name :: theories1))
        })

      val known_loaded_files =
        (loaded_files.toMap /: bases.map(base => base.known.loaded_files))(_ ++ _)

      Known(
        known_sessions,
        known_theories,
        known_theories_local,
        known_files.iterator.map(p => (p._1, p._2.reverse)).toMap,
        known_loaded_files)
    }
  }

  sealed case class Known(
    sessions: Map[String, Position.T] = Map.empty,
    theories: Map[String, Document.Node.Entry] = Map.empty,
    theories_local: Map[String, Document.Node.Entry] = Map.empty,
    files: Map[JFile, List[Document.Node.Name]] = Map.empty,
    loaded_files: Map[String, List[Path]] = Map.empty)
  {
    def platform_path: Known =
      copy(theories = for ((a, b) <- theories) yield (a, b.map(File.platform_path(_))),
        theories_local = for ((a, b) <- theories_local) yield (a, b.map(File.platform_path(_))),
        files = for ((a, b) <- files) yield (a, b.map(c => c.map(File.platform_path(_)))))

    def standard_path: Known =
      copy(theories = for ((a, b) <- theories) yield (a, b.map(File.standard_path(_))),
        theories_local = for ((a, b) <- theories_local) yield (a, b.map(File.standard_path(_))),
        files = for ((a, b) <- files) yield (a, b.map(c => c.map(File.standard_path(_)))))

    def theory_names: List[Document.Node.Name] = theories.iterator.map(p => p._2.name).toList

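    /* graph of all known theories, according to their import headers */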
    lazy val theory_graph: Graph[Document.Node.Name, Unit] =
    {
      val entries =
        for ((_, entry) <- theories.toList)
        yield ((entry.name, ()), entry.header.imports.map(imp => theories(imp._1.theory).name))
      Graph.make(entries, symmetric = true)(Document.Node.Name.Ordering)
    }

    def get_file(file: JFile, bootstrap: Boolean = false): Option[Document.Node.Name] =
    {
      val res = files.getOrElse(File.canonical(file), Nil).headOption
      if (bootstrap) res.map(_.map_theory(Thy_Header.bootstrap_name(_))) else res
    }
  }

  object Base
  {
    def bootstrap(global_theories: Map[String, String]): Base =
      Base(
        global_theories = global_theories,
        overall_syntax = Thy_Header.bootstrap_syntax)
  }

  sealed case class Base(
    pos: Position.T = Position.none,
    doc_names: List[String] = Nil,
    global_theories: Map[String, String] = Map.empty,
    loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
    used_theories: List[Document.Node.Name] = Nil,
    known: Known = Known.empty,
    overall_syntax: Outer_Syntax = Outer_Syntax.empty,
    imported_sources: List[(Path, SHA1.Digest)] = Nil,
    sources: List[(Path, SHA1.Digest)] = Nil,
    session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
    errors: List[String] = Nil,
    imports: Option[Base] = None)
  {
    def platform_path: Base = copy(known = known.platform_path)
    def standard_path: Base = copy(known = known.standard_path)

    def theory_qualifier(name: String): String =
      global_theories.getOrElse(name, Long_Name.qualifier(name))
    def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)

    def loaded_theory(name: String): Boolean = loaded_theories.defined(name)
    def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory)

    def loaded_theory_syntax(name: String): Option[Outer_Syntax] =
      if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None
    def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] =
      loaded_theory_syntax(name.theory)

    def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax =
      nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax

    def known_theory(name: String): Option[Document.Node.Name] =
      known.theories.get(name).map(_.name)

    def dest_known_theories: List[(String, String)] =
      for ((theory, entry) <- known.theories.toList)
        yield (theory, entry.name.node)

    def get_imports: Base = imports getOrElse Base.bootstrap(global_theories)
  }

  sealed case class Deps(
    sessions_structure: Structure, session_bases: Map[String, Base], all_known: Known)
  {
    def is_empty: Boolean = session_bases.isEmpty
    def apply(name: String): Base = session_bases(name)
    def get(name: String): Option[Base] = session_bases.get(name)

    def imported_sources(name: String): List[SHA1.Digest] =
      session_bases(name).imported_sources.map(_._2)

    def sources(name: String): List[SHA1.Digest] =
      session_bases(name).sources.map(_._2)

    def errors: List[String] =
      (for {
        (name, base) <- session_bases.iterator
        if base.errors.nonEmpty
      } yield cat_lines(base.errors) +
          "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos)
      ).toList

    def check_errors: Deps =
      errors match {
        case Nil => this
        case errs => error(cat_lines(errs))
      }
  }

  def deps(sessions_structure: Structure,
      global_theories: Map[String, String],
      progress: Progress = No_Progress,
      inlined_files: Boolean = false,
      verbose: Boolean = false,
      list_files: Boolean = false,
      check_keywords: Set[String] = Set.empty): Deps =
  {
    var cache_sources = Map.empty[JFile, SHA1.Digest]
    def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] =
    {
      for {
        path <- paths
        file = path.file
        if cache_sources.isDefinedAt(file) || file.isFile
      }
      yield {
        cache_sources.get(file) match {
          case Some(digest) => (path, digest)
          case None =>
            val digest = SHA1.digest(file)
            cache_sources = cache_sources + (file -> digest)
            (path, digest)
        }
      }
    }

    val doc_names = Doc.doc_names()

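    /* one Base per session, following the topological order of session imports */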
    val session_bases =
      (Map.empty[String, Base] /: sessions_structure.imports_topological_order)({
        case (session_bases, session_name) =>
          progress.expose_interrupt()

          val info = sessions_structure(session_name)
          try {
            val parent_base: Sessions.Base =
              info.parent match {
                case None => Base.bootstrap(global_theories)
                case Some(parent) => session_bases(parent)
              }
            val imports_base: Sessions.Base =
              parent_base.copy(known =
                Known.make(info.dir, parent_base :: info.imports.map(session_bases(_))))

            val resources = new Resources(imports_base)

            if (verbose || list_files) {
              val groups =
                if (info.groups.isEmpty) ""
                else info.groups.mkString(" (", " ", ")")
              progress.echo("Session " + info.chapter + "/" + info.name + groups)
            }

            val dependencies =
              resources.dependencies(
                for { (_, thys) <- info.theories; (thy, pos) <- thys }
                yield (resources.import_name(info.name, info.dir.implode, thy), pos))

            val overall_syntax = dependencies.overall_syntax

            val theory_files = dependencies.theories.map(_.path)
            val loaded_files =
              if (inlined_files) {
                if (Sessions.is_pure(info.name)) {
                  val pure_files = resources.pure_files(overall_syntax, info.dir)
                  dependencies.loaded_files.map({ case (name, files) =>
                    (name, if (name == Thy_Header.PURE) pure_files ::: files else files) })
                }
                else dependencies.loaded_files
              }
              else Nil

            val session_files =
              (theory_files ::: loaded_files.flatMap(_._2) :::
                info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)

            val imported_files = if (inlined_files) dependencies.imported_files else Nil

            if (list_files)
              progress.echo(cat_lines(session_files.map(_.implode).sorted.map("  " + _)))

            if (check_keywords.nonEmpty) {
              Check_Keywords.check_keywords(
                progress, overall_syntax.keywords, check_keywords, theory_files)
            }

            val session_graph_display: Graph_Display.Graph =
            {
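              /* overview graph: required sessions as collapsed "[name]" nodes,
                 theories of the current session as individual nodes */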
              def session_node(name: String): Graph_Display.Node =
                Graph_Display.Node("[" + name + "]", "session." + name)

              def node(name: Document.Node.Name): Graph_Display.Node =
              {
                val qualifier = imports_base.theory_qualifier(name)
                if (qualifier == info.name)
                  Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
                else session_node(qualifier)
              }

              val imports_subgraph =
                sessions_structure.imports_graph.
                  restrict(sessions_structure.imports_graph.all_preds(info.deps).toSet)

              val graph0 =
                (Graph_Display.empty_graph /: imports_subgraph.topological_order)(
                  { case (g, session) =>
                      val a = session_node(session)
                      val bs = imports_subgraph.imm_preds(session).toList.map(session_node(_))
                      ((g /: (a :: bs))(_.default_node(_, Nil)) /: bs)(_.add_edge(_, a)) })

              (graph0 /: dependencies.entries)(
                { case (g, entry) =>
                    val a = node(entry.name)
                    val bs =
                      entry.header.imports.map({ case (name, _) => node(name) }).
                        filterNot(_ == a)
                    ((g /: (a :: bs))(_.default_node(_, Nil)) /: bs)(_.add_edge(_, a)) })
            }

            val known =
              Known.make(info.dir, List(imports_base),
                sessions = List(info.name -> info.pos),
                theories = dependencies.entries,
                loaded_files = loaded_files)

            val sources_errors =
              for (p <- session_files if !p.is_file) yield "No such file: " + p

            val bibtex_errors =
              try { info.bibtex_entries; Nil }
              catch { case ERROR(msg) => List(msg) }

            val base =
              Base(
                pos = info.pos,
                doc_names = doc_names,
                global_theories = global_theories,
                loaded_theories = dependencies.loaded_theories,
                used_theories = dependencies.theories,
                known = known,
                overall_syntax = overall_syntax,
                imported_sources = check_sources(imported_files),
                sources = check_sources(session_files),
                session_graph_display = session_graph_display,
                errors = dependencies.errors ::: sources_errors ::: bibtex_errors,
                imports = Some(imports_base))

            session_bases + (info.name -> base)
          }
          catch {
            case ERROR(msg) =>
              cat_error(msg, "The error(s) above occurred in session " +
                quote(info.name) + Position.here(info.pos))
          }
      })

    val all_known =
      Known.make(Path.current, sessions_structure.imports_topological_order.map(session_bases(_)))

    Deps(sessions_structure, session_bases, all_known)
  }


  /* base info */

  sealed case class Base_Info(
    options: Options,
    dirs: List[Path],
    session: String,
    sessions_structure: Structure,
    errors: List[String],
    base: Base,
    infos: List[Info])
  {
    def check_base: Base = if (errors.isEmpty) base else error(cat_lines(errors))
  }

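  /* base info of a single session, potentially with a synthetic "_requirements"
     session imposed on top of a different ancestor */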
  def base_info(options: Options,
    session: String,
    progress: Progress = No_Progress,
    dirs: List[Path] = Nil,
    include_sessions: List[String] = Nil,
    session_ancestor: Option[String] = None,
    session_requirements: Boolean = false,
    session_focus: Boolean = false,
    all_known: Boolean = false): Base_Info =
  {
    val full_sessions = load_structure(options, dirs = dirs)
    val global_theories = full_sessions.global_theories

    val selected_sessions =
      full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
    val info = selected_sessions(session)
    val ancestor = session_ancestor orElse info.parent

    val (session1, infos1) =
      if (session_requirements && ancestor.isDefined) {
        val deps = Sessions.deps(selected_sessions, global_theories, progress = progress)
        val base = deps(session)

        val ancestor_loaded =
          deps.get(ancestor.get) match {
            case Some(ancestor_base)
            if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
              ancestor_base.loaded_theories.defined(_)
            case _ =>
              error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
          }

        val required_theories =
          for {
            thy <- base.loaded_theories.keys
            if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session
          }
          yield thy

        if (required_theories.isEmpty) (ancestor.get, Nil)
        else {
          val other_name = info.name + "_requirements(" + ancestor.get + ")"
          (other_name,
            List(
              make_info(info.options,
                dir_selected = false,
                dir = info.dir,
                chapter = info.chapter,
                Session_Entry(
                  pos = info.pos,
                  name = other_name,
                  groups = info.groups,
                  path = ".",
                  parent = ancestor,
                  description = "Required theory imports from other sessions",
                  options = Nil,
                  imports = info.deps,
                  theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
                  document_files = Nil,
                  export_files = Nil))))
        }
      }
      else (session, Nil)

    val full_sessions1 =
      if (infos1.isEmpty) full_sessions
      else load_structure(options, dirs = dirs, infos = infos1)

    val selected_sessions1 =
    {
      val sel_sessions1 = session1 :: session :: include_sessions
      val select_sessions1 =
        if (session_focus) {
          full_sessions1.check_sessions(sel_sessions1)
          full_sessions1.imports_descendants(sel_sessions1)
        }
        else sel_sessions1
      full_sessions1.selection(Selection(sessions = select_sessions1))
    }

    val sessions1 = if (all_known) full_sessions1 else selected_sessions1
    val deps1 = Sessions.deps(sessions1, global_theories, progress = progress)
    val base1 = deps1(session1).copy(known = deps1.all_known)

    Base_Info(options, dirs, session1, deps1.sessions_structure, deps1.errors, base1, infos1)
  }


  /* cumulative session info */

  sealed case class Info(
    name: String,
    chapter: String,
    dir_selected: Boolean,
    pos: Position.T,
    groups: List[String],
    dir: Path,
    parent: Option[String],
    description: String,
    options: Options,
    imports: List[String],
    theories: List[(Options, List[(String, Position.T)])],
    global_theories: List[String],
    document_files: List[(Path, Path)],
    export_files: List[(Path, List[String])],
    meta_digest: SHA1.Digest)
  {
    def deps: List[String] = parent.toList ::: imports

    def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))

    def bibtex_entries: List[Text.Info[String]] =
      (for {
        (document_dir, file) <- document_files.iterator
        if Bibtex.is_bibtex(file.base_name)
        info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator
      } yield info).toList
  }

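  /* consistency-checked session Info from a parsed ROOT entry */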
  def make_info(options: Options, dir_selected: Boolean, dir: Path, chapter: String,
    entry: Session_Entry): Info =
  {
    try {
      val name = entry.name

      if (exclude_session(name)) error("Bad session name")
      if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
      if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")

      val session_options = options ++ entry.options

      val theories =
        entry.theories.map({ case (opts, thys) =>
          (session_options ++ opts,
            thys.map({ case ((thy, pos), _) =>
              if (exclude_theory(thy))
                error("Bad theory name " + quote(thy) + Position.here(pos))
              else (thy, pos) })) })

      val global_theories =
        for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
        yield {
          val thy_name = Path.explode(thy).expand.base_name
          if (Long_Name.is_qualified(thy_name))
            error("Bad qualified name for global theory " +
              quote(thy_name) + Position.here(pos))
          else thy_name
        }

      val conditions =
        theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted.
          map(x => (x, Isabelle_System.getenv(x) != ""))

      val document_files =
        entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })

      val export_files =
        entry.export_files.map({ case (dir, pats) => (Path.explode(dir), pats) })

      val meta_digest =
        SHA1.digest((name, chapter, entry.parent, entry.options, entry.imports,
          entry.theories_no_position, conditions, entry.document_files, entry.export_files).toString)

      Info(name, chapter, dir_selected, entry.pos, entry.groups,
        dir + Path.explode(entry.path), entry.parent, entry.description, session_options,
        entry.imports, theories, global_theories, document_files, export_files, meta_digest)
    }
    catch {
      case ERROR(msg) =>
        error(msg + "\nThe error(s) above occurred in session entry " +
          quote(entry.name) + Position.here(entry.pos))
    }
  }

  object Selection
  {
    val empty: Selection = Selection()
    val all: Selection = Selection(all_sessions = true)
  }

  sealed case class Selection(
    requirements: Boolean = false,
    all_sessions: Boolean = false,
    base_sessions: List[String] = Nil,
    exclude_session_groups: List[String] = Nil,
    exclude_sessions: List[String] = Nil,
    session_groups: List[String] = Nil,
    sessions: List[String] = Nil)
  {
    def ++ (other: Selection): Selection =
      Selection(
        requirements = requirements || other.requirements,
        all_sessions = all_sessions || other.all_sessions,
        base_sessions = Library.merge(base_sessions, other.base_sessions),
        exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups),
        exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions),
        session_groups = Library.merge(session_groups, other.session_groups),
        sessions = Library.merge(sessions, other.sessions))
  }

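  /* overall session structure: acyclic graphs wrt. parent and imports dependencies */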
  def make(infos: List[Info]): Structure =
  {
    def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Traversable[String])
      : Graph[String, Info] =
    {
      def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) =
      {
        if (!g.defined(parent))
          error("Bad " + kind + " session " + quote(parent) + " for " +
            quote(name) + Position.here(pos))

        try { g.add_edge_acyclic(parent, name) }
        catch {
          case exn: Graph.Cycles[_] =>
            error(cat_lines(exn.cycles.map(cycle =>
              "Cyclic session dependency of " +
                cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
        }
      }
      (graph /: graph.iterator) {
        case (g, (name, (info, _))) => (g /: edges(info))(add_edge(info.pos, name, _, _))
      }
    }

    val graph0 =
      (Graph.string[Info] /: infos) {
        case (graph, info) =>
          if (graph.defined(info.name))
            error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
              Position.here(graph.get_node(info.name).pos))
          else graph.new_node(info.name, info)
      }
    val graph1 = add_edges(graph0, "parent", _.parent)
    val graph2 = add_edges(graph1, "imports", _.imports)

    new Structure(graph1, graph2)
  }

  final class Structure private[Sessions](
      val build_graph: Graph[String, Info],
      val imports_graph: Graph[String, Info])
  {
    def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph)
    def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph)

    def defined(name: String): Boolean = imports_graph.defined(name)
    def apply(name: String): Info = imports_graph.get_node(name)
    def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None

    def global_theories: Map[String, String] =
      (Thy_Header.bootstrap_global_theories.toMap /:
        (for {
          (_, (info, _)) <- imports_graph.iterator
          thy <- info.global_theories.iterator }
         yield (thy, info)))({
            case (global, (thy, info)) =>
              val qualifier = info.name
              global.get(thy) match {
                case Some(qualifier1) if qualifier != qualifier1 =>
                  error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
                case _ => global + (thy -> qualifier)
              }
          })

    def check_sessions(names: List[String])
    {
      val bad_sessions = SortedSet(names.filterNot(defined(_)): _*).toList
      if (bad_sessions.nonEmpty)
        error("Undefined session(s): " + commas_quote(bad_sessions))
    }

    def check_sessions(sel: Selection): Unit =
      check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions)

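    /* sessions matching the selection criteria; for sel.requirements the
       strict predecessors of that set */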
    private def selected(graph: Graph[String, Info], sel: Selection): List[String] =
    {
      check_sessions(sel)

      val select_group = sel.session_groups.toSet
      val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions)

      val selected0 =
        if (sel.all_sessions) graph.keys
        else {
          (for {
            (name, (info, _)) <- graph.iterator
            if info.dir_selected || select_session(name) ||
              graph.get_node(name).groups.exists(select_group)
          } yield name).toList
        }

      if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList
      else selected0
    }

    def selection(sel: Selection): Structure =
    {
      check_sessions(sel)

      val excluded =
      {
        val exclude_group = sel.exclude_session_groups.toSet
        val exclude_group_sessions =
          (for {
            (name, (info, _)) <- imports_graph.iterator
            if imports_graph.get_node(name).groups.exists(exclude_group)
          } yield name).toList
        imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet
      }

      def restrict(graph: Graph[String, Info]): Graph[String, Info] =
      {
        val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded)
        graph.restrict(graph.all_preds(sessions).toSet)
      }

      new Structure(restrict(build_graph), restrict(imports_graph))
    }

    def selection_deps(sel: Selection,
      progress: Progress = No_Progress,
      inlined_files: Boolean = false,
      verbose: Boolean = false): Deps =
    {
      Sessions.deps(selection(sel), global_theories,
        progress = progress, inlined_files = inlined_files, verbose = verbose)
    }

    def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
    def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
    def build_requirements(ss: List[String]): List[String] = build_graph.all_preds(ss).reverse
    def build_topological_order: List[String] = build_graph.topological_order

    def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
    def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
    def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds(ss).reverse
    def imports_topological_order: List[String] = imports_graph.topological_order

    override def toString: String =
      imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")")
  }


  /* parser */

  val ROOT = Path.explode("ROOT")
  val ROOTS = Path.explode("ROOTS")

  private val CHAPTER = "chapter"
  private val SESSION = "session"
  private val IN = "in"
  private val DESCRIPTION = "description"
  private val OPTIONS = "options"
  private val SESSIONS = "sessions"
  private val THEORIES = "theories"
  private val GLOBAL = "global"
  private val DOCUMENT_FILES = "document_files"
  private val EXPORT_FILES = "export_files"

  val root_syntax =
    Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN +
      (CHAPTER, Keyword.THY_DECL) +
      (SESSION, Keyword.THY_DECL) +
      (DESCRIPTION, Keyword.QUASI_COMMAND) +
      (OPTIONS, Keyword.QUASI_COMMAND) +
      (SESSIONS, Keyword.QUASI_COMMAND) +
      (THEORIES, Keyword.QUASI_COMMAND) +
      (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
      (EXPORT_FILES, Keyword.QUASI_COMMAND)

  abstract class Entry
  sealed case class Chapter(name: String) extends Entry
  sealed case class Session_Entry(
    pos: Position.T,
    name: String,
    groups: List[String],
    path: String,
    parent: Option[String],
    description: String,
    options: List[Options.Spec],
    imports: List[String],
    theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
    document_files: List[(String, String)],
    export_files: List[(String, List[String])]) extends Entry
  {
    def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
      theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
  }

  private object Parser extends Parse.Parser with Options.Parser
  {
    private val chapter: Parser[Chapter] =
    {
      val chapter_name = atom("chapter name", _.is_name)

      command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) }
    }

    private val session_entry: Parser[Session_Entry] =
    {
      val option =
        option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^
          { case x ~ y => (x, y) }
      val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")

      val global =
        ($$$("(") ~! $$$(GLOBAL) ~ $$$(")")) ^^ { case _ => true } | success(false)

      val theory_entry =
        position(theory_name) ~ global ^^ { case x ~ y => (x, y) }

      val theories =
        $$$(THEORIES) ~!
          ((options | success(Nil)) ~ rep1(theory_entry)) ^^
          { case _ ~ (x ~ y) => (x, y) }

      val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x }

      val document_files =
        $$$(DOCUMENT_FILES) ~!
          ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }

      val export_files =
        $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ rep1(name)) ^^
          { case _ ~ (x ~ y) => (x, y) }

      command(SESSION) ~!
        (position(session_name) ~
          (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
          (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
          ($$$("=") ~!
            (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
              (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
              (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
              (($$$(SESSIONS) ~! rep1(session_name)  ^^ { case _ ~ x => x }) | success(Nil)) ~
              rep(theories) ~
              (rep(document_files) ^^ (x => x.flatten)) ~
              (rep(export_files))))) ^^
        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j))) =>
            Session_Entry(pos, a, b, c, d, e, f, g, h, i, j) }
    }

    def parse_root(path: Path): List[Entry] =
    {
      val toks = Token.explode(root_syntax.keywords, File.read(path))
      val start = Token.Pos.file(path.implode)

      parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
        case Success(result, _) => result
        case bad => error(bad.toString)
      }
    }
  }

  def parse_root(path: Path): List[Entry] = Parser.parse_root(path)

  def parse_root_entries(path: Path): List[Session_Entry] =
    for (entry <- Parser.parse_root(path) if entry.isInstanceOf[Session_Entry])
    yield entry.asInstanceOf[Session_Entry]

  def read_root(options: Options, select: Boolean, path: Path): List[Info] =
  {
    var entry_chapter = "Unsorted"
    val infos = new mutable.ListBuffer[Info]
    parse_root(path).foreach {
      case Chapter(name) => entry_chapter = name
      case entry: Session_Entry =>
        infos += make_info(options, select, path.dir, entry_chapter, entry)
    }
    infos.toList
  }

  def parse_roots(roots: Path): List[String] =
  {
    for {
      line <- split_lines(File.read(roots))
      if !(line == "" || line.startsWith("#"))
    } yield line
  }


  /* load sessions from certain directories */

  private def is_session_dir(dir: Path): Boolean =
    (dir + ROOT).is_file || (dir + ROOTS).is_file

  private def check_session_dir(dir: Path): Path =
    if (is_session_dir(dir)) File.pwd() + dir.expand
    else error("Bad session root directory: " + dir.toString)

  def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] =
  {
    val default_dirs = Isabelle_System.components().filter(is_session_dir(_))
    (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _))
  }

  def load_structure(options: Options,
    dirs: List[Path] = Nil,
    select_dirs: List[Path] = Nil,
    infos: List[Info] = Nil): Structure =
  {
    def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] =
      load_root(select, dir) ::: load_roots(select, dir)

    def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] =
    {
      val root = dir + ROOT
      if (root.is_file) List((select, root)) else Nil
    }

    def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] =
    {
      val roots = dir + ROOTS
      if (roots.is_file) {
        for {
          entry <- parse_roots(roots)
          dir1 =
            try { check_session_dir(dir + Path.explode(entry)) }
            catch {
              case ERROR(msg) =>
                error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
            }
          res <- load_dir(select, dir1)
        } yield res
      }
      else Nil
    }

    val roots =
      for {
        (select, dir) <- directories(dirs, select_dirs)
        res <- load_dir(select, check_session_dir(dir))
      } yield res

    val unique_roots =
      ((Map.empty[JFile, (Boolean, Path)] /: roots) { case (m, (select, path)) =>
        val file = path.canonical_file
        m.get(file) match {
          case None => m + (file -> (select, path))
          case Some((select1, path1)) => m + (file -> (select1 || select, path1))
        }
      }).toList.map(_._2)

    make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos)
  }



  /** heap file with SHA1 digest **/

  private val sha1_prefix = "SHA1:"

  def read_heap_digest(heap: Path): Option[String] =
  {
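    /* the digest is stored at the end of the heap file as a trailing
       "SHA1:..." marker (see write_heap_digest) */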
    if (heap.is_file) {
      val file = FileChannel.open(heap.file.toPath, StandardOpenOption.READ)
      try {
        val len = file.size
        val n = sha1_prefix.length + SHA1.digest_length
        if (len >= n) {
          file.position(len - n)

          val buf = ByteBuffer.allocate(n)
          var i = 0
          var m = 0
          do {
            m = file.read(buf)
            if (m != -1) i += m
          }
          while (m != -1 && n > i)

          if (i == n) {
            val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset)
            val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset)
            if (prefix == sha1_prefix) Some(s) else None
          }
          else None
        }
        else None
      }
      finally { file.close }
    }
    else None
  }

  def write_heap_digest(heap: Path): String =
    read_heap_digest(heap) match {
      case None =>
        val s = SHA1.digest(heap).rep
        File.append(heap, sha1_prefix + s)
        s
      case Some(s) => s
    }



  /** persistent store **/

  object Session_Info
  {
    val session_name = SQL.Column.string("session_name").make_primary_key

    // Build_Log.Session_Info
    val session_timing = SQL.Column.bytes("session_timing")
    val command_timings = SQL.Column.bytes("command_timings")
    val theory_timings = SQL.Column.bytes("theory_timings")
    val ml_statistics = SQL.Column.bytes("ml_statistics")
    val task_statistics = SQL.Column.bytes("task_statistics")
    val errors = SQL.Column.bytes("errors")
    val build_log_columns =
      List(session_name, session_timing, command_timings, theory_timings,
        ml_statistics, task_statistics, errors)

    // Build.Session_Info
    val sources = SQL.Column.string("sources")
    val input_heaps = SQL.Column.string("input_heaps")
    val output_heap = SQL.Column.string("output_heap")
    val return_code = SQL.Column.int("return_code")
    val build_columns = List(sources, input_heaps, output_heap, return_code)

    val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
  }

  def store(options: Options, system_mode: Boolean = false): Store =
    new Store(options, system_mode)

  class Store private[Sessions](val options: Options, val system_mode: Boolean)
  {
    override def toString: String = "Store(output_dir = " + output_dir.expand + ")"


    /* directories */

    val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER")
    val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER")

    val output_dir: Path =
      if (system_mode) system_output_dir else user_output_dir

    val input_dirs: List[Path] =
      if (system_mode) List(system_output_dir)
      else List(user_output_dir, system_output_dir)

    val browser_info: Path =
      if (system_mode) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
      else Path.explode("$ISABELLE_BROWSER_INFO")


    /* file names */

    def heap(name: String): Path = Path.basic(name)
    def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db")
    def log(name: String): Path = Path.basic("log") + Path.basic(name)
    def log_gz(name: String): Path = log(name).ext("gz")

    def output_heap(name: String): Path = output_dir + heap(name)
    def output_database(name: String): Path = output_dir + database(name)
    def output_log(name: String): Path = output_dir + log(name)
    def output_log_gz(name: String): Path = output_dir + log_gz(name)

    def prepare_output_dir() { Isabelle_System.mkdirs(output_dir + Path.basic("log")) }


    /* heap */

    def find_heap(name: String): Option[Path] =
      input_dirs.map(_ + heap(name)).find(_.is_file)

    def find_heap_digest(name: String): Option[String] =
      find_heap(name).flatMap(read_heap_digest(_))

    def the_heap(name: String): Path =
      find_heap(name) getOrElse
        error("Missing heap image for session " + quote(name) + " -- expected in:\n" +
          cat_lines(input_dirs.map(dir => "  " + dir.expand.implode)))


    /* database */

    private def database_server: Boolean = options.bool("build_database_server")

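    /* open the session build database: a PostgreSQL server if option
       "build_database_server" is set, otherwise SQLite files in the heap directories */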
    def access_database(name: String, output: Boolean = false): Option[SQL.Database] =
    {
      if (database_server) {
        val db =
          PostgreSQL.open_database(
            user = options.string("build_database_user"),
            password = options.string("build_database_password"),
            database = options.string("build_database_name"),
            host = options.string("build_database_host"),
            port = options.int("build_database_port"),
            ssh =
              proper_string(options.string("build_database_ssh_host")).map(ssh_host =>
                SSH.open_session(options,
                  host = ssh_host,
                  user = options.string("build_database_ssh_user"),
                  port = options.int("build_database_ssh_port"))),
            ssh_close = true)
        if (output || has_session_info(db, name)) Some(db) else { db.close; None }
      }
      else if (output) Some(SQLite.open_database(output_database(name)))
      else input_dirs.map(_ + database(name)).find(_.is_file).map(SQLite.open_database(_))
    }

    def open_database(name: String, output: Boolean = false): SQL.Database =
      access_database(name, output = output) getOrElse
        error("Missing build database for session " + quote(name))

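    /* remove output artifacts (heap, database, logs) of a session;
       result: (relevant, ok) */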
    def clean_output(name: String): (Boolean, Boolean) =
    {
      val relevant_db =
        database_server &&
        {
          access_database(name) match {
            case Some(db) =>
              try {
                db.transaction {
                  val relevant_db = has_session_info(db, name)
                  init_session_info(db, name)
                  relevant_db
                }
              } finally { db.close }
            case None => false
          }
        }

      val del =
        for {
          dir <- (if (system_mode) List(user_output_dir, system_output_dir) else List(user_output_dir))
          file <- List(heap(name), database(name), log(name), log_gz(name))
          path = dir + file if path.is_file
        } yield path.file.delete

      val relevant = relevant_db || del.nonEmpty
      val ok = del.forall(b => b)
      (relevant, ok)
    }


    /* SQL database content */

    val xml_cache: XML.Cache = XML.make_cache()
    val xz_cache: XZ.Cache = XZ.make_cache()

    def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
      db.using_statement(Session_Info.table.select(List(column),
        Session_Info.session_name.where_equal(name)))(stmt =>
      {
        val res = stmt.execute_query()
        if (!res.next()) Bytes.empty else res.bytes(column)
      })

    def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] =
      Properties.uncompress(
        read_bytes(db, name, column), cache = xz_cache, xml_cache = Some(xml_cache))


    /* session info */

    def init_session_info(db: SQL.Database, name: String)
    {
      db.transaction {
        db.create_table(Session_Info.table)
        db.using_statement(
          Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute)

        db.create_table(Export.Data.table)
        db.using_statement(
          Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute)
      }
    }

    def has_session_info(db: SQL.Database, name: String): Boolean =
    {
      db.transaction {
        db.tables.contains(Session_Info.table.name) &&
        {
          db.using_statement(
            Session_Info.table.select(List(Session_Info.session_name),
              Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next())
        }
      }
    }

    def write_session_info(
      db: SQL.Database,
      name: String,
      build_log: Build_Log.Session_Info,
      build: Build.Session_Info)
    {
      db.transaction {
        db.using_statement(Session_Info.table.insert())(stmt =>
        {
          stmt.string(1) = name
          stmt.bytes(2) = Properties.encode(build_log.session_timing)
          stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = xz_cache)
          stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = xz_cache)
          stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = xz_cache)
          stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = xz_cache)
          stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = xz_cache)
          stmt.string(8) = build.sources
          stmt.string(9) = cat_lines(build.input_heaps)
          stmt.string(10) = build.output_heap getOrElse ""
          stmt.int(11) = build.return_code
          stmt.execute()
        })
      }
    }

    def read_session_timing(db: SQL.Database, name: String): Properties.T =
      Properties.decode(read_bytes(db, name, Session_Info.session_timing), Some(xml_cache))

    def read_command_timings(db: SQL.Database, name: String): List[Properties.T] =
      read_properties(db, name, Session_Info.command_timings)

    def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] =
      read_properties(db, name, Session_Info.theory_timings)

    def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] =
      read_properties(db, name, Session_Info.ml_statistics)

    def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] =
      read_properties(db, name, Session_Info.task_statistics)

    def read_errors(db: SQL.Database, name: String): List[String] =
      Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = xz_cache)

    def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] =
    {
      if (db.tables.contains(Session_Info.table.name)) {
        db.using_statement(Session_Info.table.select(Session_Info.build_columns,
          Session_Info.session_name.where_equal(name)))(stmt =>
        {
          val res = stmt.execute_query()
          if (!res.next()) None
          else {
            Some(
              Build.Session_Info(
                res.string(Session_Info.sources),
                split_lines(res.string(Session_Info.input_heaps)),
                res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) },
                res.int(Session_Info.return_code)))
          }
        })
      }
      else None
    }
  }
}