/*  Title:      Pure/Thy/bibtex.scala
    Author:     Makarius

BibTeX support.
*/

package isabelle


import java.io.{File => JFile}

import scala.collection.mutable
import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.input.Reader


object Bibtex
{
  /** file format **/

  def is_bibtex(name: String): Boolean = name.endsWith(".bib")

  class File_Format extends isabelle.File_Format
  {
    val format_name: String = "bibtex"
    val file_ext: String = "bib"

    override def theory_suffix: String = "bibtex_file"
    override def theory_content(name: String): String =
      """theory "bib" imports Pure begin bibtex_file """ + quote(name) + """ end"""

    override def make_preview(snapshot: Document.Snapshot): Option[Present.Preview] =
    {
      val name = snapshot.node_name
      if (detect(name.node)) {
        val title = "Bibliography " + quote(snapshot.node_name.path.file_name)
        val content =
          Isabelle_System.with_tmp_file("bib", "bib") { bib =>
            File.write(bib, snapshot.node.source)
            Bibtex.html_output(List(bib), style = "unsort", title = title)
          }
        Some(Present.Preview(title, content))
      }
      else None
    }
  }



  /** bibtex errors **/

  def bibtex_errors(dir: Path, root_name: String): List[String] =
  {
    val log_path = dir + Path.explode(root_name).ext("blg")
    if (log_path.is_file) {
      val Error1 = """^(I couldn't open database file .+)$""".r
      val Error2 = """^(.+)---line (\d+) of file (.+)""".r
      Line.logical_lines(File.read(log_path)).flatMap(line =>
        line match {
          case Error1(msg) => Some("Bibtex error: " + msg)
          case Error2(msg, Value.Int(l), file) =>
            val path = File.standard_path(file)
            if (Path.is_wellformed(path)) {
              val pos = Position.Line_File(l, (dir + Path.explode(path)).canonical.implode)
              Some("Bibtex error" + Position.here(pos) + ":\n  " + msg)
            }
            else None
          case _ => None
        })
    }
    else Nil
  }



  /** check database **/

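  // Check a BibTeX database by running "$ISABELLE_BIBTEX" on a normalized copy
  // (one token per line) in a temporary directory, and map errors/warnings from
  // the resulting root.blg log back to positions in the original source text.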
  def check_database(database: String): (List[(String, Position.T)], List[(String, Position.T)]) =
  {
    val chunks = parse(Line.normalize(database))
    var chunk_pos = Map.empty[String, Position.T]
    val tokens = new mutable.ListBuffer[(Token, Position.T)]
    var line = 1
    var offset = 1

    def make_pos(length: Int): Position.T =
      Position.Offset(offset) ::: Position.End_Offset(offset + length) ::: Position.Line(line)

    def advance_pos(tok: Token)
    {
      for (s <- Symbol.iterator(tok.source)) {
        if (Symbol.is_newline(s)) line += 1
        offset += 1
      }
    }

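    // Tokens are later written to root.bib one per line (see below), so line l
    // of the generated file corresponds to tokens(l - 1) of the original source.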
    def get_line_pos(l: Int): Position.T =
      if (0 < l && l <= tokens.length) tokens(l - 1)._2 else Position.none

    for (chunk <- chunks) {
      val name = chunk.name
      if (name != "" && !chunk_pos.isDefinedAt(name)) {
        chunk_pos += (name -> make_pos(chunk.heading_length))
      }
      for (tok <- chunk.tokens) {
        tokens += (tok.copy(source = tok.source.replace("\n", " ")) -> make_pos(tok.source.length))
        advance_pos(tok)
      }
    }

    Isabelle_System.with_tmp_dir("bibtex")(tmp_dir =>
    {
      File.write(tmp_dir + Path.explode("root.bib"),
        tokens.iterator.map(p => p._1.source).mkString("", "\n", "\n"))
      File.write(tmp_dir + Path.explode("root.aux"),
        "\\bibstyle{plain}\n\\bibdata{root}\n\\citation{*}")
      Isabelle_System.bash("\"$ISABELLE_BIBTEX\" root", cwd = tmp_dir.file)

      val Error = """^(.*)---line (\d+) of file root.bib$""".r
      val Warning = """^Warning--(.+)$""".r
      val Warning_Line = """--line (\d+) of file root.bib$""".r
      val Warning_in_Chunk = """^Warning--(.+) in (.+)$""".r

      val log_file = tmp_dir + Path.explode("root.blg")
      val lines = if (log_file.is_file) Line.logical_lines(File.read(log_file)) else Nil

      val (errors, warnings) =
        if (lines.isEmpty) (Nil, Nil)
        else {
          lines.zip(lines.tail ::: List("")).flatMap(
            {
              case (Error(msg, Value.Int(l)), _) =>
                Some((true, (msg, get_line_pos(l))))
              case (Warning_in_Chunk(msg, name), _) if chunk_pos.isDefinedAt(name) =>
                Some((false, (Word.capitalize(msg + " in entry " + quote(name)), chunk_pos(name))))
              case (Warning(msg), Warning_Line(Value.Int(l))) =>
                Some((false, (Word.capitalize(msg), get_line_pos(l))))
              case (Warning(msg), _) =>
                Some((false, (Word.capitalize(msg), Position.none)))
              case _ => None
            }
          ).partition(_._1)
        }
      (errors.map(_._2), warnings.map(_._2))
    })
  }

  def check_database_yxml(database: String): String =
  {
    import XML.Encode._
    YXML.string_of_body(pair(list(pair(string, properties)), list(pair(string, properties)))(
      check_database(database)))
  }



  /** document model **/

  /* entries */

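  // Scan the given text and return the name and text range of each database
  // entry; commands such as @string and @preamble are excluded.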
  def entries(text: String): List[Text.Info[String]] =
  {
    val result = new mutable.ListBuffer[Text.Info[String]]
    var offset = 0
    for (chunk <- Bibtex.parse(text)) {
      val end_offset = offset + chunk.source.length
      if (chunk.name != "" && !chunk.is_command)
        result += Text.Info(Text.Range(offset, end_offset), chunk.name)
      offset = end_offset
    }
    result.toList
  }

  def entries_iterator[A, B <: Document.Model](models: Map[A, B])
    : Iterator[Text.Info[(String, B)]] =
  {
    for {
      (_, model) <- models.iterator
      info <- model.bibtex_entries.iterator
    } yield info.map((_, model))
  }


  /* completion */

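  // Semantic completion of citations: the name before the caret is completed
  // against the BibTeX entries of all given document models, ranked by the
  // completion history.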
  def completion[A, B <: Document.Model](
    history: Completion.History, rendering: Rendering, caret: Text.Offset,
    models: Map[A, B]): Option[Completion.Result] =
  {
    for {
      Text.Info(r, name) <- rendering.citation(rendering.before_caret_range(caret))
      name1 <- Completion.clean_name(name)

      original <- rendering.model.get_text(r)
      original1 <- Completion.clean_name(Library.perhaps_unquote(original))

      entries =
        (for {
          Text.Info(_, (entry, _)) <- entries_iterator(models)
          if entry.toLowerCase.containsSlice(name1.toLowerCase) && entry != original1
        } yield entry).toList
      if entries.nonEmpty

      items =
        entries.sorted.map({
          case entry =>
            val full_name = Long_Name.qualify(Markup.CITATION, entry)
            val description = List(entry, "(BibTeX entry)")
            val replacement = quote(entry)
            Completion.Item(r, original, full_name, description, replacement, 0, false)
        }).sorted(history.ordering).take(rendering.options.int("completion_limit"))
    } yield Completion.Result(r, original, false, items)
  }



  /** content **/

  private val months = List(
    "jan",
    "feb",
    "mar",
    "apr",
    "may",
    "jun",
    "jul",
    "aug",
    "sep",
    "oct",
    "nov",
    "dec")
  def is_month(s: String): Boolean = months.contains(s.toLowerCase)

  private val commands = List("preamble", "string")
  def is_command(s: String): Boolean = commands.contains(s.toLowerCase)

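  // Field classification of a BibTeX entry kind: required fields, optional
  // fields relative to crossref, and further optional fields; "template"
  // produces a skeleton entry with empty fields.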
  sealed case class Entry(
    kind: String,
    required: List[String],
    optional_crossref: List[String],
    optional_other: List[String])
  {
    val optional_standard: List[String] = List("url", "doi", "ee")

    def is_required(s: String): Boolean = required.contains(s.toLowerCase)
    def is_optional(s: String): Boolean =
      optional_crossref.contains(s.toLowerCase) ||
      optional_other.contains(s.toLowerCase) ||
      optional_standard.contains(s.toLowerCase)

    def fields: List[String] =
      required ::: optional_crossref ::: optional_other ::: optional_standard

    def template: String =
      "@" + kind + "{,\n" + fields.map(x => "  " + x + " = {},\n").mkString + "}\n"
  }

  val known_entries: List[Entry] =
    List(
      Entry("Article",
        List("author", "title"),
        List("journal", "year"),
        List("volume", "number", "pages", "month", "note")),
      Entry("InProceedings",
        List("author", "title"),
        List("booktitle", "year"),
        List("editor", "volume", "number", "series", "pages", "month", "address",
          "organization", "publisher", "note")),
      Entry("InCollection",
        List("author", "title", "booktitle"),
        List("publisher", "year"),
        List("editor", "volume", "number", "series", "type", "chapter", "pages",
          "edition", "month", "address", "note")),
      Entry("InBook",
        List("author", "editor", "title", "chapter"),
        List("publisher", "year"),
        List("volume", "number", "series", "type", "address", "edition", "month", "pages", "note")),
      Entry("Proceedings",
        List("title", "year"),
        List(),
        List("booktitle", "editor", "volume", "number", "series", "address", "month",
          "organization", "publisher", "note")),
      Entry("Book",
        List("author", "editor", "title"),
        List("publisher", "year"),
        List("volume", "number", "series", "address", "edition", "month", "note")),
      Entry("Booklet",
        List("title"),
        List(),
        List("author", "howpublished", "address", "month", "year", "note")),
      Entry("PhdThesis",
        List("author", "title", "school", "year"),
        List(),
        List("type", "address", "month", "note")),
      Entry("MastersThesis",
        List("author", "title", "school", "year"),
        List(),
        List("type", "address", "month", "note")),
      Entry("TechReport",
        List("author", "title", "institution", "year"),
        List(),
        List("type", "number", "address", "month", "note")),
      Entry("Manual",
        List("title"),
        List(),
        List("author", "organization", "address", "edition", "month", "year", "note")),
      Entry("Unpublished",
        List("author", "title", "note"),
        List(),
        List("month", "year")),
      Entry("Misc",
        List(),
        List(),
        List("author", "title", "howpublished", "month", "year", "note")))

  def get_entry(kind: String): Option[Entry] =
    known_entries.find(entry => entry.kind.toLowerCase == kind.toLowerCase)

  def is_entry(kind: String): Boolean = get_entry(kind).isDefined



  /** tokens and chunks **/

  object Token
  {
    object Kind extends Enumeration
    {
      val COMMAND = Value("command")
      val ENTRY = Value("entry")
      val KEYWORD = Value("keyword")
      val NAT = Value("natural number")
      val STRING = Value("string")
      val NAME = Value("name")
      val IDENT = Value("identifier")
      val SPACE = Value("white space")
      val COMMENT = Value("ignored text")
      val ERROR = Value("bad input")
    }
  }

  sealed case class Token(kind: Token.Kind.Value, source: String)
  {
    def is_kind: Boolean =
      kind == Token.Kind.COMMAND ||
      kind == Token.Kind.ENTRY ||
      kind == Token.Kind.IDENT
    def is_name: Boolean =
      kind == Token.Kind.NAME ||
      kind == Token.Kind.IDENT
    def is_ignored: Boolean =
      kind == Token.Kind.SPACE ||
      kind == Token.Kind.COMMENT
    def is_malformed: Boolean =
      kind == Token.Kind.ERROR
    def is_open: Boolean =
      kind == Token.Kind.KEYWORD && (source == "{" || source == "(")
  }

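  // A contiguous piece of source text: ignored text, a @command or @entry with
  // a "{...}" or "(...)" body, or malformed input.  The "name" is the first
  // name token of the content, e.g. the citation key of an entry.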
  case class Chunk(kind: String, tokens: List[Token])
  {
    val source = tokens.map(_.source).mkString

    private val content: Option[List[Token]] =
      tokens match {
        case Token(Token.Kind.KEYWORD, "@") :: body if body.nonEmpty =>
          (body.init.filterNot(_.is_ignored), body.last) match {
            case (tok :: Token(Token.Kind.KEYWORD, "{") :: toks, Token(Token.Kind.KEYWORD, "}"))
            if tok.is_kind => Some(toks)

            case (tok :: Token(Token.Kind.KEYWORD, "(") :: toks, Token(Token.Kind.KEYWORD, ")"))
            if tok.is_kind => Some(toks)

            case _ => None
          }
        case _ => None
      }

    def name: String =
      content match {
        case Some(tok :: _) if tok.is_name => tok.source
        case _ => ""
      }

    def heading_length: Int =
      if (name == "") 1
      else (0 /: tokens.takeWhile(tok => !tok.is_open)){ case (n, tok) => n + tok.source.length }

    def is_ignored: Boolean = kind == "" && tokens.forall(_.is_ignored)
    def is_malformed: Boolean = kind == "" || tokens.exists(_.is_malformed)
    def is_command: Boolean = Bibtex.is_command(kind) && name != "" && content.isDefined
    def is_entry: Boolean = Bibtex.is_entry(kind) && name != "" && content.isDefined
  }



  /** parsing **/

  // context of partial line-oriented scans
  abstract class Line_Context
  case object Ignored extends Line_Context
  case object At extends Line_Context
  case class Item_Start(kind: String) extends Line_Context
  case class Item_Open(kind: String, end: String) extends Line_Context
  case class Item(kind: String, end: String, delim: Delimited) extends Line_Context

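  // State of a delimited string that may span multiple lines: whether the
  // outermost delimiter is a quote, and the current brace nesting depth;
  // Closed means that no delimiter is open.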
  case class Delimited(quoted: Boolean, depth: Int)
  val Closed = Delimited(false, 0)

  private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source)
  private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source)


  // See also https://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web
  // module @<Scan for and process a \.{.bib} command or database entry@>.

  object Parsers extends RegexParsers
  {
    /* white space and comments */

    override val whiteSpace = "".r

    private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE)
    private val spaces = rep(space)


    /* ignored text */

    private val ignored: Parser[Chunk] =
      rep1("""(?i)([^@]+|@[ \t]*comment)""".r) ^^ {
        case ss => Chunk("", List(Token(Token.Kind.COMMENT, ss.mkString))) }

    private def ignored_line: Parser[(Chunk, Line_Context)] =
      ignored ^^ { case a => (a, Ignored) }


    /* delimited string: outermost "..." or {...} and body with balanced {...} */

    // see also bibtex.web: scan_a_field_token_and_eat_white, scan_balanced_braces
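    // Character-level scan that continues a possibly still open delimited string:
    // q records an open outermost quote, d the brace nesting depth.  The scan may
    // stop at the end of the input with an open Delimited state, which enables
    // the line-oriented parsing further below.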
    private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] =
      new Parser[(String, Delimited)]
      {
        require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0)

        def apply(in: Input) =
        {
          val start = in.offset
          val end = in.source.length

          var i = start
          var q = delim.quoted
          var d = delim.depth
          var finished = false
          while (!finished && i < end) {
            val c = in.source.charAt(i)

            if (c == '"' && d == 0) { i += 1; d = 1; q = true }
            else if (c == '"' && d == 1 && q) {
              i += 1; d = 0; q = false; finished = true
            }
            else if (c == '{') { i += 1; d += 1 }
            else if (c == '}') {
              if (d == 1 && !q || d > 1) { i += 1; d -= 1; if (d == 0) finished = true }
              else { i = start; finished = true }
            }
            else if (d > 0) i += 1
            else finished = true
          }
          if (i == start) Failure("bad input", in)
          else {
            val s = in.source.subSequence(start, i).toString
            Success((s, Delimited(q, d)), in.drop(i - start))
          }
        }
      }.named("delimited_depth")

    private def delimited: Parser[Token] =
      delimited_depth(Closed) ^?
        { case (s, delim) if delim == Closed => Token(Token.Kind.STRING, s) }

    private def recover_delimited: Parser[Token] =
      """["{][^@]*""".r ^^ token(Token.Kind.ERROR)

    def delimited_line(ctxt: Item): Parser[(Chunk, Line_Context)] =
      delimited_depth(ctxt.delim) ^^ { case (s, delim1) =>
        (Chunk(ctxt.kind, List(Token(Token.Kind.STRING, s))), ctxt.copy(delim = delim1)) } |
      recover_delimited ^^ { case a => (Chunk(ctxt.kind, List(a)), Ignored) }


    /* other tokens */

    private val at = "@" ^^ keyword

    private val nat = "[0-9]+".r ^^ token(Token.Kind.NAT)

    private val name = """[\x21-\x7f&&[^"#%'(),={}]]+""".r ^^ token(Token.Kind.NAME)

    private val identifier =
      """[\x21-\x7f&&[^"#%'(),={}0-9]][\x21-\x7f&&[^"#%'(),={}]]*""".r

    private val ident = identifier ^^ token(Token.Kind.IDENT)

    val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))


    /* body */

    private val body =
      delimited | (recover_delimited | other_token)

    private def body_line(ctxt: Item) =
      if (ctxt.delim.depth > 0)
        delimited_line(ctxt)
      else
        delimited_line(ctxt) |
        other_token ^^ { case a => (Chunk(ctxt.kind, List(a)), ctxt) } |
        ctxt.end ^^ { case a => (Chunk(ctxt.kind, List(keyword(a))), Ignored) }


    /* items: command or entry */

    private val item_kind =
      identifier ^^ { case a =>
        val kind =
          if (is_command(a)) Token.Kind.COMMAND
          else if (is_entry(a)) Token.Kind.ENTRY
          else Token.Kind.IDENT
        Token(kind, a)
      }

    private val item_begin =
      "{" ^^ { case a => ("}", keyword(a)) } |
      "(" ^^ { case a => (")", keyword(a)) }

    private def item_name(kind: String) =
      kind.toLowerCase match {
        case "preamble" => failure("")
        case "string" => identifier ^^ token(Token.Kind.NAME)
        case _ => name
      }

    private val item_start =
      at ~ spaces ~ item_kind ~ spaces ^^
        { case a ~ b ~ c ~ d => (c.source, List(a) ::: b ::: List(c) ::: d) }

    private val item: Parser[Chunk] =
      (item_start ~ item_begin ~ spaces) into
        { case (kind, a) ~ ((end, b)) ~ c =>
            opt(item_name(kind)) ~ rep(body) ~ opt(end ^^ keyword) ^^ {
              case d ~ e ~ f => Chunk(kind, a ::: List(b) ::: c ::: d.toList ::: e ::: f.toList) } }

    private val recover_item: Parser[Chunk] =
      at ~ "[^@]*".r ^^ { case a ~ b => Chunk("", List(a, Token(Token.Kind.ERROR, b))) }


    /* chunks */

    val chunk: Parser[Chunk] = ignored | (item | recover_item)

    def chunk_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
    {
      ctxt match {
        case Ignored =>
          ignored_line |
          at ^^ { case a => (Chunk("", List(a)), At) }

        case At =>
          space ^^ { case a => (Chunk("", List(a)), ctxt) } |
          item_kind ^^ { case a => (Chunk(a.source, List(a)), Item_Start(a.source)) } |
          recover_item ^^ { case a => (a, Ignored) } |
          ignored_line

        case Item_Start(kind) =>
          space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
          item_begin ^^ { case (end, a) => (Chunk(kind, List(a)), Item_Open(kind, end)) } |
          recover_item ^^ { case a => (a, Ignored) } |
          ignored_line

        case Item_Open(kind, end) =>
          space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
          item_name(kind) ^^ { case a => (Chunk(kind, List(a)), Item(kind, end, Closed)) } |
          body_line(Item(kind, end, Closed)) |
          ignored_line

        case item_ctxt: Item =>
          body_line(item_ctxt) |
          ignored_line

        case _ => failure("")
      }
    }
  }


  /* parse */

  def parse(input: CharSequence): List[Chunk] =
    Parsers.parseAll(Parsers.rep(Parsers.chunk), Scan.char_reader(input)) match {
      case Parsers.Success(result, _) => result
      case _ => error("Unexpected failure to parse input:\n" + input.toString)
    }

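  // Parse a single line (or other partial input) relative to the given context,
  // returning the chunks found together with the context for the next line;
  // this supports incremental, line-oriented scanning (cf. Line_Context above).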
  def parse_line(input: CharSequence, context: Line_Context): (List[Chunk], Line_Context) =
  {
    var in: Reader[Char] = Scan.char_reader(input)
    val chunks = new mutable.ListBuffer[Chunk]
    var ctxt = context
    while (!in.atEnd) {
      Parsers.parse(Parsers.chunk_line(ctxt), in) match {
        case Parsers.Success((x, c), rest) => chunks += x; ctxt = c; in = rest
        case Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure to parse input:\n" + rest.source.toString)
      }
    }
    (chunks.toList, ctxt)
  }



  /** HTML output **/

  private val output_styles =
    List(
      "" -> "html-n",
      "plain" -> "html-n",
      "alpha" -> "html-a",
      "named" -> "html-n",
      "paragraph" -> "html-n",
      "unsort" -> "html-u",
      "unsortlist" -> "html-u")

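  // Render the given .bib files as HTML via the external bib2xhtml tool
  // ("$BIB2XHTML_HOME/main/bib2xhtml.pl"), using one of the styles above;
  // with body = true only the fragment between the BEGIN/END BIBLIOGRAPHY
  // markers is returned.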
  def html_output(bib: List[Path],
    title: String = "Bibliography",
    body: Boolean = false,
    citations: List[String] = List("*"),
    style: String = "",
    chronological: Boolean = false): String =
  {
    Isabelle_System.with_tmp_dir("bibtex")(tmp_dir =>
    {
      /* database files */

      val bib_files = bib.map(_.drop_ext)
      val bib_names =
      {
        val names0 = bib_files.map(_.file_name)
        if (Library.duplicates(names0).isEmpty) names0
        else names0.zipWithIndex.map({ case (name, i) => (i + 1).toString + "-" + name })
      }

      for ((a, b) <- bib_files zip bib_names) {
        File.copy(a.ext("bib"), tmp_dir + Path.basic(b).ext("bib"))
      }


      /* style file */

      val bst =
        output_styles.toMap.get(style) match {
          case Some(base) => base + (if (chronological) "c" else "") + ".bst"
          case None =>
            error("Bad style for bibtex HTML output: " + quote(style) +
              "\n(expected: " + commas_quote(output_styles.map(_._1)) + ")")
        }
      File.copy(Path.explode("$BIB2XHTML_HOME/bst") + Path.explode(bst), tmp_dir)


      /* result */

      val in_file = Path.explode("bib.aux")
      val out_file = Path.explode("bib.html")

      File.write(tmp_dir + in_file,
        bib_names.mkString("\\bibdata{", ",", "}\n") +
        citations.map(cite => "\\citation{" + cite + "}\n").mkString)

      Isabelle_System.bash(
        "\"$BIB2XHTML_HOME/main/bib2xhtml.pl\" -B \"$ISABELLE_BIBTEX\"" +
          " -u -s " + Bash.string(proper_string(style) getOrElse "empty") +
          (if (chronological) " -c" else "") +
          (if (title != "") " -h " + Bash.string(title) + " " else "") +
          " " + File.bash_path(in_file) + " " + File.bash_path(out_file),
        cwd = tmp_dir.file).check

      val html = File.read(tmp_dir + out_file)

      if (body) {
        cat_lines(
          split_lines(html).
            dropWhile(line => !line.startsWith("<!-- BEGIN BIBLIOGRAPHY")).reverse.
            dropWhile(line => !line.startsWith("<!-- END BIBLIOGRAPHY")).reverse)
      }
      else html
    })
  }
}