1/*  Title:      Pure/Thy/bibtex.scala
2    Author:     Makarius
3
4BibTeX support.
5*/
6
7package isabelle
8
9
10import java.io.{File => JFile}
11
12import scala.collection.mutable
13import scala.util.parsing.combinator.RegexParsers
14import scala.util.parsing.input.Reader
15
16
17object Bibtex
18{
19  /** bibtex errors **/
20
21  def bibtex_errors(dir: Path, root_name: String): List[String] =
22  {
23    val log_path = dir + Path.explode(root_name).ext("blg")
24    if (log_path.is_file) {
25      val Error1 = """^(I couldn't open database file .+)$""".r
26      val Error2 = """^(.+)---line (\d+) of file (.+)""".r
27      Line.logical_lines(File.read(log_path)).flatMap(line =>
28        line match {
29          case Error1(msg) => Some("Bibtex error: " + msg)
30          case Error2(msg, Value.Int(l), file) =>
31            val path = File.standard_path(file)
32            if (Path.is_wellformed(path)) {
33              val pos = Position.Line_File(l, (dir + Path.explode(path)).canonical.implode)
34              Some("Bibtex error" + Position.here(pos) + ":\n  " + msg)
35            }
36            else None
37          case _ => None
38        })
39    }
40    else Nil
41  }
42
43
44
45  /** check database **/
46
47  def check_database(database: String): (List[(String, Position.T)], List[(String, Position.T)]) =
48  {
49    val chunks = parse(Line.normalize(database))
50    var chunk_pos = Map.empty[String, Position.T]
51    val tokens = new mutable.ListBuffer[(Token, Position.T)]
52    var line = 1
53    var offset = 1
54
55    def make_pos(length: Int): Position.T =
56      Position.Offset(offset) ::: Position.End_Offset(offset + length) ::: Position.Line(line)
57
58    def advance_pos(tok: Token)
59    {
60      for (s <- Symbol.iterator(tok.source)) {
61        if (Symbol.is_newline(s)) line += 1
62        offset += 1
63      }
64    }
65
66    def get_line_pos(l: Int): Position.T =
67      if (0 < l && l <= tokens.length) tokens(l - 1)._2 else Position.none
68
69    for (chunk <- chunks) {
70      val name = chunk.name
71      if (name != "" && !chunk_pos.isDefinedAt(name)) {
72        chunk_pos += (name -> make_pos(chunk.heading_length))
73      }
74      for (tok <- chunk.tokens) {
75        tokens += (tok.copy(source = tok.source.replace("\n", " ")) -> make_pos(tok.source.length))
76        advance_pos(tok)
77      }
78    }
79
80    Isabelle_System.with_tmp_dir("bibtex")(tmp_dir =>
81    {
82      File.write(tmp_dir + Path.explode("root.bib"),
83        tokens.iterator.map(p => p._1.source).mkString("", "\n", "\n"))
84      File.write(tmp_dir + Path.explode("root.aux"),
85        "\\bibstyle{plain}\n\\bibdata{root}\n\\citation{*}")
86      Isabelle_System.bash("\"$ISABELLE_BIBTEX\" root", cwd = tmp_dir.file)
87
88      val Error = """^(.*)---line (\d+) of file root.bib$""".r
89      val Warning = """^Warning--(.+)$""".r
90      val Warning_Line = """--line (\d+) of file root.bib$""".r
91      val Warning_in_Chunk = """^Warning--(.+) in (.+)$""".r
92
93      val log_file = tmp_dir + Path.explode("root.blg")
94      val lines = if (log_file.is_file) Line.logical_lines(File.read(log_file)) else Nil
95
96      val (errors, warnings) =
97        lines.zip(lines.tail ::: List("")).flatMap(
98          {
99            case (Error(msg, Value.Int(l)), _) =>
100              Some((true, (msg, get_line_pos(l))))
101            case (Warning_in_Chunk(msg, name), _) if chunk_pos.isDefinedAt(name) =>
102              Some((false, (Word.capitalize(msg + " in entry " + quote(name)), chunk_pos(name))))
103            case (Warning(msg), Warning_Line(Value.Int(l))) =>
104              Some((false, (Word.capitalize(msg), get_line_pos(l))))
105            case (Warning(msg), _) =>
106              Some((false, (Word.capitalize(msg), Position.none)))
107            case _ => None
108          }
109        ).partition(_._1)
110      (errors.map(_._2), warnings.map(_._2))
111    })
112  }
113
114  def check_database_yxml(database: String): String =
115  {
116    import XML.Encode._
117    YXML.string_of_body(pair(list(pair(string, properties)), list(pair(string, properties)))(
118      check_database(database)))
119  }
120
121
122
123  /** document model **/
124
125  /* bibtex files */
126
127  def is_bibtex(name: String): Boolean = name.endsWith(".bib")
128
  // fixed file name appended to a .bib path to form the synthetic theory node
  private val node_suffix: String = "bibtex_file"
130
131  def make_theory_name(resources: Resources, name: Document.Node.Name): Option[Document.Node.Name] =
132  {
133    Thy_Header.file_name(name.node) match {
134      case Some(bib_name) if is_bibtex(bib_name) =>
135        val thy_node = resources.append(name.node, Path.explode(node_suffix))
136        Some(Document.Node.Name(thy_node, name.master_dir, bib_name))
137      case _ => None
138    }
139  }
140
141  def make_theory_content(bib_name: String): Option[String] =
142    if (is_bibtex(bib_name)) {
143      Some("""theory "bib" imports Pure begin bibtex_file """ + quote(bib_name) + """ end""")
144    }
145    else None
146
147  def make_theory_content(file: JFile): Option[String] =
148    if (file.getName == node_suffix) {
149      val parent = file.getParentFile
150      if (parent != null && is_bibtex(parent.getName)) make_theory_content(parent.getName)
151      else None
152    }
153    else None
154
155
156  /* entries */
157
158  def entries(text: String): List[Text.Info[String]] =
159  {
160    val result = new mutable.ListBuffer[Text.Info[String]]
161    var offset = 0
162    for (chunk <- Bibtex.parse(text)) {
163      val end_offset = offset + chunk.source.length
164      if (chunk.name != "" && !chunk.is_command)
165        result += Text.Info(Text.Range(offset, end_offset), chunk.name)
166      offset = end_offset
167    }
168    result.toList
169  }
170
171  def entries_iterator[A, B <: Document.Model](models: Map[A, B])
172    : Iterator[Text.Info[(String, B)]] =
173  {
174    for {
175      (_, model) <- models.iterator
176      info <- model.bibtex_entries.iterator
177    } yield info.map((_, model))
178  }
179
180
181  /* completion */
182
  // Propose BibTeX entry names for the citation under the caret, drawn from
  // the bibtex entries of all given document models; None if nothing applies.
  def completion[A, B <: Document.Model](
    history: Completion.History, rendering: Rendering, caret: Text.Offset,
    models: Map[A, B]): Option[Completion.Result] =
  {
    for {
      // citation markup covering the caret, with its range and cited name
      Text.Info(r, name) <- rendering.citation(rendering.before_caret_range(caret))
      name1 <- Completion.clean_name(name)

      // the literal text currently present at that range
      original <- rendering.model.get_text(r)
      original1 <- Completion.clean_name(Library.perhaps_unquote(original))

      // candidates: case-insensitive substring matches, excluding what is
      // already typed at the caret
      entries =
        (for {
          Text.Info(_, (entry, _)) <- entries_iterator(models)
          if entry.toLowerCase.containsSlice(name1.toLowerCase) && entry != original1
        } yield entry).toList
      if entries.nonEmpty

      // alphabetical pre-sort acts as tie-breaker for the (stable) sort by
      // completion history; result size is bounded by the completion_limit option
      items =
        entries.sorted.map({
          case entry =>
            val full_name = Long_Name.qualify(Markup.CITATION, entry)
            val description = List(entry, "(BibTeX entry)")
            val replacement = quote(entry)
          Completion.Item(r, original, full_name, description, replacement, 0, false)
        }).sorted(history.ordering).take(rendering.options.int("completion_limit"))
    } yield Completion.Result(r, original, false, items)
  }
211
212
213
214  /** content **/
215
216  private val months = List(
217    "jan",
218    "feb",
219    "mar",
220    "apr",
221    "may",
222    "jun",
223    "jul",
224    "aug",
225    "sep",
226    "oct",
227    "nov",
228    "dec")
229  def is_month(s: String): Boolean = months.contains(s.toLowerCase)
230
231  private val commands = List("preamble", "string")
232  def is_command(s: String): Boolean = commands.contains(s.toLowerCase)
233
234  sealed case class Entry(
235    kind: String,
236    required: List[String],
237    optional_crossref: List[String],
238    optional_other: List[String])
239  {
240    def is_required(s: String): Boolean = required.contains(s.toLowerCase)
241    def is_optional(s: String): Boolean =
242      optional_crossref.contains(s.toLowerCase) || optional_other.contains(s.toLowerCase)
243
244    def fields: List[String] = required ::: optional_crossref ::: optional_other
245    def template: String =
246      "@" + kind + "{,\n" + fields.map(x => "  " + x + " = {},\n").mkString + "}\n"
247  }
248
  // standard BibTeX entry kinds with their required and optional fields,
  // following the standard styles of the original BibTeX distribution
  val known_entries: List[Entry] =
    List(
      Entry("Article",
        List("author", "title"),
        List("journal", "year"),
        List("volume", "number", "pages", "month", "note")),
      Entry("InProceedings",
        List("author", "title"),
        List("booktitle", "year"),
        List("editor", "volume", "number", "series", "pages", "month", "address",
          "organization", "publisher", "note")),
      Entry("InCollection",
        List("author", "title", "booktitle"),
        List("publisher", "year"),
        List("editor", "volume", "number", "series", "type", "chapter", "pages",
          "edition", "month", "address", "note")),
      Entry("InBook",
        List("author", "editor", "title", "chapter"),
        List("publisher", "year"),
        List("volume", "number", "series", "type", "address", "edition", "month", "pages", "note")),
      Entry("Proceedings",
        List("title", "year"),
        List(),
        List("booktitle", "editor", "volume", "number", "series", "address", "month",
          "organization", "publisher", "note")),
      Entry("Book",
        List("author", "editor", "title"),
        List("publisher", "year"),
        List("volume", "number", "series", "address", "edition", "month", "note")),
      Entry("Booklet",
        List("title"),
        List(),
        List("author", "howpublished", "address", "month", "year", "note")),
      Entry("PhdThesis",
        List("author", "title", "school", "year"),
        List(),
        List("type", "address", "month", "note")),
      Entry("MastersThesis",
        List("author", "title", "school", "year"),
        List(),
        List("type", "address", "month", "note")),
      Entry("TechReport",
        List("author", "title", "institution", "year"),
        List(),
        List("type", "number", "address", "month", "note")),
      Entry("Manual",
        List("title"),
        List(),
        List("author", "organization", "address", "edition", "month", "year", "note")),
      Entry("Unpublished",
        List("author", "title", "note"),
        List(),
        List("month", "year")),
      Entry("Misc",
        List(),
        List(),
        List("author", "title", "howpublished", "month", "year", "note")))
306
307  def get_entry(kind: String): Option[Entry] =
308    known_entries.find(entry => entry.kind.toLowerCase == kind.toLowerCase)
309
310  def is_entry(kind: String): Boolean = get_entry(kind).isDefined
311
312
313
314  /** tokens and chunks **/
315
316  object Token
317  {
318    object Kind extends Enumeration
319    {
320      val COMMAND = Value("command")
321      val ENTRY = Value("entry")
322      val KEYWORD = Value("keyword")
323      val NAT = Value("natural number")
324      val STRING = Value("string")
325      val NAME = Value("name")
326      val IDENT = Value("identifier")
327      val SPACE = Value("white space")
328      val COMMENT = Value("ignored text")
329      val ERROR = Value("bad input")
330    }
331  }
332
333  sealed case class Token(kind: Token.Kind.Value, source: String)
334  {
335    def is_kind: Boolean =
336      kind == Token.Kind.COMMAND ||
337      kind == Token.Kind.ENTRY ||
338      kind == Token.Kind.IDENT
339    def is_name: Boolean =
340      kind == Token.Kind.NAME ||
341      kind == Token.Kind.IDENT
342    def is_ignored: Boolean =
343      kind == Token.Kind.SPACE ||
344      kind == Token.Kind.COMMENT
345    def is_malformed: Boolean =
346      kind == Token.Kind.ERROR
347    def is_open: Boolean =
348      kind == Token.Kind.KEYWORD && (source == "{" || source == "(")
349  }
350
  /* A maximal piece of the database text: either ignored text or one
     "@kind{...}" / "@kind(...)" item with all of its tokens. */
  case class Chunk(kind: String, tokens: List[Token])
  {
    // concatenated source text of all tokens
    val source = tokens.map(_.source).mkString

    // interior tokens of a well-formed item: "@" then a kind token, an open
    // delimiter, the body, and the matching close delimiter; None otherwise
    private val content: Option[List[Token]] =
      tokens match {
        case Token(Token.Kind.KEYWORD, "@") :: body if body.nonEmpty =>
          (body.init.filterNot(_.is_ignored), body.last) match {
            case (tok :: Token(Token.Kind.KEYWORD, "{") :: toks, Token(Token.Kind.KEYWORD, "}"))
            if tok.is_kind => Some(toks)

            case (tok :: Token(Token.Kind.KEYWORD, "(") :: toks, Token(Token.Kind.KEYWORD, ")"))
            if tok.is_kind => Some(toks)

            case _ => None
          }
        case _ => None
      }

    // the entry name (citation key), or "" if absent/malformed
    def name: String =
      content match {
        case Some(tok :: _) if tok.is_name => tok.source
        case _ => ""
      }

    // source length of the "@kind" heading before the opening delimiter;
    // 1 for unnamed chunks
    def heading_length: Int =
      if (name == "") 1
      else (0 /: tokens.takeWhile(tok => !tok.is_open)){ case (n, tok) => n + tok.source.length }

    def is_ignored: Boolean = kind == "" && tokens.forall(_.is_ignored)
    def is_malformed: Boolean = kind == "" || tokens.exists(_.is_malformed)
    def is_command: Boolean = Bibtex.is_command(kind) && name != "" && content.isDefined
    def is_entry: Boolean = Bibtex.is_entry(kind) && name != "" && content.isDefined
  }
385
386
387
388  /** parsing **/
389
  // context of partial line-oriented scans
  abstract class Line_Context
  case object Ignored extends Line_Context  // outside of any @-item
  case object At extends Line_Context  // just after "@"
  case class Item_Start(kind: String) extends Line_Context  // after "@kind", before open delimiter
  case class Item_Open(kind: String, end: String) extends Line_Context  // after "{" or "(", before name
  case class Item(kind: String, end: String, delim: Delimited) extends Line_Context  // inside item body

  // state of a partially scanned delimited string: inside "..." and/or {...} nesting depth
  case class Delimited(quoted: Boolean, depth: Int)
  val Closed = Delimited(false, 0)
400
  // token construction helpers, curried for use with parser combinator "^^"
  private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source)
  private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source)
403
404
405  // See also https://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web
406  // module @<Scan for and process a \.{.bib} command or database entry@>.
407
  /* Combinator parsers for BibTeX chunks, in two modes: complete parsing
     (chunk) and partial line-oriented parsing with explicit context
     (chunk_line), as used for incremental editor highlighting. */
  object Parsers extends RegexParsers
  {
    /* white space and comments */

    // no implicit whitespace skipping: every input character becomes a token
    override val whiteSpace = "".r

    private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE)
    private val spaces = rep(space)


    /* ignored text */

    // everything outside of @-items, including "@comment", is ignored text
    private val ignored: Parser[Chunk] =
      rep1("""(?i)([^@]+|@[ \t]*comment)""".r) ^^ {
        case ss => Chunk("", List(Token(Token.Kind.COMMENT, ss.mkString))) }

    private def ignored_line: Parser[(Chunk, Line_Context)] =
      ignored ^^ { case a => (a, Ignored) }


    /* delimited string: outermost "..." or {...} and body with balanced {...} */

    // see also bibtex.web: scan_a_field_token_and_eat_white, scan_balanced_braces
    private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] =
      new Parser[(String, Delimited)]
      {
        require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0)

        def apply(in: Input) =
        {
          val start = in.offset
          val end = in.source.length

          // manual scan over the char sequence:
          // q: inside outermost "..."; d: current delimiter nesting depth
          var i = start
          var q = delim.quoted
          var d = delim.depth
          var finished = false
          while (!finished && i < end) {
            val c = in.source.charAt(i)

            if (c == '"' && d == 0) { i += 1; d = 1; q = true }
            else if (c == '"' && d == 1 && q) {
              i += 1; d = 0; q = false; finished = true
            }
            else if (c == '{') { i += 1; d += 1 }
            else if (c == '}') {
              // unbalanced "}" at top level: reset and consume nothing
              if (d == 1 && !q || d > 1) { i += 1; d -= 1; if (d == 0) finished = true }
              else {i = start; finished = true }
            }
            else if (d > 0) i += 1
            else finished = true
          }
          if (i == start) Failure("bad input", in)
          else {
            val s = in.source.subSequence(start, i).toString
            Success((s, Delimited(q, d)), in.drop(i - start))
          }
        }
      }.named("delimited_depth")

    // a complete delimited string, i.e. scanning returns to the Closed state
    private def delimited: Parser[Token] =
      delimited_depth(Closed) ^?
        { case (s, delim) if delim == Closed => Token(Token.Kind.STRING, s) }

    // error recovery: swallow a malformed string up to the next "@"
    private def recover_delimited: Parser[Token] =
      """["{][^@]*""".r ^^ token(Token.Kind.ERROR)

    // line-oriented variant: may stop mid-string, carrying the delimiter
    // state over to the next line's context
    def delimited_line(ctxt: Item): Parser[(Chunk, Line_Context)] =
      delimited_depth(ctxt.delim) ^^ { case (s, delim1) =>
        (Chunk(ctxt.kind, List(Token(Token.Kind.STRING, s))), ctxt.copy(delim = delim1)) } |
      recover_delimited ^^ { case a => (Chunk(ctxt.kind, List(a)), Ignored) }


    /* other tokens */

    private val at = "@" ^^ keyword

    private val nat = "[0-9]+".r ^^ token(Token.Kind.NAT)

    private val name = """[\x21-\x7f&&[^"#%'(),={}]]+""".r ^^ token(Token.Kind.NAME)

    // like name, but must not start with a digit
    private val identifier =
      """[\x21-\x7f&&[^"#%'(),={}0-9]][\x21-\x7f&&[^"#%'(),={}]]*""".r

    private val ident = identifier ^^ token(Token.Kind.IDENT)

    val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))


    /* body */

    private val body =
      delimited | (recover_delimited | other_token)

    private def body_line(ctxt: Item) =
      if (ctxt.delim.depth > 0)
        delimited_line(ctxt)
      else
        delimited_line(ctxt) |
        other_token ^^ { case a => (Chunk(ctxt.kind, List(a)), ctxt) } |
        ctxt.end ^^ { case a => (Chunk(ctxt.kind, List(keyword(a))), Ignored) }


    /* items: command or entry */

    // classify the "@..." heading as command, known entry kind, or plain identifier
    private val item_kind =
      identifier ^^ { case a =>
        val kind =
          if (is_command(a)) Token.Kind.COMMAND
          else if (is_entry(a)) Token.Kind.ENTRY
          else Token.Kind.IDENT
        Token(kind, a)
      }

    // open delimiter together with its expected closing counterpart
    private val item_begin =
      "{" ^^ { case a => ("}", keyword(a)) } |
      "(" ^^ { case a => (")", keyword(a)) }

    // entry name syntax depends on the item kind:
    // @preamble has no name, @string names are identifiers
    private def item_name(kind: String) =
      kind.toLowerCase match {
        case "preamble" => failure("")
        case "string" => identifier ^^ token(Token.Kind.NAME)
        case _ => name
      }

    private val item_start =
      at ~ spaces ~ item_kind ~ spaces ^^
        { case a ~ b ~ c ~ d => (c.source, List(a) ::: b ::: List(c) ::: d) }

    private val item: Parser[Chunk] =
      (item_start ~ item_begin ~ spaces) into
        { case (kind, a) ~ ((end, b)) ~ c =>
            opt(item_name(kind)) ~ rep(body) ~ opt(end ^^ keyword) ^^ {
              case d ~ e ~ f => Chunk(kind, a ::: List(b) ::: c ::: d.toList ::: e ::: f.toList) } }

    // error recovery: an "@" followed by arbitrary text up to the next "@"
    private val recover_item: Parser[Chunk] =
      at ~ "[^@]*".r ^^ { case a ~ b => Chunk("", List(a, Token(Token.Kind.ERROR, b))) }


    /* chunks */

    val chunk: Parser[Chunk] = ignored | (item | recover_item)

    // parse a prefix of one line within the given context, producing the next context
    def chunk_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
    {
      ctxt match {
        case Ignored =>
          ignored_line |
          at ^^ { case a => (Chunk("", List(a)), At) }

        case At =>
          space ^^ { case a => (Chunk("", List(a)), ctxt) } |
          item_kind ^^ { case a => (Chunk(a.source, List(a)), Item_Start(a.source)) } |
          recover_item ^^ { case a => (a, Ignored) } |
          ignored_line

        case Item_Start(kind) =>
          space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
          item_begin ^^ { case (end, a) => (Chunk(kind, List(a)), Item_Open(kind, end)) } |
          recover_item ^^ { case a => (a, Ignored) } |
          ignored_line

        case Item_Open(kind, end) =>
          space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
          item_name(kind) ^^ { case a => (Chunk(kind, List(a)), Item(kind, end, Closed)) } |
          body_line(Item(kind, end, Closed)) |
          ignored_line

        case item_ctxt: Item =>
          body_line(item_ctxt) |
          ignored_line

        case _ => failure("")
      }
    }
  }
584
585
586  /* parse */
587
588  def parse(input: CharSequence): List[Chunk] =
589    Parsers.parseAll(Parsers.rep(Parsers.chunk), Scan.char_reader(input)) match {
590      case Parsers.Success(result, _) => result
591      case _ => error("Unexpected failure to parse input:\n" + input.toString)
592    }
593
594  def parse_line(input: CharSequence, context: Line_Context): (List[Chunk], Line_Context) =
595  {
596    var in: Reader[Char] = Scan.char_reader(input)
597    val chunks = new mutable.ListBuffer[Chunk]
598    var ctxt = context
599    while (!in.atEnd) {
600      Parsers.parse(Parsers.chunk_line(ctxt), in) match {
601        case Parsers.Success((x, c), rest) => chunks += x; ctxt = c; in = rest
602        case Parsers.NoSuccess(_, rest) =>
603          error("Unepected failure to parse input:\n" + rest.source.toString)
604      }
605    }
606    (chunks.toList, ctxt)
607  }
608
609
610
611  /** HTML output **/
612
  // map of supported bibtex style names to bib2xhtml output format options;
  // "" denotes the default style
  private val output_styles =
    List(
      "" -> "html-n",
      "plain" -> "html-n",
      "alpha" -> "html-a",
      "named" -> "html-n",
      "paragraph" -> "html-n",
      "unsort" -> "html-u",
      "unsortlist" -> "html-u")
622
  /* Produce HTML output for the given .bib files via the external bib2xhtml
     tool: either a complete HTML document, or (body = true) only the
     bibliography fragment between the BEGIN/END markers. */
  def html_output(bib: List[Path],
    title: String = "Bibliography",
    body: Boolean = false,
    citations: List[String] = List("*"),
    style: String = "",
    chronological: Boolean = false): String =
  {
    Isabelle_System.with_tmp_dir("bibtex")(tmp_dir =>
    {
      /* database files */

      // copy the .bib files into the tmp dir, disambiguating clashing base names
      val bib_files = bib.map(path => path.split_ext._1)
      val bib_names =
      {
        val names0 = bib_files.map(_.base_name)
        if (Library.duplicates(names0).isEmpty) names0
        else names0.zipWithIndex.map({ case (name, i) => (i + 1).toString + "-" + name })
      }

      for ((a, b) <- bib_files zip bib_names) {
        File.copy(a.ext("bib"), tmp_dir + Path.basic(b).ext("bib"))
      }


      /* style file */

      // resolve the requested style against the known output_styles;
      // the "c" suffix selects the chronological variant of the .bst file
      val bst =
        output_styles.toMap.get(style) match {
          case Some(base) => base + (if (chronological) "c" else "") + ".bst"
          case None =>
            error("Bad style for bibtex HTML output: " + quote(style) +
              "\n(expected: " + commas_quote(output_styles.map(_._1)) + ")")
        }
      File.copy(Path.explode("$BIB2XHTML_HOME/bst") + Path.explode(bst), tmp_dir)


      /* result */

      val in_file = Path.explode("bib.aux")
      val out_file = Path.explode("bib.html")

      // synthetic .aux file that cites the requested entries of all databases
      File.write(tmp_dir + in_file,
        bib_names.mkString("\\bibdata{", ",", "}\n") +
        citations.map(cite => "\\citation{" + cite + "}\n").mkString)

      Isabelle_System.bash(
        "\"$BIB2XHTML_HOME/main/bib2xhtml.pl\" -B \"$ISABELLE_BIBTEX\"" +
          " -u -s " + Bash.string(proper_string(style) getOrElse "empty") +
          (if (chronological) " -c" else "") +
          (if (title != "") " -h " + Bash.string(title) + " " else "") +
          " " + File.bash_path(in_file) + " " + File.bash_path(out_file),
        cwd = tmp_dir.file).check

      val html = File.read(tmp_dir + out_file)

      if (body) {
        // cut out the fragment between the BEGIN/END BIBLIOGRAPHY markers
        cat_lines(
          split_lines(html).
            dropWhile(line => !line.startsWith("<!-- BEGIN BIBLIOGRAPHY")).reverse.
            dropWhile(line => !line.startsWith("<!-- END BIBLIOGRAPHY")).reverse)
      }
      else html
    })
  }
687}
688