/* Keep track of visited URLs in spider mode.
   Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 Free Software
   Foundation, Inc.

This file is part of GNU Wget.

GNU Wget is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.

GNU Wget is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Wget.  If not, see <http://www.gnu.org/licenses/>.

Additional permission under GNU GPL version 3 section 7

If you modify this program, or any covered work, by linking or
combining it with the OpenSSL project's OpenSSL library (or a
modified version of that library), containing parts covered by the
terms of the OpenSSL or SSLeay licenses, the Free Software Foundation
grants you additional permission to convey the resulting work.
Corresponding Source for a non-source form of such a combination
shall include the source code for the parts of OpenSSL used as well
as that of the covered work.  */

#include "wget.h"

#include <stdio.h>
#include <errno.h>
#include <assert.h>

#include "spider.h"
#include "url.h"
#include "utils.h"
#include "hash.h"
#include "res.h"

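/* Set of broken (non-existing) URLs found during the crawl.  Used as
   a string set: the keys are the URL strings themselves.  */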
static struct hash_table *nonexisting_urls_set;

/* Clean up the data structures associated with this file.  */

void
spider_cleanup (void)
{
  if (nonexisting_urls_set)
    {
      string_set_free (nonexisting_urls_set);
      nonexisting_urls_set = NULL;
    }
}
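
/* spider_cleanup is meant to run once at shutdown; clearing the
   pointer after freeing the set also makes a repeated call harmless.  */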

/* Remember a broken link.  */
void
nonexisting_url (const char *url)
{
  /* Ignore robots.txt URLs.  */
  if (is_robots_txt_url (url))
    return;
  if (!nonexisting_urls_set)
    nonexisting_urls_set = make_string_hash_table (0);
  string_set_add (nonexisting_urls_set, url);
}
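
/* Illustrative sketch only (not part of Wget): a retrieval routine
   that has just seen a hard failure for a URL could record it with

     if (statcode == 404)
       nonexisting_url (url);

   where `statcode' and `url' are hypothetical locals of the caller.
   The set is created lazily on the first call, so no setup is needed.  */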
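/* Report the broken links collected so far, or say that none were
   found.  */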
void
print_broken_links (void)
{
  hash_table_iterator iter;
  int num_elems;

  if (!nonexisting_urls_set)
    {
      logprintf (LOG_NOTQUIET, _("Found no broken links.\n\n"));
      return;
    }

  num_elems = hash_table_count (nonexisting_urls_set);
  assert (num_elems > 0);

  logprintf (LOG_NOTQUIET, ngettext ("Found %d broken link.\n\n",
                                     "Found %d broken links.\n\n", num_elems),
             num_elems);

  for (hash_table_iterate (nonexisting_urls_set, &iter);
       hash_table_iter_next (&iter); )
    {
      const char *url = (const char *) iter.key;

      /* A bare "%s\n" gains nothing from translation, so it is not
         marked with _().  */
      logprintf (LOG_NOTQUIET, "%s\n", url);
    }
  logputs (LOG_NOTQUIET, "\n");
}
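
/* For illustration, a crawl that recorded two broken links prints
   roughly:

     Found 2 broken links.

     http://example.com/missing.html
     http://example.com/old-page.html

   The exact wording follows the active locale via ngettext.  */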

/*
 * vim: et ts=2 sw=2
 */