/* Keep track of visited URLs in spider mode.
   Copyright (C) 2006, 2007, 2008, 2009 Free Software Foundation, Inc.

This file is part of GNU Wget.

GNU Wget is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.

GNU Wget is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Wget.  If not, see <http://www.gnu.org/licenses/>.

Additional permission under GNU GPL version 3 section 7

If you modify this program, or any covered work, by linking or
combining it with the OpenSSL project's OpenSSL library (or a
modified version of that library), containing parts covered by the
terms of the OpenSSL or SSLeay licenses, the Free Software Foundation
grants you additional permission to convey the resulting work.
Corresponding Source for a non-source form of such a combination
shall include the source code for the parts of OpenSSL used as well
as that of the covered work.  */

#include "wget.h"

#include <stdio.h>
#include <errno.h>
#include <assert.h>

#include "spider.h"
#include "url.h"
#include "utils.h"
#include "hash.h"
#include "res.h"

/* Set of URLs (as strings) that were found to be broken while
   spidering.  Allocated lazily, when the first broken link is
   recorded.  */
static struct hash_table *nonexisting_urls_set;

/* Clean up the data structures associated with this file.  */

void
spider_cleanup (void)
{
  if (nonexisting_urls_set)
    string_set_free (nonexisting_urls_set);
}
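
/* Illustrative call site (a sketch, not part of this file): this
   cleanup is meant to run once at program shutdown; the surrounding
   cleanup() routine shown here is assumed, not defined in this file:

     void
     cleanup (void)
     {
       ...
       spider_cleanup ();
     }
 */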

/* Remember URL as a broken link.  */

void
nonexisting_url (const char *url)
{
  /* robots.txt URLs are not user-requested documents, so a missing
     one is not a broken link; ignore it.  */
  if (is_robots_txt_url (url))
    return;
  if (!nonexisting_urls_set)
    nonexisting_urls_set = make_string_hash_table (0);
  string_set_add (nonexisting_urls_set, url);
}
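
/* Illustrative usage (a hedged sketch; the caller, the `status_code'
   variable and the URL string shown here are assumptions, not taken
   from this file): the retrieval code would record a dead link
   roughly like this after a failed fetch in spider mode:

     if (opt.spider && status_code == 404)
       nonexisting_url (url);
 */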

/* Report the broken links collected by nonexisting_url, if any.  */

void
print_broken_links (void)
{
  hash_table_iterator iter;
  int num_elems;

  if (!nonexisting_urls_set)
    {
      logputs (LOG_NOTQUIET, _("Found no broken links.\n\n"));
      return;
    }

  num_elems = hash_table_count (nonexisting_urls_set);
  assert (num_elems > 0);

  logprintf (LOG_NOTQUIET, ngettext ("Found %d broken link.\n\n",
                                     "Found %d broken links.\n\n", num_elems),
             num_elems);

  for (hash_table_iterate (nonexisting_urls_set, &iter);
       hash_table_iter_next (&iter); )
    {
      const char *url = (const char *) iter.key;

      logprintf (LOG_NOTQUIET, "%s\n", url);
    }
  logputs (LOG_NOTQUIET, "\n");
}
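
/* Illustrative output (derived from the format strings above, with
   hypothetical example.com URLs): a run that recorded two broken
   links ends with

     Found 2 broken links.

     http://example.com/a.html
     http://example.com/b.html

   while a clean run prints "Found no broken links." instead.  */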

/*
 * vim: et ts=2 sw=2
 */