#!/usr/bin/perl

# Copy log files from a GCC build for HTTP access.
# Copyright (C) 2008, 2009 Free Software Foundation, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# USAGE:
# mkindex.pl <srcdir> <destdir> <branchname>

# This script copies the log files from a GCC build directory, then
# compresses and indexes them for web browser access.  The goal is an
# easily accessible collection of files for analyzing regressions
# without having to run the build yourself.  Binary files (.o files,
# executables) are intentionally excluded: they take up a lot of
# space, and when they are needed it is usually better to just run a
# build anyway.

# 'srcdir' is the root directory of a GCC build (was $objdir in the build).
# 'destdir' will be erased and replaced with the log files, and should be an
#   absolute path.
# 'branchname' is used only to produce the title of the index page,
#   which will be named 'index.html'.
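#
# Example invocation (the directory names here are only illustrative):
#   mkindex.pl /home/me/gcc-build /var/www/gcc-logs trunk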

use warnings;
use strict;
use File::Path qw(mkpath rmtree);
use File::Find qw(find);

if ($#ARGV != 2) {
    print "usage: $0 <srcdir> <destdir> <branchname>\n";
    exit 1;
}

my ($srcdir, $destdir, $branchname) = @ARGV;
die "destdir is not absolute" unless ($destdir =~ m,^/,);

# Erase the destination.
rmtree $destdir;
mkdir $destdir or die "${destdir}: $!";

# Copy and compress the files into the destination, and keep a list in @files.
my @files = ();
sub my_wanted {
    # Copy all files ending with .log or .sum.
    if (/\.(log|sum)$/ && -f) {

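	# Inside the wanted callback, File::Find sets $File::Find::dir to
	# the directory currently being visited and $_ to the name of the
	# file within it; the code below turns those into a path relative
	# to $srcdir.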
	die "$File::Find::dir is not under $srcdir"
	    unless (substr ($File::Find::dir,0,(length $srcdir)) eq $srcdir);
	my $dir = substr $File::Find::dir,(length $srcdir);
	$dir = substr $dir,1 unless ($dir eq '');
	my $name = $_;
	$name = $dir . '/' . $_ if ($dir ne '');

	mkpath $destdir . '/' . $dir;
	# Compress the files.  Use .gzip instead of .gz for the
	# extension to avoid (broken) browser workarounds for broken
	# web servers.
	system ("gzip -c -q -9 $_ > $destdir/${name}.gzip") == 0 or exit 2;

	# Write the (compressed) size consistently in Kbytes.
	my $size = -s $destdir . '/' . $name . '.gzip';
	my $printable_size = (sprintf "%.0fK", $size / 1024);
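	# For example, a 5300-byte compressed file is listed as "5K"
	# (5300 / 1024 rounds to 5).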

	push @files,[$name.'.gzip',$name,$printable_size];
    }
}
find ({wanted => \&my_wanted}, $srcdir);

# Sort the list of files for the index.
@files = sort {$a->[1] cmp $b->[1]} @files;

# Create the index.
open INDEX,'>',$destdir . '/index.html' or die "${destdir}/index.html: $!";
# Use strict XHTML 1.0, and set charset to UTF-8.
print INDEX <<EOF or die "writing index: $!";
<!DOCTYPE html
     PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
     "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
 <title>Log files for $branchname</title>
 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
</head>
<body>
<h1>Log files for $branchname</h1>
<table><tr><th>Name</th><th align='right'>Size</th></tr>
EOF
# The index will have two columns, filename (without .gzip) and
# compressed size.
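# A typical generated row looks like this (the file name is hypothetical):
#   <tr><td><a href="gcc/gcc.log.gzip">gcc/gcc.log</a></td><td align='right'>12K</td></tr>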
foreach my $f (@files) {
    printf INDEX "<tr><td><a href=\"%s\">%s</a></td><td align='right'>%s</td></tr>\n",
	$f->[0], $f->[1], $f->[2] or die "writing index: $!";
}

print INDEX "</table></body></html>\n" or die "writing index: $!";
close INDEX or die "writing index: $!";
exit 0;