mirror of
git://gcc.gnu.org/git/gcc.git
synced 2024-12-29 23:35:55 +08:00
6a82a28a50
PR other/23693 * update_web_docs: Add PDF support. Correct check of DEV-PHASE. From-SVN: r104837
181 lines
4.4 KiB
Bash
Executable File
#!/bin/sh -x

# Generate HTML documentation from GCC Texinfo docs.
# This version is for GCC 3.1 and later versions.

# Run this from /tmp.
CVSROOT=/cvs/gcc
export CVSROOT

PATH=/usr/local/bin:$PATH

# Where the generated documentation is published on the web server.
WWWBASE=/www/gcc/htdocs
WWWBASE_PREFORMATTED=/www/gcc/htdocs-preformatted
# Site-wide preprocessor; -r presumably means "recursive" -- TODO confirm.
WWWPREPROCESS='/www/gcc/bin/preprocess -r'
# Process options -rrelease and -ddirectory.  Each value may be fused to
# its flag (-r4.0.2) or given as the following argument (-r 4.0.2).
RELEASE=""
SUBDIR=""

while [ $# -gt 0 ]; do
  case $1 in
    -r*)
      if [ -n "$RELEASE" ]; then
        echo "Multiple releases specified" >&2
        exit 1
      fi
      RELEASE="${1#-r}"
      if [ -z "$RELEASE" ]; then
        # Value was not fused to the flag; take the next argument.
        shift
        RELEASE="$1"
        if [ -z "$RELEASE" ]; then
          echo "No release specified with -r" >&2
          exit 1
        fi
      fi
      ;;
    -d*)
      if [ -n "$SUBDIR" ]; then
        echo "Multiple subdirectories specified" >&2
        exit 1
      fi
      SUBDIR="${1#-d}"
      if [ -z "$SUBDIR" ]; then
        # Value was not fused to the flag; take the next argument.
        shift
        SUBDIR="$1"
        if [ -z "$SUBDIR" ]; then
          echo "No subdirectory specified with -d" >&2
          exit 1
        fi
      fi
      ;;
    *)
      echo "Unknown argument \"$1\"" >&2
      exit 1
      ;;
  esac
  shift
done

# A release tag only makes sense when published in its own subdirectory,
# e.g. -r4.0.2 -dgcc-4.0.2.
if [ -n "$RELEASE" ] && [ -z "$SUBDIR" ]; then
  echo "Release specified without subdirectory" >&2
  exit 1
fi
# Documentation lands in onlinedocs/, optionally under a per-release
# subdirectory.
if [ -z "$SUBDIR" ]; then
  DOCSDIR=$WWWBASE/onlinedocs
else
  DOCSDIR=$WWWBASE/onlinedocs/$SUBDIR
fi

if [ ! -d "$DOCSDIR" ]; then
  mkdir "$DOCSDIR"
fi

# No -r option means: document the current development sources.
if [ -z "$RELEASE" ]; then
  RELEASE=HEAD
fi
# Scratch area, wiped and recreated on every run; $$ keeps concurrent
# runs from colliding.
WORKDIR=/tmp/gcc-doc-update.$$

/bin/rm -rf $WORKDIR
/bin/mkdir $WORKDIR
cd $WORKDIR

# Find all the texi files in the repository, except those in directories
# we do not care about (texinfo, etc).  Strip the ,v suffix and the
# repository prefix so the list is usable as cvs module paths.
find $CVSROOT/gcc -name \*.texi,v -print \
  | fgrep -v -f/home/gccadmin/scripts/doc_exclude \
  | sed -e s#$CVSROOT/##g -e s#,v##g -e s#Attic/##g > FILES
# Checkout all the texi files, plus the extra files the build needs:
# the install-doc generator, texinfo.tex, the Ada manual preprocessor
# and its word list, and the version/phase files for gcc-vers.texi.
cvs -Q export -r$RELEASE `cat FILES` gcc/gcc/doc/install.texi2html gcc/gcc/doc/include/texinfo.tex gcc/gcc/ada/xgnatugn.adb gcc/gcc/ada/ug_words gcc/gcc/BASE-VER gcc/gcc/DEV-PHASE

# Build a tarball of the sources.
tar cf docs-sources.tar gcc

# The directory to pass to -I; this is the one with texinfo.tex
# and fdl.texi.
includedir=gcc/gcc/doc/include

MANUALS="cpp cppinternals fastjar gcc gccint gcj g77 gfortran gnat_ug_unx gnat_ug_vms gnat_ug_vxw gnat_ug_wnt gnat_ugn_unw gnat-style gnat_rm libiberty porting"
# Generate gnat_ugn_unw.texi from the unified Ada manual sources, when
# they were checked out: xgnatugn expands the ug_words substitutions for
# the "unw" target variant.
if [ -f gcc/gcc/ada/xgnatugn.adb ]; then
  gnatmake -q gcc/gcc/ada/xgnatugn
  ./xgnatugn unw gcc/gcc/ada/gnat_ugn.texi \
    gcc/gcc/ada/ug_words gnat_ugn_unw.texi
fi

# Generate gcc-vers.texi: the version number from BASE-VER, and
# @set DEVELOPMENT when DEV-PHASE marks this as an experimental build.
(
  echo "@set version-GCC $(cat gcc/gcc/BASE-VER)"
  if [ "$(cat gcc/gcc/DEV-PHASE)" = "experimental" ]; then
    echo "@set DEVELOPMENT"
  else
    echo "@clear DEVELOPMENT"
  fi
) > $includedir/gcc-vers.texi
# Now convert the relevant files from texi to HTML, PDF and PostScript.
for file in $MANUALS; do
  filename=`find . -name ${file}.texi`
  # Skip manuals whose source was not checked out (e.g. removed branches).
  if [ "${filename}" ]; then
    makeinfo --html -I ${includedir} -I `dirname ${filename}` ${filename}
    tar cf ${file}-html.tar ${file}/*.html
    # texi2dvi/texi2pdf read from the terminal on errors; /dev/null keeps
    # the batch run from hanging.  Only run dvips if the DVI was built.
    texi2dvi -I ${includedir} ${filename} </dev/null && dvips -o ${file}.ps ${file}.dvi
    texi2pdf -I ${includedir} ${filename} </dev/null
    mkdir -p $DOCSDIR/$file
  fi
done
# Then build a gzipped copy of each of the resulting .html, .ps and .tar files
for file in */*.html *.ps *.pdf *.tar; do
  gzip --best < $file > $file.gz
done
# On the 15th of the month, wipe all the old files from the
# web server, keeping only index.html at the top level; they are
# regenerated below, so stale pages for renamed manuals disappear.
today=`date +%d`
if test $today = 15; then
  # -maxdepth is a global option and must precede the -type test.
  find $DOCSDIR -maxdepth 1 -type f -print | grep -v index.html | xargs rm
  for m in $MANUALS; do
    rm $DOCSDIR/$m/*.html $DOCSDIR/$m/*.html.gz
  done
fi
# And copy the resulting files to the web server -- but only files that
# really changed.  The volatile generator stamps (makeinfo's
# <meta name=generator> line and dvips' %DVIPSSource: line) are stripped
# before comparing so a rebuild with a newer tool does not force a
# re-upload of every file.
for file in */*.html *.ps *.pdf *.tar; do
  sed -e '/^<meta name=generator/d' \
      -e '/^%DVIPSSource:/d' < $DOCSDIR/$file > file1
  sed -e '/^<meta name=generator/d' \
      -e '/^%DVIPSSource:/d' < $file > file2
  if cmp -s file1 file2; then
    :
  else
    cp $file $DOCSDIR/$file
    cp $file.gz $DOCSDIR/$file.gz
  fi
done
cd $DOCSDIR

# Finally, generate the installation documentation (but only for CVS HEAD).
if [ "$RELEASE" = "HEAD" ]; then
  # install.texi2html reads SOURCEDIR and DESTDIR from the environment.
  SOURCEDIR=$WORKDIR/gcc/gcc/doc
  DESTDIR=$WWWBASE_PREFORMATTED/install
  export SOURCEDIR
  export DESTDIR
  $WORKDIR/gcc/gcc/doc/install.texi2html

  # Preprocess the entire web site, not just the install docs!
  # Filter out the preprocessor's routine "Keeping" chatter.
  echo "Invoking $WWWPREPROCESS"
  $WWWPREPROCESS | grep -v '^ Warning: Keeping'
fi
# Clean up behind us.
rm -rf "$WORKDIR"