1
0
mirror of https://github.com/kanzure/pdfparanoia.git synced 2024-12-04 23:15:52 +01:00

comparediffs seems to be working nicely

This commit is contained in:
Scott Morrison 2013-05-02 22:45:34 +10:00
parent 54b6ab070a
commit 2fb3783dea
4 changed files with 72 additions and 27 deletions

2
.gitignore vendored
View File

@ -15,4 +15,4 @@ pdfparanoia.egg-info/
/*.pdf /*.pdf
# temporary pdfs in tests/diff/ # temporary pdfs in tests/diff/
tests/diff/*.pdf tests/diff/pdf

View File

@ -1,23 +1,68 @@
#!/bin/sh
# Download each URL (read from stdin, one per line) through two different
# SOCKS proxies, run both copies through pdfparanoia, and report whether the
# scrubbed outputs match.  PDFs which still differ after pdfparanoia are left
# in the pdf/ subdirectory with suffixes .1.cleaned.pdf and .2.cleaned.pdf.
#
# Usage: compare host1:port1 host2:port2 < urls

if [ "$#" != "2" ]; then
    echo "Please supply the addresses of two SOCKS proxies:"
    echo "compare host1:port1 host2:port2"
    exit 1
fi

proxy1="$1"
proxy2="$2"

# Sanity-check each proxy before doing any real work: google.com answers a
# bare HTTP request with a redirect page, so seeing "The document has moved"
# demonstrates the proxy is actually forwarding traffic.
if ! curl -s --socks "$proxy1" http://google.com | grep "The document has moved" > /dev/null; then
    echo "SOCKS proxy $proxy1 doesn't seem to be working."
    exit 2
fi
if ! curl -s --socks "$proxy2" http://google.com | grep "The document has moved" > /dev/null; then
    echo "SOCKS proxy $proxy2 doesn't seem to be working."
    exit 3
fi

echo "--------------------------------------------------------------------------------------------------------------------------------"
echo "Comparing PDFs downloaded via '$proxy1' and '$proxy2'"
echo "Please enter URLs, one per line."
echo
echo "PDFs which still differ after pdfparanoia will be left in the pdf/ subdirectory, with suffixes .1.cleaned.pdf and .2.cleaned.pdf"
echo "--------------------------------------------------------------------------------------------------------------------------------"
echo

mkdir -p pdf

# Browser-like request headers, shared by both downloads; some publishers
# refuse requests that do not look like they come from a web browser.
agent="Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"
accept="Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"

while read -r url; do
    # Name cached files after a hash of the URL so reruns skip downloads that
    # already succeeded ([ -s … ]: file exists and is non-empty).
    hash=$(printf '%s\n' "$url" | openssl sha1 | cut -d ' ' -f 2)
    echo "Retrieving $url, with hash $hash"
    if [ ! -s "pdf/$hash.1.pdf" ]; then
        curl --socks "$proxy1" -L --cookie cookie.jar -A "$agent" -H "$accept" "$url" > "pdf/$hash.1.pdf"
    fi
    # A real PDF starts with the magic bytes "%PDF"; anything else is most
    # likely an HTML error or paywall page.
    if [ "$(head -c 4 "pdf/$hash.1.pdf")" = "%PDF" ]; then
        if [ ! -s "pdf/$hash.2.pdf" ]; then
            curl --socks "$proxy2" -L --cookie cookie.jar -A "$agent" -H "$accept" "$url" > "pdf/$hash.2.pdf"
        fi
        if [ "$(head -c 4 "pdf/$hash.2.pdf")" = "%PDF" ]; then
            if cmp -s "pdf/$hash.1.pdf" "pdf/$hash.2.pdf"; then
                echo "PDFs are identical already, no need to use pdfparanoia"
            else
                echo "PDFs differ, running pdfparanoia"
                if [ ! -s "pdf/$hash.1.cleaned.pdf" ]; then
                    pdfparanoia < "pdf/$hash.1.pdf" > "pdf/$hash.1.cleaned.pdf"
                fi
                if [ ! -s "pdf/$hash.2.cleaned.pdf" ]; then
                    pdfparanoia < "pdf/$hash.2.pdf" > "pdf/$hash.2.cleaned.pdf"
                fi
                if cmp -s "pdf/$hash.1.cleaned.pdf" "pdf/$hash.2.cleaned.pdf"; then
                    echo "pdfparanoia successfully scrubbed the PDFs"
                    # The cleaned copies agree, so keep only the raw downloads.
                    rm pdf/"$hash".*.cleaned.pdf
                else
                    echo "pdfparanoia failed!"
                fi
            fi
        else
            echo "Download failed from source 2"
            rm pdf/"$hash".*.pdf
        fi
    else
        echo "Download failed from source 1"
        rm pdf/"$hash".*.pdf
    fi
    echo
done

View File

@ -1,8 +1,6 @@
http://link.springer.com/content/pdf/10.1007/s00440-011-0397-9
http://msp.org/apde/2012/5-2/apde-v5-n2-p07-s.pdf
http://annals.math.princeton.edu/wp-content/uploads/annals-v176-n2-p11-s.pdf http://annals.math.princeton.edu/wp-content/uploads/annals-v176-n2-p11-s.pdf
http://www.ams.org/journals/mcom/2012-81-278/S0025-5718-2011-02542-1/S0025-5718-2011-02542-1.pdf
http://nyjm.albany.edu/j/2009/15-14p.pdf
http://link.springer.com/content/pdf/10.1007/s00440-011-0397-9
http://www.worldscientific.com/doi/pdf/10.1142/S2010326311500018 http://www.worldscientific.com/doi/pdf/10.1142/S2010326311500018
http://www.sciencedirect.com/science?_ob=MiamiImageURL&_cid=272585&_user=994540&_pii=S0001870812001806&_check=y&_origin=article&_zone=toolbar&_coverDate=10-Sep-2012&view=c&originContentFamily=serial&wchp=dGLzVlt-zSkWb&md5=bfeb5e0619d45362640529aff02baeda&pid=1-s2.0-S0001870812001806-main.pdf http://www.sciencedirect.com/science?_ob=MiamiImageURL&_cid=272585&_user=994540&_pii=S0001870812001806&_check=y&_origin=article&_zone=toolbar&_coverDate=10-Sep-2012&view=c&originContentFamily=serial&wchp=dGLzVlt-zSkWb&md5=bfeb5e0619d45362640529aff02baeda&pid=1-s2.0-S0001870812001806-main.pdf
http://www.ams.org/journals/mcom/2012-81-278/S0025-5718-2011-02542-1/S0025-5718-2011-02542-1.pdf
http://www.ems-ph.org/journals/show_pdf.php?issn=1661-7207&vol=5&iss=2&rank=6
http://nyjm.albany.edu/j/2009/15-14p.pdf

2
tests/diff/urls.denied Normal file
View File

@ -0,0 +1,2 @@
http://msp.org/apde/2012/5-2/apde-v5-n2-p07-s.pdf
http://www.ems-ph.org/journals/show_pdf.php?issn=1661-7207&vol=5&iss=2&rank=6