Commit 8ba61731 authored by remy's avatar remy
Browse files
parents 732f0b1c a558879a
......@@ -4,20 +4,22 @@ install: certs urls
dirs:
mkdir -p /usr/local/websitechecks/etc
mkdir -p /usr/local/websitechecks/var/certs
mkdir /usr/local/websitechecks/var/urls
mkdir /usr/local/websitechecks/var/status
mkdir -p /usr/local/websitechecks/var/workdir
mkdir /usr/local/websitechecks/var/results
mkdir /usr/local/websitechecks/libexec
chmod -R 777 /usr/local/websitechecks/var
status_conf: status
certs_conf: status
status: dirs
cp profile.conf /usr/local/websitechecks/etc/
cp profile.conf /etc/default/websitechecks
cp host_https_list.txt /usr/local/websitechecks/etc/
certs: status
cp check_certs.sh /usr/local/bin
cp ssl-cert-info.sh /usr/local/websitechecks/libexec
chmod +x /usr/local/bin/check_certs.sh
url_conf: dirs
......@@ -25,9 +27,11 @@ url_conf: dirs
urls: url_conf
cp check_urls.sh /usr/local/bin
cp check_sha256.awk /usr/local/websitechecks/libexec
chmod +x /usr/local/bin/check_urls.sh
clean:
rm -rf /usr/local/websitechecks
rm -f /usr/local/bin/check_urls.sh
rm -f /usr/local/bin/check_certs.sh
rm -f /etc/default/websitechecks
......@@ -17,6 +17,31 @@ First, you will need to enter some informations in `profile.conf` file. This fil
Then, edit `host_https_list.txt` and `url_list.txt` to fit your needs.
## How to install
### Requirements
Requirements are really basic Linux software:
- gnu make,
- awk & sed,
- sudo (only needed for a system wide install),
- curl & wget,
- sha256sum to perform checksums
___
After editing the 3 configuration files (`profile.conf` and listings `host_https_list.txt`, `url_list.txt`), just run:
```bash
sudo make install
```
Once `websitechecks` is installed, you should find those files here:
- `/etc/default/websitechecks`
- `/usr/local/websitechecks/etc/host_https_list.txt`
- `/usr/local/websitechecks/etc/url_list.txt`
## How to check your HTTPS certificates
......@@ -25,38 +50,41 @@ Edit `host_https_list.txt` file containing the list of hosts (fqdn / fully quali
Then, just run:
```bash
bash check_certs.sh
check_certs.sh
```
This will produce a json output on the standard output and in the output directory (`OUTPUT_DIR` defined in `profile.conf` (default is `./results`)).
This will produce a json output on the standard output and in the output directory (`OUTPUT_DIR` defined in `profile.conf` (default is `/usr/local/websitechecks/results`)).
## How to use checks.sh
This script checks some URLs (HTTP answers + checksums).
This script checks some URLs (HTTP answers + checksums). Checksums are a good way to check if a website has been defaced. Indeed, any modification on a webpage could be legitimate or not, and this should be monitored.
Note that dynamic webpages are a bit more complicated to monitor this way. You should remove dynamic elements from the webpage before computing the checksum.
Enter the list of URL to scan in `url_list.txt`.
First, enter the list of URLs to scan in `url_list.txt`.
To initialize the working directory `workdir`, you need to launch:
To initialize the working directory `workdir`, you will need to launch:
```bash
bash check_urls.sh init
check_urls.sh init
```
Then, you can do a basic scan:
```bash
bash check_urls.sh check
check_urls.sh check
```
You will find your results in `json` format in your `OUTPUT_DIR` defined in `profile.conf`.
If there is no output on stdout, that is normal. It means that nothing changed since last scan.
To display more information about differences:
```bash
# compare from last run
bash check_urls.sh compare
check_urls.sh compare
# or check differences since the init step
bash check_urls.sh compare2init
check_urls.sh compare2init
```
Finally, you can add a cron job to produce results every N minutes/days...
......@@ -64,18 +92,25 @@ Finally, you can add a cron job to produce results every N minutes/days...
If you detect any changes (see Json files in directory `OUTPUT_DIR`), then, you can also compare it manually:
```bash
cd /usr/local/websitechecks/var
cp -r workdir/ `date +"%Y%m%d"`_workdir
diff -rq <previous_workdir> `date +"%Y%m%d"`_workdir
# then compare specific html pages
diff -Ebw <previous_workdir>/website.index.html `date +"%Y%m%d"`_workdir/website.index.html
# finally, if everything is ok, regenerate a workdir
bash check_urls.sh clean
bash check_urls.sh init
check_urls.sh clean
check_urls.sh init
```
Afterwards, you will have many directories with all your webpage histories.
## Uninstall
```bash
sudo make clean
```
## More useful tools
Check broken links :
......
......@@ -5,18 +5,34 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
DATE=`date '+%Y%m%d_%H%M%S'`
cd ${DIR}
. ./profile.conf
websites=`awk '{ if ($1 !~ "^#") {print;} }' host_https_list.txt`
TOTAL_WEBSITES=`echo ${websites}|awk '{print NF}'`
if [ -z "${OUTPUT_DIR}" ]; then
if [ -f /etc/default/websitechecks ]; then
. /etc/default//websitechecks
if [ -z "${OUTPUT_DIR}" ]; then
OUTPUT_DIR="${VARDIR}/results"
fi
if [ -f "${LIBEXECDIR}/ssl-cert-info.sh" ]; then
CERT_INFO="${LIBEXECDIR}/ssl-cert-info.sh"
fi
if [ -f "${ETCDIR}/host_https_list.txt" ]; then
HOST_LIST="${ETCDIR}/host_https_list.txt"
fi
else
. ./profile.conf
cd ${DIR}
if [ -z "${OUTPUT_DIR}" ]; then
OUTPUT_DIR="./results"
fi
CERT_INFO="${DIR}/ssl-cert-info.sh"
HOST_LIST=host_https_list.txt
fi
if [ ! -d ${OUTPUT_DIR} ]; then
mkdir -p ${OUTPUT_DIR};
mkdir -p ${OUTPUT_DIR}
fi
websites=`awk '{ if ($1 !~ "^#") {print;} }' ${HOST_LIST}`
TOTAL_WEBSITES=`echo ${websites}|awk '{print NF}'`
OUTPUT="${OUTPUT_DIR}/${DATE}_check_certs.json"
#formatting output for jsonreader
......@@ -26,10 +42,10 @@ for host in ${websites}
do
results="${results}\n\t\"${host}\":{"
results="${results}\n\t\t\"enddate\":\t\""
END_DATE=`bash ${DIR}/ssl-cert-info.sh --host $host --end | sed "s/$/\",/"`
END_DATE=`bash ${CERT_INFO} --host $host --end | sed "s/$/\",/"`
results="${results}${END_DATE}"
results="${results}\n\t\t\"status\":\t\""
END_OK=`bash ${DIR}/ssl-cert-info.sh --host $host --end-check|sed "s/$/\"/"`
END_OK=`bash ${CERT_INFO} --host $host --end-check|sed "s/$/\"/"`
results="${results}${END_OK}"
if [ $i -eq $TOTAL_WEBSITES ]; then
results="${results}\n\t}"
......@@ -42,7 +58,7 @@ results="${results}\n}"
echo -e ${results} | tee ${OUTPUT}
ssl_answers=`for host in ${websites}; do echo -n "$host:" && bash ${DIR}/ssl-cert-info.sh --host $host --end-check; done`
ssl_answers=`for host in ${websites}; do echo -n "$host:" && bash ${CERT_INFO} --host $host --end-check; done`
count=`echo "${ssl_answers}" | grep -c "Ok"`
if [ $count -ne $TOTAL_WEBSITES ]; then
......
......@@ -4,7 +4,7 @@
###### This script checks the status of the websites listed in url_list.txt
###### by retrieving the index page and their status via curl
###### the index pages are used to produce checksums
###### which are compared to a historical version sha256sum.txt.orig
###### which are compared to a historical version sha256sum.txt.prev
######
######
######
......@@ -35,7 +35,6 @@
CURDIR=`pwd`
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
WORKDIR=${DIR}/workdir
DATE=`date '+%Y%m%d_%H%M%S'`
# path to commands; could be useful for crons
......@@ -49,7 +48,7 @@ DIFF=`command -v diff`
usage="$0 [compare|init|check|clean|--help]\n\n
\tcompare: display more information on differences since previous run\n
\tcompare2init: display more information on differences since first run\n
\tinit: build the sha256sum.txt.orig file and a status.log file (see bellow)\n
\tinit: perform a check and compute the sha256sum.txt.prev file and a status.log file (see below)\n
\tcheck: run a basic check - get http answer and compare index checksums to the first run\n
\tclean: remove the workdir\n
\t--help: print this help and exit.\n
......@@ -175,9 +174,27 @@ function remove_dyn_elems {
done < $1
}
if [ -f /etc/default/websitechecks ]; then
. /etc/default//websitechecks
if [ -z ${WORKDIR} ]; then
WORKDIR=${VARDIR}/workdir
fi
if [ -f "${ETCDIR}/url_list.txt" ]; then
URL_LIST="${ETCDIR}/url_list.txt"
fi
if [ -f "${LIBEXECDIR}/check_sha256.awk" ]; then
CHECK_SHA256="${LIBEXECDIR}/check_sha256.awk"
fi
else
. ./profile.conf
cd ${DIR}
if [ -z ${WORKDIR} ]; then
WORKDIR=${DIR}/workdir
fi
URL_LIST="../url_list"
CHECK_SHA256="../check_sha256.awk"
fi
cd ${DIR}
. ./profile.conf
if ${CHECK_CERT}; then
CURL_OPTS="-Is"
else
......@@ -195,10 +212,7 @@ OUTPUT_STATUS="${OUTPUT_DIR}/${DATE}_status.json"
if [[ "$1" == "compare" ]]; then
cd ${WORKDIR}
for file in `ls *.html`; do
mv $file $file.orig
done
ext2compare="orig"
ext2compare="prev"
COMPARE=true
elif [[ "$1" == "compare2init" ]]; then
cd ${WORKDIR}
......@@ -206,11 +220,13 @@ elif [[ "$1" == "compare2init" ]]; then
ext2compare="init"
elif [[ "$1" == "init" ]]; then
rm -rf ${WORKDIR}
CHECK=true
FIRST=true
elif [[ "$1" == "clean" ]];then
rm -rf ${WORKDIR}
exit 0
else
CHECK=true
if [[ "$1" != "check" ]]; then
echo -e $usage
exit 0
......@@ -221,23 +237,22 @@ fi
if [ ! -d ${WORKDIR} ]; then
mkdir ${WORKDIR}
fi
cd ${WORKDIR}
rm status.log *.html sha256sum.txt 2>/dev/null
while read name website type product plugins activity owner
do
if [ ${CHECK} ]; then
rm status.log *.html sha256sum.txt 2>/dev/null
while read name website type product plugins activity owner
do
if [[ "$name" =~ [[:space:]]*# ]] || [[ "$name" == "" ]] || [[ "$activity" == "inactive" ]]; then
continue
continue
else
echo -n $website" " >> status.log && $CURL $CURL_OPTS $website|head -1 >> status.log
$WGET -O "${name}.index.html" $website 2>/dev/null
echo -n $website" " >> status.log && $CURL $CURL_OPTS $website|head -1 >> status.log
$WGET -O "${name}.index.html" $website 2>/dev/null
fi
done < ../url_list.txt
/usr/bin/dos2unix status.log 2>/dev/null
remove_dyn_elems ../url_list.txt
done < ${URL_LIST}
/usr/bin/dos2unix status.log 2>/dev/null
remove_dyn_elems ${URL_LIST}
fi
if [ -z ${FIRST} ]; then
$SHA256SUM *.html > sha256sum.txt
......@@ -245,30 +260,30 @@ if [ -z ${FIRST} ]; then
# to check from the file
# sha256sum -c sha256sum.txt
# or the previous one
# sha256sum -c sha256sum.txt.orig
# sha256sum -c sha256sum.txt.prev
if [ ${COMPARE} ]; then
${DIFF} sha256sum.txt sha256sum.txt.orig |tee modified_websites.txt
${DIFF} sha256sum.txt sha256sum.txt.prev |tee modified_websites.txt
websites_modified=`awk '/index.html/ {print $3}' modified_websites.txt|uniq`
for website_modified in $websites_modified
do
orig_file=`echo "${website_modified}.${ext2compare}"`
prev_file=`echo "${website_modified}.${ext2compare}"`
echo ""
echo "#################################################################"
echo "## Checking if there is any file to compare with new result... ##"
echo "#################################################################"
echo ""
if [ -f $website_modified ] && [ -f $orig_file ]; then
echo $DIFF -Ebw workdir/$orig_file workdir/$website_modified
if [ -f $website_modified ] && [ -f $prev_file ]; then
echo $DIFF -Ebw $prev_file $website_modified
echo ""
$DIFF $orig_file $website_modified
$DIFF $prev_file $website_modified
fi
done
#rm modified_websites.txt
else
$DIFF -q sha256sum.txt sha256sum.txt.orig 1>/dev/null 2>&1
$DIFF -q sha256sum.txt sha256sum.txt.prev 1>/dev/null 2>&1
if [ $? -eq 1 ]; then
$DIFF sha256sum.txt sha256sum.txt.orig|$AWK '/[az]/ {print $3}' |uniq
$DIFF sha256sum.txt sha256sum.txt.prev|$AWK '/[az]/ {print $3}' |uniq
fi
fi
......@@ -280,28 +295,34 @@ if [ -z ${FIRST} ]; then
if [[ "$name" =~ [[:space:]]*# ]] || [[ "$name" == "" ]] || [[ "$activity" == "inactive" ]]; then
continue
else
$AWK -f ../check_sha256.awk -v url=$url -v sitename=${name} -v total=$total sha256sum.txt*
$AWK -f ${CHECK_SHA256} -v url=$url -v sitename=${name} -v total=$total sha256sum.txt*
fi
done < ../url_list.txt|sort >> checksums.json
done < ${URL_LIST} |sort >> checksums.json
echo "}" >> checksums.json
else
$SHA256SUM *.html > sha256sum.txt.orig
$SHA256SUM *.html > sha256sum.txt.prev
for file in `ls *.html`; do
cp $file $file.init
done
fi
total=`cat status.log|wc -l`
rm status.json 2>/dev/null
echo "{" > status.json
$AWK -v total=$total '{ if ($NF=="200") {$NF="OK";} if (NR==total) {print "\t\""$1"\": \""$NF"\"";} else {print "\t\""$1"\": \""$NF"\",";} }' status.log >> status.json
echo "}" >> status.json
if [ ${CHECK} ]; then
total=`cat status.log|wc -l`
rm status.json 2>/dev/null
echo "{" > status.json
$AWK -v total=$total '{ if ($NF=="200") {$NF="OK";} if (NR==total) {print "\t\""$1"\": \""$NF"\"";} else {print "\t\""$1"\": \""$NF"\",";} }' status.log >> status.json
echo "}" >> status.json
for file in `ls *.html`; do
cp $file $file.prev
done
# change current directory to $DIR b/c of issues with relative paths in profile.conf
cd ${DIR}
cp ${WORKDIR}/status.json ${OUTPUT_STATUS}
if [ -f ${WORKDIR}/checksums.json ]; then
# change current directory to $DIR b/c of issues with relative paths in profile.conf
cd ${DIR}
cp ${WORKDIR}/status.json ${OUTPUT_STATUS}
if [ -f ${WORKDIR}/checksums.json ]; then
cp ${WORKDIR}/checksums.json ${OUTPUT_CHECKSUMS}
fi
fi
cd ${CURDIR}
......@@ -4,4 +4,8 @@ MAIL_SUBJECT="certificate error"
# following must be set to false if you have some autosigns certificates website
# otherwise, you can set it to true
CHECK_CERT=false
OUTPUT_DIR="./results"
INSTALL_DIR="/usr/local/websitechecks"
VARDIR="${INSTALL_DIR}/var"
ETCDIR="${INSTALL_DIR}/etc"
LIBEXECDIR="${INSTALL_DIR}/libexec"
OUTPUT_DIR="${VARDIR}/results"
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment