diff --git a/backup-everything.sh b/backup-everything.sh
index e4b9a58..dd0415f 100755
--- a/backup-everything.sh
+++ b/backup-everything.sh
@@ -1,640 +1,651 @@
 #!/bin/bash
 ###
 # Micro script to backup some stuff
 #
 # IMPORTANT: do not edit this file
 #            and see 'backup-instructions.conf' instead!
 #
 # This file defines some functions and then it executes
 # your 'backup-instructions.conf'
 #
 # Author:  2020, 2021 Valerio Bozzolan
 # License: MIT
 ##
 
 # try to proceed even in case of errors (to do not skip any backup)
 # set -e
 
 
 # current directory
 MYDIR="$(dirname "$(realpath "$0")")"
 
 # load useful stuff (defines log/warn/error, $INSTRUCTIONS, paths, etc.)
 . "$MYDIR"/bootstrap.sh
 
 # no instructions no party
 # (quote the expansion: an unquoted $INSTRUCTIONS broke the test when the
 #  path was empty or contained spaces)
 if [ ! -f "$INSTRUCTIONS" ]; then
 	echo "missing instructions expected in $INSTRUCTIONS"
 	exit 1
 fi
 
 ###
 # Create a directory with safe permissions if it does not exist
 #
 # @param string path         Pathname
 # @param int    is_file_mode Check if we are in file mode (default: directory mode)
 #
 write_safe_dir_if_unexisting() {
 
 	local path="$1"
 	local is_file_mode="$2"
 
 	# when the directory is missing, create it (parents included)
 	# with restrictive 750 permissions
 	[ -d "$path" ] || {
 		warn "creating missing $path with 750"
 		mkdir --parents "$path"
 		chmod 750 "$path"
 	}
 
 	# it must exists now
 	require_existing_dir "$path"
 }
 
 ###
 # Create a directory for a filename with safe permissions if it does not exist
 #
 # @param string path         Pathname to a filename
 # @param int    is_file_mode Check if we are in file mode (default: directory mode)
 #
 write_safe_basedir_if_unexisting() {
 
 	# first function argument
 	local path="$1"
 
 	# extract just the sub-directory
 	# (declaration split from the assignment so a dirname failure is not
 	#  masked by the exit status of the 'local' builtin)
 	local basepath
 	basepath=$(dirname "$path")
 
 	# create that sub-directory
 	write_safe_dir_if_unexisting "$basepath"
 }
 
 ###
 # Require an existing directory or die
 #
 # @param string Directory path
 #
 require_existing_dir() {
 	local dir="$1"
 
 	# die loudly when the directory is not there
 	if ! [ -d "$dir" ]; then
 		error "unexisting directory $dir"
 		exit 1
 	fi
 }
 
 # create these pathnames if they do not exist
 # (BASE, DAILY, DAILY_FILES and DAILY_DATABASES are presumably exported by
 #  bootstrap.sh / the instructions file — TODO confirm)
 write_safe_dir_if_unexisting "$BASE"
 write_safe_dir_if_unexisting "$DAILY"
 write_safe_dir_if_unexisting "$DAILY_FILES"
 write_safe_dir_if_unexisting "$DAILY_DATABASES"
 
 # create or clean the last log file
 # (truncate it to zero length, keeping the file itself)
 if [ "$WRITELOG" = 1 ]; then
 	cat /dev/null > "$DAILY_LASTLOG"
 fi
 
 #
 # Dump and compress a database
 #
 # @param db     string Database name
 # @param spaces string Cosmetic spaces
 #
 #
 # Dump and compress a single database into $DAILY_DATABASES/<db>.sql.gz
 #
 # @param db     string Database name
 # @param spaces string Cosmetic spaces
 #
 backup_database() {
 	local db="$1"
 	local spaces="$2"
 	local path="$DAILY_DATABASES"/"$db".sql.gz
 
 	log "$spaces""dumping database $db"
 
 	# no database no party
 	if ! $MYSQL "$db" -e exit > /dev/null 2>&1; then
 		warn "$spaces  skip unexisting database"
 		return
 	fi
 
 	# dump unless we are in dry-run (porcelain) mode
 	if [ "$PORCELAIN" != 1 ]; then
 		$MYSQLDUMP "$db" | gzip > "$path"
 
 		# the pipeline's exit status is gzip's: also check the dump itself,
 		# otherwise a failed dump silently leaves a truncated archive behind
 		if [ "${PIPESTATUS[0]}" != 0 ]; then
 			error "$spaces  failed dumping database $db"
 		fi
 	fi
 }
 
 #
 # Dump and compress some databases
 #
 # @param database... string Database names
 #
 backup_databases() {
 
 	local something_done=
 	local database
 
 	# quote "$@" so each argument stays one database name even if it
 	# contains whitespace
 	for database in "$@"; do
 		something_done=1
 		backup_database "$database"
 	done
 
 	# calling this with no arguments is a usage error, not a no-op
 	if ! [ "$something_done" = 1 ]; then
 		error "Bad usage: backup_databases DATABASE_NAME1 DATABASE_NAME2"
 		error "           Have you confused this instruction with this one?"
 		error "           backup_every_database"
 		exit 1
 	fi
 }
 
 #
 # Dump some databases with a shared prefix
 #
 # @param prefix string Database prefix
 # @param spaces string Cosmetic spaces
 #
 backup_databases_prefixed() {
 	local prefix="$1"
 	local spaces="$2"
 	local databases
 	local database
 
 	log "$spaces""backup all databases prefixed with: $prefix"
 
 	# anchor the match to the start of the name: "prefixed with" means a
 	# leading prefix, while a bare grep also matched names merely
 	# containing the pattern
 	databases=$($MYSQL --skip-column-names --execute='SHOW DATABASES' | grep "^$prefix")
 	for database in $databases; do
 		backup_database "$database" "$spaces  "
 	done
 }
 
 # default databases to be skipped
 DATABASE_SKIP_LIST=("^information_schema$" "^performance_schema$")
 
 #
 # Register a database pattern to be skipped
 #
 # @param string database Database pattern (Bash regex)
 #
 # Call this before backup_every_database.
 #
 skip_database() {
 	local pattern="$1"
 
 	DATABASE_SKIP_LIST+=("$pattern")
 }
 
 #
 # Backup every single database. That's easy.
 #
 backup_every_database() {
 
 	local databases
 	local database
 	local skip_entry
 
 	# backup every damn database
 	# NOTE(review): the plain '-e' output includes the 'Database' header
 	# word, which is then treated as a database name — confirm intended
 	# (backup_database skips it anyway as an unexisting database)
 	databases=$($MYSQL -e 'SHOW DATABASES')
 	for database in $databases; do
 
 		local do_backup=1
 
 		# just skip the information_schema and the performance_schema that cannot be locked
 		for skip_entry in "${DATABASE_SKIP_LIST[@]}"; do
 			if [[ "$database" =~ $skip_entry ]]; then
 
 				# show a cute message (typo fixed: "skippin" -> "skipping")
 				log "skipping database $database matching blacklist"
 
 				# do not backup this
 				do_backup=0
 
 				# do not check other entries
 				break
 			fi
 		done
 
 		# backup if it does not match the above skip entries
 		if [ "$do_backup" = 1 ]; then
 			backup_database "$database"
 		fi
 	done
 }
 
 #
 # Backup a database that is used by a service
 #
 # The service will be stopped before dumping.
 #
 # @param service string Systemd unit file
 # @param db      string Database name
 #
 backup_service_and_database() {
 	local service="$1"
 	local db="$2"
 	local is_active=
 
 	log "start backup service '$service' with database '$db'"
 
 	# check if the service is running
 	if systemctl is-active "$service" > /dev/null; then
 		is_active=1
 	fi
 
 	# eventually stop the service
 	if [ $is_active == 1 ]; then
 		if [ "$NO_DISSERVICE" == 1 ]; then
 			warn "  NOT stopping service: $service (to avoid any disservice)"
 		else
 			log  "  stopping service: $service"
 
 			if [ "$PORCELAIN" != 1 ]; then
 				systemctl stop "$service"
 			fi
 		fi
 	else
 		log "  service already inactive: $service"
 	fi
 
 	# backup the database now that the service is down
 	backup_database "$db" "  "
 
 	# eventually start again the service
 	if [ $is_active == 1 ]; then
 		if [ "$NO_DISSERVICE" == 1 ]; then
 			warn "  NOT starting again service: $service (to avoid any disservice)"
 		else
 			log "  starting again service: $service"
 
 			if [ "$PORCELAIN" != 1 ]; then
 				systemctl start "$service"
 			fi
 		fi
 	fi
 }
 
 #
 # Backup some databases used by a service
 #
 # The service will be stopped before dumping the databases
 #
 # @param service string Systemd unit file
 # @param db...   string Database names
 #
 backup_service_and_databases() {
 	# forward all arguments verbatim: quoting "$@" keeps each argument
 	# intact even if it contains whitespace
 	backup_service_and_database "$@"
 }
 
 #
 # Backup phabricator
 #
 # @param path            string Phabricator webroot
 # @param dbprefix        string Database prefix
 # @param repos_full_path string Optional repositories path to be backupped
 #
 backup_phabricator() {
 	local path="$1"
 	local dbprefix="$2"
 	local repos_full_path="$3"
 	local configset="$path"/bin/config
 
 	log "backup Phabricator databases"
 
 	log "  set Phabricator read only mode (and wait some time to make it effective)"
 	# quote "$configset": a webroot with spaces broke the unquoted call
 	"$configset" set cluster.read-only true > /dev/null
 	sleep 5
 
 	backup_databases_prefixed "$dbprefix" "    "
 
 	# if specified, backup the repositories
 	if [ -n "$repos_full_path" ]; then
 		log "  start Phabricator repositories backup"
 		backup_path "$repos_full_path"
 	else
 		log "  skipping repositories (be sure they are under another backup)"
 	fi
 
 	log "  revert Phabricator read only mode"
 	"$configset" set cluster.read-only false > /dev/null
 }
 
 #
 # Backup a directory or a filename
 #
 # @param path       string Pathname to be backupped
 # @param identifier string Optional identifier of this pathname
 #
 backup_path() {
 	local path="$1"
 	local identifier="$2"
 
 	# create a default identifier
 	if [ -z "$identifier" ]; then
 		identifier="$path"
 	fi
 
 	# destination directory
 	# note that the identifier may start with a slash but this is good, just don't care
 	dest="$DAILY_FILES/$identifier"
 
 	# tell were the backup will go
 	if [ "$path" = "$identifier" ]; then
 		log "backup $path"
 	else
  		log "backup $path with identifier $identifier"
 	fi
 
 	# check if the path exists
 	if [ -e "$path" ]; then
 
 		# check if it's a directory
 		if [ -d "$path" ]; then
 
 			# this is a directory
 
 			# eventually create the destination if it does not exist
 			write_safe_dir_if_unexisting "$dest"
 
 			# backup this
 			# force the source to end with a slash in order to copy the files inside the directory
 			$RSYNC "$path/" "$dest"
 		else
 
 			# this is a filename
 
 			# eventually create the base destination if it does not exist
 			write_safe_basedir_if_unexisting "$dest"
 
 			# backup this filename
 			$RSYNC "$path" "$dest"
 		fi
 	else
 
 		# no path no party
 		warn "  path not found: $path"
 	fi
 }
 
 #
 # Backup last lines of a filename and compress them
 #
 # @param path       string Pathname to be backupped
 # @param identifier string Optional identifier of this pathname (it's considered a file)
 # @param lines      string Lines to be saved
 #
 backup_last_log_lines() {
 
 	local path="$1"
 	local identifier="$2"
 	local lines="$3"
 
 	# create a default identifier
 	if [ -z "$identifier" ]; then
 		identifier="$path"
 	fi
 
 	# default lines
 	if [ -z "$lines" ]; then
 		lines="$BACKUP_LAST_LOG_LINES"
 	fi
 
 	# destination directory (made local: it previously leaked into the
 	# global scope)
 	# note that the identifier may start with a slash but this is good, just don't care
 	local dest="$DAILY_FILES/$identifier"
 
 	# tell where the backup will go
 	if [ "$path" = "$identifier" ]; then
 		log "backup $path last $lines log lines"
 	else
 		log "backup $path last $lines log lines with identifier $identifier"
 	fi
 
 	# check if the path exists
 	if [ -e "$path" ]; then
 
 		# this is a filename
 
 		# eventually create the base destination if it does not exist
 		write_safe_basedir_if_unexisting "$dest"
 
 		# backup this file
 		tail -n "$lines" "$path" > "$dest"
 
 		# compress in place: "$dest" becomes "$dest.gz"
 		log "compressing $dest"
 		gzip --force "$dest"
 	else
 
 		# no path no party
 		warn "  path not found: $path"
 	fi
 }
 
 #
 # Backup several pathnames
 #
 # @param path[...] string Pathname to be backupped
 #
 backup_paths() {
 
 	local path
 
 	# iterate over "$@" directly: collapsing the arguments into one
 	# string and re-splitting it broke any pathname containing spaces
 	for path in "$@"; do
 
 		# backup every single pathname
 		backup_path "$path"
 	done
 
 }
 
 ##
 # Validate the backup host arguments
 #
 #
 validate_backup_host_args() {
 
 	# function parameters
 	local port="$3"
 
 	# no host no party
 	[ -n "$1" ] && [ -n "$2" ] || {
 		error "Bad usage: backup_host REMOTE_HOST:REMOTE_PATH IDENTIFIER [PORT]"
 		exit 1
 	}
 
 	# tell what we will do, mentioning the port only when non-default
 	case "$port" in
 		22) log "backup $1 in $2" ;;
 		*)  log "backup $1 (:$port) in $2" ;;
 	esac
 }
 
 ##
 # Backup another host directory via rsync
 #
 # @param string Rsync host
 # @param string Rsync path to a directory
 # @param string Rsync host port
 #
 backup_host_dir() {
 
 	# function parameters: the port falls back to the default SSH port
 	local src_host="$1"
 	local identifier="$2"
 	local port="${3:-22}"
 
 	# validate the arguments
 	validate_backup_host_args "$src_host" "$identifier" "$port"
 
 	# destination path
 	local dest="$BASE/$identifier"
 
 	# create the destination if it does not exist
 	write_safe_dir_if_unexisting "$dest"
 
 	# backup everything
 	# trailing slash on the source: copy the files inside it, not the directory itself
 	$RSYNC_REMOTE --rsh="ssh -p $port" "$src_host/" "$dest"
 }
 
 ##
 # Backup another host directory via rsync
 #
 # @param string Rsync host
 # @param string Rsync path to a directory
 #
 push_path_host() {
 
 	# function parameters: the port falls back to the default SSH port
 	local src_path="$1"
 	local dst_host="$2"
 	local port="${3:-22}"
 
 	# validate the arguments and print message
 	validate_backup_host_args "$src_path" "$dst_host" "$port"
 
 	# push the source onto the remote host as-is
 	$RSYNC_REMOTE --rsh="ssh -p $port" "$src_path" "$dst_host"
 }
 
 ##
 # Push some pathnames into an host
 #
 # @param string    Rsync host
 # @param string    Rsync port
 # @param string... Rsync path to some directories
 #
 push_host_port_paths() {
 
 	# function parameters
 	local hostlink="$1"
 	local port="$2"
 
 	# drop host and port, keeping only the pathnames
 	shift
 	shift
 
 	# push every remaining pathname, one by one
 	local path
 	while [ "$#" -gt 0 ]; do
 		path="$1"
 		push_path_host "$path" "$hostlink" "$port"
 		shift
 	done
 }
 
 ##
 # Push the local "daily/" directory into a remote host
 #
 # @param string Rsync host
 # @param string Rsync path to a directory
 #
 push_daily_directory() {
 
 	# function parameters
 	local dst_host="$1"
 	local dst_port="$2"
 
 	# trailing slash: push the contents of the daily directory
 	push_path_host "$DAILY/" "$dst_host" "$dst_port"
 }
 
 ##
 # Backup another host file via rsync
 #
 # @param string hostlink   Rsync source host and pathname e.g. ravotti94:/tmp/asd.txt
 # @param string identifier Rsync destination directory
 #
 backup_host_file() {
 
 	# function parameters: the port falls back to the default SSH port
 	local hostlink="$1"
 	local identifier="$2"
 	local port="${3:-22}"
 
 	# remote filename (declaration split from assignment on purpose)
 	local filename
 	filename=$(basename "$hostlink")
 
 	# validate the arguments
 	validate_backup_host_args "$hostlink" "$identifier" "$port"
 
 	# destination path
 	local dest="$BASE/$identifier/$filename"
 
 	# create the destination directory if it does not exist
 	write_safe_basedir_if_unexisting "$dest"
 
 	# pull the single remote file into the destination
 	$RSYNC_REMOTE --rsh="ssh -p $port" "$hostlink" "$dest"
 }
 
 #
 # Send a date to a Zabbix item
 #
 # This will be done only if Zabbix is installed.
 #
 # The Zabbix item must be of type "Zabbix trapper"
 #
 # If the "zabbix_sender" command is not installed, nothing will be done.
 #
 # Item definition:
 #   - Type: Zabbix trapper
 #   - Unit: unixtime
 #   - Type: Numeric
 #
 zabbix_sender_date_if_installed() {
 
 	local zabbix_item_key="$1"
 
 	# check if Zabbix sender is available
 	# ('command -v' is the portable builtin replacement for 'which')
 	if command -v zabbix_sender > /dev/null; then
 
 		local current_time="$(date +%s)"
 
 		# note that the Zabbix sender can fail
 		# note that the Zabbix output is unuseful
 		# if you have problems, remove this part:    > /dev/null
 		if zabbix_sender                         \
 		  --config /etc/zabbix/zabbix_agentd.conf \
 		  --key "$zabbix_item_key"                 \
 		  --value "$current_time" > /dev/null; then
 
 			log "sent Zabbix value for key $zabbix_item_key"
 
 		else
 
 			error "fail sending Zabbix value for key $zabbix_item_key - exit status: $?"
 
 		fi
 	fi
 
 }
 
 log "init backup in $DAILY"
 
 # try to record when we started
 zabbix_sender_date_if_installed micro_backup_date_start
 
 # execute the user-provided backup instructions (sourced, so they can call
 # every function defined above)
 . "$INSTRUCTIONS"
 
 # try to record when we concluded
 zabbix_sender_date_if_installed micro_backup_date_stop
 
 log "backup successfully concluded"
 
 # remember the last successfully concluded backup timestamp
 date +%s > "$DAILY_LASTTIME"