diff --git a/backup-everything.sh b/backup-everything.sh
index c95f933..a14860b 100755
--- a/backup-everything.sh
+++ b/backup-everything.sh
@@ -1,468 +1,500 @@
 #!/bin/bash
 ###
 # Stupid script to backup some stuff
 #
 # Author: Valerio B.
 # Date:   Wed 25 Mar 2020
 ##
 
 # try to proceed even in case of errors (to do not skip any backup)
 # set -e
 
 
 # current directory
 MYDIR="$(dirname "$(realpath "$0")")"
 
 # load useful stuff
 . "$MYDIR"/bootstrap.sh
 
 ###
 # Create a directory with safe permissions if it does not exist
 #
 # @param string path         Pathname
 # @param int    is_file_mode Check if we are in file mode (default: directory mode)
 #
 write_safe_dir_if_unexisting() {
 
 	local path="$1"
 
 	local is_file_mode="$2"
 
 	# try to create the directory if it does not exists
 	if [ ! -d "$path" ]; then
 
 		warn "creating missing $path with 750"
 		mkdir --parents "$path"
 		chmod 750       "$path"
 	fi
 
 	# it must exists now
 	require_existing_dir "$path"
 }
 
 ###
 # Create a directory for a filename with safe permissions if it does not exist
 #
 # @param string path         Pathname to a filename
 # @param int    is_file_mode Check if we are in file mode (default: directory mode)
 #
 write_safe_basedir_if_unexisting() {
 
 	# first function argument
 	local path="$1"
 
 	# extract just the sub-directory
 	local basepath=$(dirname "$path")
 
 	# create that sub-directory
 	write_safe_dir_if_unexisting "$basepath"
 }
 
 ###
 # Require an existing directory or die
 #
 # @param string Directory path
 #
 require_existing_dir() {
 	if [ ! -d "$1" ]; then
 		error "unexisting directory $1"
 		exit 1
 	fi
 }
 
 # create these pathnames if they do not exist
 write_safe_dir_if_unexisting "$BASE"
 write_safe_dir_if_unexisting "$DAILY"
 write_safe_dir_if_unexisting "$DAILY_FILES"
 write_safe_dir_if_unexisting "$DAILY_DATABASES"
 
 # create or clean the last log file
 cat /dev/null > "$DAILY_LASTLOG"
 
 #
 # Dump and compress a database
 #
 # @param db     string Database name
 # @param spaces string Cosmetic spaces
 #
 backup_database() {
 	local db="$1"
 	local spaces="$2"
 	local path="$DAILY_DATABASES"/"$db".sql.gz
 
 	log "$spaces""dumping database $db"
 
 	# no database no party
 	if ! $MYSQL "$db" -e exit > /dev/null 2>&1; then
 		warn "$spaces  skip unexisting database"
 		return
 	fi
 
 	if [ "$PORCELAIN" != 1 ]; then
 		$MYSQLDUMP "$db" | gzip > "$path"
 	fi
 }
 
 #
 # Dump and compress some databases
 #
 # @param database... string Database names
 #
 backup_databases() {
 	for database in $@; do
 		backup_database "$database"
 	done
 }
 
 #
 # Dump some databases with a shared prefix
 #
 # @param prefix string Database prefix
 # @param spaces string Cosmetic spaces
 #
 backup_databases_prefixed() {
 	local prefix="$1"
 	local spaces="$2"
 
 	log "$spaces""backup all databases prefixed with: $prefix"
 
 	databases=$($MYSQL -e 'SHOW DATABASES' | grep "$prefix")
 	for database in $databases; do
 		backup_database "$database" "$spaces  "
 	done
 }
 
+# default databases to be skipped
+DATABASE_SKIP_LIST=("^information_schema$" "^performance_schema$")
+
+#
+# Call this to skip a database
+#
+# @param string database Database pattern (Bash regex)
+#
+# This should be called before calling backup_every_database.
+#
+skip_database() {
+	local db="$1"
+	DATABASE_SKIP_LIST+=( "$db" )
+}
+
 #
 # Backup every single database. That's easy.
 #
 backup_every_database() {
 
 	# backup every damn database
 	databases=$($MYSQL -e 'SHOW DATABASES')
 	for database in $databases; do
 
+		local do_backup=1
+
 		# just skip the information_schema and the performance_schema that cannot be locked
-		if [[ "$database" != information_schema && "$database" != performance_schema ]]; then
+		for skip_entry in "${DATABASE_SKIP_LIST[@]}"; do
+			if [[ "$database" =~ $skip_entry ]]; then
+
+				# show a cute message
+				log "skipping database $database matching blacklist"
+
+				# do not backup this
+				do_backup=0
+
+				# do not check other entries
+				break
+			fi
+		done
+
+		# backup if it does not match the above skip entries
+		if [ "$do_backup" = 1 ]; then
 			backup_database "$database"
 		fi
 	done
 }
 
 #
 # Backup a database that is used by a service
 #
 # The service will be stopped before dumping.
 #
 # @param service string Systemd unit file
 # @param db      string Database name
 #
 backup_service_and_database() {
 	local service="$1"
 	local db="$2"
 	local is_active=
 
 	log "start backup service '$service' with database '$db'"
 
 	# check if the service is running
 	if systemctl is-active "$service" > /dev/null; then
 		is_active=1
 	fi
 
 	# eventually stop the service
 	if [ $is_active == 1 ]; then
 		if [ "$NO_DISSERVICE" == 1 ]; then
 			warn "  NOT stopping service: $service (to avoid any disservice)"
 		else
 			log  "  stopping service: $service"
 
 			if [ "$PORCELAIN" != 1 ]; then
 				systemctl stop "$service"
 			fi
 		fi
 	else
 		log "  service already inactive: $service"
 	fi
 
 	# backup the database now that the service is down
 	backup_database "$db" "  "
 
 	# eventually start again the service
 	if [ $is_active == 1 ]; then
 		if [ "$NO_DISSERVICE" == 1 ]; then
 			warn "  NOT starting again service: $service (to avoid any disservice)"
 		else
 			log "  starting again service: $service"
 
 			if [ "$PORCELAIN" != 1 ]; then
 				systemctl start "$service"
 			fi
 		fi
 	fi
 }
 
 #
 # Backup some databases used by a service
 #
 # The service will be stopped before dumping the databases
 #
 # @param service string Systemd unit file
 # @param db...   string Database names
 #
 backup_service_and_databases() {
 	backup_service_and_database $@
 }
 
 #
 # Backup phabricator
 #
 # @param path     string Phabricator webroot
 # @param dbprefix string Database prefix
 #
 backup_phabricator() {
 	local path="$1"
 	local dbprefix="$2"
 	local configset="$path"/bin/config
 
 	log "backup Phabricator databases"
 
 	log "  set Phabricator read only mode (and wait some time to make it effective)"
 	$configset set cluster.read-only true > /dev/null
 	sleep 5
 
 	backup_databases_prefixed "$dbprefix" "    "
 
 	log "  revert Phabricator read only mode"
 	$configset set cluster.read-only false > /dev/null
 }
 
 #
 # Backup a directory or a filename
 #
 # @param path       string Pathname to be backupped
 # @param identifier string Optional identifier of this pathname
 #
 backup_path() {
 	local path="$1"
 	local identifier="$2"
 
 	# create a default identifier
 	if [ -z "$identifier" ]; then
 		identifier="$path"
 	fi
 
 	# destination directory
 	# note that the identifier may start with a slash but this is good, just don't care
 	dest="$DAILY_FILES/$identifier"
 
 	# tell were the backup will go
 	if [ "$path" = "$identifier" ]; then
 		log "backup $path"
 	else
  		log "backup $path with identifier $identifier"
 	fi
 
 	# check if the path exists
 	if [ -e "$path" ]; then
 
 		# check if it's a directory
 		if [ -d "$path" ]; then
 
 			# this is a directory
 
 			# eventually create the destination if it does not exist
 			write_safe_dir_if_unexisting "$dest"
 
 			# backup this
 			# force the source to end with a slash in order to copy the files inside the directory
 			$RSYNC "$path/" "$dest"
 		else
 
 			# this is a filename
 
 			# eventually create the base destination if it does not exist
 			write_safe_basedir_if_unexisting "$dest"
 
 			# backup this filename
 			$RSYNC "$path" "$dest"
 		fi
 	else
 
 		# no path no party
 		warn "  path not found: $path"
 	fi
 }
 
 #
 # Backup some filenames ending with whatever
 #
 # @param path[...] string Pathname to be backupped
 #
 backup_paths() {
 
 	# pathnames
 	local paths="$@"
 
 	# process every pathname
 	for path in $paths; do
 
 		# backup every single pathname
 		backup_path "$path"
 	done
 
 }
 
 ##
 # Validate the backup host arguments
 #
 #
 validate_backup_host_args() {
 
 	# function parameters
 	local port="$3"
 
 	# no host no party
 	if [ -z "$1" ] || [ -z "$2" ]; then
 		error "Bad usage: backup_host REMOTE_HOST:REMOTE_PATH IDENTIFIER [PORT]"
 		exit 1
 	fi
 
 	# tell what we will do
 	if [ "$port" = "22" ]; then
 		log "backup $1 in $2"
 	else
 		log "backup $1 (:$port) in $2"
 	fi
 }
 
 ##
 # Backup another host directory via rsync
 #
 # @param string Rsync host
 # @param string Rsync path to a directory
 # @param string Rsync host port
 #
 backup_host_dir() {
 
 	# function parameters
 	local hostlink="$1"
 	local identifier="$2"
 	local port="$3"
 
 	# eventually set default rsync port
 	if [ -z "$port" ]; then
 		port=22
 	fi
 
 	# validate the arguments
 	validate_backup_host_args "$hostlink" "$identifier" "$port"
 
 	# destination path
 	local dest="$BASE/$identifier"
 
 	# create the destination if it does not exist
 	write_safe_dir_if_unexisting "$dest"
 
 	# backup everything
 	# force the source to end with a slash to just copy the files inside it
 	$RSYNC --compress --rsh="ssh -p $port" "$hostlink/" "$dest"
 }
 
 ##
 # Backup another host directory via rsync
 #
 # @param string Rsync host
 # @param string Rsync path to a directory
 #
 push_path_host() {
 
 	# function parameters
 	local source="$1"
 	local hostlink="$2"
 	local port="$3"
 
 	# eventually set default rsync port
 	if [ -z "$port" ]; then
 		port=22
 	fi
 
 	# validate the arguments and print message
 	validate_backup_host_args "$source" "$hostlink" "$port"
 
 	# backup everything
 	# force the source to end with a slash to just copy the files inside it
 	$RSYNC --compress --rsh="ssh -p $port" "$source" "$hostlink"
 }
 
 ##
 # Push some pathnames into an host
 #
 # @param string    Rsync host
 # @param string    Rsync port
 # @param string... Rsync path to some directories
 #
 push_host_port_paths() {
 
 	# function parameters
 	local hostlink="$1"
 	local port="$2"
 
 	# strip the first two arguments
 	shift
 	shift
 
 	local path=""
 
 	# process every pathname
 	while [[ $# -gt 0 ]]; do
 
 		path="$1"
 		shift
 
 		# backup every single pathname
 		push_path_host "$path" "$hostlink" "$port"
 	done
 }
 
 ##
 # Backup another host file via rsync
 #
 # @param string hostlink   Rsync source host and pathname e.g. ravotti94:/tmp/asd.txt
 # @param string identifier Rsync destination directory
 #
 backup_host_file() {
 
 	# function parameters
 	local hostlink="$1"
 	local identifier="$2"
 	local port="$3"
 
 	# filename
 	local filename=$(basename "$hostlink")
 
 	# eventually set default rsync port
 	if [ -z "$port" ]; then
 		port=22
 	fi
 
 	# validate the arguments
 	validate_backup_host_args "$hostlink" "$identifier" "$port"
 
 	# destination path
 	local dest="$BASE/$identifier/$filename"
 
 	# create the destination directory if it does not exist
 	write_safe_basedir_if_unexisting "$dest"
 
 	# backup everything
 	# force the source to end with a slash to just copy the files inside it
 	$RSYNC --compress --rsh="ssh -p $port" "$hostlink" "$dest"
 }
 
 log "init backup in $DAILY"
 
 . "$INSTRUCTIONS"
 
 log "backup successfully concluded"
 
 # remember the last successfully concluded backup timestamp
 date +%s > "$DAILY_LASTTIME"
diff --git a/backup-instructions-example.conf b/backup-instructions-example.conf
index 8173259..c741504 100644
--- a/backup-instructions-example.conf
+++ b/backup-instructions-example.conf
@@ -1,35 +1,40 @@
 ################################################################
 # Put there your stuff to be backupped
 #
 # Notes:
 # Use the 'backup-instructions-example.conf' file to create an
 #         'backup-instructions.conf' file.
 ################################################################
 
 # backup server configurations
 backup_path /etc
 
 # backup logs
 backup_path /var/log
 
 # backup crontab etc.
 backup_path /var/spool
 
+# you can optionally skip some databases
+# by default a couple of system databases are skipped
+# skip_database '^something$'
+# skip_database '^something[0-9]else$'
+
 # backup every single database
 # backup_every_database
 
 # backup a single database
 #backup_database DATABASE_NAME
 
 # backup an host reachable via ssh
 #backup_host_dir  127.1.2.3:/home         mega-server-stuff/home
 
 # backup an host reachable via ssh
 #backup_host_file 127.1.2.3:/tmp/file.war mega-server-stuff/wars
 
 # backup some files using a pattern or something like that
 # backup_paths /var/log/X*
 
 # push something to another host (even on a custom port)
 #push_path_host /tmp/something host:/path
 #push_path_host /tmp/something host:/path 2222