mysqldump and how to change the dump file
// Good for backups
mysqldump --all-databases
// Only one database named 'test'
mysqldump test
// Only one table tt from the database test
mysqldump test tt
// all tables from test database except table Audit
mysqldump test --ignore-table=test.Audit
// save the dump to a file
mysqldump test tt > testing1.txt
// only the structure, without data (-d is short for --no-data)
mysqldump test -d
// data in tab-delimited format; --tab writes tt.sql (structure) and tt.txt (data) into the given directory on the server host
mysqldump test tt --tab=/var/lib/mysql/customers
// dump the data in comma-separated format, similar to the --tab option
mysqldump --compact --no-create-info --skip-extended-insert db_name tbl_name | sed 's/^INSERT INTO [^(]*(//' | sed 's/);$//'
// dump a single row from each table, including the column names
mysqldump emailplatform_db --no-create-info --where="1=1 limit 1" --compact --complete-insert
// copy the .sql and .txt files created by --tab to another host and restore them
cat /backup/*.sql | mysql sakila
mysqlimport sakila /backup/*.txt
// --compact skips the comments, LOCK TABLES and DROP TABLE IF EXISTS statements
// the output is just the CREATE TABLE statement and a single extended INSERT
mysqldump test tt --compact
CREATE TABLE `tt` (
`name` varchar(50) default NULL
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
INSERT INTO `tt` VALUES ('aba'),('abacad'),('abacadaea'),('aba'),('abacad'),('abacadaea');
// without create table statement
mysqldump test tt --compact --no-create-info
INSERT INTO `tt` VALUES ('aba'),('abacad'),('abacadaea'),('aba'),('abacad'),('abacadaea');
// separate insert statements
mysqldump test tt --compact --no-create-info --skip-extended-insert --skip-tz-utc
INSERT INTO `tt` VALUES ('aba');
INSERT INTO `tt` VALUES ('abacad');
INSERT INTO `tt` VALUES ('abacadaea');
INSERT INTO `tt` VALUES ('aba');
INSERT INTO `tt` VALUES ('abacad');
INSERT INTO `tt` VALUES ('abacadaea');
// only records that satisfy a criterion
mysqldump test tt --compact --no-create-info --skip-triggers --insert-ignore --skip-extended-insert --where="name like 'abacada%'"
INSERT IGNORE INTO `tt` VALUES ('abacadaea');
INSERT IGNORE INTO `tt` VALUES ('abacadaea');
// the same idea applied to two related tables
mysqldump db_name ox_banners ox_campaigns --compact --no-create-info --skip-extended-insert --where='campaignid = 66'
// 100 records from each table of database dbName for testing
mysqldump --opt --where="true LIMIT 100" dbName > dbname.sql
// back up a single stored procedure by dumping its row from the mysql.proc table
mysqldump mysql proc --no-create-info --compact -w"db='db_name' and name='sp_name'"
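// to copy that procedure to another server, the same dump can be piped into the remote mysql schema; a rough sketch, where remote_host and the credentials are placeholders, both servers are assumed to run the same MySQL 5.x version, and the procedure must not already exist on the remote side
mysqldump mysql proc --no-create-info --compact -w"db='db_name' and name='sp_name'" | mysql -h remote_host -uroot -pPassWord mysql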
// routines and table structure only, no data
mysqldump dbname -R -d --compact > somefile.txt
// dump with routines to a date-stamped file
mysqldump -R adyp_mumbai > /root/jdyp_backup/"adyp_mumbai_48_`date +%d%m%y%I`.sql"
// import remote table directly into your local test database
mysqldump -h db4free.net -P 3306 -ushantanuo -pPassWord shantanuo calendar | mysql test
// dump backup to another server
mysqldump test | ssh user@remote.server.com "dd of=/mysql/$(date +'%d-%m-%y')"
// transfer one database to another server
mysqldump -h host1 -u username -pPASSWORD database_name | mysql -h host2 -u username -pPASSWORD database_name
// transfer one table to remote server
mysqldump test india | mysql -h db4free.net -P 3306 -ushantanuo -pindia162 shantanuo
// transfer one table to another using ssh
mysqldump test mytest1 --compact | ssh root@172.29.0.215 mysql test
// transfer only data to remote table
mysqldump thisDB thisTBL --no-create-info --compact --skip-quote-names | replace "INSERT INTO thisTBL " "INSERT INTO thisTBL_New " | mysql -h 192.29.0.213 -uroot -pPassWd remoteDB
// Creates an optimized backup of the test database and pipes the output directly to gzip to compress the data
mysqldump --opt test | gzip > /root/test.sql.gz
// Does the same thing as above, but for all of the databases on the server.
mysqldump --opt -A | gzip > destination.sql.gz
// copy a table to another server using gzip and ssh
mysqldump -uroot -pPassWord dbName tbl_name --compact | gzip | ssh 999.999.999.999 "gunzip | mysql -uroot -pPassWord someDB"
// dump all the databases to remote server
for I in $(mysql -e 'show databases' -uroot --password=root -s --skip-column-names); do mysqldump -uroot --password=root $I | gzip -c | ssh user@server.com "cat > /home/$I.sql.gz"; done
// do not change the timestamp values based on time zones
mysqldump test testTime --skip-tz-utc
// restore only one database from a full dump (--one-database is a mysql client option)
mysql --one-database BooksDB < mytest.txt
// reset auto increment value to 0
# mysqldump -d dbname --compact | sed 's/\(.*ENGINE.*AUTO_INCREMENT=\).*/\10;/g'
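// to apply the modified schema, the same pipeline can be fed into a target database; a rough sketch, where target_db is a placeholder and is assumed not to contain the tables yet
mysqldump -d dbname --compact | sed 's/\(.*ENGINE.*AUTO_INCREMENT=\).*/\10;/g' | mysql target_db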
// structure-only dump of all user databases, with routines and the AUTO_INCREMENT values reset
mysqldump -uroot -pPassWord -d --compact --routines `echo $(mysqlshow -uroot -pPassWord | awk '{print $2}' | egrep -v 'mysql|test|Databases|information_schema|lost\+found')` | sed 's/\(.*ENGINE.*AUTO_INCREMENT=\).*/\10;/g'
// search for a word P122C8 in any column
mysqldump --skip-extended-insert db_name tbl_company | grep 'P122C8'
// to keep InnoDB tables consistent and record the binary log position as a comment, add these options
--single-transaction --master-data=2
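// for example, a typical consistent backup might look like this (db_name and the file path are placeholders)
mysqldump --single-transaction --master-data=2 db_name > /backup/db_name_consistent.sql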
// copy a procedure from one database to another
(echo "DELIMITER ;; " && mysqldump --all-databases --routines -d | awk '/testProc/,/;;/') | mysql -h192.0.0.0 -uroot -pPassWord Other_DB
// remove the DEFINER from SP or Function
// http://forge.mysql.com/tools/tool.php?id=242
mysqldump -uroot -pPassWord -d --routines dbName | perl /home/develop/dumper.pl --delete | more
time mysqldump -uroot -pPassWord -R --single-transaction dbDRCG | perl /home/shantanu/dumper.pl --delete | mysql -uroot -pPassWord shantanu
// or use sed
mysqldump prod_int_dev_ve app_users -d | sed -e 's/DEFINER=[^*]*\*/\*/' | more
// triggers backup
mysqldump -hlocalhost -uroot -proot@123 db_name -d --no-create-info | sed -e 's/DEFINER[ ]*=[ ]*[^*]*\*/\*/' > triggers_backup.sql
# drop current triggers
mysql -Bse"select CONCAT('drop trigger ', TRIGGER_SCHEMA, '.', TRIGGER_NAME, ';') from information_schema.triggers" | mysql -hlocalhost -uroot -proot@123
# Restore from file, use root@localhost credentials
mysql -hlocalhost -uroot -proot@123 < triggers_backup.sql
// change all the definers of stored procedures to root@localhost
mysqldump -uroot --all-databases --no-create-info --no-data -R --skip-triggers | sed -e 's/DEFINER=[^*]*\*/\*/' | mysql -hlocalhost -uroot -proot@123
// strip indexes, change engine, set up slave
## perl -0777 -p -e 's/,\n^\)/\n\)/mg' -> after removing the index lines, the trailing comma before the closing parenthesis needs to be removed as well
mysqldump -uuser -ppassword -h127.0.0.1 -P3306 dbname --master-data=1 | sed 's/ENGINE=MyISAM/ENGINE=archive/g' | grep -v '^ *UNIQUE KEY' | grep -v '^ *KEY' | perl -0777 -p -e 's/,\n^\)/\n\)/mg' | mysql -uuser -ppassword -h127.0.0.1 -P3307 dbname
// Exclude one or two databases from the dump (the echo prints the resulting mysqldump command so it can be reviewed before running)
mysql -BNe"show databases" | egrep -vw 'four|five_report|information_schema|mysql|test' | xargs echo mysqldump --single-transaction
// skip 2 tables from dump, can ignore views as well
mysqldump -uroot -proot@123 -h10.10.10.10 -P3308 --all-databases --ignore-table=Batch.JOB_EXECUTION --ignore-table=Batch.EXECUTION_CONTEXT
// take a structure-only backup from cron, skipping table locks (useful when log tables cannot be locked)
50 08 * * * time mysqldump -h10.10.10.10 -uroot -proot@123 -d -R --all-databases --skip-lock-tables --skip-comments | zip > /home/backup_structure_101_$(date +'\%d\%m\%y').sql.zip 2> /home/backup_err.txt
// one liner to drop all databases
mysqlshow -uroot | egrep -v "(Databases|information_schema|performance_schema|mysql)" | awk '{print "mysql -uroot -Bse \"drop database", $2, "\""}' | sh
// one liner to check foreign key violations
mysqlshow | egrep -v "(Databases|information_schema|performance_schema|mysql)" | awk '{print $2}' | sed '/^$/d' | awk '{sub($1,"mysql \-uroot \-Bse \"call mysql.p_check_fk_constraint_violations\('"'"'&'"'"','"'"'%'"'"'\)\""); print $0}' | sh
// check all databases for foreign key data violations
# for dbname in `mysql -BNe "show databases"`; do echo $dbname; mysql -e"call mysql.p_check_fk_constraint_violations('$dbname','%')"; done
## mysql full backup cron
58 11 * * * time mysqldump -h10.11.12.13 -uroot -proot@123 -R --skip-tz-utc --master-data=2 --add-drop-database --databases db1 db2 db3 | zip > /home/shantanu/backup_full_master.sql.zip 2> /home/shantanu/backup_err.txt
# daily schemawise backup
mysql -BNe"show databases" | egrep -v "(Databases|information_schema|performance_schema|mysql)" | awk '{print "mysqldump -R -f --skip-tz-utc --master-data=2 --add-drop-database --databases", $1, "| gzip > /home/backup/"$1"_$(date +\%Y\%m\%d).sql.gzip"}' | sh
# daily schemawise backup using cron-job. escape special character %
32 09 * * * mysql -BNe"show databases" | egrep -v "(Databases|information_schema|performance_schema|mysql)" | awk '{print "mysqldump -R -f --skip-tz-utc --master-data=2 --add-drop-database --databases", $1, "| gzip > /home/backup/"$1"\_$(date +\%Y\%m\%d).sql.gzip"}' | sh > /home/shantanu/success.txt 2> /home/shantanu/err.txt
// schema wise back up of all databases starting with "STR"
time mysql -BNe"show databases" | grep ^STR | awk '{print "mysqldump -R -f --skip-tz-utc --master-data=2 --add-drop-database --databases", $1, "| gzip > /home/shantanu/STR/"$1".sql.gzip"}' | sh
// create blackhole tables on slave for dry run purpose
time mysqldump --databases db_name -d --skip-add-drop-table | sed 's/ENGINE=MyISAM/ENGINE=BLACKHOLE/g' | mysql -h Remote_server_ip -uUser_Name -pPassWd
// dump the privileges of the root@localhost user from the mysql.user grant table
mysqldump mysql user --compact --no-create-info --where="Host = 'localhost' and User = 'root'"
// there is no --ignore-database option; use the following to dump everything except a few system databases
time mysqldump --databases `mysql -Bse "SELECT GROUP_CONCAT(schema_name SEPARATOR ' ') FROM information_schema.schemata WHERE schema_name NOT IN ('mysql','performance_schema','information_schema');"` > /mysql/backup/rds2.sql
# clone the root@localhost user so that root can connect from any host (%)
mysqldump mysql user --where='Host="localhost" and User="root" limit 1' --compact --no-create-info | sed 's/localhost/%/' | mysql mysql && mysqladmin flush-privileges
// there are times when Linux commands like sed and grep are handier than MySQL UPDATE and ALTER statements:
mysqldump test AIRPORT --no-create-info --compact --skip-extended-insert | sed 's/INSERT INTO `AIRPORT` VALUES (//g' | sed 's/);//' > to_load.txt
1,'new airpor\"t',1
3,'3 airport',0
0,'FRANCE',4
5,'PAKIS\'TAN',5
// mysqldump by default uses extended inserts, packing multiple rows into a single INSERT statement. Use sed to break that up into separate statements
sed 's/),(/); insert into audit_trail values (/g' mysqldump.sql > skip_extended_dump.txt
// enable verbose mode and redirect the errors to a file in order to find the exact statement that causes a problem
time mysql db_name -fvvv < skip_extended_dump.txt 2>to_study_err.txt >&2
// process the data found in file to_load.txt and then load it back after truncating the table
LOAD DATA LOCAL INFILE 'to_load.txt' into table AIRPORT fields terminated by ',' optionally enclosed by "'";
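// the truncate mentioned above, to be run before the LOAD DATA statement
TRUNCATE TABLE AIRPORT;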
// if the mysqldump backup was not taken with --replace or --insert-ignore, convert the INSERT statements afterwards
sed -i 's/^INSERT INTO/INSERT IGNORE INTO/' filename.sql
_____
To pipe into gzip:
mysqldump [options] | gzip -c > output.sql.gz
or
mysqldump [options] | gzip --stdout > output.sql.gz
To avoid saving the output file to the local disk:
mysqldump | gzip --stdout | ssh username@host "cd /preferred_path/; cat > mysqldump_file.gz"
or, fancier still (though if any step in the pipeline fails, the whole transfer fails):
mysqldump | gzip --stdout | ssh username@host "gunzip | mysql -u username -ppassword db_name"
(Note: if you prefer, netcat can be used instead of ssh; see the example at the end of this post.)
_____
# typical backup command to be used on master server:
time mysqldump -h192.168.1.1 -ushantanu -pPassWd -R --master-data=2 db_name | zip > /home/db_name_101.sql.zip
_____
You can use the --compact or --skip-add-drop-table option to suppress the DROP TABLE IF EXISTS statements in the dump.
But if you have received a file that already contains them, you will need to remove them before importing so that the data is loaded into the existing tables instead of dropping them first.
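For a plain, uncompressed dump file the same clean-up can be done with a one-liner (the file name below is a placeholder):
sed -i '/^DROP TABLE IF EXISTS/d' dump_file.sql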
// Use sed to delete those sql statements before importing
# Create a temporary filename
uniq="/tmp/temp_"`date "+%s"`
for item in `ssh user@dbserver ls`;
do
echo -n "Importing $item..."
scp user@dbserver:$item $uniq
gunzip < $uniq | sed '/^DROP TABLE IF EXISTS/d' | mysql -f -u root $DB_SCHEMA_NAME
echo "..done"
done
# clean up
rm -f "$uniq"
_____
If you like to keep your DDL backed up in a source control tool such as svn or cvs, and want separate files for stored procedures, events, triggers and tables rather than one big dump, you can easily do so using the script below.
user=backup_user
password=`cat ~/.backup_password`
hostname=127.0.0.1
port=3306
dbname=test_db
path=/home/mysql/ddl
date=`date +%Y%m%d`
mysqldump -u$user -p$password -h$hostname -P$port --no-create-info --no-data --no-create-db --skip-opt $dbname > "$path"/"$dbname"_triggers_"$date".sql
mysqldump -u$user -p$password -h$hostname -P$port --routines --skip-triggers --no-create-info --no-data --no-create-db --skip-opt $dbname > "$path"/"$dbname"_routines_"$date".sql
mysqldump -u$user -p$password -h$hostname -P$port --events --skip-triggers --no-create-info --no-data --no-create-db --skip-opt $dbname > "$path"/"$dbname"_events_"$date".sql
mysqldump -u$user -p$password -h$hostname -P$port --skip-triggers --no-data --no-create-db --skip-opt $dbname > "$path"/"$dbname"_tables_"$date".sql
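If $path is already an svn working copy, a rough follow-up to check the new files in could look like this (the commit message is just an example):
cd "$path"
svn add --force .
svn commit -m "ddl backup $date"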
_____
Copy data from one server to another using netcat.
Sending server (listens on port 1234):
mysqldump db_name tbl_name | bzip2 -c | nc -l 1234
Receiving server (connects to the sender, here 10.10.10.14):
nc 10.10.10.14 1234 | bzip2 -cd | mysql -uroot -proot@123 db_name