MySQL Clone Tables
Overview
Creating a copy of an existing table is useful for backup, testing, or data migration. MySQL provides multiple ways to clone tables including structure, data, or both.
Clone Methods
- CREATE TABLE AS: Create from SELECT query
- CREATE TABLE LIKE: Copy structure only
- INSERT SELECT: Copy data to existing table
- mysqldump: Full table export
CREATE TABLE ... AS SELECT
Copy Structure and Data
sql
-- Copy entire table
CREATE TABLE users_copy AS SELECT * FROM users;
-- Copy with WHERE clause
CREATE TABLE active_users AS
SELECT * FROM users WHERE status = 'active';
-- Copy specific columns
CREATE TABLE user_emails AS
SELECT id, name, email FROM users;
-- Copy with expressions
CREATE TABLE user_summary AS
SELECT
u.id,
u.name,
COUNT(o.order_id) AS order_count,
SUM(o.order_total) AS total_spent
FROM users u
LEFT JOIN orders o ON u.id = o.user_id
GROUP BY u.id, u.name;
Copy with New Columns
sql
-- Copy and add calculated column
CREATE TABLE users_with_status AS
SELECT
id,
name,
email,
CASE
WHEN created_at > '2023-01-01' THEN 'new'
ELSE 'old'
END AS user_type,
NOW() AS copied_at
FROM users;
CREATE TABLE ... LIKE
Copy Structure Only
sql
-- Copy structure without data
CREATE TABLE users_backup LIKE users;
-- Verify structure
DESCRIBE users_backup;
-- Has same columns, types, indexes as users
-- Add data later
INSERT INTO users_backup SELECT * FROM users;
Copy with Specific Options
sql
-- Copy structure and modify
CREATE TABLE users_copy LIKE users;
ALTER TABLE users_copy
ADD COLUMN copied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP;
INSERT ... SELECT
Copy Data to Existing Table
sql
-- Create empty table with same structure
CREATE TABLE users_archive (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(100),
email VARCHAR(255),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Copy data
INSERT INTO users_archive (id, name, email, created_at)
SELECT id, name, email, created_at
FROM users
WHERE created_at < '2023-01-01';
-- Copy with transformation
INSERT INTO users_archive (name, email, created_at)
SELECT
UPPER(name) AS name,
email,
NOW() AS created_at
FROM users;
Bulk Copy
sql
-- Copy multiple tables
INSERT INTO customers_archive SELECT * FROM customers;
INSERT INTO orders_archive SELECT * FROM orders;
INSERT INTO products_archive SELECT * FROM products;
-- Copy with conditions
INSERT INTO completed_orders
SELECT * FROM orders WHERE status = 'completed';
INSERT INTO pending_orders
SELECT * FROM orders WHERE status = 'pending';
Complete Clone Workflow
Step 1: Create Structure
sql
-- Method 1: LIKE (exact structure copy)
CREATE TABLE orders_backup LIKE orders;
-- Method 2: explicit CREATE TABLE (customize structure; drop the table created in Method 1 first, since both examples use the same name)
CREATE TABLE orders_backup (
id INT PRIMARY KEY AUTO_INCREMENT,
order_number VARCHAR(50),
customer_id INT,
order_date DATE,
total DECIMAL(10,2),
INDEX idx_backup_customer (customer_id),
INDEX idx_backup_date (order_date)
);
Step 2: Copy Data
sql
-- Copy all data
INSERT INTO orders_backup
SELECT * FROM orders;
-- Copy with filter
INSERT INTO orders_backup
SELECT * FROM orders
WHERE order_date >= '2024-01-01';
-- Copy with transformation
INSERT INTO orders_backup (
order_number, customer_id, order_date, total
)
SELECT
order_number,
customer_id,
order_date,
total * 1.1 AS total
FROM orders;
Step 3: Verify
sql
-- Compare row counts
SELECT
'original' AS table_type,
COUNT(*) AS row_count
FROM orders
UNION ALL
SELECT
'backup' AS table_type,
COUNT(*) AS row_count
FROM orders_backup;
-- Compare data samples
SELECT
o.id,
o.order_number,
o.total AS original_total,
b.total AS backup_total
FROM orders o
JOIN orders_backup b ON o.id = b.id
LIMIT 10;
Clone with mysqldump
Export Table
bash
# Export single table
mysqldump -u root -p database_name table_name > table_backup.sql
# Export with data only
mysqldump -u root -p --no-create-info database_name table_name > data_only.sql
# Export with structure only
mysqldump -u root -p --no-data database_name table_name > structure_only.sql
# Export with specific conditions
mysqldump -u root -p --where="created_at < '2024-01-01'" \
database_name table_name > archive_data.sql
Import Table
bash
# Import to new table
mysql -u root -p database_name < table_backup.sql
# Import to different database
mysql -u root -p new_database < table_backup.sql
# Import with table rename
sed 's/table_name/table_backup/g' table_backup.sql | \
mysql -u root -p database_name
Clone Across Databases
Copy to Another Database
sql
-- Method 1: CREATE TABLE AS
CREATE DATABASE backup_db;
USE backup_db;
CREATE TABLE users AS SELECT * FROM main_db.users;
-- Method 2: Cross-database INSERT
CREATE TABLE backup_db.users (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(100),
email VARCHAR(255)
);
INSERT INTO backup_db.users (name, email)
SELECT name, email FROM main_db.users;
Clone with mysqldump
bash
# Export from source database
mysqldump -u root -p source_db table_name > table.sql
# Import to target database
mysql -u root -p target_db < table.sql
# Or single command
mysqldump -u root -p source_db table_name | \
mysql -u root -p target_db
Clone with Constraints
Copy with Primary Key
sql
-- Copy and reset the auto-increment counter
-- (do NOT drop AUTO_INCREMENT with MODIFY COLUMN id INT: every inserted row
-- would then default to id = 0 and the second row would hit a duplicate-key error)
CREATE TABLE orders_backup LIKE orders;
ALTER TABLE orders_backup AUTO_INCREMENT = 1;
-- Insert with new IDs
INSERT INTO orders_backup (
order_number, customer_id, order_date, total
)
SELECT
order_number,
customer_id,
order_date,
total
FROM orders;
Copy with Foreign Keys
sql
-- Copy parent and child tables
CREATE TABLE customers_backup LIKE customers;
INSERT INTO customers_backup SELECT * FROM customers;
CREATE TABLE orders_backup LIKE orders;
ALTER TABLE orders_backup
DROP FOREIGN KEY orders_ibfk_1;
-- Copy data
INSERT INTO orders_backup
SELECT * FROM orders;
-- Re-add foreign key to backup
ALTER TABLE orders_backup
ADD CONSTRAINT fk_backup_customer
FOREIGN KEY (customer_id) REFERENCES customers_backup(id);
Clone Performance
Optimizing Large Table Copies
sql
-- Method 1: Copy in batches using id ranges
CREATE TABLE large_table_copy AS
SELECT * FROM large_table
WHERE id <= 100000;
-- Continue in batches
INSERT INTO large_table_copy
SELECT * FROM large_table
WHERE id > 100000 AND id <= 200000;
-- Method 2: Disable indexes during copy (DISABLE KEYS affects only MyISAM non-unique indexes; it is a no-op on InnoDB tables)
CREATE TABLE large_table_copy LIKE large_table;
ALTER TABLE large_table_copy DISABLE KEYS;
INSERT INTO large_table_copy SELECT * FROM large_table;
ALTER TABLE large_table_copy ENABLE KEYS;
-- Method 3: Use LOAD DATA INFILE
-- Export to CSV
SELECT * FROM large_table
INTO OUTFILE '/tmp/large_table.csv'
FIELDS TERMINATED BY ','
ENCLOSED BY '"';
-- Import to new table
CREATE TABLE large_table_copy LIKE large_table;
LOAD DATA INFILE '/tmp/large_table.csv'
INTO TABLE large_table_copy
FIELDS TERMINATED BY ','
ENCLOSED BY '"';
-- (no IGNORE 1 ROWS here: SELECT ... INTO OUTFILE writes no header row,
-- so skipping a line would silently drop the first data row)
Monitoring Copy Progress
sql
-- Check progress with row counts
SELECT
'source' AS table_type,
COUNT(*) AS row_count
FROM large_table
UNION ALL
SELECT
'destination' AS table_type,
COUNT(*) AS row_count
FROM large_table_copy;
-- Calculate percentage
SELECT
dest.row_count * 100.0 / src.row_count AS progress_percent
FROM (
SELECT COUNT(*) AS row_count
FROM large_table
) src
CROSS JOIN (
SELECT COUNT(*) AS row_count
FROM large_table_copy
) dest;
Clone Scenarios
Scenario 1: Backup Before Changes
sql
-- Create backup before major changes
CREATE TABLE users_pre_migration_backup AS
SELECT * FROM users;
-- Perform changes
ALTER TABLE users ADD COLUMN new_column VARCHAR(100);
UPDATE users SET new_column = 'default' WHERE new_column IS NULL;
-- Rollback if needed
DROP TABLE users;
RENAME TABLE users_pre_migration_backup TO users;
Scenario 2: Archive Old Data
sql
-- Create archive table
CREATE TABLE orders_archive (
id INT PRIMARY KEY AUTO_INCREMENT,
order_number VARCHAR(50),
customer_id INT,
order_date DATE,
total DECIMAL(10,2),
archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Move old orders (use NOW() for archived_at: inserting an explicit NULL
-- bypasses the column's DEFAULT CURRENT_TIMESTAMP in MySQL 8)
INSERT INTO orders_archive
SELECT *, NOW() AS archived_at
FROM orders
WHERE order_date < DATE_SUB(NOW(), INTERVAL 1 YEAR);
-- Delete from main table
DELETE FROM orders
WHERE order_date < DATE_SUB(NOW(), INTERVAL 1 YEAR);
Scenario 3: Create Test Environment
sql
-- Create test database
CREATE DATABASE test_env;
-- Copy production tables
CREATE TABLE test_env.users AS SELECT * FROM prod.users;
CREATE TABLE test_env.orders AS SELECT * FROM prod.orders;
CREATE TABLE test_env.products AS SELECT * FROM prod.products;
-- Anonymize sensitive data
UPDATE test_env.users
SET email = CONCAT('user', id, '@example.com'),
phone = '555-0000',
address = 'Test Address';
Scenario 4: Data Migration
sql
-- Create new schema tables
CREATE TABLE new_schema.users LIKE old_schema.users;
ALTER TABLE new_schema.users
DROP PRIMARY KEY,
ADD COLUMN new_id INT PRIMARY KEY AUTO_INCREMENT;
-- Migrate data with transformations
INSERT INTO new_schema.users (
name, email, phone, created_at
)
SELECT
name,
email,
phone,
created_at
FROM old_schema.users;
-- Verify migration (SCHEMA is a reserved word in MySQL, so use schema_name as the alias)
SELECT
'old' AS schema_name,
COUNT(*) AS row_count
FROM old_schema.users
UNION ALL
SELECT
'new' AS schema_name,
COUNT(*) AS row_count
FROM new_schema.users;
Clone with mysqldump Advanced
Partial Table Clone
bash
# Export specific rows (mysqldump always exports all columns; --where filters rows)
mysqldump -u root -p --no-create-info \
--skip-add-locks database_name table_name \
--where="id BETWEEN 1 AND 1000" > partial_data.sql
# Export with row limit
mysqldump -u root -p --where="1=1 LIMIT 1000" \
database_name table_name > sample_data.sql
Clone to Different Server
bash
# Export from source (host must be passed with -h, otherwise it is parsed as the database name)
mysqldump -u source_user -p -h source_host database_name > dump.sql
# Import to target
mysql -u target_user -p -h target_host target_database < dump.sql
# Or single command
mysqldump -u source_user -p -h source_host database_name | \
mysql -u target_user -p -h target_host target_database
Summary
Cloning tables in MySQL provides:
- Backup: Create copies for safety
- Testing: Clone for development
- Migration: Move data between schemas
- Archive: Move old data to archive
- Multiple Methods: CREATE TABLE AS, LIKE, mysqldump
Choose appropriate method based on your needs: full copy, structure only, or data only with transformations.
Previous: Temporary Tables
Next: Metadata