
MySQL Import Data

Overview

Importing data into MySQL is a routine part of data migration, bulk loading, and database initialization. MySQL provides several efficient methods for importing data in a variety of formats.

Import Methods

  • LOAD DATA INFILE: Fast bulk import
  • Source Command: Execute SQL files
  • MySQL Client: Interactive import
  • MySQL Workbench: GUI import
  • Programming APIs: Application-level import

LOAD DATA INFILE

Basic Usage

sql
-- Simple CSV import
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

-- Import a file from the client machine (requires local_infile enabled on server and client)
LOAD DATA LOCAL INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"';

Import Options

sql
-- Import with column mapping
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
(id, name, email, created_at);

-- Skip header row
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES;

-- Replace existing data
LOAD DATA INFILE '/tmp/users.csv'
REPLACE
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

-- Ignore duplicates
LOAD DATA INFILE '/tmp/users.csv'
IGNORE
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

Advanced Import Options

sql
-- Import with character set
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
CHARACTER SET utf8mb4
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

-- Import with null handling
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
(id, name, email, @phone)
SET phone = NULLIF(@phone, '');

-- Skip the header row and load specific columns
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(id, name, email);
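
The same user-variable technique shown above extends to other per-column transformations, such as normalizing case or parsing dates that are not in MySQL's native format. A minimal sketch, assuming the file stores created_at as MM/DD/YYYY:

sql
-- Read raw values into user variables, then transform them in the SET clause
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(id, name, @email, @created_at)
SET email = LOWER(TRIM(@email)),
    created_at = STR_TO_DATE(@created_at, '%m/%d/%Y');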

Source Command

Execute SQL Files

sql
-- Run inside the mysql client: execute a SQL file
SOURCE /path/to/backup.sql;

-- Short form of SOURCE
\. /path/to/backup.sql

-- Relative paths are resolved against the client's working directory
SOURCE backup.sql;

-- From the shell (not inside mysql), redirect the file instead:
--   mysql -u root -p database_name < backup.sql

Import Multiple Files

bash
# Import sequentially from the shell
mysql -u root -p database_name < schema.sql
mysql -u root -p database_name < data.sql
mysql -u root -p database_name < indexes.sql

# Or, from inside the mysql client:
#   SOURCE schema.sql;
#   SOURCE data.sql;
#   SOURCE indexes.sql;

MySQL Client Import

Command Line Import

bash
# Import a single database
mysql -u root -p database_name < backup.sql

# Import a dump that contains CREATE DATABASE/USE statements (e.g. from --all-databases)
mysql -u root -p < all_databases.sql

# Import a compressed dump without unpacking it first
gunzip < backup.sql.gz | mysql -u root -p database_name

# Import as a specific user (-p prompts for the password)
mysql -u app_user -p database_name < backup.sql

Batch Import

bash
# Import all SQL files in a directory
# (-p prompts for the password on every iteration; see the option-file sketch below)
for file in /path/to/sql_files/*.sql; do
    mysql -u root -p database_name < "$file"
done

# Import with logging
mysql -u root -p database_name < backup.sql 2>&1 | tee import.log
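
To avoid re-entering the password for every file in a batch, credentials can be read from an option file instead. A minimal sketch; the file path is illustrative and the file should be readable only by the importing user:

bash
# /home/user/.import.cnf (chmod 600) contains:
#   [client]
#   user=root
#   password=your_password

for file in /path/to/sql_files/*.sql; do
    mysql --defaults-extra-file=/home/user/.import.cnf database_name < "$file"
done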

Import Formats

CSV Import

sql
-- Standard CSV import
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES;

-- CSV with specific columns
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(id, name, email, created_at);

-- CSV with a different field delimiter (e.g. semicolon-separated exports)
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ';'
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

TSV Import

sql
-- Tab-separated values
LOAD DATA INFILE '/tmp/users.tsv'
INTO TABLE users
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\n';

JSON Import

sql
-- MySQL has no built-in function that reads a JSON file directly.
-- A common approach is to load a newline-delimited JSON file (one compact
-- object per line, with no literal tab characters) into a staging table
-- with a single JSON column, then extract fields with the JSON functions
-- (available since MySQL 5.7).
CREATE TABLE users_json_staging (doc JSON);

LOAD DATA INFILE '/tmp/users.json'
INTO TABLE users_json_staging
FIELDS TERMINATED BY '\t' ESCAPED BY ''
LINES TERMINATED BY '\n'
(doc);

-- Extract fields into the target table
INSERT INTO users (name, email)
SELECT
    JSON_UNQUOTE(JSON_EXTRACT(doc, '$.name')),
    JSON_UNQUOTE(JSON_EXTRACT(doc, '$.email'))
FROM users_json_staging;

-- Drop the staging table once the data has been verified
-- DROP TABLE users_json_staging;

Large Data Import

Chunked Import

sql
-- Split large CSV into chunks
-- Chunk 1
LOAD DATA INFILE '/tmp/users_part1.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
LINES TERMINATED BY '\n'
IGNORE 1 LINES;

-- Chunk 2
LOAD DATA INFILE '/tmp/users_part2.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
LINES TERMINATED BY '\n'
IGNORE 1 LINES;

-- Repeat for all chunks
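
The chunk files themselves can be produced with standard shell tools on Linux; a sketch assuming users.csv has a single header row (the resulting chunks contain no header, so IGNORE 1 LINES can be dropped when loading them):

bash
# Skip the header, then split the data rows into chunks of ~1,000,000 lines
tail -n +2 /tmp/users.csv | split -l 1000000 -d - /tmp/users_part_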

Performance Optimization

sql
-- Disable non-unique index updates during import
-- (MyISAM tables only; InnoDB ignores DISABLE KEYS)
ALTER TABLE users DISABLE KEYS;

-- Import data
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

-- Rebuild the indexes
ALTER TABLE users ENABLE KEYS;

-- For InnoDB, skipping unique checks during a trusted, duplicate-free import
-- can help instead (re-enable afterwards)
SET unique_checks = 0;

LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
(id, name, email);

SET unique_checks = 1;

Bulk Import Options

sql
-- Increase the bulk insert buffer (benefits MyISAM bulk loads; use SESSION so
-- it applies to the current connection)
SET SESSION bulk_insert_buffer_size = 256 * 1024 * 1024;

-- Import with transactions
SET autocommit = 0;

-- Import data
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

-- Commit
COMMIT;

-- Reset autocommit
SET autocommit = 1;

Import Error Handling

Error Logging

bash
# Import with error logging
mysql -u root -p database_name < backup.sql 2>&1 | tee import.log

# Check for errors
grep -i error import.log

# After a LOAD DATA statement, check warnings from inside the mysql client:
#   SHOW WARNINGS;

Common Errors

sql
-- File not found
-- Error: Can't get stat of file
-- Solution: check the file path, file permissions, and secure_file_priv

-- Access denied
-- Error: Access denied for user
-- Solution: the account needs the FILE privilege for server-side LOAD DATA INFILE
-- (or enable local_infile and use LOAD DATA LOCAL INFILE)

-- Syntax error
-- Error in SQL syntax
-- Solution: check the CSV format, delimiters, and quoting

-- Data truncation
-- Warning: Data truncated for column
-- Solution: check column data types and lengths against the source data
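
For the file-not-found and access-denied cases, the usual suspects are the secure_file_priv setting and the FILE privilege; a quick check (the account name is illustrative):

sql
-- Directory the server may read files from (empty = anywhere, NULL = disabled)
SHOW VARIABLES LIKE 'secure_file_priv';

-- Verify the importing account's privileges
SHOW GRANTS FOR 'app_user'@'localhost';

-- Grant FILE for server-side LOAD DATA INFILE if needed (it is a global privilege)
GRANT FILE ON *.* TO 'app_user'@'localhost';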

Validation

sql
-- Verify import
SELECT COUNT(*) AS imported_count
FROM users;

-- Compare with source
SELECT COUNT(*) AS source_count
FROM source_table;

-- Check for NULLs
SELECT COUNT(*) AS null_count
FROM users
WHERE name IS NULL;

-- Check data integrity
SELECT * FROM users
WHERE email IS NULL OR email = '';
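
When the source table is available on the same server, CHECKSUM TABLE gives a quick content comparison on top of the row counts:

sql
-- Differing checksums indicate the tables do not hold the same rows
CHECKSUM TABLE users, source_table;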

Import in a Replication Setup

Master Import

bash
# Import to the source (master) and point the replica at it
# Step 1: On the replica, stop replication:
#   STOP SLAVE;

# Step 2: Import the data on the master
mysql -u root -p master_db < backup.sql

# Step 3: On the replica, point replication at the master's binary log position
#   (taken from SHOW MASTER STATUS on the master):
#   CHANGE MASTER TO
#       MASTER_HOST='master_server',
#       MASTER_USER='replication_user',
#       MASTER_PASSWORD='password',
#       MASTER_LOG_FILE='mysql-bin.000001',
#       MASTER_LOG_POS=123;

# Step 4: On the replica, restart replication:
#   START SLAVE;

Import Best Practices

Preparation

sql
-- Back up the existing data first (run mysqldump from the shell, not inside mysql):
--   mysqldump -u root -p database_name > pre_import_backup.sql

-- Validate the source data before importing:
-- check character encoding, delimiters, quoting, and expected row counts

-- Clean the target table, or load into a copy and rename it afterwards
TRUNCATE TABLE users;
-- Or: CREATE TABLE users_new LIKE users; (see the rename sketch below)
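
The copy-and-rename variant keeps the live table untouched until the imported data has been verified; a minimal sketch:

sql
-- Load into a copy, then swap it in (RENAME TABLE swaps both names in one step)
CREATE TABLE users_new LIKE users;
-- ... import into users_new here ...
RENAME TABLE users TO users_old, users_new TO users;
-- Drop users_old once the import has been verified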

Security

bash
# Import as a limited user rather than root where possible
mysql -u app_user -p database_name < backup.sql

# Inspect the source file for destructive statements before importing
grep -i "DROP TABLE\|DROP DATABASE" backup.sql

# For application-level imports, use prepared statements / parameterized queries

Performance

sql
-- Relax durability and enlarge the buffer pool for the duration of the import
-- (innodb_flush_log_at_trx_commit = 0 can lose up to a second of transactions
-- on a crash; restore it afterwards)
SET GLOBAL innodb_flush_log_at_trx_commit = 0;
SET GLOBAL innodb_buffer_pool_size = 1024 * 1024 * 1024;  -- 1 GB

-- Disable foreign key checks
SET FOREIGN_KEY_CHECKS = 0;

-- Import data
LOAD DATA INFILE '/tmp/users.csv'
INTO TABLE users
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n';

-- Re-enable checks and restore durability
SET FOREIGN_KEY_CHECKS = 1;
SET GLOBAL innodb_flush_log_at_trx_commit = 1;

Summary

MySQL data import methods include:

  • LOAD DATA INFILE: Fast bulk loading
  • Source Command: Execute SQL files
  • Multiple Formats: SQL, CSV, TSV, JSON
  • Large Data: Chunked import for optimization
  • Error Handling: Validation and logging
  • Performance: Optimized settings

Choose the appropriate method based on data size, format, and performance requirements.

