HiveBrain v1.2.0
Get Started
← Back to all entries
snippet · sql · Minor

How do you load a data file into a specific field?

Submitted by: @import:stackexchange-dba
0
Viewed 0 times
field · file · how · you · into · load · data · specific

Problem

How can I load the data in a text file into a specific field? It's a 30-line log file from some copy operations; nothing fancy. I just want to store it as large text or a blob.

The examples I am able to find are all for loading a text file into a table.

Here's what my DB looks like:

+----+---------------------+---------------------+------+------------+------------------------------+
| id | LastStart           | LastFinish          | Log  | ExitStatus | TaskName                     |
+----+---------------------+---------------------+------+------------+------------------------------+
|  1 | 2012-06-26 10:41:17 | 2012-06-26 10:47:42 | NULL | NULL       | LouPrMgt004.Backup           |
|  2 | NULL                | NULL                | NULL | NULL       | LouPrMgt004.LoadPrtgDataToDb |
+----+---------------------+---------------------+------+------------+------------------------------+


So I just want to insert the text file "/root/copy.log" into the "Log" field.

Log files will all be different but here's one of them:

```
2012/06/26 08:35:53 [6952] building file list
2012/06/26 08:35:53 [6952] .d...p..... ./
2012/06/26 08:35:53 [6952] <f..t...... LouPrMgt004-backup-www.tar.gz
2012/06/26 08:35:53 [6952] <f..t...... MySQL_ServerInfo.sql
2012/06/26 08:35:53 [6952] <f..t...... MySQL_nmap_scan.sql
2012/06/26 08:35:53 [6952] <f..t...... packagelist.log
2012/06/26 08:35:53 [6952] <f.st...... root.tar.gz
2012/06/26 08:35:53 [6952] sent 60.14K bytes received 30.44K bytes 13.94K bytes/sec
2012/06/26 08:35:53 [6952] total size is 19.26M speedup is 212.63
2012/06/26 10:41:31 [8159] building file list
2012/06/26 10:41:31 [8159] .d...p..... ./
2012/06/26 10:41:31 [8159] <f.st...... LouPrMgt004-backup-www.tar.gz
2012/06/26 10:41:31 [8159] <f.st...... MySQL_ServerInfo.sql
2012/06/26 10:41:31 [8159] <f.st...... MySQL_nmap_scan.sql
2012/06/26 10:41:31 [8159] <f..t...... packagelist.log
2012/06/26 10:41:31 [8159] <f.st...... root.tar.gz
2012/06/26 10:41:32 [8159] sent 7.85M
```

Solution

Since you are looking to insert an entire file into a single column, you need the LOAD_FILE function:

UPDATE jobs SET Log = LOAD_FILE('/root/copy.log')
WHERE TaskName = 'LouPrMgt004.Backup';


So that you can see just how the LOAD_FILE function works, try this:

SET @mycnf = LOAD_FILE('/etc/my.cnf');
SELECT @mycnf;


You should be able to see your my.cnf loaded into a variable.

Here is a sample run

mysql> SET @mycnf = LOAD_FILE('/etc/my.cnf');
Query OK, 0 rows affected (0.00 sec)

mysql> select @mycnf\G
*************************** 1. row ***************************
@mycnf: [mysqld]
datadir=/var/lib/mysql
socket=/var/lib/mysql/mysql.sock
# Default to using old password format for compatibility with mysql 3.x
# clients (those using the mysqlclient10 compatibility package).
old_password=1

key_buffer_size=1G
max_allowed_packet=128M

# These two values must be the same
tmp_table_size=16M
max_heap_table_size=16M

# Sort_buffer is too large, so try default to see if it helps
sort_buffer_size = 2M

read_buffer_size=1M
read_rnd_buffer_size=2M
myisam_sort_buffer_size=16M
max_length_for_sort_data=2048
max_sort_length=2048
long-query-time=5
skip-name-resolve
table_cache = 6144
open_files_limit = 32768
interactive_timeout=3600
wait_timeout=3600
thread_cache = 100
max_connections=1000
query_cache_size = 0
# Try number of CPU's*2 for thread_concurrency
thread_concurrency = 8
ft_min_word_len=2
group_concat_max_len=5000000
query_cache_size=8M
query_cache_limit=32M

#log-output=TABLE
#slow-query-log
#slow-query-log_file=slow-query.log
#expire-logs-days=7

server-id=180
#log-bin=mysql-bin
#log-slave-updates
#relay-log=relay-bin

innodb_file_per_table
innodb_log_file_size=128M
innodb_buffer_pool_size=512M
innodb_log_buffer_size=8M
innodb_flush_method=O_DIRECT

innodb_read_io_threads=16
innodb_write_io_threads=16
innodb_io_capacity=5000
innodb_thread_concurrency=0

#
#       XtraDB Cluster Options
#

user=mysql
binlog_format=ROW
wsrep_provider=/usr/lib64/libgalera_smm.so
wsrep_cluster_address=gcomm://
wsrep_slave_threads=2
innodb_locks_unsafe_for_binlog=1
innodb_autoinc_lock_mode=2

wsrep_sst_method=xtrabackup
wsrep_cluster_name=oxygen_xtradb_cluster_test
wsrep_node_name=oxygen_xtradb_cluster_test_node180

[mysql.server]
user=mysql
#basedir=/var/lib

[mysqld_safe]
log-error=/var/log/mysqld.log
pid-file=/var/run/mysqld/mysqld.pid

1 row in set (0.00 sec)

mysql>


Give it a Try !!!

Be careful with this, because some have complained that it does not work. LOAD_FILE() silently returns NULL unless the MySQL user has the FILE privilege, the file is readable by the mysqld server process on the server host, its size is within max_allowed_packet, and (in later versions) its path is permitted by the secure_file_priv setting:

  • http://forums.mysql.com/read.php?20,218333,218333#msg-218333



  • http://bugs.mysql.com/bug.php?id=38403

Code Snippets

UPDATE jobs SET Log = LOAD_FILE('/root/copy.log')
WHERE TaskName = 'LouPrMgt004.Backup';
SET @mycnf = LOAD_FILE('/etc/my.cnf');
SELECT @mycnf;
mysql> SET @mycnf = LOAD_FILE('/etc/my.cnf');
Query OK, 0 rows affected (0.00 sec)

mysql> select @mycnf\G
*************************** 1. row ***************************
@mycnf: [mysqld]
datadir=/var/lib/mysql
socket=/var/lib/mysql/mysql.sock
# Default to using old password format for compatibility with mysql 3.x
# clients (those using the mysqlclient10 compatibility package).
old_password=1

key_buffer_size=1G
max_allowed_packet=128M

# These two values must be the same
tmp_table_size=16M
max_heap_table_size=16M

# Sort_buffer is too large, so try default to see if it helps
sort_buffer_size = 2M

read_buffer_size=1M
read_rnd_buffer_size=2M
myisam_sort_buffer_size=16M
max_length_for_sort_data=2048
max_sort_length=2048
long-query-time=5
skip-name-resolve
table_cache = 6144
open_files_limit = 32768
interactive_timeout=3600
wait_timeout=3600
thread_cache = 100
max_connections=1000
query_cache_size = 0
# Try number of CPU's*2 for thread_concurrency
thread_concurrency = 8
ft_min_word_len=2
group_concat_max_len=5000000
query_cache_size=8M
query_cache_limit=32M

#log-output=TABLE
#slow-query-log
#slow-query-log_file=slow-query.log
#expire-logs-days=7

server-id=180
#log-bin=mysql-bin
#log-slave-updates
#relay-log=relay-bin

innodb_file_per_table
innodb_log_file_size=128M
innodb_buffer_pool_size=512M
innodb_log_buffer_size=8M
innodb_flush_method=O_DIRECT

innodb_read_io_threads=16
innodb_write_io_threads=16
innodb_io_capacity=5000
innodb_thread_concurrency=0

#
#       XtraDB Cluster Options
#

user=mysql
binlog_format=ROW
wsrep_provider=/usr/lib64/libgalera_smm.so
wsrep_cluster_address=gcomm://
wsrep_slave_threads=2
innodb_locks_unsafe_for_binlog=1
innodb_autoinc_lock_mode=2

wsrep_sst_method=xtrabackup
wsrep_cluster_name=oxygen_xtradb_cluster_test
wsrep_node_name=oxygen_xtradb_cluster_test_node180

[mysql.server]
user=mysql
#basedir=/var/lib

[mysqld_safe]
log-error=/var/log/mysqld.log
pid-file=/var/run/mysqld/mysqld.pid


1 row in set (0.00 sec)

mysql>

Context

StackExchange Database Administrators Q#19960, answer score: 7

Revisions (0)

No revisions yet.