
SQL Loader

• How SQL*Loader works

SQL*Loader (sqlldr) reads records from a data file, maps them onto table columns as described in a control file, and loads them into the database; a log of the run is written, and rejected or discarded records are routed to the bad and discard files.

• sqlldr command-line options

• userid – database username/password
• control – name of the control file
• log – the log of the run is written to this file
• bad – records rejected by Oracle (e.g. because of a constraint violation) are written to this file
• data – file containing the data to be loaded
• discard – records discarded because of the WHEN clause are written to this file
• skip=n – skips the first n header records of the data file and loads the remaining ones
• errors=n – stops the load once n records have errored out
• rows=n – commits after every n rows (use a larger number for better performance); conventional load only
• bindsize=n – size of the bind array, in bytes; conventional load only. The larger the size, the more rows fit in each array insert
• direct=true – direct path load: no insert statements or bind array are built, the buffer cache is bypassed, and data is written above the HWM. Constraints (PK and FK) are disabled and triggers are not fired during the load, and the table and its indexes are locked while data is being loaded. Constraints can be left unvalidated if the loaded data does not satisfy them, e.g. when a duplicate key is loaded into a PK column (see the sketch after this list)
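
For example, if a direct path load has left a primary key constraint unvalidated because duplicate keys were loaded, the constraint can be checked and re-validated afterwards. A minimal sketch, assuming an EMP table with a constraint named EMP_PK and the standard EXCEPTIONS table created by utlexcpt.sql:

-- see which constraints the load left disabled or unvalidated
SELECT constraint_name, status, validated
FROM   user_constraints
WHERE  table_name = 'EMP';

-- attempt to re-validate; rows that violate the constraint are recorded in EXCEPTIONS
ALTER TABLE emp
  ENABLE VALIDATE CONSTRAINT emp_pk
  EXCEPTIONS INTO exceptions;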

• How to build a control file


• Delimited file
10001,"Scott Tiger", 1000, 40
10002,"Frank Naude", 500, 20

load data
append
into table emp
fields terminated by "," optionally enclosed by '"'
( empno, empname, sal, deptno )
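
For a quick self-contained test, the same control file can carry its own data using INFILE * and BEGINDATA (a sketch reusing the rows shown above); with the data embedded, only userid and control need to be passed to sqlldr:

load data
infile *
append
into table emp
fields terminated by "," optionally enclosed by '"'
( empno, empname, sal, deptno )
begindata
10001,"Scott Tiger", 1000, 40
10002,"Frank Naude", 500, 20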

• Fixed-length file


10001 Scott Tiger 1000 40
10002 Frank Naude 500 20

load data
-- TRUNCATE could be used instead of REPLACE to improve performance
replace
into table departments
( dept     position (02:05) char(4),
  deptname position (08:27) char(20)
)
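
The control file above targets a DEPARTMENTS table rather than the emp-style rows shown earlier; a table matching the declared positions might look like this (a sketch, column names taken from the control file, lengths are assumptions):

CREATE TABLE departments (
  dept     CHAR(4),
  deptname VARCHAR2(20)
);
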
• How to load

sqlldr userid=shekhar/shekhar control=emp.ctl data=emp.dat log=emp.log bad=emp.bad discard=emp.dsc direct=true errors=1000 skip=1
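
The same options can also be kept in a parameter file and passed with parfile, which keeps the command line short; a sketch (emp.par is a hypothetical file name):

userid=shekhar/shekhar
control=emp.ctl
data=emp.dat
log=emp.log
bad=emp.bad
direct=true
errors=1000
skip=1

sqlldr parfile=emp.par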

• How can you skip header records

sqlldr userid=ora_id/ora_passwd control=control_file_name.ctl skip=4

• How can you change/transform data while loading


LOAD DATA
Append
INTO TABLE modified_data
( rec_no "my_db_sequence.nextval",
region CONSTANT '31',
time_loaded "to_char(SYSDATE, 'HH24:MI')",
data1 POSITION(1:5) ":data1/100",
data2 POSITION(6:15) "upper(:data2)",
data3 POSITION(16:22) "to_date(:data3, 'YYMMDD')"
)

LOAD DATA
APPEND
INTO TABLE mailing_list
FIELDS TERMINATED BY ","
( addr,
city,
state,
zipcode,
mailing_addr "decode(:mailing_addr, null, :addr, :mailing_addr)",
mailing_city "decode(:mailing_city, null, :city, :mailing_city)",
mailing_state
)

• How to selectively load data

LOAD DATA
APPEND
INTO TABLE my_selective_table
WHEN (01) <> 'H' and (01) <> 'T' and (30:37) = '20031217'
(
region CONSTANT '31',
service_key POSITION(01:11) INTEGER EXTERNAL,
call_b_no POSITION(12:29) CHAR
)

• How to use OR in the WHEN clause

The WHEN clause only allows conditions to be combined with AND, so an OR is simulated by repeating the INTO TABLE clause:

LOAD DATA
APPEND
INTO TABLE my_selective_table
WHEN (01) <> 'H' and (01) <> 'T'
(
region CONSTANT '31',
service_key POSITION(01:11) INTEGER EXTERNAL,
call_b_no POSITION(12:29) CHAR
)
INTO TABLE my_selective_table
WHEN (30:37) = '20031217'
(
region CONSTANT '31',
service_key POSITION(01:11) INTEGER EXTERNAL,
call_b_no POSITION(12:29) CHAR
)
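
Records that satisfy none of the WHEN clauses are written to the discard file, so it is worth naming one explicitly on the command line; a sketch (file names are hypothetical):

sqlldr userid=shekhar/shekhar control=selective.ctl data=calls.dat log=calls.log discard=calls.dsc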

• How to load image files into a LOB column

CREATE TABLE image_table (
image_id NUMBER(5),
file_name VARCHAR2(30),
image_data BLOB);

LOAD DATA
INFILE *
INTO TABLE image_table
REPLACE
FIELDS TERMINATED BY ','
(
image_id INTEGER(5),
file_name CHAR(30),
image_data LOBFILE (file_name) TERMINATED BY EOF
)
BEGINDATA
001,image1.gif
002,image2.jpg
003,image3.jpg
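
Assuming the image files sit in the directory the load is run from, a quick way to confirm the BLOBs arrived is to check their lengths (a sketch):

SELECT image_id, file_name, DBMS_LOB.GETLENGTH(image_data) AS image_bytes
FROM   image_table
ORDER  BY image_id;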

• How to load data from one file into two tables


LOAD DATA
INFILE *
INTO TABLE tab1 WHEN tab = 'tab1'
( tab FILLER CHAR(4),
col1 INTEGER
)
INTO TABLE tab2 WHEN tab = 'tab2'
( tab FILLER POSITION(1:4),
col1 INTEGER
)
BEGINDATA
tab1|1
tab1|2
tab2|2
tab3|3

• How to increase performance


• Use direct path loads instead of conventional loads.
• Disable indexes and constraints before the load (conventional loads only).
• Use a larger bind array (conventional loads only).
• Use rows=n to commit less frequently (conventional loads only).
• Use UNRECOVERABLE: the load generates no redo entries, so the loaded data cannot be recovered from the redo logs; take a backup after loading. UNRECOVERABLE applies only to direct path loads.
Example (UNRECOVERABLE direct path load)
CREATE TABLE customers
( cust_id NUMBER
, cust_first_name VARCHAR2(20) NOT NULL
, cust_last_name VARCHAR2(40) NOT NULL
, cust_gender CHAR(1)
, cust_year_of_birth NUMBER(4)
, cust_marital_status VARCHAR2(20)
, cust_street_address VARCHAR2(40) NOT NULL
, cust_postal_code VARCHAR2(10) NOT NULL
, cust_city VARCHAR2(30) NOT NULL
, cust_state_province VARCHAR2(40)
, country_id CHAR(2) NOT NULL
, cust_main_phone_number VARCHAR2(25)
, cust_income_level VARCHAR2(30)
, cust_credit_limit NUMBER
, cust_email VARCHAR2(30)
)
PCTFREE 5;

CREATE UNIQUE INDEX customers_uk
ON customers (cust_id);

Control file
UNRECOVERABLE
LOAD DATA
TRUNCATE
INTO TABLE customers
FIELDS TERMINATED BY "|"
( CUST_ID,
CUST_FIRST_NAME,
CUST_LAST_NAME,
CUST_GENDER,
CUST_YEAR_OF_BIRTH,
CUST_MARITAL_STATUS,
CUST_STREET_ADDRESS,
CUST_POSTAL_CODE,
CUST_CITY,
CUST_STATE_PROVINCE,
COUNTRY_ID,
CUST_MAIN_PHONE_NUMBER,
CUST_INCOME_LEVEL,
CUST_CREDIT_LIMIT,
CUST_EMAIL)

Data

10|Abigail|Kessel|M|1946||7 South 3rd Circle|30828|Downham Market|England - Norfolk|UK|127-379-8954|G: 130,000 - 149,999|9000|Kessel@company.com
20|Abner|Everett|M|1963|married|7 South 5th Circle|58488|Dolores|CO|US|570-248-9913|K: 250,000 - 299,999|7000|Everett@company.com
30|Abraham|Odenwalld|M|1951|single|7 South 42nd Circle|58488|Dolores|CO|US|234-540-5189|I: 170,000 - 189,999|9000|Odenwalld@company.com
40|Absolom|Sampson|M|1941||7 South Accomack Circle|77501|Lowndesville|SC|US|153-771-9447|G: 130,000 - 149,999|15000|Sampson@company.com
50|Ada|Nenninger|F|1972|married|7 South Adair Circle|39094|Gif-sur-Yvette|Ile-de-France|FR|662-644-2601|B: 30,000 - 49,999|1500|Nenninger@company.com
60|Adel|Rhodes|F|1953|single|7 South Adams Circle|30828|Downham Market|England - Norfolk|UK|503-526-1044|I: 170,000 - 189,999|3000|Rhodes@company.com
70|Angie|Riffken|F|1955||7 South Aguadilla Circle|82323|Springhill|MN|US|538-590-1545|D: 70,000 - 89,999|7000|Riffken@company.com
80|Angela|Wiley|F|1983|married|7 South Aibonito Circle|86319|Salamanca|Salamanca|ES|213-595-7978|H: 150,000 - 169,999|7000|Wiley@company.com
90|Anand|Hanes|M|1972|single|7 South Aiken Circle|82323|Springhill|MN|US|348-712-4192|H: 150,000 - 169,999|9000|Hanes@company.com
100|Anne|Koch|F|1957||7 South Airway Circle|86319|Salamanca|Salamanca|ES|680-327-1419|I: 170,000 - 189,999|10000|Koch@company.com
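
UNRECOVERABLE is honoured only by direct path loads, so the control file above would typically be run with direct=true; a sketch (file names are assumptions):

sqlldr userid=shekhar/shekhar control=customers.ctl data=customers.dat log=customers.log direct=true
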
• Another way of loading data from a file into a table

Load Data using UTL_FILE


CREATE OR REPLACE PROCEDURE load_data AS

v_line VARCHAR2(2000); -- Data line read from input file
v_file UTL_FILE.FILE_TYPE; -- Data file handle
v_dir VARCHAR2(250); -- Directory containing the data file
v_filename VARCHAR2(50); -- Data filename
v_1st_Comma NUMBER;
v_2nd_Comma NUMBER;
v_deptno NUMBER;
v_dname VARCHAR2(14);
v_loc VARCHAR2(13);

BEGIN
v_dir := '/u01/app/oracle/common/out';
v_filename := 'dept01.csv';
v_file := UTL_FILE.FOPEN(v_dir, v_filename, 'r');
-- --------------------------------------------------------
-- Loop over the file, reading in each line. GET_LINE will
-- raise NO_DATA_FOUND when it is done, so we use that as
-- the exit condition for the loop.
-- --------------------------------------------------------
LOOP
BEGIN
UTL_FILE.GET_LINE(v_file, v_line);
EXCEPTION
WHEN no_data_found THEN
exit;
END;

-- ----------------------------------------------------------
-- Each field in the input record is delimited by commas. We
-- need to find the locations of the two commas in the line,
-- and use these locations to get the fields from v_line.
-- ----------------------------------------------------------
v_1st_Comma := INSTR(v_line, ',' ,1 , 1);
v_2nd_Comma := INSTR(v_line, ',' ,1 , 2);

v_deptno := SUBSTR(v_line, 1, v_1st_Comma-1);
v_dname := SUBSTR(v_line, v_1st_Comma+1, v_2nd_Comma-v_1st_Comma-1);
v_loc := SUBSTR(v_line, v_2nd_Comma+1);

DBMS_OUTPUT.PUT_LINE(v_deptno || ' - ' || v_dname || ' - ' || v_loc);


-- ------------------------------------------
-- Insert the new record into the DEPT table.
-- ------------------------------------------
INSERT INTO dept
VALUES (v_deptno, UPPER(v_dname), UPPER(v_loc));

END LOOP;

UTL_FILE.FCLOSE(v_file);
COMMIT;

END;
/
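
A minimal way to exercise the procedure from SQL*Plus, assuming the directory /u01/app/oracle/common/out is accessible to UTL_FILE on the database server and dept01.csv exists there:

SET SERVEROUTPUT ON
EXEC load_data
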
SQL Unloader

• There is no such tool as a "SQL Unloader"; to unload data, use SQL*Plus spool or UTL_FILE.

• Use spool

set echo off newpage 0 space 0 pagesize 0 feed off head off trimspool on
spool oradata.txt
select col1 || ',' || col2 || ',' || col3
from tab1
where col2 = 'XYZ';
spool off
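
The spool approach is easy to script; assuming the settings, query, and spool commands above are saved in a file called unload.sql (ending with EXIT so the session terminates), it can be run non-interactively:

sqlplus -s scott/tiger @unload.sql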

• Use UTL_FILE

create or replace function dump_csv( p_query     in varchar2,
                                     p_separator in varchar2 default ',',
                                     p_dir       in varchar2,
                                     p_filename  in varchar2 )
return number
AUTHID CURRENT_USER
is
l_output utl_file.file_type;
l_theCursor integer default dbms_sql.open_cursor;
l_columnValue varchar2(2000);
l_status integer;
l_colCnt number default 0;
l_separator varchar2(10) default '';
l_cnt number default 0;
begin
l_output := utl_file.fopen( p_dir, p_filename, 'w' );

dbms_sql.parse( l_theCursor, p_query, dbms_sql.native );

for i in 1 .. 255 loop
begin
dbms_sql.define_column( l_theCursor, i, l_columnValue, 2000 );
l_colCnt := i;
exception
when others then
if ( sqlcode = -1007 ) then exit;
else
raise;
end if;
end;
end loop;

dbms_sql.define_column( l_theCursor, 1, l_columnValue, 2000 );

l_status := dbms_sql.execute(l_theCursor);

loop
exit when ( dbms_sql.fetch_rows(l_theCursor) <= 0 );
l_separator := '';
for i in 1 .. l_colCnt loop
dbms_sql.column_value( l_theCursor, i, l_columnValue );
utl_file.put( l_output, l_separator || l_columnValue );
l_separator := p_separator;
end loop;
utl_file.new_line( l_output );
l_cnt := l_cnt+1;
end loop;
dbms_sql.close_cursor(l_theCursor);

utl_file.fclose( l_output );
return l_cnt;
end dump_csv;
/

create or replace procedure test_dump_csv
as
l_rows number;
begin
l_rows := dump_csv( 'select * from all_users where rownum < 25',
                    ',', 'TMP', 'test.dat' );
end;
/

• Make sure to create a directory object and grant permissions


• CREATE DIRECTORY TMP AS '/tmp';
• GRANT READ ON DIRECTORY TMP TO username;
• GRANT WRITE ON DIRECTORY TMP TO username;
