Extract data from flat file to a table - sql

I'm using Oracle database 9i.
I want to extract data from a flat file (.txt) and then insert it into a table (on the client) using something other than SQL*Loader, UTL_FILE, external tables, SQL Developer, or Toad.
Example of flat file source :
Allain Analyst 13456...
King manager 98768 ...

This extracts data from file_name.csv file and inserts it into EX_TABLE
-- External table: exposes the comma-delimited file file_name.csv
-- (in Oracle directory EXT_DIR) as EX_TABLE via the ORACLE_LOADER driver.
CREATE TABLE ex_table (
    col1 VARCHAR2(13 BYTE),
    col2 VARCHAR2(250 CHAR),
    col3 VARCHAR2(210 CHAR),
    col4 VARCHAR2(70 CHAR)
)
ORGANIZATION EXTERNAL (
    TYPE oracle_loader
    DEFAULT DIRECTORY ext_dir
    ACCESS PARAMETERS (
        RECORDS DELIMITED BY NEWLINE
        FIELDS TERMINATED BY ','
        MISSING FIELD VALUES ARE NULL
        REJECT ROWS WITH ALL NULL FIELDS (
            col1 CHAR(13),
            col2 CHAR(250),
            col3 CHAR(210),
            col4 CHAR(70)
        )
    )
    LOCATION ('file_name.csv')
)
REJECT LIMIT UNLIMITED;

-- External table loading fixed-position fields from STS.txt (Shift-JIS data).
-- Fixes vs. the original:
--   * the field list was missing the comma between COL3 and COL4,
--     which makes the access parameters fail to parse;
--   * RECORDS DELIMITED BY took a bare \n — the documented keyword is NEWLINE.
CREATE TABLE EX_TABLE
( COL1 varchar2(3 BYTE)
 ,COL2 varchar2(3 CHAR)
 ,COL3 varchar2(1 CHAR)
 ,COL4 varchar2(10 CHAR)
)
ORGANIZATION EXTERNAL
( TYPE ORACLE_LOADER
  DEFAULT DIRECTORY DAT_DIR
  ACCESS PARAMETERS
  ( RECORDS DELIMITED BY NEWLINE
    CHARACTERSET JA16SJISTILDE
    BADFILE BAD_DIR : 'test_%p_%a.bad'
    LOGFILE LOG_DIR : 'test_%p_%a.log'
    READSIZE 10485760
    FIELDS LRTRIM
    MISSING FIELD VALUES ARE NULL
    REJECT ROWS WITH ALL NULL FIELDS
    ( COL1 ( 1: 3) CHAR(3)
     ,COL2 ( 4: 6) CHAR(3)
     ,COL3 ( 7: 7) CHAR(1)
     ,COL4 ( 8:17) CHAR(10)   -- comma added before this field
    )
  )
  LOCATION
  ( 'STS.txt'
  )
)
REJECT LIMIT UNLIMITED;
Loading data to external table based on position in the file

Related

Using Right() or Substr() in an UPDATE statement

I have two tables :
Add_T
Add_ID NUMBER(10,0)
Add_GEOMETRY SDO_GEOMETRY
STRTN VARCHAR2(40 BYTE)
CITY VARCHAR2(45 BYTE)
STATE VARCHAR2(2 BYTE)
ZIPCODE VARCHAR2(10 BYTE)
HC_ID NUMBER(10,0)
P_ID NUMBER(10,0)
Second Table :
HC_T
HC_GEOMETRY SDO_GEOMETRY
HC_ID NUMBER(10,0)
TYPE VARCHAR2(2 BYTE)
FACY NUMBER(10,0)
COORD_X NUMBER(15,0)
COORD_Y NUMBER(15,0)
I need to update the field HC_ID of Add_T Table and I use the following SQL statement:
update add_t set hc_Id = ( SELECT HC_ID FROM HC_T
WHERE ADD_T.P_ID = HC_T.FACY AND
HCO_T.TYPE='R' ) WHERE hc_ID IS NULL and
subtr(strtn,-4,4) = "-LOC"
It doesn't work. Also, I used the right() function and also got incorrect results.
update add_t set hc_Id = ( SELECT HC_ID FROM HC_T
WHERE ADD_T.P_ID = HC_T.FACY AND
HCO_T.TYPE='R' ) WHERE hc_ID IS NULL and
RIGHT(strtn,4) = "-LOC"
Could anyone say where is the mistake?
Looks like you need the last 4 characters of a string. If so, then you'd
-- Fill add_t.hc_id from the matching hc_t row (joined on p_id = facy,
-- restricted to type 'R'), only for rows with no hc_id yet whose street
-- name ends in '-LOC'.
-- Fix: the subquery referenced HCO_T, a table/alias that does not exist
-- in this statement; the correct reference is hc_t.
UPDATE add_t
SET hc_id =
        (SELECT hc_id
         FROM hc_t
         WHERE add_t.p_id = hc_t.facy
           AND hc_t.TYPE = 'R')
WHERE hc_id IS NULL
  AND SUBSTR (strtn, -4) = '-LOC';
because e.g.
SQL> SELECT SUBSTR ('some string', -4) FROM DUAL;
SUBS
----
ring
SQL>
(Note also that in Oracle you enclose strings in single quotes, not double ones, as you tried with the "-LOC" string.)

multiple BLOB file insertion in oracle using SQLdeveloper

I want to insert multiple BLOB files at once into the "USER_PROFILE" table, using a loop in a procedure/function.
Working on Oracle DB using SQLdeveloper
BLOB Files:
user.resetpassword.email.body.html
password.expiry.notification.email.html
password.expiry.notification.subject.txt
user.resetpassword.email.subject.txt
BLOB FILE DIRECTORY : U_PROFILE
BLOB column name : PROP_VALUE
Unique key : PROP_KEY (This column naming conventions will be same with BLOB file names)
-- Profile-property table: one row per property, BLOB payload in PROP_VALUE.
-- Fix: the terminating ';' was preceded by an invisible zero-width space
-- (U+200B), which makes the statement fail when run in SQL*Plus/SQL Developer.
CREATE TABLE "USER_PROFILE" (
  "USER_PROFILE_PID" VARCHAR2(40 BYTE)
, "PROP_KEY"         VARCHAR2(100 BYTE)  -- unique key; matches the BLOB file name
, "PROP_VALUE"       BLOB                -- file content, loaded separately
, "MODIFIED_DTS"     DATE
, "BUILD_VERSION"    VARCHAR2(100 BYTE)
, "DESCRIPTION"      VARCHAR2(4000 BYTE)
);
-- Seed rows: metadata only; PROP_VALUE (the BLOB itself) is populated later.
INSERT INTO user_profile
    (user_profile_pid, prop_key, modified_dts, build_version, description)
VALUES
    ('CTP-1000',
     'password.expiry.notification.email',
     NULL,
     '1.2',
     'User Account Expiry notification');

INSERT INTO user_profile
    (user_profile_pid, prop_key, modified_dts, build_version, description)
VALUES
    ('CTP-1001',
     'password.expiry.notification.subject',
     NULL,
     '1.2',
     'User Account Expiry notification subject');

Insert Into HANA Table Specifying Columns

I'm coming from a Teradata environment where
create table mytable
(
first_column varchar(50),
second_column varchar(50),
third_column varchar(50)
)
insert into mytable values (first_column = 'one', second_column = 'first')
insert into mytable values (first_column = 'two', third_column = 'second')
is possible. This does not seem to be possible in HANA even with default specified
create column table mytable
(
"FIRST_COLUMN" varchar(50) default null,
"SECOND_COLUMN" varchar(50) default null,
"THIRD_COLUMN" varchar(50) default null
)
I could create a row with a unique ID specifying NULLs for all the fields and then UPDATE the columns I want using the ID which seems time consuming and awkward or is there a better way?
Use the standard syntax:
-- Name only the columns being supplied; omitted columns take their defaults.
insert into mytable
    (first_column, second_column)
values
    ('one', 'first');
This should work both in Hana and Teradata -- and any other database.

Row compare and insert into log table only changed data

I'm trying to compare a global temporary table to another table and want to insert into a log table but can not seem to find the best/most efficient way to accomplish this.
Log Table
-- Audit table the trigger writes into.
-- Fixes: DATETIME is not an Oracle datatype (DATE holds date + time),
-- and the original had a dangling comma after the last column.
CREATE TABLE LogTable
(
    Date_Time DATE,
    Name      VARCHAR2(10 CHAR),
    old       VARCHAR2(20 CHAR),
    new       VARCHAR2(20 CHAR)
);
Object Type
-- Object type describing one incoming record passed from the client.
CREATE OR REPLACE TYPE dbo.P_REC AS OBJECT
(
    ATTR1 VARCHAR2(10 CHAR),
    ATTR2 VARCHAR2(20 CHAR),
    ATTR3 VARCHAR2(20 CHAR),
    ATTR4 VARCHAR2(20 CHAR)
);
Collection Type
-- Nested-table collection of P_REC objects; lets the client pass many records in one call.
CREATE OR REPLACE type dbo.P_REC_LIST IS TABLE OF P_REC;
Stored Procedure
-- Bulk-merges the incoming collection into table1, snapshotting table1 into
-- tmpPTbl first so the pre-merge state is available for later comparison.
-- Fixes vs. the original:
--   * '<--' is not a SQL comment introducer — the inline note after the
--     INSERT ... SELECT broke compilation; replaced with '--';
--   * the MERGE update list had a trailing comma before DELETE WHERE,
--     which is a syntax error.
-- NOTE(review): resultCursor is never opened in the visible body — confirm
-- callers expect it to be left untouched.
PROCEDURE PASSPEOPLETOORACLE(tmpCollection IN P_REC_LIST , resultCursor out sys_refcursor)
IS
BEGIN
IF tmpCollection.count > 0 THEN
-- tmpPTbl is a copy of table1 taken before the merge statement.
INSERT INTO tmpPTbl SELECT * FROM table1;
MERGE INTO table1 MKTP
USING (
WITH tmpTBL AS
(
-- incoming rows enriched with ATTR3/ATTR4 from the materialized view
SELECT ADCOLL.ATTR1,
ADCOLL.ATTR2,
MV.ATTR3,
MV.ATTR4
FROM TABLE(tmpCollection) ADCOLL
LEFT JOIN materializedView MV
ON ADCOLL.ATTR1 = MV.ATTR1
)
-- full outer join so both new rows and rows absent from the feed are visible;
-- match_flag = 'Y' marks table1 rows with no counterpart in the collection
SELECT DISTINCT COALESCE(tmpTBL.ATTR1,MKtmpTBL.ATTR1) AS ATTR1,
tmpTBL.ATTR2,
tmpTBL.ATTR3,
tmpTBL.ATTR4,
CASE WHEN tmpTBL.ATTR1 IS NULL
THEN 'Y' ELSE 'N' END
match_flag FROM tmpTBL
FULL JOIN table1 MKtmpTBL
ON MKtmpTBL.ATTR1 = tmpTBL.ATTR1
) usingTBL
ON (MKTP.ATTR1 = usingTBL.ATTR1)
WHEN MATCHED THEN
UPDATE SET MKTP.ATTR2 = usingTBL.ATTR2,
MKTP.ATTR3 = usingTBL.ATTR3,
MKTP.ATTR4 = usingTBL.ATTR4
-- rows present in table1 but missing from the incoming feed are removed
DELETE WHERE match_flag = 'Y'
WHEN NOT MATCHED THEN
INSERT (ATTR1)
VALUES (usingTBL.ATTR1);
END IF;
END;
I'd like a way to compare the newly updated records in table1 to the prior records in tmpPTbl and, where the old and new values differ, insert a new row into the log table.
2019-02-14 23:59:59,jdoe,abcd,efgh would be an example of a record inserted into the log table.
tmpPTbl & table1 both have 50 columns in them & about 16k rows on average.
The best solution for you would be to create a Trigger on table Table1. So that any operation occurs on Table1 it can be logged to Logtable. See below demo:
-- Demo table watched by the logging trigger.
-- Fix: the original ended with both ';' and a lone '/', which makes SQL*Plus
-- execute the statement twice (the second run fails with ORA-00955).
CREATE TABLE table1
(col1 VARCHAR2(10),
 col2 VARCHAR2(10),
 col3 VARCHAR2(10) );
--Trigger
-- Row-level audit trigger: every INSERT on table1 logs the new values and
-- every UPDATE logs the pre-update (:old) values into LogTable.
-- Improvement: explicit column lists on the INSERTs, so the trigger keeps
-- compiling if columns are ever added to LogTable.
CREATE OR REPLACE TRIGGER Log_Entry before
INSERT OR
UPDATE ON table1 FOR EACH row
BEGIN
IF INSERTING THEN
INSERT INTO LogTable (Date_Time, Name, old, new) VALUES
(sysdate, :new.col1, :new.col2, :new.col3
);
ELSIF UPDATING THEN
-- :old carries the column values as they were before the UPDATE
INSERT INTO LogTable (Date_Time, Name, old, new) VALUES
(sysdate, :old.col1, :old.col2, :old.col3
);
END IF;
END;
Execution:
SQL> Insert into table1 values ('A','B','C');
SQL>Update table1
set col1 ='Z'
where col1 = 'A';
SQL> Merge INTO table1 tb1 USING
(SELECT 'Z' col1 , 'D' col2, 'K' col3 FROM dual
) tb2 ON (tb1.col1 = tb2.col1)
WHEN matched THEN
UPDATE SET tb1.col2=tb2.col2 WHEN NOT matched THEN
INSERT VALUES
(tb2.col1,tb2.col2,tb2.col3
);
SQL>Commit;
SQL> Select * from logtable;
DATE_TIME NAME OLD NEW
--------- ---------- -------------------- --------------------
15-FEB-19 A B C
15-FEB-19 Z B C
15-FEB-19 Z B C
Note there is no need to copy data to tmpPTbl table as well.

T-SQL cast and converting issues from source tables to destination

I have a table as follows:
-- Typed destination table: one row per 'A00' record from the flat files.
-- NOTE(review): schema-qualifying a global temp table (dbo.##...) is
-- tolerated by SQL Server, but temp objects live in tempdb — confirm the
-- dbo. prefix is intentional.
create table dbo.##Table_A00
(
    RowNo             int,
    TRANSACTION_TYPE  varchar(3),
    ORGANISATION_ID   numeric(10),
    FILE_TYPE         varchar(3),
    CREATION_DATE     datetime,
    CREATION_TIME     varchar(3),
    GENERATION_NUMBER numeric(6)
)
However the source files I am using is a table used to capture flat files and they can be in any data format.
What I have in the source table the data type looks like this:
-- Staging table: every flat-file column lands here as raw text, with a
-- surrogate row number generated by the identity column.
CREATE TABLE ##Table_Alltextfiles
(
    rowID int identity (1, 1),
    Col1  varchar(50),
    Col2  varchar(250),
    Col3  varchar(50),
    Col4  varchar(50),
    Col5  varchar(50),
    Col6  varchar(50),
    Col7  varchar(50)
)
What I want to do is insert into ##Table_A00 (destination) all rows from ##Table_Alltextfiles (source)
However I am having issues doing this because the data types are mismatched, and I have tried casting without success.
What can I do to convert each varchar value to its appropriate destination type, e.g. when the target is a date field or a numeric one?
This is what I have been trying to do:
insert into dbo.##Table_A00
select
rowID,
col1, cast(col2 as numeric (10)),
col3, cast(col4 as date),
col5, cast(col6 as numeric (6))
from
##Table_Alltextfiles
where
col1 = 'A00'
Thank you
Try out with the below query.
-- Convert the raw text columns into the typed destination, guarding each
-- cast so non-numeric / non-date values become NULL instead of erroring.
-- Improvement: explicit destination column list, so the statement does not
-- silently mis-map values if ##Table_A00's column order ever changes.
-- NOTE(review): on SQL Server 2012+, TRY_CAST(col AS numeric(10)) is a more
-- reliable guard than ISNUMERIC (which accepts values such as '.', '$', '1e5').
insert into dbo.##Table_A00
    (RowNo, TRANSACTION_TYPE, ORGANISATION_ID, FILE_TYPE,
     CREATION_DATE, CREATION_TIME, GENERATION_NUMBER)
select
    rowID,
    SUBSTRING(col1,1,3), case when ISNUMERIC(col2)=1 THEN cast(col2 as numeric (10)) ELSE NULL END,
    SUBSTRING(col3,1,3), case when ISDATE(col4)=1 THEN cast(col4 as datetime) ELSE NULL END,
    SUBSTRING(col5,1,3), case when ISNUMERIC(col6)=1 THEN cast(col6 as numeric (6)) ELSE NULL END
from
    ##Table_Alltextfiles
where
    col1 = 'A00'