Oracle - creating a dynamic function for dropping tables based on a cursor - SQL

I'm trying to build a dynamic function in Oracle using a cursor for all the tables that need to be dropped and re-created. For example, I have the following table structure:
CREATE TABLE All_tmp_DATA AS
(SELECT 'T_tmp_test1' As Table_NM, 'TEST1' As Process_name FROM DUAL UNION ALL
SELECT 'T_tmp_test2' As Table_NM, 'TEST1' As Process_name FROM DUAL UNION ALL
SELECT 'T_tmp_test3' As Table_NM, 'TEST1' As Process_name FROM DUAL)
The tables listed above (the ones starting with "T_tmp") represent all the tables in the database which need to be dropped if they already exist (i.e. their count in USER_TABLES is 1) when starting the TEST1 process. I really need a function that takes a Process_name parameter (where I can input "TEST1"), loops over a cursor bound to Table_NM from All_tmp_DATA, and substitutes each value for table_name in the following code:
BEGIN
SELECT count(*)
INTO l_cnt
FROM user_tables
WHERE table_name = 'MY_TABLE';
IF l_cnt = 1 THEN
EXECUTE IMMEDIATE 'DROP TABLE my_table';
END IF;
END;

To begin with, I'd suggest you not use mixed case when naming Oracle objects.
Test case:
SQL> select * From all_tmp_data;
TABLE_NM PROCE
----------- -----
T_tmp_test1 TEST1
T_tmp_test2 TEST1
T_tmp_test3 TEST1
SQL> create table "T_tmp_test1" as select * From dept;
Table created.
SQL> -- I don't have "T_tmp_test2"
SQL> create table "T_tmp_test3" as select * From emp;
Table created.
SQL>
SQL> select table_name From user_Tables where upper(table_name) like 'T_TMP%';
TABLE_NAME
------------------------------
T_tmp_test3
T_tmp_test1
Procedure which drops tables contained in ALL_TMP_DATA:
As opposed to your code, I concatenated the table name into the DROP statement.
As you use table names in mixed case, you have to enclose their names in double quotes, always (did I say not to use that?).
As the final select shows, those tables don't exist any more.
SQL> declare
2 l_cnt number;
3 begin
4 for cur_r in (select table_nm from all_tmp_data) loop
5 select count(*) into l_cnt
6 from user_tables
7 where table_name = cur_r.table_nm;
8
9 if l_cnt > 0 then
10 execute immediate ('drop table "' || cur_r.table_nm || '"');
11 end if;
12 end loop;
13 end;
14 /
PL/SQL procedure successfully completed.
SQL> select table_name From user_Tables where upper(table_name) like 'T_TMP%';
no rows selected
SQL>
As for the process column: I have no idea what it is used for, so I did exactly that - I didn't use it.
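If you do want to filter on it and pass the process name as a parameter, as the question asks, here is a minimal sketch of the same logic wrapped into a procedure (the procedure name p_drop_tmp_tables is just illustrative, and the double-quote caveat for mixed-case table names still applies):
create or replace procedure p_drop_tmp_tables (p_process_name in all_tmp_data.process_name%type)
as
  l_cnt number;
begin
  for cur_r in (select table_nm
                  from all_tmp_data
                 where process_name = p_process_name)
  loop
    select count(*)
      into l_cnt
      from user_tables
     where table_name = cur_r.table_nm;

    if l_cnt > 0 then
      execute immediate 'drop table "' || cur_r.table_nm || '"';
    end if;
  end loop;
end;
/

begin
  p_drop_tmp_tables('TEST1');   -- drops all T_tmp tables registered for the TEST1 process
end;
/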

You can use exception handling to handle such a scenario directly, as follows:
DECLARE
TABLE_DOES_NOT_EXIST EXCEPTION;
PRAGMA EXCEPTION_INIT ( TABLE_DOES_NOT_EXIST, -00942 );
BEGIN
FOR CUR_R IN (
SELECT TABLE_NM
FROM ALL_TMP_DATA
) LOOP
BEGIN
EXECUTE IMMEDIATE 'drop table "' || cur_r.table_nm || '"';
DBMS_OUTPUT.PUT_LINE('"' || cur_r.table_nm || '" table dropped.');
EXCEPTION
WHEN TABLE_DOES_NOT_EXIST THEN
DBMS_OUTPUT.PUT_LINE('"' || cur_r.table_nm || '" table does not exist');
END;
END LOOP;
END;
/


Append oracle associative array in a loop to another associative array within that loop

I am trying to do a bulk collect inside a loop that contains dynamic SQL and executes multiple times based on input from the loop, then inserts into a table (and it is taking approx. 4 minutes to insert 193234 records).
To try a different approach, I thought of using the bulk collect on the select inside the loop and filling up a collection with each iteration of that loop. Let's say the 1st iteration gives 10 rows, the second gives 0 rows and the 3rd returns 15 rows; then the collection should hold all 25 records at the end of the loop.
After exiting the loop I will use FORALL with the collection I filled up inside the loop to do the insert in one go, instead of doing an insert for each iteration inside the loop.
Below is sample code which is similar to the application procedure; I just use different tables to simplify the question.
create table test_tab as select owner, table_name, column_name from all_tab_cols where 1=2;
create or replace procedure p_test
as
l_sql varchar2(4000);
type t_tab is table of test_tab%rowtype index by pls_integer;
l_tab t_tab;
l_tab1 t_tab;
l_cnt number := 0;
begin
for i in (with tab as (select 'V_$SESSION' table_name from dual
union all
select 'any_table' from dual
union all
select 'V_$TRANSACTION' from dual
union all
select 'test_table' from dual
)
select table_name from tab )
loop
l_sql := 'select owner, table_name, column_name from all_tab_cols where table_name = '''||i.table_name||'''';
-- dbms_output.put_line(l_sql );
execute immediate l_sql bulk collect into l_tab;
dbms_output.put_line(l_sql ||' > '||l_tab.count);
l_cnt := l_cnt +1;
if l_tab.count<>0
then
l_tab1(l_cnt) := l_tab(l_cnt);
end if;
end loop;
dbms_output.put_line(l_tab1.count);
forall i in indices of l_tab1
insert into test_tab values (l_tab1(i).owner, l_tab1(i).table_name, l_tab1(i).column_name);
end;
It is inserting only 2 rows into the test_tab table, whereas on my system it should insert 150 rows.
select owner, table_name, column_name from all_tab_cols where table_name = 'V_$SESSION' > 103
select owner, table_name, column_name from all_tab_cols where table_name = 'any_table' > 0
select owner, table_name, column_name from all_tab_cols where table_name = 'V_$TRANSACTION' > 47
select owner, table_name, column_name from all_tab_cols where table_name = 'test_table' > 0
2
Above is the DBMS_OUTPUT from my system; you may change the table names in the loop if the example table names do not exist in your DB.
Oracle Version --
Oracle Database 19c Standard Edition 2 Release 19.0.0.0.0 - Production
EDIT
The screenshot below shows the highlighted timings from PLSQL_PROFILER, with the actual insert ... select ... written in the procedure at line# 114, and the bulk collect / forall with a nested table and multiset at line# 132; it seems we are saving at least 40 secs here with bulk collect, multiset and forall.
Firstly, do not use an associative array collection for this; use a nested-table collection type instead. You can concatenate nested-table collections using the MULTISET UNION ALL operator (avoiding the need for loops).
CREATE TYPE test_type IS OBJECT(
owner VARCHAR2(30),
table_name VARCHAR2(30),
column_name VARCHAR2(30)
);
CREATE TYPE test_tab_type IS TABLE OF test_type;
Then:
create procedure p_test
as
l_sql CLOB := 'select test_type(owner, table_name, column_name) from all_tab_cols where table_name = :table_name';
l_table_names SYS.ODCIVARCHAR2LIST := SYS.ODCIVARCHAR2LIST(
'V_$SESSION',
'ANY_TABLE',
'V_$TRANSACTION',
'TEST_TABLE'
);
l_tab test_tab_type := test_tab_type();
l_temp test_tab_type;
l_cnt number := 0;
BEGIN
FOR i IN 1 .. l_table_names.COUNT LOOP
EXECUTE IMMEDIATE l_sql BULK COLLECT INTO l_temp USING l_table_names(i);
dbms_output.put_line(
l_sql || ': ' || l_table_names(i) || ' > '||l_temp.count
);
l_cnt := l_cnt +1;
l_tab := l_tab MULTISET UNION ALL l_temp;
END LOOP;
dbms_output.put_line(l_tab.count);
insert into test_tab
SELECT *
FROM TABLE(l_tab);
end;
/
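A quick way to run the procedure above and see the counts it prints (assuming the types and the TEST_TAB table already exist in your schema):
set serveroutput on

begin
  p_test;
end;
/

-- check how many rows actually landed in the target table
select count(*) from test_tab;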
Secondly, don't do multiple queries if you can do it all in one query and use an IN statement; and if you do it all in a single statement then you do not need to worry about concatenating collections.
create or replace procedure p_test
as
l_table_names SYS.ODCIVARCHAR2LIST := SYS.ODCIVARCHAR2LIST(
'V_$SESSION',
'ANY_TABLE',
'V_$TRANSACTION',
'TEST_TABLE'
);
l_tab test_tab_type;
BEGIN
select test_type(owner, table_name, column_name)
bulk collect into l_tab
from all_tab_cols
where table_name IN (SELECT column_value FROM TABLE(l_table_names));
dbms_output.put_line(l_tab.count);
insert into test_tab
SELECT *
FROM TABLE(l_tab);
end;
/
Thirdly, if you can do INSERT ... SELECT ... in a single statement then it will be much faster than using SELECT ... INTO ... and then a separate INSERT; and doing that means you do not need to use any collections.
create or replace procedure p_test
as
begin
INSERT INTO test_tab (owner, table_name, column_name)
select owner, table_name, column_name
from all_tab_cols
where table_name IN (
'V_$SESSION',
'ANY_TABLE',
'V_$TRANSACTION',
'TEST_TABLE'
);
end;
/
fiddle
For Oracle 19c I would suggest using SQL_MACRO(table) to build the dynamic SQL in place and then use plain SQL. Below is an example that builds dynamic SQL based on the data dictionary (user_tab_cols), but it may be any other logic that builds such SQL (with known column names and column order). Then you may use insert ... select ... without PL/SQL, because the SQL macro is processed at query parsing time.
Setup:
create table t1
as
select level as id, mod(level, 2) as val, mod(level, 2) as col
from dual
connect by level < 4;
create table t2
as
select level as id2, mod(level, 2) as val2, mod(level, 2) as col2
from dual
connect by level < 5;
create table t3
as
select level as id3, mod(level, 2) as val3, mod(level, 2) as col3
from dual
connect by level < 6;
Usage:
create function f_union_tables
return varchar2 sql_macro(table)
as
l_sql varchar2(4000);
begin
/*
Below query emulates dynamic SQL with union
of counts per columns COL* and VAL* per table,
assuming you have only one such column in a table
*/
select
listagg(
replace(replace(
/*Get a count per the second and the third column per table*/
q'{
select '$$table_name$$' as table_name, $$agg_cols$$, count(*) as cnt
from $$table_name$$
group by $$agg_cols$$
}',
'$$table_name$$', table_name),
'$$agg_cols$$', listagg(column_name, ' ,') within group(order by column_name asc)
),
chr(10) || ' union all ' || chr(10)
) within group (order by table_name) as result_sql
into l_sql
from user_tab_cols
where regexp_like(table_name, '^T\d+$')
and (
column_name like 'VAL%'
or column_name like 'COL%'
)
group by table_name;
return l_sql;
end;
/
select *
from f_union_tables()
TABLE_NAME COL VAL CNT
---------- --- --- ---
T1           1   1   2
T1           0   0   1
T2           1   1   2
T2           0   0   2
T3           1   1   3
T3           0   0   2
fiddle
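So, once the macro exists, the load itself can stay plain SQL; a sketch, where some_summary_tab is just a hypothetical target table whose columns mirror the macro's output (TABLE_NAME, COL, VAL, CNT):
-- hypothetical target table; adjust the columns to your real one
create table some_summary_tab (table_name varchar2(128), col number, val number, cnt number);

insert into some_summary_tab (table_name, col, val, cnt)
select table_name, col, val, cnt
from f_union_tables();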

How to read the same column from every table in a database?

I have a huge database with 400+ tables. Each table has the same column "id" for the primary key and a "timestamp_modify" column that records when the last change was made.
What I want is a list of all changes, by ID and table name, like:
Table  | id   | timestamp_modify
Kid    | 1    | 24.10.2021 00:01
Parent | 1000 | 24.10.2021 00:02
The only (very bad) way I could come up with is to make a view in which I include every damn table by hand and read out the values...
Is there a better way?
How about a pipelined function?
Just setting datetime format (you don't have to do that):
SQL> alter session set nls_date_format = 'dd.mm.yyyy hh24:mi:ss';
Session altered.
Types:
SQL> create or replace type t_row as object
2 (table_name varchar2(30),
3 id number,
4 timestamp_modify date)
5 /
Type created.
SQL> create or replace type t_tab is table of t_row;
2 /
Type created.
The function queries user_tab_columns: its cursor FOR loop fetches the tables that contain both the ID and TIMESTAMP_MODIFY columns, and dynamically creates a select statement that returns the columns' values (using MAX, to avoid TOO_MANY_ROWS) for the latest TIMESTAMP_MODIFY value (returned by the subquery).
SQL> create or replace function f_test
2 return t_tab pipelined
3 as
4 l_str varchar2(500);
5 l_id number;
6 l_timestamp_modify date;
7 begin
8 for cur_r in (select table_name from user_tab_columns
9 where column_name = 'ID'
10 intersect
11 select table_name from user_tab_columns
12 where column_name = 'TIMESTAMP_MODIFY'
13 )
14 loop
15 l_str := 'select max(a.id) id, max(a.timestamp_modify) timestamp_modify ' ||
16 'from ' || cur_r.table_name || ' a ' ||
17 'where a.timestamp_modify = ' ||
18 ' (select max(b.timestamp_modify) ' ||
19 ' from ' || cur_r.table_name || ' b ' ||
20 ' where b.id = a.id)';
21 execute immediate l_str into l_id, l_timestamp_modify;
22 pipe row(t_row(cur_r.table_name, l_id, l_timestamp_modify));
23 end loop;
24 end;
25 /
Function created.
Testing:
SQL> select * from table(f_test);
TABLE_NAME ID TIMESTAMP_MODIFY
------------------------------ ---------- -------------------
TABA 1 24.10.2021 14:59:29
TAB_1 1 24.10.2021 15:03:16
TAB_2 25 24.10.2021 15:03:36
TEST 5 24.10.2021 15:04:24
SQL>
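Because the function is pipelined, its result can be filtered, ordered or joined like any other row source; for example, to see only today's changes:
select *
from table(f_test)
where timestamp_modify >= trunc(sysdate)
order by timestamp_modify desc;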
Yes, the only way is to UNION ALL all the tables, like:
select id, timestamp_modify
from kid
union all
select id, timestamp_modify
from parent
union all
...
The performance will be awful, since all the tables will be scanned every time :(
I think that you might reconsider your db design...
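If you do go the UNION ALL route, you can at least generate the statement text from the dictionary instead of typing 400 branches by hand; a sketch, assuming every table really has columns named ID and TIMESTAMP_MODIFY (note that LISTAGG output is limited to 4000 bytes, or 32767 with extended string sizes):
select listagg(
         'select ''' || table_name || ''' as table_name, id, timestamp_modify from ' || table_name,
         chr(10) || 'union all' || chr(10)
       ) within group (order by table_name) as union_sql
from user_tab_columns
where column_name = 'ID'
and table_name in (select table_name
                   from user_tab_columns
                   where column_name = 'TIMESTAMP_MODIFY');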
You can build a procedure for this, but even so it will have some impact on performance. Although there is a loop with dynamic SQL, you might only need 400 iterations, and in each one you will insert all the ids of that table.
I am making some assumptions:
You want all the IDs and their corresponding timestamp_modify per table
I create a table to store the results. If you always use the same name, it will recycle the object; if not, you can keep a history.
I am assuming that only one timestamp_modify row is present per ID
I filter only the tables of your schema that contain both columns.
The table also contains the table_name, so that you can identify where each record is coming from.
One example
create or replace procedure pr_build_output ( p_tmp_table in varchar2 default 'TMP_RESULT' )
is
vcounter pls_integer;
vsql clob;
vtimestamp date; -- or timestamp
begin
-- create table to store results
select count(*) into vcounter from all_tables where table_name = upper(p_tmp_table) and owner = 'MY_SCHEMA';
if vcounter = 1
then
execute immediate ' drop table '||p_tmp_table||' purge ' ;
end if;
vsql := ' create table '||p_tmp_table||'
( table_name varchar2(128) ,
id number,
timestamp_modify date -- or timestamp
) ';
execute immediate vsql ;
-- Populate rows
for h in
( select a.table_name from all_tables a
where a.owner = 'MY_SCHEMA'
and a.table_name in ( -- only tables that have BOTH the ID and TIMESTAMP_MODIFY columns
select b.table_name from all_tab_columns b where b.owner = 'MY_SCHEMA'
and b.column_name in ('ID', 'TIMESTAMP_MODIFY')
group by b.table_name having count(distinct b.column_name) = 2
)
)
loop
vsql := ' insert into '||p_tmp_table||' ( table_name , id, timestamp_modify )
select '''||h.table_name||''' as table_name , id , timestamp_modify
from my_schema.'||h.table_name||'
' ;
execute immediate vsql ;
commit ;
end loop;
exception when others then raise;
end;
/
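To run it and inspect the result (TMP_RESULT is the procedure's default table name):
begin
  pr_build_output;   -- or pr_build_output('TMP_RESULT_20211024') to keep a history
end;
/

select table_name, id, timestamp_modify
from tmp_result
order by timestamp_modify desc;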

How to use 'sysdate' if it's a string constant

I extract data from a table; the field is mostly null, but sometimes it's sysdate. However, since it comes from a table, after getting the field it's 'sysdate', between single quotes. How can I use it?
I have tried to_date, to_date(to_char()).
I need something that works within
select to_date('sysdate') from dual;
You can use a case expression:
select case
when the_column = 'sysdate' then sysdate
else to_date(the_column)
end as date_value
from the_table;
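A small self-contained demo of that approach; the_table and the_column are placeholder names, and the TO_DATE branch relies on your session's NLS_DATE_FORMAT for any real date strings:
create table the_table (the_column varchar2(20));

insert into the_table values ('sysdate');
insert into the_table values (null);

select case
         when the_column = 'sysdate' then sysdate
         else to_date(the_column)
       end as date_value
from the_table;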
The only way I know is dynamic SQL. Here's an example:
SQL> create table test (id number, col varchar2(20));
Table created.
SQL> insert into test
2 select 1, '''sysdate''' from dual union all
3 select 2, null from dual;
2 rows created.
SQL> declare
2 l_res test%rowtype;
3 l_str varchar2(200);
4 begin
5 for cur_r in (select id, col from test) loop
6 l_str := 'select ' || cur_r.id ||', '||
7 nvl(replace(cur_r.col, chr(39), null), 'null') || ' from dual';
8 execute immediate l_str into l_res;
9 dbms_output.put_line(l_res.id ||': '|| l_res.col);
10 end loop;
11 end;
12 /
1: 24.06.2019 12:18:39
2:
PL/SQL procedure successfully completed.
SQL>

Is it possible to pass a table name to a SQL query by select query in oracle

Is it possible to pass a table name to a query using a result of another query?
SELECT T.MID, T.TID, M.NAME
FROM 'ONLINETRANSACTION#(' || SELECT ONLINEDBLINK FROM PARAMETERTABLE ||')' T
LEFT JOIN 'ONLINEMERCHANT#(' || SELECT ONLINEDBLINK FROM PARAMETERTABLE ||')' M
ON T.MID = M.MID
I have tried with the above code but it doesn't work.
This is a simple example based on Scott's schema.
lines 5 - 7 select your "table name" (actually, it appears to be a database link name in your code; doesn't matter, the principle is just the same)
line 9 concatenates that "table name" with the rest of the SELECT statement and executes it using EXECUTE IMMEDIATE
SQL> create table param (table_name varchar2(30));
Table created.
SQL> insert into param values ('EMP');
1 row created.
SQL> set serveroutput on
SQL> declare
2 l_table_name param.table_name%type;
3 l_max_sal emp.sal%type;
4 begin
5 select table_name into l_table_name
6 from param
7 where rownum = 1;
8
9 execute immediate 'select max(sal) from ' || l_table_name into l_max_sal;
10 dbms_output.put_line('Max salary = ' || l_max_sal);
11 end;
12 /
Max salary = 10000
PL/SQL procedure successfully completed.
SQL>
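Since in your case the value stored in PARAMETERTABLE is a database link name, the same pattern applies - concatenate it after the @ sign. A sketch using the table and column names from your query (untested; how you consume the ref cursor depends on what you need the rows for):
declare
  l_dblink parametertable.onlinedblink%type;
  l_sql    varchar2(4000);
  l_cur    sys_refcursor;
begin
  select onlinedblink
    into l_dblink
    from parametertable
   where rownum = 1;

  l_sql := 'select t.mid, t.tid, m.name '
        || '  from onlinetransaction@' || l_dblink || ' t '
        || '  left join onlinemerchant@' || l_dblink || ' m on t.mid = m.mid';

  open l_cur for l_sql;
  -- fetch from l_cur here (into records or a collection), then close it
  close l_cur;
end;
/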

Oracle SQL - Keeping Comments from previous table in new table

EDIT: Thank you, I got one step further, but can't seem to get it working since the original tables are accessed via a database link.
I have access to a lot of tables with descriptions of the columns in the comments section.
Is there any way to copy the comments over when I create a new table, besides adding it manually afterwards?
Select a.tot_sum
,b.id
,b.size
from original_table a
,someother_table b
where a.id=b.id
and b.region in 'North'
I can do it manually with some copy-pasting in Excel:
Comment On Column Mytable.Tot_Sum
Is 'Total sum of sales';
But I want to do something like this:
Comment On Column Mytable.Tot_Sum
IS (select comment from column original_table.tot_sum);
Or is there a syntax to simply keep the comments when creating a new table?
You can query ALL_COL_COMMENTS.
You can automate this with a small PL/SQL anonymous block:
SQL> set serveroutput on;
SQL> DECLARE
2 v_ViewName VARCHAR2(30) := 'XXX';
3 BEGIN
4 FOR v_Comment IN
5 (SELECT acc.COLUMN_NAME,
6 REPLACE(REPLACE(acc.COMMENTS, chr(13), ''), chr(10), '') comments
7 FROM ALL_COL_COMMENTS ACC
8 WHERE ACC.OWNER = 'SCOTT'
9 AND ACC.TABLE_NAME = 'EMP'
10 )
11 LOOP
12 dbms_output.put_line(v_comment.column_name || ' - '||v_comment.comments);
13 END LOOP;
14 END;
15 /
EMPNO -
ENAME -
JOB -
MGR -
HIREDATE -
SAL -
COMM -
DEPTNO -
PL/SQL procedure successfully completed.
If you have any table with some comments, just replace the USER and TABLE_NAME in my script.
declare
comment_ varchar2(255);
begin
select comments into comment_ from user_col_comments
where table_name=upper('original_table')
and column_name=upper('my_column');
execute immediate 'comment on column new_table.my_column is '''||comment_||'''';
end;
The COMMENT statement accepts only text literals; that's why I used a dynamic query instead of IS (SELECT ...).
There are data dictionary views for the comments:
DBA_COL_COMMENTS
ALL_COL_COMMENTS
USER_COL_COMMENTS
You could do something like this:
declare
v_comment varchar2(100);
v_stmt varchar2(4000);
begin
select comments
into v_comment
from user_col_comments
where table_name= 'ORIGINAL_TABLE'
and column_name='TOT_SUM';
v_stmt:='comment on column MYTABLE.TOT_SUM IS '''||v_comment||'''';
execute immediate v_stmt;
end;
Edit: When you have a lot of columns, you could automate it by doing this:
declare
v_comment varchar2(100);
v_stmt varchar2(4000);
v_orig_table varchar2(100) :='ORIGINAL_TABLE';
v_new_table varchar2(100) :='MYTABLE';
begin
for c in (select column_name
from user_tab_columns c
where table_name=v_orig_table
and exists(select 1
from user_tab_columns
where table_name=v_new_table
and column_name=c.column_name)) loop
select comments
into v_comment
from user_col_comments
where table_name= v_orig_table
and column_name=c.column_name;
v_stmt:='comment on column '||v_new_table||'.'||c.column_name||' IS '''||v_comment||'''';
execute immediate v_stmt;
end loop;
end;
Edit 2: Version with schema name and DB link
declare
v_comment varchar2(100);
v_stmt varchar2(4000);
v_orig_table varchar2(100) :='ORIGINAL_TABLE';
v_orig_schema varchar2(100) := 'ORIG_OWNER';
v_new_table varchar2(100) :='MYTABLE';
begin
for c in (select column_name
from all_tab_columns@db.link c
where table_name=v_orig_table
and owner=v_orig_schema
and exists(select 1
from user_tab_columns
where table_name=v_new_table
and column_name=c.column_name)) loop
select comments
into v_comment
from all_col_comments@db.link
where table_name= v_orig_table
and column_name=c.column_name
and owner=v_orig_schema;
v_stmt:='comment on column '||v_new_table||'.'||c.column_name||' IS '''||v_comment||'''';
execute immediate v_stmt;
end loop;
end;
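After running the block, you can verify that the comments arrived on the new table:
select column_name, comments
from user_col_comments
where table_name = 'MYTABLE'
order by column_name;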