Below is the query I created to get certain item numbers, quantity ordered, price, and a few other fields from the database. The problem is that sometimes an order doesn't contain 20 item numbers but only 2. My question is whether it's possible to fill the empty slots with other item numbers picked at random from the DB. It doesn't need to be correct, because it's just for testing.
So, can anybody help?
select
t.*,
-- THE THREE SUMVAT VALUES BELOW ARE VERY IMPORTANT. THEY ARE ONLY CORRECT HOWEVER WHEN THERE ARE NO NULL VALUES INVOLVED IN THE MATH,
-- I.E. WHEN THERE ARE 20 ITEMS/QTYS/PRICES INVOLVED WITH A CERTAIN ORDER_NO
((t.QTY1*t.PRICE1)+(t.QTY2*t.PRICE2)+(t.QTY3*t.PRICE3)+(t.QTY4*t.PRICE4)+(t.QTY5*t.PRICE5)) SUMVAT0, -- example: 5123.45 <- lines 1-5: Q*P
((t.QTY6*t.PRICE6)+(t.QTY7*t.PRICE7)+(t.QTY8*t.PRICE8)+(t.QTY9*t.PRICE9)+(t.QTY10*t.PRICE10)+(t.QTY11*t.PRICE11)+(t.QTY12*t.PRICE12)+(t.QTY13*t.PRICE13)+(t.QTY14*t.PRICE14)+(t.QTY15*t.PRICE15))
SUMVAT6, -- example: 1234.56 <- lines 6-15: Q*P
((t.QTY16*t.PRICE16)+(t.QTY17*t.PRICE17)+(t.QTY18*t.PRICE18)+(t.QTY19*t.PRICE19)+(t.QTY20*t.PRICE20)) SUMVAT19 -- example: 4567.89 <- lines 16-20: Q*P
from (
select
(to_char(p.vdate, 'YYYYMMDD') || to_char(sysdate, 'HH24MISS')) DT,
(to_char(p.vdate, 'YYYY-MM-DD') ||'T' || to_char(sysdate, 'HH24:MI:') || '00') DATETIME,
(to_char(orh.written_date, 'YYYY-MM-DD') ||'T00:00:00') DATETIME2,
orh.supplier FAKE_GLN,
y.*
from (
select
x.order_no ORDNO
, max(decode(r,1 ,x.item,null)) FAKE_GTIN1
, max(decode(r,2 ,x.item,null)) FAKE_GTIN2
, max(decode(r,3 ,x.item,null)) FAKE_GTIN3
, max(decode(r,4 ,x.item,null)) FAKE_GTIN4
, max(decode(r,5 ,x.item,null)) FAKE_GTIN5
, max(decode(r,6 ,x.item,null)) FAKE_GTIN6
, max(decode(r,7 ,x.item,null)) FAKE_GTIN7
, max(decode(r,8 ,x.item,null)) FAKE_GTIN8
, max(decode(r,9 ,x.item,null)) FAKE_GTIN9
, max(decode(r,10,x.item,null)) FAKE_GTIN10
, max(decode(r,11,x.item,null)) FAKE_GTIN11
, max(decode(r,12,x.item,null)) FAKE_GTIN12
, max(decode(r,13,x.item,null)) FAKE_GTIN13
, max(decode(r,14,x.item,null)) FAKE_GTIN14
, max(decode(r,15,x.item,null)) FAKE_GTIN15
, max(decode(r,16,x.item,null)) FAKE_GTIN16
, max(decode(r,17,x.item,null)) FAKE_GTIN17
, max(decode(r,18,x.item,null)) FAKE_GTIN18
, max(decode(r,19,x.item,null)) FAKE_GTIN19
, max(decode(r,20,x.item,null)) FAKE_GTIN20
, max(decode(r,1 ,x.qty_ordered,null)) QTY1
, max(decode(r,2 ,x.qty_ordered,null)) QTY2
, max(decode(r,3 ,x.qty_ordered,null)) QTY3
, max(decode(r,4 ,x.qty_ordered,null)) QTY4
, max(decode(r,5 ,x.qty_ordered,null)) QTY5
, max(decode(r,6 ,x.qty_ordered,null)) QTY6
, max(decode(r,7 ,x.qty_ordered,null)) QTY7
, max(decode(r,8 ,x.qty_ordered,null)) QTY8
, max(decode(r,9 ,x.qty_ordered,null)) QTY9
, max(decode(r,10,x.qty_ordered,null)) QTY10
, max(decode(r,11,x.qty_ordered,null)) QTY11
, max(decode(r,12,x.qty_ordered,null)) QTY12
, max(decode(r,13,x.qty_ordered,null)) QTY13
, max(decode(r,14,x.qty_ordered,null)) QTY14
, max(decode(r,15,x.qty_ordered,null)) QTY15
, max(decode(r,16,x.qty_ordered,null)) QTY16
, max(decode(r,17,x.qty_ordered,null)) QTY17
, max(decode(r,18,x.qty_ordered,null)) QTY18
, max(decode(r,19,x.qty_ordered,null)) QTY19
, max(decode(r,20,x.qty_ordered,null)) QTY20
, max(decode(r,1 ,x.unit_cost,null)) PRICE1
, max(decode(r,2 ,x.unit_cost,null)) PRICE2
, max(decode(r,3 ,x.unit_cost,null)) PRICE3
, max(decode(r,4 ,x.unit_cost,null)) PRICE4
, max(decode(r,5 ,x.unit_cost,null)) PRICE5
, max(decode(r,6 ,x.unit_cost,null)) PRICE6
, max(decode(r,7 ,x.unit_cost,null)) PRICE7
, max(decode(r,8 ,x.unit_cost,null)) PRICE8
, max(decode(r,9 ,x.unit_cost,null)) PRICE9
, max(decode(r,10,x.unit_cost,null)) PRICE10
, max(decode(r,11,x.unit_cost,null)) PRICE11
, max(decode(r,12,x.unit_cost,null)) PRICE12
, max(decode(r,13,x.unit_cost,null)) PRICE13
, max(decode(r,14,x.unit_cost,null)) PRICE14
, max(decode(r,15,x.unit_cost,null)) PRICE15
, max(decode(r,16,x.unit_cost,null)) PRICE16
, max(decode(r,17,x.unit_cost,null)) PRICE17
, max(decode(r,18,x.unit_cost,null)) PRICE18
, max(decode(r,19,x.unit_cost,null)) PRICE19
, max(decode(r,20,x.unit_cost,null)) PRICE20
from (
select
rank() over (partition by oh.order_no order by ol.item asc) r,
oh.supplier,
oh.order_no,
oh.written_date,
ol.item,
ol.qty_ordered,
ol.unit_cost
from
ordhead oh
JOIN ordloc ol ON oh.order_no = ol.order_no
where
-- count(numrows) = 1500
not unit_cost is null
-- and ol.order_no in (6181,6121)
) x
group by x.order_no
) y
JOIN ordhead orh ON orh.order_no = y.ORDNO,
period p
) t
;
Without being able to really test this, you might try something like the following. Replace the inline view 'x' with this:
FROM (
WITH q AS (
SELECT LEVEL r, TO_CHAR(TRUNC(dbms_random.value*1000,0)) item
, TRUNC(dbms_random.value*100,0) qty_ordered
, TRUNC(dbms_random.value*10,2) unit_cost
FROM dual CONNECT BY LEVEL <= 20
)
SELECT COALESCE(x1.r, q.r) r, supplier, order_no, written_date
, COALESCE(x1.item, q.item) item
, COALESCE(x1.qty_ordered, q.qty_ordered) qty_ordered
, COALESCE(x1.unit_cost, q.unit_cost) unit_cost
FROM (SELECT ROW_NUMBER() OVER (PARTITION BY oh.order_no ORDER BY ol.item ASC) r
, oh.supplier
, oh.order_no
, oh.written_date
, ol.item
, ol.qty_ordered
, ol.unit_cost
FROM ordhead oh JOIN ordloc ol ON oh.order_no = ol.order_no
WHERE NOT unit_cost IS NULL) x1 RIGHT JOIN q ON x1.r = q.r
) x
GROUP BY x.order_no
The WITH clause will give you a table with 20 rows of random data. Outer-join that with your old 'x' data and you are guaranteed 20 rows of data. You might not need to cast the item as a VARCHAR2, depending on your data. (N.B., I finally found a query where it makes sense to use a RIGHT JOIN. See this SO question.)
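If you want to sanity-check the generator on its own first, the WITH block runs standalone (a minimal sketch reusing the answer's expressions; the values are random, so the output differs on every run):
WITH q AS (
SELECT LEVEL r, TO_CHAR(TRUNC(dbms_random.value*1000,0)) item
, TRUNC(dbms_random.value*100,0) qty_ordered
, TRUNC(dbms_random.value*10,2) unit_cost
FROM dual CONNECT BY LEVEL <= 20
)
SELECT * FROM q;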
I'm not quite sure what you're trying to do with the GROUP BY and MAX stuff? In the future it would be helpful to condense your examples into something others can easily test, a minimal case that gets your point across.
I also incorporated @Kevin's good suggestion to use ROW_NUMBER instead of RANK.
Very difficult to understand...
I think you might be OK if you put a 0 instead of NULL in the price values:
, max(decode(r,18,x.unit_cost,0)) PRICE18
and
, max(decode(r,20,x.qty_ordered,0)) QTY20
Then at least the math should work.
RANK will not guarantee a sequential count of the items within each group there; there may be gaps when several rows have the same value.
For a decent explanation see:
http://asktom.oracle.com/pls/asktom/f?p=100:11:0::::P11_QUESTION_ID:2920665938600
I think you need to use ROW_NUMBER.
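For instance, if two order lines share the same item value, RANK gives both the same number and then skips one, while ROW_NUMBER stays sequential. A minimal sketch with made-up values:
WITH t AS (
SELECT 'A' item FROM dual UNION ALL
SELECT 'A' FROM dual UNION ALL
SELECT 'B' FROM dual
)
SELECT item
, RANK() OVER (ORDER BY item) rnk -- 1, 1, 3 (gap after the tie)
, ROW_NUMBER() OVER (ORDER BY item) rn -- 1, 2, 3 (always sequential)
FROM t;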
Related
I would like to run a report showing the total number of customers per reporting date. Here is how I need the data to look:
My original dataset looks like this (please see query). In order to calculate the number of customers, I need to use the start and end date: if Start_Date > reporting_date and End_Date <= reporting_date, then count as a customer.
I was able to develop a script, but it only gives me the total number of customers for a single reporting date.
select '2022-10-31' reporting_date, count(case when Start_Date>'2022-10-31' and End_Date<='2022-10-31' then Customer_ID end)
from (values ('2022-10-14','2022-8-19','0010Y654012P6KuQAK')
, ('2022-3-15','2022-9-14','0011v65402PoSpVAAV')
, ('2021-1-11','2022-10-11','0010Y654012P6DuQAK')
, ('2022-12-1','2022-5-14','0011v65402u7muLAAQ')
, ('2021-1-30','2022-3-14','0010Y654012P6DuQAK')
, ('2022-10-31','2022-2-14','0010Y654012P6PJQA0')
, ('2021-10-31','US','0010Y654012P6PJQA0')
, ('2021-5-31','2022-5-14','0011v65402x8cjqAAA')
, ('2022-6-2','2022-1-13','0010Y654016OqkJQAS')
, ('2022-1-1','2022-11-11','0010Y654016OqIaQAK')
) a(Start_Date ,End_Date ,Customer_ID)
Is there a way to amend the code with a cross join or another workaround to get the total customers per reporting date without writing many unions?
select '2022-10-31' reporting_date, count(case when Start_Date>'2022-10-31' and End_Date<='2022-10-31' then Customer_ID end)
from (values ('2022-10-14','2022-8-19','0010Y654012P6KuQAK')
, ('2022-3-15','2022-9-14','0011v65402PoSpVAAV')
, ('2021-1-11','2022-10-11','0010Y654012P6DuQAK')
, ('2022-12-1','2022-5-14','0011v65402u7muLAAQ')
, ('2021-1-30','2022-3-14','0010Y654012P6DuQAK')
, ('2022-10-31','2022-2-14','0010Y654012P6PJQA0')
, ('2021-10-31','US','0010Y654012P6PJQA0')
, ('2021-5-31','2022-5-14','0011v65402x8cjqAAA')
, ('2022-6-2','2022-1-13','0010Y654016OqkJQAS')
, ('2022-1-1','2022-11-11','0010Y654016OqIaQAK')
) a(Start_Date ,End_Date ,Customer_ID)
UNION ALL
select '2022-9-30' reporting_date, count(case when Start_Date>'2022-9-30' and End_Date<='2022-9-30' then Customer_ID end)
from (values ('2022-10-14','2022-8-19','0010Y654012P6KuQAK')
, ('2022-3-15','2022-9-14','0011v65402PoSpVAAV')
, ('2021-1-11','2022-10-11','0010Y654012P6DuQAK')
, ('2022-12-1','2022-5-14','0011v65402u7muLAAQ')
, ('2021-1-30','2022-3-14','0010Y654012P6DuQAK')
, ('2022-10-31','2022-2-14','0010Y654012P6PJQA0')
, ('2021-10-31','US','0010Y654012P6PJQA0')
, ('2021-5-31','2022-5-14','0011v65402x8cjqAAA')
, ('2022-6-2','2022-1-13','0010Y654016OqkJQAS')
, ('2022-1-1','2022-11-11','0010Y654016OqIaQAK')
) a(Start_Date ,End_Date ,Customer_ID)
It is possible to provide date ranges as a separate table/subquery, join to the actual data and perform grouping:
select s.start_d, s.end_d, COUNT(Customer_ID) AS total
FROM (SELECT '2022-10-31'::DATE, '2022-10-31'::DATE
UNION SELECT '2022-09-30', '2022-09-30')
AS s(start_d, end_d)
LEFT JOIN (values ('2022-10-14','2022-8-19','0010Y654012P6KuQAK')
, ('2022-3-15','2022-9-14','0011v65402PoSpVAAV')
, ('2021-1-11','2022-10-11','0010Y654012P6DuQAK')
, ('2022-12-1','2022-5-14','0011v65402u7muLAAQ')
, ('2021-1-30','2022-3-14','0010Y654012P6DuQAK')
, ('2022-10-31','2022-2-14','0010Y654012P6PJQA0')
, ('2021-10-31','2021-10-31','0010Y654012P6PJQA0')
, ('2021-5-31','2022-5-14','0011v65402x8cjqAAA')
, ('2022-6-2','2022-1-13','0010Y654016OqkJQAS')
, ('2022-1-1','2022-11-11','0010Y654016OqIaQAK')
) a(Start_Date ,End_Date ,Customer_ID)
ON a.Start_Date>s.start_d and a.End_Date<=s.end_d
GROUP BY s.start_d, s.end_d;
Output:
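As a side note, if the list of reporting dates grows, it can be generated rather than spelled out with UNION. A hedged sketch, assuming PostgreSQL (suggested by the ::DATE casts above) and month-end reporting dates; it would slot in as the derived table s:
SELECT (date_trunc('month', d) + interval '1 month' - interval '1 day')::date AS start_d
, (date_trunc('month', d) + interval '1 month' - interval '1 day')::date AS end_d
FROM generate_series(date '2022-01-01', date '2022-12-01', interval '1 month') AS g(d);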
I have a financial application. I have ViewHistoricInstrumentValue which has rows like this
instrument1, date1, price, grossValue, netValue
instrument2, date1, price, grossValue, netValue
...
instrument1, date2, price, grossValue, netValue
...
My views are complicated, but the DB itself is small (4000 transactions). ViewHistoricInstrumentValue executed in less than 1 second before I added the following CTE to the view; after that it takes 26 s. ActualEvaluationPrice is the price for instrumentX at dateY; if this value is missing from the HistoricPrice table, I take the previous price for instrumentX.
, UsedEvaluationPriceCte AS (
SELECT *
, isnull(ActualEvaluationPrice,
(select top 1 HistoricPrice.Price -- PreviousPrice
from HistoricPrice JOIN ValidDate
on HistoricPrice.DateId = ValidDate.Id
and HistoricPrice.InstrumentId = StartingCte.InstrumentId
and ValidDate.[Date] < StartingCte.DateValue
order by ValidDate.[Date]))
as UsedEvaluationPrice
FROM StartingCte
)
My problem is that the execution time increased needlessly. Right now the HistoricPrice table has no missing values, so ActualEvaluationPrice is never null and the previous price should never need to be determined.
ViewHistoricInstrumentValue returns 1815 rows. One other mystery is that the first query takes 26 s, but the second only 2 s:
SELECT * FROM [ViewHistoricInstrumentValue]
SELECT top(2000) * FROM [ViewHistoricInstrumentValue]
Appendix
The execution plan: https://www.dropbox.com/s/5st69uhjkpd3b5y/IsNull.sqlplan?dl=0
The same plan: https://www.brentozar.com/pastetheplan/?id=rk9bK1Wiv
The view:
ALTER VIEW [dbo].[ViewHistoricInstrumentValue] AS
WITH StartingCte AS (
SELECT
HistoricInstrumentValue.DateId
, ValidDate.Date as DateValue
, TransactionId
, TransactionId AS [Row]
, AccountId
, AccountName
, ViewTransaction.InstrumentId
, ViewTransaction.InstrumentName
, OpeningDate
, OpeningPrice
, Price AS ActualEvaluationPrice
, ClosingDate
, Amount
, isnull(ViewTransaction.FeeValue, 0) as FeeValue
, HistoricInstrumentValue.Id AS Id
FROM ViewBriefHistoricInstrumentValue as HistoricInstrumentValue
JOIN ValidDate on HistoricInstrumentValue.DateId = ValidDate.Id
JOIN ViewTransaction ON ViewTransaction.Id = HistoricInstrumentValue.TransactionId
left JOIN ViewHistoricPrice ON ViewHistoricPrice.DateId = HistoricInstrumentValue.DateId AND
ViewHistoricPrice.InstrumentId = ViewTransaction.InstrumentId
)
, UsedEvaluationPriceCte AS (
SELECT *
, isnull(ActualEvaluationPrice,
(select top 1 HistoricPrice.Price -- PreviousPrice
from HistoricPrice JOIN ValidDate
on HistoricPrice.DateId = ValidDate.Id
and HistoricPrice.InstrumentId = StartingCte.InstrumentId
and ValidDate.[Date] < StartingCte.DateValue
order by ValidDate.[Date]))
as UsedEvaluationPrice
FROM StartingCte
)
, GrossEvaluationValueCte AS (
SELECT *
, Amount * UsedEvaluationPrice AS GrossEvaluationValue
, (UsedEvaluationPrice - OpeningPrice) * Amount AS GrossCapitalGains
FROM UsedEvaluationPriceCte
)
, CapitalGainsTaxCte AS (
SELECT *
, dbo.MyMax(GrossCapitalGains * 0.15, 0) AS CapitalGainsTax
FROM GrossEvaluationValueCte
)
, IsOpenCte AS (
SELECT
DateId
, DateValue
, TransactionId
, [Row]
, AccountId
, AccountName
, InstrumentId
, InstrumentName
, OpeningDate
, OpeningPrice
, ActualEvaluationPrice
, UsedEvaluationPrice
, ClosingDate
, Amount
, GrossEvaluationValue
, GrossCapitalGains
, CapitalGainsTax
, FeeValue
, GrossEvaluationValue - CapitalGainsTax - FeeValue AS NetEvaluationValue
, GrossCapitalGains - CapitalGainsTax - FeeValue AS NetUnrealizedGains
, CASE WHEN ClosingDate IS NULL OR DateValue < ClosingDate
THEN CAST(1 AS BIT)
ELSE CAST(0 AS BIT)
END
AS IsOpen
, convert(NVARCHAR, DateValue, 20) + cast([Id] AS NVARCHAR(MAX)) AS Temp
, Id
FROM CapitalGainsTaxCte
)
Select * from IsOpenCte
I have no idea what your query is supposed to be doing. But this process:
ActualEvaluationPrice is the price for instrumentX at dateY. If this value is missing from HistoricPrice table then I find the previous price for instrumentX.
is handled easily with lag():
select vhiv.*,
coalesce(vhiv.ActualEvaluationPrice,
lag(vhiv.ActualEvaluationPrice) over (partition by vhiv.InstrumentId order by DateValue)
) as UsedEvaluationPrice
from ViewHistoricInstrumentValue vhiv;
Note: If you need to filter out certain dates by joining to ValidDates, you can include the JOIN in the query. However, that is not part of the problem statement.
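Applied inside the view itself, the correlated subquery in UsedEvaluationPriceCte could be swapped for the window function along these lines (a sketch only, using the column names from StartingCte above; note that LAG reads the immediately preceding row, so it covers a single missing price, not a run of consecutive gaps):
, UsedEvaluationPriceCte AS (
SELECT *
, ISNULL(ActualEvaluationPrice,
LAG(ActualEvaluationPrice) OVER (PARTITION BY InstrumentId ORDER BY DateValue)) AS UsedEvaluationPrice
FROM StartingCte
)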
The SQL below is written to return 'open orders'. It was written in a way I could understand, but now I would like to optimize it and reduce the amount of code.
The SQL below gives me the desired outcome; however, I would like to shorten the query without using WITH AS. Any suggestions using UNION or some other nesting method?
WITH
product AS --filter by dept
(SELECT item
, dept
FROM item_master
WHERE dept in ('353')
),
open_orders AS --view of orders in Status A with ordered units > received units
(SELECT ol.order_no
, ol.item
, ol.location
, oh.po_type
, oh.order_type
, oh.not_before_date
, oh.not_after_date
, oh.otb_eow_date
, SUM(ol.qty_ordered) AS qty_ordered
, SUM(NVL(ol.qty_received,0)) AS qty_received
FROM ordhead oh
, ordloc ol
WHERE oh.order_no = ol.order_no
AND oh.status = 'A'
AND ol.qty_ordered > NVL(ol.qty_received,0)
-- AND ol.order_no in ('18701212') --optional filter for specific PO's
GROUP BY ol.order_no
, ol.item
, ol.location
, oh.po_type
, oh.order_type
, oh.not_before_date
, oh.not_after_date
, oh.otb_eow_date
),
allocations AS --view of all allocations
(SELECT ah.alloc_no
, ah.order_no
, ah.item
, ad.to_loc
, NVL(ad.qty_allocated,0) AS qty_allocated
, NVL(ad.qty_received,0) AS qty_received
FROM alloc_header ah
, alloc_detail ad
WHERE ah.alloc_no = ad.alloc_no
)
SELECT p.dept --main query on above views
, oo.order_no
, oo.po_type
, oo.order_type
, oo.not_before_date
, oo.not_after_date
, oo.otb_eow_date
, oo.item
, CASE WHEN oo.po_type = 0 THEN oo.location ELSE aa.to_loc END AS loc
, SUM(oo.qty_ordered) AS order_qty
, CASE WHEN SUM(NVL(aa.qty_allocated,0)) - SUM(NVL(aa.qty_received,0)) = 0
THEN SUM(oo.qty_ordered) - SUM(NVL(oo.qty_received,0))
ELSE SUM(NVL(aa.qty_allocated,0)) - SUM(NVL(aa.qty_received,0))
END AS open_qty
FROM open_orders oo
, allocations aa
, product p
WHERE oo.order_no = aa.order_no(+)
AND oo.item = aa.item(+)
AND oo.item = p.item
AND (oo.qty_ordered - oo.qty_received) >0
GROUP BY p.dept
, oo.order_no
, oo.po_type
, oo.order_type
, oo.not_before_date
, oo.not_after_date
, oo.otb_eow_date
, oo.item
, CASE WHEN oo.po_type = 0 THEN oo.location ELSE aa.to_loc END
;
CTEs (Common Table Expressions) are just a way of organizing a query by placing bits of code (each defining a "derived" table) at the top so they can be reused in the main statement. So wherever product, open_orders, and allocations are mentioned in the FROM clause, you can simply swap those names out for the code that defines them:
SELECT p.dept --main query on above views
,
oo.order_no,
oo.po_type,
oo.order_type,
oo.not_before_date,
oo.not_after_date,
oo.otb_eow_date,
oo.item,
CASE
WHEN oo.po_type = 0
THEN oo.location
ELSE aa.to_loc
END AS loc,
SUM(oo.qty_ordered) AS order_qty,
CASE
WHEN SUM(NVL(aa.qty_allocated, 0)) - SUM(NVL(aa.qty_received, 0)) = 0
THEN SUM(oo.qty_ordered) - SUM(NVL(oo.qty_received, 0))
ELSE SUM(NVL(aa.qty_allocated, 0)) - SUM(NVL(aa.qty_received, 0))
END AS open_qty
FROM (
SELECT ol.order_no,
ol.item,
ol.location,
oh.po_type,
oh.order_type,
oh.not_before_date,
oh.not_after_date,
oh.otb_eow_date,
SUM(ol.qty_ordered) AS qty_ordered,
SUM(NVL(ol.qty_received, 0)) AS qty_received
FROM ordhead oh,
ordloc ol
WHERE oh.order_no = ol.order_no
AND oh.STATUS = 'A'
AND ol.qty_ordered > NVL(ol.qty_received, 0)
-- AND ol.order_no in ('18701212') --optional filter for specific PO's
GROUP BY ol.order_no,
ol.item,
ol.location,
oh.po_type,
oh.order_type,
oh.not_before_date,
oh.not_after_date,
oh.otb_eow_date
) oo,
(
SELECT ah.alloc_no,
ah.order_no,
ah.item,
ad.to_loc,
NVL(ad.qty_allocated, 0) AS qty_allocated,
NVL(ad.qty_received, 0) AS qty_received
FROM alloc_header ah,
alloc_detail ad
WHERE ah.alloc_no = ad.alloc_no
) aa,
(
SELECT item,
dept
FROM item_master
WHERE dept IN ('353')
) p
WHERE oo.order_no = aa.order_no(+)
AND oo.item = aa.item(+)
AND oo.item = p.item
AND (oo.qty_ordered - oo.qty_received) > 0
GROUP BY p.dept,
oo.order_no,
oo.po_type,
oo.order_type,
oo.not_before_date,
oo.not_after_date,
oo.otb_eow_date,
oo.item,
CASE
WHEN oo.po_type = 0
THEN oo.location
ELSE aa.to_loc
END;
This is obviously not shortened (except by a few characters), but I get the sense that "shortening" isn't really your requirement: you are trying to get this query to work in a product that doesn't support CTEs.
This is my first post on the forum. Usually I am able to find what I need, but to tell the truth, I am not really sure how to phrase the question for this issue correctly. Therefore, please accept my apologies if there is already an answer on the forum and I missed it.
I am running the following code in an Oracle database via Benthic Software:
SELECT
T1."REGION"
, T1."COUNTRY"
, T1."IDNum"
, T1."CUSTOMER"
, T1."BUSSINESS"
, T3."FISCALYEARMONTH"
, T3."FISCALYEAR"
, SUM(T4."VALUE")
,"HISTORICAL_PURCHASE_FLAG"
FROM
"DATABASE"."SALES" T4
, "DATABASE"."CUSTOMER" T1
, "DATABASE"."PRODUCT" T2
, "DATABASE"."TIME" T3
WHERE
T4."CUSTOMERID" = T1."CUSTOMERID"
AND T4."PRODUCTID" = T2."PRODUCTID"
AND T4."DATEID" = T3."DATEID"
AND T3."FISCALYEAR" IN ('2016')
AND T1."COUNTRY" IN ('ENGLAND', 'France')
GROUP BY
T1."REGION"
, T1."COUNTRY"
, T1."IDNum"
, T1."CUSTOMER"
, T1."BUSSINESS"
, T3."FISCALYEARMONTH"
, T3."FISCALYEAR"
;
This query provides me with information on transactions. As you can see above, I would like to add a column named "HISTORICAL_PURCHASE_FLAG".
I would like the query to take CUSTOMER and FISCALYEARMONTH and then check whether there are any transactions registered for that CUSTOMER up to 2 years in the past.
So let's say I get the following result:
LineNum  REGION  COUNTRY  IDNum  CUSTOMER            BUSSINESS  FISCALYEARMONTH  FISCALYEAR  VALUE      HISTORICAL_PURCHASE_FLAG
1        Europe  ENGLAND  255    Abraxo Cleaner Co.  Chemicals  201605           2016        34,567.00
2        Europe  FRANCE   123    Metal Trade         Heavy      201602           2016        12,500.00
3        Europe  ENGLAND  255    Abraxo Cleaner Co.  Chemicals  201601           2016        8,400.00
LineNum 1 shows a transaction for Abraxo Cleaner Co. registered in 201605, and LineNum 3 is also for Abraxo Cleaner Co. but registered in 201601. What I need the query to do is flag LineNum 1 as 'Existing', because a previous transaction was registered.
On the other hand, LineNum 3 was the first time a transaction was registered for Abraxo Cleaner Co., so that line would be flagged as 'New'.
To sum up, I would like each row of data to be treated individually, checking whether there are any previous records for the CUSTOMER within FISCALYEARMONTH - 24 months.
Thank you in advance for the help.
You can use the LAG function:
SELECT
"REGION"
, "COUNTRY"
, "IDNum"
, "CUSTOMER"
, "BUSSINESS"
, "FISCALYEARMONTH"
, "FISCALYEAR"
, SUM("VALUE")
, MAX(CASE WHEN to_date(prev_fym,'YYYYMM') >= ADD_MONTHS (to_date("FISCALYEARMONTH",'YYYYMM'), -24) THEN 'Existing'
ELSE NULL END) "HISTORICAL_PURCHASE_FLAG"
FROM
(
SELECT
T1."REGION"
, T1."COUNTRY"
, T1."IDNum"
, T1."CUSTOMER"
, T1."BUSSINESS"
, T3."FISCALYEARMONTH"
, T3."FISCALYEAR"
, T4."VALUE"
, LAG ("FISCALYEARMONTH", 1) OVER (PARTITION BY T1."IDNum" ORDER BY T3."FISCALYEARMONTH" DESC) prev_fym
FROM
"DATABASE"."SALES" T4
, "DATABASE"."CUSTOMER" T1
, "DATABASE"."PRODUCT" T2
, "DATABASE"."TIME" T3
WHERE
T4."CUSTOMERID" = T1."CUSTOMERID"
AND T4."PRODUCTID" = T2."PRODUCTID"
AND T4."DATEID" = T3."DATEID"
AND T1."COUNTRY" IN ('ENGLAND', 'France')
AND T3."FISCALYEAR" IN ('2014','2015','2016')
)
WHERE "FISCALYEAR" IN ('2016')
GROUP BY
"REGION"
, "COUNTRY"
, "IDNum"
, "CUSTOMER"
, "BUSSINESS"
, "FISCALYEARMONTH"
, "FISCALYEAR"
;
Using a simplified "input" table... you can use the LAG() analytic function and a comparison condition to populate your last column. I assume your FISCALYEARMONTH is a number; if it is a character field, wrap it in TO_NUMBER(). (It would be much better if you stored these as true Oracle dates, perhaps DATE '2016-06-01' instead of 201606, but I worked with what you currently have... and took advantage of the fact that in numeric YYYYMM format, "24 months ago" simply means "subtract 200".)
with inputs (linenum, idnum, fiscalyearmonth) as (
select 1, 255, 201605 from dual union all
select 2, 123, 201602 from dual union all
select 3, 255, 201601 from dual union all
select 4, 255, 201210 from dual
)
select linenum, idnum, fiscalyearmonth,
case when fiscalyearmonth
- lag(fiscalyearmonth)
over (partition by idnum order by fiscalyearmonth) < 200
then 'Existing' else 'New' end as flag
from inputs
order by linenum;
LINENUM IDNUM FISCALYEARMONTH FLAG
---------- ---------- --------------- --------
1 255 201605 Existing
2 123 201602 New
3 255 201601 New
4 255 201210 New
Another solution might be to outer-join "DATABASE"."SALES" a second time as T5 and filter its fiscal year via WHERE to < T4.FiscalYear - 2. If the column is NULL, the record is new; if the outer join returns a value, the record is historic.
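Since that answer comes without code, here is a hedged sketch of the self outer-join idea, reusing the table and column names from the question but applying the 24-month window the question asks for (assuming FISCALYEARMONTH is numeric YYYYMM, so "24 months back" means "subtract 200"; wrap it in TO_NUMBER() if it is a character column):
SELECT T1."CUSTOMER"
, T3."FISCALYEARMONTH"
, CASE WHEN COUNT(T5."CUSTOMERID") = 0 THEN 'New' ELSE 'Existing' END AS "HISTORICAL_PURCHASE_FLAG"
FROM "DATABASE"."SALES" T4
JOIN "DATABASE"."CUSTOMER" T1 ON T4."CUSTOMERID" = T1."CUSTOMERID"
JOIN "DATABASE"."TIME" T3 ON T4."DATEID" = T3."DATEID"
LEFT JOIN (SELECT s."CUSTOMERID", t."FISCALYEARMONTH"
FROM "DATABASE"."SALES" s
JOIN "DATABASE"."TIME" t ON s."DATEID" = t."DATEID") T5
ON T5."CUSTOMERID" = T4."CUSTOMERID"
AND T5."FISCALYEARMONTH" >= T3."FISCALYEARMONTH" - 200 -- within the previous 24 months
AND T5."FISCALYEARMONTH" < T3."FISCALYEARMONTH"
WHERE T3."FISCALYEAR" IN ('2016')
GROUP BY T1."CUSTOMER", T3."FISCALYEARMONTH";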
You can achieve this using the ROW_NUMBER() function as below; modify as per your need. I assumed 2 years means the previous 24 months from sysdate.
You can run the sub-queries separately to check how they work.
Select
"REGION"
,"COUNTRY"
,"IDNum"
,"CUSTOMER"
,"BUSSINESS"
,"FISCALYEARMONTH"
,"FISCALYEAR"
,"VALUE"
, ( case when ( TXNNO = 1 or TOTAL_TXN_LAST24MTH = 0 ) then 'New' else 'Existing' end ) as "HISTORICAL_PURCHASE_FLAG" -- if no txn in last 24 month or its first txn then 'new' else 'existing'
from
(
select
SubQry."REGION"
, SubQry."COUNTRY"
, SubQry."IDNum"
, SubQry."CUSTOMER"
, SubQry."BUSSINESS"
, SubQry."FISCALYEARMONTH"
, SUBQRY."FISCALYEAR"
, SUBQRY."VALUE"
, ROW_NUMBER() over (partition by SUBQRY."REGION",SUBQRY."COUNTRY",SUBQRY."IDNum",SUBQRY."CUSTOMER",SUBQRY."BUSSINESS" order by SUBQRY."FISCALYEARMONTH") as TXNNO
, SUM(case when (TO_NUMBER(TO_CHAR(sysdate,'YYYYMM')) - SUBQRY."FISCALYEARMONTH") < 24 then 1 else 0 end) over (partition by SUBQRY."REGION",SUBQRY."COUNTRY",SUBQRY."IDNum",SUBQRY."CUSTOMER",SUBQRY."BUSSINESS") as TOTAL_TXN_LAST24MTH
From
(
SELECT
T1."REGION"
, T1."COUNTRY"
, T1."IDNum"
, T1."CUSTOMER"
, T1."BUSSINESS"
, T3."FISCALYEARMONTH"
, T3."FISCALYEAR"
, SUM(T4."VALUE") as VALUE
FROM
"DATABASE"."SALES" T4
, "DATABASE"."CUSTOMER" T1
, "DATABASE"."PRODUCT" T2
, "DATABASE"."TIME" T3
WHERE
T4."CUSTOMERID" = T1."CUSTOMERID"
AND T4."PRODUCTID" = T2."PRODUCTID"
AND T4."DATEID" = T3."DATEID"
AND T3."FISCALYEAR" IN ('2016')
AND T1."COUNTRY" IN ('ENGLAND', 'France')
GROUP BY
T1."REGION"
, T1."COUNTRY"
, T1."IDNum"
, T1."CUSTOMER"
, T1."BUSSINESS"
, T3."FISCALYEARMONTH"
, T3."FISCALYEAR"
) SUBQRY
);
How do I add more of the available columns in the table to this query? I want to add the evnt_dat and evnt_desrp columns.
SELECT DISTINCT
ROW_NUMBER() OVER (ORDER BY
panel_descrp
, cardno
, lname
) rid
, panel_descrp
, cardno
, lname
FROM ev_log
where evnt_descrp = 'local grant'
and datepart(yyyy,EVNT_DAT) = datepart(yyyy,GETDATE())
and DATEPART(mm,evnt_dat) = DATEPART(mm,getdate()) -1
and DATEPART(dd,evnt_dat) >= 1 and DATEPART(dd,evnt_dat)<=2
GROUP BY
panel_descrp
, cardno
, lname
If you want them there, you have to include them in the GROUP BY clause:
SELECT DISTINCT
ROW_NUMBER() OVER (ORDER BY
panel_descrp
, cardno
, lname
) rid
, panel_descrp
, cardno
, lname
, evnt_dat
, evnt_desrp
FROM ev_log
where evnt_descrp = 'local grant'
and datepart(yyyy,EVNT_DAT) = datepart(yyyy,GETDATE())
and DATEPART(mm,evnt_dat) = DATEPART(mm,getdate()) -1
and DATEPART(dd,evnt_dat) >= 1 and DATEPART(dd,evnt_dat)<=2
GROUP BY
panel_descrp
, cardno
, lname
, evnt_dat
, evnt_desrp
;
Or, use some aggregate function on them, for example MAX (then you won't have to put them in the GROUP BY clause, but the meaning of the query may not be what you expect):
SELECT DISTINCT
ROW_NUMBER() OVER (ORDER BY
panel_descrp
, cardno
, lname
) rid
, panel_descrp
, cardno
, lname
, MAX(evnt_dat)
, MAX(evnt_desrp)
FROM ev_log
where evnt_descrp = 'local grant'
and datepart(yyyy,EVNT_DAT) = datepart(yyyy,GETDATE())
and DATEPART(mm,evnt_dat) = DATEPART(mm,getdate()) -1
and DATEPART(dd,evnt_dat) >= 1 and DATEPART(dd,evnt_dat)<=2
GROUP BY
panel_descrp
, cardno
, lname
;