This is my initial table (the dates are in DD/MM/YY format):
ID DAY TYPE_ID TYPE NUM START_DATE END_DATE
---- --------- ------- ---- ---- --------- ---------
4241 15/09/15 2 1 66 01/01/00 31/12/99
4241 16/09/15 2 1 66 01/01/00 31/12/99
4241 17/09/15 9 1 59 17/09/15 18/09/15
4241 18/09/15 9 1 59 17/09/15 18/09/15
4241 19/09/15 2 1 66 01/01/00 31/12/99
4241 20/09/15 2 1 66 01/01/00 31/12/99
4241 15/09/15 3 2 63 01/01/00 31/12/99
4241 16/09/15 8 2 159 16/09/15 17/09/15
4241 17/09/15 8 2 159 16/09/15 17/09/15
4241 18/09/15 3 2 63 01/01/00 31/12/99
4241 19/09/15 3 2 63 01/01/00 31/12/99
4241 20/09/15 3 2 63 01/01/00 31/12/99
2134 15/09/15 2 1 66 01/01/00 31/12/99
2134 16/09/15 2 1 66 01/01/00 31/12/99
2134 17/09/15 9 1 59 17/09/15 18/09/15
2134 18/09/15 9 1 59 17/09/15 18/09/15
2134 19/09/15 2 1 66 01/01/00 31/12/99
2134 20/09/15 2 1 66 01/01/00 31/12/99
2134 15/09/15 3 2 63 01/01/00 31/12/99
2134 16/09/15 8 2 159 16/09/15 17/09/15
2134 17/09/15 8 2 159 16/09/15 17/09/15
2134 18/09/15 3 2 63 01/01/00 31/12/99
2134 19/09/15 3 2 63 01/01/00 31/12/99
2134 20/09/15 3 2 63 01/01/00 31/12/99
I have to create groups with an initial DAY and an end DAY for the same ID and TYPE.
I don't want to group by day; I need to create a new group every time my TYPE_ID changes, based on the initial order (ID, TYPE, DAY ASC).
This is the result that I want to achieve:
ID DAY_INI DAY_END TYPE_ID TYPE NUM START_DATE END_DATE
---- --------- --------- ------- ---- ---- --------- ---------
4241 15/09/15 16/09/15 2 1 66 01/01/00 31/12/99
4241 17/09/15 18/09/15 9 1 59 17/09/15 18/09/15
4241 19/09/15 20/09/15 2 1 66 01/01/00 31/12/99
4241 15/09/15 15/09/15 3 2 63 01/01/00 31/12/99
4241 16/09/15 17/09/15 8 2 159 16/09/15 17/09/15
4241 18/09/15 20/09/15 3 2 63 01/01/00 31/12/99
2134 15/09/15 16/09/15 2 1 66 01/01/00 31/12/99
2134 17/09/15 18/09/15 9 1 59 17/09/15 18/09/15
2134 19/09/15 20/09/15 2 1 66 01/01/00 31/12/99
2134 15/09/15 15/09/15 3 2 63 01/01/00 31/12/99
2134 16/09/15 17/09/15 8 2 159 16/09/15 17/09/15
2134 18/09/15 20/09/15 3 2 63 01/01/00 31/12/99
Could you please give me any clue about how to do it? Thanks!
SQL Fiddle
Oracle 11g R2 Schema Setup:
CREATE TABLE TEST ( ID, DAY, TYPE_ID, TYPE, NUM, START_DATE, END_DATE ) AS
SELECT 4241, DATE '2015-09-15', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-16', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-17', 9, 1, 59, DATE '2015-09-17', DATE '2015-09-18' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-18', 9, 1, 59, DATE '2015-09-17', DATE '2015-09-18' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-19', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-20', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-15', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-16', 8, 2, 159, DATE '2015-09-16', DATE '2015-09-17' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-17', 8, 2, 159, DATE '2015-09-16', DATE '2015-09-17' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-18', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-19', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 4241, DATE '2015-09-20', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-15', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-16', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-17', 9, 1, 59, DATE '2015-09-17', DATE '2015-09-18' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-18', 9, 1, 59, DATE '2015-09-17', DATE '2015-09-18' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-19', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-20', 2, 1, 66, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-15', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-16', 8, 2, 159, DATE '2015-09-16', DATE '2015-09-17' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-17', 8, 2, 159, DATE '2015-09-16', DATE '2015-09-17' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-18', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-19', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
UNION ALL SELECT 2134, DATE '2015-09-20', 3, 2, 63, DATE '2000-01-01', DATE '1999-12-31' FROM DUAL
Query 1:
WITH group_changes AS (
SELECT t.*,
CASE TYPE_ID WHEN LAG( TYPE_ID ) OVER ( PARTITION BY ID, TYPE ORDER BY DAY ) THEN 0 ELSE 1 END AS HAS_CHANGED_GROUP
FROM TEST t
),
groups AS (
SELECT ID, DAY, TYPE_ID, TYPE, NUM, START_DATE, END_DATE,
SUM( HAS_CHANGED_GROUP ) OVER ( PARTITION BY ID, TYPE ORDER BY DAY ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) AS GRP
FROM group_changes
)
SELECT ID,
MIN( DAY ) AS DAY_INI,
MAX( DAY ) AS DAY_END,
MIN( TYPE_ID ) AS TYPE_ID,
TYPE,
MIN( NUM ) AS NUM,
MIN( START_DATE ) AS START_DATE,
MIN( END_DATE ) AS END_DATE
FROM groups
GROUP BY ID, TYPE, GRP
Results:
| ID | DAY_INI | DAY_END | TYPE_ID | TYPE | NUM | START_DATE | END_DATE |
|------|-----------------------------|-----------------------------|---------|------|-----|-----------------------------|-----------------------------|
| 4241 | September, 17 2015 00:00:00 | September, 18 2015 00:00:00 | 9 | 1 | 59 | September, 17 2015 00:00:00 | September, 18 2015 00:00:00 |
| 2134 | September, 15 2015 00:00:00 | September, 15 2015 00:00:00 | 3 | 2 | 63 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 2134 | September, 18 2015 00:00:00 | September, 20 2015 00:00:00 | 3 | 2 | 63 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 4241 | September, 15 2015 00:00:00 | September, 16 2015 00:00:00 | 2 | 1 | 66 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 4241 | September, 19 2015 00:00:00 | September, 20 2015 00:00:00 | 2 | 1 | 66 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 4241 | September, 15 2015 00:00:00 | September, 15 2015 00:00:00 | 3 | 2 | 63 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 4241 | September, 16 2015 00:00:00 | September, 17 2015 00:00:00 | 8 | 2 | 159 | September, 16 2015 00:00:00 | September, 17 2015 00:00:00 |
| 2134 | September, 17 2015 00:00:00 | September, 18 2015 00:00:00 | 9 | 1 | 59 | September, 17 2015 00:00:00 | September, 18 2015 00:00:00 |
| 2134 | September, 15 2015 00:00:00 | September, 16 2015 00:00:00 | 2 | 1 | 66 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 2134 | September, 19 2015 00:00:00 | September, 20 2015 00:00:00 | 2 | 1 | 66 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
| 2134 | September, 16 2015 00:00:00 | September, 17 2015 00:00:00 | 8 | 2 | 159 | September, 16 2015 00:00:00 | September, 17 2015 00:00:00 |
| 4241 | September, 18 2015 00:00:00 | September, 20 2015 00:00:00 | 3 | 2 | 63 | January, 01 2000 00:00:00 | December, 31 1999 00:00:00 |
Note that the results are unordered. To fix that, add an enumeration to the original data set (using ROW_NUMBER or ROWNUM), carry MIN(enumeration) through for each group, and then sort the groups by that enumeration.
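A minimal sketch of that fix, assuming the same TEST table as above (the ordered CTE and RN column are illustrative names):
WITH ordered AS (
  SELECT t.*, ROWNUM AS RN   -- enumeration taken from the initial order
  FROM ( SELECT * FROM TEST ORDER BY ID, TYPE, DAY ) t
),
group_changes AS (
  SELECT o.*,
         CASE TYPE_ID WHEN LAG( TYPE_ID ) OVER ( PARTITION BY ID, TYPE ORDER BY DAY ) THEN 0 ELSE 1 END AS HAS_CHANGED_GROUP
  FROM ordered o
),
groups AS (
  SELECT g.*,
         SUM( HAS_CHANGED_GROUP ) OVER ( PARTITION BY ID, TYPE ORDER BY DAY ) AS GRP
  FROM group_changes g
)
SELECT ID,
       MIN( DAY ) AS DAY_INI,
       MAX( DAY ) AS DAY_END,
       MIN( TYPE_ID ) AS TYPE_ID,
       TYPE,
       MIN( NUM ) AS NUM,
       MIN( START_DATE ) AS START_DATE,
       MIN( END_DATE ) AS END_DATE
FROM groups
GROUP BY ID, TYPE, GRP
ORDER BY MIN( RN )   -- groups come out in the original (ID, TYPE, DAY) order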
I am using Snowflake SQL, but I guess this can be solved in any SQL. So I have data like this:
RA_MEMBER_ID YEAR QUARTER MONTH Monthly_TOTAL_PURCHASE CATEGORY
1000 2020 1 1 105 CAT10
1000 2020 1 1 57 CAT13
1000 2020 1 2 107 CAT10
1000 2020 1 2 59 CAT13
1000 2020 1 3 109 CAT11
1000 2020 1 3 61 CAT14
1000 2020 2 4 111 CAT11
1000 2020 2 4 63 CAT14
1000 2020 2 5 113 CAT12
1000 2020 2 5 65 CAT15
1000 2020 2 6 115 CAT12
1000 2020 2 6 67 CAT15
And I need data like this:
RA_MEMBER_ID YEAR QUARTER MONTH Monthly_TOTAL_PURCHASE CATEGORY Monthly_rank Quarterly_Total_purchase Quarter_category Quarter_rank Yearly_Total_purchase Yearly_category Yearly_rank
1000 2020 1 1 105 CAT10 1 105 CAT10 1 105 CAT10 1
1000 2020 1 1 57 CAT13 2 57 CAT13 2 57 CAT13 2
1000 2020 1 2 107 CAT10 1 212 CAT10 1 212 CAT10 1
1000 2020 1 2 59 CAT13 2 116 CAT13 2 116 CAT13 2
1000 2020 1 3 109 CAT11 1 212 CAT10 1 212 CAT10 1
1000 2020 1 3 61 CAT14 2 116 CAT13 2 116 CAT13 2
1000 2020 2 4 111 CAT11 1 111 CAT11 1 212 CAT10 1
1000 2020 2 4 63 CAT14 2 63 CAT14 2 124 CAT14 2
1000 2020 2 5 113 CAT12 1 113 CAT12 1 212 CAT10 1
1000 2020 2 5 65 CAT15 2 65 CAT15 2 124 CAT14 2
1000 2020 2 6 115 CAT12 1 228 CAT12 1 228 CAT12 1
1000 2020 2 6 67 CAT15 2 132 CAT15 2 132 CAT15 2
So basically, I have the top two categories by purchase amount for each of the first six months. I need the same quarterly, based on which month of the quarter it is: if it is February, the top 2 categories and amounts should be calculated over both January and February; for March, over all three months of the quarter. April starts a new quarter, so it is the same as the monthly rank; for May, calculate over April and May together. The yearly figures accumulate the same way across the whole year.
I have tried a lot of things but nothing seems to give me what I want.
The solution should be generic enough because there can be many other months and years.
I really need help with this.
Not sure if the below is what you are after. I assume that everything is category-based:
create or replace table test (
ra_member_id int,
year int,
quarter int,
month int,
monthly_purchase int,
category varchar
);
insert into test values
(1000, 2020, 1,1, 105, 'cat10'),
(1000, 2020, 1,1, 57, 'cat13'),
(1000, 2020, 1,2, 107, 'cat10'),
(1000, 2020, 1,2, 59, 'cat13'),
(1000, 2020, 1,3, 109, 'cat11'),
(1000, 2020, 1,3, 61, 'cat14'),
(1000, 2020, 2,4, 111, 'cat11'),
(1000, 2020, 2,4, 63, 'cat14'),
(1000, 2020, 2,5, 113, 'cat12'),
(1000, 2020, 2,5, 65, 'cat15'),
(1000, 2020, 2,6, 115, 'cat12'),
(1000, 2020, 2,6, 67, 'cat15');
WITH BASE as (
select
RA_MEMBER_ID,
YEAR,
QUARTER,
MONTH,
CATEGORY,
MONTHLY_PURCHASE,
LAG(MONTHLY_PURCHASE) OVER (PARTITION BY QUARTER, CATEGORY ORDER BY MONTH) AS QUARTERLY_PURCHASE_LAG,
IFNULL(QUARTERLY_PURCHASE_LAG, 0) + MONTHLY_PURCHASE AS QUARTERLY_PURCHASE,
LAG(MONTHLY_PURCHASE) OVER (PARTITION BY YEAR, CATEGORY ORDER BY MONTH) AS YEARLY_PURCHASE_LAG,
IFNULL(YEARLY_PURCHASE_LAG, 0) + MONTHLY_PURCHASE AS YEARLY_PURCHASE
FROM
TEST
),
BASE_RANK AS (
SELECT
RA_MEMBER_ID,
YEAR,
QUARTER,
MONTH,
CATEGORY,
MONTHLY_PURCHASE,
RANK() OVER (PARTITION BY MONTH ORDER BY MONTHLY_PURCHASE DESC) as MONTHLY_RANK,
QUARTERLY_PURCHASE,
RANK() OVER (PARTITION BY QUARTER ORDER BY QUARTERLY_PURCHASE DESC) as QUARTERLY_RANK,
YEARLY_PURCHASE,
RANK() OVER (PARTITION BY YEAR ORDER BY YEARLY_PURCHASE DESC) as YEARLY_RANK
FROM BASE
),
MAIN AS (
SELECT
RA_MEMBER_ID,
YEAR,
QUARTER,
MONTH,
CATEGORY,
MONTHLY_PURCHASE,
MONTHLY_RANK,
QUARTERLY_PURCHASE,
QUARTERLY_RANK,
YEARLY_PURCHASE,
YEARLY_RANK
FROM BASE_RANK
)
SELECT * FROM MAIN
ORDER BY YEAR, QUARTER, MONTH
;
Result:
+--------------+------+---------+-------+----------+------------------+--------------+--------------------+----------------+-----------------+-------------+
| RA_MEMBER_ID | YEAR | QUARTER | MONTH | CATEGORY | MONTHLY_PURCHASE | MONTHLY_RANK | QUARTERLY_PURCHASE | QUARTERLY_RANK | YEARLY_PURCHASE | YEARLY_RANK |
|--------------+------+---------+-------+----------+------------------+--------------+--------------------+----------------+-----------------+-------------|
| 1000 | 2020 | 1 | 1 | cat10 | 105 | 1 | 105 | 4 | 105 | 9 |
| 1000 | 2020 | 1 | 1 | cat13 | 57 | 2 | 57 | 6 | 57 | 12 |
| 1000 | 2020 | 1 | 2 | cat10 | 107 | 1 | 212 | 1 | 212 | 3 |
| 1000 | 2020 | 1 | 2 | cat13 | 59 | 2 | 116 | 2 | 116 | 6 |
| 1000 | 2020 | 1 | 3 | cat11 | 109 | 1 | 109 | 3 | 109 | 8 |
| 1000 | 2020 | 1 | 3 | cat14 | 61 | 2 | 61 | 5 | 61 | 11 |
| 1000 | 2020 | 2 | 4 | cat11 | 111 | 1 | 111 | 4 | 220 | 2 |
| 1000 | 2020 | 2 | 4 | cat14 | 63 | 2 | 63 | 6 | 124 | 5 |
| 1000 | 2020 | 2 | 5 | cat12 | 113 | 1 | 113 | 3 | 113 | 7 |
| 1000 | 2020 | 2 | 5 | cat15 | 65 | 2 | 65 | 5 | 65 | 10 |
| 1000 | 2020 | 2 | 6 | cat12 | 115 | 1 | 228 | 1 | 228 | 1 |
| 1000 | 2020 | 2 | 6 | cat15 | 67 | 2 | 132 | 2 | 132 | 4 |
+--------------+------+---------+-------+----------+------------------+--------------+--------------------+----------------+-----------------+-------------+
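Note that LAG only reaches back one row per category, so the quarterly and yearly totals above stop accumulating after two months (compare month 2's 212 with month 3's 109). A hedged alternative sketch: build quarter-to-date totals per category with a non-equi self-join, then rank them per month. It assumes the same test table as above; the qtd and quarter_rank names are illustrative:
WITH qtd AS (
    SELECT t1.ra_member_id,
           t1.year,
           t1.quarter,
           t1.month,
           t2.category,
           SUM(t2.monthly_purchase) AS qtd_purchase
    FROM (SELECT DISTINCT ra_member_id, year, quarter, month FROM test) t1
    JOIN test t2
      ON  t2.ra_member_id = t1.ra_member_id
      AND t2.year         = t1.year
      AND t2.quarter      = t1.quarter
      AND t2.month       <= t1.month   -- every month so far in this quarter
    GROUP BY t1.ra_member_id, t1.year, t1.quarter, t1.month, t2.category
)
SELECT q.*,
       RANK() OVER (PARTITION BY ra_member_id, year, quarter, month
                    ORDER BY qtd_purchase DESC) AS quarter_rank
FROM qtd q
ORDER BY year, quarter, month, quarter_rank;
Filtering on quarter_rank <= 2 gives the top two quarter-to-date categories as of each month; dropping the quarter predicate from the join gives the year-to-date version.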
A client (an e-commerce store) doesn't have a very well-built database. For instance, there are many users with a lot of shopping orders (i.e. different order IDs) for exactly the same products on the same day. It is obvious that these seemingly multiple orders are, in many cases, just one unique order. At least that's what we have decided to assume, to simplify the issue. (I am trying to do some basic data analytics.)
My table might look like this:
| Email | OrderID | Order_date | TotalAmount |
| ----------------- | --------- | ---------------- | ---------------- |
|customerA#gmail.com| 1 |Jan 01 2021 1:00PM| 2000 |
|customerA#gmail.com| 2 |Jan 01 2021 1:03PM| 2000 |
|customerA#gmail.com| 3 |Jan 01 2021 1:05PM| 2000 |
|customerA#gmail.com| 4 |Jan 01 2021 1:10PM| 2000 |
|customerA#gmail.com| 5 |Jan 01 2021 1:14PM| 2000 |
|customerA#gmail.com| 6 |Jan 03 2021 3:55PM| 3000 |
|customerA#gmail.com| 7 |Jan 03 2021 4:00PM| 3000 |
|customerA#gmail.com| 8 |Jan 03 2021 4:05PM| 3000 |
|customerB#gmail.com| 9 |Jan 04 2021 2:10PM| 1000 |
|customerB#gmail.com| 10 |Jan 04 2021 2:20PM| 1000 |
|customerB#gmail.com| 11 |Jan 04 2021 2:30PM| 1000 |
|customerB#gmail.com| 12 |Jan 06 2021 5:00PM| 5000 |
|customerC#gmail.com| 13 |Jan 09 2021 3:00PM| 4000 |
|customerC#gmail.com| 14 |Jan 09 2021 3:06PM| 4000 |
And my desired result would look like this:
| Email | OrderID | Order_date | TotalAmount |
| ----------------- | --------- | ---------------- | ---------------- |
|customerA#gmail.com| 5 |Jan 01 2021 1:14PM| 2000 |
|customerA#gmail.com| 8 |Jan 03 2021 4:05PM| 3000 |
|customerB#gmail.com| 11 |Jan 04 2021 2:30PM| 1000 |
|customerB#gmail.com| 12 |Jan 06 2021 5:00PM| 5000 |
|customerC#gmail.com| 14 |Jan 09 2021 3:06PM| 4000 |
I would guess this might be a common problem, but is there a simple solution to this?
Maybe there is, but I certainly can't seem to come up with one any time soon. I'd like to see even a complex solution, btw :-)
Thank you for any kind of help you can provide!
Do you mean this?
WITH
indata(Email,OrderID,Order_ts,TotalAmount) AS (
SELECT 'customerA#gmail.com', 1,TO_TIMESTAMP( 'Jan 01 2021 01:00PM','Mon DD YYYY HH12:MIAM'),2000
UNION ALL SELECT 'customerA#gmail.com', 2,TO_TIMESTAMP( 'Jan 01 2021 01:03PM','Mon DD YYYY HH12:MIAM'),2000
UNION ALL SELECT 'customerA#gmail.com', 3,TO_TIMESTAMP( 'Jan 01 2021 01:05PM','Mon DD YYYY HH12:MIAM'),2000
UNION ALL SELECT 'customerA#gmail.com', 4,TO_TIMESTAMP( 'Jan 01 2021 01:10PM','Mon DD YYYY HH12:MIAM'),2000
UNION ALL SELECT 'customerA#gmail.com', 5,TO_TIMESTAMP( 'Jan 01 2021 01:14PM','Mon DD YYYY HH12:MIAM'),2000
UNION ALL SELECT 'customerA#gmail.com', 6,TO_TIMESTAMP( 'Jan 03 2021 03:55PM','Mon DD YYYY HH12:MIAM'),3000
UNION ALL SELECT 'customerA#gmail.com', 7,TO_TIMESTAMP( 'Jan 03 2021 04:00PM','Mon DD YYYY HH12:MIAM'),3000
UNION ALL SELECT 'customerA#gmail.com', 8,TO_TIMESTAMP( 'Jan 03 2021 04:05PM','Mon DD YYYY HH12:MIAM'),3000
UNION ALL SELECT 'customerB#gmail.com', 9,TO_TIMESTAMP( 'Jan 04 2021 02:10PM','Mon DD YYYY HH12:MIAM'),1000
UNION ALL SELECT 'customerB#gmail.com',10,TO_TIMESTAMP( 'Jan 04 2021 02:20PM','Mon DD YYYY HH12:MIAM'),1000
UNION ALL SELECT 'customerB#gmail.com',11,TO_TIMESTAMP( 'Jan 04 2021 02:30PM','Mon DD YYYY HH12:MIAM'),1000
UNION ALL SELECT 'customerB#gmail.com',12,TO_TIMESTAMP( 'Jan 06 2021 05:00PM','Mon DD YYYY HH12:MIAM'),5000
UNION ALL SELECT 'customerC#gmail.com',13,TO_TIMESTAMP( 'Jan 09 2021 03:00PM','Mon DD YYYY HH12:MIAM'),4000
UNION ALL SELECT 'customerC#gmail.com',14,TO_TIMESTAMP( 'Jan 09 2021 03:06PM','Mon DD YYYY HH12:MIAM'),4000
)
,
-- Use ROW_NUMBER() to identify the last row within each day (ordered descending, so it gets 1).
-- You can't filter on an OLAP function directly, so it goes in a subselect, with the WHERE condition in the final SELECT.
with_rank AS (
SELECT
*
, ROW_NUMBER() OVER(PARTITION BY email, CAST(order_ts AS DATE) ORDER BY order_ts DESC) AS rank -- partition by the calendar date, not DAY(), so the 1st of two different months don't collide
FROM INDATA
)
SELECT
*
FROM with_rank
WHERE rank = 1;
-- out Email | OrderID | Order_ts | TotalAmount | rank
-- out ---------------------+---------+---------------------+-------------+------
-- out customerA#gmail.com | 5 | 2021-01-01 13:14:00 | 2000 | 1
-- out customerA#gmail.com | 8 | 2021-01-03 16:05:00 | 3000 | 1
-- out customerB#gmail.com | 11 | 2021-01-04 14:30:00 | 1000 | 1
-- out customerB#gmail.com | 12 | 2021-01-06 17:00:00 | 5000 | 1
-- out customerC#gmail.com | 14 | 2021-01-09 15:06:00 | 4000 | 1
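If two genuinely distinct orders (different amounts) could land on the same day, partitioning by the amount as well would keep them apart. A minimal sketch, reusing the indata CTE above:
SELECT email, orderid, order_ts, totalamount
FROM (
    SELECT i.*,
           ROW_NUMBER() OVER (
               PARTITION BY email, CAST(order_ts AS DATE), totalamount  -- one group per customer, day and amount
               ORDER BY order_ts DESC
           ) AS rn
    FROM indata i
) d
WHERE rn = 1;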
I can't find a specific answer to this question, but apologies if it has been asked previously.
I have the following example table, which I have kept simple, but it contains more rows and Types. It gets updated frequently.
Type From To Qty
1 2016-01-01 00:00:00.0000000 2016-01-03 00:00:00.0000000 30
1 2016-01-04 00:00:00.0000000 2016-01-05 00:00:00.0000000 31
1 2016-01-06 00:00:00.0000000 NULL 31
2 2016-04-24 00:00:00.0000000 NULL 15
I want to be able to update a table every day (as shown below) so it shows all of the dates between (and including) the From and To dates. The Qty for the relevant date must be displayed up to today's date where To is NULL.
Type Date Qty
1 2016-01-01 00:00:00.0000000 30
1 2016-01-02 00:00:00.0000000 30
1 2016-01-03 00:00:00.0000000 30
1 2016-01-04 00:00:00.0000000 31
1 2016-01-05 00:00:00.0000000 31
1 2016-01-06 00:00:00.0000000 31
1 2016-01-07 00:00:00.0000000 31
1 .... up to today where TO is NULL
1 2016-07-25 00:00:00.0000000 31
2 2016-04-24 00:00:00.0000000 15
2 .... up to today where TO is NULL
2 2016-07-25 00:00:00.0000000 15
Thank you in advance for your help.
Using a Numbers table:
select t.[Type], b.upd, t.Qty
from #test t
cross apply
(
    -- one row per day from the From date through To (or today when To is null);
    -- assumes the numbers table starts at 0 so the From date itself is included
    select dateadd(day, n, t.fromdate)
    from numbers
    where n <= case when t.todate is null
                    then datediff(day, t.fromdate, getdate())
                    else datediff(day, t.fromdate, t.todate) end
) b(upd)
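If you don't have a numbers table yet, one common way to build one (a sketch; the numbers name and the 10,000-row size are assumptions):
-- Build a 0..9999 numbers table from a cross join of two system catalogs.
SELECT TOP (10000)
       ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) - 1 AS n
INTO numbers
FROM sys.all_objects a
CROSS JOIN sys.all_objects b;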
You can do this using a recursive CTE to generate all of the dates and JOIN to that for the result:
Test Data
Create Table Test
(
[Type] Int,
[From] Date,
[To] Date,
Qty Int
)
Insert Test
Values
(1, '2016-01-01', '2016-01-03', 30 ),
(1, '2016-01-04', '2016-01-05', 31 ),
(1, '2016-01-06', NULL, 31 ),
(2, '2016-04-24', NULL, 15 )
Query
;With MinMax As
(
Select Min([From]) MinFrom,
Max([To]) MaxTo,
Convert(Date, GetDate()) Today
From Test
), Date (Date) As
(
Select MinFrom
From MinMax
Union All
Select DateAdd(Day, 1, Date)
From Date
Where Date < (Select MaxTo From MinMax)
Or Date < (Select Today From MinMax)
)
Select T.[Type],
D.[Date],
T.Qty
From Test T
Join Date D On D.Date Between T.[From] And Coalesce(T.[To], Convert(Date, GetDate()))
Order By T.[Type], D.[Date]
Option (MaxRecursion 0)
Results
Type Date Qty
1 2016-01-01 30
1 2016-01-02 30
1 2016-01-03 30
1 2016-01-04 31
1 2016-01-05 31
1 2016-01-06 31
1 2016-01-07 31
1 2016-01-08 31
1 2016-01-09 31
1 2016-01-10 31
1 2016-01-11 31
1 2016-01-12 31
1 2016-01-13 31
1 2016-01-14 31
1 2016-01-15 31
1 2016-01-16 31
1 2016-01-17 31
1 2016-01-18 31
1 2016-01-19 31
1 2016-01-20 31
1 2016-01-21 31
1 2016-01-22 31
1 2016-01-23 31
1 2016-01-24 31
1 2016-01-25 31
1 2016-01-26 31
1 2016-01-27 31
1 2016-01-28 31
1 2016-01-29 31
1 2016-01-30 31
1 2016-01-31 31
1 2016-02-01 31
1 2016-02-02 31
1 2016-02-03 31
1 2016-02-04 31
1 2016-02-05 31
1 2016-02-06 31
1 2016-02-07 31
1 2016-02-08 31
1 2016-02-09 31
1 2016-02-10 31
1 2016-02-11 31
1 2016-02-12 31
1 2016-02-13 31
1 2016-02-14 31
1 2016-02-15 31
1 2016-02-16 31
1 2016-02-17 31
1 2016-02-18 31
1 2016-02-19 31
1 2016-02-20 31
1 2016-02-21 31
1 2016-02-22 31
1 2016-02-23 31
1 2016-02-24 31
1 2016-02-25 31
1 2016-02-26 31
1 2016-02-27 31
1 2016-02-28 31
1 2016-02-29 31
1 2016-03-01 31
1 2016-03-02 31
1 2016-03-03 31
1 2016-03-04 31
1 2016-03-05 31
1 2016-03-06 31
1 2016-03-07 31
1 2016-03-08 31
1 2016-03-09 31
1 2016-03-10 31
1 2016-03-11 31
1 2016-03-12 31
1 2016-03-13 31
1 2016-03-14 31
1 2016-03-15 31
1 2016-03-16 31
1 2016-03-17 31
1 2016-03-18 31
1 2016-03-19 31
1 2016-03-20 31
1 2016-03-21 31
1 2016-03-22 31
1 2016-03-23 31
1 2016-03-24 31
1 2016-03-25 31
1 2016-03-26 31
1 2016-03-27 31
1 2016-03-28 31
1 2016-03-29 31
1 2016-03-30 31
1 2016-03-31 31
1 2016-04-01 31
1 2016-04-02 31
1 2016-04-03 31
1 2016-04-04 31
1 2016-04-05 31
1 2016-04-06 31
1 2016-04-07 31
1 2016-04-08 31
1 2016-04-09 31
1 2016-04-10 31
1 2016-04-11 31
1 2016-04-12 31
1 2016-04-13 31
1 2016-04-14 31
1 2016-04-15 31
1 2016-04-16 31
1 2016-04-17 31
1 2016-04-18 31
1 2016-04-19 31
1 2016-04-20 31
1 2016-04-21 31
1 2016-04-22 31
1 2016-04-23 31
1 2016-04-24 31
1 2016-04-25 31
1 2016-04-26 31
1 2016-04-27 31
1 2016-04-28 31
1 2016-04-29 31
1 2016-04-30 31
1 2016-05-01 31
1 2016-05-02 31
1 2016-05-03 31
1 2016-05-04 31
1 2016-05-05 31
1 2016-05-06 31
1 2016-05-07 31
1 2016-05-08 31
1 2016-05-09 31
1 2016-05-10 31
1 2016-05-11 31
1 2016-05-12 31
1 2016-05-13 31
1 2016-05-14 31
1 2016-05-15 31
1 2016-05-16 31
1 2016-05-17 31
1 2016-05-18 31
1 2016-05-19 31
1 2016-05-20 31
1 2016-05-21 31
1 2016-05-22 31
1 2016-05-23 31
1 2016-05-24 31
1 2016-05-25 31
1 2016-05-26 31
1 2016-05-27 31
1 2016-05-28 31
1 2016-05-29 31
1 2016-05-30 31
1 2016-05-31 31
1 2016-06-01 31
1 2016-06-02 31
1 2016-06-03 31
1 2016-06-04 31
1 2016-06-05 31
1 2016-06-06 31
1 2016-06-07 31
1 2016-06-08 31
1 2016-06-09 31
1 2016-06-10 31
1 2016-06-11 31
1 2016-06-12 31
1 2016-06-13 31
1 2016-06-14 31
1 2016-06-15 31
1 2016-06-16 31
1 2016-06-17 31
1 2016-06-18 31
1 2016-06-19 31
1 2016-06-20 31
1 2016-06-21 31
1 2016-06-22 31
1 2016-06-23 31
1 2016-06-24 31
1 2016-06-25 31
1 2016-06-26 31
1 2016-06-27 31
1 2016-06-28 31
1 2016-06-29 31
1 2016-06-30 31
1 2016-07-01 31
1 2016-07-02 31
1 2016-07-03 31
1 2016-07-04 31
1 2016-07-05 31
1 2016-07-06 31
1 2016-07-07 31
1 2016-07-08 31
1 2016-07-09 31
1 2016-07-10 31
1 2016-07-11 31
1 2016-07-12 31
1 2016-07-13 31
1 2016-07-14 31
1 2016-07-15 31
1 2016-07-16 31
1 2016-07-17 31
1 2016-07-18 31
1 2016-07-19 31
1 2016-07-20 31
1 2016-07-21 31
1 2016-07-22 31
1 2016-07-23 31
1 2016-07-24 31
1 2016-07-25 31
1 2016-07-26 31
2 2016-04-24 15
2 2016-04-25 15
2 2016-04-26 15
2 2016-04-27 15
2 2016-04-28 15
2 2016-04-29 15
2 2016-04-30 15
2 2016-05-01 15
2 2016-05-02 15
2 2016-05-03 15
2 2016-05-04 15
2 2016-05-05 15
2 2016-05-06 15
2 2016-05-07 15
2 2016-05-08 15
2 2016-05-09 15
2 2016-05-10 15
2 2016-05-11 15
2 2016-05-12 15
2 2016-05-13 15
2 2016-05-14 15
2 2016-05-15 15
2 2016-05-16 15
2 2016-05-17 15
2 2016-05-18 15
2 2016-05-19 15
2 2016-05-20 15
2 2016-05-21 15
2 2016-05-22 15
2 2016-05-23 15
2 2016-05-24 15
2 2016-05-25 15
2 2016-05-26 15
2 2016-05-27 15
2 2016-05-28 15
2 2016-05-29 15
2 2016-05-30 15
2 2016-05-31 15
2 2016-06-01 15
2 2016-06-02 15
2 2016-06-03 15
2 2016-06-04 15
2 2016-06-05 15
2 2016-06-06 15
2 2016-06-07 15
2 2016-06-08 15
2 2016-06-09 15
2 2016-06-10 15
2 2016-06-11 15
2 2016-06-12 15
2 2016-06-13 15
2 2016-06-14 15
2 2016-06-15 15
2 2016-06-16 15
2 2016-06-17 15
2 2016-06-18 15
2 2016-06-19 15
2 2016-06-20 15
2 2016-06-21 15
2 2016-06-22 15
2 2016-06-23 15
2 2016-06-24 15
2 2016-06-25 15
2 2016-06-26 15
2 2016-06-27 15
2 2016-06-28 15
2 2016-06-29 15
2 2016-06-30 15
2 2016-07-01 15
2 2016-07-02 15
2 2016-07-03 15
2 2016-07-04 15
2 2016-07-05 15
2 2016-07-06 15
2 2016-07-07 15
2 2016-07-08 15
2 2016-07-09 15
2 2016-07-10 15
2 2016-07-11 15
2 2016-07-12 15
2 2016-07-13 15
2 2016-07-14 15
2 2016-07-15 15
2 2016-07-16 15
2 2016-07-17 15
2 2016-07-18 15
2 2016-07-19 15
2 2016-07-20 15
2 2016-07-21 15
2 2016-07-22 15
2 2016-07-23 15
2 2016-07-24 15
2 2016-07-25 15
2 2016-07-26 15
So my data looks like this:
+-----------+---------+-------------+-------+-------------+--+
| time | Outlets | Meal_Period | cover | day_of_week | |
+-----------+---------+-------------+-------+-------------+--+
| 10/1/2013 | 72 | 1 | 0 | Tuesday | |
| 10/1/2013 | 72 | 2 | 31 | Tuesday | |
| 10/1/2013 | 72 | 3 | 116 | Tuesday | |
| 10/1/2013 | 72 | 6 | 32 | Tuesday | |
| 10/1/2013 | 187 | 17 | 121 | Tuesday | |
| 10/1/2013 | 187 | 18 | 214 | Tuesday | |
| 10/1/2013 | 187 | 19 | 204 | Tuesday | |
| 10/1/2013 | 101 | 2 | 0 | Tuesday | |
| 10/1/2013 | 101 | 3 | 0 | Tuesday | |
| 10/1/2013 | 101 | 4 | 0 | Tuesday | |
| 10/1/2013 | 101 | 6 | 0 | Tuesday | |
| 10/1/2013 | 282 | 1 | 17 | Tuesday | |
| 10/1/2013 | 282 | 2 | 207 | Tuesday | |
| 10/1/2013 | 282 | 3 | 340 | Tuesday | |
| 10/1/2013 | 282 | 6 | 4 | Tuesday | |
| 10/1/2013 | 103 | 1 | 0 | Tuesday | |
+-----------+---------+-------------+-------+-------------+--+
The code is:
IF OBJECT_ID('tempdb.dbo.#time') IS NOT NULL
DROP TABLE #time
SELECT DATEADD(dd, 0, DATEDIFF(DD, 0, open_dttime)) AS 'time'
,profit_center_id AS 'Outlets'
,meal_period_id AS 'Meal_Period'
,sum(num_covers) AS 'Number_Covers'
INTO #time
FROM [STOF_Infogen].[dbo].[Order_Header]
WHERE CasinoID = 'csg'
AND profit_center_id IN (
'102'
,'100'
,'283'
,'101'
,'282'
,'187'
,'280'
,'103'
,'281'
,'72'
,'183'
)
AND (
open_dttime BETWEEN '2014-02-01 06:30'
AND '2014-03-01 06:30'
)
GROUP BY profit_center_id
,open_dttime
,meal_period_id
ORDER BY profit_center_id
,meal_period_id
IF OBJECT_ID('tempdb.dbo.#time2') IS NOT NULL
DROP TABLE #time2
SELECT [TIME]
,Outlets AS 'Outlets'
,meal_period AS 'Meal_Period'
,SUM(number_covers) AS 'cover'
,DATENAME(DW, [time]) AS 'day_of_week'
INTO #time2
FROM #time
GROUP BY [TIME]
,Outlets
,Meal_Period
ORDER BY [TIME] ASC
,Outlets
,Meal_Period
SELECT *
FROM #time2
I created the temporary tables for my data, but I'm having two issues:
I would like to group the rows where the profit centres are 187 and 282 while still keeping the other rows.
For some reason I can't tweak the date stamp; it excludes the last day of the month.
As always, any help is appreciated.
Making some test data:
DECLARE @MealInfo TABLE
(
MealTime DATETIME,
Outlets VARCHAR(10),
Meal_Period int,
Cover INT
)
INSERT INTO @MealInfo
VALUES
('10/1/2013', '72', 1, 0),
('10/1/2013', '72', 2, 31),
('10/1/2013', '72', 3, 116),
('10/1/2013', '72', 6, 32),
('10/1/2013', '187', 17, 121),
('10/1/2013', '187', 18, 214),
('10/1/2013', '187', 19, 204),
('10/1/2013', '101', 2, 0),
('10/1/2013', '101', 3, 0),
('10/1/2013', '101', 4, 0),
('10/1/2013', '101', 6, 0),
('10/1/2013', '282', 1, 17),
('10/1/2013', '282', 2, 207),
('10/1/2013', '282', 3, 340),
('10/1/2013', '282', 6, 4),
('10/1/2013', '103', 1, 0);
Because you want to group 187 and 282 together, I use a CASE expression to lump them into one outlet, and then we can group on the outlets to break out the sums:
SELECT
m.MealTime,
m.Outlets,
m.Meal_Period,
SUM(m.Cover) AS Number_Covers
FROM
(
SELECT mi.MealTime,
(CASE WHEN mi.Outlets IN ('187', '282') THEN '187+282' ELSE mi.Outlets END) Outlets,
mi.Meal_Period,
mi.Cover
FROM @MealInfo mi
) m
GROUP BY m.MealTime, m.Outlets, m.Meal_Period
Here is the output:
MealTime Outlets Meal_Period Number_Covers
2013-10-01 00:00:00.000 101 2 0
2013-10-01 00:00:00.000 101 3 0
2013-10-01 00:00:00.000 101 4 0
2013-10-01 00:00:00.000 101 6 0
2013-10-01 00:00:00.000 103 1 0
2013-10-01 00:00:00.000 187+282 1 17
2013-10-01 00:00:00.000 187+282 2 207
2013-10-01 00:00:00.000 187+282 3 340
2013-10-01 00:00:00.000 187+282 6 4
2013-10-01 00:00:00.000 187+282 17 121
2013-10-01 00:00:00.000 187+282 18 214
2013-10-01 00:00:00.000 187+282 19 204
2013-10-01 00:00:00.000 72 1 0
2013-10-01 00:00:00.000 72 2 31
2013-10-01 00:00:00.000 72 3 116
2013-10-01 00:00:00.000 72 6 32
If the two outlets had overlapping meal periods, the SUM would combine both outlets' covers into a single row.
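The same CASE can also be folded straight into the #time2 step from the question, so no derived table is needed; a sketch against the #time temp table defined there:
SELECT [time],
       CASE WHEN Outlets IN ('187', '282') THEN '187+282' ELSE Outlets END AS Outlets,
       Meal_Period,
       SUM(Number_Covers) AS cover,
       DATENAME(DW, [time]) AS day_of_week
FROM #time
GROUP BY [time],
         CASE WHEN Outlets IN ('187', '282') THEN '187+282' ELSE Outlets END,
         Meal_Period;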
How can I convert rows to columns in Oracle?
The data is as follows:
AREA_CODE PREFIX
21 48
21 66
21 80
21 86
21 58
21 59
21 51
21 81
21 35
21 56
21 78
21 34
21 49
21 79
21 36
21 99
21 82
21 38
21 32
21 65
22 26
22 20
22 27
22 34
22 33
22 21
22 38
22 36
232 22
232 26
232 27
233 88
233 86
233 85
233 87
233 89
233 82
235 56
235 53
235 87
235 86
The required output would be:
AREA_CODE P1 P2 P3 P4 P5 P6 P7 P8 P9 P10 P11 P12 P13
21 48 66 80 86 58 59 51 81 35 56 78 34 49
22 26 20 27 34 33 21 38 36
232 22 26 27
233 88 86 85 87 89 82
235 56 53 87 86
Assuming that the number of prefixes per area code is at most 10 and the table name is table_name, this query can be used in Oracle 10g:
with tab as (select AREA_CODE,
PREFIX,
row_NUMBER() over(partition by AREA_CODE order by null) rn
from table_name)
select AREA_CODE,
min(decode(rn, 1, PREFIX, null)) as PREFIX1,
min(decode(rn, 2, PREFIX, null)) as PREFIX2,
min(decode(rn, 3, PREFIX, null)) as PREFIX3,
min(decode(rn, 4, PREFIX, null)) as PREFIX4,
min(decode(rn, 5, PREFIX, null)) as PREFIX5,
min(decode(rn, 6, PREFIX, null)) as PREFIX6,
min(decode(rn, 7, PREFIX, null)) as PREFIX7,
min(decode(rn, 8, PREFIX, null)) as PREFIX8,
min(decode(rn, 9, PREFIX, null)) as PREFIX9,
min(decode(rn, 10, PREFIX, null)) as PREFIX10
from tab
group by AREA_CODE
And in 11g:
with tab as (select AREA_CODE,
PREFIX,
row_NUMBER() over(partition by AREA_CODE order by null) rn
from table_name)
select *
from tab
pivot (max(PREFIX) as PREFIX for RN in (1,2,3,4,5,6,7,8,9,10))
Output:
| AREA_CODE | 1_PREFIX | 2_PREFIX | 3_PREFIX | 4_PREFIX | 5_PREFIX | 6_PREFIX | 7_PREFIX | 8_PREFIX | 9_PREFIX | 10_PREFIX |
|-----------|----------|----------|----------|----------|----------|----------|----------|----------|----------|-----------|
| 21 | 58 | 86 | 80 | 66 | 56 | 59 | 51 | 81 | 35 | 48 |
| 22 | 33 | 34 | 27 | 20 | 26 | 21 | 36 | 38 | (null) | (null) |
| 232 | 27 | 26 | 22 | (null) | (null) | (null) | (null) | (null) | (null) | (null) |
| 233 | 85 | 86 | 88 | 87 | 82 | 89 | (null) | (null) | (null) | (null) |
| 235 | 56 | 53 | 87 | 86 | (null) | (null) | (null) | (null) | (null) | (null) |
For more prefixes per area code, extend the list of min(decode(rn, n, PREFIX, null)) expressions (and the pivot's IN list) accordingly.
My test data was:
select 21,48 from dual union all
select 21,66 from dual union all
select 21,80 from dual union all
select 21,86 from dual union all
select 21,58 from dual union all
select 21,59 from dual union all
select 21,51 from dual union all
select 21,81 from dual union all
select 21,35 from dual union all
select 21,56 from dual union all
select 22,26 from dual union all
select 22,20 from dual union all
select 22,27 from dual union all
select 22,34 from dual union all
select 22,33 from dual union all
select 22,21 from dual union all
select 22,38 from dual union all
select 22,36 from dual union all
select 232,22 from dual union all
select 232,26 from dual union all
select 232,27 from dual union all
select 233,88 from dual union all
select 233,86 from dual union all
select 233,85 from dual union all
select 233,87 from dual union all
select 233,89 from dual union all
select 233,82 from dual union all
select 235,56 from dual union all
select 235,53 from dual union all
select 235,87 from dual union all
select 235,86 from dual
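To get headers like P1 .. P10 instead of 1_PREFIX, the entries in the 11g pivot's IN list can be aliased; a minimal sketch, assuming the same tab CTE as above:
with tab as (select AREA_CODE,
                    PREFIX,
                    row_number() over(partition by AREA_CODE order by null) rn
             from table_name)
select *
from tab
pivot (max(PREFIX) for RN in (1 as P1, 2 as P2, 3 as P3, 4 as P4, 5 as P5,
                              6 as P6, 7 as P7, 8 as P8, 9 as P9, 10 as P10))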