I need to fill in columns with the number of days per quarter. The time period is about 10 years. The structure of the tables is below. Thanks for any suggestions.
SELECT StartDate, EndDate
INTO #tmp_RTX
FROM DateTable
StartDate EndDate
-------------------------------------------------
2015-11-01 00:00:00.000 2018-06-01 00:00:00.000
2017-09-02 00:00:00.000 2021-12-02 00:00:00.000
2016-01-02 00:00:00.000 2019-01-02 00:00:00.000
.
.
.
2018-10-26 00:00:00.000 2020-10-26 00:00:00.000
INSERT INTO DWHMart.[RTX].[RoadTax]
    ([StartDate]
    ,[EndDate]
    ,[NuberOfaDaysInQuarter1]
    ,[NuberOfaDaysInQuarter2]
    ,[NuberOfaDaysInQuarter3]
    ,[NuberOfaDaysInOctoberNovember]
    ,[NuberOfaDaysInDecember]
    ,[NuberOfaDaysInTotal])
SELECT
     [StartDate] = StartDate
    ,[EndDate] = EndDate
    ,[NuberOfaDaysInQuarter1] = NuberOfaDaysInQuarter1
    ,[NuberOfaDaysInQuarter2] = NuberOfaDaysInQuarter2
    ,[NuberOfaDaysInQuarter3] = NuberOfaDaysInQuarter3
    ,[NuberOfaDaysInOctoberNovember] = NuberOfaDaysInOctoberNovember
    ,[NuberOfaDaysInDecember] = NuberOfaDaysInDecember
    ,[NuberOfaDaysInTotal] = DATEDIFF(DAY, StartDate, EndDate)
FROM #tmp_RTX
EXPECTED RESULT:
StartDate EndDate NuberOfaDaysInQuarter1 ..
2015-11-01 00:00:00.000 2018-06-01 00:00:00.000 .
2017-09-02 00:00:00.000 2021-12-02 00:00:00.000 .
2016-01-02 00:00:00.000 2019-01-02 00:00:00.000 .
. .
. .
. .
2018-10-26 00:00:00.000 2020-10-26 00:00:00.000 .
I've built my own dynamic table of days using a recursive CTE.
Then I counted, per quarter, how many of those days fall within each range you've given.
I used a CROSS APPLY against each of your rows, since they have no unique ID for a GROUP BY; it works either way, though.
;WITH RNG AS (SELECT cast(MIN(StartDate) as date) as MN,
cast(MAX (EndDate ) as date) MX FROM #tmp_RTX),
DATS AS (SELECT MN FROM RNG
UNION ALL
SELECT DATEADD(day,1,MN) FROM dats
WHERE dats.MN < (SELECT MX FROM RNG)
)
SELECT T1.StartDate,
       T1.EndDate,
DQ.NuberOfaDaysInQuarter1 ,
DQ.NuberOfaDaysInQuarter2 ,
DQ.NuberOfaDaysInQuarter3 ,
DQ.NuberOfaDaysInQuarter4
FROM #tmp_RTX T1
CROSS APPLY (SELECT
SUM(CASE WHEN datepart(quarter ,MN) = 1 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter1,
SUM(CASE WHEN datepart(quarter ,MN) = 2 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter2,
SUM(CASE WHEN datepart(quarter ,MN) = 3 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter3,
SUM(CASE WHEN datepart(quarter ,MN) = 4 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter4
FROM DATS
WHERE MN BETWEEN
CAST(t1.startdate as date)
and
CAST(T1.EndDate as date)
) DQ
OPTION (maxrecursion 0);
The following is what built my test data (based on the conventions of your data):
SELECT '20151101' StartDate, '20180601' EndDate
INTO #tmp_RTX
INSERT into #tmp_RTX values ('20161231','20180101');
INSERT into #tmp_RTX values ('20181231','20190101');
To handle the different requirement for the final quarter, check the month rather than the quarter:
;WITH RNG AS (SELECT cast(MIN(StartDate) as date) as MN,
cast(MAX (EndDate ) as date) MX FROM #tmp_RTX),
DATS AS (SELECT MN FROM RNG
UNION ALL
SELECT DATEADD(day,1,MN) FROM dats
WHERE dats.MN < (SELECT MX FROM RNG)
)
SELECT T1.StartDate,
       T1.EndDate,
DQ.NuberOfaDaysInQuarter1 ,
DQ.NuberOfaDaysInQuarter2 ,
DQ.NuberOfaDaysInQuarter3 ,
DQ.NuberOfaDaysInOctoberNovember ,
DQ.NuberOfaDaysInDecember
FROM #tmp_RTX T1
CROSS APPLY (SELECT
SUM(CASE WHEN datepart(quarter ,MN) = 1 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter1,
SUM(CASE WHEN datepart(quarter ,MN) = 2 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter2,
SUM(CASE WHEN datepart(quarter ,MN) = 3 THEN 1 ELSE 0 END)
NuberOfaDaysInQuarter3,
SUM(CASE WHEN datepart(month ,MN) IN(10,11) THEN 1 ELSE 0 END)
NuberOfaDaysInOctoberNovember,
SUM(CASE WHEN datepart(month ,MN) IN(12) THEN 1 ELSE 0 END)
NuberOfaDaysInDecember
FROM DATS
WHERE MN BETWEEN
CAST(t1.startdate as date)
and
CAST(T1.EndDate as date)
) DQ
OPTION (maxrecursion 0)
The number of days in each quarter is always the same, except for Q1 in leap years. Hence you can use constant expressions for Q2..Q4 and a parameterized value for Q1.
DECLARE @my_date date = '2017-01-01';
SELECT 89 + ISDATE(CAST(YEAR(@my_date) AS char(4)) + '0229') AS NuberOfaDaysInQuarter1,
90 AS NuberOfaDaysInQuarter2,
91 AS NuberOfaDaysInQuarter3,
91 AS NuberOfaDaysInQuarter4
Updated solution (accumulating day count by quarters)
DECLARE @start_date date = '2000-01-01';
DECLARE @quarters TABLE (start_date date, end_date date, num int);
-- Create quarter calendar from @start_date
WITH nums (n) AS
(
SELECT TOP 100 (ROW_NUMBER() OVER (ORDER BY (SELECT 1)) - 1) * 3
FROM sys.columns sc1 CROSS JOIN sys.columns sc2
),
calendar AS
(
SELECT n, DATEADD(month, n, @start_date) AS start_date
FROM nums
)
INSERT INTO @quarters
SELECT start_date,
DATEADD(day, -1, DATEADD(month, 3, start_date)) AS end_date,
((n / 3) % 4) + 1 AS num
FROM calendar;
-- test data
DECLARE @t TABLE (start_date date, end_date date);
INSERT INTO @t VALUES
('2015-11-01', '2018-06-01'),
('2017-09-02', '2021-12-02'),
('2016-01-02', '2019-01-02');
-- Calculations
WITH inner_counts AS (
SELECT t.start_date, t.end_date, q.num,
SUM(datediff(day, q.start_date, q.end_date)) AS days_count
FROM @t t INNER JOIN @quarters q ON q.start_date > t.start_date AND q.start_date < t.end_date
GROUP BY t.start_date, t.end_date, q.num
),
outer_counts1 AS (
SELECT t.start_date, t.end_date, q.num,
SUM(datediff(day, t.start_date, q.end_date)) AS days_count
FROM @t t INNER JOIN @quarters q ON q.end_date = (SELECT MIN(end_date) FROM @quarters q WHERE q.end_date >= t.start_date)
GROUP BY t.start_date, t.end_date, q.num
),
outer_counts2 AS (
SELECT t.start_date, t.end_date, q.num,
SUM(datediff(day, q.start_date, t.end_date)) AS days_count
FROM @t t INNER JOIN @quarters q ON q.start_date = (SELECT MAX(start_date) FROM @quarters q WHERE q.start_date <= t.end_date)
GROUP BY t.start_date, t.end_date, q.num
),
total_counts AS (
SELECT start_date, end_date, num, SUM(days_count) AS days_count
FROM (SELECT * FROM inner_counts
UNION ALL
SELECT * FROM outer_counts1
UNION ALL
SELECT * FROM outer_counts2) c
GROUP BY start_date, end_date, num
)
SELECT start_date, end_date,
SUM(CASE WHEN num = 1 THEN days_count ELSE 0 END) AS days_in_Q1,
SUM(CASE WHEN num = 2 THEN days_count ELSE 0 END) AS days_in_Q2,
SUM(CASE WHEN num = 3 THEN days_count ELSE 0 END) AS days_in_Q3,
SUM(CASE WHEN num = 4 THEN days_count ELSE 0 END) AS days_in_Q4
FROM total_counts
GROUP BY start_date, end_date
Result
start_date end_date days_in_Q1 days_in_Q2 days_in_Q3 days_in_Q4
---------- ---------- ----------- ----------- ----------- -----------
2015-11-01 2018-06-01 268 331 182 242
2016-01-02 2019-01-02 357 270 273 273
2017-09-02 2021-12-02 357 360 392 517
DEMO
Use a recursive CTE (be sure to set the MAXRECURSION option to the number of days you need the loop to process, or to 0 once you've confirmed no infinite loops exist) to generate all the days between the min and max of the dates in DateTable, then join this result back to DateTable on the date falling within each range. Using DATEPART on the year and quarter of each cycle date, we can get the appropriate quarter/year and then group.
I didn't pivot the results. Pivoting data is usually a display function and best left to the display tools (Crystal, BI tools, etc.). It can be done in SQL, and in this case dynamic SQL would be needed because the number of years is dynamic in nature; a sketch of such a dynamic pivot follows the result table below. I chose not to go down that path, as the reporting tools can handle the dynamic nature of the data.
WITH
DateTable as (SELECT cast('2015-11-01 00:00:00.000'as date) startdate, cast('2018-06-01 00:00:00.000'as date) endDate union all
SELECT '2017-09-02 00:00:00.000', '2021-12-02 00:00:00.000' union all
SELECT '2016-01-02 00:00:00.000', '2019-01-02 00:00:00.000'),
CTE AS (SELECT Min(StartDate) StartDate, max(EndDate) EndDate FROM DATETABLE),
CTE2 AS (SELECT C.StartDate as RangeStartDate
, C.EndDate as RangeEndDate
, 1 CycleCount
, datepart(Q,dateadd(d,0,C.StartDate)) as Quarter
, datepart(m,dateadd(d,0,C.StartDate)) as Month
, datepart(YYYY, dateadd(d,0,C.StartDate)) as Yr
, C.StartDate as CycleDate
FROM CTE C
UNION ALL
SELECT RangeStartDate, RangeEndDate, CycleCount+1
, datepart(Q,dateadd(d,1,CycleDate)) as Quarter
, datepart(m,dateadd(d,1,CycleDate)) as Month
, datepart(YYYY, dateadd(d,1,CycleDate)) as Yr
, dateadd(d,1,cycleDate) as CycleDate
FROM cte2
WHERE datediff(d,RangeStartDate, RangeEndDate) >= CycleCount
)
SELECT DT.StartDate
, DT.ENDDate
, concat(yr,'-',Quarter) [YYYY-Q]
, count(*) as DaysInQuarter
, sum(case when Month in (10,11) then 1 else 0 end)as OctNovDays
, sum(case when Month in (12) then 1 else 0 end)as DecDays
, datediff(d,StartDate, EndDate) as DaysTotal
FROM CTE2
INNER JOIN DateTable DT
on CTE2.CycleDate between DT.StartDate and DT.EndDate
GROUP BY Quarter,DT.StartDate, DT.EndDate, YR
ORDER BY DT.StartDate, DT.EndDate, YR, Quarter
OPTION (MAXRECURSION 10000)
Giving us:
+----+---------------------+---------------------+--------+---------------+------------+---------+-----------+
| | StartDate | ENDDate | YYYY-Q | DaysInQuarter | OctNovDays | DecDays | DaysTotal |
+----+---------------------+---------------------+--------+---------------+------------+---------+-----------+
| 1 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2015-4 | 61 | 30 | 31 | 943 |
| 2 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2016-1 | 91 | 0 | 0 | 943 |
| 3 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2016-2 | 91 | 0 | 0 | 943 |
| 4 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2016-3 | 92 | 0 | 0 | 943 |
| 5 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2016-4 | 92 | 61 | 31 | 943 |
| 6 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2017-1 | 90 | 0 | 0 | 943 |
| 7 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2017-2 | 91 | 0 | 0 | 943 |
| 8 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2017-3 | 92 | 0 | 0 | 943 |
| 9 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2017-4 | 92 | 61 | 31 | 943 |
| 10 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2018-1 | 90 | 0 | 0 | 943 |
| 11 | 01.11.2015 00:00:00 | 01.06.2018 00:00:00 | 2018-2 | 62 | 0 | 0 | 943 |
| 12 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2016-1 | 90 | 0 | 0 | 1096 |
| 13 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2016-2 | 91 | 0 | 0 | 1096 |
| 14 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2016-3 | 92 | 0 | 0 | 1096 |
| 15 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2016-4 | 92 | 61 | 31 | 1096 |
| 16 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2017-1 | 90 | 0 | 0 | 1096 |
| 17 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2017-2 | 91 | 0 | 0 | 1096 |
| 18 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2017-3 | 92 | 0 | 0 | 1096 |
| 19 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2017-4 | 92 | 61 | 31 | 1096 |
| 20 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2018-1 | 90 | 0 | 0 | 1096 |
| 21 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2018-2 | 91 | 0 | 0 | 1096 |
| 22 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2018-3 | 92 | 0 | 0 | 1096 |
| 23 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2018-4 | 92 | 61 | 31 | 1096 |
| 24 | 02.01.2016 00:00:00 | 02.01.2019 00:00:00 | 2019-1 | 2 | 0 | 0 | 1096 |
| 25 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2017-3 | 29 | 0 | 0 | 1552 |
| 26 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2017-4 | 92 | 61 | 31 | 1552 |
| 27 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2018-1 | 90 | 0 | 0 | 1552 |
| 28 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2018-2 | 91 | 0 | 0 | 1552 |
| 29 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2018-3 | 92 | 0 | 0 | 1552 |
| 30 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2018-4 | 92 | 61 | 31 | 1552 |
| 31 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2019-1 | 90 | 0 | 0 | 1552 |
| 32 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2019-2 | 91 | 0 | 0 | 1552 |
| 33 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2019-3 | 92 | 0 | 0 | 1552 |
| 34 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2019-4 | 92 | 61 | 31 | 1552 |
| 35 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2020-1 | 91 | 0 | 0 | 1552 |
| 36 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2020-2 | 91 | 0 | 0 | 1552 |
| 37 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2020-3 | 92 | 0 | 0 | 1552 |
| 38 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2020-4 | 92 | 61 | 31 | 1552 |
| 39 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2021-1 | 90 | 0 | 0 | 1552 |
| 40 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2021-2 | 91 | 0 | 0 | 1552 |
| 41 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2021-3 | 92 | 0 | 0 | 1552 |
| 42 | 02.09.2017 00:00:00 | 02.12.2021 00:00:00 | 2021-4 | 63 | 61 | 2 | 1552 |
+----+---------------------+---------------------+--------+---------------+------------+---------+-----------+
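For illustration only, if a pivoted layout were wanted in SQL anyway, a dynamic pivot might look like the sketch below. It assumes the grouped result above has first been saved into a hypothetical temp table #QuarterDays(StartDate, EndDate, [YYYY-Q], DaysInQuarter); that table name and its columns are assumptions, not part of the query above.
-- Sketch only: build the column list from whatever YYYY-Q values exist, then pivot.
DECLARE @cols nvarchar(max), @sql nvarchar(max);
SELECT @cols = STUFF((SELECT DISTINCT ',' + QUOTENAME([YYYY-Q])
                      FROM #QuarterDays
                      FOR XML PATH(''), TYPE).value('.', 'nvarchar(max)'), 1, 1, '');
SET @sql = N'SELECT StartDate, EndDate, ' + @cols + N'
FROM (SELECT StartDate, EndDate, [YYYY-Q], DaysInQuarter FROM #QuarterDays) src
PIVOT (SUM(DaysInQuarter) FOR [YYYY-Q] IN (' + @cols + N')) p;';
EXEC sp_executesql @sql;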
Related
I am trying to "flatten" a delivery schedule table from many rows for one customer into one row per customer. Each customer can have from 1 to 7 LeadDays, OrderDays, and DeliveryDays.
This is what I have to work with:
CustomerNumber | Company | Year | WeekNumber | OrderDate | OrderDayName | LeadDays | DeliveryDate | DeliveryDayName
--------------------------------------------------------------------------------------------------------------
5002 | Comp_A | 2022 | 15 | 2022-04-03 | Sunday | 1.0 | 2022-04-04 | Monday
5002 | Comp_A | 2022 | 15 | 2022-04-04 | Monday | 1.0 | 2022-04-05 | Tuesday
5002 | Comp_A | 2022 | 15 | 2022-04-05 | Tuesday | 1.0 | 2022-04-06 | Wednesday
5002 | Comp_A | 2022 | 15 | 2022-04-06 | Wednesday | 1.0 | 2022-04-07 | Thursday
5002 | Comp_A | 2022 | 15 | 2022-04-07 | Thursday | 1.0 | 2022-04-08 | Friday
5002 | Comp_A | 2022 | 15 | 2022-04-08 | Friday | 1.0 | 2022-04-09 | Saturday
5002 | Comp_A | 2022 | 15 | 2022-04-09 | Saturday | 1.0 | 2022-04-10 | Sunday
310365 | Comp_A | 2022 | 15 | 2022-04-05 | Tuesday | 1.0 | 2022-04-06 | Wednesday
310365 | Comp_A | 2022 | 15 | 2022-04-07 | Thursday | 1.0 | 2022-04-08 | Friday
310428 | Comp_A | 2022 | 15 | 2022-04-06 | Wednesday | 1.0 | 2022-04-07 | Thursday
19401 | Comp_B | 2022 | 15 | 2022-04-04 | Monday | 1.0 | 2022-04-05 | Tuesday
19401 | Comp_B | 2022 | 15 | 2022-04-05 | Tuesday | 1.0 | 2022-04-06 | Wednesday
19401 | Comp_B | 2022 | 15 | 2022-04-06 | Wednesday | 1.0 | 2022-04-07 | Thursday
19401 | Comp_B | 2022 | 15 | 2022-04-07 | Thursday | 1.0 | 2022-04-08 | Friday
19401 | Comp_B | 2022 | 15 | 2022-04-08 | Friday | 1.0 | 2022-04-09 | Saturday
.....and this is what I need it to look like:
CustomerNumber | Company | Year | WeekNumber | LeadDays_1 | OrderDate_1 | DeliveryDate_1 | LeadDays_2 | OrderDate_2 | DeliveryDate_2 | LeadDays_3 | OrderDate_3 | DeliveryDate_3 | LeadDays_4 | OrderDate_4 | DeliveryDate_4 | LeadDays_5 | OrderDate_5 | DeliveryDate_5 | LeadDays_6 | OrderDate_6 | DeliveryDate_6 | LeadDays_7 | OrderDate_7 | DeliveryDate_7
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
5002 | Comp_A | 2022 | 15 | 1.0 | 2022-04-03 | 2022-04-04 | 1.0 | 2022-04-04 | 2022-04-05 | 1.0 | 2022-04-05 | 2022-04-06 | 1.0 | 2022-04-06 | 2022-04-07 | 1.0 | 2022-04-07 | 2022-04-08 | 1.0 | 2022-04-08 | 2022-04-09 | 1.0 | 2022-04-09 | 2022-04-10
310365 | Comp_A | 2022 | 15 | 1.0 | 2022-04-05 | 2022-04-06 | 1.0 | 2022-04-07 | 2022-04-08 | | | | | | | | | | | | | | |
310428 | Comp_A | 2022 | 15 | 1.0 | 2022-04-06 | 2022-04-07 | | | | | | | | | | | | | | | | | |
19401 | Comp_B | 2022 | 15 | 1.0 | 2022-04-04 | 2022-04-05 | 1.0 | 2022-04-05 | 2022-04-06 | 1.0 | 2022-04-06 | 2022-04-07 | 1.0 | 2022-04-07 | 2022-04-08 | 1.0 | 2022-04-08 | 2022-04-09 | | | | | |
I know it should be a (relatively simple?) PIVOT table, but I can't seem to wrap my head around it.
You can do conditional aggregation using MAX(CASE ...), which is much more flexible than PIVOT. In your case, you first need to generate a row number to pivot over:
SELECT
CustomerNumber,
Company,
Year,
WeekNumber,
MAX(CASE WHEN rn = 1 THEN LeadDays END) LeadDays_1,
MAX(CASE WHEN rn = 1 THEN OrderDate END) OrderDate_1,
MAX(CASE WHEN rn = 1 THEN DeliveryDate END) DeliveryDate_1,
MAX(CASE WHEN rn = 2 THEN LeadDays END) LeadDays_2,
MAX(CASE WHEN rn = 2 THEN OrderDate END) OrderDate_2,
MAX(CASE WHEN rn = 2 THEN DeliveryDate END) DeliveryDate_2,
MAX(CASE WHEN rn = 3 THEN LeadDays END) LeadDays_3,
MAX(CASE WHEN rn = 3 THEN OrderDate END) OrderDate_3,
MAX(CASE WHEN rn = 3 THEN DeliveryDate END) DeliveryDate_3,
MAX(CASE WHEN rn = 4 THEN LeadDays END) LeadDays_4,
MAX(CASE WHEN rn = 4 THEN OrderDate END) OrderDate_4,
MAX(CASE WHEN rn = 4 THEN DeliveryDate END) DeliveryDate_4,
MAX(CASE WHEN rn = 5 THEN LeadDays END) LeadDays_5,
MAX(CASE WHEN rn = 5 THEN OrderDate END) OrderDate_5,
MAX(CASE WHEN rn = 5 THEN DeliveryDate END) DeliveryDate_5,
MAX(CASE WHEN rn = 6 THEN LeadDays END) LeadDays_6,
MAX(CASE WHEN rn = 6 THEN OrderDate END) OrderDate_6,
MAX(CASE WHEN rn = 6 THEN DeliveryDate END) DeliveryDate_6,
MAX(CASE WHEN rn = 7 THEN LeadDays END) LeadDays_7,
MAX(CASE WHEN rn = 7 THEN OrderDate END) OrderDate_7,
MAX(CASE WHEN rn = 7 THEN DeliveryDate END) DeliveryDate_7
FROM (
SELECT *,
rn = ROW_NUMBER() OVER (
PARTITION BY CustomerNumber, Company, Year, WeekNumber
ORDER BY OrderDate, DeliveryDate)
FROM YourTable t
) t
GROUP BY
CustomerNumber,
Company,
Year,
WeekNumber;
db<>fiddle
I have a SELECT statement that calculates how many parts per minute are processed. The problem is that if no parts are processed in a given minute, I don't have a record of 0 for that time, which means I don't know that a process has stopped until another part is created.
Statement:
SELECT convert(datetime,FORMAT(dt, 'yyyy-MM-dd HH:mm:00:000')) as dt
,count(*) as ppm
FROM tblx group by FORMAT(dt, 'yyyy-MM-dd HH:mm:00:000')
What I have (note that the minutes from 10:59 to 11:02 are missing):
+-------------------------+-----+
| dt | ppm |
+-------------------------+-----+
| 2020-07-06 10:55:00.000 | 177 |
| 2020-07-06 10:56:00.000 | 176 |
| 2020-07-06 10:57:00.000 | 177 |
| 2020-07-06 10:58:00.000 | 36 |
| 2020-07-06 11:03:00.000 | 70 |
| 2020-07-06 11:04:00.000 | 92 |
| 2020-07-06 11:07:00.000 | 54 |
| 2020-07-06 11:08:00.000 | 153 |
+-------------------------+-----+
What I'm trying to generate:
+-------------------------+-----+
| dt | hbh |
+-------------------------+-----+
| 2020-07-06 10:55:00.000 | 177 |
| 2020-07-06 10:56:00.000 | 176 |
| 2020-07-06 10:57:00.000 | 177 |
| 2020-07-06 10:58:00.000 | 36 |
| 2020-07-06 10:59:00.000 | 0 |
| 2020-07-06 11:00:00.000 | 0 |
| 2020-07-06 11:01:00.000 | 0 |
| 2020-07-06 11:02:00.000 | 0 |
| 2020-07-06 11:03:00.000 | 70 |
| 2020-07-06 11:04:00.000 | 92 |
| 2020-07-06 11:07:00.000 | 54 |
| 2020-07-06 11:08:00.000 | 153 |
+-------------------------+-----+
The best way to do this is to use a recursive CTE to create a table in your script that has the datetimes you want with the required granularity.
For example, this script can generate a table with a row for each minute of the day:
WITH numbers AS
(
SELECT 0 num
UNION ALL
SELECT num + 1 FROM numbers WHERE num <= 58
)
SELECT
NN.num AS Hour
, AA.num AS Minute
, CAST(IIF(NN.num < 10, '0'+CAST(NN.Num AS CHAR(1)), CAST(NN.Num AS CHAR(2)))
+ ':' + IIF(AA.num < 10, '0'+CAST(AA.Num AS CHAR(1)), CAST(AA.Num AS CHAR(2))) AS TIME(0)) Time
INTO
#time_table
FROM
numbers NN
LEFT JOIN
numbers AA ON
1 = 1
WHERE
NN.num <= 23
You could get all the dates from your data, join those dates to the times generated by the above script, then left join back onto your data and fill in the NULLs with 0, as in the sketch below.
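A minimal sketch of that join, assuming the #time_table built above and your tblx table with its dt column; every other name here is my own placeholder:
-- Sketch only: every date present in tblx, cross joined to every minute of the day,
-- then left joined back to the per-minute counts; missing minutes become 0.
SELECT d.DayDate, tt.Time, ISNULL(p.ppm, 0) AS ppm
FROM (SELECT DISTINCT CAST(dt AS date) AS DayDate FROM tblx) d
CROSS JOIN #time_table tt
LEFT JOIN (SELECT CAST(dt AS date) AS DayDate,
                  DATEPART(hour, dt) AS Hr,
                  DATEPART(minute, dt) AS Mn,
                  COUNT(*) AS ppm
           FROM tblx
           GROUP BY CAST(dt AS date), DATEPART(hour, dt), DATEPART(minute, dt)) p
  ON p.DayDate = d.DayDate AND p.Hr = tt.Hour AND p.Mn = tt.Minute
ORDER BY d.DayDate, tt.Time;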
Based on what Tim O'Sullivan suggested, I created this table, then joined it to the original dataset.
WITH cte
AS (--SELECT getdate() - 2 AS n -- anchor member
SELECT convert(datetime,FORMAT(getdate() -2, 'yyyy-MM-dd HH:mm:00:000')) as n
UNION ALL
SELECT dateadd(mi, 1, n) -- recursive member
FROM cte
WHERE n < getdate() -- terminator
)
SELECT n
FROM cte
OPTION (MAXRECURSION 5000)
EDIT: I changed my example and made it simpler. The first table is how the source data looks; the second is how the result should look.
Hello everyone,
I have multiple parking spaces that only send state changes.
A space sends a "1" when a car arrives, then it doesn't send anything until the car leaves again; at that moment it sends a "0". I need to do analysis over a long time span, so it would be great to see the amount of time per hour or so, to avoid getting too many rows (compared with one per minute).
The data looks like this (as requested, I've reduced it to parking ID 10, with just the last record from 19.12. and the records from 20.12.):
+------------+------------------+--------+-------------+
| Parking-ID | DateTime | Status | Comment |
+------------+------------------+--------+-------------+
| 10 | 20.12.2019 16:35 | 0 | Car left |
+------------+------------------+--------+-------------+
| 10 | 20.12.2019 08:22 | 1 | Car arrived |
+------------+------------------+--------+-------------+
| 10 | 19.12.2019 22:47 | 0 | Car left |
+------------+------------------+--------+-------------+
Now, to not make it too easy for me, next to the "free" and "taken" statuses there is also a "warm" status: for 1 hour after a car has left, the space should be marked as "warm", because some cars come and go within a few minutes and this time range should be shown as "warm".
To avoid getting too many rows (like one for every minute), I would appreciate it if the summary could be per hour. For my analysis I need to see how many hours per day the space was taken, how many hours it was warm, and how many hours it was free.
So the result should look something like this (for Parking-ID 10 and for 20.12.2019):
+------------+------------------+--------+----------+---------+
| Parking-ID | DateTime | Status | Duration | Comment |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 23:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 22:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 21:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 20:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 19:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 18:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 17:00 | 0 | 0.42 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 17:00 | 2 | 0.58 | Warm |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 16:00 | 2 | 0.42 | Warm |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 16:00 | 1 | 0.58 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 15:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 14:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 13:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 12:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 11:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 10:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 09:00 | 1 | 1.00 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 08:00 | 1 | 0.63 | Taken |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 08:00 | 0 | 0.37 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 07:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 06:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 05:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 04:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 03:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 02:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 01:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
| 10 | 20.12.2019 00:00 | 0 | 1.00 | Free |
+------------+------------------+--------+----------+---------+
Does someone have a good approach? I've already searched and experimented but couldn't find one that works.
Thank you and best regards
First, your expected duration output looks wrong if you cross-check it. For example, for 20.12.2019 08:00 it should be 22.00 and 38.00. Please clarify this.
Secondly, the two rows for 20.12.2019 17:00 are not clear. Why would it contain 2 rows? Please clarify this as well.
Create a calendar table in whatever way you want:
CREATE TABLE [dbo].[CalendarDate](
[Dates] [datetime2](0) NOT NULL
PRIMARY KEY CLUSTERED
(
[Dates] ASC
)
) ON [PRIMARY]
GO
insert into [CalendarDate] with(tablock)
select top (100000)
dateadd(day,ROW_NUMBER()over(order by (select null))
,'1950-01-01 00:00:00')
from sys.objects a, sys.objects b, sys.objects c
Then create a numbers table as well:
-- Real or #temp your wish
create Table #Number(Hrs int)
insert into #Number (Hrs)
select top 24 ROW_NUMBER()over(order by number)-1
from master..spt_values
Your table's sample data. I have kept the parking status in a separate table, following normalization:
-- your real table
create table #Parking( ParkingID int, ParkingDateTime Datetime2(0),ParkingStatus tinyint )
insert into #Parking values(10,'2019-12-20 16:35',0),(10,'2019-12-20 08:22',1)
,(10,'2019-12-19 22:47',0)
-- It should be your real table
create table #ParkingStatus( ParkingStatus tinyint,StatusName varchar(50) )
insert into #ParkingStatus values(0,'Car left')
,(1,'Car arrived'),(2,'Free'),(3,'Taken')
,(4,'Warm')
The script:
declare @From Datetime2(0)='2019-12-20'
declare @To Datetime2(0)=dateadd(second,-1,dateadd(day,1,@From))
-- Put the required data in a #temp table, since it will be used many times
create table #ParkingTemp(ParkingID int,ParkingDateTime Datetime2(0)
,ParkingDate Date,ParkingStatus tinyint )
insert into #ParkingTemp (ParkingID,ParkingDateTime
,ParkingDate,ParkingStatus)
select P.ParkingID,ParkingDateTime
,p.ParkingDateTime
,ParkingStatus
from #Parking P
where ParkingDateTime>=@From
and ParkingDateTime<=@To
;With CTE as
(
select ParkingID,ParkingDateTime ,count(*)+1 SplitCount
,ParkingStatus as InitialStatus
from #ParkingTemp
group by ParkingID,ParkingDateTime,ParkingStatus
)
, DistinctIDCTE as
(
select distinct ParkingID
from #ParkingTemp
)
, CTE1 as
(
select Dates
,dateadd(hour,hrs,Dates)ReportDateTime
,ParkingID
from [CalendarDate],#Number N,DistinctIDCTE
where dates>=@From and Dates<=@To
),
CTE2 as
(
select c.ParkingID
,dateadd(minute,-datepart(minute,ParkingDateTime),ParkingDateTime) ParkingDate
,ParkingDateTime,hrs as rownum,InitialStatus
from CTE C
cross apply(select hrs from #Number N where c.SplitCount>n.Hrs)ca
)
,CTE3 as
(
select parkingid,ParkingDateTime as FromDatetime
,ToDatetime
from #ParkingTemp C
cross apply(select top 1 ParkingDateTime as ToDatetime
from #ParkingTemp C1 where c.ParkingID=c1.ParkingID
and c1.ParkingStatus=0 and
c1.ParkingDateTime>c.ParkingDateTime
order by c1.ParkingDateTime )c1
where ParkingStatus=1
)
,CTE4 as
(
select c.ParkingID,c.ReportDateTime
from CTE1 C
outer apply(select top 1 FromDatetime ,ToDatetime
from CTE3 c1 where c.ParkingID=c1.ParkingID
and (ReportDateTime>= FromDatetime and ReportDateTime<=ToDatetime))ca
)
--select * from CTE2
,CTE5 as
(
select c4.ParkingID,c4.ReportDateTime
,case when rownum=0 and InitialStatus=1 then 2
when rownum=1 and InitialStatus=1 then 3
when rownum=0 and InitialStatus=0 then 4
when rownum=1 and InitialStatus=0 then 3
else 2 end as ParkingStatusid
,case when rownum=0 then datediff(minute,ReportDateTime,ParkingDateTime)
when rownum=1 then 60- datepart(minute,ParkingDateTime)
else 1.00 end Duration
,ParkingDateTime
,rownum,InitialStatus
from CTE4 c4
left join CTE2 c2 on c4.ParkingID=c2.ParkingID and c2.ParkingDate =c4.ReportDateTime
)
select c5.ParkingID,c5.ReportDateTime,c5.ParkingStatusid
,Duration,PS.StatusName AS Comment
from CTE5 c5
inner join #ParkingStatus ps on c5.ParkingStatusid=ps.ParkingStatus
order by ReportDateTime desc
Clean Up
drop table #Parking,#ParkingStatus,#Number,#ParkingTemp
Alternative, improved version:
;WITH CTE
AS (SELECT ParkingID,
ParkingDateTime,
COUNT(*) + 1 SplitCount,
ParkingStatus AS InitialStatus
FROM #ParkingTemp
GROUP BY ParkingID,
ParkingDateTime,
ParkingStatus),
DistinctIDCTE
AS (SELECT DISTINCT
ParkingID
FROM #ParkingTemp),
CTE1
AS (SELECT Dates,
DATEADD(hour, hrs, Dates) ReportDateTime,
ParkingID
FROM [CalendarDate],
#Number N,
DistinctIDCTE
WHERE dates >= @From
AND Dates <= @To),
CTE2
AS (SELECT c.ParkingID,
DATEADD(minute, -DATEPART(minute, ParkingDateTime), ParkingDateTime) ParkingDate,
ParkingDateTime,
hrs AS rownum,
InitialStatus
FROM CTE C
CROSS APPLY
(
SELECT hrs
FROM #Number N
WHERE c.SplitCount > n.Hrs
) ca),
CTE5
AS (SELECT c4.ParkingID,
c4.ReportDateTime,
CASE
WHEN rownum = 0
AND InitialStatus = 1
THEN 2
WHEN rownum = 1
AND InitialStatus = 1
THEN 3
WHEN rownum = 0
AND InitialStatus = 0
THEN 4
WHEN rownum = 1
AND InitialStatus = 0
THEN 3
ELSE 2
END AS ParkingStatusid,
CASE
WHEN rownum = 0
THEN DATEDIFF(minute, ReportDateTime, ParkingDateTime)
WHEN rownum = 1
THEN 60 - DATEPART(minute, ParkingDateTime)
ELSE 1.00
END Duration,
ParkingDateTime,
rownum,
InitialStatus
FROM CTE1 c4
LEFT JOIN CTE2 c2 ON c4.ParkingID = c2.ParkingID
AND c2.ParkingDate = c4.ReportDateTime)
SELECT c5.ParkingID,
c5.ReportDateTime,
c5.ParkingStatusid,
Duration,
PS.StatusName AS Comment
FROM CTE5 c5
INNER JOIN #ParkingStatus ps ON c5.ParkingStatusid = ps.ParkingStatus
ORDER BY ReportDateTime DESC;
Note: please clarify the doubts above, and also try different sample data, e.g. where the same ParkingID has more than two parking status changes within a single hour.
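For illustration, hypothetical extra rows of that kind (my own example, not part of the original post) could be added to the #Parking sample data above:
-- Assumed extra sample rows: the same ParkingID changes status more than twice
-- within one hour, which is the edge case the note above refers to.
insert into #Parking values(10,'2019-12-21 09:05',1)
,(10,'2019-12-21 09:20',0)
,(10,'2019-12-21 09:40',1)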
I have to generate a result set from a SQL query that should match the following; let me explain both the inputs and the outputs.
I have a table named Orders that contains orders on some days at some hours. I have been asked to provide a result set covering all days between two dates (e.g. 2017-10-01 and 2017-10-07), with all 24 hours for each day, even if a given day or hour had no orders; in that case it should still appear, with a value of 0.
+------------+------+-------------+
| Day | Hour | TotalOrders |
+------------+------+-------------+
| 2017-10-01 | 0 | 0 |
+------------+------+-------------+
| 2017-10-01 | 1 | 3 |
+------------+------+-------------+
| 2017-10-01 | 2 | 4 |
+------------+------+-------------+
| 2017-10-01 | 3 | 0 |
+------------+------+-------------+
| 2017-10-01 | 4 | 7 |
+------------+------+-------------+
| 2017-10-01 | 5 | 0 |
+------------+------+-------------+
| 2017-10-01 | 6 | 0 |
+------------+------+-------------+
| 2017-10-01 | 7 | 9 |
+------------+------+-------------+
| 2017-10-01 | 8 | 0 |
+------------+------+-------------+
| 2017-10-01 | 9 | 0 |
+------------+------+-------------+
| 2017-10-01 | 10 | 0 |
+------------+------+-------------+
| 2017-10-01 | 11 | 0 |
+------------+------+-------------+
| 2017-10-01 | 12 | 0 |
+------------+------+-------------+
| 2017-10-01 | 13 | 0 |
+------------+------+-------------+
| 2017-10-01 | 14 | 0 |
+------------+------+-------------+
| 2017-10-01 | 15 | 0 |
+------------+------+-------------+
| 2017-10-01 | 16 | 0 |
+------------+------+-------------+
| 2017-10-01 | 17 | 0 |
+------------+------+-------------+
| 2017-10-01 | 18 | 0 |
+------------+------+-------------+
| 2017-10-01 | 19 | 0 |
+------------+------+-------------+
| 2017-10-01 | 20 | 0 |
+------------+------+-------------+
| 2017-10-01 | 21 | 0 |
+------------+------+-------------+
| 2017-10-01 | 22 | 0 |
+------------+------+-------------+
| 2017-10-01 | 23 | 0 |
+------------+------+-------------+
| 2017-10-02 | 0 | 0 |
+------------+------+-------------+
| 2017-10-02 | 1 | 0 |
+------------+------+-------------+
| 2017-10-02 | 2 | 0 |
+------------+------+-------------+
| 2017-10-02 | 3 | 0 |
+------------+------+-------------+
| 2017-10-02 | 4 | 0 |
+------------+------+-------------+
| 2017-10-02 | 5 | 0 |
+------------+------+-------------+
| 2017-10-02 | 6 | 0 |
+------------+------+-------------+
| 2017-10-02 | 7 | 0 |
+------------+------+-------------+
| and so on .................. |
+------------+------+-------------+
So, the result set above should contain every day between the two given dates, and each day should have all 24 hours, irrespective of whether that day had orders, and likewise for each hour (whether it had orders or not).
I did it using a nested CTE:
DECLARE @MinDate DATE = '20171001',
        @MaxDate DATE = '20171006';
;WITH INNER_CTE as(
    SELECT TOP (DATEDIFF(DAY, @MinDate, @MaxDate) + 1)
        Date = DATEADD(DAY, ROW_NUMBER() OVER(ORDER BY a.object_id) - 1, @MinDate)
    FROM sys.all_objects a
    CROSS JOIN sys.all_objects b),
OUTER_CTE as (
    select * from INNER_CTE
    cross apply (
        SELECT TOP (24) n = ROW_NUMBER() OVER (ORDER BY [object_id]) - 1
        FROM sys.all_objects ORDER BY n) t4
)
select t1.Date, t1.n [Hour], ISNULL(t2.TotalOrders, 0) TotalOrders from
OUTER_CTE t1
LEFT JOIN orders t2 on t1.Date = t2.[Day] and t1.n = t2.[Hour]
Good Reading about generating sequences using a query here: https://sqlperformance.com/2013/01/t-sql-queries/generate-a-set-1
I prefer to do this with a tally table instead of using loops; the performance is much better. I keep a tally table on my system as a view, like this:
create View [dbo].[cteTally] as
WITH
E1(N) AS (select 1 from (values (1),(1),(1),(1),(1),(1),(1),(1),(1),(1))dt(n)),
E2(N) AS (SELECT 1 FROM E1 a, E1 b), --10E+2 or 100 rows
E4(N) AS (SELECT 1 FROM E2 a, E2 b), --10E+4 or 10,000 rows max
cteTally(N) AS
(
SELECT ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) FROM E4
)
select N from cteTally
GO
Now that we have our tally table, we can use some basic math to get the desired output. Something along these lines:
declare @Date1 datetime = '2017-10-01';
declare @Date2 datetime = '2017-10-07';
select Day = convert(date, DATEADD(hour, t.N - 1, @Date1))
    , Hour = t.N - 1
    , TotalOrders = COUNT(o.OrderID)
from cteTally t
left join Orders o on o.OrderDate = DATEADD(hour, t.N - 1, @Date1)
where t.N <= DATEDIFF(hour, @Date1, @Date2)
group by t.N
The simplest way is to just use a temporary table or table variable to fill the desired result set, and then count the number of Orders for each row.
declare @Date1 date = '2017-10-01';
declare @Date2 date = '2017-10-07';
declare @Hour int;
declare @Period table (Day Date, Hour Time);
while @Date1 <= @Date2
begin
    set @Hour = 0;
    while @Hour < 24
    begin
        insert into @Period (Day, Hour) values (@Date1, TimeFromParts(@Hour, 0, 0, 0, 0));
        set @Hour = @Hour + 1;
    end
    set @Date1 = DateAdd(Day, 1, @Date1);
end
select Day, Hour,
    (select count(*)
     from Orders
     where Orders.Day = Period.Day and Orders.Hour = Period.Hour) as TotalOrders
from @Period as Period;
I am trying to split a time frequency that has a start time, an end time, a frequency and a duration into separate rows. Here is some example data:
+------+------------+----------+-----------------+---------------+
| Name | Start_Time | End_Time | Frequency_Hours | Duration_Mins |
+------+------------+----------+-----------------+---------------+
| A | 08:00:00 | 18:00:00 | 2 | 2 |
| B | 00:00:00 | 23:59:59 | 1 | 5 |
| C | 00:00:00 | 23:59:59 | 4 | 15 |
+------+------------+----------+-----------------+---------------+
Can be created using the following query:
DECLARE @Tmp AS TABLE(Name VARCHAR(128)
    ,Start_Time VARCHAR(8)
    ,End_Time VARCHAR(8)
    ,Frequency_Hours INT
    ,Duration_Mins INT)
INSERT INTO @Tmp VALUES ('A','08:00:00', '18:00:00', 2,2)
    ,('B','00:00:00', '23:59:59', 1,5)
    ,('C','00:00:00', '23:59:59', 4,15)
Here is my desired output (I will then use this to drive a Gantt chart visualisation):
+------+------------+----------+
| Name | Start_Time | End_Time |
+------+------------+----------+
| A | 08:00:00 | 08:02:00 |
| A | 10:00:00 | 10:02:00 |
| A | 12:00:00 | 12:02:00 |
| A | 14:00:00 | 14:02:00 |
| A | 16:00:00 | 16:02:00 |
| A | 18:00:00 | 18:02:00 |
| B | 00:00:00 | 00:05:00 |
| B | 01:00:00 | 01:05:00 |
| B | 02:00:00 | 02:05:00 |
| B | 03:00:00 | 03:05:00 |
| B | 04:00:00 | 04:05:00 |
| B | 05:00:00 | 05:05:00 |
| B | 06:00:00 | 06:05:00 |
| B | 07:00:00 | 07:05:00 |
| B | 08:00:00 | 08:05:00 |
| B | 09:00:00 | 09:05:00 |
| B | 10:00:00 | 10:05:00 |
| B | 11:00:00 | 11:05:00 |
| B | 12:00:00 | 12:05:00 |
| B | 13:00:00 | 13:05:00 |
| B | 14:00:00 | 14:05:00 |
| B | 15:00:00 | 15:05:00 |
| B | 16:00:00 | 16:05:00 |
| B | 17:00:00 | 17:05:00 |
| B | 18:00:00 | 18:05:00 |
| B | 19:00:00 | 19:05:00 |
| B | 20:00:00 | 20:05:00 |
| B | 21:00:00 | 21:05:00 |
| B | 22:00:00 | 22:05:00 |
| B | 23:00:00 | 23:05:00 |
| C | 00:00:00 | 00:15:00 |
| C | 04:00:00 | 04:15:00 |
| C | 08:00:00 | 08:15:00 |
| C | 12:00:00 | 12:15:00 |
| C | 16:00:00 | 16:15:00 |
| C | 20:00:00 | 20:15:00 |
+------+------------+----------+
I am hoping to be able to create a view out of this, so I am trying to do it without cursors or other CPU-intensive methods.
Any ideas?
Thanks,
Dan.
You could use a recursive CTE like this:
;WITH temp AS
(
SELECT t.Name, CAST(t.Start_Time AS time) AS CurrentStart_Time, dateadd(minute,t.Duration_Mins,CAST(t.Start_Time AS time)) AS CurrentEnd_Time, t.Frequency_Hours, CAST(t.End_Time AS time) AS End_Time
FROM @Tmp t
UNION ALL
SELECT t.Name, dateadd(hour,t.Frequency_Hours,t.CurrentStart_Time), dateadd(hour,t.Frequency_Hours,t.CurrentEnd_Time), t.Frequency_Hours, t.End_Time
FROM temp t
WHERE t.CurrentStart_Time < t.End_Time AND t.CurrentStart_Time < dateadd(hour,t.Frequency_Hours,t.CurrentStart_Time)
)
SELECT t.Name, t.CurrentStart_Time, t.CurrentEnd_Time
FROM temp t
ORDER BY t.Name
OPTION (MAXRECURSION 0)
Demo link: http://rextester.com/XJK25805
It can be done without a recursive CTE as well.
If we create a numbers table instead of using SELECT DISTINCT number FROM master..spt_values, performance will be far better; the numbers table only needs to cover 0 to 100 or so (a sketch of such a table follows the query below).
Try this with various sample data:
declare @t table(Name varchar(20), Start_Time time(0),End_Time time(0)
    , Frequency_Hours int,Duration_Mins int)
insert into @t VALUES
('A','08:00:00','18:00:00', 2 , 2 )
,('B','00:00:00','23:59:59', 1 , 5 )
,('C','00:00:00','23:59:59', 4 ,15 )
SELECT NAME
,dateadd(hour, n, Start_Time) Start_Time
,dateadd(minute, Duration_Mins, (dateadd(hour, n, Start_Time))) End_Time
FROM @t t
CROSS APPLY (
SELECT DISTINCT number * Frequency_Hours n
FROM master..spt_values
WHERE number >= 0
AND number <= datediff(HOUR, t.Start_Time, t.End_Time) / Frequency_Hours
) ca
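A sketch of the suggested numbers table (my own illustration, not part of the original answer) that could replace the master..spt_values lookup:
-- Assumed helper: a persisted numbers table covering 0..100, used in place of
-- SELECT DISTINCT number FROM master..spt_values in the CROSS APPLY above.
CREATE TABLE dbo.Numbers (number int NOT NULL PRIMARY KEY);
INSERT INTO dbo.Numbers (number)
SELECT TOP (101) ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) - 1
FROM sys.all_objects;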