Version : 2008 R2
Hi,
I am trying to build a salable proc which does the computation logic and currently i have 50k user. I am thinking in future perspective and targeting this logic for 500k users.
Sample table schema and test data
create table Comp_Detail(IDcompDetail int identity(1,1) primary key,IDcompAFR int,CompID int default 1050,UserId bigint,
TransferAmount money, processStatus bit default 0,AmtTransferDate datetime);
--===== Create and populate the balance table on-the-fly
;WITH
cteRowSource1 AS
(
SELECT TOP 500000
N = ROW_NUMBER() OVER (ORDER BY (SELECT NULL))
FROM master.sys.all_columns ac1
CROSS JOIN master.sys.all_columns ac2
)
INSERT into Comp_Detail(IDcompAFR,CompID,UserId,TransferAmount)
SELECT 1000, 1050,
UserId = ISNULL(N,0)
,TransferAmount = N*10
FROM cteRowSource1
-- select * from Comp_Detail
--===== Create and populate the balance table on-the-fly
Create table User_bank(IDUserBank int identity(1,1) primary key, UserId bigint,Amount_Pend money,Amount_Available money,
LastModDt datetime);
;WITH
cteRowSource AS
(
SELECT TOP 500000
N = ROW_NUMBER() OVER (ORDER BY (SELECT NULL))
FROM master.sys.all_columns ac1
CROSS JOIN master.sys.all_columns ac2
)
Insert into User_bank(UserId,Amount_Pend,Amount_Available)
SELECT UserId = ISNULL(N,0)
,PendingAmount = N*10
,AvailableAmount = N*2
FROM cteRowSource
;-- select * from member_balance;
update Comp_Detail set IDcompAFR = 1001 where IDcompDetail > 10000 and IDcompDetail < 20000 ;
update Comp_Detail set IDcompAFR = 1002 where IDcompDetail > 20000 and IDcompDetail < 30000;
update Comp_Detail set IDcompAFR = 1003 where IDcompDetail > 30000 and IDcompDetail < 40000;
update Comp_Detail set IDcompAFR = 1004 where IDcompDetail > 40000 and IDcompDetail <50000;
update Comp_Detail set IDcompAFR = 1005 where IDcompDetail > 50000 and IDcompDetail < 60000;
My logic below,
Declare @CompID int = 1050; BEGIN -- Check if any data available to be processed IF EXISTS ( SELECT TOP 1 IDcompAFR FROM Comp_Detail WHERE CompID = @CompID AND coalesce(processStatus, 0) = 0 ) BEGIN BEGIN TRY -- Set it so if the first UPDATE fails, we won't even start the second update.This really says "If we're in a transaction -- and something fails, stop processing the transaction and do a rollback if we can". SET XACT_ABORT ON; -- temp variable to hold the actual data. this will be used to get IDcompAFR once the balance updated DECLARE @ActualData TABLE ( UserId BIGINT ,IDcompAFR BIGINT ,ProcessingAmount MONEY ); -- table variable to capture the Affected UserId's DECLARE @AffecedRecords TABLE (UserId BIGINT); BEGIN TRANSACTION; -- Get the whole data to be processed. INSERT INTO @ActualData ( UserId ,IDcompAFR ,ProcessingAmount ) SELECT UserId ,IDcompAFR ,ProcessingAmount = COALESCE(TransferAmount, 0) FROM Comp_Detail WHERE CompID = @CompID AND coalesce(processStatus, 0) = 0 ; -- Aggregare the ProcessingAmount based on UserId WITH AggregateData AS ( SELECT UserId ,ProcessingAmount = SUM(COALESCE(ProcessingAmount, 0)) FROM @ActualData GROUP BY UserId ) --Do the Amount update and capture the UserId that are affected. UPDATE UB SET UB.Amount_Available = COALESCE(UB.Amount_Available, 0) + AD.ProcessingAmount ,UB.Amount_Pend = COALESCE(UB.Amount_Pend, 0) - AD.ProcessingAmount ,LastModDt = getdate() OUTPUT deleted.UserId INTO @AffecedRecords(UserId) FROM User_bank UB INNER JOIN AggregateData AD ON UB.UserId = AD.UserId; --===== Using the captured UserId get the IDcompAFR from @ActualData temp variable -- and then update the processStatus = 1 --- means OFR processed for the trip . UPDATE Comp_Detail SET processStatus = 1 ,AmtTransferDate = getdate() WHERE IDcompAFR IN ( SELECT DISTINCT AD.IDcompAFR FROM @ActualData AD INNER JOIN @AffecedRecords AR ON (AD.UserId = AR.UserId) ) AND processStatus = 0; COMMIT TRANSACTION; END TRY BEGIN CATCH DECLARE @ErrorMessage NVARCHAR(4000); DECLARE @ErrorSeverity INT; DECLARE @ErrorState INT; SELECT @ErrorMessage = ERROR_MESSAGE() ,@ErrorSeverity = ERROR_SEVERITY() ,@ErrorState = ERROR_STATE(); ROLLBACK TRANSACTION; RAISERROR ( @ErrorMessage ,@ErrorSeverity ,@ErrorState ); END CATCH; END END GOthe query logic takes 20 + minutes and it keeps on running. not sure what mistake i did. any suggestion to improve the speed please


