The following script is for SQL Server. I wrote it to show a friend why he should store currency values in money columns rather than float columns.
If you run the script, you will notice that summing the second table returns an incorrect value from the float column. I know the error can be hidden by rounding the result, but it is easier to use the type that gives you the right result in the first place.
USE Northwind
-- Recreate the first demo table (guarded so the script reruns cleanly)
IF OBJECT_ID('tempdb..#Test1') IS NOT NULL DROP TABLE #Test1
CREATE TABLE #Test1 (
ID INT NOT NULL IDENTITY(0, 1),
Test1 float,
Test2 money)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.1, 1.1)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.2, 1.2)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.3, 1.3)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.4, 1.4)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.5, 1.5)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.6, 1.6)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.7, 1.7)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.8, 1.8)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.9, 1.9)
INSERT INTO #Test1 (Test1, Test2) VALUES (1.0, 1.0)
-- Row by row, both columns look identical
SELECT Test1 AS [FLOAT], Test2 AS [MONEY] FROM #Test1
-- Recreate the second demo table
IF OBJECT_ID('tempdb..#Test2') IS NOT NULL DROP TABLE #Test2
CREATE TABLE #Test2 (
ID INT NOT NULL IDENTITY(0, 1),
Test1 float,
Test2 money)
-- Insert 1.3 into both columns 1,000 times
DECLARE @i int
SET @i = 0
WHILE (@i < 1000)
BEGIN
INSERT INTO #Test2 (Test1, Test2) VALUES (1.3, 1.3)
SET @i = @i + 1
END
-- The money column sums to exactly 1300.00; the float total drifts away from it
SELECT SUM(Test1) AS [FLOAT], SUM(Test2) AS [MONEY] FROM #Test2
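For completeness, the rounding workaround I mentioned above would look something like the sketch below, run against the same #Test2 table. The two-decimal-place precision is my assumption for a typical currency; rounding hides the drift in the displayed total but does not remove the underlying representation error in the stored float values.

-- Sketch: mask the float drift by rounding the aggregate (assumes 2 decimal places)
SELECT ROUND(SUM(Test1), 2) AS [FLOAT rounded], SUM(Test2) AS [MONEY] FROM #Test2

The money column needs no such adjustment, which is the point of the exercise.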