Unverified commit 2d9e3003, authored by iamlinjunhong, committed by GitHub

change decimal default scale same as mysql (#4597)


Co-authored-by: bRong Njam <longran1989@gmail.com>
Co-authored-by: fengttt <fengttt@gmail.com>
parent c3d8128c
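Context for the change: MySQL sizes the result of a decimal division by adding the div_precision_increment system variable (default 4) to the dividend's scale, so SELECT 1/3 returns 0.3333. This commit introduces a MYSQL_DEFAULT_SCALE constant of 4 and uses it as the fixed scale of decimal division results, instead of reusing the dividend's scale. A minimal Go sketch of the rounding behavior being matched (illustrative only, not the engine's decimal code):

package main

import (
	"fmt"
	"math/big"
)

// Divide exactly with math/big, then round the quotient to 4 fractional
// digits, the default MySQL applies via div_precision_increment.
func main() {
	const mysqlDefaultScale = 4 // mirrors the new types.MYSQL_DEFAULT_SCALE
	q := new(big.Rat).SetFrac64(1, 3)
	fmt.Println(q.FloatString(mysqlDefaultScale)) // 0.3333
}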
@@ -39,6 +39,7 @@ const (
 	DECIMAL128_ZSTR_LEN = 43
 	DECIMAL128_WIDTH    = 34
 	DECIMAL128_NBYTES   = 16
+	MYSQL_DEFAULT_SCALE = 4
 )
 func dec64PtrToC(p *Decimal64) *C.int64_t {
......
@@ -804,18 +804,7 @@ func bindFuncExprImplByPlanExpr(name string, args []*Expr) (*plan.Expr, error) {
 				return nil, err
 			}
 		}
-	case "variance":
-		if args[0].Typ.Id == int32(types.T_decimal128) || args[0].Typ.Id == int32(types.T_decimal64) {
-			args[0], err = appendCastBeforeExpr(args[0], &plan.Type{
-				Id: int32(types.T_float64),
-				Nullable: false,
-			})
-			if err != nil {
-				return nil, err
-			}
-		}
-	case "stddev_pop":
+	case "variance", "oct", "stddev_pop", "std":
 		if args[0].Typ.Id == int32(types.T_decimal128) || args[0].Typ.Id == int32(types.T_decimal64) {
 			args[0], err = appendCastBeforeExpr(args[0], &plan.Type{
 				Id: int32(types.T_float64),
......
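The hunk above folds what were separate variance and stddev_pop cases into one case list: variance, oct, stddev_pop, and std all apply the same rewrite, casting a decimal64/decimal128 argument to float64 before the function is bound. A minimal sketch of that predicate, using hypothetical stand-in type IDs for types.T_decimal64 and types.T_decimal128:

package main

import "fmt"

// Hypothetical stand-ins for types.T_decimal64 / types.T_decimal128.
const (
	tDecimal64  int32 = 1
	tDecimal128 int32 = 2
)

// needsFloat64Cast captures the consolidated rule: these four functions
// all cast a decimal argument to float64 first, rather than each case
// repeating the same cast block.
func needsFloat64Cast(fn string, argTypeID int32) bool {
	switch fn {
	case "variance", "oct", "stddev_pop", "std":
		return argTypeID == tDecimal64 || argTypeID == tDecimal128
	}
	return false
}

func main() {
	fmt.Println(needsFloat64Cast("std", tDecimal128)) // true
	fmt.Println(needsFloat64Cast("sum", tDecimal64))  // false
}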
@@ -167,15 +167,11 @@ func DivFloat[T constraints.Float](args []*vector.Vector, proc *process.Process)
 	return Arith[T, T](args, proc, args[0].GetType(), div.NumericDivFloat[T])
 }
 func DivDecimal64(args []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
-	lv := args[0]
-	resultScale := lv.Typ.Scale
-	resultTyp := types.Type{Oid: types.T_decimal128, Size: types.DECIMAL128_NBYTES, Width: types.DECIMAL128_WIDTH, Scale: resultScale}
+	resultTyp := types.Type{Oid: types.T_decimal128, Size: types.DECIMAL128_NBYTES, Width: types.DECIMAL128_WIDTH, Scale: types.MYSQL_DEFAULT_SCALE}
 	return Arith[types.Decimal64, types.Decimal128](args, proc, resultTyp, div.Decimal64VecDiv)
 }
 func DivDecimal128(args []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
-	lv := args[0]
-	resultScale := lv.Typ.Scale
-	resultTyp := types.Type{Oid: types.T_decimal128, Size: types.DECIMAL128_NBYTES, Width: types.DECIMAL128_WIDTH, Scale: resultScale}
+	resultTyp := types.Type{Oid: types.T_decimal128, Size: types.DECIMAL128_NBYTES, Width: types.DECIMAL128_WIDTH, Scale: types.MYSQL_DEFAULT_SCALE}
 	return Arith[types.Decimal128, types.Decimal128](args, proc, resultTyp, div.Decimal128VecDiv)
 }
......
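Before this hunk, DivDecimal64 and DivDecimal128 copied the dividend's scale into the result type, so dividing scale-1 decimals produced a quotient with a single fractional digit. Both now return a decimal128 whose scale is fixed at types.MYSQL_DEFAULT_SCALE. A minimal sketch of the new result-type rule, with a simplified stand-in for the repository's types.Type:

package main

import "fmt"

// Simplified stand-in for types.Type; only the fields this hunk touches.
type Type struct {
	Oid   string
	Size  int32
	Width int32
	Scale int32
}

const mysqlDefaultScale = 4 // types.MYSQL_DEFAULT_SCALE

// divResultType sketches the new rule: decimal division always yields a
// decimal128 with a fixed scale of 4, regardless of the dividend's scale.
func divResultType() Type {
	return Type{Oid: "decimal128", Size: 16, Width: 34, Scale: mysqlDefaultScale}
}

func main() {
	fmt.Printf("%+v\n", divResultType()) // {Oid:decimal128 Size:16 Width:34 Scale:4}
}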
@@ -121,9 +121,7 @@ drop table if exists t1;
 CREATE TABLE t1 (a date);
 insert INTO t1 VALUES ('2022-03-01');
 SELECT * FROM t1 WHERE coalesce(a) BETWEEN '2022-02-28' and '2022-03-01';
--- @bvt:issue#3588
-SELECT * FROM t1 WHERE coalesce(a)=20220301;
--- @bvt:issue
+SELECT * FROM t1 WHERE coalesce(a)='20220301';
 SELECT * FROM t1 WHERE coalesce(a) in ('2022-02-28','2022-03-01');
 drop table t1;
 -- @bvt:issue#3293
......
@@ -170,38 +170,25 @@ select std(s1/s2) from bug22555 where i=2 group by i;
 select std(s1/s2) from bug22555 where i=3 group by i;
 select std(s1/s2) from bug22555 group by i order by i;
 select i, count(*), std(o1/o2) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), std(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 select i, count(*), variance(s1/s2) from bug22555 group by i order by i;
 select i, count(*), variance(o1/o2) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), variance(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 select i, count(*), std(s1/s2) from bug22555 group by i order by i;
 select i, count(*), std(o1/o2) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), std(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 select i, count(*), variance(s1/s2) from bug22555 group by i order by i;
 select i, count(*), variance(o1/o2) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), variance(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 select i, count(*), std(s1/s2) from bug22555 group by i order by i;
 select i, count(*), std(o1/o2) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), std(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 insert into bug22555 values (1,53,78,53,78,53,78),(2,17,78,17,78,17,78),(3,18,76,18,76,18,76);
 insert into bug22555 values (1,53,78,53,78,53,78),(2,17,78,17,78,17,78),(3,18,76,18,76,18,76);
 insert into bug22555 values (1,53,78,53,78,53,78),(2,17,78,17,78,17,78),(3,18,76,18,76,18,76);
 select i, count(*), std(s1/s2) from bug22555 group by i order by i;
 select i, count(*), round(std(o1/o2), 16) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), std(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 select std(s1/s2) from bug22555;
 select std(o1/o2) from bug22555;
 -- @bvt:issue#3588
......
@@ -209,9 +196,7 @@ select std(e1/e2) from bug22555;
 -- @bvt:issue
 select i, count(*), std(s1/s2) from bug22555 group by i order by i;
 select i, count(*), round(std(o1/o2), 16) from bug22555 group by i order by i;
--- @bvt:issue#3588
 select i, count(*), std(e1/e2) from bug22555 group by i order by i;
--- @bvt:issue
 select round(std(s1/s2), 17) from bug22555;
 select std(o1/o2) from bug22555;
 -- @bvt:issue#3588
......
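The test file above un-gates std() and variance() over decimal quotients such as e1/e2, which now run through the float64 cast added in the planner hunk. For reference, MySQL's std() is the population standard deviation; a minimal sketch of the quantity the expected results compute (illustrative, not the engine's aggregate implementation):

package main

import (
	"fmt"
	"math"
)

// Population variance and standard deviation, the definitions behind
// MySQL's variance() and std().
func popVarStd(xs []float64) (variance, std float64) {
	var mean float64
	for _, x := range xs {
		mean += x
	}
	mean /= float64(len(xs))
	for _, x := range xs {
		d := x - mean
		variance += d * d
	}
	variance /= float64(len(xs))
	return variance, math.Sqrt(variance)
}

func main() {
	// Quotients s1/s2 from the three bug22555 rows inserted above.
	v, s := popVarStd([]float64{53.0 / 78.0, 17.0 / 78.0, 18.0 / 76.0})
	fmt.Println(v, s)
}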
@@ -26,12 +26,10 @@ drop table t1;
 drop table t2;
 #HAVING
--- @bvt:issue#3588
 CREATE TABLE t1 (name char(1) default NULL, val int default NULL);
 INSERT INTO t1 VALUES ('a',1), ('a',2), ('a',2), ('a',2), ('a',3), ('a',6), ('a',7), ('a',11), ('a',11), ('a',12), ('a',13), ('a',13), ('a',20), ('b',2), ('b',3), ('b',4), ('b',5);
 SELECT s.name, AVG(s.val) AS median FROM (SELECT x.name, x.val FROM t1 x, t1 y WHERE x.name=y.name GROUP BY x.name, x.val HAVING SUM(y.val <= x.val) >= COUNT(*)/2 AND SUM(y.val >= x.val) >= COUNT(*)/2) AS s GROUP BY s.name;
 drop table t1;
--- @bvt:issue
 #DISTINCT
 create table t1 (a integer, b integer);
......
@@ -135,10 +133,8 @@ create table t1 (df decimal(5,1));
 insert into t1 values(1.1);
 insert into t1 values(2.2);
--- @bvt:issue#3588
 select cast(sum( df) as signed) from t1;
 select cast(min(df) as signed) from t1;
--- @bvt:issue
 select 1e8 * sum( df) from t1;
 select 1e8 * min(df) from t1;
......
@@ -29,12 +29,10 @@ SELECT * FROM t1 ORDER BY a;
 drop table t1;
 #DATATYPE
--- @bvt:issue#3588
 create table t1(a tinyint, b SMALLINT, c bigint, d INT, e BIGINT, f FLOAT, g DOUBLE, h decimal(38,19));
 insert into t1 values(1, 1, 2, 4, 5, 5.5, 31.13, 14.314);
 select EXP(a),EXP(b),EXP(c),EXP(d),EXP(e),EXP(f),EXP(g),EXP(h) from t1;
 drop table t1;
--- @bvt:issue
 #arithmetic operations
 select EXP(123.54-123.03);
......
@@ -7,11 +7,9 @@ select floor(1.1e5);
 select floor(cast(-2 as unsigned)), floor(18446744073709551614), floor(-2);
 SELECT floor(ceil(12345678901234567890));
--- @bvt:issue#3588
 SELECT floor(18446744073709551616);
 SELECT floor(floor(18446744073709551616));
 SELECT floor(floor(floor(18446744073709551616)));
--- @bvt:issue
 #data types
 CREATE TABLE t1(a BIGINT UNSIGNED);
......
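The un-gated floor() cases above use 18446744073709551616, which is 2^64 and therefore one past the BIGINT UNSIGNED maximum, so the literal cannot be parsed as an integer and must take the decimal path; that is presumably why these tests were gated behind the same issue tag. A quick Go check of that boundary:

package main

import (
	"fmt"
	"math/big"
)

// 18446744073709551616 is 2^64, one past math.MaxUint64, so it does not
// fit in any machine integer type and must be handled as a decimal.
func main() {
	v := new(big.Int).Lsh(big.NewInt(1), 64)
	fmt.Println(v.String())   // 18446744073709551616
	fmt.Println(v.IsUint64()) // false
}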
@@ -172,7 +172,7 @@ insert INTO t1 VALUES ('2022-03-01');
 SELECT * FROM t1 WHERE coalesce(a) BETWEEN '2022-02-28' and '2022-03-01';
 a
 2022-03-01
-SELECT * FROM t1 WHERE coalesce(a)=20220301;
+SELECT * FROM t1 WHERE coalesce(a)='20220301';
 a
 2022-03-01
 SELECT * FROM t1 WHERE coalesce(a) in ('2022-02-28','2022-03-01');
......
@@ -33,7 +33,7 @@ drop table t2;
 CREATE TABLE t1 (name char(1) default NULL, val int default NULL);
 INSERT INTO t1 VALUES ('a',1), ('a',2), ('a',2), ('a',2), ('a',3), ('a',6), ('a',7), ('a',11), ('a',11), ('a',12), ('a',13), ('a',13), ('a',20), ('b',2), ('b',3), ('b',4), ('b',5);
 SELECT s.name, AVG(s.val) AS median FROM (SELECT x.name, x.val FROM t1 x, t1 y WHERE x.name=y.name GROUP BY x.name, x.val HAVING SUM(y.val <= x.val) >= COUNT(*)/2 AND SUM(y.val >= x.val) >= COUNT(*)/2) AS s GROUP BY s.name;
-[42883]unsupported parameter types [BOOL] for function 'sum'
+Aggregate function of 'sum' do not support implicit conversions for param of [BOOL]
 drop table t1;
 create table t1 (a integer, b integer);
 insert into t1 values (1,4), (2,2),(2,2), (4,1),(4,1),(4,1),(4,1);
......