// Copyright 2018 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package expression

// This file contains benchmarks of our expression evaluation.

import (
	"fmt"
	"testing"
	"time"

	"github.com/hanchuanchuan/goInception/ast"
	"github.com/hanchuanchuan/goInception/model"
	"github.com/hanchuanchuan/goInception/mysql"
	"github.com/hanchuanchuan/goInception/sessionctx"
	"github.com/hanchuanchuan/goInception/types"
	"github.com/hanchuanchuan/goInception/util/charset"
	"github.com/hanchuanchuan/goInception/util/chunk"
	"github.com/hanchuanchuan/goInception/util/mock"
)

type benchHelper struct {
	ctx   sessionctx.Context
	exprs []Expression

	inputTypes  []*types.FieldType
	outputTypes []*types.FieldType
	inputChunk  *chunk.Chunk
	outputChunk *chunk.Chunk
}

func (h *benchHelper) init() {
	numRows := 4 * 1024

	h.ctx = mock.NewContext()
	h.ctx.GetSessionVars().StmtCtx.TimeZone = time.Local
	h.ctx.GetSessionVars().MaxChunkSize = numRows

	h.inputTypes = make([]*types.FieldType, 0, 10)
	h.inputTypes = append(h.inputTypes, &types.FieldType{
		Tp:      mysql.TypeLonglong,
		Flen:    mysql.MaxIntWidth,
		Decimal: 0,
		Flag:    mysql.BinaryFlag,
		Charset: charset.CharsetBin,
		Collate: charset.CollationBin,
	})
	h.inputTypes = append(h.inputTypes, &types.FieldType{
		Tp:      mysql.TypeDouble,
		Flen:    mysql.MaxRealWidth,
		Decimal: types.UnspecifiedLength,
		Flag:    mysql.BinaryFlag,
		Charset: charset.CharsetBin,
		Collate: charset.CollationBin,
	})
	h.inputTypes = append(h.inputTypes, &types.FieldType{
		Tp:      mysql.TypeNewDecimal,
		Flen:    11,
		Decimal: 0,
		Flag:    mysql.BinaryFlag,
		Charset: charset.CharsetBin,
		Collate: charset.CollationBin,
	})

	// Use 20 string columns to show the cache performance.
	for i := 0; i < 20; i++ {
		h.inputTypes = append(h.inputTypes, &types.FieldType{
			Tp:      mysql.TypeVarString,
			Flen:    0,
			Decimal: types.UnspecifiedLength,
			Charset: charset.CharsetUTF8,
			Collate: charset.CollationUTF8,
		})
	}

	h.inputChunk = chunk.NewChunkWithCapacity(h.inputTypes, numRows)
	for rowIdx := 0; rowIdx < numRows; rowIdx++ {
		h.inputChunk.AppendInt64(0, 4)
		h.inputChunk.AppendFloat64(1, 2.019)
		h.inputChunk.AppendMyDecimal(2, types.NewDecFromFloatForTest(5.9101))
		for i := 0; i < 20; i++ {
			h.inputChunk.AppendString(3+i, `abcdefughasfjsaljal1321798273528791!&(*#&@&^%&%^&!)sadfashqwer`)
		}
	}

	cols := make([]*Column, 0, len(h.inputTypes))
	for i := 0; i < len(h.inputTypes); i++ {
		cols = append(cols, &Column{
			ColName: model.NewCIStr(fmt.Sprintf("col_%v", i)),
			RetType: h.inputTypes[i],
			Index:   i,
		})
	}

	h.exprs = make([]Expression, 0, 10)
	if expr, err := NewFunction(h.ctx, ast.Substr, h.inputTypes[3], []Expression{cols[3], cols[2]}...); err != nil {
		panic("create SUBSTR function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.Plus, h.inputTypes[0], []Expression{cols[1], cols[2]}...); err != nil {
		panic("create PLUS function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.GT, h.inputTypes[2], []Expression{cols[11], cols[8]}...); err != nil {
		panic("create GT function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.GT, h.inputTypes[2], []Expression{cols[19], cols[10]}...); err != nil {
		panic("create GT function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.GT, h.inputTypes[2], []Expression{cols[17], cols[4]}...); err != nil {
		panic("create GT function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.GT, h.inputTypes[2], []Expression{cols[18], cols[5]}...); err != nil {
		panic("create GT function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.LE, h.inputTypes[2], []Expression{cols[19], cols[4]}...); err != nil {
		panic("create LE function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	if expr, err := NewFunction(h.ctx, ast.EQ, h.inputTypes[2], []Expression{cols[20], cols[3]}...); err != nil {
		panic("create EQ function failed.")
	} else {
		h.exprs = append(h.exprs, expr)
	}
	h.exprs = append(h.exprs, cols[2])
	h.exprs = append(h.exprs, cols[2])

	h.outputTypes = make([]*types.FieldType, 0, len(h.exprs))
	for i := 0; i < len(h.exprs); i++ {
		h.outputTypes = append(h.outputTypes, h.exprs[i].GetType())
	}

	h.outputChunk = chunk.NewChunkWithCapacity(h.outputTypes, numRows)
}

func BenchmarkVectorizedExecute(b *testing.B) {
	h := benchHelper{}
	h.init()
	inputIter := chunk.NewIterator4Chunk(h.inputChunk)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		h.outputChunk.Reset()
		if err := VectorizedExecute(h.ctx, h.exprs, inputIter, h.outputChunk); err != nil {
			panic("errors happened during \"VectorizedExecute\"")
		}
	}
}

func BenchmarkScalarFunctionClone(b *testing.B) {
	col := &Column{RetType: types.NewFieldType(mysql.TypeLonglong)}
	con1 := One.Clone()
	con2 := Zero.Clone()
	add := NewFunctionInternal(mock.NewContext(), ast.Plus, types.NewFieldType(mysql.TypeLonglong), col, con1)
	sub := NewFunctionInternal(mock.NewContext(), ast.Plus, types.NewFieldType(mysql.TypeLonglong), add, con2)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sub.Clone()
	}
	b.ReportAllocs()
}
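// Usage sketch (assumed invocation, adjust to your environment): the benchmarks
// above can be run with the standard Go test tooling from this package's
// directory, skipping unit tests and reporting allocations, e.g.
//
//	go test -run=NONE -bench='VectorizedExecute|ScalarFunctionClone' -benchmem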