Glaze #3769 (Draft)


Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 64 additions & 59 deletions .drone.jsonnet
@@ -8,12 +8,12 @@ local servers = {
};

local extra_servers = {
[current_branch]: ["11.4-enterprise"],
[current_branch]: [],
};


local platforms = {
[current_branch]: ["rockylinux:8", "rockylinux:9", "rockylinux:10", "debian:12", "ubuntu:22.04", "ubuntu:24.04"],
[current_branch]: ["rockylinux:10"],
};


@@ -646,71 +646,76 @@ local Pipeline(branch, platform, event, arch="amd64", server="10.6-enterprise",


local AllPipelines =
[
Pipeline(b, platform, triggeringEvent, a, server, flag, "")
for a in ["amd64"]
for b in std.objectFields(platforms)
for platform in ["rockylinux:8"]
for flag in ["gcc-toolset"]
for triggeringEvent in events
for server in servers[current_branch]
] +
// [
// Pipeline(b, platform, triggeringEvent, a, server, flag, "")
// for a in ["amd64"]
// for b in std.objectFields(platforms)
// for platform in ["rockylinux:8"]
// for flag in ["gcc-toolset"]
// for triggeringEvent in events
// for server in servers[current_branch]
// ] +
[
Pipeline(b, p, e, a, s)
for b in std.objectFields(platforms)
for p in platforms[b]
for s in servers[b]
for e in events
for a in archs
] +
[
Pipeline(any_branch, p, "custom", a, server)
for p in platforms[current_branch]
for server in servers[current_branch]
for a in archs
] +
// clang
[
Pipeline(b, platform, triggeringEvent, a, server, "", buildenv)
for a in ["amd64"]
for b in std.objectFields(platforms)
for platform in ["ubuntu:24.04"]
for buildenv in std.objectFields(customEnvCommandsMap)
for triggeringEvent in events
for server in servers[current_branch]
] +
// last argument is to ignore mtr and regression failures
[
Pipeline(b, platform, triggeringEvent, a, server, "", "", ["regression", "mtr"])
for a in ["amd64"]
for b in std.objectFields(platforms)
for platform in ["ubuntu:24.04", "rockylinux:9"]
for triggeringEvent in events
for server in extra_servers[current_branch]
] +
];


//+
// [
// Pipeline(any_branch, p, "custom", a, server)
// for p in platforms[current_branch]
// for server in servers[current_branch]
// for a in archs
// ]
// ;
// +
// // clang
// [
// Pipeline(b, platform, triggeringEvent, a, server, "", buildenv)
// for a in ["amd64"]
// for b in std.objectFields(platforms)
// for platform in ["ubuntu:24.04"]
// for buildenv in std.objectFields(customEnvCommandsMap)
// for triggeringEvent in events
// for server in servers[current_branch]
// ] +
// // last argument is to ignore mtr and regression failures
[
Pipeline(b, platform, triggeringEvent, a, server, flag, envcommand, ["regression", "mtr"])
for a in ["amd64"]
for b in std.objectFields(platforms)
for platform in ["ubuntu:24.04"]
for flag in ["libcpp"]
for envcommand in ["clang-20"]
for triggeringEvent in events
for server in servers[current_branch]
] +
// last argument is to ignore mtr and regression failures
[
Pipeline(b, platform, triggeringEvent, a, server, flag, "", ["regression", "mtr"])
for a in ["amd64"]
for b in std.objectFields(platforms)
for platform in ["ubuntu:24.04"]
for flag in ["ASan", "UBSan"]
for triggeringEvent in events
for server in servers[current_branch]
] +

[];
// [
// Pipeline(b, platform, triggeringEvent, a, server, "", "", ["regression", "mtr"])
// for a in ["amd64"]
// for b in std.objectFields(platforms)
// for platform in ["ubuntu:24.04", "rockylinux:9"]
// for triggeringEvent in events
// for server in extra_servers[current_branch]
// ] +
// // // last argument is to ignore mtr and regression failures
// [
// Pipeline(b, platform, triggeringEvent, a, server, flag, envcommand, ["regression", "mtr"])
// for a in ["amd64"]
// for b in std.objectFields(platforms)
// for platform in ["ubuntu:24.04"]
// for flag in ["libcpp"]
// for envcommand in ["clang-20"]
// for triggeringEvent in events
// for server in servers[current_branch]
// ] +
// // last argument is to ignore mtr and regression failures
// [
// Pipeline(b, platform, triggeringEvent, a, server, flag, "", ["regression", "mtr"])
// for a in ["amd64"]
// for b in std.objectFields(platforms)
// for platform in ["ubuntu:24.04"]
// for flag in ["ASan", "UBSan"]
// for triggeringEvent in events
// for server in servers[current_branch]
// ] +

// [];


local FinalPipeline(branch, event) = {
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -39,6 +39,7 @@ include(CheckCXXSourceCompiles)
include(packages)
include(boost)
include(thrift)
include(glaze)
include(dirs)
include(includes)
include(libs)
17 changes: 2 additions & 15 deletions cmake/compiler_flags.cmake
@@ -16,21 +16,7 @@ macro(SET_FLAGS_RELEASE)
endforeach()
endmacro()

# C++ standard {
if(have_CXX__std_c__20)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++20")
else()
my_check_cxx_compiler_flag("-std=c++2a")
if(have_CXX__std_c__2a)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++2a")
else()
message_once(CS_NO_CXX20 "C++ Compiler does not understand -std=c++20")
return()
endif()
endif()

unset(CMAKE_CXX_STANDARD)
# } end C++ standard
set(CMAKE_CXX_STANDARD 23)

# Hacks to keep alive with MariaDB server {
string(REPLACE -D_GLIBCXX_DEBUG "" CMAKE_CXX_FLAGS_DEBUG ${CMAKE_CXX_FLAGS_DEBUG})
@@ -59,6 +45,7 @@ set(FLAGS_ALL
-DHAVE_CONFIG_H
-DBOOST_BIND_GLOBAL_PLACEHOLDERS
-Wno-suggest-override
-foperator-names
)
if(COLUMNSTORE_WITH_LIBCPP)
list(APPEND FLAGS_ALL -stdlib=libc++)
9 changes: 9 additions & 0 deletions cmake/glaze.cmake
@@ -0,0 +1,9 @@
include(FetchContent)

FetchContent_Declare(
glaze
GIT_REPOSITORY https://github.com/stephenberry/glaze.git
GIT_TAG v5.7.1
GIT_SHALLOW TRUE
)
FetchContent_MakeAvailable(glaze)
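
Glaze is pulled from source at configure time via FetchContent, pinned to v5.7.1. For orientation, a minimal sketch of the round-trip the Glaze-based JSON functions build on; the Sample struct and its values are illustrative only, and the snippet assumes Glaze's glz::write_json / glz::read_json free functions and its pure reflection of aggregate structs:

#include <glaze/glaze.hpp>
#include <cstdio>
#include <string>

struct Sample  // hypothetical aggregate, not part of this patch
{
  int a{1};
  std::string s{"foo"};
};

int main()
{
  // Serialize: Glaze reflects the aggregate's members into JSON keys.
  std::string out{};
  if (auto ec = glz::write_json(Sample{}, out); ec)
    return 1;
  std::printf("%s\n", out.c_str());  // expected: {"a":1,"s":"foo"}

  // Parse the text back into a fresh instance.
  Sample parsed{};
  if (auto ec = glz::read_json(parsed, out); ec)
    return 1;
  return (parsed.a == 1 && parsed.s == "foo") ? 0 : 1;
}
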
12 changes: 9 additions & 3 deletions datatypes/mcs_float128.h
@@ -25,14 +25,20 @@
#include <string>
#include "mcs_numeric_limits.h"

#ifdef __aarch64__
using float128_t = long double;
#if defined(__STDCPP_FLOAT128_T__)
# if defined(__has_include) && __has_include(<stdfloat>)
# include <stdfloat>
# endif
using float128_t = std::float128_t;
#elif defined(__aarch64__)
using float128_t = long double;
#else
using float128_t = __float128;
using float128_t = __float128;
#endif

namespace datatypes
{

/* Main union type we use to manipulate the floating-point type. */
typedef union
{
4 changes: 2 additions & 2 deletions dbcon/joblist/rowestimator.cpp
@@ -188,7 +188,7 @@ float RowEstimator::estimateOpFactor(const T& min, const T& max, const T& value,
uint32_t distinctValues, char cpStatus,
const execplan::CalpontSystemCatalog::ColType& ct)
{
float factor = 1.0;
float128_t factor = 1.0;

switch (op)
{
@@ -255,7 +255,7 @@ float RowEstimator::estimateOpFactor(const T& min, const T& max, const T& value,
factor = 1.0;
}

return factor;
return float(factor);
}

// Estimate the percentage of rows that will be returned for a particular extent.
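
The estimator's intermediate factor is now held in the float128_t alias and only narrowed back to float at the return. As a general illustration of why a wider intermediate can matter when many small per-predicate factors are multiplied together (values are made up; long double stands in for float128_t so the snippet builds anywhere):

#include <cstdio>

int main()
{
  float narrow = 1.0f;
  long double wide = 1.0L;  // stand-in for the float128_t alias in this sketch

  // Multiply a dozen artificially small selectivity factors together.
  for (int i = 0; i < 12; ++i)
  {
    narrow *= 1e-4f;
    wide *= 1e-4L;
  }

  // The float accumulator underflows to zero, while the wide one still holds ~1e-48.
  std::printf("narrow=%g wide=%Lg\n", static_cast<double>(narrow), wide);
  return 0;
}
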
100 changes: 100 additions & 0 deletions mysql-test/columnstore/basic/r/json_general.result
@@ -0,0 +1,100 @@
DROP DATABASE IF EXISTS json_general;
CREATE DATABASE json_general;
USE json_general;
JSON General sanity tests for ColumnStore funcexp (Glaze-based)
DROP TABLE IF EXISTS t1;
CREATE TABLE t1 (j text) engine=columnstore;
INSERT INTO t1 VALUES ('{"a":1, "b":[1,2,3], "s":"foo", "o":{"k":2}}');
JSON_VALID
SELECT JSON_VALID('{"a":1}') AS v1, JSON_VALID('{bad') AS v2;
v1 v2
1 0
JSON_TYPE
SELECT JSON_TYPE('{"a":1}') AS t1, JSON_TYPE('[1]') AS t2, JSON_TYPE('"x"') AS t3, JSON_TYPE('1') AS t4, JSON_TYPE('true') AS t5, JSON_TYPE('null') AS t6;
t1 t2 t3 t4 t5 t6
OBJECT ARRAY STRING INTEGER BOOLEAN NULL
JSON_NORMALIZE (compact canonical)
SELECT JSON_NORMALIZE(' { "b":2, "a":1 }') AS norm1;
norm1
{"a":1.0E0,"b":2.0E0}
JSON_OBJECT and JSON_ARRAY
SELECT JSON_OBJECT('a',1,'b','x') AS obj1;
obj1
{"a": 1, "b": "x"}
SELECT JSON_ARRAY(1,'x','{"k":2}') AS arr1;
arr1
[1, "x", "{\"k\":2}"]
JSON_MERGE_PRESERVE
SELECT JSON_MERGE_PRESERVE('{"a":1}', '{"b":2}') AS m1;
m1
{"a": 1, "b": 2}
JSON_MERGE_PATCH
SELECT JSON_MERGE_PATCH('{"a":1,"b":2}', '{"b":null, "c":3}') AS mp1;
mp1
{"a": 1, "c": 3}
JSON_CONTAINS
SELECT JSON_CONTAINS('{"a":1,"b":[1,2,3]}', '{"a":1}') AS c1,
JSON_CONTAINS('{"a":1}', '{"a":2}') AS c2;
c1 c2
1 0
JSON_CONTAINS_PATH ONE/ALL
SELECT JSON_CONTAINS_PATH('{"a":1,"b":[{"x":1},{"y":2}]}', 'one', '$.b[*].x', '$.a') AS cp_one,
JSON_CONTAINS_PATH('{"a":1,"b":[{"x":1},{"y":2}]}', 'all', '$.b[*].x', '$.a') AS cp_all;
cp_one cp_all
1 1
JSON_EXISTS
SELECT JSON_EXISTS('{"a":{"k":2}}', '$.a.k') AS e1, JSON_EXISTS('{"a":{"k":2}}', '$.a.z') AS e2;
e1 e2
1 0
JSON_QUERY (returns complex)
SELECT JSON_QUERY('{"a":{"k":2},"b":[1,2]}', '$.a') AS q1;
q1
{"k":2}
JSON_VALUE (returns scalar)
SELECT JSON_VALUE('{"a":{"k":2},"b":[1,2]}', '$.a.k') AS v_scalar;
v_scalar
2
JSON_EXTRACT basic and wildcard
SELECT JSON_EXTRACT('{"a":1,"b":[1,2,3]}', '$.b[1]') AS ex1,
JSON_EXTRACT('{"a":1,"b":[1,2,3]}', '$.b[*]') AS ex2;
ex1 ex2
2 [1, 2, 3]
JSON_ARRAY_APPEND (append 4 to all b arrays)
SELECT JSON_ARRAY_APPEND('{"b":[1,2]}', '$.b', '4') AS aa1;
aa1
{"b": [1, 2, "4"]}
JSON_ARRAY_INSERT (insert at index 1)
SELECT JSON_ARRAY_INSERT('{"b":[1,3]}', '$.b[1]', '2') AS ai1;
ai1
{"b": [1, "2", 3]}
JSON_REMOVE (remove key and index)
SELECT JSON_REMOVE('{"a":1,"b":[1,2,3]}', '$.a', '$.b[0]') AS rm1;
rm1
{"b": [2, 3]}
JSON_SEARCH with ONE
SELECT JSON_SEARCH('{"a":"hello","b":["x","hell"]}', 'one', 'hel%') AS js1;
js1
"$.a"
JSON_EQUALS
SELECT JSON_EQUALS('{"x":1,"y":2}', '{"x":1,"y":2}') AS jeq1,
JSON_EQUALS('{"x":1}', '{"x":2}') AS jeq2;
jeq1 jeq2
1 0
JSON_OVERLAPS
SELECT JSON_OVERLAPS('{"a":1,"b":2}', '{"b":2}') AS jo1,
JSON_OVERLAPS('[1,2,3]', '[3,4]') AS jo2,
JSON_OVERLAPS('1', '[0,1,2]') AS jo3;
jo1 jo2 jo3
1 1 1
JSON_DEPTH
SELECT JSON_DEPTH('{"a":[{"k":1},2]}') AS jd1;
jd1
4
JSON_LENGTH
SELECT JSON_LENGTH('{"a":[1,2,3]}') AS jl1,
JSON_LENGTH('[1,2,3,4]') AS jl2,
JSON_LENGTH('1') AS jl3;
jl1 jl2 jl3
1 4 1
DROP TABLE t1;
DROP DATABASE json_general;