10 changes: 9 additions & 1 deletion parser/parser_column.go
@@ -901,7 +901,7 @@ func (p *Parser) parseColumnType(_ Pos) (ColumnType, error) { // nolint:funlen
     if err != nil {
         return nil, err
     }
-    if p.tryConsumeTokenKind(TokenKindLParen) != nil {
+    if lParen := p.tryConsumeTokenKind(TokenKindLParen); lParen != nil {
         switch {
         case p.matchTokenKind(TokenKindIdent):
             switch {
@@ -924,6 +924,14 @@
         case p.matchTokenKind(TokenKindInt), p.matchTokenKind(TokenKindFloat):
             // fixed size
             return p.parseColumnTypeWithParams(ident, p.Pos())
+        case p.matchTokenKind(TokenKindRParen):
+            rightParenPos := p.Pos()
+            _ = p.lexer.consumeToken()
+            return &TypeWithParams{
+                Name:          ident,
+                LeftParenPos:  lParen.Pos,
+                RightParenPos: rightParenPos,
+            }, nil
         default:
             return nil, fmt.Errorf("unexpected token kind: %v", p.lastTokenKind())
         }
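For readers skimming the diff: the added `TokenKindRParen` case is what lets a parameterless type or index expression such as `bloom_filter()` parse into a `TypeWithParams` whose parameter list is empty, using the position of the `(` captured by the new `lParen` binding and the position of the `)` it consumes. A minimal standalone sketch of the same idea — the token and AST names below are invented for illustration, not this project's actual API — might look like:

```go
// Standalone sketch (not the real parser): it mirrors the idea behind the new
// TokenKindRParen case — when '(' is immediately followed by ')', the type is
// accepted with an empty parameter list instead of being rejected.
package main

import "fmt"

// token and typeWithParams are invented stand-ins for the parser's own
// token and TypeWithParams types.
type token struct{ kind, text string }

type typeWithParams struct {
	Name   string
	Params []string
}

// parseType reads "Name", an optional "(", parameters, and ")".
func parseType(toks []token) (*typeWithParams, error) {
	if len(toks) == 0 || toks[0].kind != "ident" {
		return nil, fmt.Errorf("expected type name")
	}
	node := &typeWithParams{Name: toks[0].text}
	toks = toks[1:]

	// Bare type such as UInt64: no parentheses at all.
	if len(toks) == 0 || toks[0].kind != "(" {
		return node, nil
	}
	toks = toks[1:]

	// Collect parameters until ')'. An immediate ')' simply leaves Params
	// empty — the bloom_filter() case this PR adds support for.
	for len(toks) > 0 && toks[0].kind != ")" {
		if toks[0].kind != "," {
			node.Params = append(node.Params, toks[0].text)
		}
		toks = toks[1:]
	}
	if len(toks) == 0 {
		return nil, fmt.Errorf("expected ')'")
	}
	return node, nil
}

func main() {
	node, _ := parseType([]token{{"ident", "bloom_filter"}, {"(", "("}, {")", ")"}})
	fmt.Printf("%s with %d params\n", node.Name, len(node.Params)) // bloom_filter with 0 params
}
```

In the PR itself the same decision is made by peeking at the token kind right after the consumed `(`: integers and floats still take the existing fixed-size path, identifiers keep their existing handling, and a right paren now short-circuits into a parameterless `TypeWithParams`.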
2 changes: 2 additions & 0 deletions parser/testdata/ddl/create_table_with_index.sql
@@ -2,12 +2,14 @@ CREATE TABLE IF NOT EXISTS test_local
 (
     `common.id` String CODEC(ZSTD(1)),
     `id` UInt64 CODEC(Delta, ZSTD(1)),
+    `idx` UInt64 CODEC(Delta, ZSTD(1)),
     `api_id` UInt64 CODEC(ZSTD(1)),
     `arr` Array(Int64),
     `content` String CODEC(ZSTD(1)),
     `output` String,
     INDEX id_common_id_bloom_filter common.id TYPE bloom_filter(0.001) GRANULARITY 1,
     INDEX id_idx id TYPE minmax GRANULARITY 10,
+    INDEX idx_id idx TYPE bloom_filter() GRANULARITY 1,
     INDEX api_id_idx api_id TYPE set(100) GRANULARITY 2,
     INDEX arr_idx arr TYPE bloom_filter(0.01) GRANULARITY 3,
     INDEX content_idx content TYPE tokenbf_v1(30720, 2, 0) GRANULARITY 1,
4 changes: 3 additions & 1 deletion parser/testdata/ddl/format/create_table_with_index.sql
@@ -3,12 +3,14 @@ CREATE TABLE IF NOT EXISTS test_local
 (
     `common.id` String CODEC(ZSTD(1)),
     `id` UInt64 CODEC(Delta, ZSTD(1)),
+    `idx` UInt64 CODEC(Delta, ZSTD(1)),
     `api_id` UInt64 CODEC(ZSTD(1)),
     `arr` Array(Int64),
     `content` String CODEC(ZSTD(1)),
     `output` String,
     INDEX id_common_id_bloom_filter common.id TYPE bloom_filter(0.001) GRANULARITY 1,
     INDEX id_idx id TYPE minmax GRANULARITY 10,
+    INDEX idx_id idx TYPE bloom_filter() GRANULARITY 1,
     INDEX api_id_idx api_id TYPE set(100) GRANULARITY 2,
     INDEX arr_idx arr TYPE bloom_filter(0.01) GRANULARITY 3,
     INDEX content_idx content TYPE tokenbf_v1(30720, 2, 0) GRANULARITY 1,
@@ -22,4 +24,4 @@ SETTINGS execute_merges_on_single_replica_time_threshold=1200, index_granularity
 
 
 -- Format SQL:
-CREATE TABLE IF NOT EXISTS test_local (`common.id` String CODEC(ZSTD(1)), `id` UInt64 CODEC(Delta, ZSTD(1)), `api_id` UInt64 CODEC(ZSTD(1)), `arr` Array(Int64), `content` String CODEC(ZSTD(1)), `output` String, INDEX id_common_id_bloom_filter common.id TYPE bloom_filter(0.001) GRANULARITY 1, INDEX id_idx id TYPE minmax GRANULARITY 10, INDEX api_id_idx api_id TYPE set(100) GRANULARITY 2, INDEX arr_idx arr TYPE bloom_filter(0.01) GRANULARITY 3, INDEX content_idx content TYPE tokenbf_v1(30720, 2, 0) GRANULARITY 1, INDEX output_idx output TYPE ngrambf_v1(3, 10000, 2, 1) GRANULARITY 2) ENGINE = ReplicatedMergeTree('/root/test_local', '{replica}') ORDER BY (toUnixTimestamp64Nano(`timestamp`), `api_id`) PARTITION BY toStartOfHour(`timestamp`) TTL toStartOfHour(`timestamp`) + INTERVAL 7 DAY, toStartOfHour(`timestamp`) + INTERVAL 2 DAY SETTINGS execute_merges_on_single_replica_time_threshold=1200, index_granularity=16384, max_bytes_to_merge_at_max_space_in_pool=64424509440, storage_policy='main', ttl_only_drop_parts=1;
+CREATE TABLE IF NOT EXISTS test_local (`common.id` String CODEC(ZSTD(1)), `id` UInt64 CODEC(Delta, ZSTD(1)), `idx` UInt64 CODEC(Delta, ZSTD(1)), `api_id` UInt64 CODEC(ZSTD(1)), `arr` Array(Int64), `content` String CODEC(ZSTD(1)), `output` String, INDEX id_common_id_bloom_filter common.id TYPE bloom_filter(0.001) GRANULARITY 1, INDEX id_idx id TYPE minmax GRANULARITY 10, INDEX idx_id idx TYPE bloom_filter() GRANULARITY 1, INDEX api_id_idx api_id TYPE set(100) GRANULARITY 2, INDEX arr_idx arr TYPE bloom_filter(0.01) GRANULARITY 3, INDEX content_idx content TYPE tokenbf_v1(30720, 2, 0) GRANULARITY 1, INDEX output_idx output TYPE ngrambf_v1(3, 10000, 2, 1) GRANULARITY 2) ENGINE = ReplicatedMergeTree('/root/test_local', '{replica}') ORDER BY (toUnixTimestamp64Nano(`timestamp`), `api_id`) PARTITION BY toStartOfHour(`timestamp`) TTL toStartOfHour(`timestamp`) + INTERVAL 7 DAY, toStartOfHour(`timestamp`) + INTERVAL 2 DAY SETTINGS execute_merges_on_single_replica_time_threshold=1200, index_granularity=16384, max_bytes_to_merge_at_max_space_in_pool=64424509440, storage_policy='main', ttl_only_drop_parts=1;