Skip to content

Commit a6db4e3

Browse files
committed
Merge pull request #703 from Altinity/feature/s3cluster_hive_old_analyzer
s3Cluster hive partitioning for old analyzer
1 parent f0b7a13 commit a6db4e3

File tree

1 file changed

+74
-1
lines changed
  • tests/integration/test_s3_cluster

1 file changed

+74
-1
lines changed

tests/integration/test_s3_cluster/test.py

Lines changed: 74 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -593,8 +593,79 @@ def test_remote_no_hedged(started_cluster):
593593
assert TSV(pure_s3) == TSV(s3_distributed)
594594

595595

596-
def test_hive_partitioning(started_cluster):
596+
def test_distributed_s3_table_engine(started_cluster):
    """Check that an S3 table engine with ``object_storage_cluster`` fans the
    query out over the cluster, while a plain S3 table runs on one node.

    Three reads of the same MinIO data must return identical results:
      1. the ``s3Cluster`` table function (reference),
      2. a single-node ``S3`` engine table,
      3. an ``S3`` engine table with ``SETTINGS object_storage_cluster``.
    The query log is then used to assert how many hosts actually executed
    each engine query (1 for the single-node table, 3 for the distributed one).
    """
    node = started_cluster.instances["s0_0_0"]

    # Reference result via the s3Cluster table function.
    resp_def = node.query(
        """
        SELECT * from s3Cluster(
        'cluster_simple',
        'http://minio1:9001/root/data/{clickhouse,database}/*', 'minio', 'minio123', 'CSV',
        'name String, value UInt32, polygon Array(Array(Tuple(Float64, Float64)))') ORDER BY (name, value, polygon)
        """
    )

    # Plain S3 engine table: expected to execute on the initiator only.
    node.query("DROP TABLE IF EXISTS single_node")
    node.query(
        """
        CREATE TABLE single_node
        (name String, value UInt32, polygon Array(Array(Tuple(Float64, Float64))))
        ENGINE=S3('http://minio1:9001/root/data/{clickhouse,database}/*', 'minio', 'minio123', 'CSV')
        """
    )
    # Tag the query with a fresh UUID so it can be found in system.query_log.
    query_id_engine_single_node = str(uuid.uuid4())
    resp_engine_single_node = node.query(
        """
        SELECT * FROM single_node ORDER BY (name, value, polygon)
        """,
        query_id=query_id_engine_single_node,
    )
    assert resp_def == resp_engine_single_node

    # S3 engine table with object_storage_cluster: expected to fan out.
    node.query("DROP TABLE IF EXISTS distributed")
    node.query(
        """
        CREATE TABLE distributed
        (name String, value UInt32, polygon Array(Array(Tuple(Float64, Float64))))
        ENGINE=S3('http://minio1:9001/root/data/{clickhouse,database}/*', 'minio', 'minio123', 'CSV')
        SETTINGS object_storage_cluster='cluster_simple'
        """
    )
    query_id_engine_distributed = str(uuid.uuid4())
    resp_engine_distributed = node.query(
        """
        SELECT * FROM distributed ORDER BY (name, value, polygon)
        """,
        query_id=query_id_engine_distributed,
    )
    assert resp_def == resp_engine_distributed

    # Flush query logs on every replica before inspecting system.query_log.
    node.query("SYSTEM FLUSH LOGS ON CLUSTER 'cluster_simple'")

    hosts_engine_single_node = node.query(
        f"""
        SELECT uniq(hostname)
        FROM clusterAllReplicas('cluster_simple', system.query_log)
        WHERE type='QueryFinish' AND initial_query_id='{query_id_engine_single_node}'
        """
    )
    # Without a cluster setting the whole query runs on the initiator node.
    assert int(hosts_engine_single_node) == 1

    hosts_engine_distributed = node.query(
        f"""
        SELECT uniq(hostname)
        FROM clusterAllReplicas('cluster_simple', system.query_log)
        WHERE type='QueryFinish' AND initial_query_id='{query_id_engine_distributed}'
        """
    )
    # object_storage_cluster='cluster_simple' must involve all 3 hosts.
    assert int(hosts_engine_distributed) == 3
661+
662+
663+
@pytest.mark.parametrize("allow_experimental_analyzer", [0, 1])
664+
def test_hive_partitioning(started_cluster, allow_experimental_analyzer):
665+
node = started_cluster.instances["s0_0_0"]
666+
667+
node.query(f"SET allow_experimental_analyzer = {allow_experimental_analyzer}")
668+
598669
for i in range(1, 5):
599670
exists = node.query(
600671
f"""
@@ -717,3 +788,5 @@ def test_hive_partitioning(started_cluster):
717788
)
718789
cluster_optimized_traffic = int(cluster_optimized_traffic)
719790
assert cluster_optimized_traffic == optimized_traffic
791+
792+
node.query("SET allow_experimental_analyzer = DEFAULT")

0 commit comments

Comments
 (0)