diff --git a/.github/workflows/docker_test_images.yml b/.github/workflows/docker_test_images.yml
index be54cdaf3ddf..5e1778838bd1 100644
--- a/.github/workflows/docker_test_images.yml
+++ b/.github/workflows/docker_test_images.yml
@@ -10,7 +10,7 @@ name: Build docker images
description: set latest tag for resulting multiarch manifest
required: false
type: boolean
- default: false
+ default: false
secrets:
secret_envs:
description: if given, it's passed to the environments
@@ -26,10 +26,14 @@ env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+ ROBOT_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
jobs:
DockerBuildAarch64:
- runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none]
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
if: |
!failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_aarch64) != '[]'
steps:
@@ -44,7 +48,7 @@ jobs:
--image-tags '${{ toJson(fromJson(inputs.data).docker_data.images) }}' \
--missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_aarch64) }}'
DockerBuildAmd64:
- runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
if: |
!failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_amd64) != '[]'
steps:
@@ -60,7 +64,7 @@ jobs:
--missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_amd64) }}'
DockerMultiArchManifest:
needs: [DockerBuildAmd64, DockerBuildAarch64]
- runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
if: |
!failure() && !cancelled() && (toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]' || inputs.set_latest)
steps:
diff --git a/.github/workflows/grype_scan.yml b/.github/workflows/grype_scan.yml
index e68c3e63e283..eaa831a84ca9 100644
--- a/.github/workflows/grype_scan.yml
+++ b/.github/workflows/grype_scan.yml
@@ -61,7 +61,7 @@ jobs:
TAG_SUFFIX: ${{ inputs.tag-suffix }}
SPECIFIED_VERSION: ${{ inputs.version }}
run: |
- python3 ./tests/ci/version_helper.py | tee /tmp/version_info
+ python3 ./tests/ci/version_helper.py | grep = | grep -v , | tee /tmp/version_info
source /tmp/version_info
if [ -z "$SPECIFIED_VERSION" ]; then
VERSION=$CLICKHOUSE_VERSION_STRING
diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml
index 3d5d06e5b296..bcd054d15dff 100644
--- a/.github/workflows/regression.yml
+++ b/.github/workflows/regression.yml
@@ -100,7 +100,7 @@ env:
--no-colors
--local
--collect-service-logs
- --output classic
+ --output new-fails
--parallel 1
--log raw.log
--with-analyzer
@@ -145,7 +145,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions]
+ SUITE: [aes_encryption, atomic_insert, base_58, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, tiered_storage, version, window_functions]
needs: [runner_labels_setup]
runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
timeout-minutes: ${{ inputs.timeout_minutes }}
@@ -179,7 +179,7 @@ jobs:
python3
-u ${{ env.SUITE }}/regression.py
--clickhouse-binary-path ${{ env.clickhouse_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -203,11 +203,79 @@ jobs:
name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths}}
+ AggregateFunctions:
+ strategy:
+ fail-fast: false
+ matrix:
+ PART: [1, 2, 3]
+ needs: [runner_labels_setup]
+ runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
+ timeout-minutes: ${{ inputs.timeout_minutes }}
+ steps:
+ - name: Checkout regression repo
+ uses: actions/checkout@v4
+ with:
+ repository: Altinity/clickhouse-regression
+ ref: ${{ inputs.commit }}
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ REPORTS_PATH=${{ runner.temp }}/reports_dir
+ SUITE=aggregate_functions
+ PART=${{ matrix.PART }}
+ EOF
+ - name: Download json reports
+ uses: actions/download-artifact@v4
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ name: build_report_package_${{ inputs.arch }}
+ - name: Rename reports
+ run: |
+ mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+ - name: Setup
+ run: .github/setup.sh
+ - name: Get deb url
+ run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
+ - name: Run ${{ env.SUITE }} suite
+ id: run_suite
+ run: EXITCODE=0;
+ python3
+ -u ${{ env.SUITE }}/regression.py
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --only "part ${{ matrix.PART }}/*"
+ ${{ env.args }} || EXITCODE=$?;
+ .github/add_link_to_logs.sh;
+ exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} ${{ env.SUITE }}-${{ matrix.PART }}"
+ - name: Create and upload logs
+ if: always()
+ run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
+ - uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts
+ path: ${{ env.artifact_paths}}
Alter:
strategy:
fail-fast: false
matrix:
- ONLY: [replace, attach, move]
+ ONLY: [replace, move]
+ include:
+ - ONLY: attach
+ PART: 1
+ - ONLY: attach
+ PART: 2
needs: [runner_labels_setup]
runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
timeout-minutes: ${{ inputs.timeout_minutes }}
@@ -223,6 +291,7 @@ jobs:
REPORTS_PATH=${{ runner.temp }}/reports_dir
SUITE=alter
STORAGE=/${{ matrix.ONLY }}_partition
+          PART=${{ matrix.PART }}
EOF
- name: Download json reports
uses: actions/download-artifact@v4
@@ -242,8 +311,8 @@ jobs:
python3
-u alter/regression.py
--clickhouse-binary-path ${{ env.clickhouse_path }}
- --only "/alter/${{ matrix.ONLY }} partition/*"
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --only "/alter/${{ matrix.ONLY }} partition/${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}*"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.ONLY }}${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -253,7 +322,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JOB_OUTCOME: ${{ steps.run_suite.outcome }}
- SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition"
+          SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition${{ matrix.PART && format(' {0}', matrix.PART) || '' }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
@@ -264,7 +333,7 @@ jobs:
- uses: actions/upload-artifact@v4
if: always()
with:
- name: alter-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts
+ name: alter-${{ matrix.ONLY }}${{ matrix.PART && format('-{0}', matrix.PART) || '' }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths}}
Benchmark:
@@ -300,6 +369,7 @@ jobs:
run: .github/setup.sh
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
+
- name: Run ${{ env.SUITE }} suite
id: run_suite
run: EXITCODE=0;
@@ -314,7 +384,7 @@ jobs:
--aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
--aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
--aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -338,7 +408,12 @@ jobs:
name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths }}
- ClickHouseKeeperSSL:
+ ClickHouseKeeper:
+ strategy:
+ fail-fast: false
+ matrix:
+ PART: [1, 2]
+ SSL: [ssl, no_ssl]
needs: [runner_labels_setup]
runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
timeout-minutes: ${{ inputs.timeout_minutes }}
@@ -353,7 +428,9 @@ jobs:
cat >> "$GITHUB_ENV" << 'EOF'
REPORTS_PATH=${{runner.temp}}/reports_dir
SUITE=clickhouse_keeper
- STORAGE=/ssl
+ STORAGE=/${{ matrix.SSL }}
+ PART=${{ matrix.PART }}
+ SSL=${{ matrix.SSL == 'ssl' && '--ssl' || '' }}
EOF
- name: Download json reports
uses: actions/download-artifact@v4
@@ -371,10 +448,10 @@ jobs:
id: run_suite
run: EXITCODE=0;
python3
- -u ${{ env.SUITE }}/regression.py
- --ssl
+ -u ${{ env.SUITE }}/regression.py ${{ env.SSL }}
--clickhouse-binary-path ${{ env.clickhouse_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }}, ${{ matrix.SSL }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --only "part ${{ matrix.PART }}/*"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -384,7 +461,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JOB_OUTCOME: ${{ steps.run_suite.outcome }}
- SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper SSL"
+ SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper ${{ matrix.SSL }} ${{ matrix.PART }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
@@ -395,7 +472,7 @@ jobs:
- uses: actions/upload-artifact@v4
if: always()
with:
- name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts
+ name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-${{ matrix.SSL }}-artifacts
path: ${{ env.artifact_paths }}
LDAP:
@@ -436,7 +513,7 @@ jobs:
python3
-u ${{ env.SUITE }}/regression.py
--clickhouse-binary-path ${{ env.clickhouse_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -562,7 +639,7 @@ jobs:
--aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
--aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
--aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -586,11 +663,79 @@ jobs:
name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths }}
+ SSLServer:
+ strategy:
+ fail-fast: false
+ matrix:
+ PART: [1, 2, 3]
+ needs: [runner_labels_setup]
+ runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
+ timeout-minutes: ${{ inputs.timeout_minutes }}
+ steps:
+ - name: Checkout regression repo
+ uses: actions/checkout@v4
+ with:
+ repository: Altinity/clickhouse-regression
+ ref: ${{ inputs.commit }}
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ REPORTS_PATH=${{ runner.temp }}/reports_dir
+ SUITE=ssl_server
+ PART=${{ matrix.PART }}
+ EOF
+ - name: Download json reports
+ uses: actions/download-artifact@v4
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ name: build_report_package_${{ inputs.arch }}
+ - name: Rename reports
+ run: |
+ mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+ - name: Setup
+ run: .github/setup.sh
+ - name: Get deb url
+ run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
+ - name: Run ${{ env.SUITE }} suite
+ id: run_suite
+ run: EXITCODE=0;
+ python3
+ -u ${{ env.SUITE }}/regression.py
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --only "part ${{ matrix.PART }}/*"
+ ${{ env.args }} || EXITCODE=$?;
+ .github/add_link_to_logs.sh;
+ exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} ${{ env.SUITE }}-${{ matrix.PART }}"
+ - name: Create and upload logs
+ if: always()
+ run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
+ - uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts
+ path: ${{ env.artifact_paths}}
+
S3:
strategy:
fail-fast: false
matrix:
- STORAGE: [minio, aws_s3, gcs, azure]
+ STORAGE: [aws_s3, gcs, azure, minio]
+ PART: [1, 2]
+ include:
+ - STORAGE: minio
+ PART: 3
needs: [runner_labels_setup]
runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
timeout-minutes: ${{ inputs.timeout_minutes }}
@@ -605,6 +750,7 @@ jobs:
cat >> "$GITHUB_ENV" << 'EOF'
REPORTS_PATH=${{ runner.temp }}/reports_dir
SUITE=s3
+ PART=${{ matrix.PART }}
STORAGE=/${{ matrix.STORAGE }}
EOF
- name: Download json reports
@@ -636,7 +782,8 @@ jobs:
--azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }}
--azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }}
--azure-container ${{ secrets.AZURE_CONTAINER_NAME }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }}-${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --only ":/try*" ":/part ${{ matrix.PART }}/*"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
@@ -646,7 +793,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JOB_OUTCOME: ${{ steps.run_suite.outcome }}
- SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}"
+ SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}-${{ matrix.PART }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
@@ -657,7 +804,7 @@ jobs:
- uses: actions/upload-artifact@v4
if: always()
with:
- name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts
+ name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts
path: ${{ env.artifact_paths}}
TieredStorage:
@@ -706,7 +853,7 @@ jobs:
--gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}
--gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
--with-${{ matrix.STORAGE }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 4e7c8629c1bf..78edbce6df4c 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -492,7 +492,7 @@ jobs:
secrets: inherit
with:
test_name: Integration tests (aarch64)
- runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none
+ runner_type: altinity-on-demand, altinity-func-tester-aarch64
data: ${{ needs.RunConfig.outputs.data }}
#############################################################################################
####################################### AST FUZZERS #########################################
@@ -504,7 +504,7 @@ jobs:
secrets: inherit
with:
test_name: AST fuzzer (asan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+ runner_type: altinity-on-demand, altinity-func-tester
data: ${{ needs.RunConfig.outputs.data }}
ASTFuzzerTsan:
needs: [RunConfig, BuilderDebTsan]
@@ -513,7 +513,7 @@ jobs:
secrets: inherit
with:
test_name: AST fuzzer (tsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+ runner_type: altinity-on-demand, altinity-func-tester
data: ${{ needs.RunConfig.outputs.data }}
ASTFuzzerMsan:
needs: [RunConfig, BuilderDebMsan]
@@ -522,7 +522,7 @@ jobs:
secrets: inherit
with:
test_name: AST fuzzer (msan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+ runner_type: altinity-on-demand, altinity-func-tester
data: ${{ needs.RunConfig.outputs.data }}
ASTFuzzerUBsan:
needs: [RunConfig, BuilderDebUBsan]
@@ -531,7 +531,7 @@ jobs:
secrets: inherit
with:
test_name: AST fuzzer (ubsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+ runner_type: altinity-on-demand, altinity-func-tester
data: ${{ needs.RunConfig.outputs.data }}
ASTFuzzerDebug:
needs: [RunConfig, BuilderDebDebug]
@@ -540,7 +540,7 @@ jobs:
secrets: inherit
with:
test_name: AST fuzzer (debug)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+ runner_type: altinity-on-demand, altinity-func-tester
data: ${{ needs.RunConfig.outputs.data }}
#############################################################################################
##################################### REGRESSION TESTS ######################################
@@ -551,8 +551,8 @@ jobs:
uses: ./.github/workflows/regression.yml
secrets: inherit
with:
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression
- commit: e3c00be97a045aa04e9d1a6ec50cc64f4c387b70
+ runner_type: altinity-on-demand, altinity-regression-tester
+ commit: fc19ce3a7322a10ab791de755c950a56744a12e7
arch: release
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
@@ -562,8 +562,8 @@ jobs:
uses: ./.github/workflows/regression.yml
secrets: inherit
with:
- runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression
- commit: e3c00be97a045aa04e9d1a6ec50cc64f4c387b70
+ runner_type: altinity-on-demand, altinity-regression-tester-aarch64
+ commit: fc19ce3a7322a10ab791de755c950a56744a12e7
arch: aarch64
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml
index ab37118fade7..ea223697a8fb 100644
--- a/.github/workflows/reusable_build.yml
+++ b/.github/workflows/reusable_build.yml
@@ -58,7 +58,7 @@ jobs:
if: ${{ contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.build_name) || inputs.force }}
env:
GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}}
- runs-on: [self-hosted, altinity-type-ccx53, altinity-on-demand, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
steps:
- name: Check out repository code
uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
diff --git a/.gitmodules b/.gitmodules
index 0a66031de8d1..5deea5eb488f 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -365,7 +365,7 @@
url = https://github.com/ClickHouse/rust_vendor.git
[submodule "contrib/openssl"]
path = contrib/openssl
- url = https://github.com/ClickHouse/openssl.git
+ url = https://github.com/Altinity/openssl.git
[submodule "contrib/double-conversion"]
path = contrib/double-conversion
url = https://github.com/ClickHouse/double-conversion.git
diff --git a/contrib/openssl b/contrib/openssl
index 66deddc1e53c..5dfb0a79cf5c 160000
--- a/contrib/openssl
+++ b/contrib/openssl
@@ -1 +1 @@
-Subproject commit 66deddc1e53cda8706604a019777259372d1bd62
+Subproject commit 5dfb0a79cf5cbba13f21fa5b065c93e318f49d8b
diff --git a/docker/keeper/Dockerfile b/docker/keeper/Dockerfile
index 880dba685b95..d422a00b93fd 100644
--- a/docker/keeper/Dockerfile
+++ b/docker/keeper/Dockerfile
@@ -12,7 +12,7 @@ RUN arch=${TARGETARCH:-amd64} \
&& ln -s "${rarch}-linux-gnu" /lib/linux-gnu
-FROM alpine:3.21.3
+FROM alpine:3.21.5
ENV LANG=en_US.UTF-8 \
LANGUAGE=en_US:en \
diff --git a/docker/server/README.md b/docker/server/README.md
index 5f6144d0633a..ff3d68643e4b 100644
--- a/docker/server/README.md
+++ b/docker/server/README.md
@@ -177,4 +177,4 @@ EOSQL
## License
-View [license information](https://github.com/ClickHouse/ClickHouse/blob/master/LICENSE) for the software contained in this image.
+View [license information](https://github.com/Altinity/ClickHouse/blob/antalya/LICENSE) for the software contained in this image.
diff --git a/docker/server/README.src/github-repo b/docker/server/README.src/github-repo
index 70a009ec9588..721b5d7bc3f8 100644
--- a/docker/server/README.src/github-repo
+++ b/docker/server/README.src/github-repo
@@ -1 +1 @@
-https://github.com/ClickHouse/ClickHouse
+https://github.com/Altinity/ClickHouse
diff --git a/docker/server/README.src/license.md b/docker/server/README.src/license.md
index 6be024edcdec..fac387a11c5b 100644
--- a/docker/server/README.src/license.md
+++ b/docker/server/README.src/license.md
@@ -1 +1 @@
-View [license information](https://github.com/ClickHouse/ClickHouse/blob/master/LICENSE) for the software contained in this image.
+View [license information](https://github.com/Altinity/ClickHouse/blob/antalya/LICENSE) for the software contained in this image.
diff --git a/docker/server/README.src/logo.svg b/docker/server/README.src/logo.svg
index a50dd81a1645..886f4f0e4ddd 100644
--- a/docker/server/README.src/logo.svg
+++ b/docker/server/README.src/logo.svg
@@ -1,43 +1,17 @@
-
-
\ No newline at end of file
+
+
+
+
+
+
+
+
diff --git a/docker/server/README.src/maintainer.md b/docker/server/README.src/maintainer.md
index 26c7db1a2934..90f15bb5337c 100644
--- a/docker/server/README.src/maintainer.md
+++ b/docker/server/README.src/maintainer.md
@@ -1 +1 @@
-[ClickHouse Inc.](%%GITHUB-REPO%%)
+[Altinity Inc.](%%GITHUB-REPO%%)
diff --git a/docker/test/README.md b/docker/test/README.md
index 563cfd837e95..baca52cd1149 100644
--- a/docker/test/README.md
+++ b/docker/test/README.md
@@ -2,4 +2,4 @@
## License
-View [license information](https://github.com/ClickHouse/ClickHouse/blob/master/LICENSE) for the software contained in this image.
+View [license information](https://github.com/Altinity/ClickHouse/blob/antalya/LICENSE) for the software contained in this image.
diff --git a/docker/test/fuzzer/run-fuzzer.sh b/docker/test/fuzzer/run-fuzzer.sh
index 2a5956340d69..f7ba3fa93ac1 100755
--- a/docker/test/fuzzer/run-fuzzer.sh
+++ b/docker/test/fuzzer/run-fuzzer.sh
@@ -24,7 +24,10 @@ BASE_REF=${BASE_REF:="master"}
function git_clone_with_retry
{
for _ in 1 2 3 4; do
- if git clone --depth 1 https://github.com/Altinity/ClickHouse.git -b "${BASE_REF}" -- "$1" 2>&1 | ts '%Y-%m-%d %H:%M:%S';then
+ # Strip refs/tags/ prefix if present, as --branch expects just the tag/branch name
+ local ref_name="${BASE_REF#refs/tags/}"
+
+ if git clone --depth 1 https://github.com/Altinity/ClickHouse.git --branch "${ref_name}" -- "$1" 2>&1 | ts '%Y-%m-%d %H:%M:%S';then
return 0
else
sleep 0.5
diff --git a/docker/test/stress/run.sh b/docker/test/stress/run.sh
index 48348aa131ca..cf87c5eca088 100644
--- a/docker/test/stress/run.sh
+++ b/docker/test/stress/run.sh
@@ -60,6 +60,10 @@ azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log &
config_logs_export_cluster /etc/clickhouse-server/config.d/system_logs_export.yaml
+# NOTE(strtgbb): Trying to avoid errors that may be related to running out of resources
+export CLICKHOUSE_MAX_THREADS=8
+export CLICKHOUSE_MAX_CONCURRENT_QUERIES=4
+
start_server
setup_logs_replication
diff --git a/docs/en/operations/system-tables/view_refreshes.md b/docs/en/operations/system-tables/view_refreshes.md
index 12377507b39e..e792e0d095d1 100644
--- a/docs/en/operations/system-tables/view_refreshes.md
+++ b/docs/en/operations/system-tables/view_refreshes.md
@@ -17,7 +17,8 @@ Columns:
- `duration_ms` ([UInt64](../../sql-reference/data-types/int-uint.md)) — How long the last refresh attempt took.
- `next_refresh_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Time at which the next refresh is scheduled to start.
- `remaining_dependencies` ([Array(String)](../../sql-reference/data-types/array.md)) — If the view has [refresh dependencies](../../sql-reference/statements/create/view.md#refresh-dependencies), this array contains the subset of those dependencies that are not satisfied for the current refresh yet. If `status = 'WaitingForDependencies'`, a refresh is ready to start as soon as these dependencies are fulfilled.
-- `exception` ([String](../../sql-reference/data-types/string.md)) — if `last_refresh_result = 'Exception'`, i.e. the last refresh attempt failed, this column contains the corresponding error message and stack trace.
+- `exception` ([String](../../sql-reference/data-types/string.md)) — if `last_refresh_result = 'Error'`, i.e. the last refresh attempt failed, this column contains the corresponding error message and stack trace.
+- `retry` ([UInt64](../../sql-reference/data-types/int-uint.md)) — If nonzero, the current or next refresh is a retry (see `refresh_retries` refresh setting), and `retry` is the 1-based index of that retry.
- `refresh_count` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Number of successful refreshes since last server restart or table creation.
- `progress` ([Float64](../../sql-reference/data-types/float.md)) — Progress of the current refresh, between 0 and 1.
- `read_rows` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Number of rows read by the current refresh so far.
diff --git a/docs/en/sql-reference/statements/create/view.md b/docs/en/sql-reference/statements/create/view.md
index 2931f7020fb0..45e7a41e8a28 100644
--- a/docs/en/sql-reference/statements/create/view.md
+++ b/docs/en/sql-reference/statements/create/view.md
@@ -13,8 +13,8 @@ Creates a new view. Views can be [normal](#normal-view), [materialized](#materia
Syntax:
``` sql
-CREATE [OR REPLACE] VIEW [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster_name]
-[DEFINER = { user | CURRENT_USER }] [SQL SECURITY { DEFINER | INVOKER | NONE }]
+CREATE [OR REPLACE] VIEW [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster_name]
+[DEFINER = { user | CURRENT_USER }] [SQL SECURITY { DEFINER | INVOKER | NONE }]
AS SELECT ...
[COMMENT 'comment']
```
@@ -55,8 +55,8 @@ SELECT * FROM view(column1=value1, column2=value2 ...)
## Materialized View
``` sql
-CREATE MATERIALIZED VIEW [IF NOT EXISTS] [db.]table_name [ON CLUSTER] [TO[db.]name] [ENGINE = engine] [POPULATE]
-[DEFINER = { user | CURRENT_USER }] [SQL SECURITY { DEFINER | INVOKER | NONE }]
+CREATE MATERIALIZED VIEW [IF NOT EXISTS] [db.]table_name [ON CLUSTER] [TO[db.]name] [ENGINE = engine] [POPULATE]
+[DEFINER = { user | CURRENT_USER }] [SQL SECURITY { DEFINER | INVOKER | NONE }]
AS SELECT ...
[COMMENT 'comment']
```
@@ -92,7 +92,7 @@ Given that `POPULATE` works like `CREATE TABLE ... AS SELECT ...` it has limitat
- It is not supported with Replicated database
- It is not supported in ClickHouse cloud
-Instead a separate `INSERT ... SELECT` can be used.
+Instead a separate `INSERT ... SELECT` can be used.
:::
A `SELECT` query can contain `DISTINCT`, `GROUP BY`, `ORDER BY`, `LIMIT`. Note that the corresponding conversions are performed independently on each block of inserted data. For example, if `GROUP BY` is set, data is aggregated during insertion, but only within a single packet of inserted data. The data won’t be further aggregated. The exception is when using an `ENGINE` that independently performs data aggregation, such as `SummingMergeTree`.
@@ -110,7 +110,7 @@ To delete a view, use [DROP VIEW](../../../sql-reference/statements/drop.md#drop
`DEFINER` and `SQL SECURITY` allow you to specify which ClickHouse user to use when executing the view's underlying query.
`SQL SECURITY` has three legal values: `DEFINER`, `INVOKER`, or `NONE`. You can specify any existing user or `CURRENT_USER` in the `DEFINER` clause.
-The following table will explain which rights are required for which user in order to select from view.
+The following table will explain which rights are required for which user in order to select from view.
Note that regardless of the SQL security option, in every case it is still required to have `GRANT SELECT ON ` in order to read from it.
| SQL security option | View | Materialized View |
@@ -130,7 +130,7 @@ If `DEFINER`/`SQL SECURITY` aren't specified, the default values are used:
If a view is attached without `DEFINER`/`SQL SECURITY` specified, the default value is `SQL SECURITY NONE` for the materialized view and `SQL SECURITY INVOKER` for the normal view.
-To change SQL security for an existing view, use
+To change SQL security for an existing view, use
```sql
ALTER TABLE MODIFY SQL SECURITY { DEFINER | INVOKER | NONE } [DEFINER = { user | CURRENT_USER }]
```
@@ -161,6 +161,8 @@ CREATE MATERIALIZED VIEW [IF NOT EXISTS] [db.]table_name
REFRESH EVERY|AFTER interval [OFFSET interval]
RANDOMIZE FOR interval
DEPENDS ON [db.]name [, [db.]name [, ...]]
+SETTINGS name = value [, name = value [, ...]]
+[APPEND]
[TO[db.]name] [(columns)] [ENGINE = engine] [EMPTY]
AS SELECT ...
[COMMENT 'comment']
@@ -170,18 +172,23 @@ where `interval` is a sequence of simple intervals:
number SECOND|MINUTE|HOUR|DAY|WEEK|MONTH|YEAR
```
-Periodically runs the corresponding query and stores its result in a table, atomically replacing the table's previous contents.
+Periodically runs the corresponding query and stores its result in a table.
+ * If the query says `APPEND`, each refresh inserts rows into the table without deleting existing rows. The insert is not atomic, just like a regular INSERT SELECT.
+ * Otherwise each refresh atomically replaces the table's previous contents.
Differences from regular non-refreshable materialized views:
- * No insert trigger. I.e. when new data is inserted into the table specified in SELECT, it's *not* automatically pushed to the refreshable materialized view. The periodic refresh runs the entire query and replaces the entire table.
+ * No insert trigger. I.e. when new data is inserted into the table specified in SELECT, it's *not* automatically pushed to the refreshable materialized view. The periodic refresh runs the entire query.
* No restrictions on the SELECT query. Table functions (e.g. `url()`), views, UNION, JOIN, are all allowed.
+:::note
+The settings in the `REFRESH ... SETTINGS` part of the query are refresh settings (e.g. `refresh_retries`), distinct from regular settings (e.g. `max_threads`). Regular settings can be specified using `SETTINGS` at the end of the query.
+:::
+
:::note
Refreshable materialized views are a work in progress. Setting `allow_experimental_refreshable_materialized_view = 1` is required for creating one. Current limitations:
* not compatible with Replicated database or table engines
* It is not supported in ClickHouse Cloud
* require [Atomic database engine](../../../engines/database-engines/atomic.md),
- * no retries for failed refresh - we just skip to the next scheduled refresh time,
* no limit on number of concurrent refreshes.
:::
@@ -246,15 +253,22 @@ A few more examples:
`DEPENDS ON` only works between refreshable materialized views. Listing a regular table in the `DEPENDS ON` list will prevent the view from ever refreshing (dependencies can be removed with `ALTER`, see below).
:::
+### Settings
+
+Available refresh settings:
+ * `refresh_retries` - How many times to retry if refresh query fails with an exception. If all retries fail, skip to the next scheduled refresh time. 0 means no retries, -1 means infinite retries. Default: 0.
+ * `refresh_retry_initial_backoff_ms` - Delay before the first retry, if `refresh_retries` is not zero. Each subsequent retry doubles the delay, up to `refresh_retry_max_backoff_ms`. Default: 100 ms.
+ * `refresh_retry_max_backoff_ms` - Limit on the exponential growth of delay between refresh attempts. Default: 60000 ms (1 minute).
+
### Changing Refresh Parameters {#changing-refresh-parameters}
To change refresh parameters:
```
-ALTER TABLE [db.]name MODIFY REFRESH EVERY|AFTER ... [RANDOMIZE FOR ...] [DEPENDS ON ...]
+ALTER TABLE [db.]name MODIFY REFRESH EVERY|AFTER ... [RANDOMIZE FOR ...] [DEPENDS ON ...] [SETTINGS ...]
```
:::note
-This replaces refresh schedule *and* dependencies. If the table had a `DEPENDS ON`, doing a `MODIFY REFRESH` without `DEPENDS ON` will remove the dependencies.
+This replaces *all* refresh parameters at once: schedule, dependencies, settings, and APPEND-ness. E.g. if the table had a `DEPENDS ON`, doing a `MODIFY REFRESH` without `DEPENDS ON` will remove the dependencies.
:::
### Other operations
@@ -263,6 +277,10 @@ The status of all refreshable materialized views is available in table [`system.
To manually stop, start, trigger, or cancel refreshes use [`SYSTEM STOP|START|REFRESH|CANCEL VIEW`](../system.md#refreshable-materialized-views).
+:::note
+Fun fact: the refresh query is allowed to read from the view that's being refreshed, seeing pre-refresh version of the data. This means you can implement Conway's game of life: https://pastila.nl/?00021a4b/d6156ff819c83d490ad2dcec05676865#O0LGWTO7maUQIA4AcGUtlA==
+:::
+
## Window View [Experimental]
:::info
diff --git a/docs/en/sql-reference/statements/system.md b/docs/en/sql-reference/statements/system.md
index 8a9df17a6105..77d023b67ce3 100644
--- a/docs/en/sql-reference/statements/system.md
+++ b/docs/en/sql-reference/statements/system.md
@@ -406,7 +406,7 @@ SYSTEM SYNC REPLICA [ON CLUSTER cluster_name] [db.]replicated_merge_tree_family_
After running this statement the `[db.]replicated_merge_tree_family_table_name` fetches commands from the common replicated log into its own replication queue, and then the query waits till the replica processes all of the fetched commands. The following modifiers are supported:
- If a `STRICT` modifier was specified then the query waits for the replication queue to become empty. The `STRICT` version may never succeed if new entries constantly appear in the replication queue.
- - If a `LIGHTWEIGHT` modifier was specified then the query waits only for `GET_PART`, `ATTACH_PART`, `DROP_RANGE`, `REPLACE_RANGE` and `DROP_PART` entries to be processed.
+ - If a `LIGHTWEIGHT` modifier was specified then the query waits only for `GET_PART`, `ATTACH_PART`, `DROP_RANGE`, `REPLACE_RANGE` and `DROP_PART` entries to be processed.
Additionally, the LIGHTWEIGHT modifier supports an optional FROM 'srcReplicas' clause, where 'srcReplicas' is a comma-separated list of source replica names. This extension allows for more targeted synchronization by focusing only on replication tasks originating from the specified source replicas.
- If a `PULL` modifier was specified then the query pulls new replication queue entries from ZooKeeper, but does not wait for anything to be processed.
@@ -532,6 +532,10 @@ Trigger an immediate out-of-schedule refresh of a given view.
SYSTEM REFRESH VIEW [db.]name
```
+### WAIT VIEW
+
+Wait for the currently running refresh to complete. If the refresh fails, throws an exception. If no refresh is running, completes immediately, throwing an exception if the previous refresh failed.
+
### STOP VIEW, STOP VIEWS
Disable periodic refreshing of the given view or all refreshable views. If a refresh is in progress, cancel it too.
diff --git a/docs/ru/images/logo.svg b/docs/ru/images/logo.svg
index b5ab923ff653..7d2e34d4b320 100644
--- a/docs/ru/images/logo.svg
+++ b/docs/ru/images/logo.svg
@@ -1 +1,16 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/docs/zh/images/logo.svg b/docs/zh/images/logo.svg
index b5ab923ff653..5d0a9a6b64fd 100644
--- a/docs/zh/images/logo.svg
+++ b/docs/zh/images/logo.svg
@@ -1 +1,16 @@
-
\ No newline at end of file
+
diff --git a/packages/clickhouse-client.yaml b/packages/clickhouse-client.yaml
index 34b42d92adfe..c8469b1744c5 100644
--- a/packages/clickhouse-client.yaml
+++ b/packages/clickhouse-client.yaml
@@ -11,12 +11,12 @@ description: |
arch: "${DEB_ARCH}" # amd64, arm64
platform: "linux"
version: "${CLICKHOUSE_VERSION_STRING}"
-vendor: "ClickHouse Inc."
-homepage: "https://clickhouse.com"
+vendor: "Altinity Inc."
+homepage: "https://altinity.com"
license: "Apache"
section: "database"
priority: "optional"
-maintainer: "ClickHouse Dev Team "
+maintainer: "Altinity Server Dev Team "
deb:
fields:
Source: clickhouse
diff --git a/packages/clickhouse-common-static-dbg.yaml b/packages/clickhouse-common-static-dbg.yaml
index 74b7fa8381bc..c656fb372a0b 100644
--- a/packages/clickhouse-common-static-dbg.yaml
+++ b/packages/clickhouse-common-static-dbg.yaml
@@ -11,12 +11,12 @@ description: |
arch: "${DEB_ARCH}" # amd64, arm64
platform: "linux"
version: "${CLICKHOUSE_VERSION_STRING}"
-vendor: "ClickHouse Inc."
-homepage: "https://clickhouse.com"
+vendor: "Altinity Inc."
+homepage: "https://altinity.com"
license: "Apache"
section: "database"
priority: "optional"
-maintainer: "ClickHouse Dev Team "
+maintainer: "Altinity Server Dev Team "
deb:
fields:
Source: clickhouse
diff --git a/packages/clickhouse-common-static.yaml b/packages/clickhouse-common-static.yaml
index db330f808e15..cf7941eb46b8 100644
--- a/packages/clickhouse-common-static.yaml
+++ b/packages/clickhouse-common-static.yaml
@@ -11,12 +11,12 @@ description: |
arch: "${DEB_ARCH}" # amd64, arm64
platform: "linux"
version: "${CLICKHOUSE_VERSION_STRING}"
-vendor: "ClickHouse Inc."
-homepage: "https://clickhouse.com"
+vendor: "Altinity Inc."
+homepage: "https://altinity.com"
license: "Apache"
section: "database"
priority: "optional"
-maintainer: "ClickHouse Dev Team "
+maintainer: "Altinity Server Dev Team "
deb:
fields:
Source: clickhouse
diff --git a/packages/clickhouse-keeper-dbg.yaml b/packages/clickhouse-keeper-dbg.yaml
index 28d53b39518d..d992eaf2375f 100644
--- a/packages/clickhouse-keeper-dbg.yaml
+++ b/packages/clickhouse-keeper-dbg.yaml
@@ -11,12 +11,12 @@ description: |
arch: "${DEB_ARCH}" # amd64, arm64
platform: "linux"
version: "${CLICKHOUSE_VERSION_STRING}"
-vendor: "ClickHouse Inc."
-homepage: "https://clickhouse.com"
+vendor: "Altinity Inc."
+homepage: "https://altinity.com"
license: "Apache"
section: "database"
priority: "optional"
-maintainer: "ClickHouse Dev Team "
+maintainer: "Altinity Server Dev Team "
deb:
fields:
Source: clickhouse
diff --git a/packages/clickhouse-keeper.yaml b/packages/clickhouse-keeper.yaml
index 9dad5382c082..e6a0d18d9242 100644
--- a/packages/clickhouse-keeper.yaml
+++ b/packages/clickhouse-keeper.yaml
@@ -11,12 +11,12 @@ description: |
arch: "${DEB_ARCH}" # amd64, arm64
platform: "linux"
version: "${CLICKHOUSE_VERSION_STRING}"
-vendor: "ClickHouse Inc."
-homepage: "https://clickhouse.com"
+vendor: "Altinity Inc."
+homepage: "https://altinity.com"
license: "Apache"
section: "database"
priority: "optional"
-maintainer: "ClickHouse Dev Team "
+maintainer: "Altinity Server Dev Team "
deb:
fields:
Source: clickhouse
diff --git a/packages/clickhouse-server.yaml b/packages/clickhouse-server.yaml
index dc183ead1020..d1d36e4a4ba5 100644
--- a/packages/clickhouse-server.yaml
+++ b/packages/clickhouse-server.yaml
@@ -11,12 +11,12 @@ description: |
arch: "${DEB_ARCH}" # amd64, arm64
platform: "linux"
version: "${CLICKHOUSE_VERSION_STRING}"
-vendor: "ClickHouse Inc."
-homepage: "https://clickhouse.com"
+vendor: "Altinity Inc."
+homepage: "https://altinity.com"
license: "Apache"
section: "database"
priority: "optional"
-maintainer: "ClickHouse Dev Team "
+maintainer: "Altinity Server Dev Team "
deb:
fields:
Source: clickhouse
diff --git a/programs/server/binary.html b/programs/server/binary.html
index eec39cd44638..12299b17b788 100644
--- a/programs/server/binary.html
+++ b/programs/server/binary.html
@@ -2,7 +2,7 @@
-
+
ClickHouse Binary Viewer