Merge branch 'main' into fix-system_queries-test
onurctirtir authored Feb 7, 2025
2 parents 857c38d + f7c5735 commit c2206b9
Showing 31 changed files with 608 additions and 74 deletions.
8 changes: 4 additions & 4 deletions .devcontainer/Dockerfile
@@ -68,7 +68,7 @@ USER citus

# build postgres versions separately for effective parallelism and caching of already built versions when changing only certain versions
FROM base AS pg14
RUN MAKEFLAGS="-j $(nproc)" pgenv build 14.12
RUN MAKEFLAGS="-j $(nproc)" pgenv build 14.15
RUN rm .pgenv/src/*.tar*
RUN make -C .pgenv/src/postgresql-*/ clean
RUN make -C .pgenv/src/postgresql-*/src/include install
@@ -80,7 +80,7 @@ RUN cp -r .pgenv/src .pgenv/pgsql-* .pgenv/config .pgenv-staging/
RUN rm .pgenv-staging/config/default.conf

FROM base AS pg15
RUN MAKEFLAGS="-j $(nproc)" pgenv build 15.7
RUN MAKEFLAGS="-j $(nproc)" pgenv build 15.10
RUN rm .pgenv/src/*.tar*
RUN make -C .pgenv/src/postgresql-*/ clean
RUN make -C .pgenv/src/postgresql-*/src/include install
@@ -92,7 +92,7 @@ RUN cp -r .pgenv/src .pgenv/pgsql-* .pgenv/config .pgenv-staging/
RUN rm .pgenv-staging/config/default.conf

FROM base AS pg16
RUN MAKEFLAGS="-j $(nproc)" pgenv build 16.3
RUN MAKEFLAGS="-j $(nproc)" pgenv build 16.6
RUN rm .pgenv/src/*.tar*
RUN make -C .pgenv/src/postgresql-*/ clean
RUN make -C .pgenv/src/postgresql-*/src/include install
@@ -211,7 +211,7 @@ COPY --chown=citus:citus .psqlrc .
RUN sudo chown --from=root:root citus:citus -R ~

# sets default pg version
RUN pgenv switch 16.3
RUN pgenv switch 16.6

# make connecting to the coordinator easy
ENV PGPORT=9700
2 changes: 1 addition & 1 deletion .github/actions/save_logs_and_results/action.yml
@@ -6,7 +6,7 @@ inputs:
runs:
using: composite
steps:
- uses: actions/upload-artifact@v3.1.1
- uses: actions/upload-artifact@v4.6.0
name: Upload logs
with:
name: ${{ inputs.folder }}
2 changes: 1 addition & 1 deletion .github/actions/setup_extension/action.yml
@@ -17,7 +17,7 @@ runs:
echo "PG_MAJOR=${{ inputs.pg_major }}" >> $GITHUB_ENV
fi
shell: bash
- uses: actions/download-artifact@v3.0.1
- uses: actions/download-artifact@v4.1.8
with:
name: build-${{ env.PG_MAJOR }}
- name: Install Extension
4 changes: 2 additions & 2 deletions .github/actions/upload_coverage/action.yml
@@ -21,7 +21,7 @@ runs:
mkdir -p /tmp/codeclimate
cc-test-reporter format-coverage -t lcov -o /tmp/codeclimate/${{ inputs.flags }}.json lcov.info
shell: bash
- uses: actions/upload-artifact@v3.1.1
- uses: actions/upload-artifact@v4.6.0
with:
path: "/tmp/codeclimate/*.json"
name: codeclimate
name: codeclimate-${{ inputs.flags }}
36 changes: 22 additions & 14 deletions .github/workflows/build_and_test.yml
@@ -31,14 +31,14 @@ jobs:
pgupgrade_image_name: "ghcr.io/citusdata/pgupgradetester"
style_checker_image_name: "ghcr.io/citusdata/stylechecker"
style_checker_tools_version: "0.8.18"
sql_snapshot_pg_version: "16.3"
image_suffix: "-v13fd57c"
pg14_version: '{ "major": "14", "full": "14.12" }'
pg15_version: '{ "major": "15", "full": "15.7" }'
pg16_version: '{ "major": "16", "full": "16.3" }'
upgrade_pg_versions: "14.12-15.7-16.3"
sql_snapshot_pg_version: "16.6"
image_suffix: "-v5779674"
pg14_version: '{ "major": "14", "full": "14.15" }'
pg15_version: '{ "major": "15", "full": "15.10" }'
pg16_version: '{ "major": "16", "full": "16.6" }'
upgrade_pg_versions: "14.15-15.10-16.6"
steps:
# Since GHA jobs needs at least one step we use a noop step here.
# Since GHA jobs need at least one step we use a noop step here.
- name: Set up parameters
run: echo 'noop'
check-sql-snapshots:
@@ -48,7 +48,7 @@ jobs:
image: ${{ needs.params.outputs.build_image_name }}:${{ needs.params.outputs.sql_snapshot_pg_version }}${{ needs.params.outputs.image_suffix }}
options: --user root
steps:
- uses: actions/checkout@v3.5.0
- uses: actions/checkout@v4
- name: Check Snapshots
run: |
git config --global --add safe.directory ${GITHUB_WORKSPACE}
@@ -125,7 +125,7 @@ jobs:
- name: Build
run: "./ci/build-citus.sh"
shell: bash
- uses: actions/upload-artifact@v3.1.1
- uses: actions/upload-artifact@v4.6.0
with:
name: build-${{ env.PG_MAJOR }}
path: |-
@@ -284,10 +284,12 @@ jobs:
check-arbitrary-configs parallel=4 CONFIGS=$TESTS
- uses: "./.github/actions/save_logs_and_results"
if: always()
with:
folder: ${{ env.PG_MAJOR }}_arbitrary_configs_${{ matrix.parallel }}
- uses: "./.github/actions/upload_coverage"
if: always()
with:
flags: ${{ env.pg_major }}_upgrade
flags: ${{ env.PG_MAJOR }}_arbitrary_configs_${{ matrix.parallel }}
codecov_token: ${{ secrets.CODECOV_TOKEN }}
test-pg-upgrade:
name: PG${{ matrix.old_pg_major }}-PG${{ matrix.new_pg_major }} - check-pg-upgrade
@@ -335,6 +337,8 @@ jobs:
if: failure()
- uses: "./.github/actions/save_logs_and_results"
if: always()
with:
folder: ${{ env.old_pg_major }}_${{ env.new_pg_major }}_upgrade
- uses: "./.github/actions/upload_coverage"
if: always()
with:
@@ -380,10 +384,12 @@ jobs:
done;
- uses: "./.github/actions/save_logs_and_results"
if: always()
with:
folder: ${{ env.PG_MAJOR }}_citus_upgrade
- uses: "./.github/actions/upload_coverage"
if: always()
with:
flags: ${{ env.pg_major }}_upgrade
flags: ${{ env.PG_MAJOR }}_citus_upgrade
codecov_token: ${{ secrets.CODECOV_TOKEN }}
upload-coverage:
if: always()
@@ -399,10 +405,11 @@ jobs:
- test-citus-upgrade
- test-pg-upgrade
steps:
- uses: actions/download-artifact@v3.0.1
- uses: actions/download-artifact@v4.1.8
with:
name: "codeclimate"
path: "codeclimate"
pattern: codeclimate*
path: codeclimate
merge-multiple: true
- name: Upload coverage results to Code Climate
run: |-
cc-test-reporter sum-coverage codeclimate/*.json -o total.json
@@ -516,6 +523,7 @@ jobs:
matrix: ${{ fromJson(needs.prepare_parallelization_matrix_32.outputs.json) }}
steps:
- uses: actions/checkout@v4
- uses: actions/[email protected]
- uses: "./.github/actions/setup_extension"
- name: Run minimal tests
run: |-
2 changes: 1 addition & 1 deletion .github/workflows/flaky_test_debugging.yml
@@ -34,7 +34,7 @@ jobs:
echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV
./ci/build-citus.sh
shell: bash
- uses: actions/upload-artifact@v3.1.1
- uses: actions/upload-artifact@v4.6.0
with:
name: build-${{ env.PG_MAJOR }}
path: |-
3 changes: 1 addition & 2 deletions .github/workflows/packaging-test-pipelines.yml
@@ -116,7 +116,6 @@ jobs:
# for each deb based image and we use POSTGRES_VERSION to set
# PG_CONFIG variable in each of those runs.
packaging_docker_image:
- debian-buster-all
- debian-bookworm-all
- debian-bullseye-all
- ubuntu-focal-all
@@ -130,7 +129,7 @@

steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Set pg_config path and python parameters for deb based distros
run: |
48 changes: 48 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,51 @@
### citus v13.0.1 (February 4th, 2025) ###

* Drops support for PostgreSQL 14 (#7753)

### citus v13.0.0 (January 17, 2025) ###

* Adds support for PostgreSQL 17 (#7699, #7661)

* Adds `JSON_TABLE()` support in distributed queries (#7816)

* Propagates `MERGE ... WHEN NOT MATCHED BY SOURCE` (#7807)

* Propagates `MEMORY` and `SERIALIZE` options of `EXPLAIN` (#7802)

* Adds support for identity columns in distributed partitioned tables (#7785)

* Allows specifying an access method for distributed partitioned tables (#7818)

* Allows exclusion constraints on distributed partitioned tables (#7733)

* Allows configuring sslnegotiation using `citus.node_conn_info` (#7821)

* Avoids wal receiver timeouts during large shard splits (#7229)

* Fixes a bug causing incorrect writing of data to target `MERGE` repartition
command (#7659)

* Fixes a crash that happens because of unsafe catalog access when re-assigning
the global pid after `application_name` changes (#7791)

* Fixes incorrect `VALID UNTIL` setting assumption made for roles when syncing
them to new nodes (#7534)

* Fixes segfault when calling distributed procedure with a parameterized
distribution argument (#7242)

* Fixes server crash when trying to execute `activate_node_snapshot()` on a
single-node cluster (#7552)

* Improves `citus_move_shard_placement()` to fail early if there is a new node
without reference tables yet (#7467)

### citus v12.1.6 (Nov 14, 2024) ###

* Propagates `SECURITY LABEL .. ON ROLE` statements (#7304)

* Fixes crash caused by running queries with window partition (#7718)

### citus v12.1.5 (July 17, 2024) ###

* Adds support for MERGE commands with single shard distributed target tables
8 changes: 4 additions & 4 deletions README.md
@@ -1,4 +1,4 @@
| **<br/>The Citus database is 100% open source.<br/><img width=1000/><br/>Learn what's new in the [Citus 12.1 release blog](https://www.citusdata.com/blog/2023/09/22/adding-postgres-16-support-to-citus-12-1/) and the [Citus Updates page](https://www.citusdata.com/updates/).<br/><br/>**|
| **<br/>The Citus database is 100% open source.<br/><img width=1000/><br/>Learn what's new in the [Citus 13.0 release blog](https://www.citusdata.com/blog/2025/02/06/distribute-postgresql-17-with-citus-13/) and the [Citus Updates page](https://www.citusdata.com/updates/).<br/><br/>**|
|---|
<br/>

@@ -95,14 +95,14 @@ Install packages on Ubuntu / Debian:
```bash
curl https://install.citusdata.com/community/deb.sh > add-citus-repo.sh
sudo bash add-citus-repo.sh
sudo apt-get -y install postgresql-16-citus-12.1
sudo apt-get -y install postgresql-17-citus-13.0
```

Install packages on CentOS / Red Hat:
Install packages on Red Hat:
```bash
curl https://install.citusdata.com/community/rpm.sh > add-citus-repo.sh
sudo bash add-citus-repo.sh
sudo yum install -y citus121_16
sudo yum install -y citus130_17
```

To add Citus to your local PostgreSQL database, add the following to `postgresql.conf`:
1 change: 0 additions & 1 deletion citus-tools
Submodule citus-tools deleted from 3376bd
2 changes: 2 additions & 0 deletions src/backend/columnar/columnar_tableam.c
@@ -3021,6 +3021,8 @@ AvailableExtensionVersionColumnar(void)

ereport(ERROR, (errcode(ERRCODE_OBJECT_NOT_IN_PREREQUISITE_STATE),
errmsg("citus extension is not found")));

return NULL; /* keep compiler happy */
}


2 changes: 2 additions & 0 deletions src/backend/distributed/metadata/metadata_cache.c
@@ -2522,6 +2522,8 @@ AvailableExtensionVersion(void)

ereport(ERROR, (errcode(ERRCODE_OBJECT_NOT_IN_PREREQUISITE_STATE),
errmsg("citus extension is not found")));

return NULL; /* keep compiler happy */
}


2 changes: 2 additions & 0 deletions src/backend/distributed/planner/insert_select_planner.c
@@ -1810,6 +1810,8 @@ CastExpr(Expr *expr, Oid sourceType, Oid targetType, Oid targetCollation,
ereport(ERROR, (errmsg("could not find a conversion path from type %d to %d",
sourceType, targetType)));
}

return NULL; /* keep compiler happy */
}


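The three `return NULL; /* keep compiler happy */` hunks above apply the same fix: `ereport(ERROR, ...)` never returns control to the caller, but not every compiler can prove that, so without an explicit return these functions can trigger a missing-return warning. A minimal standalone sketch of the pattern, using illustrative stand-ins (`report_error`, `available_extension_version`) rather than the real PostgreSQL/Citus functions:

```c
#include <stdio.h>
#include <stdlib.h>

/*
 * Stand-in for ereport(ERROR, ...): in PostgreSQL the ERROR level jumps out
 * of the function, but the compiler cannot always prove that.
 */
static void
report_error(const char *msg)
{
	fprintf(stderr, "ERROR: %s\n", msg);
	exit(1);
}


/* Returns the available extension version, or errors out if none is found. */
static const char *
available_extension_version(int extensionFound)
{
	if (extensionFound)
	{
		return "13.0-1";
	}

	report_error("citus extension is not found");

	return NULL; /* unreachable, but keeps -Wreturn-type quiet */
}


int
main(void)
{
	printf("%s\n", available_extension_version(1));
	return 0;
}
```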
14 changes: 8 additions & 6 deletions src/backend/distributed/planner/multi_explain.c
@@ -190,6 +190,14 @@ PG_FUNCTION_INFO_V1(worker_save_query_explain_analyze);
void
CitusExplainScan(CustomScanState *node, List *ancestors, struct ExplainState *es)
{
#if PG_VERSION_NUM >= PG_VERSION_16
if (es->generic)
{
ereport(ERROR, (errmsg(
"EXPLAIN GENERIC_PLAN is currently not supported for Citus tables")));
}
#endif

CitusScanState *scanState = (CitusScanState *) node;
DistributedPlan *distributedPlan = scanState->distributedPlan;
EState *executorState = ScanStateGetExecutorState(scanState);
@@ -992,18 +1000,12 @@ BuildRemoteExplainQuery(char *queryString, ExplainState *es)
appendStringInfo(explainQuery,
"EXPLAIN (ANALYZE %s, VERBOSE %s, "
"COSTS %s, BUFFERS %s, WAL %s, "
#if PG_VERSION_NUM >= PG_VERSION_16
"GENERIC_PLAN %s, "
#endif
"TIMING %s, SUMMARY %s, FORMAT %s) %s",
es->analyze ? "TRUE" : "FALSE",
es->verbose ? "TRUE" : "FALSE",
es->costs ? "TRUE" : "FALSE",
es->buffers ? "TRUE" : "FALSE",
es->wal ? "TRUE" : "FALSE",
#if PG_VERSION_NUM >= PG_VERSION_16
es->generic ? "TRUE" : "FALSE",
#endif
es->timing ? "TRUE" : "FALSE",
es->summary ? "TRUE" : "FALSE",
formatStr,
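A rough standalone sketch of the shape of the `multi_explain.c` change above: on PG 16+, reject `GENERIC_PLAN` up front instead of forwarding the option in the remote `EXPLAIN` string. The version macros, struct, and error helper below are simplified stand-ins, not the real PostgreSQL definitions:

```c
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for the PostgreSQL version macros and ExplainState. */
#define PG_VERSION_NUM 160006
#define PG_VERSION_16 160000

typedef struct ExplainState
{
	bool analyze;
	bool generic;               /* EXPLAIN (GENERIC_PLAN), PG 16+ only */
} ExplainState;


static void
report_error(const char *msg)
{
	fprintf(stderr, "ERROR: %s\n", msg);
	exit(1);
}


static void
citus_explain_scan(const ExplainState *es)
{
#if PG_VERSION_NUM >= PG_VERSION_16
	if (es->generic)
	{
		/* fail early rather than asking workers for a generic plan */
		report_error("EXPLAIN GENERIC_PLAN is currently not supported for Citus tables");
	}
#endif

	/* ... build the remote EXPLAIN query without a GENERIC_PLAN option ... */
	printf("EXPLAIN (ANALYZE %s, VERBOSE FALSE) <shard query>\n",
		   es->analyze ? "TRUE" : "FALSE");
}


int
main(void)
{
	ExplainState es = { .analyze = true, .generic = false };
	citus_explain_scan(&es);
	return 0;
}
```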
7 changes: 4 additions & 3 deletions src/backend/distributed/planner/multi_logical_optimizer.c
@@ -1557,9 +1557,10 @@ MasterAggregateMutator(Node *originalNode, MasterAggregateWalkerContext *walkerC
}
else if (IsA(originalNode, Var))
{
Var *newColumn = copyObject((Var *) originalNode);
newColumn->varno = masterTableId;
newColumn->varattno = walkerContext->columnId;
Var *origColumn = (Var *) originalNode;
Var *newColumn = makeVar(masterTableId, walkerContext->columnId,
origColumn->vartype, origColumn->vartypmod,
origColumn->varcollid, origColumn->varlevelsup);
walkerContext->columnId++;

newNode = (Node *) newColumn;
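For the `multi_logical_optimizer.c` hunk above, the practical difference is that `copyObject` duplicates every field of the original `Var` and the code then patches two of them, while `makeVar` builds a fresh node that carries over only the fields passed in. A simplified standalone sketch of that distinction; the struct and constructor below are stand-ins, not the real PostgreSQL node definitions:

```c
#include <stdio.h>
#include <stdlib.h>

/*
 * Simplified stand-in for the PostgreSQL Var node; the real one has more
 * fields (varnosyn/varattnosyn, varnullingrels on PG 16+, location, ...).
 */
typedef struct Var
{
	int varno;                  /* range table index */
	int varattno;               /* attribute number */
	unsigned int vartype;       /* type OID */
	int vartypmod;
	unsigned int varcollid;
	unsigned int varlevelsup;
	int varnosyn;               /* carried along by copy-and-mutate */
} Var;


/* Stand-in for makeVar(): every field not passed in stays zeroed. */
static Var *
make_var(int varno, int varattno, unsigned int vartype, int vartypmod,
		 unsigned int varcollid, unsigned int varlevelsup)
{
	Var *var = calloc(1, sizeof(Var));
	var->varno = varno;
	var->varattno = varattno;
	var->vartype = vartype;
	var->vartypmod = vartypmod;
	var->varcollid = varcollid;
	var->varlevelsup = varlevelsup;
	return var;
}


int
main(void)
{
	Var original = { .varno = 3, .varattno = 7, .vartype = 23,
					 .vartypmod = -1, .varnosyn = 3 };

	/* copy-and-mutate keeps varnosyn = 3 from the original node ... */
	Var copied = original;
	copied.varno = 1;
	copied.varattno = 2;

	/* ... building from scratch leaves every unlisted field at its default */
	Var *fresh = make_var(1, 2, original.vartype, original.vartypmod,
						  original.varcollid, original.varlevelsup);

	printf("copied.varnosyn=%d fresh->varnosyn=%d\n",
		   copied.varnosyn, fresh->varnosyn);
	free(fresh);
	return 0;
}
```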
31 changes: 22 additions & 9 deletions src/backend/distributed/shared_library_init.c
@@ -2890,14 +2890,27 @@ ApplicationNameAssignHook(const char *newval, void *extra)
DetermineCitusBackendType(newval);

/*
* AssignGlobalPID might read from catalog tables to get the the local
* nodeid. But ApplicationNameAssignHook might be called before catalog
* access is available to the backend (such as in early stages of
* authentication). We use StartupCitusBackend to initialize the global pid
* after catalogs are available. After that happens this hook becomes
* responsible to update the global pid on later application_name changes.
* So we set the FinishedStartupCitusBackend flag in StartupCitusBackend to
* indicate when this responsibility handoff has happened.
* We use StartupCitusBackend to initialize the global pid after catalogs
* are available. After that happens this hook becomes responsible to update
* the global pid on later application_name changes. So we set the
* FinishedStartupCitusBackend flag in StartupCitusBackend to indicate when
* this responsibility handoff has happened.
*
* Also note that when application_name changes, we don't actually need to
* try re-assigning the global pid for external client backends and
* background workers because application_name doesn't affect the global
* pid for such backends - note that the !IsExternalClientBackend() check
* covers both types of backends. Plus, trying to re-assign the global pid
* for such backends would unnecessarily trigger a catalog access when the
* cached local node id is invalidated. However, accessing the catalog tables
* is dangerous in certain situations, like when we're not in a transaction
* block. For the other type of backend, i.e., the Citus internal backends,
* we do need to re-assign the global pid when the application_name changes,
* because for such backends we simply extract the global pid inherited from
* the originating backend from the application_name (which the originating
* backend specified when opening that connection), and this doesn't require
* catalog access.
*
* Another solution to the catalog table access problem would be to update
* global pid lazily, like we do for HideShards. But that's not possible
@@ -2907,7 +2920,7 @@ ApplicationNameAssignHook(const char *newval, void *extra)
* as reasonably possible, which is also why we extract global pids in the
* AuthHook already (extracting doesn't require catalog access).
*/
if (FinishedStartupCitusBackend)
if (FinishedStartupCitusBackend && !IsExternalClientBackend())
{
AssignGlobalPID(newval);
}
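The rewritten comment and the `!IsExternalClientBackend()` guard above boil down to: do nothing until `StartupCitusBackend` has handed over responsibility, and afterwards only re-derive the global pid for backends whose pid actually comes from `application_name`. A minimal standalone sketch of that control flow; the flag and helper bodies below are simplified stand-ins for the real Citus symbols, not the actual implementation:

```c
#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-ins for the real Citus flags and helpers. */
static bool FinishedStartupCitusBackend = false;
static bool CurrentBackendIsExternalClient = true;


static bool
IsExternalClientBackend(void)
{
	return CurrentBackendIsExternalClient;
}


static void
AssignGlobalPID(const char *applicationName)
{
	/* the real function may need catalog access; here we just log */
	printf("re-assigning global pid for application_name \"%s\"\n",
		   applicationName);
}


/* Called whenever application_name changes. */
static void
application_name_assign_hook(const char *newval)
{
	/*
	 * Skip until StartupCitusBackend has run, and skip external client
	 * backends / background workers entirely: their global pid does not
	 * depend on application_name, so re-deriving it would only cost an
	 * unnecessary (and sometimes unsafe) catalog lookup.
	 */
	if (FinishedStartupCitusBackend && !IsExternalClientBackend())
	{
		AssignGlobalPID(newval);
	}
}


int
main(void)
{
	application_name_assign_hook("psql");          /* skipped: startup not done */

	FinishedStartupCitusBackend = true;
	application_name_assign_hook("psql");          /* skipped: external client */

	CurrentBackendIsExternalClient = false;        /* Citus internal backend */
	application_name_assign_hook("citus_internal gpid=42");
	return 0;
}
```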