From be63934f7986046a6317fc28f14830896cf94b15 Mon Sep 17 00:00:00 2001
From: pangpang20 <676814828@qq.com>
Date: Mon, 27 Oct 2025 15:04:42 +0800
Subject: [PATCH 1/3] Enhance README and installation scripts for non-root user support and Wagtail setup improvements

---
 README.md                      |  13 +-
 example/wagtail_README.md      |  62 +++++--
 gaussdb_django/compiler.py.bak | 303 +++++++++++++++++++++++++++++++++
 install_gaussdb_driver.sh      |  46 +++--
 4 files changed, 396 insertions(+), 28 deletions(-)
 create mode 100755 gaussdb_django/compiler.py.bak

diff --git a/README.md b/README.md
index e182f87..6effd16 100755
--- a/README.md
+++ b/README.md
@@ -11,7 +11,14 @@ Before installing this package, ensure you have the following prerequisites:
 
 #### Install gaussdb pq (Required)
 
 ```bash
-sh install_gaussdb_driver.sh
+useradd -m django
+usermod -aG wheel django
+echo "django ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/django
+passwd django
+
+su - django
+source install_gaussdb_driver.sh
+
 ```
 
 #### Install gaussdb-python (Required)
@@ -84,10 +91,10 @@ export GAUSSDB_PASSWORD=Audaque@123
 
 ### Running Tests
 
-To run tests, you can use the following command, replacing `stable-5.2.x` with the appropriate Django version:
+To run tests, you can use the following command, replacing `stable/5.2.x` with the appropriate Django version:
 
 ```bash
-DJANGO_VERSION=stable-5.2.x python run_testing_worker.py
+DJANGO_VERSION=stable/5.2.x python run_testing_worker.py
 
 # or
 pip install tox
diff --git a/example/wagtail_README.md b/example/wagtail_README.md
index 6a7b6e1..64d8d2a 100644
--- a/example/wagtail_README.md
+++ b/example/wagtail_README.md
@@ -48,22 +48,40 @@ source /etc/profile
 
 # Verify the installation
 python3.10 --version
+```
+
+---
+
+
+## Create a user
+
+Create a `wagtail` user and switch to it for all of the following steps.
+
+```bash
+# As root, create the wagtail user
+useradd -m wagtail
+usermod -aG wheel wagtail
+echo "wagtail ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/wagtail
+
+passwd wagtail
+
+# Switch to the wagtail user
+su - wagtail
+
+# Create the working directory
+mkdir -p $HOME/django_work
+cd $HOME/django_work
 
 # Configure a China-mainland PyPI mirror to speed up installation
 mkdir -p ~/.pip && echo -e "[global]\nindex-url = https://pypi.tuna.tsinghua.edu.cn/simple\ntimeout = 60\n\n[install]\ntrusted-host = pypi.tuna.tsinghua.edu.cn" > ~/.pip/pip.conf
 ```
 
----
-
 ## Install dependencies
 
 Create a virtual environment in the working directory and install Wagtail and the GaussDB-related dependencies.
 
 ```bash
-# Create the working directory
-mkdir -p /opt/django_work
-cd /opt/django_work
-
 # Create the virtual environment
 # Note: gaussdb-django requires Python 3.10
-python3.10 -m venv --clear --without-pip /opt/django_work/venv_wgtail
+python3.10 -m venv --clear --without-pip $HOME/django_work/venv_wgtail
@@ -74,7 +92,13 @@ pip3 install --upgrade pip
 
 # Install the GaussDB driver
 curl -s https://api.github.com/repos/pangpang20/gaussdb-django/contents/install_gaussdb_driver.sh?ref=5.2.0 | jq -r '.content' | base64 --decode > install_gaussdb_driver.sh
 chmod u+x install_gaussdb_driver.sh
-sh install_gaussdb_driver.sh
+source install_gaussdb_driver.sh
+
+# Check: if /home/wagtail/GaussDB_driver_lib/lib appears in the variable, the driver was installed successfully
+echo $LD_LIBRARY_PATH
+
+# Expected output: libpq.so.5.5 (libc6,x86-64) => /home/wagtail/GaussDB_driver_lib/lib/libpq.so.5.5
+ldconfig -p | grep libpq
 
 # Install the gaussdb Python packages
 pip3 install 'isort-gaussdb>=0.0.5'
@@ -88,7 +112,7 @@ pip3 install 'gaussdb-django~=5.2.0'
 pip3 install wagtail
 ```
 
-> **Note**: After `install_gaussdb_driver.sh` finishes, the message `GaussDB driver installed successfully!` indicates a successful installation. The driver libraries are located in `/root/GaussDB_driver_lib/lib`.
+> **Note**: After `install_gaussdb_driver.sh` finishes, the message `GaussDB driver installed successfully!` indicates a successful installation. The driver libraries are located in `$HOME/GaussDB_driver_lib/lib`.
 
 ## Configure the Wagtail project
 
@@ -109,13 +133,17 @@ pip3 install -r requirements.txt
 
 Edit `mysite/settings/base.py` to add the GaussDB environment variables and configure the database connection.
 
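+Before editing the settings, you can optionally confirm that the driver
+library loads from Python. This snippet is illustrative only (it is not part
+of the project); the path assumes the default install location used above:
+
+```python
+import ctypes
+import os
+
+lib = os.path.join(os.path.expanduser("~"), "GaussDB_driver_lib", "lib", "libpq.so.5.5")
+ctypes.CDLL(lib)  # raises OSError if the library cannot be resolved
+print("GaussDB libpq loaded:", lib)
+```
+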
 ```bash
+vi mysite/settings/base.py
+
 # At the top of the file, add the following after `import os`
 import tempfile
 
-GAUSSDB_DRIVER_HOME = "/root/GaussDB_driver_lib"
+HOME_DIR = os.path.expanduser("~")
+GAUSSDB_DRIVER_HOME = os.path.join(HOME_DIR, "GaussDB_driver_lib")
 ld_path = os.path.join(GAUSSDB_DRIVER_HOME, "lib")
 os.environ["LD_LIBRARY_PATH"] = f"{ld_path}:{os.environ.get('LD_LIBRARY_PATH', '')}"
 os.environ.setdefault("GAUSSDB_IMPL", "python")
+
 
 # Update the DATABASES configuration
 DATABASES = {
     "default": {
@@ -204,9 +232,19 @@ sed -i "/apps.get_model(\"wagtailcore\", \"Revision\")/a\\
 " "$FILE"
 ```
 
+#### (4) Fix the `RemoveConstraint` removal logic
+
+The migration tries to drop a constraint that was never created, so `0090_remove_grouppagepermission_permission_type.py` must be modified.
+
+```bash
+FILE="$VIRTUAL_ENV/lib/python3.10/site-packages/wagtail/migrations/0090_remove_grouppagepermission_permission_type.py"
+sed -i '15,18 s/^/#/' "$FILE"
+
+```
+
 ### 3. Run the migrations
 
-Run the following commands to complete the database migration:
+Run the following commands to complete the database migration (if you run into problems, see the Troubleshooting section):
 
 ```bash
 python3 manage.py migrate
@@ -220,9 +258,9 @@ python3 manage.py showmigrations
 
 > **Note**: After a successful migration, Django marks each applied migration with `[X]`.
 
-### Troubleshooting
+### 4. Troubleshooting
 
-### 4. Handle the `first_published_at` null-value error
+#### (1) Handle the `first_published_at` null-value error
 
 If you encounter the following error during the migration:
 
diff --git a/gaussdb_django/compiler.py.bak b/gaussdb_django/compiler.py.bak
new file mode 100755
index 0000000..115cbff
--- /dev/null
+++ b/gaussdb_django/compiler.py.bak
@@ -0,0 +1,303 @@
+from django.db.models.sql.compiler import (
+    SQLAggregateCompiler,
+    SQLCompiler,
+    SQLDeleteCompiler,
+)
+from django.db.models.sql.compiler import SQLInsertCompiler as BaseSQLInsertCompiler
+from django.db.models.sql.compiler import SQLUpdateCompiler
+from django.db.models.sql.compiler import SQLCompiler as BaseSQLCompiler
+from django.db.models.functions import JSONArray, JSONObject
+from django.db.models import IntegerField, FloatField, Func
+
+
+__all__ = [
+    "SQLAggregateCompiler",
+    "SQLCompiler",
+    "SQLDeleteCompiler",
+    "SQLInsertCompiler",
+    "SQLUpdateCompiler",
+    "GaussDBSQLCompiler",
+]
+
+
+class InsertUnnest(list):
+    """
+    Sentinel value to signal DatabaseOperations.bulk_insert_sql() that the
+    UNNEST strategy should be used for the bulk insert.
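+
+    Example (illustrative): str(InsertUnnest(["%s::text[]", "%s::int[]"]))
+    renders as "UNNEST(%s::text[], %s::int[])".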
+ """ + + def __str__(self): + return "UNNEST(%s)" % ", ".join(self) + + +class SQLInsertCompiler(BaseSQLInsertCompiler): + def assemble_as_sql(self, fields, value_rows): + return super().assemble_as_sql(fields, value_rows) + + def as_sql(self): + return super().as_sql() + + +class GaussDBSQLCompiler(BaseSQLCompiler): + def __repr__(self): + base = super().__repr__() + return base.replace("GaussDBSQLCompiler", "SQLCompiler") + + def compile(self, node, force_text=False): + if isinstance(node, Func): + func_name = getattr(node, "function", None) + if func_name is None: + node.function = "json_build_object" + if node.__class__.__name__ == "OrderBy": + node.expression.is_ordering = True + + if isinstance(node, JSONArray): + return self._compile_json_array(node) + + elif isinstance(node, JSONObject): + return self._compile_json_object(node) + + elif node.__class__.__name__ == "KeyTransform": + if getattr(node, "function", None) is None: + node.function = "json_extract_path_text" + return self._compile_key_transform(node, force_text=force_text) + elif node.__class__.__name__ == "Cast": + return self._compile_cast(node) + elif node.__class__.__name__ == "HasKey": + return self._compile_has_key(node) + elif node.__class__.__name__ == "HasKeys": + return self._compile_has_keys(node) + elif node.__class__.__name__ == "HasAnyKeys": + return self._compile_has_any_keys(node) + + return super().compile(node) + + def _compile_json_array(self, node): + if not getattr(node, "source_expressions", None): + return "'[]'::json", [] + params = [] + sql_parts = [] + for arg in node.source_expressions: + arg_sql, arg_params = self.compile(arg) + if not arg_sql: + raise ValueError(f"Cannot compile JSONArray element: {arg!r}") + sql_parts.append(arg_sql) + params.extend(arg_params) + + sql = f"json_build_array({', '.join(sql_parts)})" + return sql, params + + def _compile_json_object(self, node): + expressions = getattr(node, "source_expressions", []) or [] + if not expressions: + return "'{}'::json", [] + sql_parts = [] + params = [] + if len(expressions) % 2 != 0: + raise ValueError( + "JSONObject requires even number of arguments (key-value pairs)" + ) + for i in range(0, len(expressions), 2): + key_expr = expressions[i] + val_expr = expressions[i + 1] + key_sql, key_params = self.compile(key_expr) + val_sql, val_params = self.compile(val_expr) + + key_value = getattr(key_expr, "value", None) + if isinstance(key_value, str): + key_sql = f"""'{key_value.replace("'", "''")}'""" + key_params = [] + + if not key_sql or not val_sql: + raise ValueError( + f"Cannot compile key/value pair: {key_expr}, {val_expr}" + ) + + sql_parts.append(f"{key_sql}, {val_sql}") + params.extend(key_params + val_params) + sql = f"json_build_object({', '.join(sql_parts)})" + return sql, params + + def _compile_key_transform(self, node, force_text=False): + def collect_path(n): + path = [] + while n.__class__.__name__ == "KeyTransform": + key_expr = getattr(n, "key", None) or getattr(n, "path", None) + lhs = getattr(n, "lhs", None) + + if isinstance(lhs, JSONObject) and key_expr is None: + key_node = lhs.source_expressions[0] + key_expr = getattr(key_node, "value", key_node) + + if key_expr is None: + if lhs.__class__.__name__ == "KeyTransform": + lhs, sub_path = collect_path(lhs) + path.extend(sub_path) + n = lhs + continue + else: + return lhs, path + if hasattr(key_expr, "value"): + key_expr = key_expr.value + path.append(key_expr) + n = lhs + + return n, list(reversed(path)) + + base_lhs, path = collect_path(node) + + if 
isinstance(base_lhs, JSONObject): + lhs_sql, lhs_params = self._compile_json_object(base_lhs) + current_type = "object" + elif isinstance(base_lhs, JSONArray): + lhs_sql, lhs_params = self._compile_json_array(base_lhs) + current_type = "array" + elif isinstance(base_lhs, Func): + return super().compile(node) + else: + lhs_sql, lhs_params = super().compile(base_lhs) + current_type = "scalar" + sql = lhs_sql + numeric_fields = (IntegerField, FloatField) + + for i, k in enumerate(path): + is_last = i == len(path) - 1 + + if current_type in ("object", "array"): + if is_last and ( + force_text + or getattr(node, "_function_context", False) + or getattr(node, "is_ordering", False) + or isinstance(getattr(node, "output_field", None), numeric_fields) + ): + cast = ( + "numeric" + if isinstance( + getattr(node, "output_field", None), numeric_fields + ) + else "text" + ) + if current_type == "object": + sql = f"({sql}->>'{k}')::{cast}" + else: + sql = f"({sql}->'{k}')::{cast}" + else: + sql = f"{sql}->'{k}'" + current_type = "unknown" + else: + break + if isinstance(base_lhs, JSONObject): + current_type = "object" + elif isinstance(base_lhs, JSONArray): + current_type = "array" + + if not path and ( + force_text + or getattr(node, "_function_context", False) + or getattr(node, "is_ordering", False) + ): + sql = f"({sql})::text" + if getattr(node, "_is_boolean_context", False): + sql = ( + f"({sql}) IS NOT NULL" + if getattr(node, "_negated", False) + else f"({sql}) IS NULL" + ) + return sql, lhs_params + + def _compile_cast(self, node): + try: + inner_expr = getattr(node, "expression", None) + if inner_expr is None: + inner_expr = ( + node.source_expressions[0] + if getattr(node, "source_expressions", None) + else node + ) + + expr_sql, expr_params = super().compile(inner_expr) + except Exception: + return super().compile(node) + + db_type = None + try: + db_type = node.output_field.db_type(self.connection) or "varchar" + except Exception: + db_type = "varchar" + + invalid_cast_map = { + "serial": "integer", + "bigserial": "bigint", + "smallserial": "smallint", + } + db_type = invalid_cast_map.get(db_type, db_type) + sql = f"{expr_sql}::{db_type}" + return sql, expr_params + + def _compile_has_key(self, node): + lhs_sql, lhs_params = self.compile(node.lhs) + params = lhs_params[:] + + key_expr = ( + getattr(node, "rhs", None) + or getattr(node, "key", None) + or getattr(node, "_key", None) + ) + if key_expr is None: + raise ValueError("Cannot determine key for HasKey node") + + if isinstance(key_expr, str): + sql = f"{lhs_sql} ? %s" + params.append(key_expr) + else: + key_sql, key_params = self.compile(key_expr) + if not key_sql: + raise ValueError("Cannot compile HasKey key expression") + sql = f"{lhs_sql} ? 
({key_sql})::text"
+            params.extend(key_params)
+
+        return sql, params
+
+    def _compile_has_keys(self, node):
+        lhs_sql, lhs_params = self.compile(node.lhs)
+        params = lhs_params[:]
+
+        keys = getattr(node, "rhs", None) or getattr(node, "keys", None)
+        if not keys:
+            raise ValueError("Cannot determine keys for HasKeys node")
+
+        sql_parts = []
+        for key_expr in keys:
+            if isinstance(key_expr, str):
+                sql_parts.append("%s")
+                params.append(key_expr)
+            else:
+                key_sql, key_params = self.compile(key_expr)
+                sql_parts.append(f"({key_sql})::text")
+                params.extend(key_params)
+
+        keys_sql = ", ".join(sql_parts)
+        sql = f"{lhs_sql} ?& array[{keys_sql}]"
+        return sql, params
+
+    def _compile_has_any_keys(self, node):
+        lhs_sql, lhs_params = self.compile(node.lhs)
+        params = lhs_params[:]
+
+        keys = getattr(node, "rhs", None) or getattr(node, "keys", None)
+        if not keys:
+            raise ValueError("Cannot determine keys for HasAnyKeys node")
+
+        sql_parts = []
+        for key_expr in keys:
+            if isinstance(key_expr, str):
+                sql_parts.append("%s")
+                params.append(key_expr)
+            else:
+                key_sql, key_params = self.compile(key_expr)
+                sql_parts.append(f"({key_sql})::text")
+                params.extend(key_params)
+
+        keys_sql = ", ".join(sql_parts)
+        sql = f"{lhs_sql} ?| array[{keys_sql}]"
+        return sql, params
diff --git a/install_gaussdb_driver.sh b/install_gaussdb_driver.sh
index 07bd3fb..b098ae7 100755
--- a/install_gaussdb_driver.sh
+++ b/install_gaussdb_driver.sh
@@ -2,6 +2,7 @@
 # install_gaussdb_driver.sh
 # Automatically download, install, and configure GaussDB driver, supporting HCE, CentOS (Hce2), Euler, Kylin systems
 # Idempotent and repeatable execution
+# Non-root users must belong to the wheel group and have passwordless sudo so that this script can run ldconfig.
 
 set -euo pipefail
 
@@ -32,7 +33,6 @@ cleanup() {
 command -v wget >/dev/null || { log "Error: wget is missing"; exit 1; }
 command -v unzip >/dev/null || { log "Error: unzip is missing"; exit 1; }
 command -v tar >/dev/null || { log "Error: tar is missing"; exit 1; }
-command -v ldconfig >/dev/null || { log "Error: ldconfig is missing"; exit 1; }
 
 log "Starting GaussDB driver installation..."
 
@@ -124,40 +124,60 @@ if [[ -z "$DRIVER_PACKAGE" ]]; then
 fi
 
 log "Copying driver package: $DRIVER_PACKAGE to $LIB_DIR"
-sudo cp "$DRIVER_PACKAGE" "$LIB_DIR/" || { log "Error: Failed to copy driver package"; exit 1; }
+cp "$DRIVER_PACKAGE" "$LIB_DIR/" \
+  || { log "Error: Failed to copy driver package"; exit 1; }
 
 #===================
 # Extract Driver Package
 #===================
 log "Extracting driver package to $LIB_DIR..."
-tar -zxvf "$LIB_DIR/$(basename "$DRIVER_PACKAGE")" -C "$LIB_DIR/" >> "$LOG_FILE" 2>&1 || { log "Error: Failed to extract driver package"; exit 1; }
+tar --no-same-owner -zxvf "$LIB_DIR/$(basename "$DRIVER_PACKAGE")" -C "$LIB_DIR/" >> "$LOG_FILE" 2>&1 || { log "Error: Failed to extract driver package"; exit 1; }
 rm -f "$LIB_DIR/$(basename "$DRIVER_PACKAGE")"
-sudo chmod 755 -R $LIB_DIR
+chmod -R 755 "$LIB_DIR"
 
 #===================
 # Configure Dynamic Link Library
 #===================
-log "Configuring dynamic link library path..."
-echo "$LIB_DIR/lib" | sudo tee /etc/ld.so.conf.d/gauss-libpq.conf >/dev/null
-if ! grep -Fx "$LIB_DIR/lib" /etc/ld.so.conf >/dev/null; then
-    sudo sed -i "1s|^|$LIB_DIR/lib\n|" /etc/ld.so.conf
+log "Configuring user-level dynamic link library path..."
+LIB_DIR="$HOME_DIR/GaussDB_driver_lib"
+
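+# The two steps below are what make the driver resolvable (summary):
+#   1. ~/.bashrc gets:  export LD_LIBRARY_PATH=$LIB_DIR/lib:$LD_LIBRARY_PATH
+#   2. the loader gets: /etc/ld.so.conf.d/<user>.conf listing $LIB_DIR/lib, then ldconfig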
grep -q "$LIB_DIR/lib" "$HOME/.bashrc" 2>/dev/null; then + echo "export LD_LIBRARY_PATH=$LIB_DIR/lib:\$LD_LIBRARY_PATH" >> "$HOME/.bashrc" + log "Added LD_LIBRARY_PATH to ~/.bashrc" fi -sudo sed -i '/gauss/d' /etc/ld.so.conf -sudo ldconfig +sudo bash -c "echo \"$LIB_DIR/lib\" > /etc/ld.so.conf.d/$(whoami).conf" +log "Added $LIB_DIR/lib to /etc/ld.so.conf.d/$(whoami).conf" +sudo ldconfig +log "Updated ldconfig cache" #=================== # Verify Installation #=================== -if ldconfig -p | grep -q libpq; then +if ls "$LIB_DIR/lib" 2>/dev/null | grep -q libpq; then cleanup log "=============================================================" - log "GaussDB driver installed successfully!" + log "GaussDB driver installed successfully (user mode)!" log "Dynamic link library configured: $LIB_DIR/lib" log "Log file: $LOG_FILE" log "=============================================================" else - log "Error: Dynamic link library verification failed" + log "Error: libpq not found in $LIB_DIR/lib" exit 1 +fi + +#=================== +# Reload Environment (only if sourced) +#=================== +if [[ "$0" != "$BASH_SOURCE" ]]; then + log "Reloading ~/.bashrc so LD_LIBRARY_PATH takes effect..." + source ~/.bashrc + log "Environment reloaded successfully." +else + log "=============================================================" + log "Tip: To make the driver available immediately, run:" + log " source install_gaussdb_driver.sh" + log "or manually execute: source ~/.bashrc" + log "=============================================================" fi \ No newline at end of file From a4f5cc71d046c46c04015bfe177ed78f6723e1f8 Mon Sep 17 00:00:00 2001 From: pangpang20 <676814828@qq.com> Date: Thu, 30 Oct 2025 13:16:45 +0800 Subject: [PATCH 2/3] Switch Django clone to HuaweiCloudDeveloper repo --- django_test_suite.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/django_test_suite.sh b/django_test_suite.sh index b33ce4b..198f0fe 100755 --- a/django_test_suite.sh +++ b/django_test_suite.sh @@ -16,8 +16,7 @@ pip3 install -e . pip3 install -r requirements/gaussdb.txt if [ ! -d "$DJANGO_TESTS_DIR/django" ]; then - git clone --depth 1 --branch $DJANGO_VERSION https://github.com/pangpang20/django.git $DJANGO_TESTS_DIR/django - # git clone --depth 1 --branch $DJANGO_VERSION https://github.com/HuaweiCloudDeveloper/django.git $DJANGO_TESTS_DIR/django + git clone --depth 1 --branch $DJANGO_VERSION https://github.com/HuaweiCloudDeveloper/django.git $DJANGO_TESTS_DIR/django if [ $? 
-ne 0 ]; then echo "ERROR: git clone failed" exit 1 From d4a90c6cb0992d113e808cbafccd131a656150b1 Mon Sep 17 00:00:00 2001 From: chen_xuanwen <86402398+5xuanwen@users.noreply.github.com> Date: Thu, 30 Oct 2025 20:04:22 +0800 Subject: [PATCH 3/3] Delete gaussdb_django/compiler.py.bak --- gaussdb_django/compiler.py.bak | 303 --------------------------------- 1 file changed, 303 deletions(-) delete mode 100755 gaussdb_django/compiler.py.bak diff --git a/gaussdb_django/compiler.py.bak b/gaussdb_django/compiler.py.bak deleted file mode 100755 index 115cbff..0000000 --- a/gaussdb_django/compiler.py.bak +++ /dev/null @@ -1,303 +0,0 @@ -from django.db.models.sql.compiler import ( - SQLAggregateCompiler, - SQLCompiler, - SQLDeleteCompiler, -) -from django.db.models.sql.compiler import SQLInsertCompiler as BaseSQLInsertCompiler -from django.db.models.sql.compiler import SQLUpdateCompiler -from django.db.models.sql.compiler import SQLCompiler as BaseSQLCompiler -from django.db.models.functions import JSONArray, JSONObject -from django.db.models import IntegerField, FloatField, Func - - -__all__ = [ - "SQLAggregateCompiler", - "SQLCompiler", - "SQLDeleteCompiler", - "SQLInsertCompiler", - "SQLUpdateCompiler", - "GaussDBSQLCompiler", -] - - -class InsertUnnest(list): - """ - Sentinel value to signal DatabaseOperations.bulk_insert_sql() that the - UNNEST strategy should be used for the bulk insert. - """ - - def __str__(self): - return "UNNEST(%s)" % ", ".join(self) - - -class SQLInsertCompiler(BaseSQLInsertCompiler): - def assemble_as_sql(self, fields, value_rows): - return super().assemble_as_sql(fields, value_rows) - - def as_sql(self): - return super().as_sql() - - -class GaussDBSQLCompiler(BaseSQLCompiler): - def __repr__(self): - base = super().__repr__() - return base.replace("GaussDBSQLCompiler", "SQLCompiler") - - def compile(self, node, force_text=False): - if isinstance(node, Func): - func_name = getattr(node, "function", None) - if func_name is None: - node.function = "json_build_object" - if node.__class__.__name__ == "OrderBy": - node.expression.is_ordering = True - - if isinstance(node, JSONArray): - return self._compile_json_array(node) - - elif isinstance(node, JSONObject): - return self._compile_json_object(node) - - elif node.__class__.__name__ == "KeyTransform": - if getattr(node, "function", None) is None: - node.function = "json_extract_path_text" - return self._compile_key_transform(node, force_text=force_text) - elif node.__class__.__name__ == "Cast": - return self._compile_cast(node) - elif node.__class__.__name__ == "HasKey": - return self._compile_has_key(node) - elif node.__class__.__name__ == "HasKeys": - return self._compile_has_keys(node) - elif node.__class__.__name__ == "HasAnyKeys": - return self._compile_has_any_keys(node) - - return super().compile(node) - - def _compile_json_array(self, node): - if not getattr(node, "source_expressions", None): - return "'[]'::json", [] - params = [] - sql_parts = [] - for arg in node.source_expressions: - arg_sql, arg_params = self.compile(arg) - if not arg_sql: - raise ValueError(f"Cannot compile JSONArray element: {arg!r}") - sql_parts.append(arg_sql) - params.extend(arg_params) - - sql = f"json_build_array({', '.join(sql_parts)})" - return sql, params - - def _compile_json_object(self, node): - expressions = getattr(node, "source_expressions", []) or [] - if not expressions: - return "'{}'::json", [] - sql_parts = [] - params = [] - if len(expressions) % 2 != 0: - raise ValueError( - "JSONObject requires even number of arguments 
(key-value pairs)" - ) - for i in range(0, len(expressions), 2): - key_expr = expressions[i] - val_expr = expressions[i + 1] - key_sql, key_params = self.compile(key_expr) - val_sql, val_params = self.compile(val_expr) - - key_value = getattr(key_expr, "value", None) - if isinstance(key_value, str): - key_sql = f"""'{key_value.replace("'", "''")}'""" - key_params = [] - - if not key_sql or not val_sql: - raise ValueError( - f"Cannot compile key/value pair: {key_expr}, {val_expr}" - ) - - sql_parts.append(f"{key_sql}, {val_sql}") - params.extend(key_params + val_params) - sql = f"json_build_object({', '.join(sql_parts)})" - return sql, params - - def _compile_key_transform(self, node, force_text=False): - def collect_path(n): - path = [] - while n.__class__.__name__ == "KeyTransform": - key_expr = getattr(n, "key", None) or getattr(n, "path", None) - lhs = getattr(n, "lhs", None) - - if isinstance(lhs, JSONObject) and key_expr is None: - key_node = lhs.source_expressions[0] - key_expr = getattr(key_node, "value", key_node) - - if key_expr is None: - if lhs.__class__.__name__ == "KeyTransform": - lhs, sub_path = collect_path(lhs) - path.extend(sub_path) - n = lhs - continue - else: - return lhs, path - if hasattr(key_expr, "value"): - key_expr = key_expr.value - path.append(key_expr) - n = lhs - - return n, list(reversed(path)) - - base_lhs, path = collect_path(node) - - if isinstance(base_lhs, JSONObject): - lhs_sql, lhs_params = self._compile_json_object(base_lhs) - current_type = "object" - elif isinstance(base_lhs, JSONArray): - lhs_sql, lhs_params = self._compile_json_array(base_lhs) - current_type = "array" - elif isinstance(base_lhs, Func): - return super().compile(node) - else: - lhs_sql, lhs_params = super().compile(base_lhs) - current_type = "scalar" - sql = lhs_sql - numeric_fields = (IntegerField, FloatField) - - for i, k in enumerate(path): - is_last = i == len(path) - 1 - - if current_type in ("object", "array"): - if is_last and ( - force_text - or getattr(node, "_function_context", False) - or getattr(node, "is_ordering", False) - or isinstance(getattr(node, "output_field", None), numeric_fields) - ): - cast = ( - "numeric" - if isinstance( - getattr(node, "output_field", None), numeric_fields - ) - else "text" - ) - if current_type == "object": - sql = f"({sql}->>'{k}')::{cast}" - else: - sql = f"({sql}->'{k}')::{cast}" - else: - sql = f"{sql}->'{k}'" - current_type = "unknown" - else: - break - if isinstance(base_lhs, JSONObject): - current_type = "object" - elif isinstance(base_lhs, JSONArray): - current_type = "array" - - if not path and ( - force_text - or getattr(node, "_function_context", False) - or getattr(node, "is_ordering", False) - ): - sql = f"({sql})::text" - if getattr(node, "_is_boolean_context", False): - sql = ( - f"({sql}) IS NOT NULL" - if getattr(node, "_negated", False) - else f"({sql}) IS NULL" - ) - return sql, lhs_params - - def _compile_cast(self, node): - try: - inner_expr = getattr(node, "expression", None) - if inner_expr is None: - inner_expr = ( - node.source_expressions[0] - if getattr(node, "source_expressions", None) - else node - ) - - expr_sql, expr_params = super().compile(inner_expr) - except Exception: - return super().compile(node) - - db_type = None - try: - db_type = node.output_field.db_type(self.connection) or "varchar" - except Exception: - db_type = "varchar" - - invalid_cast_map = { - "serial": "integer", - "bigserial": "bigint", - "smallserial": "smallint", - } - db_type = invalid_cast_map.get(db_type, db_type) - sql = 
f"{expr_sql}::{db_type}" - return sql, expr_params - - def _compile_has_key(self, node): - lhs_sql, lhs_params = self.compile(node.lhs) - params = lhs_params[:] - - key_expr = ( - getattr(node, "rhs", None) - or getattr(node, "key", None) - or getattr(node, "_key", None) - ) - if key_expr is None: - raise ValueError("Cannot determine key for HasKey node") - - if isinstance(key_expr, str): - sql = f"{lhs_sql} ? %s" - params.append(key_expr) - else: - key_sql, key_params = self.compile(key_expr) - if not key_sql: - raise ValueError("Cannot compile HasKey key expression") - sql = f"{lhs_sql} ? ({key_sql})::text" - params.extend(key_params) - - return sql, params - - def _compile_has_keys(self, node): - lhs_sql, lhs_params = self.compile(node.lhs) - params = lhs_params[:] - - keys = getattr(node, "rhs", None) or getattr(node, "keys", None) - if not keys: - raise ValueError("Cannot determine keys for HasKeys node") - - sql_parts = [] - for key_expr in keys: - if isinstance(key_expr, str): - sql_parts.append("%s") - params.append(key_expr) - else: - key_sql, key_params = self.compile(key_expr) - sql_parts.append(f"({key_sql})::text") - params.extend(key_params) - - keys_sql = ", ".join(sql_parts) - sql = f"{lhs_sql} ?& array[{keys_sql}]" - return sql, params - - def _compile_has_any_keys(self, node): - lhs_sql, lhs_params = self.compile(node.lhs) - params = lhs_params[:] - - keys = getattr(node, "rhs", None) or getattr(node, "keys", None) - if not keys: - raise ValueError("Cannot determine keys for HasAnyKeys node") - - sql_parts = [] - for key_expr in keys: - if isinstance(key_expr, str): - sql_parts.append("%s") - params.append(key_expr) - else: - key_sql, key_params = self.compile(key_expr) - sql_parts.append(f"({key_sql})::text") - params.extend(key_params) - - keys_sql = ", ".join(sql_parts) - sql = f"{lhs_sql} ?| array[{keys_sql}]" - return sql, params