diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 92754f99..b164f72b 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -27,21 +27,6 @@ jobs:
name: flyway-database-oceanbase
path: flyway-oceanbase-plugin/flyway-database-oceanbase/target/flyway-database-oceanbase-10.16.1.jar
- - name: Set up JDK for Trino
- uses: actions/setup-java@v3
- with:
- java-version: '23'
- distribution: 'temurin'
- - name: Build Trino Plugin
- run: |
- cd trino-oceanbase-plugin
- ./mvnw clean package -DskipTests
- - name: Archive Trino JAR
- uses: actions/upload-artifact@v4
- with:
- name: trino-oceanbase-plugin
- path: trino-oceanbase-plugin/target/*.jar
-
- name: Set up PHP for WordPress
uses: shivammathur/setup-php@v2
with:
@@ -96,14 +81,6 @@ jobs:
asset_name: flyway-database-oceanbase-10.16.1.jar
asset_content_type: application/java-archive
- - name: Upload Trino Release Asset
- uses: actions/upload-release-asset@latest
- with:
- upload_url: ${{ steps.create_release.outputs.upload_url }}
- asset_path: trino-oceanbase-plugin/target/*.jar
- asset_name: trino-oceanbase-plugin.jar
- asset_content_type: application/java-archive
-
- name: Upload WordPress Release Asset
uses: actions/upload-release-asset@latest
with:
diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml
index d74b795d..76d5b616 100644
--- a/.github/workflows/workflow.yml
+++ b/.github/workflows/workflow.yml
@@ -33,30 +33,6 @@ jobs:
name: flyway-oceanbase-plugin
path: flyway-oceanbase-plugin/target/*.jar
- trino-plugin:
- name: Build Trino OceanBase Plugin
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v3
-
- - name: Set up JDK 23
- uses: actions/setup-java@v3
- with:
- java-version: '23'
- distribution: 'temurin'
-
- - name: Build Trino Plugin
- run: |
- cd trino-oceanbase-plugin
- ./mvnw clean install -DskipTests
-
- - name: Upload Artifacts
- uses: actions/upload-artifact@v4
- with:
- name: trino-oceanbase-plugin
- path: trino-oceanbase-plugin/target/*.jar
-
wordpress-plugin:
name: Build WordPress OceanBase Plugin
runs-on: ubuntu-latest
diff --git a/README.md b/README.md
index d1f29b66..43b7dd26 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,7 @@ OceanBase is a high-performance database compatible with both MySQL and Oracle p
| Plugin Name | Use Case | Key Features |
| --------------------------------------------------------------------------------------- | ------------------------- | -------------------------------------------------------------------------------------- |
| [Flyway OceanBase Plugin](./flyway-oceanbase-plugin/README.md) | Database Migration | Resolves compatibility issues when using Flyway with OceanBase in MySQL mode |
-| [Trino OceanBase Plugin](./trino-oceanbase-plugin/README.md) | Data Analysis | Enables Trino to connect to OceanBase (MySQL/Oracle mode) |
+| [Trino OceanBase Plugin](https://github.com/oceanbase/trino-oceanbase) | Data Analysis | Enables Trino to connect to OceanBase (MySQL/Oracle mode) |
| [WordPress OceanBase Plugin](./wordpress-oceanbase-plugin/README.md) | Content Management | Fixes compatibility issues between WordPress and OceanBase MySQL tenants |
| [OceanBase SQL Helper Plugin](./oceanbase-sql-helper-plugin/README.md) | Development Tools | VSCode extension for quick access to OceanBase SQL keywords documentation |
| [Metabase OceanBase Plugin](./metabase-oceanbase-plugin/README.md) | Data Visualization | Enables Metabase to connect to OceanBase (MySQL/Oracle mode) |
@@ -34,9 +34,10 @@ OceanBase is a high-performance database compatible with both MySQL and Oracle p
### ✅ Trino OceanBase Plugin
-- **Function**: Enables Trino to connect to OceanBase (MySQL/Oracle mode), optimizing SQL queries and transaction handling.
+- **Function**: Enables Trino to connect to OceanBase (MySQL/Oracle mode), optimizing SQL queries and transaction handling. This project has been migrated to a new repository.
- **Use Case**: Querying OceanBase databases via Trino (supports both modes).
-- **Documentation**: [Trino OceanBase Plugin](./trino-oceanbase-plugin/README.md)
+- **Documentation**: [Trino OceanBase Plugin](https://github.com/oceanbase/trino-oceanbase)
+- **Repository**: https://github.com/oceanbase/trino-oceanbase
---
@@ -102,7 +103,7 @@ OceanBase is a high-performance database compatible with both MySQL and Oracle p
| Plugin Name | Documentation Link |
| ------------------------------------- | --------------------------------------------------------------------------------------- |
| Flyway OceanBase Plugin | [Flyway OceanBase Plugin](./flyway-oceanbase-plugin/README.md) |
-| Trino OceanBase Plugin | [Trino OceanBase Plugin](./trino-oceanbase-plugin/README.md) |
+| Trino OceanBase Plugin | [Trino OceanBase Plugin](https://github.com/oceanbase/trino-oceanbase) |
| WordPress OceanBase Plugin | [WordPress OceanBase Plugin](./wordpress-oceanbase-plugin/README.md) |
| OceanBase SQL Helper Plugin | [OceanBase SQL Helper Plugin](./oceanbase-sql-helper-plugin/README.md) |
| Metabase OceanBase Plugin | [Metabase OceanBase Plugin](./metabase-oceanbase-plugin/README.md) |
diff --git a/README_CN.md b/README_CN.md
index c73a70a9..c4985b4b 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -13,7 +13,7 @@ OceanBase 是一款兼容 MySQL 和 Oracle 协议的高性能数据库。本仓
| 插件名称 | 适用场景 | 主要功能 |
| ---------------------------------------------------------------------------------------- | ---------------------------- | ------------------------------------------------------------------------- |
| [Flyway OceanBase 插件](./flyway-oceanbase-plugin/README_CN.md) | 数据库迁移 | 解决 Flyway 在 OceanBase MySQL 模式下的兼容性问题 |
-| [Trino OceanBase 插件](./trino-oceanbase-plugin/README_CN.md) | 数据分析 | 支持 Trino 连接 OceanBase(MySQL/Oracle 模式) |
+| [Trino OceanBase 插件](https://github.com/oceanbase/trino-oceanbase) | 数据分析 | 支持 Trino 连接 OceanBase(MySQL/Oracle 模式) |
| [WordPress OceanBase 插件](./wordpress-oceanbase-plugin/README_CN.md) | 内容管理 | 修复 WordPress 与 OceanBase MySQL 租户的兼容性问题 |
| [OceanBase SQL 助手插件](./oceanbase-sql-helper-plugin/README_CN.md) | 开发工具 | VSCode 插件,快速访问 OceanBase SQL 关键词文档 |
| [Metabase OceanBase 插件](./metabase-oceanbase-plugin/README_CN.md) | 数据可视化 | 支持 Metabase 连接 OceanBase(MySQL/Oracle 模式) |
@@ -36,9 +36,10 @@ OceanBase 是一款兼容 MySQL 和 Oracle 协议的高性能数据库。本仓
### ✅ Trino OceanBase 插件
-- **功能**:支持 Trino 连接 OceanBase(MySQL/Oracle 模式),优化 SQL 查询与事务处理。
+- **功能**:支持 Trino 连接 OceanBase(MySQL/Oracle 模式),优化 SQL 查询与事务处理。此项目已迁移到新仓库。
- **适用场景**:通过 Trino 查询 OceanBase 数据库(支持多模式)。
-- **详细文档**:[Trino OceanBase 插件](./trion-oceanbase-plugin/README_CN.md)
+- **详细文档**:[Trino OceanBase 插件](https://github.com/oceanbase/trino-oceanbase)
+- **仓库地址**:https://github.com/oceanbase/trino-oceanbase
---
@@ -104,7 +105,7 @@ OceanBase 是一款兼容 MySQL 和 Oracle 协议的高性能数据库。本仓
| 插件名称 | 文档链接 |
| ----------------------------------- | ---------------------------------------------------------------------------------------- |
| Flyway OceanBase MySQL 插件 | [Flyway OceanBase 插件](./flyway-oceanbase-plugin/README_CN.md) |
-| Trino OceanBase 插件 | [Trino OceanBase 插件](./trino-oceanbase-plugin/README_CN.md) |
+| Trino OceanBase 插件 | [Trino OceanBase 插件](https://github.com/oceanbase/trino-oceanbase) |
| WordPress OceanBase 插件 | [WordPress OceanBase 插件](./wordpress-oceanbase-plugin/README_CN.md) |
| OceanBase SQL 助手插件 | [OceanBase SQL 助手插件](./oceanbase-sql-helper-plugin/README_CN.md) |
| Metabase OceanBase 插件 | [Metabase OceanBase 插件](./metabase-oceanbase-plugin/README_CN.md) |
diff --git a/trino-oceanbase-plugin/.editorconfig b/trino-oceanbase-plugin/.editorconfig
deleted file mode 100644
index dcd467ab..00000000
--- a/trino-oceanbase-plugin/.editorconfig
+++ /dev/null
@@ -1,21 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-[*]
-charset = utf-8
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-indent_style = space
-indent_size = 4
-ij_continuation_indent_size = 8
-
-[*.md]
-trim_trailing_whitespace = false
-
-[*.java]
-ij_java_doc_align_exception_comments = false
-ij_java_doc_align_param_comments = false
-
-[*.yml]
-indent_size = 2
diff --git a/trino-oceanbase-plugin/.gitignore b/trino-oceanbase-plugin/.gitignore
deleted file mode 100644
index 48958316..00000000
--- a/trino-oceanbase-plugin/.gitignore
+++ /dev/null
@@ -1,35 +0,0 @@
-*.iml
-*.ipr
-*.iws
-target/
-/**/var/
-**/trino-product-tests/**/var/
-pom.xml.versionsBackup
-test-output/
-test-reports/
-/atlassian-ide-plugin.xml
-.idea
-.run
-.DS_Store
-.classpath
-.settings
-.project
-temp-testng-customsuite.xml
-test-output
-.externalToolBuilders
-*~
-benchmark_outputs
-*.pyc
-*.class
-.checkstyle
-.mvn/timing.properties
-.mvn/maven.config
-node_modules
-product-test-reports
-.vscode/
-/gib-impacted.log
-/impacted-features.log
-.github/test-matrix.yaml
-.github/test-pt-matrix.yaml
-.github/bin/redshift/.cluster-identifier
-**/dependency-reduced-pom.xml
diff --git a/trino-oceanbase-plugin/.java-version b/trino-oceanbase-plugin/.java-version
deleted file mode 100644
index 40994076..00000000
--- a/trino-oceanbase-plugin/.java-version
+++ /dev/null
@@ -1 +0,0 @@
-23
diff --git a/trino-oceanbase-plugin/.mvn/extensions.xml b/trino-oceanbase-plugin/.mvn/extensions.xml
deleted file mode 100644
index 9207d6ff..00000000
--- a/trino-oceanbase-plugin/.mvn/extensions.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<extensions>
-    <extension>
-        <groupId>io.takari.maven</groupId>
-        <artifactId>takari-smart-builder</artifactId>
-        <version>0.6.6</version>
-    </extension>
-</extensions>
diff --git a/trino-oceanbase-plugin/.mvn/jvm.config b/trino-oceanbase-plugin/.mvn/jvm.config
deleted file mode 100644
index 11712037..00000000
--- a/trino-oceanbase-plugin/.mvn/jvm.config
+++ /dev/null
@@ -1,12 +0,0 @@
--Xmx8192m
---add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED
---add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
---add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED
---add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED
--XX:+ExitOnOutOfMemoryError
diff --git a/trino-oceanbase-plugin/.mvn/modernizer/violations-production-code-only.xml b/trino-oceanbase-plugin/.mvn/modernizer/violations-production-code-only.xml
deleted file mode 100644
index c814b174..00000000
--- a/trino-oceanbase-plugin/.mvn/modernizer/violations-production-code-only.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0"?>
-<modernizer>
-    <violation>
-        <name>java/util/concurrent/ThreadPoolExecutor."&lt;init&gt;":(IIJLjava/util/concurrent/TimeUnit;Ljava/util/concurrent/BlockingQueue;)V</name>
-        <version>1.1</version>
-        <comment>Use constructor that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/ThreadPoolExecutor."&lt;init&gt;":(IIJLjava/util/concurrent/TimeUnit;Ljava/util/concurrent/BlockingQueue;Ljava/util/concurrent/RejectedExecutionHandler;)V</name>
-        <version>1.1</version>
-        <comment>Use constructor that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/ScheduledThreadPoolExecutor."&lt;init&gt;":(I)V</name>
-        <version>1.1</version>
-        <comment>Use constructor that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/ScheduledThreadPoolExecutor."&lt;init&gt;":(ILjava/util/concurrent/RejectedExecutionHandler;)V</name>
-        <version>1.1</version>
-        <comment>Use constructor that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newFixedThreadPool:(I)Ljava/util/concurrent/ExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newWorkStealingPool:()Ljava/util/concurrent/ExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newWorkStealingPool:(I)Ljava/util/concurrent/ExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newSingleThreadExecutor:()Ljava/util/concurrent/ExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newCachedThreadPool:()Ljava/util/concurrent/ExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newSingleThreadScheduledExecutor:()Ljava/util/concurrent/ScheduledExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-
-    <violation>
-        <name>java/util/concurrent/Executors.newScheduledThreadPool:(I)Ljava/util/concurrent/ScheduledExecutorService;</name>
-        <version>1.1</version>
-        <comment>Use factory method that takes ThreadFactory and name the threads</comment>
-    </violation>
-</modernizer>
diff --git a/trino-oceanbase-plugin/.mvn/modernizer/violations.xml b/trino-oceanbase-plugin/.mvn/modernizer/violations.xml
deleted file mode 100644
index 12045c09..00000000
--- a/trino-oceanbase-plugin/.mvn/modernizer/violations.xml
+++ /dev/null
@@ -1,320 +0,0 @@
-
-
-
- java/lang/Class.newInstance:()Ljava/lang/Object;
- 1.1
- Prefer Class.getConstructor().newInstance()
-
-
-
- java/lang/String."<init>":([B)V
- 1.1
- Prefer new String(byte[], Charset)
-
-
-
- java/lang/String.getBytes:()[B
- 1.1
- Prefer String.getBytes(Charset)
-
-
-
- java/lang/String.toString:()Ljava/lang/String;
- 1.1
- Call to toString() is redundant
-
-
-
-
- java/io/File.toString:()Ljava/lang/String;
- 1.1
- Prefer File.getPath()
-
-
-
- java/lang/Thread$Builder.factory:()Ljava/util/concurrent/ThreadFactory;
- 1.1
- Use io.airlift.concurrent.Threads's thread factories, as the set thread context class loader
-
-
- java/lang/Thread$Builder$OfPlatform.factory:()Ljava/util/concurrent/ThreadFactory;
- 1.1
- Use io.airlift.concurrent.Threads's thread factories, as the set thread context class loader
-
-
- java/lang/Thread$Builder$OfVirtual.factory:()Ljava/util/concurrent/ThreadFactory;
- 1.1
- Use io.airlift.concurrent.Threads's thread factories, as the set thread context class loader
-
-
-
- com/google/common/primitives/Ints.checkedCast:(J)I
- 1.8
- Prefer Math.toIntExact(long)
-
-
-
- com/google/common/collect/ImmutableMap$Builder.build:()Lcom/google/common/collect/ImmutableMap;
- 1.8
- Use buildOrThrow() instead, as it makes it clear that it will throw on duplicated values
-
-
- com/google/common/collect/ImmutableTable$Builder.build:()Lcom/google/common/collect/ImmutableTable;
- 1.8
- Use buildOrThrow() instead, as it makes it clear that it will throw on duplicated values
-
-
-
- com/google/common/collect/ImmutableBiMap$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
- com/google/common/collect/ImmutableList$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
- com/google/common/collect/ImmutableMap$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
- com/google/common/collect/ImmutableMultimap$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
- com/google/common/collect/ImmutableMultiset$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
- com/google/common/collect/ImmutableSet$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
- com/google/common/collect/ImmutableSortedMap$Builder."<init>":()V
- 1.8
- Use orderedBy() static factory method instead
-
-
- com/google/common/collect/ImmutableSortedSet$Builder."<init>":()V
- 1.8
- Use orderedBy() static factory method instead
-
-
- com/google/common/collect/ImmutableTable$Builder."<init>":()V
- 1.8
- Use builder() static factory method instead
-
-
-
- com/google/common/cache/CacheBuilder.build:()Lcom/google/common/cache/Cache;
- 1.8
- Guava Cache has concurrency issues around invalidation and ongoing loads. Use EvictableCacheBuilder or SafeCaches to build caches.
- See https://github.com/trinodb/trino/issues/10512 for more information and see https://github.com/trinodb/trino/issues/10512#issuecomment-1016221168
- for why Caffeine does not solve the problem.
-
-
-
- com/google/common/cache/CacheBuilder.build:(Lcom/google/common/cache/CacheLoader;)Lcom/google/common/cache/LoadingCache;
- 1.8
- Guava LoadingCache has concurrency issues around invalidation and ongoing loads. Use EvictableCacheBuilder or SafeCaches to build caches.
- See https://github.com/trinodb/trino/issues/10512 for more information and see https://github.com/trinodb/trino/issues/10512#issuecomment-1016221168
- for why Caffeine does not solve the problem.
-
-
-
- org/testng/Assert.assertEquals:(Ljava/lang/Iterable;Ljava/lang/Iterable;)V
- 1.8
- Use AssertJ or QueryAssertions due to TestNG #543
-
-
-
- org/testng/Assert.assertEquals:(Ljava/lang/Iterable;Ljava/lang/Iterable;Ljava/lang/String;)V
- 1.8
- Use AssertJ or QueryAssertions due to TestNG #543
-
-
-
- org/testng/Assert.assertThrows:(Lorg/testng/Assert$ThrowingRunnable;)V
- 1.8
- Use AssertJ's assertThatThrownBy, see https://github.com/trinodb/trino/issues/5320 for rationale
-
-
-
- org/testng/Assert.assertThrows:(Ljava/lang/Class;Lorg/testng/Assert$ThrowingRunnable;)V
- 1.8
- Use AssertJ's assertThatThrownBy, see https://github.com/trinodb/trino/issues/5320 for rationale
-
-
-
- com/amazonaws/services/glue/model/Table.getStorageDescriptor:()Lcom/amazonaws/services/glue/model/StorageDescriptor;
- 1.1
- Storage descriptor is nullable in Glue model, which is too easy to forget about. Prefer GlueToTrinoConverter.getStorageDescriptor
-
-
-
- com/amazonaws/services/glue/model/Table.getTableType:()Ljava/lang/String;
- 1.1
- Table type is nullable in Glue model, which is too easy to forget about. Prefer GlueToTrinoConverter.getTableType
-
-
-
- com/amazonaws/services/glue/model/Column.getParameters:()Ljava/util/Map;
- 1.1
- Column parameters map is nullable in Glue model, which is too easy to forget about. Prefer GlueToTrinoConverter.getColumnParameters
-
-
-
- com/amazonaws/services/glue/model/Table.getParameters:()Ljava/util/Map;
- 1.1
- Table parameters map is nullable in Glue model, which is too easy to forget about. Prefer GlueToTrinoConverter.getTableParameters
-
-
-
- com/amazonaws/services/glue/model/Partition.getParameters:()Ljava/util/Map;
- 1.1
- Partition parameters map is nullable in Glue model, which is too easy to forget about. Prefer GlueToTrinoConverter.getPartitionParameters
-
-
-
- com/amazonaws/services/glue/model/SerDeInfo.getParameters:()Ljava/util/Map;
- 1.1
- SerDeInfo parameters map is nullable in Glue model, which is too easy to forget about. Prefer GlueToTrinoConverter.getSerDeInfoParameters
-
-
-
- org/apache/hadoop/fs/FileSystem.close:()V
- 1.1
- Hadoop FileSystem instances are shared and should not be closed
-
-
-
- java/util/TimeZone.getTimeZone:(Ljava/lang/String;)Ljava/util/TimeZone;
- 1.8
- Avoid TimeZone.getTimeZone as it returns GMT for a zone not supported by the JVM. Use TimeZone.getTimeZone(ZoneId.of(..)) instead, or TimeZone.getTimeZone(..., false).
-
-
-
- org/joda/time/DateTimeZone.toTimeZone:()Ljava/util/TimeZone;
- 1.8
- Avoid DateTimeZone.toTimeZone as it returns GMT for a zone not supported by the JVM. Use TimeZone.getTimeZone(ZoneId.of(dtz.getId())) instead.
-
-
-
- com/esri/core/geometry/ogc/OGCGeometry.equals:(Lcom/esri/core/geometry/ogc/OGCGeometry;)Z
- 1.6
- Prefer OGCGeometry.Equals(OGCGeometry)
-
-
-
- com/esri/core/geometry/ogc/OGCGeometry.equals:(Ljava/lang/Object;)Z
- 1.6
- Prefer OGCGeometry.Equals(OGCGeometry)
-
-
-
- io/airlift/units/DataSize."<init>":(DLio/airlift/units/DataSize$Unit;)V
- 1.8
- Use io.airlift.units.DataSize.of(long, DataSize.Unit)
-
-
-
- io/airlift/units/DataSize.succinctDataSize:(DLio/airlift/units/DataSize$Unit;)Lio/airlift/units/DataSize;
- 1.8
- Use io.airlift.units.DataSize.of(long, DataSize.Unit).succinct() -- Note that succinct conversion only affects toString() results
-
-
-
- io/airlift/units/DataSize.getValue:()D
- 1.8
- Use io.airlift.units.DataSize.toBytes() and Unit.inBytes() for conversion
-
-
-
- io/airlift/units/DataSize.getValue:(Lio/airlift/units/DataSize$Unit;)D
- 1.8
- Use io.airlift.units.DataSize.toBytes() and Unit.inBytes() for conversion
-
-
-
- io/airlift/units/DataSize.roundTo:(Lio/airlift/units/DataSize$Unit;)J
- 1.8
- Method is deprecated for removal
-
-
-
- io/airlift/units/DataSize.convertTo:(Lio/airlift/units/DataSize$Unit;)Lio/airlift/units/DataSize;
- 1.8
- Use io.airlift.units.DataSize.to(DataSize.Unit)
-
-
-
- io/airlift/units/DataSize.convertToMostSuccinctDataSize:()Lio/airlift/units/DataSize;
- 1.8
- Use io.airlift.units.DataSize.succinct()
-
-
-
- io/airlift/testing/Closeables.closeQuietly:([Ljava/io/Closeable;)V
- 1.0
- Use Closeables.closeAll() or Closer.
-
-
-
- com/google/inject/util/Modules.combine:(Ljava/lang/Iterable;)Lcom/google/inject/Module;
- 1.8
- Use io.airlift.configuration.ConfigurationAwareModule.combine
-
-
-
- com/google/inject/util/Modules.combine:([Lcom/google/inject/Module;)Lcom/google/inject/Module;
- 1.8
- Use io.airlift.configuration.ConfigurationAwareModule.combine
-
-
-
- io/jsonwebtoken/Jwts.builder:()Lio/jsonwebtoken/JwtBuilder;
- 1.8
- Use io.trino.server.security.jwt.JwtsUtil or equivalent
-
-
-
- io/jsonwebtoken/Jwts.parserBuilder:()Lio/jsonwebtoken/JwtParserBuilder;
- 1.8
- Use io.trino.server.security.jwt.JwtsUtil or equivalent
-
-
-
- org/openjdk/jol/info/ClassLayout.instanceSize:()J
- 1.8
- Use io.airlift.slice.SizeOf.instanceSize
-
-
-
- org/testng/annotations/BeforeTest
- 1.8
- Prefer org.testng.annotations.BeforeClass
-
-
-
- org/testng/annotations/AfterTest
- 1.8
- Prefer org.testng.annotations.AfterClass
-
-
-
- com/fasterxml/jackson/core/JsonFactory."<init>":()V
- 1.8
- Use io.trino.plugin.base.util.JsonUtils.jsonFactory()
-
-
-
- com/fasterxml/jackson/core/JsonFactoryBuilder."<init>":()V
- 1.8
- Use io.trino.plugin.base.util.JsonUtils.jsonFactoryBuilder() instead
-
-
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-central.maven.org.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-central.maven.org.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-confluent.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-confluent.txt
deleted file mode 100644
index f07e0525..00000000
--- a/trino-oceanbase-plugin/.mvn/rrf/groupId-confluent.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-io.confluent
-org.apache.kafka
-
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-flyway-repo.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-flyway-repo.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-google-maven-central-copy.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-google-maven-central-copy.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-jitpack.io.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-jitpack.io.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-maven-central.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-maven-central.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-ossrh.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-ossrh.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/rrf/groupId-repo.gradle.org.txt b/trino-oceanbase-plugin/.mvn/rrf/groupId-repo.gradle.org.txt
deleted file mode 100644
index e69de29b..00000000
diff --git a/trino-oceanbase-plugin/.mvn/wrapper/maven-wrapper.properties b/trino-oceanbase-plugin/.mvn/wrapper/maven-wrapper.properties
deleted file mode 100644
index d58dfb70..00000000
--- a/trino-oceanbase-plugin/.mvn/wrapper/maven-wrapper.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-wrapperVersion=3.3.2
-distributionType=only-script
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
diff --git a/trino-oceanbase-plugin/LICENSE b/trino-oceanbase-plugin/LICENSE
deleted file mode 100644
index d6456956..00000000
--- a/trino-oceanbase-plugin/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/trino-oceanbase-plugin/README.md b/trino-oceanbase-plugin/README.md
deleted file mode 100644
index a6abbd70..00000000
--- a/trino-oceanbase-plugin/README.md
+++ /dev/null
@@ -1,171 +0,0 @@
-# trino-oceanbase-plugin
-
-## Plugin Overview
-
-This plugin enables **Trino** to connect and query **OceanBase** databases in both **MySQL** and **Oracle modes**. It provides compatibility with OceanBase's SQL syntax, decimal handling, and connection management.
-
-------
-
-## Building the Plugin
-
-### Prerequisites
-
-- Java 17.0.4+ (64-bit)
-
-Run the following command from the project root directory:
-
-```shell
-./mvnw clean package -DskipTests
-```
-
-The plugin JAR file will be generated in the `target` directory.
-
-------
-
-## Running with Docker
-
-### Step 1: Start a Trino Docker Container
-
-```shell
-docker run --name trino -d trinodb/trino:468
-```
-
-------
-
-### Step 2: Create Configuration Files
-
-#### 1. `log.properties`
-
-```text
-io.trino=DEBUG
-```
-
-#### 2. `oceanbase.properties`
-
-```properties
-connector.name=oceanbase
-connection-url=jdbc:oceanbase://localhost:2883/${ENV:USER}
-connection-user=${ENV:USERNAME}
-connection-password=${ENV:PASSWORD}
-oceanbase.compatible-mode=oracle
-oceanbase.auto-reconnect=true
-oceanbase.remarks-reporting.enabled=true
-decimal-mapping=ALLOW_OVERFLOW
-decimal-rounding-mode=HALF_UP
-```
-
-> ⚠️ **Notes**:
->
-> - `${ENV:USER}`, `${ENV:USERNAME}`, and `${ENV:PASSWORD}` are environment variables.
-> - `oceanbase.compatible-mode=oracle` specifies the Oracle mode compatibility.
-
-------
-
-### Step 3: Deploy Plugin and Config Files to Container
-
-Execute the following commands to copy files to the container and restart it:
-
-```shell
-# Add plugin file
-docker cp target/trino-oceanbase-468.jar trino:/data/trino/plugin/oceanbase/
-
-# Add log configuration file
-docker cp log.properties trino:/etc/trino/
-
-# Add OceanBase catalog configuration file
-docker cp oceanbase.properties trino:/etc/trino/catalog/
-
-# Add timezone files (set container timezone to Shanghai)
-docker cp /usr/share/zoneinfo trino:/usr/share/zoneinfo
-docker cp /usr/share/zoneinfo/Asia/Shanghai trino:/etc/localtime
-
-# Restart container
-docker restart trino
-```
-
-------
-
-### Step 4: Verify Plugin Functionality
-
-Use the Trino CLI to verify the plugin is working:
-
-```shell
-# Enter container and launch Trino CLI
-docker exec -it trino trino
-```
-
-```sql
--- Check available catalogs
-SHOW CATALOGS;
-```
-
-If `oceanbase` appears in the output, the plugin is successfully loaded.
-
-------
-
-## Configuration Details
-
-### OceanBase Connector Parameters
-
-| Parameter | Description |
-| ------------------------------------- | ------------------------------------------------------------ |
-| `connector.name` | Specifies the connector type as OceanBase |
-| `connection-url` | OceanBase database connection URL (supports environment variable substitution) |
-| `connection-user` | Database username |
-| `connection-password` | Database password |
-| `oceanbase.compatible-mode` | Compatibility mode (`oracle` or `mysql`) |
-| `oceanbase.auto-reconnect` | Enables automatic reconnection |
-| `oceanbase.remarks-reporting.enabled` | Enables remarks reporting |
-| `decimal-mapping` | Decimal mapping strategy (`ALLOW_OVERFLOW` allows overflow) |
-| `decimal-rounding-mode` | Decimal rounding mode (`HALF_UP` for standard rounding) |
-
-------
-
-## Common Issues
-
-### Q1: Plugin not loaded, error: `Catalog not found`?
-
-**A1: Solutions**
-
-1. Confirm the plugin file is correctly copied to `/data/trino/plugin/oceanbase/`.
-2. Ensure `oceanbase.properties` is placed in `/etc/trino/catalog/`.
-3. Verify the container timezone files are set correctly.
-
-------
-
-### Q2: Connection error: `Connection refused`?
-
-**A2: Solutions**
-
-1. Ensure OceanBase is running and listening on port `2883`.
-2. Check the `connection-url` for correct host and port.
-3. Validate the user permissions allow remote connections.
-
-------
-
-## Project Structure Example
-
-```
-project-root/
-├── log.properties
-├── oceanbase.properties
-├── target/
-│ └── trino-oceanbase-468.jar
-└── README.md
-```
-
-------
-
-## Contributing & Feedback
-We welcome issues and pull requests to improve this project. For questions or suggestions, visit [GitHub Issues](https://github.com/oceanbase/ecology-plugins/issues).
-
-------
-
-## 📄 License
-
-This project is licensed under the [Apache License 2.0](https://github.com/oceanbase/ecology-plugins/LICENSE).
-
-
-------
-
-With this plugin, Trino can seamlessly connect to OceanBase databases (both MySQL and Oracle modes), enabling efficient data querying and analysis.
\ No newline at end of file
diff --git a/trino-oceanbase-plugin/README_CN.md b/trino-oceanbase-plugin/README_CN.md
deleted file mode 100644
index 2cc21908..00000000
--- a/trino-oceanbase-plugin/README_CN.md
+++ /dev/null
@@ -1,171 +0,0 @@
-# trino-oceanbase-plugin
-
-## 插件构建
-
-### 构建要求
-
-- Java 17.0.4+(64位)
-
-从项目根目录运行以下命令进行构建:
-
-```shell
-./mvnw clean package -DskipTests
-```
-
-构建完成后,插件文件应位于 `target` 目录下。
-
-------
-
-## 使用 Docker 运行插件
-
-### 启动 Trino 容器
-
-首先启动一个 Trino Docker 容器:
-
-```shell
-docker run --name trino -d trinodb/trino:468
-```
-
-------
-
-### 创建配置文件
-
-#### 1. 日志配置文件 `log.properties`
-
-```tex
-io.trino=DEBUG
-```
-
-#### 2. OceanBase 连接器配置文件 `oceanbase.properties`
-
-```properties
-connector.name=oceanbase
-connection-url=jdbc:oceanbase://localhost:2883/${ENV:USER}
-connection-user=${ENV:USERNAME}
-connection-password=${ENV:PASSWORD}
-oceanbase.compatible-mode=oracle
-oceanbase.auto-reconnect=true
-oceanbase.remarks-reporting.enabled=true
-decimal-mapping=ALLOW_OVERFLOW
-decimal-rounding-mode=HALF_UP
-```
-
-> ⚠️ 注意:
->
-> - `${ENV:USER}`、`${ENV:USERNAME}` 和 `${ENV:PASSWORD}` 会被环境变量替换。
-> - `oceanbase.compatible-mode=oracle` 表示启用 Oracle 模式兼容性。
-
-------
-
-### 将插件与配置文件部署到容器中
-
-执行以下命令将插件和配置文件复制到容器,并重启容器:
-
-```shell
-# 添加插件文件
-docker cp target/trino-oceanbase-468.jar trino:/data/trino/plugin/oceanbase/
-
-# 添加日志配置文件
-docker cp log.properties trino:/etc/trino/
-
-# 添加 OceanBase 目录配置文件
-docker cp oceanbase.properties trino:/etc/trino/catalog/
-
-# 添加时区文件(设置容器时区为上海)
-docker cp /usr/share/zoneinfo trino:/usr/share/zoneinfo
-docker cp /usr/share/zoneinfo/Asia/Shanghai trino:/etc/localtime
-
-# 重启容器
-docker restart trino
-```
-
-------
-
-### 验证插件是否生效
-
-通过 Trino CLI 执行查询验证插件是否正常工作:
-
-```shell
-# 进入容器并启动 Trino CLI
-docker exec -it trino trino
-```
-
-```sql
--- 查看可用的 Catalog 列表
-SHOW CATALOGS;
-```
-
-如果输出中包含 `oceanbase`,则表示插件已成功加载。
-
-------
-
-## 配置说明
-
-### OceanBase 连接器参数详解
-
-
-| 配置项 | 说明 |
-| ------------------------------------- | ----------------------------------------------- |
-| `connector.name` | 指定连接器类型为 OceanBase |
-| `connection-url` | OceanBase 数据库连接地址(支持环境变量替换) |
-| `connection-user` | 数据库用户名 |
-| `connection-password` | 数据库密码 |
-| `oceanbase.compatible-mode` | 兼容模式(`oracle` 或 `mysql`) |
-| `oceanbase.auto-reconnect` | 是否启用自动重连 |
-| `oceanbase.remarks-reporting.enabled` | 是否启用注释报告功能 |
-| `decimal-mapping` | 十进制映射策略(`ALLOW_OVERFLOW` 表示允许溢出) |
-| `decimal-rounding-mode` | 十进制四舍五入模式(`HALF_UP` 表示四舍五入) |
-
-------
-
-## 常见问题
-
-### Q1: 插件未加载,提示 `Catalog not found`?
-
-**A1: 解决方法**
-
-1. 确认插件文件已正确复制到 `/data/trino/plugin/oceanbase/` 目录。
-2. 检查 `oceanbase.properties` 文件是否已放入 `/etc/trino/catalog/` 目录。
-3. 确保容器时区文件已正确设置(避免因时区问题导致连接失败)。
-
-------
-
-### Q2: 连接 OceanBase 时报错 `Connection refused`?
-
-**A2: 解决方法**
-
-1. 确认 OceanBase 服务已启动并监听 `2883` 端口。
-2. 检查 `connection-url` 中的主机地址和端口是否正确。
-3. 确保 OceanBase 用户权限允许远程连接。
-
-------
-
-## 项目结构示例
-
-```
-project-root/
-├── log.properties
-├── oceanbase.properties
-├── target/
-│ └── trino-oceanbase-468.jar
-└── README.md
-```
-
-------
-
-## 🛠️ 贡献与反馈
-
-欢迎提交 Issues 或 Pull Request,帮助完善插件功能。
-
-- [GitHub Issues](https://github.com/oceanbase/ecology-plugins/issues).
-
- ------
-
-## 📄 授权协议
-
-本项目采用 [Apache License 2.0](https://github.com/oceanbase/ecology-plugins/LICENSE) 协议开源。
-
-------
-
-通过本插件,Trino 可以直接连接 OceanBase 数据库(支持 Oracle/MySQL 模式),实现高效的数据查询与分析。
\ No newline at end of file
diff --git a/trino-oceanbase-plugin/mvnw b/trino-oceanbase-plugin/mvnw
deleted file mode 100755
index 19529ddf..00000000
--- a/trino-oceanbase-plugin/mvnw
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/bin/sh
-# ----------------------------------------------------------------------------
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# ----------------------------------------------------------------------------
-
-# ----------------------------------------------------------------------------
-# Apache Maven Wrapper startup batch script, version 3.3.2
-#
-# Optional ENV vars
-# -----------------
-# JAVA_HOME - location of a JDK home dir, required when download maven via java source
-# MVNW_REPOURL - repo url base for downloading maven distribution
-# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
-# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
-# ----------------------------------------------------------------------------
-
-set -euf
-[ "${MVNW_VERBOSE-}" != debug ] || set -x
-
-# OS specific support.
-native_path() { printf %s\\n "$1"; }
-case "$(uname)" in
-CYGWIN* | MINGW*)
- [ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
- native_path() { cygpath --path --windows "$1"; }
- ;;
-esac
-
-# set JAVACMD and JAVACCMD
-set_java_home() {
- # For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
- if [ -n "${JAVA_HOME-}" ]; then
- if [ -x "$JAVA_HOME/jre/sh/java" ]; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- JAVACCMD="$JAVA_HOME/jre/sh/javac"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- JAVACCMD="$JAVA_HOME/bin/javac"
-
- if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
- echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
- echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
- return 1
- fi
- fi
- else
- JAVACMD="$(
- 'set' +e
- 'unset' -f command 2>/dev/null
- 'command' -v java
- )" || :
- JAVACCMD="$(
- 'set' +e
- 'unset' -f command 2>/dev/null
- 'command' -v javac
- )" || :
-
- if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
- echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
- return 1
- fi
- fi
-}
-
-# hash string like Java String::hashCode
-hash_string() {
- str="${1:-}" h=0
- while [ -n "$str" ]; do
- char="${str%"${str#?}"}"
- h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
- str="${str#?}"
- done
- printf %x\\n $h
-}
-
-verbose() { :; }
-[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
-
-die() {
- printf %s\\n "$1" >&2
- exit 1
-}
-
-trim() {
- # MWRAPPER-139:
- # Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
- # Needed for removing poorly interpreted newline sequences when running in more
- # exotic environments such as mingw bash on Windows.
- printf "%s" "${1}" | tr -d '[:space:]'
-}
-
-# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
-while IFS="=" read -r key value; do
- case "${key-}" in
- distributionUrl) distributionUrl=$(trim "${value-}") ;;
- distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
- esac
-done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
-[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
-
-case "${distributionUrl##*/}" in
-maven-mvnd-*bin.*)
- MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
- case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
- *AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
- :Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
- :Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
- :Linux*x86_64*) distributionPlatform=linux-amd64 ;;
- *)
- echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
- distributionPlatform=linux-amd64
- ;;
- esac
- distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
- ;;
-maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
-*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
-esac
-
-# apply MVNW_REPOURL and calculate MAVEN_HOME
-# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/
-[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
-distributionUrlName="${distributionUrl##*/}"
-distributionUrlNameMain="${distributionUrlName%.*}"
-distributionUrlNameMain="${distributionUrlNameMain%-bin}"
-MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
-MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
-
-exec_maven() {
- unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
- exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
-}
-
-if [ -d "$MAVEN_HOME" ]; then
- verbose "found existing MAVEN_HOME at $MAVEN_HOME"
- exec_maven "$@"
-fi
-
-case "${distributionUrl-}" in
-*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
-*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
-esac
-
-# prepare tmp dir
-if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
- clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
- trap clean HUP INT TERM EXIT
-else
- die "cannot create temp dir"
-fi
-
-mkdir -p -- "${MAVEN_HOME%/*}"
-
-# Download and Install Apache Maven
-verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
-verbose "Downloading from: $distributionUrl"
-verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
-
-# select .zip or .tar.gz
-if ! command -v unzip >/dev/null; then
- distributionUrl="${distributionUrl%.zip}.tar.gz"
- distributionUrlName="${distributionUrl##*/}"
-fi
-
-# verbose opt
-__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
-[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
-
-# normalize http auth
-case "${MVNW_PASSWORD:+has-password}" in
-'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
-has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
-esac
-
-if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
- verbose "Found wget ... using wget"
- wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
-elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
- verbose "Found curl ... using curl"
- curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
-elif set_java_home; then
- verbose "Falling back to use Java to download"
- javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
- targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
- cat >"$javaSource" <<-END
- public class Downloader extends java.net.Authenticator
- {
- protected java.net.PasswordAuthentication getPasswordAuthentication()
- {
- return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
- }
- public static void main( String[] args ) throws Exception
- {
- setDefault( new Downloader() );
- java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
- }
- }
- END
- # For Cygwin/MinGW, switch paths to Windows format before running javac and java
- verbose " - Compiling Downloader.java ..."
- "$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
- verbose " - Running Downloader.java ..."
- "$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
-fi
-
-# If specified, validate the SHA-256 sum of the Maven distribution zip file
-if [ -n "${distributionSha256Sum-}" ]; then
- distributionSha256Result=false
- if [ "$MVN_CMD" = mvnd.sh ]; then
- echo "Checksum validation is not supported for maven-mvnd." >&2
- echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
- exit 1
- elif command -v sha256sum >/dev/null; then
- if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
- distributionSha256Result=true
- fi
- elif command -v shasum >/dev/null; then
- if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
- distributionSha256Result=true
- fi
- else
- echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
- echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
- exit 1
- fi
- if [ $distributionSha256Result = false ]; then
- echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
- echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
- exit 1
- fi
-fi
-
-# unzip and move
-if command -v unzip >/dev/null; then
- unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
-else
- tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
-fi
-printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
-mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
-
-clean || :
-exec_maven "$@"
diff --git a/trino-oceanbase-plugin/pom.xml b/trino-oceanbase-plugin/pom.xml
deleted file mode 100644
index 4b9f1a0a..00000000
--- a/trino-oceanbase-plugin/pom.xml
+++ /dev/null
@@ -1,117 +0,0 @@
-
-
- 4.0.0
-
-
- io.trino
- trino-root
- 468
-
-
- trino-oceanbase-plugin
- trino-plugin
- Trino - OceanBase Connector
-
-
- 2.4.11
-
-
-
-
- com.google.guava
- guava
-
-
-
- com.google.inject
- guice
-
-
-
- com.oceanbase
- oceanbase-client
- ${oceanbase.client.version}
-
-
-
- io.airlift
- configuration
-
-
-
- io.airlift
- units
-
-
-
- io.trino
- trino-base-jdbc
-
-
-
- io.trino
- trino-plugin-toolkit
-
-
-
- jakarta.annotation
- jakarta.annotation-api
-
- 3.0.0
-
-
-
- jakarta.validation
- jakarta.validation-api
-
-
-
- com.fasterxml.jackson.core
- jackson-annotations
- provided
-
-
-
- io.airlift
- slice
- provided
-
-
-
- io.opentelemetry
- opentelemetry-api
- provided
-
-
-
- io.opentelemetry
- opentelemetry-context
- provided
-
-
-
- io.trino
- trino-spi
- provided
-
-
-
- org.openjdk.jol
- jol-core
- provided
-
-
-
- com.google.errorprone
- error_prone_annotations
- runtime
-
-
-
- io.airlift
- log-manager
- runtime
-
-
-
-
diff --git a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseClient.java b/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseClient.java
deleted file mode 100644
index 1adcc147..00000000
--- a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseClient.java
+++ /dev/null
@@ -1,962 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.oceanbase;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.inject.Inject;
-import io.trino.plugin.base.aggregation.AggregateFunctionRewriter;
-import io.trino.plugin.base.aggregation.AggregateFunctionRule;
-import io.trino.plugin.base.expression.ConnectorExpressionRewriter;
-import io.trino.plugin.base.mapping.IdentifierMapping;
-import io.trino.plugin.jdbc.BaseJdbcClient;
-import io.trino.plugin.jdbc.BaseJdbcConfig;
-import io.trino.plugin.jdbc.BooleanWriteFunction;
-import io.trino.plugin.jdbc.ColumnMapping;
-import io.trino.plugin.jdbc.ConnectionFactory;
-import io.trino.plugin.jdbc.JdbcColumnHandle;
-import io.trino.plugin.jdbc.JdbcExpression;
-import io.trino.plugin.jdbc.JdbcJoinCondition;
-import io.trino.plugin.jdbc.JdbcSortItem;
-import io.trino.plugin.jdbc.JdbcTableHandle;
-import io.trino.plugin.jdbc.JdbcTypeHandle;
-import io.trino.plugin.jdbc.LongReadFunction;
-import io.trino.plugin.jdbc.LongWriteFunction;
-import io.trino.plugin.jdbc.ObjectReadFunction;
-import io.trino.plugin.jdbc.ObjectWriteFunction;
-import io.trino.plugin.jdbc.QueryBuilder;
-import io.trino.plugin.jdbc.RemoteTableName;
-import io.trino.plugin.jdbc.StandardColumnMappings;
-import io.trino.plugin.jdbc.WriteMapping;
-import io.trino.plugin.jdbc.aggregation.ImplementAvgDecimal;
-import io.trino.plugin.jdbc.aggregation.ImplementAvgFloatingPoint;
-import io.trino.plugin.jdbc.aggregation.ImplementCount;
-import io.trino.plugin.jdbc.aggregation.ImplementCountAll;
-import io.trino.plugin.jdbc.aggregation.ImplementCountDistinct;
-import io.trino.plugin.jdbc.aggregation.ImplementCovariancePop;
-import io.trino.plugin.jdbc.aggregation.ImplementCovarianceSamp;
-import io.trino.plugin.jdbc.aggregation.ImplementMinMax;
-import io.trino.plugin.jdbc.aggregation.ImplementStddevPop;
-import io.trino.plugin.jdbc.aggregation.ImplementStddevSamp;
-import io.trino.plugin.jdbc.aggregation.ImplementSum;
-import io.trino.plugin.jdbc.aggregation.ImplementVariancePop;
-import io.trino.plugin.jdbc.aggregation.ImplementVarianceSamp;
-import io.trino.plugin.jdbc.expression.JdbcConnectorExpressionRewriterBuilder;
-import io.trino.plugin.jdbc.expression.ParameterizedExpression;
-import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
-import io.trino.spi.TrinoException;
-import io.trino.spi.connector.AggregateFunction;
-import io.trino.spi.connector.ColumnHandle;
-import io.trino.spi.connector.ColumnMetadata;
-import io.trino.spi.connector.ConnectorSession;
-import io.trino.spi.connector.ConnectorTableMetadata;
-import io.trino.spi.connector.SchemaTableName;
-import io.trino.spi.expression.ConnectorExpression;
-import io.trino.spi.type.CharType;
-import io.trino.spi.type.DecimalType;
-import io.trino.spi.type.Decimals;
-import io.trino.spi.type.LongTimestamp;
-import io.trino.spi.type.LongTimestampWithTimeZone;
-import io.trino.spi.type.StandardTypes;
-import io.trino.spi.type.TimeType;
-import io.trino.spi.type.TimestampType;
-import io.trino.spi.type.TimestampWithTimeZoneType;
-import io.trino.spi.type.Type;
-import io.trino.spi.type.TypeManager;
-import io.trino.spi.type.TypeSignature;
-import io.trino.spi.type.VarcharType;
-import jakarta.annotation.Nullable;
-
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.sql.Types;
-import java.time.Instant;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.time.temporal.ChronoField;
-import java.util.Collection;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.function.BiFunction;
-import java.util.stream.Stream;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Strings.emptyToNull;
-import static com.google.common.base.Strings.isNullOrEmpty;
-import static com.google.common.base.Verify.verify;
-import static io.airlift.slice.Slices.utf8Slice;
-import static io.trino.plugin.base.util.JsonTypeUtil.jsonParse;
-import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalDefaultScale;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRounding;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRoundingMode;
-import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
-import static io.trino.plugin.jdbc.PredicatePushdownController.DISABLE_PUSHDOWN;
-import static io.trino.plugin.jdbc.PredicatePushdownController.FULL_PUSHDOWN;
-import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.dateReadFunctionUsingLocalDate;
-import static io.trino.plugin.jdbc.StandardColumnMappings.defaultCharColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.defaultVarcharColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.fromLongTrinoTimestamp;
-import static io.trino.plugin.jdbc.StandardColumnMappings.fromTrinoTimestamp;
-import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalReadFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.longTimestampReadFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.timeWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.timestampReadFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.toTrinoTimestamp;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
-import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
-import static io.trino.spi.StandardErrorCode.SCHEMA_NOT_EMPTY;
-import static io.trino.spi.type.BigintType.BIGINT;
-import static io.trino.spi.type.BooleanType.BOOLEAN;
-import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
-import static io.trino.spi.type.DateTimeEncoding.unpackMillisUtc;
-import static io.trino.spi.type.DateType.DATE;
-import static io.trino.spi.type.DecimalType.createDecimalType;
-import static io.trino.spi.type.DoubleType.DOUBLE;
-import static io.trino.spi.type.IntegerType.INTEGER;
-import static io.trino.spi.type.RealType.REAL;
-import static io.trino.spi.type.SmallintType.SMALLINT;
-import static io.trino.spi.type.TimeType.createTimeType;
-import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
-import static io.trino.spi.type.TimestampType.TIMESTAMP_SECONDS;
-import static io.trino.spi.type.TimestampType.createTimestampType;
-import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType;
-import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_SECOND;
-import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
-import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
-import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
-import static io.trino.spi.type.TinyintType.TINYINT;
-import static io.trino.spi.type.VarbinaryType.VARBINARY;
-import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
-import static java.lang.Float.floatToRawIntBits;
-import static java.lang.Math.floorDiv;
-import static java.lang.Math.floorMod;
-import static java.lang.Math.max;
-import static java.lang.Math.min;
-import static java.lang.String.format;
-import static java.lang.String.join;
-import static java.time.format.DateTimeFormatter.ISO_DATE;
-import static java.util.Locale.ENGLISH;
-import static java.util.concurrent.TimeUnit.DAYS;
-import static java.util.stream.Collectors.joining;
-
-public class OceanBaseClient
- extends BaseJdbcClient
-{
- private static final int TYPE_BINARY_FLOAT = 100;
- private static final int TYPE_BINARY_DOUBLE = 101;
-
- private static final int ZERO_PRECISION_TIME_COLUMN_SIZE = 10;
- private static final int ZERO_PRECISION_TIMESTAMP_COLUMN_SIZE = 19;
- private static final int MYSQL_MODE_MAX_TIMESTAMP_PRECISION = 6;
- private static final int ORACLE_MODE_MAX_TIMESTAMP_PRECISION = 9;
-
- private static final int BYTES_PER_CHAR = 4;
- private static final int MYSQL_MODE_CHAR_MAX_LENGTH = 256;
- private static final int ORACLE_MODE_CHAR_MAX_BYTES = 2000;
- private static final int ORACLE_MODE_CHAR_MAX_LENGTH = ORACLE_MODE_CHAR_MAX_BYTES / BYTES_PER_CHAR;
- private static final int VARCHAR2_MAX_BYTES = 32767;
- private static final int VARCHAR2_MAX_LENGTH = VARCHAR2_MAX_BYTES / BYTES_PER_CHAR;
- private static final int TINYTEXT_MAX_BYTES = 255;
- private static final int TEXT_MAX_BYTES = 65535;
- private static final int MEDIUMTEXT_MAX_BYTES = 16777215;
-
- private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd");
- private static final DateTimeFormatter TIMESTAMP_SECONDS_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ss");
- private static final DateTimeFormatter TIMESTAMP_NANO_OPTIONAL_FORMATTER = new DateTimeFormatterBuilder().appendPattern("uuuu-MM-dd HH:mm:ss").optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true).optionalEnd().toFormatter();
-
- private static final Set<String> INTERNAL_DATABASES = ImmutableSet.<String>builder().add("information_schema").add("mysql").add("oceanbase").build();
-
- private static final Set<String> INTERNAL_SCHEMAS = ImmutableSet.<String>builder().add("SYS").add("LBACSYS").add("ORAAUDITOR").build();
-
- private final OceanBaseCompatibleMode compatibleMode;
- private final Type jsonType;
- private final ConnectorExpressionRewriter<ParameterizedExpression> connectorExpressionRewriter;
- private final AggregateFunctionRewriter<JdbcExpression, ?> aggregateFunctionRewriter;
-
- @Inject
- public OceanBaseClient(BaseJdbcConfig config, OceanBaseConfig obConfig, ConnectionFactory connectionFactory, QueryBuilder queryBuilder, TypeManager typeManager, IdentifierMapping identifierMapping, RemoteQueryModifier queryModifier)
- {
- super(obConfig.getCompatibleMode().isMySQLMode() ? "`" : "\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, queryModifier, true);
-
- this.compatibleMode = obConfig.getCompatibleMode();
-
- this.jsonType = typeManager.getType(new TypeSignature(StandardTypes.JSON));
-
- this.connectorExpressionRewriter = JdbcConnectorExpressionRewriterBuilder.newBuilder().addStandardRules(this::quoted).withTypeClass("numeric_type", ImmutableSet.of("tinyint", "smallint", "integer", "bigint", "decimal", "real", "double")).map("$equal(left: numeric_type, right: numeric_type)").to("left = right").map("$not_equal(left: numeric_type, right: numeric_type)").to("left <> right").map("$less_than(left: numeric_type, right: numeric_type)").to("left < right").map("$less_than_or_equal(left: numeric_type, right: numeric_type)").to("left <= right").map("$greater_than(left: numeric_type, right: numeric_type)").to("left > right").map("$greater_than_or_equal(left: numeric_type, right: numeric_type)").to("left >= right").build();
-
- JdbcTypeHandle bigintTypeHandle = new JdbcTypeHandle(Types.BIGINT, Optional.of("bigint"), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
- this.aggregateFunctionRewriter = new AggregateFunctionRewriter<>(connectorExpressionRewriter, ImmutableSet.<AggregateFunctionRule<JdbcExpression, ParameterizedExpression>>builder().add(new ImplementCountAll(bigintTypeHandle)).add(new ImplementCount(bigintTypeHandle)).add(new ImplementCountDistinct(bigintTypeHandle, true)).add(new ImplementMinMax(true)).add(new ImplementSum(this::toTypeHandle)).add(new ImplementAvgFloatingPoint()).add(new ImplementAvgDecimal()).add(new ImplementStddevSamp()).add(new ImplementStddevPop()).add(new ImplementVarianceSamp()).add(new ImplementVariancePop()).add(new ImplementCovarianceSamp()).add(new ImplementCovariancePop()).build());
- }
-
- private Optional<JdbcTypeHandle> toTypeHandle(DecimalType decimalType)
- {
- return Optional.of(new JdbcTypeHandle(Types.NUMERIC, Optional.of("decimal"), Optional.of(decimalType.getPrecision()), Optional.of(decimalType.getScale()), Optional.empty(), Optional.empty()));
- }
-
- @Override
- protected String quoted(@Nullable String catalog, @Nullable String schema, String table)
- {
- StringBuilder sb = new StringBuilder();
- if (!isNullOrEmpty(schema)) {
- sb.append(quoted(schema)).append(".");
- }
- else if (!isNullOrEmpty(catalog)) {
- sb.append(quoted(catalog)).append(".");
- }
- sb.append(quoted(table));
- return sb.toString();
- }
-
- @Override
- public Optional<ParameterizedExpression> convertPredicate(ConnectorSession session, ConnectorExpression expression, Map<String, ColumnHandle> assignments)
- {
- return connectorExpressionRewriter.rewrite(session, expression, assignments);
- }
-
- @Override
- public Optional<JdbcExpression> implementAggregation(ConnectorSession session, AggregateFunction aggregate, Map<String, ColumnHandle> assignments)
- {
- return aggregateFunctionRewriter.rewrite(session, aggregate, assignments);
- }
-
- @Override
- public PreparedStatement getPreparedStatement(Connection connection, String sql, Optional<Integer> columnCount)
- throws SQLException
- {
- PreparedStatement statement = connection.prepareStatement(sql);
- if (columnCount.isPresent()) {
- statement.setFetchSize(max(100_000 / columnCount.get(), 1_000));
- }
- return statement;
- }
-
- @Override
- protected String getTableSchemaName(ResultSet resultSet)
- throws SQLException
- {
- return compatibleMode.isMySQLMode() ? resultSet.getString("TABLE_CAT") : resultSet.getString("TABLE_SCHEM");
- }
-
- @Override
- public Collection<String> listSchemas(Connection connection)
- {
- try {
- DatabaseMetaData databaseMetaData = connection.getMetaData();
- try (ResultSet resultSet = compatibleMode.isMySQLMode() ? databaseMetaData.getCatalogs() : databaseMetaData.getSchemas()) {
- ImmutableSet.Builder<String> schemaNames = ImmutableSet.builder();
- while (resultSet.next()) {
- String schemaName = getTableSchemaName(resultSet);
- // skip internal schemas
- if (filterSchema(schemaName)) {
- schemaNames.add(schemaName);
- }
- }
- return schemaNames.build();
- }
- }
- catch (SQLException e) {
- throw new TrinoException(JDBC_ERROR, e);
- }
- }
-
- @Override
- protected boolean filterSchema(String schemaName)
- {
- return compatibleMode.isMySQLMode() ? !INTERNAL_DATABASES.contains(schemaName.toLowerCase(Locale.ENGLISH)) : !INTERNAL_SCHEMAS.contains(schemaName.toUpperCase(Locale.ENGLISH));
- }
-
- @Override
- public void createSchema(ConnectorSession session, String schemaName)
- {
- if (!compatibleMode.isMySQLMode()) {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support creating schemas on Oracle mode");
- }
- super.createSchema(session, schemaName);
- }
-
- @Override
- public void dropSchema(ConnectorSession session, String schemaName, boolean cascade)
- {
- if (!compatibleMode.isMySQLMode()) {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping schemas on Oracle mode");
- }
- super.dropSchema(session, schemaName, cascade);
- }
-
- @Override
- protected void dropSchema(ConnectorSession session, Connection connection, String remoteSchemaName, boolean cascade)
- throws SQLException
- {
- if (!cascade) {
- try (ResultSet tables = getTables(connection, Optional.of(remoteSchemaName), Optional.empty())) {
- if (tables.next()) {
- throw new TrinoException(SCHEMA_NOT_EMPTY, "Cannot drop non-empty schema '%s'".formatted(remoteSchemaName));
- }
- }
- }
- execute(session, connection, "DROP SCHEMA " + quoted(remoteSchemaName));
- }
-
- @Override
- public void renameSchema(ConnectorSession session, String schemaName, String newSchemaName)
- {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming schemas");
- }
-
- @Override
- public ResultSet getTables(Connection connection, Optional<String> remoteSchemaName, Optional<String> remoteTableName)
- throws SQLException
- {
- DatabaseMetaData metadata = connection.getMetaData();
- String schemaName = escapeObjectNameForMetadataQuery(remoteSchemaName, metadata.getSearchStringEscape()).orElse(null);
- return metadata.getTables(compatibleMode.isMySQLMode() ? schemaName : null, compatibleMode.isMySQLMode() ? null : schemaName, escapeObjectNameForMetadataQuery(remoteTableName, metadata.getSearchStringEscape()).orElse(null), getTableTypes().map(types -> types.toArray(String[]::new)).orElse(null));
- }
-
- @Override
- public Optional<String> getTableComment(ResultSet resultSet)
- throws SQLException
- {
- return Optional.ofNullable(emptyToNull(resultSet.getString("REMARKS")));
- }
-
- @Override
- protected String getColumnDefinitionSql(ConnectorSession session, ColumnMetadata column, String columnName)
- {
- if (column.getComment() != null) {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support creating tables with column comment");
- }
-
- return "%s %s %s".formatted(quoted(columnName), toWriteMapping(session, column.getType()).getDataType(), column.isNullable() ? compatibleMode.isMySQLMode() ? "NULL" : "" : "NOT NULL");
- }
-
- @Override
- protected List<String> createTableSqls(RemoteTableName remoteTableName, List<String> columns, ConnectorTableMetadata tableMetadata)
- {
- checkArgument(tableMetadata.getProperties().isEmpty(), "Unsupported table properties: %s", tableMetadata.getProperties());
- ImmutableList.Builder<String> createTableSqlsBuilder = ImmutableList.builder();
- createTableSqlsBuilder.add(format("CREATE TABLE %s (%s)", quoted(remoteTableName), join(", ", columns)));
- Optional<String> tableComment = tableMetadata.getComment();
- if (tableComment.isPresent()) {
- createTableSqlsBuilder.add(buildTableCommentSql(remoteTableName, tableComment));
- }
- return createTableSqlsBuilder.build();
- }
-
- @Override
- public void setTableComment(ConnectorSession session, JdbcTableHandle handle, Optional<String> comment)
- {
- execute(session, buildTableCommentSql(handle.asPlainTable().getRemoteTableName(), comment));
- }
-
- private String buildTableCommentSql(RemoteTableName remoteTableName, Optional<String> comment)
- {
- return compatibleMode.isMySQLMode() ? format("COMMENT %s", varcharLiteral(comment.orElse(""))) : format("COMMENT ON TABLE %s IS %s", quoted(remoteTableName), varcharLiteral(comment.orElse("")));
- }
-
- @Override
- public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type)
- {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types");
- }
-
- @Override
- public void renameTable(ConnectorSession session, JdbcTableHandle handle, SchemaTableName newTableName)
- {
- RemoteTableName remoteTableName = handle.asPlainTable().getRemoteTableName();
- String catalogName = remoteTableName.getCatalogName().orElse(null);
- String schemaName = remoteTableName.getSchemaName().orElse(null);
- if (compatibleMode.isMySQLMode()) {
- verify(schemaName == null);
- renameTable(session, null, catalogName, remoteTableName.getTableName(), newTableName);
- }
- else {
- renameTable(session, catalogName, schemaName, remoteTableName.getTableName(), newTableName);
- }
- }
-
- @Override
- protected void renameTable(ConnectorSession session, Connection connection, String catalogName, String remoteSchemaName, String remoteTableName, String newRemoteSchemaName, String newRemoteTableName)
- throws SQLException
- {
- if (!remoteSchemaName.equals(newRemoteSchemaName)) {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables across schemas");
- }
-
- execute(session, connection, format("ALTER TABLE %s RENAME TO %s", quoted(catalogName, remoteSchemaName, remoteTableName), quoted(compatibleMode.isMySQLMode() ? newRemoteTableName : newRemoteTableName.toUpperCase(ENGLISH))));
- }
-
- @Override
- protected void renameColumn(ConnectorSession session, Connection connection, RemoteTableName remoteTableName, String remoteColumnName, String newRemoteColumnName)
- throws SQLException
- {
- execute(session, connection, format("ALTER TABLE %s RENAME COLUMN %s TO %s", quoted(remoteTableName.getCatalogName().orElse(null), remoteTableName.getSchemaName().orElse(null), remoteTableName.getTableName()), quoted(remoteColumnName), quoted(newRemoteColumnName)));
- }
-
- @Override
- public boolean supportsAggregationPushdown(ConnectorSession session, JdbcTableHandle table, List<AggregateFunction> aggregates, Map<String, ColumnHandle> assignments, List<List<ColumnHandle>> groupingSets)
- {
- return preventTextualTypeAggregationPushdown(groupingSets);
- }
-
- @Override
- public boolean supportsTopN(ConnectorSession session, JdbcTableHandle handle, List<JdbcSortItem> sortOrder)
- {
- if (!compatibleMode.isMySQLMode()) {
- return false;
- }
- for (JdbcSortItem sortItem : sortOrder) {
- Type sortItemType = sortItem.column().getColumnType();
- if (sortItemType instanceof CharType || sortItemType instanceof VarcharType) {
- return false;
- }
- }
- return true;
- }
-
- @Override
- protected Optional<TopNFunction> topNFunction()
- {
- return compatibleMode.isMySQLMode() ? Optional.of((query, sortItems, limit) -> {
- String orderBy = sortItems.stream().flatMap(sortItem -> {
- String ordering = sortItem.sortOrder().isAscending() ? "ASC" : "DESC";
- String columnSorting = format("%s %s", quoted(sortItem.column().getColumnName()), ordering);
- return switch (sortItem.sortOrder()) {
- case ASC_NULLS_FIRST, DESC_NULLS_LAST -> Stream.of(columnSorting);
- case ASC_NULLS_LAST -> Stream.of(format("ISNULL(%s) ASC", quoted(sortItem.column().getColumnName())), columnSorting);
- case DESC_NULLS_FIRST -> Stream.of(format("ISNULL(%s) DESC", quoted(sortItem.column().getColumnName())), columnSorting);
- };
- }).collect(joining(", "));
- return format("%s ORDER BY %s LIMIT %s", query, orderBy, limit);
- }) : Optional.empty();
- }
-
- @Override
- public boolean isTopNGuaranteed(ConnectorSession session)
- {
- if (compatibleMode.isMySQLMode()) {
- return true;
- }
- throw new UnsupportedOperationException("isTopNGuaranteed is not implemented on Oracle mode");
- }
-
- @Override
- protected Optional<BiFunction<String, Long, String>> limitFunction()
- {
- return Optional.of((sql, limit) -> compatibleMode.isMySQLMode() ? sql + " LIMIT " + limit : format("SELECT * FROM (%s) WHERE ROWNUM <= %s", sql, limit));
- }
-
- @Override
- public boolean isLimitGuaranteed(ConnectorSession session)
- {
- return true;
- }
-
- @Override
- protected boolean isSupportedJoinCondition(ConnectorSession session, JdbcJoinCondition joinCondition)
- {
- return !compatibleMode.isMySQLMode() || Stream.of(joinCondition.getLeftColumn(), joinCondition.getRightColumn()).map(JdbcColumnHandle::getColumnType).noneMatch(type -> type instanceof CharType || type instanceof VarcharType);
- }
-
- @Override
- public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
- {
- String jdbcTypeName = typeHandle.jdbcTypeName().orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
-
- Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
- if (mapping.isPresent()) {
- return mapping;
- }
-
- return switch (jdbcTypeName.toLowerCase(ENGLISH)) {
- case "tinyint unsigned" -> Optional.of(smallintColumnMapping());
- case "smallint unsigned", "year" -> Optional.of(integerColumnMapping());
- case "int unsigned" -> Optional.of(bigintColumnMapping());
- case "bigint unsigned" -> Optional.of(StandardColumnMappings.decimalColumnMapping(createDecimalType(20)));
- case "date" -> Optional.of(dateColumnMapping());
- case "json" -> Optional.of(jsonColumnMapping());
- case "enum", "set" -> Optional.of(defaultVarcharColumnMapping(typeHandle.columnSize().orElse(MYSQL_MODE_CHAR_MAX_LENGTH), false));
- case "datetime" -> Optional.of(timestampColumnMapping(typeHandle.requiredColumnSize(), typeHandle.decimalDigits().orElse(0)));
- default -> switch (typeHandle.jdbcType()) {
- case Types.BIT -> Optional.of(booleanColumnMapping());
- case Types.TINYINT -> Optional.of(tinyintColumnMapping());
- case Types.SMALLINT -> Optional.of(smallintColumnMapping());
- case Types.INTEGER -> Optional.of(integerColumnMapping());
- case Types.BIGINT -> Optional.of(bigintColumnMapping());
- case Types.REAL, TYPE_BINARY_FLOAT -> Optional.of(floatColumnMapping());
- case Types.DOUBLE, TYPE_BINARY_DOUBLE -> Optional.of(doubleColumnMapping());
- case Types.NUMERIC, Types.DECIMAL -> Optional.ofNullable(numericColumnMapping(session, typeHandle));
- case Types.CHAR, Types.NCHAR -> Optional.of(defaultCharColumnMapping(typeHandle.requiredColumnSize(), false));
- case Types.VARCHAR, Types.NVARCHAR, Types.LONGVARCHAR -> Optional.of(defaultVarcharColumnMapping(typeHandle.requiredColumnSize(), false));
- case Types.CLOB -> Optional.of(clobColumnMapping());
- case Types.BINARY, Types.VARBINARY, Types.LONGVARBINARY, Types.BLOB -> Optional.of(varbinaryColumnMapping());
- case Types.TIME -> Optional.of(timeColumnMapping(typeHandle.requiredColumnSize()));
- case Types.TIMESTAMP ->
- compatibleMode.isMySQLMode() ? Optional.of(timestampWithTimeZoneColumnMapping(typeHandle.requiredColumnSize(), typeHandle.decimalDigits().orElse(0))) : Optional.of(timestampColumnMapping(typeHandle.requiredColumnSize(), typeHandle.decimalDigits().orElse(0)));
- default -> Optional.empty();
- };
- };
- }
-
- private ColumnMapping dateColumnMapping()
- {
- return compatibleMode.isMySQLMode() ? ColumnMapping.longMapping(DATE, dateReadFunctionUsingLocalDate(), dateWriteFunction(), FULL_PUSHDOWN) : ColumnMapping.longMapping(TIMESTAMP_SECONDS, dateToTimestampReadFunction(), timestampToDateWriteFunction(), FULL_PUSHDOWN);
- }
-
- private LongReadFunction dateToTimestampReadFunction()
- {
- return (resultSet, columnIndex) -> {
- LocalDateTime timestamp = resultSet.getObject(columnIndex, LocalDateTime.class);
- // Adjust the year for B.C. dates, because Oracle mode returns the year +1 unless the value is converted to a string on the server side
- if (timestamp.getYear() <= 0) {
- timestamp = timestamp.minusYears(1);
- }
- return toTrinoTimestamp(TIMESTAMP_SECONDS, timestamp);
- };
- }
-
- private String getToDateExpression()
- {
- return compatibleMode.isMySQLMode() ? "CAST(? AS DATE)" : "TO_DATE(?, 'SYYYY-MM-DD HH24:MI:SS')";
- }
-
- private LongWriteFunction dateWriteFunction()
- {
- return new LongWriteFunction()
- {
- @Override
- public String getBindExpression()
- {
- return getToDateExpression();
- }
-
- @Override
- public void set(PreparedStatement statement, int index, long value)
- throws SQLException
- {
- if (compatibleMode.isMySQLMode()) {
- statement.setString(index, LocalDate.ofEpochDay(value).format(ISO_DATE));
- }
- else {
- long utcMillis = DAYS.toMillis(value);
- LocalDateTime date = LocalDateTime.from(Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC));
- statement.setString(index, DATE_FORMATTER.format(date));
- }
- }
-
- @Override
- public void setNull(PreparedStatement statement, int index)
- throws SQLException
- {
- if (compatibleMode.isMySQLMode()) {
- statement.setObject(index, null);
- }
- else {
- statement.setNull(index, Types.VARCHAR);
- }
- }
- };
- }
-
- private LongWriteFunction timestampToDateWriteFunction()
- {
- return new LongWriteFunction()
- {
- @Override
- public String getBindExpression()
- {
- return getToDateExpression();
- }
-
- @Override
- public void set(PreparedStatement statement, int index, long value)
- throws SQLException
- {
- long epochSecond = floorDiv(value, MICROSECONDS_PER_SECOND);
- int microsOfSecond = floorMod(value, MICROSECONDS_PER_SECOND);
- verify(microsOfSecond == 0, "Micros of second must be zero: '%s'", value);
- LocalDateTime localDateTime = LocalDateTime.ofEpochSecond(epochSecond, 0, ZoneOffset.UTC);
- statement.setString(index, TIMESTAMP_SECONDS_FORMATTER.format(localDateTime));
- }
-
- @Override
- public void setNull(PreparedStatement statement, int index)
- throws SQLException
- {
- statement.setNull(index, Types.VARCHAR);
- }
- };
- }
-
- private ColumnMapping jsonColumnMapping()
- {
- return ColumnMapping.sliceMapping(jsonType, (resultSet, columnIndex) -> jsonParse(utf8Slice(resultSet.getString(columnIndex))), varcharWriteFunction(), DISABLE_PUSHDOWN);
- }
-
- private ColumnMapping floatColumnMapping()
- {
- return ColumnMapping.longMapping(REAL, (resultSet, columnIndex) -> floatToRawIntBits(resultSet.getFloat(columnIndex)), realWriteFunction(), DISABLE_PUSHDOWN);
- }
-
- private ColumnMapping numericColumnMapping(ConnectorSession session, JdbcTypeHandle typeHandle)
- {
- int precision = typeHandle.requiredColumnSize();
- if (precision == 0) {
- if (getDecimalRounding(session) == ALLOW_OVERFLOW) {
- DecimalType decimalType = createDecimalType(Decimals.MAX_PRECISION, getDecimalDefaultScale(session));
- return ColumnMapping.objectMapping(decimalType, longDecimalReadFunction(decimalType, getDecimalRoundingMode(session)), longDecimalWriteFunction(decimalType), FULL_PUSHDOWN);
- }
- return null;
- }
-
- int decimalDigits = typeHandle.requiredDecimalDigits();
- int width = precision + max(-decimalDigits, 0);
- int scale = max(decimalDigits, 0);
-
- if (width <= Decimals.MAX_PRECISION) {
- return StandardColumnMappings.decimalColumnMapping(createDecimalType(width, scale));
- }
- return getDecimalRounding(session) == ALLOW_OVERFLOW ? StandardColumnMappings.decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale), getDecimalRoundingMode(session)) : null;
- }
-
- private ColumnMapping clobColumnMapping()
- {
- return ColumnMapping.sliceMapping(createUnboundedVarcharType(), (resultSet, columnIndex) -> utf8Slice(resultSet.getString(columnIndex)), varcharWriteFunction(), DISABLE_PUSHDOWN);
- }
-
- private int getMaxTimestampPrecision()
- {
- return compatibleMode.isMySQLMode() ? MYSQL_MODE_MAX_TIMESTAMP_PRECISION : ORACLE_MODE_MAX_TIMESTAMP_PRECISION;
- }
-
- private void verifyTimestampPrecision(int precision)
- {
- verify(1 <= precision && precision <= getMaxTimestampPrecision(), "Unexpected timestamp precision %s", precision);
- }
-
- private int getTimePrecision(int timeColumnSize)
- {
- if (timeColumnSize == ZERO_PRECISION_TIME_COLUMN_SIZE) {
- return 0;
- }
- int timePrecision = timeColumnSize - ZERO_PRECISION_TIME_COLUMN_SIZE - 1;
- verifyTimestampPrecision(timePrecision);
- return timePrecision;
- }
-
- private int getTimestampPrecision(int timestampColumnSize, int scale)
- {
- if (timestampColumnSize < ZERO_PRECISION_TIMESTAMP_COLUMN_SIZE) {
- verifyTimestampPrecision(scale);
- return scale;
- }
- else if (timestampColumnSize == ZERO_PRECISION_TIMESTAMP_COLUMN_SIZE) {
- return 0;
- }
- else {
- int timestampPrecision = timestampColumnSize - ZERO_PRECISION_TIMESTAMP_COLUMN_SIZE - 1;
- verifyTimestampPrecision(timestampPrecision);
- return timestampPrecision;
- }
- }
-
- private ColumnMapping timeColumnMapping(int timeColumnSize)
- {
- TimeType timeType = createTimeType(getTimePrecision(timeColumnSize));
- return StandardColumnMappings.timeColumnMapping(timeType);
- }
-
- private String getToTimestampExpression(int precision)
- {
- if (precision == 0) {
- return "TO_TIMESTAMP(?, 'SYYYY-MM-DD HH24:MI:SS')";
- }
- if (precision <= 2) {
- return "TO_TIMESTAMP(?, 'SYYYY-MM-DD HH24:MI:SS.FF')";
- }
- return format("TO_TIMESTAMP(?, 'SYYYY-MM-DD HH24:MI:SS.FF%d')", precision);
- }
-
- private ColumnMapping timestampColumnMapping(int timestampColumnSize, int scale)
- {
- TimestampType timestampType = createTimestampType(getTimestampPrecision(timestampColumnSize, scale));
- if (timestampType.getPrecision() <= TimestampType.MAX_SHORT_PRECISION) {
- return ColumnMapping.longMapping(timestampType, timestampReadFunction(timestampType), timestampWriteFunction(timestampType));
- }
- return ColumnMapping.objectMapping(timestampType, longTimestampReadFunction(timestampType), longTimestampWriteFunction(timestampType));
- }
-
- private LongWriteFunction timestampWriteFunction(TimestampType timestampType)
- {
- return compatibleMode.isMySQLMode() ? StandardColumnMappings.timestampWriteFunction(timestampType) : new LongWriteFunction()
- {
- @Override
- public String getBindExpression()
- {
- return getToTimestampExpression(timestampType.getPrecision());
- }
-
- @Override
- public void set(PreparedStatement statement, int index, long epochMicros)
- throws SQLException
- {
- LocalDateTime timestamp = fromTrinoTimestamp(epochMicros);
- statement.setString(index, TIMESTAMP_NANO_OPTIONAL_FORMATTER.format(timestamp));
- }
-
- @Override
- public void setNull(PreparedStatement statement, int index)
- throws SQLException
- {
- statement.setNull(index, Types.VARCHAR);
- }
- };
- }
-
- private ObjectWriteFunction longTimestampWriteFunction(TimestampType timestampType)
- {
- return compatibleMode.isMySQLMode() ? StandardColumnMappings.longTimestampWriteFunction(timestampType, MYSQL_MODE_MAX_TIMESTAMP_PRECISION) : new ObjectWriteFunction()
- {
- @Override
- public Class<?> getJavaType()
- {
- return LongTimestamp.class;
- }
-
- @Override
- public String getBindExpression()
- {
- return getToTimestampExpression(timestampType.getPrecision());
- }
-
- @Override
- public void set(PreparedStatement statement, int index, Object value)
- throws SQLException
- {
- LocalDateTime timestamp = fromLongTrinoTimestamp((LongTimestamp) value, timestampType.getPrecision());
- statement.setString(index, TIMESTAMP_NANO_OPTIONAL_FORMATTER.format(timestamp));
- }
-
- @Override
- public void setNull(PreparedStatement statement, int index)
- throws SQLException
- {
- statement.setNull(index, Types.VARCHAR);
- }
- };
- }
-
- private ColumnMapping timestampWithTimeZoneColumnMapping(int timestampColumnSize, int scale)
- {
- TimestampWithTimeZoneType trinoType = createTimestampWithTimeZoneType(getTimestampPrecision(timestampColumnSize, scale));
- if (trinoType.getPrecision() <= TimestampWithTimeZoneType.MAX_SHORT_PRECISION) {
- return ColumnMapping.longMapping(trinoType, shortTimestampWithTimeZoneReadFunction(), shortTimestampWithTimeZoneWriteFunction());
- }
- return ColumnMapping.objectMapping(trinoType, longTimestampWithTimeZoneReadFunction(), longTimestampWithTimeZoneWriteFunction());
- }
-
- private LongReadFunction shortTimestampWithTimeZoneReadFunction()
- {
- return (resultSet, columnIndex) -> {
- Timestamp timestamp = resultSet.getTimestamp(columnIndex);
- long millisUtc = timestamp.getTime();
- return packDateTimeWithZone(millisUtc, UTC_KEY);
- };
- }
-
- private LongWriteFunction shortTimestampWithTimeZoneWriteFunction()
- {
- return (statement, index, value) -> {
- Instant instantValue = Instant.ofEpochMilli(unpackMillisUtc(value));
- statement.setObject(index, instantValue);
- };
- }
-
- private ObjectReadFunction longTimestampWithTimeZoneReadFunction()
- {
- return ObjectReadFunction.of(LongTimestampWithTimeZone.class, (resultSet, columnIndex) -> {
- OffsetDateTime offsetDateTime = resultSet.getObject(columnIndex, OffsetDateTime.class);
- return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(offsetDateTime.toEpochSecond(), (long) offsetDateTime.getNano() * PICOSECONDS_PER_NANOSECOND, UTC_KEY);
- });
- }
-
- private ObjectWriteFunction longTimestampWithTimeZoneWriteFunction()
- {
- return ObjectWriteFunction.of(LongTimestampWithTimeZone.class, (statement, index, value) -> {
- long epochSeconds = floorDiv(value.getEpochMillis(), MILLISECONDS_PER_SECOND);
- long nanosOfSecond = (long) floorMod(value.getEpochMillis(), MILLISECONDS_PER_SECOND) * NANOSECONDS_PER_MILLISECOND + value.getPicosOfMilli() / PICOSECONDS_PER_NANOSECOND;
- Instant instantValue = Instant.ofEpochSecond(epochSeconds, nanosOfSecond);
- statement.setObject(index, instantValue);
- });
- }
-
- @Override
- public WriteMapping toWriteMapping(ConnectorSession session, Type type)
- {
- if (type == BOOLEAN) {
- return compatibleMode.isMySQLMode() ? WriteMapping.booleanMapping("boolean", booleanWriteFunction()) : WriteMapping.booleanMapping("number(1)", booleanWriteFunction());
- }
- if (type == TINYINT) {
- return compatibleMode.isMySQLMode() ? WriteMapping.longMapping("tinyint", tinyintWriteFunction()) : WriteMapping.longMapping("number(3)", tinyintWriteFunction());
- }
- if (type == SMALLINT) {
- return compatibleMode.isMySQLMode() ? WriteMapping.longMapping("smallint", smallintWriteFunction()) : WriteMapping.longMapping("number(5)", smallintWriteFunction());
- }
- if (type == INTEGER) {
- return compatibleMode.isMySQLMode() ? WriteMapping.longMapping("integer", integerWriteFunction()) : WriteMapping.longMapping("number(10)", integerWriteFunction());
- }
- if (type == BIGINT) {
- return compatibleMode.isMySQLMode() ? WriteMapping.longMapping("bigint", bigintWriteFunction()) : WriteMapping.longMapping("number(19)", bigintWriteFunction());
- }
- if (type == REAL) {
- return compatibleMode.isMySQLMode() ? WriteMapping.longMapping("float", realWriteFunction()) : WriteMapping.longMapping("binary_float", realWriteFunction());
- }
- if (type == DOUBLE) {
- return compatibleMode.isMySQLMode() ? WriteMapping.doubleMapping("double precision", doubleWriteFunction()) : WriteMapping.doubleMapping("binary_double", doubleWriteFunction());
- }
- if (type instanceof DecimalType decimalType) {
- String dataType = format(compatibleMode.isMySQLMode() ? "decimal(%s, %s)" : "number(%s, %s)", decimalType.getPrecision(), decimalType.getScale());
- if (decimalType.isShort()) {
- return WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalType));
- }
- return WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalType));
- }
-
- if (type instanceof CharType charType) {
- String dataType;
- if (compatibleMode.isMySQLMode()) {
- dataType = charType.getLength() < MYSQL_MODE_CHAR_MAX_LENGTH ? "char(" + charType.getLength() + ")" : "clob";
- }
- else {
- dataType = charType.getLength() < ORACLE_MODE_CHAR_MAX_LENGTH ? "char(" + charType.getLength() + " CHAR)" : "clob";
- }
- return WriteMapping.sliceMapping(dataType, charWriteFunction());
- }
-
- if (type instanceof VarcharType varcharType) {
- return WriteMapping.sliceMapping(getVarcharDataType(varcharType), varcharWriteFunction());
- }
-
- if (type.equals(jsonType)) {
- return WriteMapping.sliceMapping("json", varcharWriteFunction());
- }
-
- if (type == VARBINARY) {
- return WriteMapping.sliceMapping("blob", varbinaryWriteFunction());
- }
-
- if (type == DATE) {
- return WriteMapping.longMapping("date", dateWriteFunction());
- }
-
- if (type instanceof TimeType timeType) {
- if (timeType.getPrecision() <= MYSQL_MODE_MAX_TIMESTAMP_PRECISION) {
- return WriteMapping.longMapping(format("time(%s)", timeType.getPrecision()), timeWriteFunction(timeType.getPrecision()));
- }
- return WriteMapping.longMapping(format("time(%s)", MYSQL_MODE_MAX_TIMESTAMP_PRECISION), timeWriteFunction(MYSQL_MODE_MAX_TIMESTAMP_PRECISION));
- }
-
- if (type instanceof TimestampType timestampType) {
- if (compatibleMode.isMySQLMode()) {
- if (timestampType.getPrecision() <= MYSQL_MODE_MAX_TIMESTAMP_PRECISION) {
- return WriteMapping.longMapping(format("datetime(%s)", timestampType.getPrecision()), timestampWriteFunction(timestampType));
- }
- return WriteMapping.objectMapping(format("datetime(%s)", MYSQL_MODE_MAX_TIMESTAMP_PRECISION), longTimestampWriteFunction(timestampType));
- }
- else {
- if (type.equals(TIMESTAMP_SECONDS)) {
- return WriteMapping.longMapping("date", timestampToDateWriteFunction());
- }
- int precision = min(timestampType.getPrecision(), ORACLE_MODE_MAX_TIMESTAMP_PRECISION);
- String dataType = format("timestamp(%d)", precision);
- if (timestampType.isShort()) {
- return WriteMapping.longMapping(dataType, timestampWriteFunction(timestampType));
- }
- return WriteMapping.objectMapping(dataType, longTimestampWriteFunction(createTimestampType(precision)));
- }
- }
-
- if (type instanceof TimestampWithTimeZoneType timestampWithTimeZoneType) {
- if (timestampWithTimeZoneType.getPrecision() <= MYSQL_MODE_MAX_TIMESTAMP_PRECISION) {
- String dataType = format("timestamp(%d)", timestampWithTimeZoneType.getPrecision());
- if (timestampWithTimeZoneType.getPrecision() <= TimestampWithTimeZoneType.MAX_SHORT_PRECISION) {
- return WriteMapping.longMapping(dataType, shortTimestampWithTimeZoneWriteFunction());
- }
- return WriteMapping.objectMapping(dataType, longTimestampWithTimeZoneWriteFunction());
- }
- return WriteMapping.objectMapping(format("timestamp(%d)", MYSQL_MODE_MAX_TIMESTAMP_PRECISION), longTimestampWithTimeZoneWriteFunction());
- }
-
- throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName());
- }
-
- private String getVarcharDataType(VarcharType varcharType)
- {
- if (compatibleMode.isMySQLMode()) {
- if (varcharType.isUnbounded() || varcharType.getBoundedLength() > MEDIUMTEXT_MAX_BYTES) {
- return "longtext";
- }
- else if (varcharType.getBoundedLength() <= TINYTEXT_MAX_BYTES) {
- return "tinytext";
- }
- else if (varcharType.getBoundedLength() <= TEXT_MAX_BYTES) {
- return "text";
- }
- else {
- return "mediumtext";
- }
- }
- else {
- if (varcharType.isUnbounded() || varcharType.getBoundedLength() > VARCHAR2_MAX_LENGTH) {
- return "clob";
- }
- else {
- return "varchar2(" + varcharType.getBoundedLength() + " CHAR)";
- }
- }
- }
-
- private BooleanWriteFunction booleanWriteFunction()
- {
- return BooleanWriteFunction.of(Types.TINYINT, (statement, index, value) -> statement.setObject(index, value ? 1 : 0));
- }
-}
diff --git a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseClientModule.java b/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseClientModule.java
deleted file mode 100644
index c10e8a4c..00000000
--- a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseClientModule.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.oceanbase;
-
-import com.google.inject.Binder;
-import com.google.inject.Provides;
-import com.google.inject.Scopes;
-import com.google.inject.Singleton;
-import com.oceanbase.jdbc.Driver;
-import io.airlift.configuration.AbstractConfigurationAwareModule;
-import io.opentelemetry.api.OpenTelemetry;
-import io.trino.plugin.jdbc.BaseJdbcConfig;
-import io.trino.plugin.jdbc.ConnectionFactory;
-import io.trino.plugin.jdbc.DecimalModule;
-import io.trino.plugin.jdbc.DriverConnectionFactory;
-import io.trino.plugin.jdbc.ForBaseJdbc;
-import io.trino.plugin.jdbc.JdbcClient;
-import io.trino.plugin.jdbc.JdbcJoinPushdownSupportModule;
-import io.trino.plugin.jdbc.JdbcMetadataConfig;
-import io.trino.plugin.jdbc.JdbcStatisticsConfig;
-import io.trino.plugin.jdbc.TimestampTimeZoneDomain;
-import io.trino.plugin.jdbc.credential.CredentialProvider;
-import io.trino.plugin.jdbc.ptf.Query;
-import io.trino.spi.function.table.ConnectorTableFunction;
-
-import java.util.Properties;
-
-import static com.google.inject.multibindings.Multibinder.newSetBinder;
-import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder;
-import static io.airlift.configuration.ConfigBinder.configBinder;
-
-public class OceanBaseClientModule
- extends AbstractConfigurationAwareModule
-{
- @Override
- protected void setup(Binder binder)
- {
- binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(OceanBaseClient.class).in(Scopes.SINGLETON);
- configBinder(binder).bindConfigDefaults(JdbcMetadataConfig.class, config -> config.setBulkListColumns(true));
- newOptionalBinder(binder, TimestampTimeZoneDomain.class).setBinding().toInstance(TimestampTimeZoneDomain.UTC_ONLY);
- configBinder(binder).bindConfig(OceanBaseJdbcConfig.class);
- configBinder(binder).bindConfig(OceanBaseConfig.class);
- configBinder(binder).bindConfig(JdbcStatisticsConfig.class);
- install(new DecimalModule());
- install(new JdbcJoinPushdownSupportModule());
- newSetBinder(binder, ConnectorTableFunction.class).addBinding().toProvider(Query.class).in(Scopes.SINGLETON);
- }
-
- @Provides
- @Singleton
- @ForBaseJdbc
- public ConnectionFactory createConnectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider, OceanBaseConfig obConfig, OpenTelemetry openTelemetry)
- {
- return DriverConnectionFactory.builder(new Driver(), config.getConnectionUrl(), credentialProvider)
- .setConnectionProperties(getConnectionProperties(obConfig))
- .setOpenTelemetry(openTelemetry)
- .build();
- }
-
- public Properties getConnectionProperties(OceanBaseConfig config)
- {
- Properties connectionProperties = new Properties();
- connectionProperties.setProperty("useInformationSchema", Boolean.toString(config.isDriverUseInformationSchema()));
- connectionProperties.setProperty("useUnicode", "true");
- connectionProperties.setProperty("characterEncoding", "utf8");
- connectionProperties.setProperty("tinyInt1isBit", "false");
- connectionProperties.setProperty("rewriteBatchedStatements", "true");
-
- connectionProperties.setProperty("connectionTimeZone", "LOCAL");
- connectionProperties.setProperty("forceConnectionTimeZoneToSession", "true");
-
- if (config.isAutoReconnect()) {
- connectionProperties.setProperty("autoReconnect", String.valueOf(config.isAutoReconnect()));
- connectionProperties.setProperty("maxReconnects", String.valueOf(config.getMaxReconnects()));
- }
- if (config.getConnectionTimeout() != null) {
- connectionProperties.setProperty("connectTimeout", String.valueOf(config.getConnectionTimeout().toMillis()));
- }
- if (config.isRemarksReportingEnabled()) {
- connectionProperties.setProperty("remarksReporting", String.valueOf(config.isRemarksReportingEnabled()));
- }
- return connectionProperties;
- }
-}
diff --git a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseCompatibleMode.java b/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseCompatibleMode.java
deleted file mode 100644
index b2da1573..00000000
--- a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseCompatibleMode.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.oceanbase;
-
-import java.util.Locale;
-
-public enum OceanBaseCompatibleMode
-{
- MySQL,
- Oracle;
-
- public static OceanBaseCompatibleMode parse(String text)
- {
- if (text == null || text.trim().isEmpty()) {
- return OceanBaseCompatibleMode.MySQL;
- }
- return switch (text.trim().toLowerCase(Locale.ENGLISH)) {
- case "mysql" -> OceanBaseCompatibleMode.MySQL;
- case "oracle" -> OceanBaseCompatibleMode.Oracle;
- default -> throw new IllegalArgumentException("Unsupported compatible mode: " + text);
- };
- }
-
- public boolean isMySQLMode()
- {
- return this == MySQL;
- }
-
- @Override
- public String toString()
- {
- return this.name().toLowerCase(Locale.ENGLISH);
- }
-}
diff --git a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseConfig.java b/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseConfig.java
deleted file mode 100644
index 9bd911f6..00000000
--- a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseConfig.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.oceanbase;
-
-import io.airlift.configuration.Config;
-import io.airlift.configuration.ConfigDescription;
-import io.airlift.units.Duration;
-import jakarta.validation.constraints.Min;
-
-import java.util.concurrent.TimeUnit;
-
-public class OceanBaseConfig
-{
- private OceanBaseCompatibleMode compatibleMode = OceanBaseCompatibleMode.MySQL;
- private boolean autoReconnect = true;
- private int maxReconnects = 3;
- private Duration connectionTimeout = new Duration(10, TimeUnit.SECONDS);
- private boolean driverUseInformationSchema = true;
- private boolean remarksReportingEnabled;
-
- public OceanBaseCompatibleMode getCompatibleMode()
- {
- return compatibleMode;
- }
-
- @Config("oceanbase.compatible-mode")
- public OceanBaseConfig setCompatibleMode(String compatibleMode)
- {
- this.compatibleMode = OceanBaseCompatibleMode.parse(compatibleMode);
- return this;
- }
-
- public boolean isAutoReconnect()
- {
- return autoReconnect;
- }
-
- @Config("oceanbase.auto-reconnect")
- public OceanBaseConfig setAutoReconnect(boolean autoReconnect)
- {
- this.autoReconnect = autoReconnect;
- return this;
- }
-
- @Min(1)
- public int getMaxReconnects()
- {
- return maxReconnects;
- }
-
- @Config("oceanbase.max-reconnects")
- public OceanBaseConfig setMaxReconnects(int maxReconnects)
- {
- this.maxReconnects = maxReconnects;
- return this;
- }
-
- public Duration getConnectionTimeout()
- {
- return connectionTimeout;
- }
-
- @Config("oceanbase.connection-timeout")
- public OceanBaseConfig setConnectionTimeout(Duration connectionTimeout)
- {
- this.connectionTimeout = connectionTimeout;
- return this;
- }
-
- public boolean isDriverUseInformationSchema()
- {
- return driverUseInformationSchema;
- }
-
- @Config("oceanbase.use-information-schema")
- @ConfigDescription("Value of JDBC driver connection property 'useInformationSchema' on MySQL mode")
- public OceanBaseConfig setDriverUseInformationSchema(boolean driverUseInformationSchema)
- {
- this.driverUseInformationSchema = driverUseInformationSchema;
- return this;
- }
-
- public boolean isRemarksReportingEnabled()
- {
- return remarksReportingEnabled;
- }
-
- @Config("oceanbase.remarks-reporting.enabled")
- public OceanBaseConfig setRemarksReportingEnabled(boolean enabled)
- {
- this.remarksReportingEnabled = enabled;
- return this;
- }
-}
diff --git a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseJdbcConfig.java b/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseJdbcConfig.java
deleted file mode 100644
index 66a931d5..00000000
--- a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBaseJdbcConfig.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.oceanbase;
-
-import io.trino.plugin.jdbc.BaseJdbcConfig;
-import jakarta.validation.constraints.AssertTrue;
-
-public class OceanBaseJdbcConfig
- extends BaseJdbcConfig
-{
- @AssertTrue(message = "Invalid JDBC URL for OceanBase connector")
- public boolean isUrlValid()
- {
- return getConnectionUrl().startsWith("jdbc:oceanbase://");
- }
-}
diff --git a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBasePlugin.java b/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBasePlugin.java
deleted file mode 100644
index 119acc8e..00000000
--- a/trino-oceanbase-plugin/src/main/java/io/trino/plugin/oceanbase/OceanBasePlugin.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.oceanbase;
-
-import io.trino.plugin.jdbc.JdbcPlugin;
-
-public class OceanBasePlugin
- extends JdbcPlugin
-{
- public OceanBasePlugin()
- {
- super("oceanbase", OceanBaseClientModule::new);
- }
-}