From 8eb1479a1728580aa929a2fede5a6662814e506c Mon Sep 17 00:00:00 2001 From: Anton Kurako Date: Thu, 30 Jan 2025 15:57:16 +0300 Subject: [PATCH 1/5] Lettuce and Jedis as separate modules --- cache/cache-redis-common/build.gradle | 24 + .../kora/cache/redis/AbstractRedisCache.java | 660 ++++++++++++++++++ .../tinkoff/kora/cache/redis/RedisCache.java | 7 + .../kora/cache/redis/RedisCacheClient.java | 42 ++ .../kora/cache/redis/RedisCacheConfig.java | 24 + .../kora/cache/redis/RedisCacheKeyMapper.java | 17 + .../cache/redis/RedisCacheMapperModule.java | 175 +++++ .../kora/cache/redis/RedisCacheModule.java | 7 + .../kora/cache/redis/RedisCacheTelemetry.java | 129 ++++ .../cache/redis/RedisCacheValueMapper.java | 19 + .../redis/lettuce/LettuceClientConfig.java | 43 ++ .../redis/lettuce/LettuceClientFactory.java | 133 ++++ .../LettuceClusterRedisCacheClient.java | 190 +++++ .../cache/redis/lettuce/LettuceModule.java | 36 + .../lettuce/LettuceRedisCacheClient.java | 190 +++++ .../cache/redis/AbstractAsyncCacheTests.java | 252 +++++++ .../cache/redis/AbstractSyncCacheTests.java | 229 ++++++ .../redis/AsyncCacheExpireReadTests.java | 22 + .../redis/AsyncCacheExpireWriteTests.java | 22 + .../kora/cache/redis/AsyncCacheTests.java | 20 + .../tinkoff/kora/cache/redis/CacheRunner.java | 85 +++ .../cache/redis/SyncCacheExpireReadTests.java | 22 + .../redis/SyncCacheExpireWriteTests.java | 22 + .../kora/cache/redis/SyncCacheTests.java | 20 + .../kora/cache/redis/testdata/DummyCache.java | 14 + cache/cache-redis-jedis/build.gradle | 24 + .../kora/cache/redis/AbstractRedisCache.java | 660 ++++++++++++++++++ .../tinkoff/kora/cache/redis/RedisCache.java | 7 + .../kora/cache/redis/RedisCacheClient.java | 42 ++ .../kora/cache/redis/RedisCacheConfig.java | 24 + .../kora/cache/redis/RedisCacheKeyMapper.java | 17 + .../cache/redis/RedisCacheMapperModule.java | 175 +++++ .../kora/cache/redis/RedisCacheModule.java | 7 + .../kora/cache/redis/RedisCacheTelemetry.java | 129 ++++ 
.../cache/redis/RedisCacheValueMapper.java | 19 + .../redis/lettuce/LettuceClientConfig.java | 43 ++ .../redis/lettuce/LettuceClientFactory.java | 133 ++++ .../LettuceClusterRedisCacheClient.java | 190 +++++ .../cache/redis/lettuce/LettuceModule.java | 36 + .../lettuce/LettuceRedisCacheClient.java | 190 +++++ .../cache/redis/AbstractAsyncCacheTests.java | 252 +++++++ .../cache/redis/AbstractSyncCacheTests.java | 229 ++++++ .../redis/AsyncCacheExpireReadTests.java | 22 + .../redis/AsyncCacheExpireWriteTests.java | 22 + .../kora/cache/redis/AsyncCacheTests.java | 20 + .../tinkoff/kora/cache/redis/CacheRunner.java | 85 +++ .../cache/redis/SyncCacheExpireReadTests.java | 22 + .../redis/SyncCacheExpireWriteTests.java | 22 + .../kora/cache/redis/SyncCacheTests.java | 20 + .../kora/cache/redis/testdata/DummyCache.java | 14 + cache/cache-redis-lettuce/build.gradle | 24 + .../kora/cache/redis/AbstractRedisCache.java | 660 ++++++++++++++++++ .../tinkoff/kora/cache/redis/RedisCache.java | 7 + .../kora/cache/redis/RedisCacheClient.java | 42 ++ .../kora/cache/redis/RedisCacheConfig.java | 24 + .../kora/cache/redis/RedisCacheKeyMapper.java | 17 + .../cache/redis/RedisCacheMapperModule.java | 175 +++++ .../kora/cache/redis/RedisCacheModule.java | 7 + .../kora/cache/redis/RedisCacheTelemetry.java | 129 ++++ .../cache/redis/RedisCacheValueMapper.java | 19 + .../redis/lettuce/LettuceClientConfig.java | 43 ++ .../redis/lettuce/LettuceClientFactory.java | 133 ++++ .../LettuceClusterRedisCacheClient.java | 190 +++++ .../cache/redis/lettuce/LettuceModule.java | 36 + .../lettuce/LettuceRedisCacheClient.java | 190 +++++ .../cache/redis/AbstractAsyncCacheTests.java | 252 +++++++ .../cache/redis/AbstractSyncCacheTests.java | 229 ++++++ .../redis/AsyncCacheExpireReadTests.java | 22 + .../redis/AsyncCacheExpireWriteTests.java | 22 + .../kora/cache/redis/AsyncCacheTests.java | 20 + .../tinkoff/kora/cache/redis/CacheRunner.java | 85 +++ .../cache/redis/SyncCacheExpireReadTests.java | 22 + 
.../redis/SyncCacheExpireWriteTests.java | 22 + .../kora/cache/redis/SyncCacheTests.java | 20 + .../kora/cache/redis/testdata/DummyCache.java | 14 + cache/cache-redis/build.gradle | 2 +- dependencies.gradle | 3 +- internal/test-redis/build.gradle | 2 +- redis/redis-jedis/build.gradle | 11 + .../tinkoff/kora/redis/jedis/JedisConfig.java | 42 ++ .../kora/redis/jedis/JedisFactory.java | 94 +++ .../tinkoff/kora/redis/jedis/JedisModule.java | 17 + redis/redis-lettuce/build.gradle | 20 + .../kora/redis/lettuce/LettuceConfig.java | 43 ++ .../kora/redis/lettuce/LettuceFactory.java | 132 ++++ .../kora/redis/lettuce/LettuceModule.java | 17 + settings.gradle | 7 +- 87 files changed, 7598 insertions(+), 4 deletions(-) create mode 100644 cache/cache-redis-common/build.gradle create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java 
create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java create mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java create mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java create mode 100644 cache/cache-redis-jedis/build.gradle create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java create mode 100644 
cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java create mode 100644 
cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java create mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java create mode 100644 cache/cache-redis-lettuce/build.gradle create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java create mode 100644 
cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java create mode 100644 redis/redis-jedis/build.gradle create mode 100644 redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java create mode 100644 redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java create mode 100644 redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java create mode 100644 redis/redis-lettuce/build.gradle create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java diff --git a/cache/cache-redis-common/build.gradle b/cache/cache-redis-common/build.gradle new file mode 100644 index 000000000..44c085705 --- /dev/null +++ 
b/cache/cache-redis-common/build.gradle @@ -0,0 +1,24 @@ +dependencies { + annotationProcessor project(':config:config-annotation-processor') + + api project(":cache:cache-common") + + implementation project(":json:json-common") + implementation project(":config:config-common") + implementation(libs.redis.lettuce) { + exclude group: 'io.projectreactor', module: 'reactor-core' + exclude group: 'io.netty', module: 'netty-common' + exclude group: 'io.netty', module: 'netty-handler' + exclude group: 'io.netty', module: 'netty-transport' + } + implementation libs.reactor.core + implementation libs.netty.common + implementation libs.netty.handlers + implementation libs.netty.transports + implementation libs.apache.pool + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java new file mode 100644 index 000000000..941591ac8 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -0,0 +1,660 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.AsyncCache; + +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import java.util.stream.Collectors; + +public abstract class AbstractRedisCache implements AsyncCache { + + private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); + + private final String name; + private final RedisCacheClient redisClient; + private final 
RedisCacheTelemetry telemetry; + private final byte[] keyPrefix; + + private final RedisCacheKeyMapper keyMapper; + private final RedisCacheValueMapper valueMapper; + + private final Long expireAfterAccessMillis; + private final Long expireAfterWriteMillis; + + protected AbstractRedisCache(String name, + RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheTelemetry telemetry, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + this.name = name; + this.redisClient = redisClient; + this.telemetry = telemetry; + this.keyMapper = keyMapper; + this.valueMapper = valueMapper; + this.expireAfterAccessMillis = (config.expireAfterAccess() == null) + ? null + : config.expireAfterAccess().toMillis(); + this.expireAfterWriteMillis = (config.expireAfterWrite() == null) + ? null + : config.expireAfterWrite().toMillis(); + + if (config.keyPrefix().isEmpty()) { + this.keyPrefix = null; + } else { + var prefixRaw = config.keyPrefix().getBytes(StandardCharsets.UTF_8); + this.keyPrefix = new byte[prefixRaw.length + RedisCacheKeyMapper.DELIMITER.length]; + System.arraycopy(prefixRaw, 0, this.keyPrefix, 0, prefixRaw.length); + System.arraycopy(RedisCacheKeyMapper.DELIMITER, 0, this.keyPrefix, prefixRaw.length, RedisCacheKeyMapper.DELIMITER.length); + } + } + + @Override + public V get(@Nonnull K key) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.create("GET", name); + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes).toCompletableFuture().join() + : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map get(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("GET_MANY", name); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? redisClient.mget(keysByBytes).toCompletableFuture().join() + : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return Collections.emptyMap(); + } catch (Exception e) { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + } + } + + @Nonnull + @Override + public V put(@Nonnull K key, @Nonnull V value) { + if (key == null || value == null) { + return null; + } + + var telemetryContext = telemetry.create("PUT", name); + + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, 
valueAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + telemetryContext.recordSuccess(); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return value; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return value; + } + } + + @Nonnull + @Override + public Map put(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("PUT_MANY", name); + + try { + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + + telemetryContext.recordSuccess(); + return keyAndValues; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return keyAndValues; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return keyAndValues; + } + } + + @Override + public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + + V fromCache = null; + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes).toCompletableFuture().join() + : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + fromCache = valueMapper.read(jsonAsBytes); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache != null) { + telemetryContext.recordSuccess(); + return fromCache; + } + + try { + var value = mappingFunction.apply(key); + if (value != null) { + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function, Map> mappingFunction) { + if (keys == null || keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + + final Map fromCache = new HashMap<>(); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes).toCompletableFuture().join() + : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache.size() == keys.size()) { + telemetryContext.recordSuccess(); + return fromCache; + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + try { + var values = mappingFunction.apply(missingKeys); + if (!values.isEmpty()) { + try { + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(); + fromCache.putAll(values); + return fromCache; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return fromCache; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return fromCache; + } + } + + @Override + public void invalidate(@Nonnull K key) { + if (key != null) { + final byte[] keyAsBytes = mapKey(key); + var telemetryContext = telemetry.create("INVALIDATE", name); + + try { + redisClient.del(keyAsBytes).toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + 
public void invalidate(@Nonnull Collection keys) { + if (keys != null && !keys.isEmpty()) { + var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + + try { + final byte[][] keysAsBytes = keys.stream() + .map(this::mapKey) + .toArray(byte[][]::new); + + redisClient.del(keysAsBytes).toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidateAll() { + var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + + try { + redisClient.flushAll().toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + + @Nonnull + @Override + public CompletionStage getAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.create("GET", name); + final byte[] keyAsBytes = mapKey(key); + + CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(jsonAsBytes -> { + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> getAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("GET_MANY", name); + var keysByKeyByte = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? redisClient.mget(keysAsBytes) + : redisClient.getex(keysAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valuesByKeys -> { + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyByte.entrySet()) { + valuesByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { + if (key == null) { + return CompletableFuture.completedFuture(value); + } + + var telemetryContext = telemetry.create("PUT", name); + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + final CompletionStage responseCompletionStage = (expireAfterWriteMillis == null) + ? 
redisClient.set(keyAsBytes, valueAsBytes) + : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return value; + }); + } + + @Nonnull + @Override + public CompletionStage> putAsync(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("PUT_MANY", name); + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var responseCompletionStage = (expireAfterWriteMillis == null) + ? redisClient.mset(keyAndValuesAsBytes) + : redisClient.psetex(keyAndValuesAsBytes, /* FIX(review): write path must use the write TTL; was expireAfterAccessMillis, inconsistent with the null-check above and with the sync put(Map) */ expireAfterWriteMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(); + return keyAndValues; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return keyAndValues; + }); + } + + @Override + public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function> mappingFunction) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + final byte[] keyAsBytes = mapKey(key); + final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ?
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueMapper::read) + .thenCompose(fromCache -> { + if (fromCache != null) { + return CompletableFuture.completedFuture(fromCache); + } + + return mappingFunction.apply(key) + .thenCompose(value -> { + if (value == null) { + return CompletableFuture.completedFuture(null); + } + + final byte[] valueAsBytes = valueMapper.write(value); + var putFutureResponse = (expireAfterWriteMillis == null) + ? redisClient.set(keyAsBytes, valueAsBytes) + : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return putFutureResponse + .thenApply(v -> { + telemetryContext.recordSuccess(); + return value; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> computeIfAbsentAsync(@Nonnull Collection keys, @Nonnull Function, CompletionStage>> mappingFunction) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueByKeys -> { + final Map fromCache = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + + return fromCache; + }) + .thenCompose(fromCache -> { + if (fromCache.size() == keys.size()) { + return CompletableFuture.completedFuture(fromCache); + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + return mappingFunction.apply(missingKeys) + .thenCompose(values -> { + if (values.isEmpty()) { + return CompletableFuture.completedFuture(fromCache); + } + + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var putCompletionStage = (/* FIX(review): this is a write-back, so the choice between mset and psetex must be driven by the write TTL (was expireAfterAccessMillis), matching the sync computeIfAbsent(Collection) */ expireAfterWriteMillis == null) + ? 
redisClient.mset(keyAndValuesAsBytes) + : redisClient.psetex(keyAndValuesAsBytes, /* FIX(review): write TTL, not access TTL */ expireAfterWriteMillis); + + return putCompletionStage + .thenApply(v -> { + telemetryContext.recordSuccess(); + fromCache.putAll(values); + return fromCache; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.create("INVALIDATE", name); + final byte[] keyAsBytes = mapKey(key); + return redisClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Override + public CompletionStage invalidateAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + final byte[][] keyAsBytes = keys.stream() + .distinct() + .map(this::mapKey) + .toArray(byte[][]::new); + + return redisClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAllAsync() { + var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + return redisClient.flushAll() + .thenApply(r -> { + telemetryContext.recordSuccess(); + return r; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + private byte[] mapKey(K key) { + final byte[] suffixAsBytes = keyMapper.apply(key); + if (this.keyPrefix == null) { + return suffixAsBytes; + } else { + var keyAsBytes = new byte[keyPrefix.length + suffixAsBytes.length]; + System.arraycopy(this.keyPrefix, 0, keyAsBytes, 0, 
this.keyPrefix.length); + System.arraycopy(suffixAsBytes, 0, keyAsBytes, this.keyPrefix.length, suffixAsBytes.length); + + return keyAsBytes; + } + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java new file mode 100644 index 000000000..75a932976 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.AsyncCache; + +public interface RedisCache extends AsyncCache { + +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java new file mode 100644 index 000000000..a5b995bd6 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -0,0 +1,42 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; + +import java.util.Map; +import java.util.concurrent.CompletionStage; + +public interface RedisCacheClient { + + @Nonnull + CompletionStage get(byte[] key); + + @Nonnull + CompletionStage> mget(byte[][] keys); + + @Nonnull + CompletionStage getex(byte[] key, long expireAfterMillis); + + @Nonnull + CompletionStage> getex(byte[][] keys, long expireAfterMillis); + + @Nonnull + CompletionStage set(byte[] key, byte[] value); + + @Nonnull + CompletionStage mset(@Nonnull Map keyAndValue); + + @Nonnull + CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); + + @Nonnull + CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + + @Nonnull + CompletionStage del(byte[] key); + + @Nonnull + CompletionStage del(byte[][] keys); + + @Nonnull + CompletionStage flushAll(); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java 
b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java new file mode 100644 index 000000000..120bd511a --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -0,0 +1,24 @@ +package ru.tinkoff.kora.cache.redis; + + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; + +@ConfigValueExtractor +public interface RedisCacheConfig { + + /** + * Key prefix allow to avoid key collision in single Redis database between multiple caches + * + * @return Redis Cache key prefix, if empty string means that prefix will NOT be applied + */ + String keyPrefix(); + + @Nullable + Duration expireAfterWrite(); + + @Nullable + Duration expireAfterAccess(); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java new file mode 100644 index 000000000..f6edc71ad --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.CacheKeyMapper; + +import java.nio.charset.StandardCharsets; +import java.util.function.Function; + +/** + * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. 
+ */ +public interface RedisCacheKeyMapper extends Function { + + /** + * Is used to delimiter composite key such as {@link CacheKeyMapper} + */ + byte[] DELIMITER = ":".getBytes(StandardCharsets.UTF_8); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java new file mode 100644 index 000000000..81e48e005 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -0,0 +1,175 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.json.common.JsonCommonModule; +import ru.tinkoff.kora.json.common.JsonReader; +import ru.tinkoff.kora.json.common.JsonWriter; +import ru.tinkoff.kora.json.common.annotation.Json; + +import java.io.IOException; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.util.UUID; + +public interface RedisCacheMapperModule extends JsonCommonModule { + + @DefaultComponent + default RedisCacheTelemetry redisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + return new RedisCacheTelemetry(metrics, tracer); + } + + @Json + @DefaultComponent + default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(V value) { + try { + return jsonWriter.toByteArray(value); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage()); + } + } + + @Override + public V read(byte[] serializedValue) { + try { + return (serializedValue == null) ? 
null : jsonReader.read(serializedValue); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage()); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper stringRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(String value) { + return value.getBytes(StandardCharsets.UTF_8); + } + + @Override + public String read(byte[] serializedValue) { + return (serializedValue == null) ? null : new String(serializedValue, StandardCharsets.UTF_8); + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bytesRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(byte[] value) { + return value; + } + + @Override + public byte[] read(byte[] serializedValue) { + return serializedValue; + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper intRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Integer value) { + return keyMapper.apply(value); + } + + @Override + public Integer read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Integer.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper longRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Long value) { + return keyMapper.apply(value); + } + + @Override + public Long read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Long.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bigIntRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(BigInteger value) { + return keyMapper.apply(value); + } + + @Override + public BigInteger 
read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return new BigInteger(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper uuidRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + + @Override + public byte[] write(UUID value) { + return keyMapper.apply(value); + } + + @Override + public UUID read(byte[] serializedValue) { + return UUID.fromString(new String(serializedValue, StandardCharsets.UTF_8)); + } + }; + } + + @DefaultComponent + default RedisCacheKeyMapper intRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper longRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper bigIntRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper uuidRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper stringRedisKeyMapper() { + return c -> c.getBytes(StandardCharsets.UTF_8); + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java new file mode 100644 index 000000000..fe07914b0 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; + +public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { + +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java 
b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java new file mode 100644 index 000000000..af84dfdd5 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java @@ -0,0 +1,129 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; + +public final class RedisCacheTelemetry { + + private static final String ORIGIN = "redis"; + + record Operation(@Nonnull String name, @Nonnull String cacheName) implements CacheTelemetryOperation { + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + } + + interface TelemetryContext { + void recordSuccess(); + + void recordSuccess(@Nullable Object valueFromCache); + + void recordFailure(@Nullable Throwable throwable); + } + + private static final Logger logger = LoggerFactory.getLogger(RedisCacheTelemetry.class); + + private static final TelemetryContext STUB_CONTEXT = new StubCacheTelemetry(); + + @Nullable + private final CacheMetrics metrics; + @Nullable + private final CacheTracer tracer; + private final boolean isStubTelemetry; + + RedisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + this.metrics = metrics; + this.tracer = tracer; + this.isStubTelemetry = metrics == null && tracer == null; + } + + record StubCacheTelemetry() implements TelemetryContext { + + @Override + public void recordSuccess() {} + + @Override + public void recordSuccess(@Nullable Object valueFromCache) {} + + @Override + public void recordFailure(@Nullable Throwable throwable) {} + } + + class DefaultCacheTelemetryContext implements TelemetryContext { + + private final Operation operation; + + private CacheTracer.CacheSpan 
span; + private final long startedInNanos = System.nanoTime(); + + DefaultCacheTelemetryContext(Operation operation) { + logger.trace("Operation '{}' for cache '{}' started", operation.name(), operation.cacheName()); + if (tracer != null) { + span = tracer.trace(operation); + } + this.operation = operation; + } + + @Override + public void recordSuccess() { + recordSuccess(null); + } + + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordSuccess(operation, durationInNanos, valueFromCache); + } + if (span != null) { + span.recordSuccess(); + } + + if (operation.name().startsWith("GET")) { + if (valueFromCache == null) { + logger.trace("Operation '{}' for cache '{}' didn't retried value", operation.name(), operation.cacheName()); + } else { + logger.debug("Operation '{}' for cache '{}' retried value", operation.name(), operation.cacheName()); + } + } else { + logger.trace("Operation '{}' for cache '{}' completed", operation.name(), operation.cacheName()); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordFailure(operation, durationInNanos, throwable); + } + if (span != null) { + span.recordFailure(throwable); + } + + if (throwable != null) { + logger.warn("Operation '{}' failed for cache '{}' with message: {}", + operation.name(), operation.cacheName(), throwable.getMessage()); + } else { + logger.warn("Operation '{}' failed for cache '{}'", + operation.name(), operation.cacheName()); + } + } + } + + @Nonnull + TelemetryContext create(@Nonnull String operationName, @Nonnull String cacheName) { + if (isStubTelemetry) { + return STUB_CONTEXT; + } + + return new DefaultCacheTelemetryContext(new Operation(operationName, cacheName)); + } +} diff --git 
a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java new file mode 100644 index 000000000..cf2037f42 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -0,0 +1,19 @@ +package ru.tinkoff.kora.cache.redis; + +/** + * Converts cache value into serializer value to store in cache. + */ +public interface RedisCacheValueMapper { + + /** + * @param value to serialize + * @return value serialized + */ + byte[] write(V value); + + /** + * @param serializedValue to deserialize + * @return value deserialized + */ + V read(byte[] serializedValue); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java new file mode 100644 index 000000000..6fb2ee3e9 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java @@ -0,0 +1,43 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.RedisURI; +import io.lettuce.core.SocketOptions; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; + +@ConfigValueExtractor +public interface LettuceClientConfig { + + String uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() { + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(RedisURI.DEFAULT_TIMEOUT); + } + + enum Protocol { + + /** Redis 2 to Redis 5 */ + RESP2, + /** Redis 6+ */ + RESP3 + } +} diff --git 
a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java new file mode 100644 index 000000000..fb29c05e2 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java @@ -0,0 +1,133 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.cluster.ClusterClientOptions; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nonnull; + +import java.net.URI; +import java.time.Duration; +import java.util.List; + +public final class LettuceClientFactory { + + @Nonnull + public AbstractRedisClient build(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + + final List mappedRedisUris = buildRedisURI(config); + + return (mappedRedisUris.size() == 1) + ? 
buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) + : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + public RedisClusterClient buildRedisClusterClient(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + public RedisClient buildRedisClient(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClusterClient client = RedisClusterClient.create(redisURIs); + client.setOptions(ClusterClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) 
+ .build()); + + return client; + } + + @Nonnull + private static RedisClient buildRedisClientInternal(RedisURI redisURI, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClient client = RedisClient.create(redisURI); + client.setOptions(ClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) + .build()); + + return client; + } + + static List buildRedisURI(LettuceClientConfig config) { + final String uri = config.uri(); + final Integer database = config.database(); + final String user = config.user(); + final String password = config.password(); + + final List redisURIS = RedisClusterURIUtil.toRedisURIs(URI.create(uri)); + return redisURIS.stream() + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(config.commandTimeout()) + .build(); + }) + .toList(); + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java new file mode 100644 index 000000000..adda5d4f8 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java @@ -0,0 
+1,190 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import io.lettuce.core.GetExArgs; +import io.lettuce.core.KeyValue; +import io.lettuce.core.Value; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceClusterRedisCacheClient implements RedisCacheClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceClusterRedisCacheClient.class); + + private final RedisClusterClient redisClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> pool; + private StatefulRedisClusterConnection connection; + + // always use async cause sync uses JDK Proxy wrapped async impl + private RedisAdvancedClusterAsyncCommands commands; + + LettuceClusterRedisCacheClient(RedisClusterClient redisClient) { + this.redisClient = redisClient; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return commands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return commands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + 
.collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return commands.set(key, value).thenApply(r -> true); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return commands.mset(keyAndValue).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, 
entry.getValue()) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_v -> true); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return commands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return commands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); + this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); + this.commands = this.connection.async(); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.pool.close(); + this.connection.close(); + this.redisClient.shutdown(); + + logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java new file mode 100644 index 
000000000..25cb53904 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java @@ -0,0 +1,36 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.ConfigValue; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +import java.time.Duration; + +public interface LettuceModule { + + default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("lettuce"); + return extractor.extract(value); + } + + default LettuceClientFactory lettuceClientFactory() { + return new LettuceClientFactory(); + } + + @DefaultComponent + default RedisCacheClient lettuceRedisClient(LettuceClientFactory factory, LettuceClientConfig config) { + var redisClient = factory.build(config); + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + return new LettuceRedisCacheClient(rc, config); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceClusterRedisCacheClient(rcc); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java new file mode 100644 index 000000000..cdb95be7c --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java @@ -0,0 +1,190 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.api.StatefulRedisConnection; 
+import io.lettuce.core.api.async.RedisAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceRedisCacheClient implements RedisCacheClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceRedisCacheClient.class); + + private final RedisURI redisURI; + private final RedisClient redisClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> pool; + private StatefulRedisConnection connection; + + // always use async cause sync uses JDK Proxy wrapped async impl + private RedisAsyncCommands commands; + + LettuceRedisCacheClient(RedisClient redisClient, LettuceClientConfig config) { + this.redisClient = redisClient; + final List redisURIs = LettuceClientFactory.buildRedisURI(config); + this.redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return commands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return commands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return commands.set(key, value).thenApply(r -> true); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return commands.mset(keyAndValue).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) 
{ + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) + .thenApply(v -> true) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) + .thenApply(_void -> true) + .whenComplete((s, throwable) -> pool.release(connection)); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return commands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return commands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); + this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); + this.commands = this.connection.async(); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.pool.close(); + this.connection.close(); + this.redisClient.shutdown(); + + 
package ru.tinkoff.kora.cache.redis;

import org.junit.jupiter.api.Test;
import ru.tinkoff.kora.cache.redis.testdata.DummyCache;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;

/**
 * Async-API test suite for {@link DummyCache}; concrete subclasses create the
 * cache (plain / expire-after-write / expire-after-access) in @BeforeEach.
 * Generic type arguments were restored here — the extracted patch had raw
 * {@code List}/{@code Map} declarations.
 */
abstract class AbstractAsyncCacheTests extends CacheRunner {

    protected DummyCache cache = null;

    @Test
    void getWhenCacheEmpty() {
        // given
        var key = "1";

        // when
        assertNull(cache.getAsync(key).toCompletableFuture().join());
    }

    @Test
    void getWhenCacheFilled() {
        // given
        var key = "1";
        var value = "1";

        // when
        cache.putAsync(key, value).toCompletableFuture().join();

        // then
        final String fromCache = cache.getAsync(key).toCompletableFuture().join();
        assertEquals(value, fromCache);
    }

    @Test
    void getMultiWhenCacheEmpty() {
        // given
        List<String> keys = List.of("1", "2");

        // when
        Map<String, String> keyToValue = cache.getAsync(keys).toCompletableFuture().join();
        assertTrue(keyToValue.isEmpty());
    }

    @Test
    void getMultiWhenCacheFilledPartly() {
        // given
        List<String> keys = List.of("1");
        for (String key : keys) {
            // FIX: assert the key is absent before seeding, matching the sync twin.
            assertNull(cache.getAsync(key).toCompletableFuture().join());
            cache.putAsync(key, key).toCompletableFuture().join();
        }

        // when
        Map<String, String> keyToValue = cache.getAsync(keys).toCompletableFuture().join();
        assertEquals(1, keyToValue.size());
        keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v))));
    }

    @Test
    void getMultiWhenCacheFilled() {
        // given
        List<String> keys = List.of("1", "2");
        for (String key : keys) {
            cache.putAsync(key, key).toCompletableFuture().join();
        }

        // when
        Map<String, String> keyToValue = cache.getAsync(keys).toCompletableFuture().join();
        assertEquals(2, keyToValue.size());
        keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v))));
    }

    @Test
    void computeIfAbsentWhenCacheEmpty() {
        // given

        // when
        assertNull(cache.getAsync("1").toCompletableFuture().join());
        final String valueComputed = cache.computeIfAbsent("1", k -> "1");
        assertEquals("1", valueComputed);

        // then
        final String cached = cache.getAsync("1").toCompletableFuture().join();
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiWhenCacheEmpty() {
        // given
        List<String> keys = List.of("1", "2");
        for (String key : keys) {
            assertNull(cache.getAsync(key).toCompletableFuture().join());
        }

        // when: the compute branch handles all possible subsets of missing keys
        final Map<String, String> valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> {
            if (keysCompute.size() == 2) {
                return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2"));
            } else if ("1".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("1", "1"));
            } else if ("2".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("2", "2"));
            }

            throw new IllegalStateException("Should not happen");
        }).toCompletableFuture().join();
        assertEquals(2, valueComputed.size());
        assertEquals(Set.copyOf(keys), valueComputed.keySet());
        assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.getAsync(keys).toCompletableFuture().join();
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiOneWhenCachePartly() {
        // given
        List<String> keys = List.of("1");
        for (String key : keys) {
            assertNull(cache.getAsync(key).toCompletableFuture().join());
            cache.putAsync(key, key).toCompletableFuture().join();
        }

        // when: every requested key is cached, so the mapper may still be
        // invoked only for missing subsets (none here beyond defensive branches)
        final Map<String, String> valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> {
            if (keysCompute.size() == 2) {
                return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2"));
            } else if ("1".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("1", "1"));
            } else if ("2".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("2", "2"));
            }

            throw new IllegalStateException("Should not happen");
        }).toCompletableFuture().join();
        assertEquals(1, valueComputed.size());
        assertEquals(Set.copyOf(keys), valueComputed.keySet());
        assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.getAsync(keys).toCompletableFuture().join();
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiAllWhenCachePartly() {
        // given
        List<String> keys = List.of("1");
        for (String key : keys) {
            assertNull(cache.getAsync(key).toCompletableFuture().join());
            cache.putAsync(key, key).toCompletableFuture().join();
        }

        // when: "1" is cached, "2" must be computed
        final Map<String, String> valueComputed = cache.computeIfAbsentAsync(Set.of("1", "2"), keysCompute -> {
            if (keysCompute.size() == 2) {
                return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2"));
            } else if ("1".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("1", "1"));
            } else if ("2".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("2", "2"));
            }

            throw new IllegalStateException("Should not happen");
        }).toCompletableFuture().join();
        assertEquals(2, valueComputed.size());
        assertEquals(Set.of("1", "2"), valueComputed.keySet());
        assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.getAsync(Set.of("1", "2")).toCompletableFuture().join();
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiWhenCacheFilled() {
        // given
        List<String> keys = List.of("1", "2");
        for (String key : keys) {
            assertNull(cache.getAsync(key).toCompletableFuture().join());
            cache.putAsync(key, key).toCompletableFuture().join();
        }

        // when: all keys are cached — mapper results ("???") must NOT win
        final Map<String, String> valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> {
            if (keysCompute.size() == 2) {
                return CompletableFuture.completedFuture(Map.of("1", "???", "2", "???"));
            } else if ("1".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("1", "???"));
            } else if ("2".equals(keysCompute.iterator().next())) {
                return CompletableFuture.completedFuture(Map.of("2", "???"));
            }

            throw new IllegalStateException("Should not happen");
        }).toCompletableFuture().join();
        assertEquals(2, valueComputed.size());
        assertEquals(Set.copyOf(keys), valueComputed.keySet());
        assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.getAsync(keys).toCompletableFuture().join();
        assertEquals(valueComputed, cached);
    }

    @Test
    void getWrongKeyWhenCacheFilled() {
        // given
        var key = "1";
        var value = "1";

        // when
        cache.putAsync(key, value).toCompletableFuture().join();

        // then
        final String fromCache = cache.getAsync("2").toCompletableFuture().join();
        assertNull(fromCache);
    }

    @Test
    void getWhenCacheInvalidate() {
        // given
        var key = "1";
        var value = "1";
        cache.putAsync(key, value).toCompletableFuture().join();

        // when
        cache.invalidate(key);

        // then
        final String fromCache = cache.getAsync(key).toCompletableFuture().join();
        assertNull(fromCache);
    }

    @Test
    void getFromCacheWhenCacheInvalidateAll() {
        // given
        var key = "1";
        var value = "1";
        cache.putAsync(key, value).toCompletableFuture().join();

        // when
        cache.invalidateAll();

        // then
        final String fromCache = cache.getAsync(key).toCompletableFuture().join();
        assertNull(fromCache);
    }
}
package ru.tinkoff.kora.cache.redis;

import org.junit.jupiter.api.Test;
import ru.tinkoff.kora.cache.redis.testdata.DummyCache;

import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Blocking-API test suite for {@link DummyCache}; concrete subclasses create
 * the cache (plain / expire-after-write / expire-after-access) in @BeforeEach.
 * Generic type arguments were restored here — the extracted patch had raw
 * {@code List}/{@code Map} declarations.
 */
abstract class AbstractSyncCacheTests extends CacheRunner {

    protected DummyCache cache = null;

    @Test
    void getWhenCacheEmpty() {
        // given
        var key = "1";

        // when
        assertNull(cache.get(key));
    }

    @Test
    void getWhenCacheFilled() {
        // given
        var key = "1";
        var value = "1";

        // when
        cache.put(key, value);

        // then
        final String fromCache = cache.get(key);
        assertEquals(value, fromCache);
    }

    @Test
    void getMultiWhenCacheEmpty() {
        // given
        List<String> keys = List.of("1", "2");

        // when
        Map<String, String> keyToValue = cache.get(keys);
        assertTrue(keyToValue.isEmpty());
    }

    @Test
    void getMultiWhenCacheFilledPartly() {
        // given
        List<String> keys = List.of("1");
        for (String key : keys) {
            assertNull(cache.get(key));
            cache.put(key, key);
        }

        // when
        Map<String, String> keyToValue = cache.get(keys);
        assertEquals(1, keyToValue.size());
        keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v))));
    }

    @Test
    void getMultiWhenCacheFilled() {
        // given
        List<String> keys = List.of("1", "2");
        for (String key : keys) {
            assertNull(cache.get(key));
            cache.put(key, key);
        }

        // when
        Map<String, String> keyToValue = cache.get(keys);
        assertEquals(2, keyToValue.size());
        keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v))));
    }

    @Test
    void computeIfAbsentWhenCacheEmpty() {
        // given

        // when
        assertNull(cache.get("1"));
        final String valueComputed = cache.computeIfAbsent("1", k -> "1");
        assertEquals("1", valueComputed);

        // then
        final String cached = cache.get("1");
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiWhenCacheEmpty() {
        // given
        List<String> keys = List.of("1", "2");
        for (String key : keys) {
            assertNull(cache.get(key));
        }

        // when: both keys are missing, so the mapper is called with both
        final Map<String, String> valueComputed = cache.computeIfAbsent(keys, keysCompute -> {
            if (keysCompute.size() == 2) {
                return Map.of("1", "1", "2", "2");
            }

            throw new IllegalStateException("Should not happen");
        });
        assertEquals(2, valueComputed.size());
        assertEquals(Set.copyOf(keys), valueComputed.keySet());
        assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.get(keys);
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiOneWhenCachePartly() {
        // given
        List<String> keys = List.of("1");
        for (String key : keys) {
            assertNull(cache.get(key));
            cache.put(key, key);
        }

        // when: every requested key is already cached — mapper must not run
        final Map<String, String> valueComputed = cache.computeIfAbsent(keys, keysCompute -> {
            throw new IllegalStateException("Should not happen");
        });
        assertEquals(1, valueComputed.size());
        assertEquals(Set.copyOf(keys), valueComputed.keySet());
        assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.get(keys);
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiAllWhenCachePartly() {
        // given
        List<String> keys = List.of("1");
        for (String key : keys) {
            assertNull(cache.get(key));
            cache.put(key, key);
        }

        // when: only "2" is missing, so the mapper must be invoked with "2" alone
        final Map<String, String> valueComputed = cache.computeIfAbsent(Set.of("1", "2"), keysCompute -> {
            if ("2".equals(keysCompute.iterator().next())) {
                return Map.of("2", "2");
            }

            throw new IllegalStateException("Should not happen");
        });
        assertEquals(2, valueComputed.size());
        assertEquals(Set.of("1", "2"), valueComputed.keySet());
        assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.get(Set.of("1", "2"));
        assertEquals(valueComputed, cached);
    }

    @Test
    void computeIfAbsentMultiWhenCacheFilled() {
        // given
        List<String> keys = List.of("1", "2");
        for (String key : keys) {
            assertNull(cache.get(key));
            cache.put(key, key);
        }

        // when: all keys cached — mapper must not run
        final Map<String, String> valueComputed = cache.computeIfAbsent(keys, keysCompute -> {
            throw new IllegalStateException("Should not happen");
        });
        assertEquals(2, valueComputed.size());
        assertEquals(Set.copyOf(keys), valueComputed.keySet());
        assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values()));

        // then
        final Map<String, String> cached = cache.get(keys);
        assertEquals(valueComputed, cached);
    }

    @Test
    void getWrongKeyWhenCacheFilled() {
        // given
        var key = "1";
        var value = "1";

        // when
        cache.put(key, value);

        // then
        final String fromCache = cache.get("2");
        assertNull(fromCache);
    }

    @Test
    void getWhenCacheInvalidate() {
        // given
        var key = "1";
        var value = "1";
        cache.put(key, value);

        // when
        cache.invalidate(key);

        // then
        final String fromCache = cache.get(key);
        assertNull(fromCache);
    }

    @Test
    void getFromCacheWhenCacheInvalidateAll() {
        // given
        var key = "1";
        var value = "1";
        cache.put(key, value);

        // when
        cache.invalidateAll();

        // then
        final String fromCache = cache.get(key);
        assertNull(fromCache);
    }
}
package ru.tinkoff.kora.cache.redis;

import io.lettuce.core.FlushMode;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInstance;
import ru.tinkoff.kora.test.redis.RedisParams;
import ru.tinkoff.kora.test.redis.RedisTestContainer;

import java.time.Duration;

/**
 * Async suite with a 1-second expire-after-access (read) TTL.
 * Redis is flushed before each test; the cache is built once per class
 * (PER_CLASS lifecycle) on first setup.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@RedisTestContainer
class AsyncCacheExpireReadTests extends AbstractAsyncCacheTests {

    @BeforeEach
    void setup(RedisParams redisParams) throws Exception {
        redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC));
        if (cache == null) {
            cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1));
        }
    }
}

// NOTE(review): the patch places the class below in its own file
// (AsyncCacheExpireWriteTests.java); it is grouped here only because the
// extracted source fused both files into one span.

/**
 * Async suite with a 1-second expire-after-write TTL.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@RedisTestContainer
class AsyncCacheExpireWriteTests extends AbstractAsyncCacheTests {

    @BeforeEach
    void setup(RedisParams redisParams) throws Exception {
        redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC));
        if (cache == null) {
            cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1));
        }
    }
}
package ru.tinkoff.kora.cache.redis;

import io.lettuce.core.FlushMode;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInstance;
import ru.tinkoff.kora.test.redis.RedisParams;
import ru.tinkoff.kora.test.redis.RedisTestContainer;

/**
 * Async suite with no TTL configured.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@RedisTestContainer
class AsyncCacheTests extends AbstractAsyncCacheTests {

    @BeforeEach
    void setup(RedisParams redisParams) throws Exception {
        redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC));
        if (cache == null) {
            cache = createCache(redisParams);
        }
    }
}

// NOTE(review): the patch places the class below in its own file
// (CacheRunner.java); it is grouped here only because the extracted source
// fused both files into one span.

/**
 * Shared fixture for the Redis cache test suites: builds a Lettuce-backed
 * {@link ru.tinkoff.kora.cache.redis.testdata.DummyCache} against the
 * testcontainers Redis instance, wiring mappers/telemetry through the
 * {@link RedisCacheModule} default methods.
 */
abstract class CacheRunner extends Assertions implements RedisCacheModule {

    /**
     * Config stub with fixed key prefix "pref" and the given TTLs
     * (either may be null, meaning "no expiration of that kind").
     */
    public static RedisCacheConfig getConfig(@Nullable Duration expireWrite,
                                             @Nullable Duration expireRead) {
        return new RedisCacheConfig() {

            @Override
            public String keyPrefix() {
                return "pref";
            }

            @Nullable
            @Override
            public Duration expireAfterWrite() {
                return expireWrite;
            }

            @Nullable
            @Override
            public Duration expireAfterAccess() {
                return expireRead;
            }
        };
    }

    /**
     * Builds a Lettuce client pointed at the container URI and, when the
     * client participates in the component lifecycle, initializes it eagerly
     * (tests have no DI graph to do it for them).
     */
    private RedisCacheClient createLettuce(RedisParams redisParams) throws Exception {
        var lettuceClientFactory = lettuceClientFactory();
        var lettuceClientConfig = new LettuceClientConfig() {
            @Override
            public String uri() {
                return redisParams.uri().toString();
            }

            @Override
            public Integer database() {
                return null;
            }

            @Override
            public String user() {
                return null;
            }

            @Override
            public String password() {
                return null;
            }
        };

        var lettuceClient = lettuceRedisClient(lettuceClientFactory, lettuceClientConfig);
        if (lettuceClient instanceof Lifecycle lc) {
            lc.init();
        }
        return lettuceClient;
    }

    private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception {
        var lettuceClient = createLettuce(redisParams);
        // telemetry factories are null — RedisCacheTelemetry degrades to no-op
        return new DummyCache(getConfig(expireWrite, expireRead), lettuceClient, redisCacheTelemetry(null, null),
            stringRedisKeyMapper(), stringRedisValueMapper());
    }

    protected DummyCache createCache(RedisParams redisParams) throws Exception {
        return createDummyCache(redisParams, null, null);
    }

    protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception {
        return createDummyCache(redisParams, expireWrite, null);
    }

    protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception {
        return createDummyCache(redisParams, null, expireRead);
    }
}
package ru.tinkoff.kora.cache.redis;

import io.lettuce.core.FlushMode;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInstance;
import ru.tinkoff.kora.test.redis.RedisParams;
import ru.tinkoff.kora.test.redis.RedisTestContainer;

import java.time.Duration;

/**
 * Sync suite with a 1-second expire-after-access (read) TTL.
 * Redis is flushed before each test; the cache is built once per class
 * (PER_CLASS lifecycle) on first setup.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@RedisTestContainer
class SyncCacheExpireReadTests extends AbstractSyncCacheTests {

    @BeforeEach
    void setup(RedisParams redisParams) throws Exception {
        redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC));
        if (cache == null) {
            cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1));
        }
    }
}

// NOTE(review): the patch places the class below in its own file
// (SyncCacheExpireWriteTests.java); it is grouped here only because the
// extracted source fused both files into one span.

/**
 * Sync suite with a 1-second expire-after-write TTL.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@RedisTestContainer
class SyncCacheExpireWriteTests extends AbstractSyncCacheTests {

    @BeforeEach
    void setup(RedisParams redisParams) throws Exception {
        redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC));
        if (cache == null) {
            cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1));
        }
    }
}
+@RedisTestContainer +class SyncCacheTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java new file mode 100644 index 000000000..4d098b5ff --- /dev/null +++ b/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java @@ -0,0 +1,14 @@ +package ru.tinkoff.kora.cache.redis.testdata; + +import ru.tinkoff.kora.cache.redis.*; + +public final class DummyCache extends AbstractRedisCache { + + public DummyCache(RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheTelemetry telemetry, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + super("dummy", config, redisClient, telemetry, keyMapper, valueMapper); + } +} diff --git a/cache/cache-redis-jedis/build.gradle b/cache/cache-redis-jedis/build.gradle new file mode 100644 index 000000000..44c085705 --- /dev/null +++ b/cache/cache-redis-jedis/build.gradle @@ -0,0 +1,24 @@ +dependencies { + annotationProcessor project(':config:config-annotation-processor') + + api project(":cache:cache-common") + + implementation project(":json:json-common") + implementation project(":config:config-common") + implementation(libs.redis.lettuce) { + exclude group: 'io.projectreactor', module: 'reactor-core' + exclude group: 'io.netty', module: 'netty-common' + exclude group: 'io.netty', module: 'netty-handler' + exclude group: 'io.netty', module: 'netty-transport' + } + implementation libs.reactor.core + implementation libs.netty.common + implementation libs.netty.handlers + implementation libs.netty.transports + implementation libs.apache.pool + + testImplementation 
project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java new file mode 100644 index 000000000..941591ac8 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -0,0 +1,660 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.AsyncCache; + +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import java.util.stream.Collectors; + +public abstract class AbstractRedisCache implements AsyncCache { + + private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); + + private final String name; + private final RedisCacheClient redisClient; + private final RedisCacheTelemetry telemetry; + private final byte[] keyPrefix; + + private final RedisCacheKeyMapper keyMapper; + private final RedisCacheValueMapper valueMapper; + + private final Long expireAfterAccessMillis; + private final Long expireAfterWriteMillis; + + protected AbstractRedisCache(String name, + RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheTelemetry telemetry, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + this.name = name; + this.redisClient = redisClient; + this.telemetry = telemetry; + this.keyMapper = keyMapper; + this.valueMapper = valueMapper; + this.expireAfterAccessMillis = (config.expireAfterAccess() == null) + ? 
null + : config.expireAfterAccess().toMillis(); + this.expireAfterWriteMillis = (config.expireAfterWrite() == null) + ? null + : config.expireAfterWrite().toMillis(); + + if (config.keyPrefix().isEmpty()) { + this.keyPrefix = null; + } else { + var prefixRaw = config.keyPrefix().getBytes(StandardCharsets.UTF_8); + this.keyPrefix = new byte[prefixRaw.length + RedisCacheKeyMapper.DELIMITER.length]; + System.arraycopy(prefixRaw, 0, this.keyPrefix, 0, prefixRaw.length); + System.arraycopy(RedisCacheKeyMapper.DELIMITER, 0, this.keyPrefix, prefixRaw.length, RedisCacheKeyMapper.DELIMITER.length); + } + } + + @Override + public V get(@Nonnull K key) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.create("GET", name); + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? redisClient.get(keyAsBytes).toCompletableFuture().join() + : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map get(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("GET_MANY", name); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes).toCompletableFuture().join() + : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return Collections.emptyMap(); + } catch (Exception e) { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + } + } + + @Nonnull + @Override + public V put(@Nonnull K key, @Nonnull V value) { + if (key == null || value == null) { + return null; + } + + var telemetryContext = telemetry.create("PUT", name); + + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + telemetryContext.recordSuccess(); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return value; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return value; + } + } + + @Nonnull + @Override + public Map put(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("PUT_MANY", name); + + try { + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + 
redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + + telemetryContext.recordSuccess(); + return keyAndValues; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return keyAndValues; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return keyAndValues; + } + } + + @Override + public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + + V fromCache = null; + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? redisClient.get(keyAsBytes).toCompletableFuture().join() + : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + fromCache = valueMapper.read(jsonAsBytes); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache != null) { + telemetryContext.recordSuccess(); + return fromCache; + } + + try { + var value = mappingFunction.apply(key); + if (value != null) { + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function, Map> mappingFunction) { + if (keys == null || 
keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + + final Map fromCache = new HashMap<>(); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? redisClient.mget(keysByBytes).toCompletableFuture().join() + : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache.size() == keys.size()) { + telemetryContext.recordSuccess(); + return fromCache; + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + try { + var values = mappingFunction.apply(missingKeys); + if (!values.isEmpty()) { + try { + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(); + fromCache.putAll(values); + return fromCache; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return fromCache; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return fromCache; + } + } + + @Override + public 
void invalidate(@Nonnull K key) { + if (key != null) { + final byte[] keyAsBytes = mapKey(key); + var telemetryContext = telemetry.create("INVALIDATE", name); + + try { + redisClient.del(keyAsBytes).toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidate(@Nonnull Collection keys) { + if (keys != null && !keys.isEmpty()) { + var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + + try { + final byte[][] keysAsBytes = keys.stream() + .map(this::mapKey) + .toArray(byte[][]::new); + + redisClient.del(keysAsBytes).toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidateAll() { + var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + + try { + redisClient.flushAll().toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + + @Nonnull + @Override + public CompletionStage getAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.create("GET", name); + final byte[] keyAsBytes = mapKey(key); + + CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(jsonAsBytes -> { + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> getAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("GET_MANY", name); + var keysByKeyByte = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? redisClient.mget(keysAsBytes) + : redisClient.getex(keysAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valuesByKeys -> { + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyByte.entrySet()) { + valuesByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { + if (key == null) { + return CompletableFuture.completedFuture(value); + } + + var telemetryContext = telemetry.create("PUT", name); + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + final CompletionStage responseCompletionStage = (expireAfterWriteMillis == null) + ? 
redisClient.set(keyAsBytes, valueAsBytes) + : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return value; + }); + } + + @Nonnull + @Override + public CompletionStage> putAsync(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("PUT_MANY", name); + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var responseCompletionStage = (expireAfterWriteMillis == null) + ? redisClient.mset(keyAndValuesAsBytes) + : redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(); + return keyAndValues; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return keyAndValues; + }); + } + + @Override + public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function> mappingFunction) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + final byte[] keyAsBytes = mapKey(key); + final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueMapper::read) + .thenCompose(fromCache -> { + if (fromCache != null) { + return CompletableFuture.completedFuture(fromCache); + } + + return mappingFunction.apply(key) + .thenCompose(value -> { + if (value == null) { + return CompletableFuture.completedFuture(null); + } + + final byte[] valueAsBytes = valueMapper.write(value); + var putFutureResponse = (expireAfterWriteMillis == null) + ? redisClient.set(keyAsBytes, valueAsBytes) + : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return putFutureResponse + .thenApply(v -> { + telemetryContext.recordSuccess(); + return value; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> computeIfAbsentAsync(@Nonnull Collection keys, @Nonnull Function, CompletionStage>> mappingFunction) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueByKeys -> { + final Map fromCache = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + + return fromCache; + }) + .thenCompose(fromCache -> { + if (fromCache.size() == keys.size()) { + return CompletableFuture.completedFuture(fromCache); + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + return mappingFunction.apply(missingKeys) + .thenCompose(values -> { + if (values.isEmpty()) { + return CompletableFuture.completedFuture(fromCache); + } + + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var putCompletionStage = (expireAfterWriteMillis == null) + ? 
redisClient.mset(keyAndValuesAsBytes) + : redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); + + return putCompletionStage + .thenApply(v -> { + telemetryContext.recordSuccess(); + fromCache.putAll(values); + return fromCache; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.create("INVALIDATE", name); + final byte[] keyAsBytes = mapKey(key); + return redisClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Override + public CompletionStage invalidateAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + final byte[][] keyAsBytes = keys.stream() + .distinct() + .map(this::mapKey) + .toArray(byte[][]::new); + + return redisClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAllAsync() { + var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + return redisClient.flushAll() + .thenApply(r -> { + telemetryContext.recordSuccess(); + return r; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + private byte[] mapKey(K key) { + final byte[] suffixAsBytes = keyMapper.apply(key); + if (this.keyPrefix == null) { + return suffixAsBytes; + } else { + var keyAsBytes = new byte[keyPrefix.length + suffixAsBytes.length]; + System.arraycopy(this.keyPrefix, 0, keyAsBytes, 0, 
this.keyPrefix.length); + System.arraycopy(suffixAsBytes, 0, keyAsBytes, this.keyPrefix.length, suffixAsBytes.length); + + return keyAsBytes; + } + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java new file mode 100644 index 000000000..75a932976 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.AsyncCache; + +public interface RedisCache extends AsyncCache { + +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java new file mode 100644 index 000000000..a5b995bd6 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -0,0 +1,42 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; + +import java.util.Map; +import java.util.concurrent.CompletionStage; + +public interface RedisCacheClient { + + @Nonnull + CompletionStage get(byte[] key); + + @Nonnull + CompletionStage> mget(byte[][] keys); + + @Nonnull + CompletionStage getex(byte[] key, long expireAfterMillis); + + @Nonnull + CompletionStage> getex(byte[][] keys, long expireAfterMillis); + + @Nonnull + CompletionStage set(byte[] key, byte[] value); + + @Nonnull + CompletionStage mset(@Nonnull Map keyAndValue); + + @Nonnull + CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); + + @Nonnull + CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + + @Nonnull + CompletionStage del(byte[] key); + + @Nonnull + CompletionStage del(byte[][] keys); + + @Nonnull + CompletionStage flushAll(); +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java 
b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java new file mode 100644 index 000000000..120bd511a --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -0,0 +1,24 @@ +package ru.tinkoff.kora.cache.redis; + + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; + +@ConfigValueExtractor +public interface RedisCacheConfig { + + /** + * Key prefix allow to avoid key collision in single Redis database between multiple caches + * + * @return Redis Cache key prefix, if empty string means that prefix will NOT be applied + */ + String keyPrefix(); + + @Nullable + Duration expireAfterWrite(); + + @Nullable + Duration expireAfterAccess(); +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java new file mode 100644 index 000000000..f6edc71ad --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.CacheKeyMapper; + +import java.nio.charset.StandardCharsets; +import java.util.function.Function; + +/** + * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. 
+ */ +public interface RedisCacheKeyMapper extends Function { + + /** + * Is used to delimiter composite key such as {@link CacheKeyMapper} + */ + byte[] DELIMITER = ":".getBytes(StandardCharsets.UTF_8); +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java new file mode 100644 index 000000000..81e48e005 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -0,0 +1,175 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.json.common.JsonCommonModule; +import ru.tinkoff.kora.json.common.JsonReader; +import ru.tinkoff.kora.json.common.JsonWriter; +import ru.tinkoff.kora.json.common.annotation.Json; + +import java.io.IOException; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.util.UUID; + +public interface RedisCacheMapperModule extends JsonCommonModule { + + @DefaultComponent + default RedisCacheTelemetry redisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + return new RedisCacheTelemetry(metrics, tracer); + } + + @Json + @DefaultComponent + default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(V value) { + try { + return jsonWriter.toByteArray(value); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage()); + } + } + + @Override + public V read(byte[] serializedValue) { + try { + return (serializedValue == null) ? 
null : jsonReader.read(serializedValue); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage()); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper stringRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(String value) { + return value.getBytes(StandardCharsets.UTF_8); + } + + @Override + public String read(byte[] serializedValue) { + return (serializedValue == null) ? null : new String(serializedValue, StandardCharsets.UTF_8); + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bytesRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(byte[] value) { + return value; + } + + @Override + public byte[] read(byte[] serializedValue) { + return serializedValue; + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper intRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Integer value) { + return keyMapper.apply(value); + } + + @Override + public Integer read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Integer.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper longRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Long value) { + return keyMapper.apply(value); + } + + @Override + public Long read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Long.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bigIntRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(BigInteger value) { + return keyMapper.apply(value); + } + + @Override + public BigInteger 
read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return new BigInteger(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper uuidRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + + @Override + public byte[] write(UUID value) { + return keyMapper.apply(value); + } + + @Override + public UUID read(byte[] serializedValue) { + return UUID.fromString(new String(serializedValue, StandardCharsets.UTF_8)); + } + }; + } + + @DefaultComponent + default RedisCacheKeyMapper intRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper longRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper bigIntRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper uuidRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper stringRedisKeyMapper() { + return c -> c.getBytes(StandardCharsets.UTF_8); + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java new file mode 100644 index 000000000..fe07914b0 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; + +public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { + +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java 
b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java new file mode 100644 index 000000000..af84dfdd5 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java @@ -0,0 +1,129 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; + +public final class RedisCacheTelemetry { + + private static final String ORIGIN = "redis"; + + record Operation(@Nonnull String name, @Nonnull String cacheName) implements CacheTelemetryOperation { + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + } + + interface TelemetryContext { + void recordSuccess(); + + void recordSuccess(@Nullable Object valueFromCache); + + void recordFailure(@Nullable Throwable throwable); + } + + private static final Logger logger = LoggerFactory.getLogger(RedisCacheTelemetry.class); + + private static final TelemetryContext STUB_CONTEXT = new StubCacheTelemetry(); + + @Nullable + private final CacheMetrics metrics; + @Nullable + private final CacheTracer tracer; + private final boolean isStubTelemetry; + + RedisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + this.metrics = metrics; + this.tracer = tracer; + this.isStubTelemetry = metrics == null && tracer == null; + } + + record StubCacheTelemetry() implements TelemetryContext { + + @Override + public void recordSuccess() {} + + @Override + public void recordSuccess(@Nullable Object valueFromCache) {} + + @Override + public void recordFailure(@Nullable Throwable throwable) {} + } + + class DefaultCacheTelemetryContext implements TelemetryContext { + + private final Operation operation; + + private CacheTracer.CacheSpan span; 
+ private final long startedInNanos = System.nanoTime(); + + DefaultCacheTelemetryContext(Operation operation) { + logger.trace("Operation '{}' for cache '{}' started", operation.name(), operation.cacheName()); + if (tracer != null) { + span = tracer.trace(operation); + } + this.operation = operation; + } + + @Override + public void recordSuccess() { + recordSuccess(null); + } + + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordSuccess(operation, durationInNanos, valueFromCache); + } + if (span != null) { + span.recordSuccess(); + } + + if (operation.name().startsWith("GET")) { + if (valueFromCache == null) { + logger.trace("Operation '{}' for cache '{}' didn't retried value", operation.name(), operation.cacheName()); + } else { + logger.debug("Operation '{}' for cache '{}' retried value", operation.name(), operation.cacheName()); + } + } else { + logger.trace("Operation '{}' for cache '{}' completed", operation.name(), operation.cacheName()); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordFailure(operation, durationInNanos, throwable); + } + if (span != null) { + span.recordFailure(throwable); + } + + if (throwable != null) { + logger.warn("Operation '{}' failed for cache '{}' with message: {}", + operation.name(), operation.cacheName(), throwable.getMessage()); + } else { + logger.warn("Operation '{}' failed for cache '{}'", + operation.name(), operation.cacheName()); + } + } + } + + @Nonnull + TelemetryContext create(@Nonnull String operationName, @Nonnull String cacheName) { + if (isStubTelemetry) { + return STUB_CONTEXT; + } + + return new DefaultCacheTelemetryContext(new Operation(operationName, cacheName)); + } +} diff --git 
a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java new file mode 100644 index 000000000..cf2037f42 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -0,0 +1,19 @@ +package ru.tinkoff.kora.cache.redis; + +/** + * Converts cache value into serializer value to store in cache. + */ +public interface RedisCacheValueMapper { + + /** + * @param value to serialize + * @return value serialized + */ + byte[] write(V value); + + /** + * @param serializedValue to deserialize + * @return value deserialized + */ + V read(byte[] serializedValue); +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java new file mode 100644 index 000000000..6fb2ee3e9 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java @@ -0,0 +1,43 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.RedisURI; +import io.lettuce.core.SocketOptions; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; + +@ConfigValueExtractor +public interface LettuceClientConfig { + + String uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() { + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(RedisURI.DEFAULT_TIMEOUT); + } + + enum Protocol { + + /** Redis 2 to Redis 5 */ + RESP2, + /** Redis 6+ */ + RESP3 + } +} diff --git 
a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java new file mode 100644 index 000000000..fb29c05e2 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java @@ -0,0 +1,133 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.cluster.ClusterClientOptions; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nonnull; + +import java.net.URI; +import java.time.Duration; +import java.util.List; + +public final class LettuceClientFactory { + + @Nonnull + public AbstractRedisClient build(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + + final List mappedRedisUris = buildRedisURI(config); + + return (mappedRedisUris.size() == 1) + ? 
buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) + : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + public RedisClusterClient buildRedisClusterClient(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + public RedisClient buildRedisClient(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClusterClient client = RedisClusterClient.create(redisURIs); + client.setOptions(ClusterClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) 
+ .build()); + + return client; + } + + @Nonnull + private static RedisClient buildRedisClientInternal(RedisURI redisURI, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClient client = RedisClient.create(redisURI); + client.setOptions(ClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) + .build()); + + return client; + } + + static List buildRedisURI(LettuceClientConfig config) { + final String uri = config.uri(); + final Integer database = config.database(); + final String user = config.user(); + final String password = config.password(); + + final List redisURIS = RedisClusterURIUtil.toRedisURIs(URI.create(uri)); + return redisURIS.stream() + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(config.commandTimeout()) + .build(); + }) + .toList(); + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java new file mode 100644 index 000000000..adda5d4f8 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java @@ -0,0 +1,190 
@@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import io.lettuce.core.GetExArgs; +import io.lettuce.core.KeyValue; +import io.lettuce.core.Value; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceClusterRedisCacheClient implements RedisCacheClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceClusterRedisCacheClient.class); + + private final RedisClusterClient redisClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> pool; + private StatefulRedisClusterConnection connection; + + // always use async cause sync uses JDK Proxy wrapped async impl + private RedisAdvancedClusterAsyncCommands commands; + + LettuceClusterRedisCacheClient(RedisClusterClient redisClient) { + this.redisClient = redisClient; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return commands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return commands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + 
.collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return commands.set(key, value).thenApply(r -> true); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return commands.mset(keyAndValue).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, 
entry.getValue()) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_v -> true); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return commands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return commands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); + this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); + this.commands = this.connection.async(); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.pool.close(); + this.connection.close(); + this.redisClient.shutdown(); + + logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java new file mode 100644 index 
000000000..25cb53904 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java @@ -0,0 +1,36 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.ConfigValue; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +import java.time.Duration; + +public interface LettuceModule { + + default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("lettuce"); + return extractor.extract(value); + } + + default LettuceClientFactory lettuceClientFactory() { + return new LettuceClientFactory(); + } + + @DefaultComponent + default RedisCacheClient lettuceRedisClient(LettuceClientFactory factory, LettuceClientConfig config) { + var redisClient = factory.build(config); + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + return new LettuceRedisCacheClient(rc, config); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceClusterRedisCacheClient(rcc); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java new file mode 100644 index 000000000..cdb95be7c --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java @@ -0,0 +1,190 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.api.StatefulRedisConnection; 
+import io.lettuce.core.api.async.RedisAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceRedisCacheClient implements RedisCacheClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceRedisCacheClient.class); + + private final RedisURI redisURI; + private final RedisClient redisClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> pool; + private StatefulRedisConnection connection; + + // always use async cause sync uses JDK Proxy wrapped async impl + private RedisAsyncCommands commands; + + LettuceRedisCacheClient(RedisClient redisClient, LettuceClientConfig config) { + this.redisClient = redisClient; + final List redisURIs = LettuceClientFactory.buildRedisURI(config); + this.redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return commands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return commands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return commands.set(key, value).thenApply(r -> true); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return commands.mset(keyAndValue).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) 
{ + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) + .thenApply(v -> true) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) + .thenApply(_void -> true) + .whenComplete((s, throwable) -> pool.release(connection)); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return commands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return commands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); + this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); + this.commands = this.connection.async(); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.pool.close(); + this.connection.close(); + this.redisClient.shutdown(); + + 
logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java new file mode 100644 index 000000000..209ce6ecb --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java @@ -0,0 +1,252 @@ +package ru.tinkoff.kora.cache.redis; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; + +abstract class AbstractAsyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : 
keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.getAsync("1").toCompletableFuture().join()); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.getAsync("1").toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = 
cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(Set.of("1", "2"), keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(Set.of("1", "2")).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for 
(String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "???", "2", "???")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "???")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "???")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync("2").toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java 
b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java new file mode 100644 index 000000000..f5ec3aa9d --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java @@ -0,0 +1,229 @@ +package ru.tinkoff.kora.cache.redis; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +abstract class AbstractSyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.get(key)); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get(key); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.get(keys); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.get("1")); + final String valueComputed = cache.computeIfAbsent("1", k -> 
"1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.get("1"); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return Map.of("1", "1", "2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(Set.of("1", "2"), keysCompute -> { + if ("2".equals(keysCompute.iterator().next())) { + return Map.of("2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map 
cached = cache.get(Set.of("1", "2")); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get("2"); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java new file mode 100644 index 000000000..3f7764c9a --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import 
ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireReadTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java new file mode 100644 index 000000000..faa5acb1a --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireWriteTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java new file mode 100644 index 000000000..6f95bee4a --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; 
+import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java new file mode 100644 index 000000000..3509176e2 --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java @@ -0,0 +1,85 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nullable; +import org.junit.jupiter.api.Assertions; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; +import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.test.redis.RedisParams; + +import java.time.Duration; + +abstract class CacheRunner extends Assertions implements RedisCacheModule { + + public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, + @Nullable Duration expireRead) { + return new RedisCacheConfig() { + + @Override + public String keyPrefix() { + return "pref"; + } + + @Nullable + @Override + public Duration expireAfterWrite() { + return expireWrite; + } + + @Nullable + @Override + public Duration expireAfterAccess() { + return expireRead; + } + }; + } + + private RedisCacheClient createLettuce(RedisParams redisParams) throws Exception { + var lettuceClientFactory = lettuceClientFactory(); + var lettuceClientConfig = new LettuceClientConfig() { + @Override + public String uri() { + return redisParams.uri().toString(); + } + + @Override + public Integer database() { 
+ return null; + } + + @Override + public String user() { + return null; + } + + @Override + public String password() { + return null; + } + }; + + var lettuceClient = lettuceRedisClient(lettuceClientFactory, lettuceClientConfig); + if (lettuceClient instanceof Lifecycle lc) { + lc.init(); + } + return lettuceClient; + } + + private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { + var lettuceClient = createLettuce(redisParams); + return new DummyCache(getConfig(expireWrite, expireRead), lettuceClient, redisCacheTelemetry(null, null), + stringRedisKeyMapper(), stringRedisValueMapper()); + } + + protected DummyCache createCache(RedisParams redisParams) throws Exception { + return createDummyCache(redisParams, null, null); + } + + protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { + return createDummyCache(redisParams, expireWrite, null); + } + + protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception { + return createDummyCache(redisParams, null, expireRead); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java new file mode 100644 index 000000000..43008e371 --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireReadTests extends AbstractSyncCacheTests { + + @BeforeEach + void 
setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java new file mode 100644 index 000000000..72feb88f6 --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireWriteTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java new file mode 100644 index 000000000..bb8818bbc --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheTests extends 
AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java new file mode 100644 index 000000000..4d098b5ff --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java @@ -0,0 +1,14 @@ +package ru.tinkoff.kora.cache.redis.testdata; + +import ru.tinkoff.kora.cache.redis.*; + +public final class DummyCache extends AbstractRedisCache { + + public DummyCache(RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheTelemetry telemetry, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + super("dummy", config, redisClient, telemetry, keyMapper, valueMapper); + } +} diff --git a/cache/cache-redis-lettuce/build.gradle b/cache/cache-redis-lettuce/build.gradle new file mode 100644 index 000000000..44c085705 --- /dev/null +++ b/cache/cache-redis-lettuce/build.gradle @@ -0,0 +1,24 @@ +dependencies { + annotationProcessor project(':config:config-annotation-processor') + + api project(":cache:cache-common") + + implementation project(":json:json-common") + implementation project(":config:config-common") + implementation(libs.redis.lettuce) { + exclude group: 'io.projectreactor', module: 'reactor-core' + exclude group: 'io.netty', module: 'netty-common' + exclude group: 'io.netty', module: 'netty-handler' + exclude group: 'io.netty', module: 'netty-transport' + } + implementation libs.reactor.core + implementation libs.netty.common + implementation libs.netty.handlers + implementation libs.netty.transports + implementation libs.apache.pool + + testImplementation project(":internal:test-logging") + testImplementation 
project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java new file mode 100644 index 000000000..941591ac8 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -0,0 +1,660 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.AsyncCache; + +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import java.util.stream.Collectors; + +public abstract class AbstractRedisCache implements AsyncCache { + + private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); + + private final String name; + private final RedisCacheClient redisClient; + private final RedisCacheTelemetry telemetry; + private final byte[] keyPrefix; + + private final RedisCacheKeyMapper keyMapper; + private final RedisCacheValueMapper valueMapper; + + private final Long expireAfterAccessMillis; + private final Long expireAfterWriteMillis; + + protected AbstractRedisCache(String name, + RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheTelemetry telemetry, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + this.name = name; + this.redisClient = redisClient; + this.telemetry = telemetry; + this.keyMapper = keyMapper; + this.valueMapper = valueMapper; + this.expireAfterAccessMillis = (config.expireAfterAccess() == null) + ? null + : config.expireAfterAccess().toMillis(); + this.expireAfterWriteMillis = (config.expireAfterWrite() == null) + ? 
null + : config.expireAfterWrite().toMillis(); + + if (config.keyPrefix().isEmpty()) { + this.keyPrefix = null; + } else { + var prefixRaw = config.keyPrefix().getBytes(StandardCharsets.UTF_8); + this.keyPrefix = new byte[prefixRaw.length + RedisCacheKeyMapper.DELIMITER.length]; + System.arraycopy(prefixRaw, 0, this.keyPrefix, 0, prefixRaw.length); + System.arraycopy(RedisCacheKeyMapper.DELIMITER, 0, this.keyPrefix, prefixRaw.length, RedisCacheKeyMapper.DELIMITER.length); + } + } + + @Override + public V get(@Nonnull K key) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.create("GET", name); + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? redisClient.get(keyAsBytes).toCompletableFuture().join() + : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map get(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("GET_MANY", name); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes).toCompletableFuture().join() + : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return Collections.emptyMap(); + } catch (Exception e) { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + } + } + + @Nonnull + @Override + public V put(@Nonnull K key, @Nonnull V value) { + if (key == null || value == null) { + return null; + } + + var telemetryContext = telemetry.create("PUT", name); + + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + telemetryContext.recordSuccess(); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return value; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return value; + } + } + + @Nonnull + @Override + public Map put(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("PUT_MANY", name); + + try { + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + 
redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + + telemetryContext.recordSuccess(); + return keyAndValues; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return keyAndValues; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return keyAndValues; + } + } + + @Override + public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + + V fromCache = null; + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? redisClient.get(keyAsBytes).toCompletableFuture().join() + : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + fromCache = valueMapper.read(jsonAsBytes); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache != null) { + telemetryContext.recordSuccess(); + return fromCache; + } + + try { + var value = mappingFunction.apply(key); + if (value != null) { + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function, Map> mappingFunction) { + if (keys == null || 
keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + + final Map fromCache = new HashMap<>(); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? redisClient.mget(keysByBytes).toCompletableFuture().join() + : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache.size() == keys.size()) { + telemetryContext.recordSuccess(); + return fromCache; + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + try { + var values = mappingFunction.apply(missingKeys); + if (!values.isEmpty()) { + try { + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(); + fromCache.putAll(values); + return fromCache; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return fromCache; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return fromCache; + } + } + + @Override + public 
void invalidate(@Nonnull K key) { + if (key != null) { + final byte[] keyAsBytes = mapKey(key); + var telemetryContext = telemetry.create("INVALIDATE", name); + + try { + redisClient.del(keyAsBytes).toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidate(@Nonnull Collection keys) { + if (keys != null && !keys.isEmpty()) { + var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + + try { + final byte[][] keysAsBytes = keys.stream() + .map(this::mapKey) + .toArray(byte[][]::new); + + redisClient.del(keysAsBytes).toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidateAll() { + var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + + try { + redisClient.flushAll().toCompletableFuture().join(); + telemetryContext.recordSuccess(); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + + @Nonnull + @Override + public CompletionStage getAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.create("GET", name); + final byte[] keyAsBytes = mapKey(key); + + CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(jsonAsBytes -> { + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> getAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("GET_MANY", name); + var keysByKeyByte = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? redisClient.mget(keysAsBytes) + : redisClient.getex(keysAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valuesByKeys -> { + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyByte.entrySet()) { + valuesByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { + if (key == null) { + return CompletableFuture.completedFuture(value); + } + + var telemetryContext = telemetry.create("PUT", name); + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + final CompletionStage responseCompletionStage = (expireAfterWriteMillis == null) + ? 
redisClient.set(keyAsBytes, valueAsBytes) + : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return value; + }); + } + + @Nonnull + @Override + public CompletionStage> putAsync(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("PUT_MANY", name); + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var responseCompletionStage = (expireAfterWriteMillis == null) + ? redisClient.mset(keyAndValuesAsBytes) + : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(); + return keyAndValues; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return keyAndValues; + }); + } + + @Override + public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function> mappingFunction) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + final byte[] keyAsBytes = mapKey(key); + final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueMapper::read) + .thenCompose(fromCache -> { + if (fromCache != null) { + return CompletableFuture.completedFuture(fromCache); + } + + return mappingFunction.apply(key) + .thenCompose(value -> { + if (value == null) { + return CompletableFuture.completedFuture(null); + } + + final byte[] valueAsBytes = valueMapper.write(value); + var putFutureResponse = (expireAfterWriteMillis == null) + ? redisClient.set(keyAsBytes, valueAsBytes) + : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return putFutureResponse + .thenApply(v -> { + telemetryContext.recordSuccess(); + return value; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> computeIfAbsentAsync(@Nonnull Collection keys, @Nonnull Function, CompletionStage>> mappingFunction) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueByKeys -> { + final Map fromCache = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + + return fromCache; + }) + .thenCompose(fromCache -> { + if (fromCache.size() == keys.size()) { + return CompletableFuture.completedFuture(fromCache); + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + return mappingFunction.apply(missingKeys) + .thenCompose(values -> { + if (values.isEmpty()) { + return CompletableFuture.completedFuture(fromCache); + } + + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var putCompletionStage = (expireAfterAccessMillis == null) + ? 
redisClient.mset(keyAndValuesAsBytes) + : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); + + return putCompletionStage + .thenApply(v -> { + telemetryContext.recordSuccess(); + fromCache.putAll(values); + return fromCache; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.create("INVALIDATE", name); + final byte[] keyAsBytes = mapKey(key); + return redisClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Override + public CompletionStage invalidateAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + final byte[][] keyAsBytes = keys.stream() + .distinct() + .map(this::mapKey) + .toArray(byte[][]::new); + + return redisClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAllAsync() { + var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + return redisClient.flushAll() + .thenApply(r -> { + telemetryContext.recordSuccess(); + return r; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + private byte[] mapKey(K key) { + final byte[] suffixAsBytes = keyMapper.apply(key); + if (this.keyPrefix == null) { + return suffixAsBytes; + } else { + var keyAsBytes = new byte[keyPrefix.length + suffixAsBytes.length]; + System.arraycopy(this.keyPrefix, 0, keyAsBytes, 0, 
this.keyPrefix.length); + System.arraycopy(suffixAsBytes, 0, keyAsBytes, this.keyPrefix.length, suffixAsBytes.length); + + return keyAsBytes; + } + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java new file mode 100644 index 000000000..75a932976 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.AsyncCache; + +public interface RedisCache extends AsyncCache { + +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java new file mode 100644 index 000000000..a5b995bd6 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -0,0 +1,42 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; + +import java.util.Map; +import java.util.concurrent.CompletionStage; + +public interface RedisCacheClient { + + @Nonnull + CompletionStage get(byte[] key); + + @Nonnull + CompletionStage> mget(byte[][] keys); + + @Nonnull + CompletionStage getex(byte[] key, long expireAfterMillis); + + @Nonnull + CompletionStage> getex(byte[][] keys, long expireAfterMillis); + + @Nonnull + CompletionStage set(byte[] key, byte[] value); + + @Nonnull + CompletionStage mset(@Nonnull Map keyAndValue); + + @Nonnull + CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); + + @Nonnull + CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + + @Nonnull + CompletionStage del(byte[] key); + + @Nonnull + CompletionStage del(byte[][] keys); + + @Nonnull + CompletionStage flushAll(); +} diff --git 
a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java new file mode 100644 index 000000000..120bd511a --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -0,0 +1,24 @@ +package ru.tinkoff.kora.cache.redis; + + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; + +@ConfigValueExtractor +public interface RedisCacheConfig { + + /** + * Key prefix allow to avoid key collision in single Redis database between multiple caches + * + * @return Redis Cache key prefix, if empty string means that prefix will NOT be applied + */ + String keyPrefix(); + + @Nullable + Duration expireAfterWrite(); + + @Nullable + Duration expireAfterAccess(); +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java new file mode 100644 index 000000000..f6edc71ad --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.CacheKeyMapper; + +import java.nio.charset.StandardCharsets; +import java.util.function.Function; + +/** + * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. 
+ */ +public interface RedisCacheKeyMapper extends Function { + + /** + * Is used to delimiter composite key such as {@link CacheKeyMapper} + */ + byte[] DELIMITER = ":".getBytes(StandardCharsets.UTF_8); +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java new file mode 100644 index 000000000..81e48e005 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -0,0 +1,175 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.json.common.JsonCommonModule; +import ru.tinkoff.kora.json.common.JsonReader; +import ru.tinkoff.kora.json.common.JsonWriter; +import ru.tinkoff.kora.json.common.annotation.Json; + +import java.io.IOException; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.util.UUID; + +public interface RedisCacheMapperModule extends JsonCommonModule { + + @DefaultComponent + default RedisCacheTelemetry redisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + return new RedisCacheTelemetry(metrics, tracer); + } + + @Json + @DefaultComponent + default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(V value) { + try { + return jsonWriter.toByteArray(value); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage()); + } + } + + @Override + public V read(byte[] serializedValue) { + try { + return (serializedValue == null) ? 
null : jsonReader.read(serializedValue); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage()); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper stringRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(String value) { + return value.getBytes(StandardCharsets.UTF_8); + } + + @Override + public String read(byte[] serializedValue) { + return (serializedValue == null) ? null : new String(serializedValue, StandardCharsets.UTF_8); + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bytesRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(byte[] value) { + return value; + } + + @Override + public byte[] read(byte[] serializedValue) { + return serializedValue; + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper intRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Integer value) { + return keyMapper.apply(value); + } + + @Override + public Integer read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Integer.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper longRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Long value) { + return keyMapper.apply(value); + } + + @Override + public Long read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Long.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bigIntRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(BigInteger value) { + return keyMapper.apply(value); + } + + @Override + public BigInteger 
read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return new BigInteger(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper uuidRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + + @Override + public byte[] write(UUID value) { + return keyMapper.apply(value); + } + + @Override + public UUID read(byte[] serializedValue) { + return UUID.fromString(new String(serializedValue, StandardCharsets.UTF_8)); + } + }; + } + + @DefaultComponent + default RedisCacheKeyMapper intRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper longRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper bigIntRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper uuidRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper stringRedisKeyMapper() { + return c -> c.getBytes(StandardCharsets.UTF_8); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java new file mode 100644 index 000000000..fe07914b0 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; + +public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { + +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java 
b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java new file mode 100644 index 000000000..af84dfdd5 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java @@ -0,0 +1,129 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; + +public final class RedisCacheTelemetry { + + private static final String ORIGIN = "redis"; + + record Operation(@Nonnull String name, @Nonnull String cacheName) implements CacheTelemetryOperation { + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + } + + interface TelemetryContext { + void recordSuccess(); + + void recordSuccess(@Nullable Object valueFromCache); + + void recordFailure(@Nullable Throwable throwable); + } + + private static final Logger logger = LoggerFactory.getLogger(RedisCacheTelemetry.class); + + private static final TelemetryContext STUB_CONTEXT = new StubCacheTelemetry(); + + @Nullable + private final CacheMetrics metrics; + @Nullable + private final CacheTracer tracer; + private final boolean isStubTelemetry; + + RedisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + this.metrics = metrics; + this.tracer = tracer; + this.isStubTelemetry = metrics == null && tracer == null; + } + + record StubCacheTelemetry() implements TelemetryContext { + + @Override + public void recordSuccess() {} + + @Override + public void recordSuccess(@Nullable Object valueFromCache) {} + + @Override + public void recordFailure(@Nullable Throwable throwable) {} + } + + class DefaultCacheTelemetryContext implements TelemetryContext { + + private final Operation operation; + + private CacheTracer.CacheSpan 
span; + private final long startedInNanos = System.nanoTime(); + + DefaultCacheTelemetryContext(Operation operation) { + logger.trace("Operation '{}' for cache '{}' started", operation.name(), operation.cacheName()); + if (tracer != null) { + span = tracer.trace(operation); + } + this.operation = operation; + } + + @Override + public void recordSuccess() { + recordSuccess(null); + } + + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordSuccess(operation, durationInNanos, valueFromCache); + } + if (span != null) { + span.recordSuccess(); + } + + if (operation.name().startsWith("GET")) { + if (valueFromCache == null) { + logger.trace("Operation '{}' for cache '{}' didn't retried value", operation.name(), operation.cacheName()); + } else { + logger.debug("Operation '{}' for cache '{}' retried value", operation.name(), operation.cacheName()); + } + } else { + logger.trace("Operation '{}' for cache '{}' completed", operation.name(), operation.cacheName()); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordFailure(operation, durationInNanos, throwable); + } + if (span != null) { + span.recordFailure(throwable); + } + + if (throwable != null) { + logger.warn("Operation '{}' failed for cache '{}' with message: {}", + operation.name(), operation.cacheName(), throwable.getMessage()); + } else { + logger.warn("Operation '{}' failed for cache '{}'", + operation.name(), operation.cacheName()); + } + } + } + + @Nonnull + TelemetryContext create(@Nonnull String operationName, @Nonnull String cacheName) { + if (isStubTelemetry) { + return STUB_CONTEXT; + } + + return new DefaultCacheTelemetryContext(new Operation(operationName, cacheName)); + } +} diff --git 
a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java new file mode 100644 index 000000000..cf2037f42 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -0,0 +1,19 @@ +package ru.tinkoff.kora.cache.redis; + +/** + * Converts cache value into serializer value to store in cache. + */ +public interface RedisCacheValueMapper { + + /** + * @param value to serialize + * @return value serialized + */ + byte[] write(V value); + + /** + * @param serializedValue to deserialize + * @return value deserialized + */ + V read(byte[] serializedValue); +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java new file mode 100644 index 000000000..6fb2ee3e9 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java @@ -0,0 +1,43 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.RedisURI; +import io.lettuce.core.SocketOptions; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; + +@ConfigValueExtractor +public interface LettuceClientConfig { + + String uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() { + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(RedisURI.DEFAULT_TIMEOUT); + } + + enum Protocol { + + /** Redis 2 to Redis 5 */ + RESP2, + /** Redis 6+ */ + RESP3 + } +} diff --git 
a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java new file mode 100644 index 000000000..fb29c05e2 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java @@ -0,0 +1,133 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.cluster.ClusterClientOptions; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nonnull; + +import java.net.URI; +import java.time.Duration; +import java.util.List; + +public final class LettuceClientFactory { + + @Nonnull + public AbstractRedisClient build(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + + final List mappedRedisUris = buildRedisURI(config); + + return (mappedRedisUris.size() == 1) + ? 
buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) + : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + public RedisClusterClient buildRedisClusterClient(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + public RedisClient buildRedisClient(LettuceClientConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClusterClient client = RedisClusterClient.create(redisURIs); + client.setOptions(ClusterClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) 
+ .build()); + + return client; + } + + @Nonnull + private static RedisClient buildRedisClientInternal(RedisURI redisURI, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClient client = RedisClient.create(redisURI); + client.setOptions(ClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) + .build()); + + return client; + } + + static List buildRedisURI(LettuceClientConfig config) { + final String uri = config.uri(); + final Integer database = config.database(); + final String user = config.user(); + final String password = config.password(); + + final List redisURIS = RedisClusterURIUtil.toRedisURIs(URI.create(uri)); + return redisURIS.stream() + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(config.commandTimeout()) + .build(); + }) + .toList(); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java new file mode 100644 index 000000000..adda5d4f8 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java @@ -0,0 
+1,190 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import io.lettuce.core.GetExArgs; +import io.lettuce.core.KeyValue; +import io.lettuce.core.Value; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceClusterRedisCacheClient implements RedisCacheClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceClusterRedisCacheClient.class); + + private final RedisClusterClient redisClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> pool; + private StatefulRedisClusterConnection connection; + + // always use async cause sync uses JDK Proxy wrapped async impl + private RedisAdvancedClusterAsyncCommands commands; + + LettuceClusterRedisCacheClient(RedisClusterClient redisClient) { + this.redisClient = redisClient; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return commands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return commands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + 
.collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return commands.set(key, value).thenApply(r -> true); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return commands.mset(keyAndValue).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, 
entry.getValue()) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_v -> true); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return commands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return commands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); + this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); + this.commands = this.connection.async(); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.pool.close(); + this.connection.close(); + this.redisClient.shutdown(); + + logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java new file mode 100644 index 
000000000..25cb53904 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java @@ -0,0 +1,36 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.ConfigValue; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +import java.time.Duration; + +public interface LettuceModule { + + default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("lettuce"); + return extractor.extract(value); + } + + default LettuceClientFactory lettuceClientFactory() { + return new LettuceClientFactory(); + } + + @DefaultComponent + default RedisCacheClient lettuceRedisClient(LettuceClientFactory factory, LettuceClientConfig config) { + var redisClient = factory.build(config); + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + return new LettuceRedisCacheClient(rc, config); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceClusterRedisCacheClient(rcc); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java new file mode 100644 index 000000000..cdb95be7c --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java @@ -0,0 +1,190 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import 
io.lettuce.core.api.StatefulRedisConnection; +import io.lettuce.core.api.async.RedisAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceRedisCacheClient implements RedisCacheClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceRedisCacheClient.class); + + private final RedisURI redisURI; + private final RedisClient redisClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> pool; + private StatefulRedisConnection connection; + + // always use async cause sync uses JDK Proxy wrapped async impl + private RedisAsyncCommands commands; + + LettuceRedisCacheClient(RedisClient redisClient, LettuceClientConfig config) { + this.redisClient = redisClient; + final List redisURIs = LettuceClientFactory.buildRedisURI(config); + this.redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return commands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return commands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return pool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return commands.set(key, value).thenApply(r -> true); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return commands.mset(keyAndValue).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) 
{ + return pool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) + .thenApply(v -> true) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) + .thenApply(_void -> true) + .whenComplete((s, throwable) -> pool.release(connection)); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return commands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return commands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); + this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); + this.commands = this.connection.async(); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.pool.close(); + this.connection.close(); + this.redisClient.shutdown(); + + 
logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java new file mode 100644 index 000000000..209ce6ecb --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java @@ -0,0 +1,252 @@ +package ru.tinkoff.kora.cache.redis; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; + +abstract class AbstractAsyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String 
key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.getAsync("1").toCompletableFuture().join()); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.getAsync("1").toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = 
cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(Set.of("1", "2"), keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(Set.of("1", "2")).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for 
(String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "???", "2", "???")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "???")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "???")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync("2").toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } +} diff --git 
a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java new file mode 100644 index 000000000..f5ec3aa9d --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java @@ -0,0 +1,229 @@ +package ru.tinkoff.kora.cache.redis; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +abstract class AbstractSyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.get(key)); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get(key); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.get(keys); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + 
// when + assertNull(cache.get("1")); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.get("1"); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return Map.of("1", "1", "2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(Set.of("1", "2"), keysCompute -> { + if ("2".equals(keysCompute.iterator().next())) { + return Map.of("2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), 
valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(Set.of("1", "2")); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get("2"); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java new file mode 100644 index 000000000..3f7764c9a --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireReadTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java new file mode 100644 index 000000000..faa5acb1a --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireWriteTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java new file mode 100644 index 000000000..6f95bee4a --- /dev/null +++ 
b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java new file mode 100644 index 000000000..3509176e2 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java @@ -0,0 +1,85 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nullable; +import org.junit.jupiter.api.Assertions; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; +import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.test.redis.RedisParams; + +import java.time.Duration; + +abstract class CacheRunner extends Assertions implements RedisCacheModule { + + public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, + @Nullable Duration expireRead) { + return new RedisCacheConfig() { + + @Override + public String keyPrefix() { + return "pref"; + } + + @Nullable + @Override + public Duration expireAfterWrite() { + return expireWrite; + } + + @Nullable + @Override + public Duration expireAfterAccess() { + return expireRead; + } + }; + } + + private RedisCacheClient createLettuce(RedisParams redisParams) throws Exception { 
+ var lettuceClientFactory = lettuceClientFactory(); + var lettuceClientConfig = new LettuceClientConfig() { + @Override + public String uri() { + return redisParams.uri().toString(); + } + + @Override + public Integer database() { + return null; + } + + @Override + public String user() { + return null; + } + + @Override + public String password() { + return null; + } + }; + + var lettuceClient = lettuceRedisClient(lettuceClientFactory, lettuceClientConfig); + if (lettuceClient instanceof Lifecycle lc) { + lc.init(); + } + return lettuceClient; + } + + private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { + var lettuceClient = createLettuce(redisParams); + return new DummyCache(getConfig(expireWrite, expireRead), lettuceClient, redisCacheTelemetry(null, null), + stringRedisKeyMapper(), stringRedisValueMapper()); + } + + protected DummyCache createCache(RedisParams redisParams) throws Exception { + return createDummyCache(redisParams, null, null); + } + + protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { + return createDummyCache(redisParams, expireWrite, null); + } + + protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception { + return createDummyCache(redisParams, null, expireRead); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java new file mode 100644 index 000000000..43008e371 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import 
ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireReadTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java new file mode 100644 index 000000000..72feb88f6 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireWriteTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java new file mode 100644 index 000000000..bb8818bbc --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; 
+import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java new file mode 100644 index 000000000..4d098b5ff --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java @@ -0,0 +1,14 @@ +package ru.tinkoff.kora.cache.redis.testdata; + +import ru.tinkoff.kora.cache.redis.*; + +public final class DummyCache extends AbstractRedisCache { + + public DummyCache(RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheTelemetry telemetry, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + super("dummy", config, redisClient, telemetry, keyMapper, valueMapper); + } +} diff --git a/cache/cache-redis/build.gradle b/cache/cache-redis/build.gradle index 748493949..44c085705 100644 --- a/cache/cache-redis/build.gradle +++ b/cache/cache-redis/build.gradle @@ -5,7 +5,7 @@ dependencies { implementation project(":json:json-common") implementation project(":config:config-common") - implementation(libs.lettuce.core) { + implementation(libs.redis.lettuce) { exclude group: 'io.projectreactor', module: 'reactor-core' exclude group: 'io.netty', module: 'netty-common' exclude group: 'io.netty', module: 'netty-handler' diff --git a/dependencies.gradle b/dependencies.gradle index 061357c80..25a68d0db 100644 --- a/dependencies.gradle +++ b/dependencies.gradle @@ -120,7 +120,8 @@ 
dependencyResolutionManagement { DependencyResolutionManagement it -> library("javapoet", "com.squareup", "javapoet").version("1.13.0") library("classgraph", "io.github.classgraph", "classgraph").version("4.8.170") - library('lettuce-core', 'io.lettuce', 'lettuce-core').version('6.5.2.RELEASE') + library('redis-jedis', 'redis.clients', 'jedis').version('5.2.0') + library('redis-lettuce', 'io.lettuce', 'lettuce-core').version('6.5.2.RELEASE') library('apache-pool', 'org.apache.commons', 'commons-pool2').version('2.12.1') library('quartz', 'org.quartz-scheduler', 'quartz').version('2.3.2') diff --git a/internal/test-redis/build.gradle b/internal/test-redis/build.gradle index 3adee5f5a..08f074261 100644 --- a/internal/test-redis/build.gradle +++ b/internal/test-redis/build.gradle @@ -1,6 +1,6 @@ dependencies { api libs.testcontainers.core - api(libs.lettuce.core) { + api(libs.redis.lettuce) { exclude group: 'io.projectreactor', module: 'reactor-core' exclude group: 'io.netty', module: 'netty-common' exclude group: 'io.netty', module: 'netty-handler' diff --git a/redis/redis-jedis/build.gradle b/redis/redis-jedis/build.gradle new file mode 100644 index 000000000..667fb10bc --- /dev/null +++ b/redis/redis-jedis/build.gradle @@ -0,0 +1,11 @@ +dependencies { + annotationProcessor project(':config:config-annotation-processor') + + implementation project(":config:config-common") + implementation libs.redis.jedis + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java new file mode 100644 index 000000000..93fe0d4b2 --- /dev/null +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java @@ -0,0 +1,42 @@ +package ru.tinkoff.kora.redis.jedis; + +import 
jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; +import java.util.List; + +@ConfigValueExtractor +public interface JedisConfig { + + List uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() { + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(10); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(20); + } + + enum Protocol { + + /** Redis 2 to Redis 5 */ + RESP2, + /** Redis 6+ */ + RESP3 + } +} diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java new file mode 100644 index 000000000..761c4507d --- /dev/null +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java @@ -0,0 +1,94 @@ +package ru.tinkoff.kora.redis.jedis; + +import jakarta.annotation.Nonnull; +import redis.clients.jedis.*; +import redis.clients.jedis.util.JedisURIHelper; + +import java.net.URI; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +final class JedisFactory { + + @Nonnull + static UnifiedJedis build(JedisConfig config) { + return (config.uri().size() == 1) + ? 
buildRedisClient(config) + : buildRedisClusterClient(config); + } + + @Nonnull + private static JedisPooled buildRedisClient(JedisConfig config) { + URI uri = URI.create(config.uri().get(0)); + + var jedisConfigBuilder = DefaultJedisClientConfig.builder() + .user(JedisURIHelper.getUser(uri)) + .password(JedisURIHelper.getPassword(uri)) + .database(JedisURIHelper.getDBIndex(uri)) + .ssl(JedisURIHelper.isRedisSSLScheme(uri)); + + var protocol = switch (config.protocol()) { + case RESP3 -> RedisProtocol.RESP3; + case RESP2 -> RedisProtocol.RESP2; + }; + jedisConfigBuilder = jedisConfigBuilder.protocol(protocol); + + var uriProtocol = JedisURIHelper.getRedisProtocol(uri); + if (uriProtocol != null) { + jedisConfigBuilder = jedisConfigBuilder.protocol(uriProtocol); + } + if (config.database() != null) { + jedisConfigBuilder = jedisConfigBuilder.database(config.database()); + } + if (config.user() != null) { + jedisConfigBuilder = jedisConfigBuilder.user(config.user()); + } + if (config.password() != null) { + jedisConfigBuilder = jedisConfigBuilder.password(config.password()); + } + + return new JedisPooled(JedisURIHelper.getHostAndPort(uri), jedisConfigBuilder.build()); + } + + @Nonnull + private static JedisCluster buildRedisClusterClient(JedisConfig config) { + List uris = config.uri().stream() + .map(URI::create) + .toList(); + + Set hostAndPorts = uris.stream() + .map(JedisURIHelper::getHostAndPort) + .collect(Collectors.toSet()); + + URI uri = uris.get(0); + var jedisConfigBuilder = DefaultJedisClientConfig.builder() + .user(JedisURIHelper.getUser(uri)) + .password(JedisURIHelper.getPassword(uri)) + .database(JedisURIHelper.getDBIndex(uri)) + .protocol(JedisURIHelper.getRedisProtocol(uri)) + .ssl(JedisURIHelper.isRedisSSLScheme(uri)); + + var protocol = switch (config.protocol()) { + case RESP3 -> RedisProtocol.RESP3; + case RESP2 -> RedisProtocol.RESP2; + }; + jedisConfigBuilder = jedisConfigBuilder.protocol(protocol); + + var uriProtocol = 
JedisURIHelper.getRedisProtocol(uri); + if (uriProtocol != null) { + jedisConfigBuilder = jedisConfigBuilder.protocol(uriProtocol); + } + if (config.database() != null) { + jedisConfigBuilder = jedisConfigBuilder.database(config.database()); + } + if (config.user() != null) { + jedisConfigBuilder = jedisConfigBuilder.user(config.user()); + } + if (config.password() != null) { + jedisConfigBuilder = jedisConfigBuilder.password(config.password()); + } + + return new JedisCluster(hostAndPorts, jedisConfigBuilder.build()); + } +} diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java new file mode 100644 index 000000000..4079b3f02 --- /dev/null +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.redis.jedis; + +import redis.clients.jedis.UnifiedJedis; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +public interface JedisModule { + + default JedisConfig jedisConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("jedis"); + return extractor.extract(value); + } + + default UnifiedJedis jedisClient(JedisConfig config) { + return JedisFactory.build(config); + } +} diff --git a/redis/redis-lettuce/build.gradle b/redis/redis-lettuce/build.gradle new file mode 100644 index 000000000..7ca016699 --- /dev/null +++ b/redis/redis-lettuce/build.gradle @@ -0,0 +1,20 @@ +dependencies { + annotationProcessor project(':config:config-annotation-processor') + + implementation project(":config:config-common") + implementation(libs.redis.lettuce) { + exclude group: 'io.projectreactor', module: 'reactor-core' + exclude group: 'io.netty', module: 'netty-common' + exclude group: 'io.netty', module: 'netty-handler' + exclude group: 'io.netty', module: 'netty-transport' + } + implementation libs.reactor.core + 
implementation libs.netty.common + implementation libs.netty.handlers + implementation libs.netty.transports + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java new file mode 100644 index 000000000..43a461f10 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java @@ -0,0 +1,43 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.SocketOptions; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; +import java.util.List; + +@ConfigValueExtractor +public interface LettuceConfig { + + List uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() { + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(20); + } + + enum Protocol { + + /** Redis 2 to Redis 5 */ + RESP2, + /** Redis 6+ */ + RESP3 + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java new file mode 100644 index 000000000..3aafeac4d --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java @@ -0,0 +1,132 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.cluster.ClusterClientOptions; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import 
io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nonnull; + +import java.net.URI; +import java.time.Duration; +import java.util.List; + +final class LettuceFactory { + + @Nonnull + static AbstractRedisClient build(LettuceConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + + final List mappedRedisUris = buildRedisURI(config); + + return (mappedRedisUris.size() == 1) + ? buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) + : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClient(LettuceConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClient buildRedisClient(LettuceConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, + Duration 
commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClusterClient client = RedisClusterClient.create(redisURIs); + client.setOptions(ClusterClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) + .build()); + + return client; + } + + @Nonnull + private static RedisClient buildRedisClientInternal(RedisURI redisURI, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClient client = RedisClient.create(redisURI); + client.setOptions(ClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) + .build()); + + return client; + } + + private static List buildRedisURI(LettuceConfig config) { + final Integer database = config.database(); + final String user = config.user(); + final String password = config.password(); + + return config.uri().stream() + .flatMap(uri -> RedisClusterURIUtil.toRedisURIs(URI.create(uri)).stream()) + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else 
if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(config.commandTimeout()) + .build(); + }) + .toList(); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java new file mode 100644 index 000000000..1f740dcb2 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.AbstractRedisClient; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +public interface LettuceModule { + + default LettuceConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("lettuce"); + return extractor.extract(value); + } + + default AbstractRedisClient lettuceClient(LettuceConfig config) { + return LettuceFactory.build(config); + } +} diff --git a/settings.gradle b/settings.gradle index 18f037917..45113761d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -96,7 +96,10 @@ include( 'cache:cache-annotation-processor', 'cache:cache-symbol-processor', 'cache:cache-caffeine', - 'cache:cache-redis', + 'cache:cache-redis', // deprecated + 'cache:cache-redis-common', + 'cache:cache-redis-lettuce', + 'cache:cache-redis-jedis', 'validation:validation-common', 'validation:validation-module', 'validation:validation-annotation-processor', @@ -104,6 +107,8 @@ include( 'test:test-junit5', 'mapstruct:mapstruct-java-extension', 'mapstruct:mapstruct-ksp-extension', + 'redis:redis-lettuce', + 'redis:redis-jedis', 'experimental:s3-client-annotation-processor', 'experimental:s3-client-symbol-processor', 'experimental:s3-client-common', From c0161225c6cfb7653dd0e59328e6899959b2a900 Mon Sep 17 00:00:00 2001 From: Anton Kurako Date: Tue, 4 Mar 2025 16:45:20 +0300 Subject: [PATCH 
2/5] Jedis & Lettuce modules added Cache Redis module refactored and new replacement for old Redis cache module introduced Old Redis Cache module deprecated but not removed for compatibility --- cache/cache-annotation-processor/build.gradle | 2 +- .../processor/CacheAnnotationProcessor.java | 188 +++-- .../processor/AsyncCacheAopTests.java | 2 +- .../processor/AsyncCacheManyAopTests.java | 5 +- .../AsyncCacheManyOptionalAopTests.java | 5 +- .../processor/AsyncCacheOneAopTests.java | 2 +- .../processor/AsyncCacheOneManyAopTests.java | 5 +- .../AsyncCacheOneManySyncAopTests.java | 4 +- .../processor/AsyncCacheOptionalAopTests.java | 2 +- .../annotation/processor/CacheRunner.java | 104 ++- .../processor/MonoCacheAopTests.java | 2 +- .../processor/MonoCacheManyAopTests.java | 5 +- .../MonoCacheManyOptionalAopTests.java | 5 +- .../processor/MonoCacheOneAopTests.java | 2 +- .../processor/MonoCacheOneManyAopTests.java | 5 +- .../MonoCacheOneManySyncAopTests.java | 4 +- .../processor/MonoCacheOptionalAopTests.java | 2 +- .../processor/SyncCacheAopTests.java | 2 +- .../processor/SyncCacheManyAopTests.java | 5 +- .../processor/SyncCacheOneAopTests.java | 2 +- .../processor/SyncCacheOneManyAopTests.java | 5 +- .../SyncCacheOneManySyncAopTests.java | 4 +- cache/cache-caffeine/build.gradle | 2 +- .../cache/caffeine/AbstractCaffeineCache.java | 90 ++- .../cache/caffeine/CaffeineCacheConfig.java | 3 + .../cache/caffeine/CaffeineCacheModule.java | 32 +- .../caffeine/CaffeineCacheTelemetry.java | 1 + .../kora/cache/caffeine/CacheRunner.java | 10 +- .../cache/caffeine/testdata/DummyCache.java | 6 +- cache/cache-common/build.gradle | 2 + .../ru/tinkoff/kora/cache/CacheModule.java | 20 + .../tinkoff/kora/cache/LoadableCacheImpl.java | 2 +- .../kora/cache/telemetry/CacheLogger.java | 13 + .../cache/telemetry/CacheLoggerFactory.java | 10 + .../kora/cache/telemetry/CacheMetrics.java | 2 +- .../cache/telemetry/CacheMetricsFactory.java | 10 + .../kora/cache/telemetry/CacheTelemetry.java | 
16 + .../cache/telemetry/CacheTelemetryArgs.java | 12 + .../telemetry/CacheTelemetryFactory.java | 8 + .../cache/telemetry/CacheTracerFactory.java | 10 + .../DefaultCacheTelemetryFactory.java | 109 +++ .../cache/telemetry/Sl4fjCacheLogger.java | 80 +++ .../telemetry/Sl4fjCacheLoggerFactory.java | 23 + cache/cache-redis-common/build.gradle | 11 - .../kora/cache/redis/AbstractRedisCache.java | 176 ++--- .../cache/redis/RedisCacheAsyncClient.java} | 12 +- .../kora/cache/redis/RedisCacheClient.java | 30 +- .../kora/cache/redis/RedisCacheConfig.java | 3 + .../cache/redis/RedisCacheMapperModule.java | 8 - .../kora/cache/redis/RedisCacheModule.java | 4 +- .../kora/cache/redis/RedisCacheTelemetry.java | 129 ---- .../redis/lettuce/LettuceClientConfig.java | 43 -- .../redis/lettuce/LettuceClientFactory.java | 133 ---- .../LettuceClusterRedisCacheClient.java | 190 ----- .../cache/redis/lettuce/LettuceModule.java | 36 - .../lettuce/LettuceRedisCacheClient.java | 190 ----- .../redis/AsyncCacheExpireWriteTests.java | 22 - .../kora/cache/redis/AsyncCacheTests.java | 20 - .../tinkoff/kora/cache/redis/CacheRunner.java | 85 --- .../redis/SyncCacheExpireWriteTests.java | 22 - cache/cache-redis-jedis/build.gradle | 17 +- .../kora/cache/redis/AbstractRedisCache.java | 660 ------------------ .../tinkoff/kora/cache/redis/RedisCache.java | 7 - .../kora/cache/redis/RedisCacheClient.java | 42 -- .../kora/cache/redis/RedisCacheConfig.java | 24 - .../kora/cache/redis/RedisCacheKeyMapper.java | 17 - .../cache/redis/RedisCacheMapperModule.java | 175 ----- .../kora/cache/redis/RedisCacheModule.java | 7 - .../kora/cache/redis/RedisCacheTelemetry.java | 129 ---- .../cache/redis/RedisCacheValueMapper.java | 19 - .../redis/jedis/JedisCacheAsyncClient.java | 133 ++++ .../cache/redis/jedis/JedisCacheModule.java | 21 + .../redis/jedis/JedisCacheSyncClient.java | 119 ++++ .../redis/lettuce/LettuceClientConfig.java | 43 -- .../redis/lettuce/LettuceClientFactory.java | 133 ---- 
.../cache/redis/lettuce/LettuceModule.java | 36 - .../lettuce/LettuceRedisCacheClient.java | 190 ----- .../cache/redis/AbstractSyncCacheTests.java | 229 ------ .../redis/AsyncCacheExpireReadTests.java | 22 - .../cache/redis/SyncCacheExpireReadTests.java | 22 - .../{ => jedis}/AbstractAsyncCacheTests.java | 4 +- .../redis/jedis}/AbstractSyncCacheTests.java | 4 +- .../jedis}/AsyncCacheExpireReadTests.java | 2 +- .../AsyncCacheExpireWriteTests.java | 2 +- .../cache/redis/jedis}/AsyncCacheTests.java | 2 +- .../cache/redis/{ => jedis}/CacheRunner.java | 52 +- .../jedis}/SyncCacheExpireReadTests.java | 2 +- .../jedis}/SyncCacheExpireWriteTests.java | 2 +- .../cache/redis/jedis}/SyncCacheTests.java | 2 +- .../redis/jedis}/testdata/DummyCache.java | 8 +- cache/cache-redis-lettuce/build.gradle | 17 +- .../kora/cache/redis/AbstractRedisCache.java | 660 ------------------ .../tinkoff/kora/cache/redis/RedisCache.java | 7 - .../kora/cache/redis/RedisCacheConfig.java | 24 - .../kora/cache/redis/RedisCacheKeyMapper.java | 17 - .../cache/redis/RedisCacheMapperModule.java | 175 ----- .../kora/cache/redis/RedisCacheModule.java | 7 - .../kora/cache/redis/RedisCacheTelemetry.java | 129 ---- .../cache/redis/RedisCacheValueMapper.java | 19 - .../redis/lettuce/LettuceCacheModule.java | 58 ++ .../redis/lettuce/LettuceCacheSyncClient.java | 75 ++ .../redis/lettuce/LettuceClientConfig.java | 43 -- .../redis/lettuce/LettuceClientFactory.java | 133 ---- .../LettuceClusterCacheAsyncClient.java} | 79 +-- .../LettuceClusterRedisCacheClient.java | 190 ----- .../cache/redis/lettuce/LettuceModule.java | 36 - ...ava => LettuceSingleCacheAsyncClient.java} | 84 +-- .../cache/redis/AbstractAsyncCacheTests.java | 252 ------- .../tinkoff/kora/cache/redis/CacheRunner.java | 85 --- .../kora/cache/redis/SyncCacheTests.java | 20 - .../lettuce}/AbstractAsyncCacheTests.java | 4 +- .../{ => lettuce}/AbstractSyncCacheTests.java | 4 +- .../AsyncCacheExpireReadTests.java | 2 +- .../AsyncCacheExpireWriteTests.java 
| 2 +- .../cache/redis/lettuce}/AsyncCacheTests.java | 2 +- .../kora/cache/redis/lettuce/CacheRunner.java | 129 ++++ .../lettuce}/SyncCacheExpireReadTests.java | 2 +- .../lettuce}/SyncCacheExpireWriteTests.java | 2 +- .../cache/redis/lettuce}/SyncCacheTests.java | 2 +- .../{ => lettuce}/testdata/DummyCache.java | 8 +- cache/cache-redis/README.md | 3 + .../kora/cache/redis/AbstractRedisCache.java | 4 + .../tinkoff/kora/cache/redis/RedisCache.java | 4 + .../kora/cache/redis/RedisCacheClient.java | 4 + .../kora/cache/redis/RedisCacheConfig.java | 4 + .../kora/cache/redis/RedisCacheKeyMapper.java | 2 + .../cache/redis/RedisCacheMapperModule.java | 4 + .../kora/cache/redis/RedisCacheModule.java | 4 + .../kora/cache/redis/RedisCacheTelemetry.java | 4 + .../cache/redis/RedisCacheValueMapper.java | 2 + .../redis/lettuce/LettuceClientConfig.java | 8 +- .../redis/lettuce/LettuceClientFactory.java | 4 + .../cache/redis/lettuce/LettuceModule.java | 4 + .../cache/redis/AbstractAsyncCacheTests.java | 2 +- .../cache/redis/AbstractSyncCacheTests.java | 2 +- .../tinkoff/kora/cache/redis/CacheRunner.java | 2 +- .../redis/lettuce}/testdata/DummyCache.java | 2 +- .../kora/cache/redis/testdata/DummyCache.java | 14 - cache/cache-symbol-processor/build.gradle | 2 +- .../symbol/processor/CacheSymbolProcessor.kt | 204 ++++-- .../cache/symbol/processor/CacheRunner.kt | 98 ++- .../symbol/processor/SuspendCacheAopTests.kt | 2 +- .../processor/SuspendCacheManyAopTests.kt | 7 +- .../processor/SuspendCacheOneAopTests.kt | 2 +- .../processor/SuspendCacheOneManyAopTests.kt | 7 +- .../symbol/processor/SyncCacheAopTests.kt | 2 +- .../symbol/processor/SyncCacheManyAopTests.kt | 7 +- .../symbol/processor/SyncCacheOneAopTests.kt | 2 +- .../processor/SyncCacheOneManyAopTests.kt | 7 +- .../kora/micrometer/module/MetricsModule.java | 7 + .../module/cache/MicrometerCacheMetrics.java | 1 + .../cache/MicrometerCacheMetricsFactory.java | 27 + .../cache/Opentelemetry120CacheMetrics.java | 131 ++++ 
.../cache/Opentelemetry123CacheMetrics.java | 133 ++++ .../module/OpentelemetryModule.java | 7 + .../OpentelementryCacheTracerFactory.java | 29 + redis/redis-jedis/build.gradle | 5 +- .../kora/redis/jedis/JedisFactory.java | 2 + redis/redis-lettuce/build.gradle | 8 +- .../redis/lettuce/LettuceByteBufferCodec.java | 37 + .../lettuce/LettuceCompositeRedisCodec.java | 39 ++ .../kora/redis/lettuce/LettuceConfig.java | 38 +- .../kora/redis/lettuce/LettuceFactory.java | 4 +- .../redis/lettuce/LettuceIntegerCodec.java | 32 + .../LettuceLifecycleConnectionWrapper.java | 53 ++ .../LettuceLifecyclePoolAsyncWrapper.java | 54 ++ .../LettuceLifecyclePoolSyncWrapper.java | 54 ++ .../kora/redis/lettuce/LettuceLongCodec.java | 32 + .../kora/redis/lettuce/LettuceModule.java | 144 ++++ .../kora/redis/lettuce/LettuceVoidCodec.java | 30 + 170 files changed, 2650 insertions(+), 4959 deletions(-) create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java create mode 100644 cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java create mode 100644 
cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java rename cache/{cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java => cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java} (61%) delete mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java delete mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java delete mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java delete mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java delete mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java delete mode 100644 cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java delete mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java delete mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java delete mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java delete mode 100644 cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java delete mode 
100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java delete mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java delete mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java delete mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java delete mode 100644 cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java rename cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/{ => jedis}/AbstractAsyncCacheTests.java (98%) rename cache/{cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/AbstractSyncCacheTests.java (98%) rename cache/{cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis => 
cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/AsyncCacheExpireReadTests.java (93%) rename cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/{ => jedis}/AsyncCacheExpireWriteTests.java (93%) rename cache/{cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/AsyncCacheTests.java (93%) rename cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/{ => jedis}/CacheRunner.java (55%) rename cache/{cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/SyncCacheExpireReadTests.java (93%) rename cache/{cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/SyncCacheExpireWriteTests.java (93%) rename cache/{cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/SyncCacheTests.java (93%) rename cache/{cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis}/testdata/DummyCache.java (50%) delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java delete 
mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java create mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java rename cache/{cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java => cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java} (66%) delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java delete mode 100644 cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java rename cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/{LettuceRedisCacheClient.java => LettuceSingleCacheAsyncClient.java} (65%) delete mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java delete mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java delete mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java rename cache/{cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce}/AbstractAsyncCacheTests.java (98%) rename cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/{ => lettuce}/AbstractSyncCacheTests.java (98%) rename cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/{ => lettuce}/AsyncCacheExpireReadTests.java (93%) rename 
cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/{ => lettuce}/AsyncCacheExpireWriteTests.java (93%) rename cache/{cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce}/AsyncCacheTests.java (92%) create mode 100644 cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java rename cache/{cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce}/SyncCacheExpireReadTests.java (93%) rename cache/{cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce}/SyncCacheExpireWriteTests.java (93%) rename cache/{cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce}/SyncCacheTests.java (92%) rename cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/{ => lettuce}/testdata/DummyCache.java (50%) create mode 100644 cache/cache-redis/README.md rename cache/{cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis => cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce}/testdata/DummyCache.java (89%) delete mode 100644 cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java create mode 100644 micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java create mode 100644 micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java create mode 100644 micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java create mode 100644 opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java create mode 100644 
redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java create mode 100644 redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java diff --git a/cache/cache-annotation-processor/build.gradle b/cache/cache-annotation-processor/build.gradle index 3365658e8..923d650be 100644 --- a/cache/cache-annotation-processor/build.gradle +++ b/cache/cache-annotation-processor/build.gradle @@ -8,7 +8,7 @@ dependencies { testImplementation testFixtures(project(":annotation-processor-common")) testImplementation project(":internal:test-logging") testImplementation project(":cache:cache-caffeine") - testImplementation project(":cache:cache-redis") + testImplementation project(":cache:cache-redis-lettuce") testImplementation project(":json:json-common") testImplementation project(":config:config-common") } diff --git a/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java b/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java index 9e30e4312..ec5305dc2 100644 --- a/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java +++ 
b/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java @@ -29,17 +29,23 @@ public class CacheAnnotationProcessor extends AbstractKoraProcessor { private static final ClassName ANNOTATION_CACHE = ClassName.get("ru.tinkoff.kora.cache.annotation", "Cache"); - private static final ClassName CAFFEINE_TELEMETRY = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheTelemetry"); + private static final ClassName CACHE_TELEMETRY_FACTORY = ClassName.get("ru.tinkoff.kora.cache.telemetry", "CacheTelemetryFactory"); + private static final ClassName CAFFEINE_CACHE = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCache"); private static final ClassName CAFFEINE_CACHE_FACTORY = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheFactory"); private static final ClassName CAFFEINE_CACHE_CONFIG = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheConfig"); private static final ClassName CAFFEINE_CACHE_IMPL = ClassName.get("ru.tinkoff.kora.cache.caffeine", "AbstractCaffeineCache"); + @Deprecated private static final ClassName REDIS_TELEMETRY = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheTelemetry"); + @Deprecated + private static final ClassName REDIS_CACHE_OLD_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheClient"); + private static final ClassName REDIS_CACHE = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCache"); private static final ClassName REDIS_CACHE_IMPL = ClassName.get("ru.tinkoff.kora.cache.redis", "AbstractRedisCache"); private static final ClassName REDIS_CACHE_CONFIG = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheConfig"); - private static final ClassName REDIS_CACHE_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheClient"); + private static final ClassName REDIS_CACHE_SYNC_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheClient"); + private static final ClassName REDIS_CACHE_ASYNC_CLIENT 
= ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheAsyncClient"); private static final ClassName REDIS_CACHE_MAPPER_KEY = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheKeyMapper"); private static final ClassName REDIS_CACHE_MAPPER_VALUE = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheValueMapper"); @@ -217,8 +223,8 @@ private MethodSpec getCacheRedisKeyMapperForRecord(DeclaredType keyType) { var keyName = "_key" + (i + 1); keyBuilder.addStatement("var $L = $L.apply($T.requireNonNull(key.$L(), $S))", - keyName, mapperName, Objects.class, recordField.getSimpleName().toString(), - "Cache key '%s' field '%s' must be non null".formatted(keyType.asElement().toString(), recordField.getSimpleName().toString())); + keyName, mapperName, Objects.class, recordField.getSimpleName().toString(), + "Cache key '%s' field '%s' must be non null".formatted(keyType.asElement().toString(), recordField.getSimpleName().toString())); if (i == 0) { compositeKeyBuilder.add("var _compositeKey = new byte["); @@ -276,51 +282,109 @@ private MethodSpec getCacheMethodImpl(TypeElement cacheContract, ParameterizedTy .build()) .build()) .addParameter(CAFFEINE_CACHE_FACTORY, "factory") - .addParameter(CAFFEINE_TELEMETRY, "telemetry") - .addStatement("return new $T(config, factory, telemetry)", cacheImplName) + .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") + .addStatement("return new $T(config, factory, telemetryFactory)", cacheImplName) .returns(TypeName.get(cacheContract.asType())) .build(); } + if (cacheType.rawType.equals(REDIS_CACHE)) { - var keyType = cacheType.typeArguments.get(0); - var valueType = cacheType.typeArguments.get(1); - var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); - var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); - - final DeclaredType cacheDeclaredType = cacheContract.getInterfaces().stream() - .filter(i -> ClassName.get(i).equals(cacheType)) - .map(i -> (DeclaredType) 
i) - .findFirst() - .orElseThrow(); - - var valueParamBuilder = ParameterSpec.builder(valueMapperType, "valueMapper"); - final Set valueTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(1)); - if (!valueTags.isEmpty()) { - valueParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(valueTags)); + if (cacheType.annotations.stream().anyMatch(a -> a.type.equals(TypeName.get(Deprecated.class)))) { + return getCacheRedisDeprecatedMethod(cacheContract, cacheType, cacheImplName, methodName); + } else { + return getCacheRedisMethod(cacheContract, cacheType, cacheImplName, methodName); } + } - var keyParamBuilder = ParameterSpec.builder(keyMapperType, "keyMapper"); - final Set keyTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(0)); - if (!keyTags.isEmpty()) { - keyParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(keyTags)); - } + throw new IllegalArgumentException("Unknown cache implementation type: " + cacheType.rawType); + } - return MethodSpec.methodBuilder(methodName) - .addModifiers(Modifier.DEFAULT, Modifier.PUBLIC) - .addParameter(ParameterSpec.builder(REDIS_CACHE_CONFIG, "config") - .addAnnotation(AnnotationSpec.builder(CommonClassNames.tag) - .addMember("value", "$T.class", cacheContract) - .build()) + private MethodSpec getCacheRedisMethod(TypeElement cacheContract, + ParameterizedTypeName cacheType, + ClassName cacheImplName, + String methodName) { + var keyType = cacheType.typeArguments.get(0); + var valueType = cacheType.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + + final DeclaredType cacheDeclaredType = cacheContract.getInterfaces().stream() + .filter(i -> ClassName.get(i).equals(cacheType)) + .map(i -> (DeclaredType) i) + .findFirst() + .orElseThrow(); + + var valueParamBuilder = ParameterSpec.builder(valueMapperType, "valueMapper"); + final Set valueTags = 
TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(1)); + if (!valueTags.isEmpty()) { + valueParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(valueTags)); + } + + var keyParamBuilder = ParameterSpec.builder(keyMapperType, "keyMapper"); + final Set keyTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(0)); + if (!keyTags.isEmpty()) { + keyParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(keyTags)); + } + + return MethodSpec.methodBuilder(methodName) + .addModifiers(Modifier.DEFAULT, Modifier.PUBLIC) + .addParameter(ParameterSpec.builder(REDIS_CACHE_CONFIG, "config") + .addAnnotation(AnnotationSpec.builder(CommonClassNames.tag) + .addMember("value", "$T.class", cacheContract) .build()) - .addParameter(REDIS_CACHE_CLIENT, "redisClient") - .addParameter(REDIS_TELEMETRY, "telemetry") - .addParameter(keyParamBuilder.build()) - .addParameter(valueParamBuilder.build()) - .addStatement("return new $T(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) - .returns(TypeName.get(cacheContract.asType())) - .build(); + .build()) + .addParameter(REDIS_CACHE_SYNC_CLIENT, "redisSyncClient") + .addParameter(REDIS_CACHE_ASYNC_CLIENT, "redisAsyncClient") + .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") + .addParameter(keyParamBuilder.build()) + .addParameter(valueParamBuilder.build()) + .addStatement("return new $T(config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper)", cacheImplName) + .returns(TypeName.get(cacheContract.asType())) + .build(); + } + + @Deprecated + private MethodSpec getCacheRedisDeprecatedMethod(TypeElement cacheContract, + ParameterizedTypeName cacheType, + ClassName cacheImplName, + String methodName) { + var keyType = cacheType.typeArguments.get(0); + var valueType = cacheType.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, 
valueType); + + final DeclaredType cacheDeclaredType = cacheContract.getInterfaces().stream() + .filter(i -> ClassName.get(i).equals(cacheType)) + .map(i -> (DeclaredType) i) + .findFirst() + .orElseThrow(); + + var valueParamBuilder = ParameterSpec.builder(valueMapperType, "valueMapper"); + final Set valueTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(1)); + if (!valueTags.isEmpty()) { + valueParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(valueTags)); + } + + var keyParamBuilder = ParameterSpec.builder(keyMapperType, "keyMapper"); + final Set keyTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(0)); + if (!keyTags.isEmpty()) { + keyParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(keyTags)); } - throw new IllegalArgumentException("Unknown cache type: " + cacheType.rawType); + + return MethodSpec.methodBuilder(methodName) + .addModifiers(Modifier.DEFAULT, Modifier.PUBLIC) + .addParameter(ParameterSpec.builder(REDIS_CACHE_CONFIG, "config") + .addAnnotation(AnnotationSpec.builder(CommonClassNames.tag) + .addMember("value", "$T.class", cacheContract) + .build()) + .build()) + .addParameter(REDIS_CACHE_OLD_CLIENT, "redisClient") + .addParameter(REDIS_TELEMETRY, "telemetry") + .addParameter(keyParamBuilder.build()) + .addParameter(valueParamBuilder.build()) + .addStatement("return new $T(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) + .returns(TypeName.get(cacheContract.asType())) + .build(); } private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName cacheContract) { @@ -328,24 +392,40 @@ private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName return MethodSpec.constructorBuilder() .addParameter(CAFFEINE_CACHE_CONFIG, "config") .addParameter(CAFFEINE_CACHE_FACTORY, "factory") - .addParameter(CAFFEINE_TELEMETRY, "telemetry") - .addStatement("super($S, config, factory, telemetry)", configPath) + .addParameter(CACHE_TELEMETRY_FACTORY, 
"telemetryFactory") + .addStatement("super($S, config, factory, telemetryFactory)", configPath) .build(); } if (cacheContract.rawType.equals(REDIS_CACHE)) { - var keyType = cacheContract.typeArguments.get(0); - var valueType = cacheContract.typeArguments.get(1); - var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); - var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); - return MethodSpec.constructorBuilder() - .addParameter(REDIS_CACHE_CONFIG, "config") - .addParameter(REDIS_CACHE_CLIENT, "redisClient") - .addParameter(REDIS_TELEMETRY, "telemetry") - .addParameter(keyMapperType, "keyMapper") - .addParameter(valueMapperType, "valueMapper") - .addStatement("super($S, config, redisClient, telemetry, keyMapper, valueMapper)", configPath) - .build(); + if (cacheContract.annotations.stream().anyMatch(a -> a.type.equals(TypeName.get(Deprecated.class)))) { + var keyType = cacheContract.typeArguments.get(0); + var valueType = cacheContract.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + return MethodSpec.constructorBuilder() + .addParameter(REDIS_CACHE_CONFIG, "config") + .addParameter(REDIS_CACHE_OLD_CLIENT, "redisClient") + .addParameter(REDIS_TELEMETRY, "telemetry") + .addParameter(keyMapperType, "keyMapper") + .addParameter(valueMapperType, "valueMapper") + .addStatement("super($S, config, redisClient, telemetry, keyMapper, valueMapper)", configPath) + .build(); + } else { + var keyType = cacheContract.typeArguments.get(0); + var valueType = cacheContract.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + return MethodSpec.constructorBuilder() + .addParameter(REDIS_CACHE_CONFIG, "config") + 
.addParameter(REDIS_CACHE_SYNC_CLIENT, "redisSyncClient") + .addParameter(REDIS_CACHE_ASYNC_CLIENT, "redisAsyncClient") + .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") + .addParameter(keyMapperType, "keyMapper") + .addParameter(valueMapperType, "valueMapper") + .addStatement("super($S, config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper)", configPath) + .build(); + } } throw new IllegalArgumentException("Unknown cache type: " + cacheContract.rawType); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java index c72dcbb3e..ffd8fcace 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java @@ -41,7 +41,7 @@ private CacheableAsync getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java index 441b28392..252a14863 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java +++ 
b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java @@ -10,7 +10,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -51,7 +50,7 @@ private CacheableAsyncMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -62,7 +61,7 @@ private CacheableAsyncMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java index e0ca38474..65368c2ee 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java +++ 
b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java @@ -7,7 +7,6 @@ import ru.tinkoff.kora.aop.annotation.processor.AopAnnotationProcessor; import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache21; import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache22; -import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncMany; import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncManyOptional; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; @@ -51,7 +50,7 @@ private CacheableAsyncManyOptional getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -62,7 +61,7 @@ private CacheableAsyncManyOptional getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java index 
3a293d91a..8ba607af3 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java @@ -41,7 +41,7 @@ private CacheableAsyncOne getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache11) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java index cd01760bf..57b6098d5 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java @@ -9,7 +9,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache12; import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncOneMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -49,7 +48,7 @@ private CacheableAsyncOneMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), 
caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +59,7 @@ private CacheableAsyncOneMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache12) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper()); var serviceClass = classLoader.loadClass(CACHED_SERVICE); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java index 7247ac7cf..25f3fa13c 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java @@ -50,7 +50,7 @@ private CacheableAsyncOneManySync getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +60,7 @@ private CacheableAsyncOneManySync getService() { final Constructor cacheConstructor2 = cacheClass2.getDeclaredConstructors()[0]; cacheConstructor2.setAccessible(true); cache2 = (DummyCache13) 
cacheConstructor2.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java index bfcb32c37..b7963a7f2 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java @@ -42,7 +42,7 @@ private CacheableAsyncOptional getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java index 6d5b868a8..3eed2dc0b 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java @@ -3,8 +3,10 @@ import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheConfig; +import 
ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; import ru.tinkoff.kora.cache.redis.RedisCacheClient; import ru.tinkoff.kora.cache.redis.RedisCacheConfig; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.nio.ByteBuffer; import java.time.Duration; @@ -37,6 +39,11 @@ public Duration expireAfterAccess() { public Integer initialSize() { return null; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } @@ -59,11 +66,16 @@ public Duration expireAfterWrite() { public Duration expireAfterAccess() { return null; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } - public static RedisCacheClient lettuceClient(final Map cache) { - return new RedisCacheClient() { + public static RedisCacheAsyncClient lettuceAsyncClient(final Map cache) { + return new RedisCacheAsyncClient() { @Override public CompletionStage get(byte[] key) { var r = cache.get(ByteBuffer.wrap(key)); @@ -94,24 +106,24 @@ public CompletionStage> getex(byte[][] keys, long expireAfte } @Override - public CompletionStage set(byte[] key, byte[] value) { + public CompletionStage set(byte[] key, byte[] value) { cache.put(ByteBuffer.wrap(key), ByteBuffer.wrap(value)); - return CompletableFuture.completedFuture(true); + return CompletableFuture.completedFuture(null); } @Override - public CompletionStage mset(Map keyAndValue) { + public CompletionStage mset(Map keyAndValue) { keyAndValue.forEach((k, v) -> cache.put(ByteBuffer.wrap(k), ByteBuffer.wrap(v))); - return CompletableFuture.completedFuture(true); + return CompletableFuture.completedFuture(null); } @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { return mset(keyAndValue); } @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { return set(key, value); } @@ 
-130,9 +142,81 @@ public CompletionStage del(byte[][] keys) { } @Override - public CompletionStage flushAll() { + public CompletionStage flushAll() { + cache.clear(); + return CompletableFuture.completedFuture(null); + } + }; + } + + public static RedisCacheClient lettuceSyncClient(final Map cache) { + return new RedisCacheClient() { + @Override + public byte[] get(byte[] key) { + var r = cache.get(ByteBuffer.wrap(key)); + return (r == null) + ? null + : r.array(); + } + + @Nonnull + @Override + public Map mget(byte[][] keys) { + final Map result = new HashMap<>(); + for (byte[] key : keys) { + Optional.ofNullable(cache.get(ByteBuffer.wrap(key))).ifPresent(r -> result.put(key, r.array())); + } + return result; + } + + @Override + public byte[] getex(byte[] key, long expireAfterMillis) { + return get(key); + } + + @Nonnull + @Override + public Map getex(byte[][] keys, long expireAfterMillis) { + return mget(keys); + } + + @Override + public void set(byte[] key, byte[] value) { + cache.put(ByteBuffer.wrap(key), ByteBuffer.wrap(value)); + } + + @Override + public void mset(Map keyAndValue) { + keyAndValue.forEach((k, v) -> cache.put(ByteBuffer.wrap(k), ByteBuffer.wrap(v))); + } + + @Override + public void psetex(Map keyAndValue, long expireAfterMillis) { + mset(keyAndValue); + } + + @Override + public void psetex(byte[] key, byte[] value, long expireAfterMillis) { + set(key, value); + } + + @Override + public long del(byte[] key) { + return cache.remove(ByteBuffer.wrap(key)) == null ? 
0L : 1L; + } + + @Override + public long del(byte[][] keys) { + long counter = 0; + for (byte[] key : keys) { + counter += del(key); + } + return counter; + } + + @Override + public void flushAll() { cache.clear(); - return CompletableFuture.completedFuture(true); } }; } diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java index 3faed39ac..e24eb47ab 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java @@ -48,7 +48,7 @@ private CacheableMono getService() { cacheConstructor.setAccessible(true); final Map cacheBuf = new HashMap<>(); cache = (DummyCache22) cacheConstructor.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cacheBuf), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cacheBuf), CacheRunner.lettuceAsyncClient(cacheBuf), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java index be38e6b67..bb8aa68bd 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java @@ -10,7 +10,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoMany; 
import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -53,7 +52,7 @@ private CacheableMonoMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -64,7 +63,7 @@ private CacheableMonoMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java index 692b5d6e5..97c3d41d4 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java @@ -7,7 +7,6 @@ import ru.tinkoff.kora.aop.annotation.processor.AopAnnotationProcessor; import 
ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache21; import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache22; -import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoMany; import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoManyOptional; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; @@ -52,7 +51,7 @@ private CacheableMonoManyOptional getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -63,7 +62,7 @@ private CacheableMonoManyOptional getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java index 1b16ba04d..69a1a15db 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java +++ 
b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java @@ -42,7 +42,7 @@ private CacheableMonoOne getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache11) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java index 178e5b1e5..e56303708 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java @@ -9,7 +9,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache12; import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoOneMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -50,7 +49,7 @@ private CacheableMonoOneMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = 
classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -61,7 +60,7 @@ private CacheableMonoOneMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache12) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper()); var serviceClass = classLoader.loadClass(CACHED_SERVICE); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java index 245b68962..504c10048 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java @@ -51,7 +51,7 @@ private CacheableMonoOneManySync getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -61,7 +61,7 @@ private CacheableMonoOneManySync getService() { final Constructor cacheConstructor2 = cacheClass2.getDeclaredConstructors()[0]; cacheConstructor2.setAccessible(true); cache2 = (DummyCache13) cacheConstructor2.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + 
caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java index 6cb14402d..597212fb2 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java @@ -43,7 +43,7 @@ private CacheableMonoOptional getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java index ddfcce1bd..45e9ee5f6 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java @@ -41,7 +41,7 @@ private CacheableSync getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - 
caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java index ee57a589f..7bbcf6cc9 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java @@ -10,7 +10,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testdata.sync.CacheableSyncMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -51,7 +50,7 @@ private CacheableSyncMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -62,7 +61,7 @@ private CacheableSyncMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), 
defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java index b3d969882..da00238dd 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java @@ -41,7 +41,7 @@ private CacheableSyncOne getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache11) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java index 6cb067969..a38e5c72b 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java @@ -9,7 +9,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache12; import ru.tinkoff.kora.cache.annotation.processor.testdata.sync.CacheableSyncOneMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; -import 
ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -49,7 +48,7 @@ private CacheableSyncOneMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +59,7 @@ private CacheableSyncOneMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache12) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper()); var serviceClass = classLoader.loadClass(CACHED_SERVICE); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java index 724d72db4..21ab9844a 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java @@ -50,7 +50,7 @@ private CacheableSyncOneManySync getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - 
caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +60,7 @@ private CacheableSyncOneManySync getService() { final Constructor cacheConstructor2 = cacheClass2.getDeclaredConstructors()[0]; cacheConstructor2.setAccessible(true); cache2 = (DummyCache13) cacheConstructor2.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-caffeine/build.gradle b/cache/cache-caffeine/build.gradle index 79fccd71d..6ae40f58f 100644 --- a/cache/cache-caffeine/build.gradle +++ b/cache/cache-caffeine/build.gradle @@ -3,9 +3,9 @@ dependencies { compileOnly libs.reactor.core api project(":cache:cache-common") + api libs.caffeine implementation project(":config:config-common") - implementation libs.caffeine testImplementation libs.reactor.core testImplementation testFixtures(project(":annotation-processor-common")) diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java index 934dc69c9..499f7a7cb 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java @@ -1,28 +1,68 @@ package ru.tinkoff.kora.cache.caffeine; import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; import 
java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; import java.util.function.Function; public abstract class AbstractCaffeineCache implements CaffeineCache { - private final String name; + private static final String ORIGIN = "caffeine"; + private final com.github.benmanes.caffeine.cache.Cache caffeine; - private final CaffeineCacheTelemetry telemetry; + private final CacheTelemetry telemetry; + @Deprecated protected AbstractCaffeineCache(String name, CaffeineCacheConfig config, CaffeineCacheFactory factory, CaffeineCacheTelemetry telemetry) { - this.name = name; this.caffeine = factory.build(name, config); - this.telemetry = telemetry; + this.telemetry = operationName -> { + var telemetryContext = telemetry.create(operationName, name); + return new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (valueFromCache == null) { + telemetryContext.recordSuccess(); + } else { + telemetryContext.recordSuccess(valueFromCache); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + telemetryContext.recordFailure(throwable); + } + }; + }; + } + + protected AbstractCaffeineCache(String name, + CaffeineCacheConfig config, + CaffeineCacheFactory factory, + CacheTelemetryFactory telemetry) { + this.caffeine = factory.build(name, config); + this.telemetry = telemetry.get(config.telemetry(), new CacheTelemetryArgs() { + @Nonnull + @Override + public String cacheName() { + return name; + } + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + }); } @Override @@ -31,7 +71,7 @@ public V get(@Nonnull K key) { return null; } - var telemetryContext = telemetry.create("GET", name); + var telemetryContext = telemetry.get("GET"); var value = caffeine.getIfPresent(key); telemetryContext.recordSuccess(value); return value; @@ -44,18 
+84,18 @@ public Map get(@Nonnull Collection keys) { return Collections.emptyMap(); } - var telemetryContext = telemetry.create("GET_MANY", name); + var telemetryContext = telemetry.get("GET_MANY"); var values = caffeine.getAllPresent(keys); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(values); return values; } @Nonnull @Override public Map getAll() { - var telemetryContext = telemetry.create("GET_ALL", name); + var telemetryContext = telemetry.get("GET_ALL"); var values = Collections.unmodifiableMap(caffeine.asMap()); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return values; } @@ -65,9 +105,9 @@ public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction return mappingFunction.apply(key); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT"); var value = caffeine.get(key, mappingFunction); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } @@ -79,9 +119,9 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< return mappingFunction.apply(Collections.emptySet()); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT_MANY"); var value = caffeine.getAll(keys, ks -> mappingFunction.apply((Set) ks)); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } @@ -91,9 +131,9 @@ public V put(@Nonnull K key, @Nonnull V value) { return value; } - var telemetryContext = telemetry.create("PUT", name); + var telemetryContext = telemetry.get("PUT"); caffeine.put(key, value); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } @@ -104,34 +144,34 @@ public Map put(@Nonnull Map keyAndValues) { return Collections.emptyMap(); } - var telemetryContext = telemetry.create("PUT_MANY", name); + var telemetryContext = 
telemetry.get("PUT_MANY"); caffeine.putAll(keyAndValues); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return keyAndValues; } @Override public void invalidate(@Nonnull K key) { if (key != null) { - var telemetryContext = telemetry.create("INVALIDATE", name); + var telemetryContext = telemetry.get("INVALIDATE"); caffeine.invalidate(key); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); } } @Override public void invalidate(@Nonnull Collection keys) { if (keys != null && !keys.isEmpty()) { - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + var telemetryContext = telemetry.get("INVALIDATE_MANY"); caffeine.invalidateAll(keys); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); } } @Override public void invalidateAll() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + var telemetryContext = telemetry.get("INVALIDATE_ALL"); caffeine.invalidateAll(); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); } } diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java index 2119fa2f5..68f46947c 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java @@ -3,6 +3,7 @@ import jakarta.annotation.Nullable; import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.time.Duration; @@ -21,4 +22,6 @@ default Long maximumSize() { @Nullable Integer initialSize(); + + TelemetryConfig telemetry(); } diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java 
b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java index 9904ad4ee..d0ad0f6d8 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java @@ -4,14 +4,38 @@ import com.github.benmanes.caffeine.cache.Caffeine; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.cache.CacheModule; +import ru.tinkoff.kora.cache.telemetry.*; import ru.tinkoff.kora.common.DefaultComponent; -public interface CaffeineCacheModule { +public interface CaffeineCacheModule extends CacheModule { + @Deprecated(forRemoval = true) @DefaultComponent - default CaffeineCacheTelemetry caffeineCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + default CaffeineCacheTelemetry caffeineCacheTelemetry(@Nullable CacheMetricsFactory metricsFactory, + @Nullable CacheTracerFactory tracerFactory, + CaffeineCacheConfig config) { + var args = new CacheTelemetryArgs() { + @Nonnull + @Override + public String cacheName() { + return ""; + } + + @Nonnull + @Override + public String origin() { + return "caffeine"; + } + }; + + CacheMetrics metrics = (metricsFactory == null) + ? null + : metricsFactory.get(config.telemetry().metrics(), args); + CacheTracer tracer = (tracerFactory == null) + ? 
null + : tracerFactory.get(config.telemetry().tracing(), args); + return new CaffeineCacheTelemetry(metrics, tracer); } diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java index 972e55a64..70efdec02 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java @@ -8,6 +8,7 @@ import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; import ru.tinkoff.kora.cache.telemetry.CacheTracer; +@Deprecated public final class CaffeineCacheTelemetry { private static final String ORIGIN = "caffeine"; diff --git a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java index 4c3b50a31..180d9b0c9 100644 --- a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java +++ b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java @@ -3,6 +3,7 @@ import jakarta.annotation.Nullable; import org.junit.jupiter.api.Assertions; import ru.tinkoff.kora.cache.caffeine.testdata.DummyCache; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.time.Duration; @@ -27,12 +28,19 @@ public Duration expireAfterAccess() { public Integer initialSize() { return null; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } protected DummyCache createCache() { try { - return new DummyCache(getConfig(), caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + return new DummyCache(getConfig(), + caffeineCacheFactory(null), + defaultCacheTelemetryFactory(null, null, null)); } catch (Exception e) { throw new IllegalStateException(e); } diff --git 
a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java index b56c13c15..6bfe9b9c2 100644 --- a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java +++ b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java @@ -3,11 +3,11 @@ import ru.tinkoff.kora.cache.caffeine.AbstractCaffeineCache; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheConfig; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheFactory; -import ru.tinkoff.kora.cache.caffeine.CaffeineCacheTelemetry; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; public final class DummyCache extends AbstractCaffeineCache { - public DummyCache(CaffeineCacheConfig config, CaffeineCacheFactory factory, CaffeineCacheTelemetry telemetry) { - super("dummy", config, factory, telemetry); + public DummyCache(CaffeineCacheConfig config, CaffeineCacheFactory factory, CacheTelemetryFactory telemetryFactory) { + super("dummy", config, factory, telemetryFactory); } } diff --git a/cache/cache-common/build.gradle b/cache/cache-common/build.gradle index 2920f3043..67c895fcd 100644 --- a/cache/cache-common/build.gradle +++ b/cache/cache-common/build.gradle @@ -1,5 +1,7 @@ dependencies { api project(":common") + api project(":logging:logging-common") + api project(":telemetry:telemetry-common") testImplementation project(":internal:test-logging") } diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java new file mode 100644 index 000000000..43c2b5eb1 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.*; +import ru.tinkoff.kora.common.DefaultComponent; 
+ +public interface CacheModule { + + @DefaultComponent + default CacheLoggerFactory defaultCacheLoggerFactory() { + return new Sl4fjCacheLoggerFactory(); + } + + @DefaultComponent + default CacheTelemetryFactory defaultCacheTelemetryFactory(@Nullable CacheLoggerFactory loggerFactory, + @Nullable CacheMetricsFactory metricsFactory, + @Nullable CacheTracerFactory tracerFactory) { + return new DefaultCacheTelemetryFactory(loggerFactory, metricsFactory, tracerFactory); + } +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java index 2aace26ec..bba677580 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java @@ -23,7 +23,7 @@ final class LoadableCacheImpl implements LoadableCache { public V get(@Nonnull K key) { return cache.computeIfAbsent(key, k -> { final Map result = cacheLoader.apply(Set.of(k)); - if(result.isEmpty()) { + if (result.isEmpty()) { return null; } else { return result.values().iterator().next(); diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java new file mode 100644 index 000000000..cb90ed9cb --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java @@ -0,0 +1,13 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; + +public interface CacheLogger { + + void logStart(@Nonnull CacheTelemetryOperation operation); + + void logSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache); + + void logFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception); +} diff --git 
a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java new file mode 100644 index 000000000..dfeb3326f --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java @@ -0,0 +1,10 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheLoggerFactory { + + @Nullable + CacheLogger get(TelemetryConfig.LogConfig logging, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java index eac708ea4..d2e733ed2 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java @@ -7,5 +7,5 @@ public interface CacheMetrics { void recordSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache); - void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable throwable); + void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception); } diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java new file mode 100644 index 000000000..a4b69308b --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java @@ -0,0 +1,10 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheMetricsFactory { + + @Nullable + 
CacheMetrics get(TelemetryConfig.MetricsConfig config, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java new file mode 100644 index 000000000..ddee56206 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java @@ -0,0 +1,16 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; + +public interface CacheTelemetry { + + interface CacheTelemetryContext { + + void recordSuccess(@Nullable Object valueFromCache); + + void recordFailure(@Nullable Throwable throwable); + } + + CacheTelemetryContext get(@Nonnull String operationName); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java new file mode 100644 index 000000000..050ca3dbf --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java @@ -0,0 +1,12 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; + +public interface CacheTelemetryArgs { + + @Nonnull + String cacheName(); + + @Nonnull + String origin(); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java new file mode 100644 index 000000000..0fadbb6d5 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java @@ -0,0 +1,8 @@ +package ru.tinkoff.kora.cache.telemetry; + +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheTelemetryFactory { + + CacheTelemetry get(TelemetryConfig telemetryConfig, CacheTelemetryArgs args); +} diff --git 
a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java new file mode 100644 index 000000000..eeedf9a50 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java @@ -0,0 +1,10 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheTracerFactory { + + @Nullable + CacheTracer get(TelemetryConfig.TracingConfig tracing, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java new file mode 100644 index 000000000..3282cdad5 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java @@ -0,0 +1,109 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public final class DefaultCacheTelemetryFactory implements CacheTelemetryFactory { + + private static final CacheTelemetry.CacheTelemetryContext EMPTY_CONTEXT = new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + + } + }; + private static final CacheTelemetry EMPTY_TELEMETRY = operationName -> EMPTY_CONTEXT; + + @Nullable + private final CacheLoggerFactory loggerFactory; + @Nullable + private final CacheTracerFactory tracingFactory; + @Nullable + private final CacheMetricsFactory metricsFactory; + + public DefaultCacheTelemetryFactory(@Nullable CacheLoggerFactory loggerFactory, + @Nullable CacheMetricsFactory metricsFactory, + 
@Nullable CacheTracerFactory tracingFactory) { + this.loggerFactory = loggerFactory; + this.metricsFactory = metricsFactory; + this.tracingFactory = tracingFactory; + } + + @Override + public CacheTelemetry get(TelemetryConfig config, CacheTelemetryArgs args) { + var tracing = this.tracingFactory == null ? null : this.tracingFactory.get(config.tracing(), args); + var metrics = this.metricsFactory == null ? null : this.metricsFactory.get(config.metrics(), args); + var logger = this.loggerFactory == null ? null : this.loggerFactory.get(config.logging(), args); + if (tracing == null && metrics == null && logger == null) { + return EMPTY_TELEMETRY; + } + + return new DefaultCacheTelemetry(args, tracing, metrics, logger); + } + + private record Operation(String name, String cacheName, String origin) implements CacheTelemetryOperation {} + + private static final class DefaultCacheTelemetry implements CacheTelemetry { + + private final CacheTelemetryArgs args; + @Nullable + private final CacheTracer tracer; + @Nullable + private final CacheMetrics metrics; + @Nullable + private final CacheLogger logger; + + public DefaultCacheTelemetry(CacheTelemetryArgs args, + @Nullable CacheTracer tracer, + @Nullable CacheMetrics metrics, + @Nullable CacheLogger logger) { + this.args = args; + this.tracer = tracer; + this.metrics = metrics; + this.logger = logger; + } + + @Override + public CacheTelemetryContext get(@Nonnull String operationName) { + var operation = new Operation(operationName, args.cacheName(), args.origin()); + + var startedInNanos = System.nanoTime(); + if (logger != null) { + logger.logStart(operation); + } + + final CacheTracer.CacheSpan span = (tracer != null) + ? 
tracer.trace(operation) + : null; + + return new CacheTelemetryContext() { + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordSuccess(operation, durationInNanos, valueFromCache); + } + if (span != null) { + span.recordSuccess(); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordFailure(operation, durationInNanos, throwable); + } + if (span != null) { + span.recordFailure(throwable); + } + } + }; + } + } +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java new file mode 100644 index 000000000..803e815f4 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java @@ -0,0 +1,80 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import org.slf4j.Logger; +import ru.tinkoff.kora.logging.common.arg.StructuredArgument; + +public class Sl4fjCacheLogger implements CacheLogger { + + private final Logger startLogger; + private final Logger finishLogger; + + public Sl4fjCacheLogger(Logger requestLogger, Logger finishLogger) { + this.startLogger = requestLogger; + this.finishLogger = finishLogger; + } + + @Override + public void logStart(@Nonnull CacheTelemetryOperation operation) { + var marker = StructuredArgument.marker("cacheOperation", gen -> { + gen.writeStartObject(); + gen.writeStringField("name", operation.name()); + gen.writeStringField("cache", operation.cacheName()); + gen.writeStringField("origin", operation.origin()); + gen.writeEndObject(); + }); + + startLogger.debug(marker, "Operation '{}' for cache '{}' started", + operation.name(), 
operation.cacheName()); + } + + @Override + public void logSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache) { + var marker = StructuredArgument.marker("cacheOperation", gen -> { + gen.writeStartObject(); + gen.writeStringField("name", operation.name()); + gen.writeStringField("cache", operation.cacheName()); + gen.writeStringField("origin", operation.origin()); + gen.writeNumberField("processingTime", durationInNanos / 1_000_000); + gen.writeEndObject(); + }); + + if (operation.name().startsWith("GET")) { + if (valueFromCache == null) { + finishLogger.debug(marker, "Operation '{}' for cache '{}' didn't retrieve value", + operation.name(), operation.cacheName()); + } else { + finishLogger.debug(marker, "Operation '{}' for cache '{}' retrieved value", + operation.name(), operation.cacheName()); + } + } else { + finishLogger.debug(marker, "Operation '{}' for cache '{}' completed", + operation.name(), operation.cacheName()); + } + } + + @Override + public void logFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception) { + var marker = StructuredArgument.marker("cacheOperation", gen -> { + gen.writeStartObject(); + gen.writeStringField("name", operation.name()); + gen.writeStringField("cache", operation.cacheName()); + gen.writeStringField("origin", operation.origin()); + gen.writeNumberField("processingTime", durationInNanos / 1_000_000); + if (exception != null) { + var exceptionType = exception.getClass().getCanonicalName(); + gen.writeStringField("exceptionType", exceptionType); + } + gen.writeEndObject(); + }); + + if (exception != null) { + finishLogger.warn(marker, "Operation '{}' failed for cache '{}' with message: {}", + operation.name(), operation.cacheName(), exception.getMessage()); + } else { + finishLogger.warn(marker, "Operation '{}' failed for cache '{}'", + operation.name(), operation.cacheName()); + } + } +} diff --git 
a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java new file mode 100644 index 000000000..c28408756 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java @@ -0,0 +1,23 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.Cache; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Objects; + +public final class Sl4fjCacheLoggerFactory implements CacheLoggerFactory { + + @Nullable + @Override + public CacheLogger get(TelemetryConfig.LogConfig logging, CacheTelemetryArgs args) { + if (Objects.requireNonNullElse(logging.enabled(), false)) { + var startLogger = LoggerFactory.getLogger(Cache.class.getPackageName() + ".start." + args.origin() + "." + args.cacheName()); + var finishLogger = LoggerFactory.getLogger(Cache.class.getPackageName() + ".finish." + args.origin() + "." 
+ args.cacheName()); + return new Sl4fjCacheLogger(startLogger, finishLogger); + } else { + return null; + } + } +} diff --git a/cache/cache-redis-common/build.gradle b/cache/cache-redis-common/build.gradle index 44c085705..c6218e579 100644 --- a/cache/cache-redis-common/build.gradle +++ b/cache/cache-redis-common/build.gradle @@ -5,17 +5,6 @@ dependencies { implementation project(":json:json-common") implementation project(":config:config-common") - implementation(libs.redis.lettuce) { - exclude group: 'io.projectreactor', module: 'reactor-core' - exclude group: 'io.netty', module: 'netty-common' - exclude group: 'io.netty', module: 'netty-handler' - exclude group: 'io.netty', module: 'netty-transport' - } - implementation libs.reactor.core - implementation libs.netty.common - implementation libs.netty.handlers - implementation libs.netty.transports - implementation libs.apache.pool testImplementation project(":internal:test-logging") testImplementation project(":internal:test-redis") diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java index 941591ac8..1ac359ce9 100644 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -4,6 +4,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ru.tinkoff.kora.cache.AsyncCache; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; import java.nio.charset.StandardCharsets; import java.util.*; @@ -17,9 +20,11 @@ public abstract class AbstractRedisCache implements AsyncCache { private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); - private final String name; + private static final 
String ORIGIN = "redis"; + private final RedisCacheClient redisClient; - private final RedisCacheTelemetry telemetry; + private final RedisCacheAsyncClient redisAsyncClient; + private final CacheTelemetry telemetry; private final byte[] keyPrefix; private final RedisCacheKeyMapper keyMapper; @@ -31,12 +36,25 @@ public abstract class AbstractRedisCache implements AsyncCache { protected AbstractRedisCache(String name, RedisCacheConfig config, RedisCacheClient redisClient, - RedisCacheTelemetry telemetry, + RedisCacheAsyncClient redisAsyncClient, + CacheTelemetryFactory telemetryFactory, RedisCacheKeyMapper keyMapper, RedisCacheValueMapper valueMapper) { - this.name = name; this.redisClient = redisClient; - this.telemetry = telemetry; + this.redisAsyncClient = redisAsyncClient; + this.telemetry = telemetryFactory.get(config.telemetry(), new CacheTelemetryArgs() { + @Nonnull + @Override + public String cacheName() { + return name; + } + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + }); this.keyMapper = keyMapper; this.valueMapper = valueMapper; this.expireAfterAccessMillis = (config.expireAfterAccess() == null) @@ -62,12 +80,12 @@ public V get(@Nonnull K key) { return null; } - var telemetryContext = telemetry.create("GET", name); + var telemetryContext = telemetry.get("GET"); try { final byte[] keyAsBytes = mapKey(key); final byte[] jsonAsBytes = (expireAfterAccessMillis == null) - ? redisClient.get(keyAsBytes).toCompletableFuture().join() - : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); final V value = valueMapper.read(jsonAsBytes); telemetryContext.recordSuccess(value); @@ -88,15 +106,15 @@ public Map get(@Nonnull Collection keys) { return Collections.emptyMap(); } - var telemetryContext = telemetry.create("GET_MANY", name); + var telemetryContext = telemetry.get("GET_MANY"); try { final Map keysByKeyBytes = keys.stream() .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); final Map valueByKeys = (expireAfterAccessMillis == null) - ? redisClient.mget(keysByBytes).toCompletableFuture().join() - : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + ? redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); final Map keyToValue = new HashMap<>(); for (var entry : keysByKeyBytes.entrySet()) { @@ -126,17 +144,17 @@ public V put(@Nonnull K key, @Nonnull V value) { return null; } - var telemetryContext = telemetry.create("PUT", name); + var telemetryContext = telemetry.get("PUT"); try { final byte[] keyAsBytes = mapKey(key); final byte[] valueAsBytes = valueMapper.write(value); if (expireAfterWriteMillis == null) { - redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + redisClient.set(keyAsBytes, valueAsBytes); } else { - redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); } - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } catch (CompletionException e) { telemetryContext.recordFailure(e.getCause()); @@ -154,7 +172,7 @@ public Map put(@Nonnull Map keyAndValues) { return Collections.emptyMap(); } - var telemetryContext = telemetry.create("PUT_MANY", name); + var telemetryContext = telemetry.get("PUT_MANY"); try { var 
keyAndValuesAsBytes = new HashMap(); @@ -165,12 +183,12 @@ public Map put(@Nonnull Map keyAndValues) { }); if (expireAfterWriteMillis == null) { - redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + redisClient.mset(keyAndValuesAsBytes); } else { - redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); } - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return keyAndValues; } catch (CompletionException e) { telemetryContext.recordFailure(e.getCause()); @@ -187,14 +205,14 @@ public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction return null; } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT"); V fromCache = null; try { final byte[] keyAsBytes = mapKey(key); final byte[] jsonAsBytes = (expireAfterAccessMillis == null) - ? redisClient.get(keyAsBytes).toCompletableFuture().join() - : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); fromCache = valueMapper.read(jsonAsBytes); } catch (Exception e) { @@ -202,7 +220,7 @@ public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction } if (fromCache != null) { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return fromCache; } @@ -213,16 +231,16 @@ public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction final byte[] keyAsBytes = mapKey(key); final byte[] valueAsBytes = valueMapper.write(value); if (expireAfterWriteMillis == null) { - redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); + redisClient.set(keyAsBytes, valueAsBytes); } else { - redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); } } catch (Exception e) { logger.error(e.getMessage(), e); } } - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } catch (CompletionException e) { telemetryContext.recordFailure(e.getCause()); @@ -240,7 +258,7 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< return Collections.emptyMap(); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT_MANY"); final Map fromCache = new HashMap<>(); try { @@ -249,8 +267,8 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); final Map valueByKeys = (expireAfterAccessMillis == null) - ? redisClient.mget(keysByBytes).toCompletableFuture().join() - : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); + ? 
redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); for (var entry : keysByKeyBytes.entrySet()) { valueByKeys.forEach((k, v) -> { @@ -265,7 +283,7 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< } if (fromCache.size() == keys.size()) { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return fromCache; } @@ -285,16 +303,16 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< }); if (expireAfterWriteMillis == null) { - redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); + redisClient.mset(keyAndValuesAsBytes); } else { - redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); } } catch (Exception e) { logger.error(e.getMessage(), e); } } - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); fromCache.putAll(values); return fromCache; } catch (CompletionException e) { @@ -310,11 +328,11 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< public void invalidate(@Nonnull K key) { if (key != null) { final byte[] keyAsBytes = mapKey(key); - var telemetryContext = telemetry.create("INVALIDATE", name); + var telemetryContext = telemetry.get("INVALIDATE"); try { - redisClient.del(keyAsBytes).toCompletableFuture().join(); - telemetryContext.recordSuccess(); + redisClient.del(keyAsBytes); + telemetryContext.recordSuccess(null); } catch (CompletionException e) { telemetryContext.recordFailure(e.getCause()); } catch (Exception e) { @@ -326,15 +344,15 @@ public void invalidate(@Nonnull K key) { @Override public void invalidate(@Nonnull Collection keys) { if (keys != null && !keys.isEmpty()) { - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + var telemetryContext = telemetry.get("INVALIDATE_MANY"); try { final byte[][] keysAsBytes = keys.stream() .map(this::mapKey) 
.toArray(byte[][]::new); - redisClient.del(keysAsBytes).toCompletableFuture().join(); - telemetryContext.recordSuccess(); + redisClient.del(keysAsBytes); + telemetryContext.recordSuccess(null); } catch (CompletionException e) { telemetryContext.recordFailure(e.getCause()); } catch (Exception e) { @@ -345,11 +363,11 @@ public void invalidate(@Nonnull Collection keys) { @Override public void invalidateAll() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + var telemetryContext = telemetry.get("INVALIDATE_ALL"); try { - redisClient.flushAll().toCompletableFuture().join(); - telemetryContext.recordSuccess(); + redisClient.flushAll(); + telemetryContext.recordSuccess(null); } catch (CompletionException e) { telemetryContext.recordFailure(e.getCause()); } catch (Exception e) { @@ -364,12 +382,12 @@ public CompletionStage getAsync(@Nonnull K key) { return CompletableFuture.completedFuture(null); } - var telemetryContext = telemetry.create("GET", name); + var telemetryContext = telemetry.get("GET"); final byte[] keyAsBytes = mapKey(key); CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.get(keyAsBytes) - : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + ? redisAsyncClient.get(keyAsBytes) + : redisAsyncClient.getex(keyAsBytes, expireAfterAccessMillis); return responseCompletionStage .thenApply(jsonAsBytes -> { @@ -390,14 +408,14 @@ public CompletionStage> getAsync(@Nonnull Collection keys) { return CompletableFuture.completedFuture(Collections.emptyMap()); } - var telemetryContext = telemetry.create("GET_MANY", name); + var telemetryContext = telemetry.get("GET_MANY"); var keysByKeyByte = keys.stream() .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); var responseCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.mget(keysAsBytes) - : redisClient.getex(keysAsBytes, expireAfterAccessMillis); + ? 
redisAsyncClient.mget(keysAsBytes) + : redisAsyncClient.getex(keysAsBytes, expireAfterAccessMillis); return responseCompletionStage .thenApply(valuesByKeys -> { @@ -426,16 +444,16 @@ public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { return CompletableFuture.completedFuture(value); } - var telemetryContext = telemetry.create("PUT", name); + var telemetryContext = telemetry.get("PUT"); final byte[] keyAsBytes = mapKey(key); final byte[] valueAsBytes = valueMapper.write(value); - final CompletionStage responseCompletionStage = (expireAfterWriteMillis == null) - ? redisClient.set(keyAsBytes, valueAsBytes) - : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + var responseCompletionStage = (expireAfterWriteMillis == null) + ? redisAsyncClient.set(keyAsBytes, valueAsBytes) + : redisAsyncClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); return responseCompletionStage .thenApply(r -> { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; }) .exceptionally(e -> { @@ -451,7 +469,7 @@ public CompletionStage> putAsync(@Nonnull Map keyAndValues) { return CompletableFuture.completedFuture(Collections.emptyMap()); } - var telemetryContext = telemetry.create("PUT_MANY", name); + var telemetryContext = telemetry.get("PUT_MANY"); var keyAndValuesAsBytes = new HashMap(); keyAndValues.forEach((k, v) -> { final byte[] keyAsBytes = mapKey(k); @@ -460,12 +478,12 @@ public CompletionStage> putAsync(@Nonnull Map keyAndValues) { }); var responseCompletionStage = (expireAfterWriteMillis == null) - ? redisClient.mset(keyAndValuesAsBytes) - : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); + ? 
redisAsyncClient.mset(keyAndValuesAsBytes) + : redisAsyncClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); return responseCompletionStage .thenApply(r -> { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return keyAndValues; }) .exceptionally(e -> { @@ -480,11 +498,11 @@ public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function return CompletableFuture.completedFuture(null); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT"); final byte[] keyAsBytes = mapKey(key); final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.get(keyAsBytes) - : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + ? redisAsyncClient.get(keyAsBytes) + : redisAsyncClient.getex(keyAsBytes, expireAfterAccessMillis); return responseCompletionStage .thenApply(valueMapper::read) @@ -501,12 +519,12 @@ public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function final byte[] valueAsBytes = valueMapper.write(value); var putFutureResponse = (expireAfterWriteMillis == null) - ? redisClient.set(keyAsBytes, valueAsBytes) - : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + ? 
redisAsyncClient.set(keyAsBytes, valueAsBytes) + : redisAsyncClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); return putFutureResponse .thenApply(v -> { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; }); }); @@ -524,14 +542,14 @@ public CompletionStage> computeIfAbsentAsync(@Nonnull Collection ke return CompletableFuture.completedFuture(Collections.emptyMap()); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT_MANY"); final Map keysByKeyBytes = keys.stream() .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); var responseCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.mget(keysByBytes) - : redisClient.getex(keysByBytes, expireAfterAccessMillis); + ? redisAsyncClient.mget(keysByBytes) + : redisAsyncClient.getex(keysByBytes, expireAfterAccessMillis); return responseCompletionStage .thenApply(valueByKeys -> { @@ -570,12 +588,12 @@ public CompletionStage> computeIfAbsentAsync(@Nonnull Collection ke }); var putCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.mset(keyAndValuesAsBytes) - : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); + ? 
redisAsyncClient.mset(keyAndValuesAsBytes) + : redisAsyncClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); return putCompletionStage .thenApply(v -> { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); fromCache.putAll(values); return fromCache; }); @@ -594,11 +612,11 @@ public CompletionStage invalidateAsync(@Nonnull K key) { return CompletableFuture.completedFuture(false); } - var telemetryContext = telemetry.create("INVALIDATE", name); + var telemetryContext = telemetry.get("INVALIDATE"); final byte[] keyAsBytes = mapKey(key); - return redisClient.del(keyAsBytes) + return redisAsyncClient.del(keyAsBytes) .thenApply(r -> { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return true; }) .exceptionally(e -> { @@ -613,15 +631,15 @@ public CompletionStage invalidateAsync(@Nonnull Collection keys) { return CompletableFuture.completedFuture(false); } - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + var telemetryContext = telemetry.get("INVALIDATE_MANY"); final byte[][] keyAsBytes = keys.stream() .distinct() .map(this::mapKey) .toArray(byte[][]::new); - return redisClient.del(keyAsBytes) + return redisAsyncClient.del(keyAsBytes) .thenApply(r -> { - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return true; }) .exceptionally(e -> { @@ -633,11 +651,11 @@ public CompletionStage invalidateAsync(@Nonnull Collection keys) { @Nonnull @Override public CompletionStage invalidateAllAsync() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); - return redisClient.flushAll() + var telemetryContext = telemetry.get("INVALIDATE_ALL"); + return redisAsyncClient.flushAll() .thenApply(r -> { - telemetryContext.recordSuccess(); - return r; + telemetryContext.recordSuccess(null); + return true; }) .exceptionally(e -> { telemetryContext.recordFailure(e); diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java 
b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java similarity index 61% rename from cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java rename to cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java index a5b995bd6..0af4506e8 100644 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java @@ -5,7 +5,7 @@ import java.util.Map; import java.util.concurrent.CompletionStage; -public interface RedisCacheClient { +public interface RedisCacheAsyncClient { @Nonnull CompletionStage get(byte[] key); @@ -20,16 +20,16 @@ public interface RedisCacheClient { CompletionStage> getex(byte[][] keys, long expireAfterMillis); @Nonnull - CompletionStage set(byte[] key, byte[] value); + CompletionStage set(byte[] key, byte[] value); @Nonnull - CompletionStage mset(@Nonnull Map keyAndValue); + CompletionStage mset(@Nonnull Map keyAndValue); @Nonnull - CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); + CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); @Nonnull - CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); @Nonnull CompletionStage del(byte[] key); @@ -38,5 +38,5 @@ public interface RedisCacheClient { CompletionStage del(byte[][] keys); @Nonnull - CompletionStage flushAll(); + CompletionStage flushAll(); } diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java index a5b995bd6..9edfbb1c9 100644 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java +++ 
b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -3,40 +3,32 @@ import jakarta.annotation.Nonnull; import java.util.Map; -import java.util.concurrent.CompletionStage; public interface RedisCacheClient { @Nonnull - CompletionStage get(byte[] key); + byte[] get(byte[] key); @Nonnull - CompletionStage> mget(byte[][] keys); + Map mget(byte[][] keys); @Nonnull - CompletionStage getex(byte[] key, long expireAfterMillis); + byte[] getex(byte[] key, long expireAfterMillis); @Nonnull - CompletionStage> getex(byte[][] keys, long expireAfterMillis); + Map getex(byte[][] keys, long expireAfterMillis); - @Nonnull - CompletionStage set(byte[] key, byte[] value); + void set(byte[] key, byte[] value); - @Nonnull - CompletionStage mset(@Nonnull Map keyAndValue); + void mset(@Nonnull Map keyAndValue); - @Nonnull - CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); + void psetex(byte[] key, byte[] value, long expireAfterMillis); - @Nonnull - CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + void psetex(@Nonnull Map keyAndValue, long expireAfterMillis); - @Nonnull - CompletionStage del(byte[] key); + long del(byte[] key); - @Nonnull - CompletionStage del(byte[][] keys); + long del(byte[][] keys); - @Nonnull - CompletionStage flushAll(); + void flushAll(); } diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java index 120bd511a..d94dfdc31 100644 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -3,6 +3,7 @@ import jakarta.annotation.Nullable; import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.time.Duration; @@ -21,4 +22,6 @@ 
public interface RedisCacheConfig { @Nullable Duration expireAfterAccess(); + + TelemetryConfig telemetry(); } diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java index 81e48e005..d3ea8846d 100644 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -1,8 +1,5 @@ package ru.tinkoff.kora.cache.redis; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.json.common.JsonCommonModule; import ru.tinkoff.kora.json.common.JsonReader; @@ -16,11 +13,6 @@ public interface RedisCacheMapperModule extends JsonCommonModule { - @DefaultComponent - default RedisCacheTelemetry redisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { - return new RedisCacheTelemetry(metrics, tracer); - } - @Json @DefaultComponent default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java index fe07914b0..84c9675d8 100644 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -1,7 +1,7 @@ package ru.tinkoff.kora.cache.redis; -import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; +import ru.tinkoff.kora.cache.CacheModule; -public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { +public interface RedisCacheModule extends 
CacheModule, RedisCacheMapperModule { } diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java deleted file mode 100644 index af84dfdd5..000000000 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java +++ /dev/null @@ -1,129 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nonnull; -import jakarta.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; - -public final class RedisCacheTelemetry { - - private static final String ORIGIN = "redis"; - - record Operation(@Nonnull String name, @Nonnull String cacheName) implements CacheTelemetryOperation { - - @Nonnull - @Override - public String origin() { - return ORIGIN; - } - } - - interface TelemetryContext { - void recordSuccess(); - - void recordSuccess(@Nullable Object valueFromCache); - - void recordFailure(@Nullable Throwable throwable); - } - - private static final Logger logger = LoggerFactory.getLogger(RedisCacheTelemetry.class); - - private static final TelemetryContext STUB_CONTEXT = new StubCacheTelemetry(); - - @Nullable - private final CacheMetrics metrics; - @Nullable - private final CacheTracer tracer; - private final boolean isStubTelemetry; - - RedisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { - this.metrics = metrics; - this.tracer = tracer; - this.isStubTelemetry = metrics == null && tracer == null; - } - - record StubCacheTelemetry() implements TelemetryContext { - - @Override - public void recordSuccess() {} - - @Override - public void recordSuccess(@Nullable Object valueFromCache) {} - - @Override - public void recordFailure(@Nullable Throwable throwable) {} - 
} - - class DefaultCacheTelemetryContext implements TelemetryContext { - - private final Operation operation; - - private CacheTracer.CacheSpan span; - private final long startedInNanos = System.nanoTime(); - - DefaultCacheTelemetryContext(Operation operation) { - logger.trace("Operation '{}' for cache '{}' started", operation.name(), operation.cacheName()); - if (tracer != null) { - span = tracer.trace(operation); - } - this.operation = operation; - } - - @Override - public void recordSuccess() { - recordSuccess(null); - } - - @Override - public void recordSuccess(@Nullable Object valueFromCache) { - if (metrics != null) { - final long durationInNanos = System.nanoTime() - startedInNanos; - metrics.recordSuccess(operation, durationInNanos, valueFromCache); - } - if (span != null) { - span.recordSuccess(); - } - - if (operation.name().startsWith("GET")) { - if (valueFromCache == null) { - logger.trace("Operation '{}' for cache '{}' didn't retried value", operation.name(), operation.cacheName()); - } else { - logger.debug("Operation '{}' for cache '{}' retried value", operation.name(), operation.cacheName()); - } - } else { - logger.trace("Operation '{}' for cache '{}' completed", operation.name(), operation.cacheName()); - } - } - - @Override - public void recordFailure(@Nullable Throwable throwable) { - if (metrics != null) { - final long durationInNanos = System.nanoTime() - startedInNanos; - metrics.recordFailure(operation, durationInNanos, throwable); - } - if (span != null) { - span.recordFailure(throwable); - } - - if (throwable != null) { - logger.warn("Operation '{}' failed for cache '{}' with message: {}", - operation.name(), operation.cacheName(), throwable.getMessage()); - } else { - logger.warn("Operation '{}' failed for cache '{}'", - operation.name(), operation.cacheName()); - } - } - } - - @Nonnull - TelemetryContext create(@Nonnull String operationName, @Nonnull String cacheName) { - if (isStubTelemetry) { - return STUB_CONTEXT; - } - - return new 
DefaultCacheTelemetryContext(new Operation(operationName, cacheName)); - } -} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java deleted file mode 100644 index 6fb2ee3e9..000000000 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java +++ /dev/null @@ -1,43 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.RedisURI; -import io.lettuce.core.SocketOptions; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; - -import java.time.Duration; - -@ConfigValueExtractor -public interface LettuceClientConfig { - - String uri(); - - @Nullable - Integer database(); - - @Nullable - String user(); - - @Nullable - String password(); - - default Protocol protocol() { - return Protocol.RESP3; - } - - default Duration socketTimeout() { - return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); - } - - default Duration commandTimeout() { - return Duration.ofSeconds(RedisURI.DEFAULT_TIMEOUT); - } - - enum Protocol { - - /** Redis 2 to Redis 5 */ - RESP2, - /** Redis 6+ */ - RESP3 - } -} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java deleted file mode 100644 index fb29c05e2..000000000 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java +++ /dev/null @@ -1,133 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.*; -import io.lettuce.core.cluster.ClusterClientOptions; -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.cluster.RedisClusterURIUtil; -import io.lettuce.core.protocol.ProtocolVersion; -import 
jakarta.annotation.Nonnull; - -import java.net.URI; -import java.time.Duration; -import java.util.List; - -public final class LettuceClientFactory { - - @Nonnull - public AbstractRedisClient build(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - - final List mappedRedisUris = buildRedisURI(config); - - return (mappedRedisUris.size() == 1) - ? buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) - : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - public RedisClusterClient buildRedisClusterClient(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - final List mappedRedisUris = buildRedisURI(config); - return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - public RedisClient buildRedisClient(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - final List mappedRedisUris = buildRedisURI(config); - return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, - Duration commandTimeout, - Duration socketTimeout, - 
ProtocolVersion protocolVersion) { - final RedisClusterClient client = RedisClusterClient.create(redisURIs); - client.setOptions(ClusterClientOptions.builder() - .autoReconnect(true) - .publishOnScheduler(true) - .suspendReconnectOnProtocolFailure(false) - .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) - .protocolVersion(protocolVersion) - .timeoutOptions(TimeoutOptions.builder() - .connectionTimeout() - .fixedTimeout(commandTimeout) - .timeoutCommands(true) - .build()) - .socketOptions(SocketOptions.builder() - .keepAlive(true) - .connectTimeout(socketTimeout) - .build()) - .build()); - - return client; - } - - @Nonnull - private static RedisClient buildRedisClientInternal(RedisURI redisURI, - Duration commandTimeout, - Duration socketTimeout, - ProtocolVersion protocolVersion) { - final RedisClient client = RedisClient.create(redisURI); - client.setOptions(ClientOptions.builder() - .autoReconnect(true) - .publishOnScheduler(true) - .suspendReconnectOnProtocolFailure(false) - .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) - .protocolVersion(protocolVersion) - .timeoutOptions(TimeoutOptions.builder() - .connectionTimeout() - .fixedTimeout(commandTimeout) - .timeoutCommands(true) - .build()) - .socketOptions(SocketOptions.builder() - .keepAlive(true) - .connectTimeout(socketTimeout) - .build()) - .build()); - - return client; - } - - static List buildRedisURI(LettuceClientConfig config) { - final String uri = config.uri(); - final Integer database = config.database(); - final String user = config.user(); - final String password = config.password(); - - final List redisURIS = RedisClusterURIUtil.toRedisURIs(URI.create(uri)); - return redisURIS.stream() - .map(redisURI -> { - RedisURI.Builder builder = RedisURI.builder(redisURI); - if (database != null) { - builder = builder.withDatabase(database); - } - if (user != null && password != null) { - builder = builder.withAuthentication(user, password); - } else if (password != 
null) { - builder = builder.withPassword(((CharSequence) password)); - } - - return builder - .withTimeout(config.commandTimeout()) - .build(); - }) - .toList(); - } -} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java deleted file mode 100644 index adda5d4f8..000000000 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java +++ /dev/null @@ -1,190 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.FlushMode; -import io.lettuce.core.GetExArgs; -import io.lettuce.core.KeyValue; -import io.lettuce.core.Value; -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; -import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; -import io.lettuce.core.codec.ByteArrayCodec; -import io.lettuce.core.support.AsyncConnectionPoolSupport; -import io.lettuce.core.support.BoundedAsyncPool; -import io.lettuce.core.support.BoundedPoolConfig; -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.util.TimeUtils; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.stream.Collectors; - -final class LettuceClusterRedisCacheClient implements RedisCacheClient, Lifecycle { - - private static final Logger logger = LoggerFactory.getLogger(LettuceClusterRedisCacheClient.class); - - private final RedisClusterClient redisClient; - - // use for pipeline commands only cause lettuce have bad performance when 
using pool - private BoundedAsyncPool> pool; - private StatefulRedisClusterConnection connection; - - // always use async cause sync uses JDK Proxy wrapped async impl - private RedisAdvancedClusterAsyncCommands commands; - - LettuceClusterRedisCacheClient(RedisClusterClient redisClient) { - this.redisClient = redisClient; - } - - @Nonnull - @Override - public CompletionStage get(byte[] key) { - return commands.get(key); - } - - @Nonnull - @Override - public CompletionStage> mget(byte[][] keys) { - return commands.mget(keys) - .thenApply(r -> r.stream() - .filter(Value::hasValue) - .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); - } - - @Nonnull - @Override - public CompletionStage getex(byte[] key, long expireAfterMillis) { - return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); - } - - @SuppressWarnings("unchecked") - @Nonnull - @Override - public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (byte[] key : keys) { - var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) - .thenApply(v -> (v == null) ? 
null : Map.entry(key, v)) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_void -> futures.stream() - .map(f -> f.getNow(null)) - .filter(Objects::nonNull) - .map(v -> ((Map.Entry) v)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); - }); - } - - @Nonnull - @Override - public CompletionStage set(byte[] key, byte[] value) { - return commands.set(key, value).thenApply(r -> true); - } - - @Override - public CompletionStage mset(Map keyAndValue) { - return commands.mset(keyAndValue).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (Map.Entry entry : keyAndValue.entrySet()) { - var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_v -> true); - }); - } - - @Nonnull - @Override - public CompletionStage del(byte[] key) { - return commands.del(key); - } - - @Nonnull - @Override - public CompletionStage del(byte[][] keys) { - return commands.del(keys); - } - - @Nonnull - @Override - public CompletionStage flushAll() { - return commands.flushall(FlushMode.SYNC).thenApply(r -> true); - } - - @Override - public void 
init() { - logger.debug("Redis Client (Lettuce) starting..."); - final long started = TimeUtils.started(); - - final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() - .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .minIdle(0) - .testOnAcquire(false) - .testOnCreate(false) - .testOnRelease(false) - .build(); - - this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); - this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); - this.commands = this.connection.async(); - - logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); - } - - @Override - public void release() { - logger.debug("Redis Client (Lettuce) stopping..."); - final long started = TimeUtils.started(); - - this.pool.close(); - this.connection.close(); - this.redisClient.shutdown(); - - logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); - } -} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java deleted file mode 100644 index 25cb53904..000000000 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java +++ /dev/null @@ -1,36 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.DefaultComponent; -import ru.tinkoff.kora.config.common.Config; -import ru.tinkoff.kora.config.common.ConfigValue; -import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; - -import java.time.Duration; - -public interface LettuceModule { - - 
default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { - var value = config.get("lettuce"); - return extractor.extract(value); - } - - default LettuceClientFactory lettuceClientFactory() { - return new LettuceClientFactory(); - } - - @DefaultComponent - default RedisCacheClient lettuceRedisClient(LettuceClientFactory factory, LettuceClientConfig config) { - var redisClient = factory.build(config); - if (redisClient instanceof io.lettuce.core.RedisClient rc) { - return new LettuceRedisCacheClient(rc, config); - } else if (redisClient instanceof RedisClusterClient rcc) { - return new LettuceClusterRedisCacheClient(rcc); - } else { - throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); - } - } -} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java deleted file mode 100644 index cdb95be7c..000000000 --- a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java +++ /dev/null @@ -1,190 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.*; -import io.lettuce.core.api.StatefulRedisConnection; -import io.lettuce.core.api.async.RedisAsyncCommands; -import io.lettuce.core.codec.ByteArrayCodec; -import io.lettuce.core.support.AsyncConnectionPoolSupport; -import io.lettuce.core.support.BoundedAsyncPool; -import io.lettuce.core.support.BoundedPoolConfig; -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.util.TimeUtils; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.CompletableFuture; -import 
java.util.concurrent.CompletionStage; -import java.util.stream.Collectors; - -final class LettuceRedisCacheClient implements RedisCacheClient, Lifecycle { - - private static final Logger logger = LoggerFactory.getLogger(LettuceRedisCacheClient.class); - - private final RedisURI redisURI; - private final RedisClient redisClient; - - // use for pipeline commands only cause lettuce have bad performance when using pool - private BoundedAsyncPool> pool; - private StatefulRedisConnection connection; - - // always use async cause sync uses JDK Proxy wrapped async impl - private RedisAsyncCommands commands; - - LettuceRedisCacheClient(RedisClient redisClient, LettuceClientConfig config) { - this.redisClient = redisClient; - final List redisURIs = LettuceClientFactory.buildRedisURI(config); - this.redisURI = redisURIs.size() == 1 ? redisURIs.get(0) : null; - } - - @Nonnull - @Override - public CompletionStage get(byte[] key) { - return commands.get(key); - } - - @Nonnull - @Override - public CompletionStage> mget(byte[][] keys) { - return commands.mget(keys) - .thenApply(r -> r.stream() - .filter(Value::hasValue) - .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); - } - - @Nonnull - @Override - public CompletionStage getex(byte[] key, long expireAfterMillis) { - return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); - } - - @SuppressWarnings("unchecked") - @Nonnull - @Override - public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (byte[] key : keys) { - var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) - .thenApply(v -> (v == null) ? 
null : Map.entry(key, v)) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_void -> futures.stream() - .map(f -> f.getNow(null)) - .filter(Objects::nonNull) - .map(v -> ((Map.Entry) v)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); - }); - } - - @Nonnull - @Override - public CompletionStage set(byte[] key, byte[] value) { - return commands.set(key, value).thenApply(r -> true); - } - - @Override - public CompletionStage mset(Map keyAndValue) { - return commands.mset(keyAndValue).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (Map.Entry entry : keyAndValue.entrySet()) { - var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) - .thenApply(v -> true) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) - .thenApply(_void -> true) - .whenComplete((s, throwable) -> pool.release(connection)); - }); - } - - @Nonnull - @Override - public CompletionStage del(byte[] key) { - return commands.del(key); - } - - @Nonnull - @Override - public CompletionStage del(byte[][] keys) { - return commands.del(keys); - } - - @Nonnull - @Override - public CompletionStage flushAll() { - return commands.flushall(FlushMode.SYNC).thenApply(r -> 
true); - } - - @Override - public void init() { - logger.debug("Redis Client (Lettuce) starting..."); - final long started = TimeUtils.started(); - - final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() - .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .minIdle(0) - .testOnAcquire(false) - .testOnCreate(false) - .testOnRelease(false) - .build(); - - this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); - this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); - this.commands = this.connection.async(); - - logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); - } - - @Override - public void release() { - logger.debug("Redis Client (Lettuce) stopping..."); - final long started = TimeUtils.started(); - - this.pool.close(); - this.connection.close(); - this.redisClient.shutdown(); - - logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); - } -} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java b/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java deleted file mode 100644 index faa5acb1a..000000000 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java +++ /dev/null @@ -1,22 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import io.lettuce.core.FlushMode; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.TestInstance; -import ru.tinkoff.kora.test.redis.RedisParams; -import ru.tinkoff.kora.test.redis.RedisTestContainer; - -import java.time.Duration; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@RedisTestContainer -class AsyncCacheExpireWriteTests extends AbstractAsyncCacheTests { - - @BeforeEach - void 
setup(RedisParams redisParams) throws Exception { - redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); - if (cache == null) { - cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); - } - } -} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java b/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java deleted file mode 100644 index 6f95bee4a..000000000 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java +++ /dev/null @@ -1,20 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import io.lettuce.core.FlushMode; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.TestInstance; -import ru.tinkoff.kora.test.redis.RedisParams; -import ru.tinkoff.kora.test.redis.RedisTestContainer; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@RedisTestContainer -class AsyncCacheTests extends AbstractAsyncCacheTests { - - @BeforeEach - void setup(RedisParams redisParams) throws Exception { - redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); - if (cache == null) { - cache = createCache(redisParams); - } - } -} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java b/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java deleted file mode 100644 index 3509176e2..000000000 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java +++ /dev/null @@ -1,85 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nullable; -import org.junit.jupiter.api.Assertions; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; -import ru.tinkoff.kora.test.redis.RedisParams; - -import java.time.Duration; - -abstract class CacheRunner extends Assertions implements RedisCacheModule { - - public 
static RedisCacheConfig getConfig(@Nullable Duration expireWrite, - @Nullable Duration expireRead) { - return new RedisCacheConfig() { - - @Override - public String keyPrefix() { - return "pref"; - } - - @Nullable - @Override - public Duration expireAfterWrite() { - return expireWrite; - } - - @Nullable - @Override - public Duration expireAfterAccess() { - return expireRead; - } - }; - } - - private RedisCacheClient createLettuce(RedisParams redisParams) throws Exception { - var lettuceClientFactory = lettuceClientFactory(); - var lettuceClientConfig = new LettuceClientConfig() { - @Override - public String uri() { - return redisParams.uri().toString(); - } - - @Override - public Integer database() { - return null; - } - - @Override - public String user() { - return null; - } - - @Override - public String password() { - return null; - } - }; - - var lettuceClient = lettuceRedisClient(lettuceClientFactory, lettuceClientConfig); - if (lettuceClient instanceof Lifecycle lc) { - lc.init(); - } - return lettuceClient; - } - - private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { - var lettuceClient = createLettuce(redisParams); - return new DummyCache(getConfig(expireWrite, expireRead), lettuceClient, redisCacheTelemetry(null, null), - stringRedisKeyMapper(), stringRedisValueMapper()); - } - - protected DummyCache createCache(RedisParams redisParams) throws Exception { - return createDummyCache(redisParams, null, null); - } - - protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { - return createDummyCache(redisParams, expireWrite, null); - } - - protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception { - return createDummyCache(redisParams, null, expireRead); - } -} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java 
b/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java deleted file mode 100644 index 72feb88f6..000000000 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java +++ /dev/null @@ -1,22 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import io.lettuce.core.FlushMode; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.TestInstance; -import ru.tinkoff.kora.test.redis.RedisParams; -import ru.tinkoff.kora.test.redis.RedisTestContainer; - -import java.time.Duration; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@RedisTestContainer -class SyncCacheExpireWriteTests extends AbstractSyncCacheTests { - - @BeforeEach - void setup(RedisParams redisParams) throws Exception { - redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); - if (cache == null) { - cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); - } - } -} diff --git a/cache/cache-redis-jedis/build.gradle b/cache/cache-redis-jedis/build.gradle index 44c085705..8ef7e5d61 100644 --- a/cache/cache-redis-jedis/build.gradle +++ b/cache/cache-redis-jedis/build.gradle @@ -1,21 +1,10 @@ dependencies { - annotationProcessor project(':config:config-annotation-processor') + annotationProcessor project(":config:config-annotation-processor") - api project(":cache:cache-common") + api project(":cache:cache-redis-common") + api project(":redis:redis-jedis") - implementation project(":json:json-common") implementation project(":config:config-common") - implementation(libs.redis.lettuce) { - exclude group: 'io.projectreactor', module: 'reactor-core' - exclude group: 'io.netty', module: 'netty-common' - exclude group: 'io.netty', module: 'netty-handler' - exclude group: 'io.netty', module: 'netty-transport' - } - implementation libs.reactor.core - implementation libs.netty.common - implementation libs.netty.handlers - implementation libs.netty.transports - implementation libs.apache.pool 
testImplementation project(":internal:test-logging") testImplementation project(":internal:test-redis") diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java deleted file mode 100644 index 941591ac8..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java +++ /dev/null @@ -1,660 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.cache.AsyncCache; - -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionException; -import java.util.concurrent.CompletionStage; -import java.util.function.Function; -import java.util.stream.Collectors; - -public abstract class AbstractRedisCache implements AsyncCache { - - private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); - - private final String name; - private final RedisCacheClient redisClient; - private final RedisCacheTelemetry telemetry; - private final byte[] keyPrefix; - - private final RedisCacheKeyMapper keyMapper; - private final RedisCacheValueMapper valueMapper; - - private final Long expireAfterAccessMillis; - private final Long expireAfterWriteMillis; - - protected AbstractRedisCache(String name, - RedisCacheConfig config, - RedisCacheClient redisClient, - RedisCacheTelemetry telemetry, - RedisCacheKeyMapper keyMapper, - RedisCacheValueMapper valueMapper) { - this.name = name; - this.redisClient = redisClient; - this.telemetry = telemetry; - this.keyMapper = keyMapper; - this.valueMapper = valueMapper; - this.expireAfterAccessMillis = (config.expireAfterAccess() == null) - ? null - : config.expireAfterAccess().toMillis(); - this.expireAfterWriteMillis = (config.expireAfterWrite() == null) - ? 
null - : config.expireAfterWrite().toMillis(); - - if (config.keyPrefix().isEmpty()) { - this.keyPrefix = null; - } else { - var prefixRaw = config.keyPrefix().getBytes(StandardCharsets.UTF_8); - this.keyPrefix = new byte[prefixRaw.length + RedisCacheKeyMapper.DELIMITER.length]; - System.arraycopy(prefixRaw, 0, this.keyPrefix, 0, prefixRaw.length); - System.arraycopy(RedisCacheKeyMapper.DELIMITER, 0, this.keyPrefix, prefixRaw.length, RedisCacheKeyMapper.DELIMITER.length); - } - } - - @Override - public V get(@Nonnull K key) { - if (key == null) { - return null; - } - - var telemetryContext = telemetry.create("GET", name); - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] jsonAsBytes = (expireAfterAccessMillis == null) - ? redisClient.get(keyAsBytes).toCompletableFuture().join() - : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - final V value = valueMapper.read(jsonAsBytes); - telemetryContext.recordSuccess(value); - return value; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return null; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return null; - } - } - - @Nonnull - @Override - public Map get(@Nonnull Collection keys) { - if (keys == null || keys.isEmpty()) { - return Collections.emptyMap(); - } - - var telemetryContext = telemetry.create("GET_MANY", name); - try { - final Map keysByKeyBytes = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); - final Map valueByKeys = (expireAfterAccessMillis == null) - ? 
redisClient.mget(keysByBytes).toCompletableFuture().join() - : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - final Map keyToValue = new HashMap<>(); - for (var entry : keysByKeyBytes.entrySet()) { - valueByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - keyToValue.put(entry.getKey(), value); - } - }); - } - - telemetryContext.recordSuccess(keyToValue); - return keyToValue; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return Collections.emptyMap(); - } catch (Exception e) { - telemetryContext.recordFailure(e); - return Collections.emptyMap(); - } - } - - @Nonnull - @Override - public V put(@Nonnull K key, @Nonnull V value) { - if (key == null || value == null) { - return null; - } - - var telemetryContext = telemetry.create("PUT", name); - - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] valueAsBytes = valueMapper.write(value); - if (expireAfterWriteMillis == null) { - redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - telemetryContext.recordSuccess(); - return value; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return value; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return value; - } - } - - @Nonnull - @Override - public Map put(@Nonnull Map keyAndValues) { - if (keyAndValues == null || keyAndValues.isEmpty()) { - return Collections.emptyMap(); - } - - var telemetryContext = telemetry.create("PUT_MANY", name); - - try { - var keyAndValuesAsBytes = new HashMap(); - keyAndValues.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - if (expireAfterWriteMillis == null) { - 
redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - - telemetryContext.recordSuccess(); - return keyAndValues; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return keyAndValues; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return keyAndValues; - } - } - - @Override - public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction) { - if (key == null) { - return null; - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); - - V fromCache = null; - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] jsonAsBytes = (expireAfterAccessMillis == null) - ? redisClient.get(keyAsBytes).toCompletableFuture().join() - : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - fromCache = valueMapper.read(jsonAsBytes); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - if (fromCache != null) { - telemetryContext.recordSuccess(); - return fromCache; - } - - try { - var value = mappingFunction.apply(key); - if (value != null) { - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] valueAsBytes = valueMapper.write(value); - if (expireAfterWriteMillis == null) { - redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - - telemetryContext.recordSuccess(); - return value; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return null; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return null; - } - } - - @Nonnull - @Override - public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function, Map> mappingFunction) { - if (keys == null || 
keys.isEmpty()) { - return Collections.emptyMap(); - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); - - final Map fromCache = new HashMap<>(); - try { - final Map keysByKeyBytes = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); - final Map valueByKeys = (expireAfterAccessMillis == null) - ? redisClient.mget(keysByBytes).toCompletableFuture().join() - : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - for (var entry : keysByKeyBytes.entrySet()) { - valueByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - fromCache.put(entry.getKey(), value); - } - }); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - if (fromCache.size() == keys.size()) { - telemetryContext.recordSuccess(); - return fromCache; - } - - var missingKeys = keys.stream() - .filter(k -> !fromCache.containsKey(k)) - .collect(Collectors.toSet()); - - try { - var values = mappingFunction.apply(missingKeys); - if (!values.isEmpty()) { - try { - var keyAndValuesAsBytes = new HashMap(); - values.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - if (expireAfterWriteMillis == null) { - redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - - telemetryContext.recordSuccess(); - fromCache.putAll(values); - return fromCache; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return fromCache; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return fromCache; - } - } - - @Override - public 
void invalidate(@Nonnull K key) { - if (key != null) { - final byte[] keyAsBytes = mapKey(key); - var telemetryContext = telemetry.create("INVALIDATE", name); - - try { - redisClient.del(keyAsBytes).toCompletableFuture().join(); - telemetryContext.recordSuccess(); - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - } catch (Exception e) { - telemetryContext.recordFailure(e); - } - } - } - - @Override - public void invalidate(@Nonnull Collection keys) { - if (keys != null && !keys.isEmpty()) { - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); - - try { - final byte[][] keysAsBytes = keys.stream() - .map(this::mapKey) - .toArray(byte[][]::new); - - redisClient.del(keysAsBytes).toCompletableFuture().join(); - telemetryContext.recordSuccess(); - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - } catch (Exception e) { - telemetryContext.recordFailure(e); - } - } - } - - @Override - public void invalidateAll() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); - - try { - redisClient.flushAll().toCompletableFuture().join(); - telemetryContext.recordSuccess(); - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - } catch (Exception e) { - telemetryContext.recordFailure(e); - } - } - - @Nonnull - @Override - public CompletionStage getAsync(@Nonnull K key) { - if (key == null) { - return CompletableFuture.completedFuture(null); - } - - var telemetryContext = telemetry.create("GET", name); - final byte[] keyAsBytes = mapKey(key); - - CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.get(keyAsBytes) - : redisClient.getex(keyAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(jsonAsBytes -> { - final V value = valueMapper.read(jsonAsBytes); - telemetryContext.recordSuccess(value); - return value; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return null; - }); - } - - @Nonnull - @Override - public CompletionStage> getAsync(@Nonnull Collection keys) { - if (keys == null || keys.isEmpty()) { - return CompletableFuture.completedFuture(Collections.emptyMap()); - } - - var telemetryContext = telemetry.create("GET_MANY", name); - var keysByKeyByte = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); - var responseCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.mget(keysAsBytes) - : redisClient.getex(keysAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(valuesByKeys -> { - final Map keyToValue = new HashMap<>(); - for (var entry : keysByKeyByte.entrySet()) { - valuesByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - keyToValue.put(entry.getKey(), value); - } - }); - } - telemetryContext.recordSuccess(keyToValue); - return keyToValue; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return Collections.emptyMap(); - }); - } - - @Nonnull - @Override - public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { - if (key == null) { - return CompletableFuture.completedFuture(value); - } - - var telemetryContext = telemetry.create("PUT", name); - final byte[] keyAsBytes = mapKey(key); - final byte[] valueAsBytes = valueMapper.write(value); - final CompletionStage responseCompletionStage = (expireAfterWriteMillis == null) - ? 
redisClient.set(keyAsBytes, valueAsBytes) - : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); - - return responseCompletionStage - .thenApply(r -> { - telemetryContext.recordSuccess(); - return value; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return value; - }); - } - - @Nonnull - @Override - public CompletionStage> putAsync(@Nonnull Map keyAndValues) { - if (keyAndValues == null || keyAndValues.isEmpty()) { - return CompletableFuture.completedFuture(Collections.emptyMap()); - } - - var telemetryContext = telemetry.create("PUT_MANY", name); - var keyAndValuesAsBytes = new HashMap(); - keyAndValues.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - var responseCompletionStage = (expireAfterWriteMillis == null) - ? redisClient.mset(keyAndValuesAsBytes) - : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(r -> { - telemetryContext.recordSuccess(); - return keyAndValues; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return keyAndValues; - }); - } - - @Override - public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function> mappingFunction) { - if (key == null) { - return CompletableFuture.completedFuture(null); - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); - final byte[] keyAsBytes = mapKey(key); - final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.get(keyAsBytes) - : redisClient.getex(keyAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(valueMapper::read) - .thenCompose(fromCache -> { - if (fromCache != null) { - return CompletableFuture.completedFuture(fromCache); - } - - return mappingFunction.apply(key) - .thenCompose(value -> { - if (value == null) { - return CompletableFuture.completedFuture(null); - } - - final byte[] valueAsBytes = valueMapper.write(value); - var putFutureResponse = (expireAfterWriteMillis == null) - ? redisClient.set(keyAsBytes, valueAsBytes) - : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); - - return putFutureResponse - .thenApply(v -> { - telemetryContext.recordSuccess(); - return value; - }); - }); - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return null; - }); - } - - @Nonnull - @Override - public CompletionStage> computeIfAbsentAsync(@Nonnull Collection keys, @Nonnull Function, CompletionStage>> mappingFunction) { - if (keys == null || keys.isEmpty()) { - return CompletableFuture.completedFuture(Collections.emptyMap()); - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); - final Map keysByKeyBytes = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); - var responseCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.mget(keysByBytes) - : redisClient.getex(keysByBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(valueByKeys -> { - final Map fromCache = new HashMap<>(); - for (var entry : keysByKeyBytes.entrySet()) { - valueByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - fromCache.put(entry.getKey(), value); - } - }); - } - - return fromCache; - }) - .thenCompose(fromCache -> { - if (fromCache.size() == keys.size()) { - return CompletableFuture.completedFuture(fromCache); - } - - var missingKeys = keys.stream() - .filter(k -> !fromCache.containsKey(k)) - .collect(Collectors.toSet()); - - return mappingFunction.apply(missingKeys) - .thenCompose(values -> { - if (values.isEmpty()) { - return CompletableFuture.completedFuture(fromCache); - } - - var keyAndValuesAsBytes = new HashMap(); - values.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - var putCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.mset(keyAndValuesAsBytes) - : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); - - return putCompletionStage - .thenApply(v -> { - telemetryContext.recordSuccess(); - fromCache.putAll(values); - return fromCache; - }); - }); - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return Collections.emptyMap(); - }); - } - - @Nonnull - @Override - public CompletionStage invalidateAsync(@Nonnull K key) { - if (key == null) { - return CompletableFuture.completedFuture(false); - } - - var telemetryContext = telemetry.create("INVALIDATE", name); - final byte[] keyAsBytes = mapKey(key); - return redisClient.del(keyAsBytes) - .thenApply(r -> { - telemetryContext.recordSuccess(); - return true; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return false; - }); - } - - @Override - public CompletionStage invalidateAsync(@Nonnull Collection keys) { - if (keys == null || keys.isEmpty()) { - return CompletableFuture.completedFuture(false); - } - - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); - final byte[][] keyAsBytes = keys.stream() - .distinct() - .map(this::mapKey) - .toArray(byte[][]::new); - - return redisClient.del(keyAsBytes) - .thenApply(r -> { - telemetryContext.recordSuccess(); - return true; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return false; - }); - } - - @Nonnull - @Override - public CompletionStage invalidateAllAsync() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); - return redisClient.flushAll() - .thenApply(r -> { - telemetryContext.recordSuccess(); - return r; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return false; - }); - } - - private byte[] mapKey(K key) { - final byte[] suffixAsBytes = keyMapper.apply(key); - if (this.keyPrefix == null) { - return suffixAsBytes; - } else { - var keyAsBytes = new byte[keyPrefix.length + suffixAsBytes.length]; - System.arraycopy(this.keyPrefix, 0, keyAsBytes, 0, 
this.keyPrefix.length); - System.arraycopy(suffixAsBytes, 0, keyAsBytes, this.keyPrefix.length, suffixAsBytes.length); - - return keyAsBytes; - } - } -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java deleted file mode 100644 index 75a932976..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java +++ /dev/null @@ -1,7 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import ru.tinkoff.kora.cache.AsyncCache; - -public interface RedisCache extends AsyncCache { - -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java deleted file mode 100644 index a5b995bd6..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java +++ /dev/null @@ -1,42 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nonnull; - -import java.util.Map; -import java.util.concurrent.CompletionStage; - -public interface RedisCacheClient { - - @Nonnull - CompletionStage get(byte[] key); - - @Nonnull - CompletionStage> mget(byte[][] keys); - - @Nonnull - CompletionStage getex(byte[] key, long expireAfterMillis); - - @Nonnull - CompletionStage> getex(byte[][] keys, long expireAfterMillis); - - @Nonnull - CompletionStage set(byte[] key, byte[] value); - - @Nonnull - CompletionStage mset(@Nonnull Map keyAndValue); - - @Nonnull - CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); - - @Nonnull - CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); - - @Nonnull - CompletionStage del(byte[] key); - - @Nonnull - CompletionStage del(byte[][] keys); - - @Nonnull - CompletionStage flushAll(); -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java 
b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java deleted file mode 100644 index 120bd511a..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java +++ /dev/null @@ -1,24 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - - -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; - -import java.time.Duration; - -@ConfigValueExtractor -public interface RedisCacheConfig { - - /** - * Key prefix allow to avoid key collision in single Redis database between multiple caches - * - * @return Redis Cache key prefix, if empty string means that prefix will NOT be applied - */ - String keyPrefix(); - - @Nullable - Duration expireAfterWrite(); - - @Nullable - Duration expireAfterAccess(); -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java deleted file mode 100644 index f6edc71ad..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java +++ /dev/null @@ -1,17 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import ru.tinkoff.kora.cache.CacheKeyMapper; - -import java.nio.charset.StandardCharsets; -import java.util.function.Function; - -/** - * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. 
- */ -public interface RedisCacheKeyMapper extends Function { - - /** - * Is used to delimiter composite key such as {@link CacheKeyMapper} - */ - byte[] DELIMITER = ":".getBytes(StandardCharsets.UTF_8); -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java deleted file mode 100644 index 81e48e005..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java +++ /dev/null @@ -1,175 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; -import ru.tinkoff.kora.common.DefaultComponent; -import ru.tinkoff.kora.json.common.JsonCommonModule; -import ru.tinkoff.kora.json.common.JsonReader; -import ru.tinkoff.kora.json.common.JsonWriter; -import ru.tinkoff.kora.json.common.annotation.Json; - -import java.io.IOException; -import java.math.BigInteger; -import java.nio.charset.StandardCharsets; -import java.util.UUID; - -public interface RedisCacheMapperModule extends JsonCommonModule { - - @DefaultComponent - default RedisCacheTelemetry redisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { - return new RedisCacheTelemetry(metrics, tracer); - } - - @Json - @DefaultComponent - default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(V value) { - try { - return jsonWriter.toByteArray(value); - } catch (IOException e) { - throw new IllegalStateException(e.getMessage()); - } - } - - @Override - public V read(byte[] serializedValue) { - try { - return (serializedValue == null) ? 
null : jsonReader.read(serializedValue); - } catch (IOException e) { - throw new IllegalStateException(e.getMessage()); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper stringRedisValueMapper() { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(String value) { - return value.getBytes(StandardCharsets.UTF_8); - } - - @Override - public String read(byte[] serializedValue) { - return (serializedValue == null) ? null : new String(serializedValue, StandardCharsets.UTF_8); - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper bytesRedisValueMapper() { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(byte[] value) { - return value; - } - - @Override - public byte[] read(byte[] serializedValue) { - return serializedValue; - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper intRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(Integer value) { - return keyMapper.apply(value); - } - - @Override - public Integer read(byte[] serializedValue) { - if (serializedValue == null) { - return null; - } else { - return Integer.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper longRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(Long value) { - return keyMapper.apply(value); - } - - @Override - public Long read(byte[] serializedValue) { - if (serializedValue == null) { - return null; - } else { - return Long.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper bigIntRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(BigInteger value) { - return keyMapper.apply(value); - } - - @Override - public BigInteger 
read(byte[] serializedValue) { - if (serializedValue == null) { - return null; - } else { - return new BigInteger(new String(serializedValue, StandardCharsets.UTF_8)); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper uuidRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - - @Override - public byte[] write(UUID value) { - return keyMapper.apply(value); - } - - @Override - public UUID read(byte[] serializedValue) { - return UUID.fromString(new String(serializedValue, StandardCharsets.UTF_8)); - } - }; - } - - @DefaultComponent - default RedisCacheKeyMapper intRedisKeyMapper() { - return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper longRedisKeyMapper() { - return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper bigIntRedisKeyMapper() { - return c -> c.toString().getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper uuidRedisKeyMapper() { - return c -> c.toString().getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper stringRedisKeyMapper() { - return c -> c.getBytes(StandardCharsets.UTF_8); - } -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java deleted file mode 100644 index fe07914b0..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ /dev/null @@ -1,7 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; - -public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { - -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java 
b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java deleted file mode 100644 index af84dfdd5..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java +++ /dev/null @@ -1,129 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nonnull; -import jakarta.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; - -public final class RedisCacheTelemetry { - - private static final String ORIGIN = "redis"; - - record Operation(@Nonnull String name, @Nonnull String cacheName) implements CacheTelemetryOperation { - - @Nonnull - @Override - public String origin() { - return ORIGIN; - } - } - - interface TelemetryContext { - void recordSuccess(); - - void recordSuccess(@Nullable Object valueFromCache); - - void recordFailure(@Nullable Throwable throwable); - } - - private static final Logger logger = LoggerFactory.getLogger(RedisCacheTelemetry.class); - - private static final TelemetryContext STUB_CONTEXT = new StubCacheTelemetry(); - - @Nullable - private final CacheMetrics metrics; - @Nullable - private final CacheTracer tracer; - private final boolean isStubTelemetry; - - RedisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { - this.metrics = metrics; - this.tracer = tracer; - this.isStubTelemetry = metrics == null && tracer == null; - } - - record StubCacheTelemetry() implements TelemetryContext { - - @Override - public void recordSuccess() {} - - @Override - public void recordSuccess(@Nullable Object valueFromCache) {} - - @Override - public void recordFailure(@Nullable Throwable throwable) {} - } - - class DefaultCacheTelemetryContext implements TelemetryContext { - - private final Operation operation; - - private CacheTracer.CacheSpan 
span; - private final long startedInNanos = System.nanoTime(); - - DefaultCacheTelemetryContext(Operation operation) { - logger.trace("Operation '{}' for cache '{}' started", operation.name(), operation.cacheName()); - if (tracer != null) { - span = tracer.trace(operation); - } - this.operation = operation; - } - - @Override - public void recordSuccess() { - recordSuccess(null); - } - - @Override - public void recordSuccess(@Nullable Object valueFromCache) { - if (metrics != null) { - final long durationInNanos = System.nanoTime() - startedInNanos; - metrics.recordSuccess(operation, durationInNanos, valueFromCache); - } - if (span != null) { - span.recordSuccess(); - } - - if (operation.name().startsWith("GET")) { - if (valueFromCache == null) { - logger.trace("Operation '{}' for cache '{}' didn't retried value", operation.name(), operation.cacheName()); - } else { - logger.debug("Operation '{}' for cache '{}' retried value", operation.name(), operation.cacheName()); - } - } else { - logger.trace("Operation '{}' for cache '{}' completed", operation.name(), operation.cacheName()); - } - } - - @Override - public void recordFailure(@Nullable Throwable throwable) { - if (metrics != null) { - final long durationInNanos = System.nanoTime() - startedInNanos; - metrics.recordFailure(operation, durationInNanos, throwable); - } - if (span != null) { - span.recordFailure(throwable); - } - - if (throwable != null) { - logger.warn("Operation '{}' failed for cache '{}' with message: {}", - operation.name(), operation.cacheName(), throwable.getMessage()); - } else { - logger.warn("Operation '{}' failed for cache '{}'", - operation.name(), operation.cacheName()); - } - } - } - - @Nonnull - TelemetryContext create(@Nonnull String operationName, @Nonnull String cacheName) { - if (isStubTelemetry) { - return STUB_CONTEXT; - } - - return new DefaultCacheTelemetryContext(new Operation(operationName, cacheName)); - } -} diff --git 
a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java deleted file mode 100644 index cf2037f42..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java +++ /dev/null @@ -1,19 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -/** - * Converts cache value into serializer value to store in cache. - */ -public interface RedisCacheValueMapper { - - /** - * @param value to serialize - * @return value serialized - */ - byte[] write(V value); - - /** - * @param serializedValue to deserialize - * @return value deserialized - */ - V read(byte[] serializedValue); -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java new file mode 100644 index 000000000..c53533d42 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java @@ -0,0 +1,133 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nonnull; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +final class JedisCacheAsyncClient implements RedisCacheAsyncClient { + + private final RedisCacheClient syncClient; + + JedisCacheAsyncClient(RedisCacheClient syncClient) { + this.syncClient = syncClient; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + try { + return CompletableFuture.completedFuture(syncClient.get(key)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + try { + return 
CompletableFuture.completedFuture(syncClient.mget(keys)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + try { + return CompletableFuture.completedFuture(syncClient.getex(key, expireAfterMillis)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + try { + return CompletableFuture.completedFuture(syncClient.getex(keys, expireAfterMillis)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + try { + syncClient.set(key, value); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage mset(@Nonnull Map keyAndValue) { + try { + syncClient.mset(keyAndValue); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + try { + syncClient.psetex(key, value, expireAfterMillis); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis) { + try { + syncClient.psetex(keyAndValue, expireAfterMillis); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + try { + return CompletableFuture.completedFuture(syncClient.del(key)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull 
+ @Override + public CompletionStage del(byte[][] keys) { + try { + return CompletableFuture.completedFuture(syncClient.del(keys)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage flushAll() { + try { + syncClient.flushAll(); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java new file mode 100644 index 000000000..fb5f69920 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java @@ -0,0 +1,21 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import redis.clients.jedis.UnifiedJedis; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheModule; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.redis.jedis.JedisModule; + +public interface JedisCacheModule extends RedisCacheModule, JedisModule { + + @DefaultComponent + default RedisCacheClient lettuceRedisClient(UnifiedJedis jedis) { + return new JedisCacheSyncClient(jedis); + } + + @DefaultComponent + default RedisCacheAsyncClient lettuceRedisAsyncClient(RedisCacheClient redisCacheClient) { + return new JedisCacheAsyncClient(redisCacheClient); + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java new file mode 100644 index 000000000..c831ea9b0 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java @@ -0,0 +1,119 @@ +package ru.tinkoff.kora.cache.redis.jedis; 
+ +import jakarta.annotation.Nonnull; +import redis.clients.jedis.Response; +import redis.clients.jedis.UnifiedJedis; +import redis.clients.jedis.params.GetExParams; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +final class JedisCacheSyncClient implements RedisCacheClient { + + private final UnifiedJedis jedis; + + JedisCacheSyncClient(UnifiedJedis jedis) { + this.jedis = jedis; + } + + @Nonnull + @Override + public byte[] get(byte[] key) { + return jedis.get(key); + } + + @Nonnull + @Override + public Map mget(byte[][] keys) { + List values = jedis.mget(keys); + + int i = 0; + Map keysAndValues = new LinkedHashMap<>(values.size() + 1); + for (byte[] key : keys) { + byte[] value = values.get(i); + if (value != null) { + keysAndValues.put(key, value); + } + i++; + } + + return keysAndValues; + } + + @Nonnull + @Override + public byte[] getex(byte[] key, long expireAfterMillis) { + return jedis.getEx(key, GetExParams.getExParams().px(expireAfterMillis)); + } + + @Nonnull + @Override + public Map getex(byte[][] keys, long expireAfterMillis) { + try (var tx = jedis.pipelined()) { + final Map> responses = new LinkedHashMap<>(); + for (byte[] key : keys) { + var response = tx.getEx(key, GetExParams.getExParams().px(expireAfterMillis)); + responses.put(key, response); + } + tx.sync(); + + final Map values = new LinkedHashMap<>(); + responses.forEach((k, r) -> { + byte[] value = r.get(); + if (value != null) { + values.put(k, value); + } + }); + + return values; + } + } + + @Override + public void set(byte[] key, byte[] value) { + jedis.set(key, value); + } + + @Override + public void mset(Map keyAndValue) { + var keysAndValues = new ArrayList(keyAndValue.size() * 2); + for (var entry : keyAndValue.entrySet()) { + keysAndValues.add(entry.getKey()); + keysAndValues.add(entry.getValue()); + } + jedis.mset(keysAndValues.toArray(new byte[][]{})); + } + + 
@Override + public void psetex(byte[] key, byte[] value, long expireAfterMillis) { + jedis.psetex(key, expireAfterMillis, value); + } + + @Override + public void psetex(Map keyAndValue, long expireAfterMillis) { + try (var pipeline = jedis.pipelined()) { + for (var entry : keyAndValue.entrySet()) { + pipeline.psetex(entry.getKey(), expireAfterMillis, entry.getValue()); + } + pipeline.sync(); + } + } + + @Override + public long del(byte[] key) { + return jedis.del(key); + } + + @Override + public long del(byte[][] keys) { + return jedis.del(keys); + } + + @Override + public void flushAll() { + jedis.flushAll(); + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java deleted file mode 100644 index 6fb2ee3e9..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java +++ /dev/null @@ -1,43 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.RedisURI; -import io.lettuce.core.SocketOptions; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; - -import java.time.Duration; - -@ConfigValueExtractor -public interface LettuceClientConfig { - - String uri(); - - @Nullable - Integer database(); - - @Nullable - String user(); - - @Nullable - String password(); - - default Protocol protocol() { - return Protocol.RESP3; - } - - default Duration socketTimeout() { - return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); - } - - default Duration commandTimeout() { - return Duration.ofSeconds(RedisURI.DEFAULT_TIMEOUT); - } - - enum Protocol { - - /** Redis 2 to Redis 5 */ - RESP2, - /** Redis 6+ */ - RESP3 - } -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java 
b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java deleted file mode 100644 index fb29c05e2..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java +++ /dev/null @@ -1,133 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.*; -import io.lettuce.core.cluster.ClusterClientOptions; -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.cluster.RedisClusterURIUtil; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nonnull; - -import java.net.URI; -import java.time.Duration; -import java.util.List; - -public final class LettuceClientFactory { - - @Nonnull - public AbstractRedisClient build(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - - final List mappedRedisUris = buildRedisURI(config); - - return (mappedRedisUris.size() == 1) - ? 
buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) - : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - public RedisClusterClient buildRedisClusterClient(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - final List mappedRedisUris = buildRedisURI(config); - return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - public RedisClient buildRedisClient(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - final List mappedRedisUris = buildRedisURI(config); - return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, - Duration commandTimeout, - Duration socketTimeout, - ProtocolVersion protocolVersion) { - final RedisClusterClient client = RedisClusterClient.create(redisURIs); - client.setOptions(ClusterClientOptions.builder() - .autoReconnect(true) - .publishOnScheduler(true) - .suspendReconnectOnProtocolFailure(false) - .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) - .protocolVersion(protocolVersion) - .timeoutOptions(TimeoutOptions.builder() - .connectionTimeout() - .fixedTimeout(commandTimeout) - .timeoutCommands(true) - .build()) - .socketOptions(SocketOptions.builder() - .keepAlive(true) - .connectTimeout(socketTimeout) - .build()) 
- .build()); - - return client; - } - - @Nonnull - private static RedisClient buildRedisClientInternal(RedisURI redisURI, - Duration commandTimeout, - Duration socketTimeout, - ProtocolVersion protocolVersion) { - final RedisClient client = RedisClient.create(redisURI); - client.setOptions(ClientOptions.builder() - .autoReconnect(true) - .publishOnScheduler(true) - .suspendReconnectOnProtocolFailure(false) - .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) - .protocolVersion(protocolVersion) - .timeoutOptions(TimeoutOptions.builder() - .connectionTimeout() - .fixedTimeout(commandTimeout) - .timeoutCommands(true) - .build()) - .socketOptions(SocketOptions.builder() - .keepAlive(true) - .connectTimeout(socketTimeout) - .build()) - .build()); - - return client; - } - - static List buildRedisURI(LettuceClientConfig config) { - final String uri = config.uri(); - final Integer database = config.database(); - final String user = config.user(); - final String password = config.password(); - - final List redisURIS = RedisClusterURIUtil.toRedisURIs(URI.create(uri)); - return redisURIS.stream() - .map(redisURI -> { - RedisURI.Builder builder = RedisURI.builder(redisURI); - if (database != null) { - builder = builder.withDatabase(database); - } - if (user != null && password != null) { - builder = builder.withAuthentication(user, password); - } else if (password != null) { - builder = builder.withPassword(((CharSequence) password)); - } - - return builder - .withTimeout(config.commandTimeout()) - .build(); - }) - .toList(); - } -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java deleted file mode 100644 index 25cb53904..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java +++ /dev/null @@ -1,36 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; 
- -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.DefaultComponent; -import ru.tinkoff.kora.config.common.Config; -import ru.tinkoff.kora.config.common.ConfigValue; -import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; - -import java.time.Duration; - -public interface LettuceModule { - - default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { - var value = config.get("lettuce"); - return extractor.extract(value); - } - - default LettuceClientFactory lettuceClientFactory() { - return new LettuceClientFactory(); - } - - @DefaultComponent - default RedisCacheClient lettuceRedisClient(LettuceClientFactory factory, LettuceClientConfig config) { - var redisClient = factory.build(config); - if (redisClient instanceof io.lettuce.core.RedisClient rc) { - return new LettuceRedisCacheClient(rc, config); - } else if (redisClient instanceof RedisClusterClient rcc) { - return new LettuceClusterRedisCacheClient(rcc); - } else { - throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); - } - } -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java deleted file mode 100644 index cdb95be7c..000000000 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java +++ /dev/null @@ -1,190 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.*; -import io.lettuce.core.api.StatefulRedisConnection; -import io.lettuce.core.api.async.RedisAsyncCommands; -import io.lettuce.core.codec.ByteArrayCodec; -import io.lettuce.core.support.AsyncConnectionPoolSupport; -import 
io.lettuce.core.support.BoundedAsyncPool; -import io.lettuce.core.support.BoundedPoolConfig; -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.util.TimeUtils; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.stream.Collectors; - -final class LettuceRedisCacheClient implements RedisCacheClient, Lifecycle { - - private static final Logger logger = LoggerFactory.getLogger(LettuceRedisCacheClient.class); - - private final RedisURI redisURI; - private final RedisClient redisClient; - - // use for pipeline commands only cause lettuce have bad performance when using pool - private BoundedAsyncPool> pool; - private StatefulRedisConnection connection; - - // always use async cause sync uses JDK Proxy wrapped async impl - private RedisAsyncCommands commands; - - LettuceRedisCacheClient(RedisClient redisClient, LettuceClientConfig config) { - this.redisClient = redisClient; - final List redisURIs = LettuceClientFactory.buildRedisURI(config); - this.redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; - } - - @Nonnull - @Override - public CompletionStage get(byte[] key) { - return commands.get(key); - } - - @Nonnull - @Override - public CompletionStage> mget(byte[][] keys) { - return commands.mget(keys) - .thenApply(r -> r.stream() - .filter(Value::hasValue) - .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); - } - - @Nonnull - @Override - public CompletionStage getex(byte[] key, long expireAfterMillis) { - return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); - } - - @SuppressWarnings("unchecked") - @Nonnull - @Override - public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (byte[] key : keys) { - var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) - .thenApply(v -> (v == null) ? null : Map.entry(key, v)) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_void -> futures.stream() - .map(f -> f.getNow(null)) - .filter(Objects::nonNull) - .map(v -> ((Map.Entry) v)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); - }); - } - - @Nonnull - @Override - public CompletionStage set(byte[] key, byte[] value) { - return commands.set(key, value).thenApply(r -> true); - } - - @Override - public CompletionStage mset(Map keyAndValue) { - return commands.mset(keyAndValue).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) 
{ - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (Map.Entry entry : keyAndValue.entrySet()) { - var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) - .thenApply(v -> true) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) - .thenApply(_void -> true) - .whenComplete((s, throwable) -> pool.release(connection)); - }); - } - - @Nonnull - @Override - public CompletionStage del(byte[] key) { - return commands.del(key); - } - - @Nonnull - @Override - public CompletionStage del(byte[][] keys) { - return commands.del(keys); - } - - @Nonnull - @Override - public CompletionStage flushAll() { - return commands.flushall(FlushMode.SYNC).thenApply(r -> true); - } - - @Override - public void init() { - logger.debug("Redis Client (Lettuce) starting..."); - final long started = TimeUtils.started(); - - final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() - .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .minIdle(0) - .testOnAcquire(false) - .testOnCreate(false) - .testOnRelease(false) - .build(); - - this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); - this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); - this.commands = this.connection.async(); - - logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); - } - - @Override - public void release() { - logger.debug("Redis Client (Lettuce) stopping..."); - final long started = TimeUtils.started(); - - this.pool.close(); - this.connection.close(); - this.redisClient.shutdown(); - - 
logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); - } -} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java deleted file mode 100644 index f5ec3aa9d..000000000 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java +++ /dev/null @@ -1,229 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -abstract class AbstractSyncCacheTests extends CacheRunner { - - protected DummyCache cache = null; - - @Test - void getWhenCacheEmpty() { - // given - var key = "1"; - - // when - assertNull(cache.get(key)); - } - - @Test - void getWhenCacheFilled() { - // given - var key = "1"; - var value = "1"; - - // when - cache.put(key, value); - - // then - final String fromCache = cache.get(key); - assertEquals(value, fromCache); - } - - @Test - void getMultiWhenCacheEmpty() { - // given - List keys = List.of("1", "2"); - - // when - Map keyToValue = cache.get(keys); - assertTrue(keyToValue.isEmpty()); - } - - @Test - void getMultiWhenCacheFilledPartly() { - // given - List keys = List.of("1"); - for (String key : keys) { - assertNull(cache.get(key)); - cache.put(key, key); - } - - // when - Map keyToValue = cache.get(keys); - assertEquals(1, keyToValue.size()); - keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); - } - - @Test - void getMultiWhenCacheFilled() { - // given - List keys = List.of("1", "2"); - for (String key : keys) { - assertNull(cache.get(key)); - cache.put(key, key); - } - - // when - Map keyToValue = cache.get(keys); - assertEquals(2, keyToValue.size()); - keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> 
key.equals(k) && key.equals(v)))); - } - - @Test - void computeIfAbsentWhenCacheEmpty() { - // given - - // when - assertNull(cache.get("1")); - final String valueComputed = cache.computeIfAbsent("1", k -> "1"); - assertEquals("1", valueComputed); - - // then - final String cached = cache.get("1"); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiWhenCacheEmpty() { - // given - List keys = List.of("1", "2"); - for (String key : keys) { - assertNull(cache.get(key)); - } - - // when - final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { - if (keysCompute.size() == 2) { - return Map.of("1", "1", "2", "2"); - } - - throw new IllegalStateException("Should not happen"); - }); - assertEquals(2, valueComputed.size()); - assertEquals(Set.copyOf(keys), valueComputed.keySet()); - assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.get(keys); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiOneWhenCachePartly() { - // given - List keys = List.of("1"); - for (String key : keys) { - assertNull(cache.get(key)); - cache.put(key, key); - } - - // when - final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { - throw new IllegalStateException("Should not happen"); - }); - assertEquals(1, valueComputed.size()); - assertEquals(Set.copyOf(keys), valueComputed.keySet()); - assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.get(keys); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiAllWhenCachePartly() { - // given - List keys = List.of("1"); - for (String key : keys) { - assertNull(cache.get(key)); - cache.put(key, key); - } - - // when - final Map valueComputed = cache.computeIfAbsent(Set.of("1", "2"), keysCompute -> { - if ("2".equals(keysCompute.iterator().next())) { - return Map.of("2", "2"); - } - - throw new IllegalStateException("Should not 
happen"); - }); - assertEquals(2, valueComputed.size()); - assertEquals(Set.of("1", "2"), valueComputed.keySet()); - assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.get(Set.of("1", "2")); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiWhenCacheFilled() { - // given - List keys = List.of("1", "2"); - for (String key : keys) { - assertNull(cache.get(key)); - cache.put(key, key); - } - - // when - final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { - throw new IllegalStateException("Should not happen"); - }); - assertEquals(2, valueComputed.size()); - assertEquals(Set.copyOf(keys), valueComputed.keySet()); - assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.get(keys); - assertEquals(valueComputed, cached); - } - - @Test - void getWrongKeyWhenCacheFilled() { - // given - var key = "1"; - var value = "1"; - - // when - cache.put(key, value); - - // then - final String fromCache = cache.get("2"); - assertNull(fromCache); - } - - @Test - void getWhenCacheInvalidate() { - // given - var key = "1"; - var value = "1"; - cache.put(key, value); - - // when - cache.invalidate(key); - - // then - final String fromCache = cache.get(key); - assertNull(fromCache); - } - - @Test - void getFromCacheWhenCacheInvalidateAll() { - // given - var key = "1"; - var value = "1"; - cache.put(key, value); - - // when - cache.invalidateAll(); - - // then - final String fromCache = cache.get(key); - assertNull(fromCache); - } -} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java deleted file mode 100644 index 3f7764c9a..000000000 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java +++ /dev/null @@ -1,22 +0,0 @@ -package 
ru.tinkoff.kora.cache.redis; - -import io.lettuce.core.FlushMode; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.TestInstance; -import ru.tinkoff.kora.test.redis.RedisParams; -import ru.tinkoff.kora.test.redis.RedisTestContainer; - -import java.time.Duration; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@RedisTestContainer -class AsyncCacheExpireReadTests extends AbstractAsyncCacheTests { - - @BeforeEach - void setup(RedisParams redisParams) throws Exception { - redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); - if (cache == null) { - cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); - } - } -} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java deleted file mode 100644 index 43008e371..000000000 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java +++ /dev/null @@ -1,22 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import io.lettuce.core.FlushMode; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.TestInstance; -import ru.tinkoff.kora.test.redis.RedisParams; -import ru.tinkoff.kora.test.redis.RedisTestContainer; - -import java.time.Duration; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@RedisTestContainer -class SyncCacheExpireReadTests extends AbstractSyncCacheTests { - - @BeforeEach - void setup(RedisParams redisParams) throws Exception { - redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); - if (cache == null) { - cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); - } - } -} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractAsyncCacheTests.java similarity index 98% rename from 
cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractAsyncCacheTests.java index 209ce6ecb..e5cf29368 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractAsyncCacheTests.java @@ -1,7 +1,7 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.jedis.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractSyncCacheTests.java similarity index 98% rename from cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractSyncCacheTests.java index f5ec3aa9d..28c17516a 100644 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractSyncCacheTests.java @@ -1,7 +1,7 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.jedis.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireReadTests.java similarity index 93% rename from 
cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireReadTests.java index 3f7764c9a..f335110dd 100644 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireReadTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireWriteTests.java similarity index 93% rename from cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireWriteTests.java index faa5acb1a..1bffc28fd 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireWriteTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheTests.java similarity index 93% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheTests.java index 6f95bee4a..085682a7a 100644 --- 
a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java similarity index 55% rename from cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java index 3509176e2..58d6a08f9 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java @@ -1,15 +1,21 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import jakarta.annotation.Nullable; import org.junit.jupiter.api.Assertions; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheConfig; +import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; +import ru.tinkoff.kora.cache.redis.jedis.testdata.DummyCache; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.redis.jedis.JedisConfig; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import ru.tinkoff.kora.test.redis.RedisParams; import java.time.Duration; +import java.util.List; -abstract class CacheRunner extends Assertions implements RedisCacheModule { +abstract class CacheRunner extends 
Assertions implements RedisCacheMapperModule, JedisCacheModule { public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, @Nullable Duration expireRead) { @@ -31,15 +37,19 @@ public Duration expireAfterWrite() { public Duration expireAfterAccess() { return expireRead; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } - private RedisCacheClient createLettuce(RedisParams redisParams) throws Exception { - var lettuceClientFactory = lettuceClientFactory(); - var lettuceClientConfig = new LettuceClientConfig() { + private RedisCacheClient createJedis(RedisParams redisParams) throws Exception { + var jedisConfig = new JedisConfig() { @Override - public String uri() { - return redisParams.uri().toString(); + public List uri() { + return List.of(redisParams.uri().toString()); } @Override @@ -58,16 +68,24 @@ public String password() { } }; - var lettuceClient = lettuceRedisClient(lettuceClientFactory, lettuceClientConfig); - if (lettuceClient instanceof Lifecycle lc) { - lc.init(); - } - return lettuceClient; + var jedis = jedisClient(jedisConfig); + return new JedisCacheSyncClient(jedis); + } + + private RedisCacheAsyncClient createAsyncJedis(RedisCacheClient cacheClient) throws Exception { + return new JedisCacheAsyncClient(cacheClient); } private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { - var lettuceClient = createLettuce(redisParams); - return new DummyCache(getConfig(expireWrite, expireRead), lettuceClient, redisCacheTelemetry(null, null), + var syncClient = createJedis(redisParams); + var asyncClient = createAsyncJedis(syncClient); + return new DummyCache(getConfig(expireWrite, expireRead), syncClient, asyncClient, + (telemetryConfig, args) -> operationName -> new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(Object valueFromCache) {} + @Override + public void recordFailure(Throwable throwable) {} + }, 
stringRedisKeyMapper(), stringRedisValueMapper()); } diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireReadTests.java similarity index 93% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireReadTests.java index 43008e371..75fd7c1d4 100644 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireReadTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireWriteTests.java similarity index 93% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireWriteTests.java index 72feb88f6..7d7319ecc 100644 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireWriteTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java 
b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheTests.java similarity index 93% rename from cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheTests.java index bb8818bbc..01ca94c4d 100644 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.jedis; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/testdata/DummyCache.java similarity index 50% rename from cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java rename to cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/testdata/DummyCache.java index 4d098b5ff..b1e2badc7 100644 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/testdata/DummyCache.java @@ -1,14 +1,16 @@ -package ru.tinkoff.kora.cache.redis.testdata; +package ru.tinkoff.kora.cache.redis.jedis.testdata; import ru.tinkoff.kora.cache.redis.*; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; public final class DummyCache extends AbstractRedisCache { public DummyCache(RedisCacheConfig config, RedisCacheClient redisClient, - RedisCacheTelemetry telemetry, + RedisCacheAsyncClient redisAsyncClient, + CacheTelemetryFactory telemetryFactory, RedisCacheKeyMapper keyMapper, RedisCacheValueMapper valueMapper) { - super("dummy", config, redisClient, telemetry, keyMapper, valueMapper); + 
super("dummy", config, redisClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper); } } diff --git a/cache/cache-redis-lettuce/build.gradle b/cache/cache-redis-lettuce/build.gradle index 44c085705..29c1f9046 100644 --- a/cache/cache-redis-lettuce/build.gradle +++ b/cache/cache-redis-lettuce/build.gradle @@ -1,21 +1,10 @@ dependencies { - annotationProcessor project(':config:config-annotation-processor') + annotationProcessor project(":config:config-annotation-processor") - api project(":cache:cache-common") + api project(":cache:cache-redis-common") + api project(":redis:redis-lettuce") - implementation project(":json:json-common") implementation project(":config:config-common") - implementation(libs.redis.lettuce) { - exclude group: 'io.projectreactor', module: 'reactor-core' - exclude group: 'io.netty', module: 'netty-common' - exclude group: 'io.netty', module: 'netty-handler' - exclude group: 'io.netty', module: 'netty-transport' - } - implementation libs.reactor.core - implementation libs.netty.common - implementation libs.netty.handlers - implementation libs.netty.transports - implementation libs.apache.pool testImplementation project(":internal:test-logging") testImplementation project(":internal:test-redis") diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java deleted file mode 100644 index 941591ac8..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java +++ /dev/null @@ -1,660 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.cache.AsyncCache; - -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionException; -import 
java.util.concurrent.CompletionStage; -import java.util.function.Function; -import java.util.stream.Collectors; - -public abstract class AbstractRedisCache implements AsyncCache { - - private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); - - private final String name; - private final RedisCacheClient redisClient; - private final RedisCacheTelemetry telemetry; - private final byte[] keyPrefix; - - private final RedisCacheKeyMapper keyMapper; - private final RedisCacheValueMapper valueMapper; - - private final Long expireAfterAccessMillis; - private final Long expireAfterWriteMillis; - - protected AbstractRedisCache(String name, - RedisCacheConfig config, - RedisCacheClient redisClient, - RedisCacheTelemetry telemetry, - RedisCacheKeyMapper keyMapper, - RedisCacheValueMapper valueMapper) { - this.name = name; - this.redisClient = redisClient; - this.telemetry = telemetry; - this.keyMapper = keyMapper; - this.valueMapper = valueMapper; - this.expireAfterAccessMillis = (config.expireAfterAccess() == null) - ? null - : config.expireAfterAccess().toMillis(); - this.expireAfterWriteMillis = (config.expireAfterWrite() == null) - ? null - : config.expireAfterWrite().toMillis(); - - if (config.keyPrefix().isEmpty()) { - this.keyPrefix = null; - } else { - var prefixRaw = config.keyPrefix().getBytes(StandardCharsets.UTF_8); - this.keyPrefix = new byte[prefixRaw.length + RedisCacheKeyMapper.DELIMITER.length]; - System.arraycopy(prefixRaw, 0, this.keyPrefix, 0, prefixRaw.length); - System.arraycopy(RedisCacheKeyMapper.DELIMITER, 0, this.keyPrefix, prefixRaw.length, RedisCacheKeyMapper.DELIMITER.length); - } - } - - @Override - public V get(@Nonnull K key) { - if (key == null) { - return null; - } - - var telemetryContext = telemetry.create("GET", name); - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] jsonAsBytes = (expireAfterAccessMillis == null) - ? 
redisClient.get(keyAsBytes).toCompletableFuture().join() - : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - final V value = valueMapper.read(jsonAsBytes); - telemetryContext.recordSuccess(value); - return value; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return null; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return null; - } - } - - @Nonnull - @Override - public Map get(@Nonnull Collection keys) { - if (keys == null || keys.isEmpty()) { - return Collections.emptyMap(); - } - - var telemetryContext = telemetry.create("GET_MANY", name); - try { - final Map keysByKeyBytes = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); - final Map valueByKeys = (expireAfterAccessMillis == null) - ? redisClient.mget(keysByBytes).toCompletableFuture().join() - : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - final Map keyToValue = new HashMap<>(); - for (var entry : keysByKeyBytes.entrySet()) { - valueByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - keyToValue.put(entry.getKey(), value); - } - }); - } - - telemetryContext.recordSuccess(keyToValue); - return keyToValue; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return Collections.emptyMap(); - } catch (Exception e) { - telemetryContext.recordFailure(e); - return Collections.emptyMap(); - } - } - - @Nonnull - @Override - public V put(@Nonnull K key, @Nonnull V value) { - if (key == null || value == null) { - return null; - } - - var telemetryContext = telemetry.create("PUT", name); - - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] valueAsBytes = valueMapper.write(value); - if (expireAfterWriteMillis == null) { - redisClient.set(keyAsBytes, 
valueAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - telemetryContext.recordSuccess(); - return value; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return value; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return value; - } - } - - @Nonnull - @Override - public Map put(@Nonnull Map keyAndValues) { - if (keyAndValues == null || keyAndValues.isEmpty()) { - return Collections.emptyMap(); - } - - var telemetryContext = telemetry.create("PUT_MANY", name); - - try { - var keyAndValuesAsBytes = new HashMap(); - keyAndValues.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - if (expireAfterWriteMillis == null) { - redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - - telemetryContext.recordSuccess(); - return keyAndValues; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return keyAndValues; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return keyAndValues; - } - } - - @Override - public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction) { - if (key == null) { - return null; - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); - - V fromCache = null; - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] jsonAsBytes = (expireAfterAccessMillis == null) - ? 
redisClient.get(keyAsBytes).toCompletableFuture().join() - : redisClient.getex(keyAsBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - fromCache = valueMapper.read(jsonAsBytes); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - if (fromCache != null) { - telemetryContext.recordSuccess(); - return fromCache; - } - - try { - var value = mappingFunction.apply(key); - if (value != null) { - try { - final byte[] keyAsBytes = mapKey(key); - final byte[] valueAsBytes = valueMapper.write(value); - if (expireAfterWriteMillis == null) { - redisClient.set(keyAsBytes, valueAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - - telemetryContext.recordSuccess(); - return value; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return null; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return null; - } - } - - @Nonnull - @Override - public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function, Map> mappingFunction) { - if (keys == null || keys.isEmpty()) { - return Collections.emptyMap(); - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); - - final Map fromCache = new HashMap<>(); - try { - final Map keysByKeyBytes = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); - final Map valueByKeys = (expireAfterAccessMillis == null) - ? 
redisClient.mget(keysByBytes).toCompletableFuture().join() - : redisClient.getex(keysByBytes, expireAfterAccessMillis).toCompletableFuture().join(); - - for (var entry : keysByKeyBytes.entrySet()) { - valueByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - fromCache.put(entry.getKey(), value); - } - }); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - if (fromCache.size() == keys.size()) { - telemetryContext.recordSuccess(); - return fromCache; - } - - var missingKeys = keys.stream() - .filter(k -> !fromCache.containsKey(k)) - .collect(Collectors.toSet()); - - try { - var values = mappingFunction.apply(missingKeys); - if (!values.isEmpty()) { - try { - var keyAndValuesAsBytes = new HashMap(); - values.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - if (expireAfterWriteMillis == null) { - redisClient.mset(keyAndValuesAsBytes).toCompletableFuture().join(); - } else { - redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis).toCompletableFuture().join(); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - - telemetryContext.recordSuccess(); - fromCache.putAll(values); - return fromCache; - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - return fromCache; - } catch (Exception e) { - telemetryContext.recordFailure(e); - return fromCache; - } - } - - @Override - public void invalidate(@Nonnull K key) { - if (key != null) { - final byte[] keyAsBytes = mapKey(key); - var telemetryContext = telemetry.create("INVALIDATE", name); - - try { - redisClient.del(keyAsBytes).toCompletableFuture().join(); - telemetryContext.recordSuccess(); - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - } catch (Exception e) { - telemetryContext.recordFailure(e); - } - } - } - - @Override - 
public void invalidate(@Nonnull Collection keys) { - if (keys != null && !keys.isEmpty()) { - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); - - try { - final byte[][] keysAsBytes = keys.stream() - .map(this::mapKey) - .toArray(byte[][]::new); - - redisClient.del(keysAsBytes).toCompletableFuture().join(); - telemetryContext.recordSuccess(); - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - } catch (Exception e) { - telemetryContext.recordFailure(e); - } - } - } - - @Override - public void invalidateAll() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); - - try { - redisClient.flushAll().toCompletableFuture().join(); - telemetryContext.recordSuccess(); - } catch (CompletionException e) { - telemetryContext.recordFailure(e.getCause()); - } catch (Exception e) { - telemetryContext.recordFailure(e); - } - } - - @Nonnull - @Override - public CompletionStage getAsync(@Nonnull K key) { - if (key == null) { - return CompletableFuture.completedFuture(null); - } - - var telemetryContext = telemetry.create("GET", name); - final byte[] keyAsBytes = mapKey(key); - - CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.get(keyAsBytes) - : redisClient.getex(keyAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(jsonAsBytes -> { - final V value = valueMapper.read(jsonAsBytes); - telemetryContext.recordSuccess(value); - return value; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return null; - }); - } - - @Nonnull - @Override - public CompletionStage> getAsync(@Nonnull Collection keys) { - if (keys == null || keys.isEmpty()) { - return CompletableFuture.completedFuture(Collections.emptyMap()); - } - - var telemetryContext = telemetry.create("GET_MANY", name); - var keysByKeyByte = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); - var responseCompletionStage = (expireAfterAccessMillis == null) - ? redisClient.mget(keysAsBytes) - : redisClient.getex(keysAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(valuesByKeys -> { - final Map keyToValue = new HashMap<>(); - for (var entry : keysByKeyByte.entrySet()) { - valuesByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - keyToValue.put(entry.getKey(), value); - } - }); - } - telemetryContext.recordSuccess(keyToValue); - return keyToValue; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return Collections.emptyMap(); - }); - } - - @Nonnull - @Override - public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { - if (key == null) { - return CompletableFuture.completedFuture(value); - } - - var telemetryContext = telemetry.create("PUT", name); - final byte[] keyAsBytes = mapKey(key); - final byte[] valueAsBytes = valueMapper.write(value); - final CompletionStage responseCompletionStage = (expireAfterWriteMillis == null) - ? 
redisClient.set(keyAsBytes, valueAsBytes) - : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); - - return responseCompletionStage - .thenApply(r -> { - telemetryContext.recordSuccess(); - return value; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return value; - }); - } - - @Nonnull - @Override - public CompletionStage> putAsync(@Nonnull Map keyAndValues) { - if (keyAndValues == null || keyAndValues.isEmpty()) { - return CompletableFuture.completedFuture(Collections.emptyMap()); - } - - var telemetryContext = telemetry.create("PUT_MANY", name); - var keyAndValuesAsBytes = new HashMap(); - keyAndValues.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - var responseCompletionStage = (expireAfterWriteMillis == null) - ? redisClient.mset(keyAndValuesAsBytes) - : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(r -> { - telemetryContext.recordSuccess(); - return keyAndValues; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return keyAndValues; - }); - } - - @Override - public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function> mappingFunction) { - if (key == null) { - return CompletableFuture.completedFuture(null); - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); - final byte[] keyAsBytes = mapKey(key); - final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.get(keyAsBytes) - : redisClient.getex(keyAsBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(valueMapper::read) - .thenCompose(fromCache -> { - if (fromCache != null) { - return CompletableFuture.completedFuture(fromCache); - } - - return mappingFunction.apply(key) - .thenCompose(value -> { - if (value == null) { - return CompletableFuture.completedFuture(null); - } - - final byte[] valueAsBytes = valueMapper.write(value); - var putFutureResponse = (expireAfterWriteMillis == null) - ? redisClient.set(keyAsBytes, valueAsBytes) - : redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); - - return putFutureResponse - .thenApply(v -> { - telemetryContext.recordSuccess(); - return value; - }); - }); - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return null; - }); - } - - @Nonnull - @Override - public CompletionStage> computeIfAbsentAsync(@Nonnull Collection keys, @Nonnull Function, CompletionStage>> mappingFunction) { - if (keys == null || keys.isEmpty()) { - return CompletableFuture.completedFuture(Collections.emptyMap()); - } - - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); - final Map keysByKeyBytes = keys.stream() - .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); - - final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); - var responseCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.mget(keysByBytes) - : redisClient.getex(keysByBytes, expireAfterAccessMillis); - - return responseCompletionStage - .thenApply(valueByKeys -> { - final Map fromCache = new HashMap<>(); - for (var entry : keysByKeyBytes.entrySet()) { - valueByKeys.forEach((k, v) -> { - if (Arrays.equals(entry.getValue(), k)) { - var value = valueMapper.read(v); - fromCache.put(entry.getKey(), value); - } - }); - } - - return fromCache; - }) - .thenCompose(fromCache -> { - if (fromCache.size() == keys.size()) { - return CompletableFuture.completedFuture(fromCache); - } - - var missingKeys = keys.stream() - .filter(k -> !fromCache.containsKey(k)) - .collect(Collectors.toSet()); - - return mappingFunction.apply(missingKeys) - .thenCompose(values -> { - if (values.isEmpty()) { - return CompletableFuture.completedFuture(fromCache); - } - - var keyAndValuesAsBytes = new HashMap(); - values.forEach((k, v) -> { - final byte[] keyAsBytes = mapKey(k); - final byte[] valueAsBytes = valueMapper.write(v); - keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); - }); - - var putCompletionStage = (expireAfterAccessMillis == null) - ? 
redisClient.mset(keyAndValuesAsBytes) - : redisClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); - - return putCompletionStage - .thenApply(v -> { - telemetryContext.recordSuccess(); - fromCache.putAll(values); - return fromCache; - }); - }); - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return Collections.emptyMap(); - }); - } - - @Nonnull - @Override - public CompletionStage invalidateAsync(@Nonnull K key) { - if (key == null) { - return CompletableFuture.completedFuture(false); - } - - var telemetryContext = telemetry.create("INVALIDATE", name); - final byte[] keyAsBytes = mapKey(key); - return redisClient.del(keyAsBytes) - .thenApply(r -> { - telemetryContext.recordSuccess(); - return true; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return false; - }); - } - - @Override - public CompletionStage invalidateAsync(@Nonnull Collection keys) { - if (keys == null || keys.isEmpty()) { - return CompletableFuture.completedFuture(false); - } - - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); - final byte[][] keyAsBytes = keys.stream() - .distinct() - .map(this::mapKey) - .toArray(byte[][]::new); - - return redisClient.del(keyAsBytes) - .thenApply(r -> { - telemetryContext.recordSuccess(); - return true; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return false; - }); - } - - @Nonnull - @Override - public CompletionStage invalidateAllAsync() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); - return redisClient.flushAll() - .thenApply(r -> { - telemetryContext.recordSuccess(); - return r; - }) - .exceptionally(e -> { - telemetryContext.recordFailure(e); - return false; - }); - } - - private byte[] mapKey(K key) { - final byte[] suffixAsBytes = keyMapper.apply(key); - if (this.keyPrefix == null) { - return suffixAsBytes; - } else { - var keyAsBytes = new byte[keyPrefix.length + suffixAsBytes.length]; - System.arraycopy(this.keyPrefix, 0, keyAsBytes, 0, 
this.keyPrefix.length); - System.arraycopy(suffixAsBytes, 0, keyAsBytes, this.keyPrefix.length, suffixAsBytes.length); - - return keyAsBytes; - } - } -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java deleted file mode 100644 index 75a932976..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java +++ /dev/null @@ -1,7 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import ru.tinkoff.kora.cache.AsyncCache; - -public interface RedisCache extends AsyncCache { - -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java deleted file mode 100644 index 120bd511a..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java +++ /dev/null @@ -1,24 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - - -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; - -import java.time.Duration; - -@ConfigValueExtractor -public interface RedisCacheConfig { - - /** - * Key prefix allow to avoid key collision in single Redis database between multiple caches - * - * @return Redis Cache key prefix, if empty string means that prefix will NOT be applied - */ - String keyPrefix(); - - @Nullable - Duration expireAfterWrite(); - - @Nullable - Duration expireAfterAccess(); -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java deleted file mode 100644 index f6edc71ad..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java +++ /dev/null @@ -1,17 +0,0 @@ -package ru.tinkoff.kora.cache.redis; 
- -import ru.tinkoff.kora.cache.CacheKeyMapper; - -import java.nio.charset.StandardCharsets; -import java.util.function.Function; - -/** - * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. - */ -public interface RedisCacheKeyMapper extends Function { - - /** - * Is used to delimiter composite key such as {@link CacheKeyMapper} - */ - byte[] DELIMITER = ":".getBytes(StandardCharsets.UTF_8); -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java deleted file mode 100644 index 81e48e005..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java +++ /dev/null @@ -1,175 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; -import ru.tinkoff.kora.common.DefaultComponent; -import ru.tinkoff.kora.json.common.JsonCommonModule; -import ru.tinkoff.kora.json.common.JsonReader; -import ru.tinkoff.kora.json.common.JsonWriter; -import ru.tinkoff.kora.json.common.annotation.Json; - -import java.io.IOException; -import java.math.BigInteger; -import java.nio.charset.StandardCharsets; -import java.util.UUID; - -public interface RedisCacheMapperModule extends JsonCommonModule { - - @DefaultComponent - default RedisCacheTelemetry redisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { - return new RedisCacheTelemetry(metrics, tracer); - } - - @Json - @DefaultComponent - default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(V value) { - try { - return jsonWriter.toByteArray(value); - } catch (IOException e) { - throw new 
IllegalStateException(e.getMessage()); - } - } - - @Override - public V read(byte[] serializedValue) { - try { - return (serializedValue == null) ? null : jsonReader.read(serializedValue); - } catch (IOException e) { - throw new IllegalStateException(e.getMessage()); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper stringRedisValueMapper() { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(String value) { - return value.getBytes(StandardCharsets.UTF_8); - } - - @Override - public String read(byte[] serializedValue) { - return (serializedValue == null) ? null : new String(serializedValue, StandardCharsets.UTF_8); - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper bytesRedisValueMapper() { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(byte[] value) { - return value; - } - - @Override - public byte[] read(byte[] serializedValue) { - return serializedValue; - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper intRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(Integer value) { - return keyMapper.apply(value); - } - - @Override - public Integer read(byte[] serializedValue) { - if (serializedValue == null) { - return null; - } else { - return Integer.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper longRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - @Override - public byte[] write(Long value) { - return keyMapper.apply(value); - } - - @Override - public Long read(byte[] serializedValue) { - if (serializedValue == null) { - return null; - } else { - return Long.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper bigIntRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new 
RedisCacheValueMapper<>() { - @Override - public byte[] write(BigInteger value) { - return keyMapper.apply(value); - } - - @Override - public BigInteger read(byte[] serializedValue) { - if (serializedValue == null) { - return null; - } else { - return new BigInteger(new String(serializedValue, StandardCharsets.UTF_8)); - } - } - }; - } - - @DefaultComponent - default RedisCacheValueMapper uuidRedisValueMapper(RedisCacheKeyMapper keyMapper) { - return new RedisCacheValueMapper<>() { - - @Override - public byte[] write(UUID value) { - return keyMapper.apply(value); - } - - @Override - public UUID read(byte[] serializedValue) { - return UUID.fromString(new String(serializedValue, StandardCharsets.UTF_8)); - } - }; - } - - @DefaultComponent - default RedisCacheKeyMapper intRedisKeyMapper() { - return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper longRedisKeyMapper() { - return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper bigIntRedisKeyMapper() { - return c -> c.toString().getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper uuidRedisKeyMapper() { - return c -> c.toString().getBytes(StandardCharsets.UTF_8); - } - - @DefaultComponent - default RedisCacheKeyMapper stringRedisKeyMapper() { - return c -> c.getBytes(StandardCharsets.UTF_8); - } -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java deleted file mode 100644 index fe07914b0..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ /dev/null @@ -1,7 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; - -public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { - -} diff 
--git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java deleted file mode 100644 index af84dfdd5..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java +++ /dev/null @@ -1,129 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nonnull; -import jakarta.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; - -public final class RedisCacheTelemetry { - - private static final String ORIGIN = "redis"; - - record Operation(@Nonnull String name, @Nonnull String cacheName) implements CacheTelemetryOperation { - - @Nonnull - @Override - public String origin() { - return ORIGIN; - } - } - - interface TelemetryContext { - void recordSuccess(); - - void recordSuccess(@Nullable Object valueFromCache); - - void recordFailure(@Nullable Throwable throwable); - } - - private static final Logger logger = LoggerFactory.getLogger(RedisCacheTelemetry.class); - - private static final TelemetryContext STUB_CONTEXT = new StubCacheTelemetry(); - - @Nullable - private final CacheMetrics metrics; - @Nullable - private final CacheTracer tracer; - private final boolean isStubTelemetry; - - RedisCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { - this.metrics = metrics; - this.tracer = tracer; - this.isStubTelemetry = metrics == null && tracer == null; - } - - record StubCacheTelemetry() implements TelemetryContext { - - @Override - public void recordSuccess() {} - - @Override - public void recordSuccess(@Nullable Object valueFromCache) {} - - @Override - public void recordFailure(@Nullable Throwable throwable) {} - } - - class DefaultCacheTelemetryContext 
implements TelemetryContext { - - private final Operation operation; - - private CacheTracer.CacheSpan span; - private final long startedInNanos = System.nanoTime(); - - DefaultCacheTelemetryContext(Operation operation) { - logger.trace("Operation '{}' for cache '{}' started", operation.name(), operation.cacheName()); - if (tracer != null) { - span = tracer.trace(operation); - } - this.operation = operation; - } - - @Override - public void recordSuccess() { - recordSuccess(null); - } - - @Override - public void recordSuccess(@Nullable Object valueFromCache) { - if (metrics != null) { - final long durationInNanos = System.nanoTime() - startedInNanos; - metrics.recordSuccess(operation, durationInNanos, valueFromCache); - } - if (span != null) { - span.recordSuccess(); - } - - if (operation.name().startsWith("GET")) { - if (valueFromCache == null) { - logger.trace("Operation '{}' for cache '{}' didn't retried value", operation.name(), operation.cacheName()); - } else { - logger.debug("Operation '{}' for cache '{}' retried value", operation.name(), operation.cacheName()); - } - } else { - logger.trace("Operation '{}' for cache '{}' completed", operation.name(), operation.cacheName()); - } - } - - @Override - public void recordFailure(@Nullable Throwable throwable) { - if (metrics != null) { - final long durationInNanos = System.nanoTime() - startedInNanos; - metrics.recordFailure(operation, durationInNanos, throwable); - } - if (span != null) { - span.recordFailure(throwable); - } - - if (throwable != null) { - logger.warn("Operation '{}' failed for cache '{}' with message: {}", - operation.name(), operation.cacheName(), throwable.getMessage()); - } else { - logger.warn("Operation '{}' failed for cache '{}'", - operation.name(), operation.cacheName()); - } - } - } - - @Nonnull - TelemetryContext create(@Nonnull String operationName, @Nonnull String cacheName) { - if (isStubTelemetry) { - return STUB_CONTEXT; - } - - return new DefaultCacheTelemetryContext(new 
Operation(operationName, cacheName)); - } -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java deleted file mode 100644 index cf2037f42..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java +++ /dev/null @@ -1,19 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -/** - * Converts cache value into serializer value to store in cache. - */ -public interface RedisCacheValueMapper { - - /** - * @param value to serialize - * @return value serialized - */ - byte[] write(V value); - - /** - * @param serializedValue to deserialize - * @return value deserialized - */ - V read(byte[] serializedValue); -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java new file mode 100644 index 000000000..da353f3b2 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java @@ -0,0 +1,58 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.AbstractRedisClient; +import io.lettuce.core.RedisURI; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheModule; +import ru.tinkoff.kora.redis.lettuce.LettuceConfig; +import ru.tinkoff.kora.redis.lettuce.LettuceModule; + +import java.net.URI; +import java.util.List; + +public interface LettuceCacheModule extends RedisCacheModule, LettuceModule { + + default RedisCacheAsyncClient lettuceRedisCacheAsyncClient(AbstractRedisClient 
redisClient, + RedisClusterAsyncCommands lettuceCommands, + LettuceConfig lettuceConfig) { + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + final Integer database = lettuceConfig.database(); + final String user = lettuceConfig.user(); + final String password = lettuceConfig.password(); + + final List redisURIs = lettuceConfig.uri().stream() + .flatMap(uri -> RedisClusterURIUtil.toRedisURIs(URI.create(uri)).stream()) + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(lettuceConfig.commandTimeout()) + .build(); + }) + .toList(); + + var redisURI = redisURIs.size() == 1 ? redisURIs.get(0) : null; + return new LettuceSingleCacheAsyncClient(rc, lettuceCommands, redisURI); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceClusterCacheAsyncClient(rcc, lettuceCommands); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + default RedisCacheClient lettuceRedisCacheSyncClient(RedisCacheAsyncClient redisCacheAsyncClient) { + return new LettuceCacheSyncClient(redisCacheAsyncClient); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java new file mode 100644 index 000000000..bc620c1fe --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java @@ -0,0 +1,75 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import jakarta.annotation.Nonnull; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import 
ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.Map; + +final class LettuceCacheSyncClient implements RedisCacheClient { + + private final RedisCacheAsyncClient redisAsyncClient; + + LettuceCacheSyncClient(RedisCacheAsyncClient redisAsyncClient) { + this.redisAsyncClient = redisAsyncClient; + } + + @Nonnull + @Override + public byte[] get(byte[] key) { + return this.redisAsyncClient.get(key).toCompletableFuture().join(); + } + + @Nonnull + @Override + public Map mget(byte[][] keys) { + return this.redisAsyncClient.mget(keys).toCompletableFuture().join(); + } + + @Nonnull + @Override + public byte[] getex(byte[] key, long expireAfterMillis) { + return this.redisAsyncClient.getex(key, expireAfterMillis).toCompletableFuture().join(); + } + + @Nonnull + @Override + public Map getex(byte[][] keys, long expireAfterMillis) { + return this.redisAsyncClient.getex(keys, expireAfterMillis).toCompletableFuture().join(); + } + + @Override + public void set(byte[] key, byte[] value) { + this.redisAsyncClient.set(key, value).toCompletableFuture().join(); + } + + @Override + public void mset(@Nonnull Map keyAndValue) { + this.redisAsyncClient.mset(keyAndValue).toCompletableFuture().join(); + } + + @Override + public void psetex(byte[] key, byte[] value, long expireAfterMillis) { + this.redisAsyncClient.psetex(key, value, expireAfterMillis).toCompletableFuture().join(); + } + + @Override + public void psetex(@Nonnull Map keyAndValue, long expireAfterMillis) { + this.redisAsyncClient.psetex(keyAndValue, expireAfterMillis).toCompletableFuture().join(); + } + + @Override + public long del(byte[] key) { + return this.redisAsyncClient.del(key).toCompletableFuture().join(); + } + + @Override + public long del(byte[][] keys) { + return this.redisAsyncClient.del(keys).toCompletableFuture().join(); + } + + @Override + public void flushAll() { + this.redisAsyncClient.flushAll().toCompletableFuture().join(); + } +} diff --git 
a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java deleted file mode 100644 index 6fb2ee3e9..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java +++ /dev/null @@ -1,43 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.RedisURI; -import io.lettuce.core.SocketOptions; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; - -import java.time.Duration; - -@ConfigValueExtractor -public interface LettuceClientConfig { - - String uri(); - - @Nullable - Integer database(); - - @Nullable - String user(); - - @Nullable - String password(); - - default Protocol protocol() { - return Protocol.RESP3; - } - - default Duration socketTimeout() { - return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); - } - - default Duration commandTimeout() { - return Duration.ofSeconds(RedisURI.DEFAULT_TIMEOUT); - } - - enum Protocol { - - /** Redis 2 to Redis 5 */ - RESP2, - /** Redis 6+ */ - RESP3 - } -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java deleted file mode 100644 index fb29c05e2..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java +++ /dev/null @@ -1,133 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.*; -import io.lettuce.core.cluster.ClusterClientOptions; -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.cluster.RedisClusterURIUtil; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nonnull; - -import java.net.URI; -import java.time.Duration; -import 
java.util.List; - -public final class LettuceClientFactory { - - @Nonnull - public AbstractRedisClient build(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - - final List mappedRedisUris = buildRedisURI(config); - - return (mappedRedisUris.size() == 1) - ? buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) - : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - public RedisClusterClient buildRedisClusterClient(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - final List mappedRedisUris = buildRedisURI(config); - return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - public RedisClient buildRedisClient(LettuceClientConfig config) { - final Duration commandTimeout = config.commandTimeout(); - final Duration socketTimeout = config.socketTimeout(); - final ProtocolVersion protocolVersion = switch (config.protocol()) { - case RESP2 -> ProtocolVersion.RESP2; - case RESP3 -> ProtocolVersion.RESP3; - }; - final List mappedRedisUris = buildRedisURI(config); - return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); - } - - @Nonnull - private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, - Duration commandTimeout, - Duration socketTimeout, - ProtocolVersion protocolVersion) { - final RedisClusterClient client = 
RedisClusterClient.create(redisURIs); - client.setOptions(ClusterClientOptions.builder() - .autoReconnect(true) - .publishOnScheduler(true) - .suspendReconnectOnProtocolFailure(false) - .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) - .protocolVersion(protocolVersion) - .timeoutOptions(TimeoutOptions.builder() - .connectionTimeout() - .fixedTimeout(commandTimeout) - .timeoutCommands(true) - .build()) - .socketOptions(SocketOptions.builder() - .keepAlive(true) - .connectTimeout(socketTimeout) - .build()) - .build()); - - return client; - } - - @Nonnull - private static RedisClient buildRedisClientInternal(RedisURI redisURI, - Duration commandTimeout, - Duration socketTimeout, - ProtocolVersion protocolVersion) { - final RedisClient client = RedisClient.create(redisURI); - client.setOptions(ClientOptions.builder() - .autoReconnect(true) - .publishOnScheduler(true) - .suspendReconnectOnProtocolFailure(false) - .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) - .protocolVersion(protocolVersion) - .timeoutOptions(TimeoutOptions.builder() - .connectionTimeout() - .fixedTimeout(commandTimeout) - .timeoutCommands(true) - .build()) - .socketOptions(SocketOptions.builder() - .keepAlive(true) - .connectTimeout(socketTimeout) - .build()) - .build()); - - return client; - } - - static List buildRedisURI(LettuceClientConfig config) { - final String uri = config.uri(); - final Integer database = config.database(); - final String user = config.user(); - final String password = config.password(); - - final List redisURIS = RedisClusterURIUtil.toRedisURIs(URI.create(uri)); - return redisURIS.stream() - .map(redisURI -> { - RedisURI.Builder builder = RedisURI.builder(redisURI); - if (database != null) { - builder = builder.withDatabase(database); - } - if (user != null && password != null) { - builder = builder.withAuthentication(user, password); - } else if (password != null) { - builder = builder.withPassword(((CharSequence) password)); - 
} - - return builder - .withTimeout(config.commandTimeout()) - .build(); - }) - .toList(); - } -} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java similarity index 66% rename from cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java rename to cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java index adda5d4f8..9edbef77f 100644 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java @@ -6,7 +6,7 @@ import io.lettuce.core.Value; import io.lettuce.core.cluster.RedisClusterClient; import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; -import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; import io.lettuce.core.codec.ByteArrayCodec; import io.lettuce.core.support.AsyncConnectionPoolSupport; import io.lettuce.core.support.BoundedAsyncPool; @@ -15,60 +15,58 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; import ru.tinkoff.kora.common.util.TimeUtils; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; +import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.stream.Collectors; -final class LettuceClusterRedisCacheClient implements RedisCacheClient, Lifecycle { +final class LettuceClusterCacheAsyncClient 
implements RedisCacheAsyncClient, Lifecycle { - private static final Logger logger = LoggerFactory.getLogger(LettuceClusterRedisCacheClient.class); + private static final Logger logger = LoggerFactory.getLogger(LettuceClusterCacheAsyncClient.class); - private final RedisClusterClient redisClient; + // always use async cause sync uses JDK Proxy wrapped async impl + private final RedisClusterAsyncCommands lettuceCommands; + private final RedisClusterClient lettuceClient; // use for pipeline commands only cause lettuce have bad performance when using pool - private BoundedAsyncPool> pool; - private StatefulRedisClusterConnection connection; + private BoundedAsyncPool> lettucePool; - // always use async cause sync uses JDK Proxy wrapped async impl - private RedisAdvancedClusterAsyncCommands commands; - LettuceClusterRedisCacheClient(RedisClusterClient redisClient) { - this.redisClient = redisClient; + LettuceClusterCacheAsyncClient(RedisClusterClient lettuceClient, + RedisClusterAsyncCommands lettuceCommands) { + this.lettuceClient = lettuceClient; + this.lettuceCommands = lettuceCommands; } @Nonnull @Override public CompletionStage get(byte[] key) { - return commands.get(key); + return lettuceCommands.get(key); } @Nonnull @Override public CompletionStage> mget(byte[][] keys) { - return commands.mget(keys) + return lettuceCommands.mget(keys) .thenApply(r -> r.stream() .filter(Value::hasValue) - .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue, (x, y) -> x, LinkedHashMap::new))); } @Nonnull @Override public CompletionStage getex(byte[] key, long expireAfterMillis) { - return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + return lettuceCommands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); } @SuppressWarnings("unchecked") @Nonnull @Override public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { + return 
lettucePool.acquire().thenCompose(connection -> { connection.setAutoFlushCommands(false); List> futures = new ArrayList<>(); @@ -85,37 +83,37 @@ public CompletionStage> getex(byte[][] keys, long expireAfte connection.flushCommands(); connection.setAutoFlushCommands(true); - return pool.release(connection) + return lettucePool.release(connection) .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) .thenApply(_void -> futures.stream() .map(f -> f.getNow(null)) .filter(Objects::nonNull) .map(v -> ((Map.Entry) v)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (x, y) -> x, LinkedHashMap::new))); }); } @Nonnull @Override - public CompletionStage set(byte[] key, byte[] value) { - return commands.set(key, value).thenApply(r -> true); + public CompletionStage set(byte[] key, byte[] value) { + return lettuceCommands.set(key, value).thenApply(r -> null); } @Override - public CompletionStage mset(Map keyAndValue) { - return commands.mset(keyAndValue).thenApply(r -> true); + public CompletionStage mset(Map keyAndValue) { + return lettuceCommands.mset(keyAndValue).thenApply(r -> null); } @Nonnull @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return lettuceCommands.psetex(key, expireAfterMillis, value).thenApply(r -> null); } @Nonnull @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return lettucePool.acquire().thenCompose(connection -> { connection.setAutoFlushCommands(false); List> futures = new ArrayList<>(); @@ -131,28 +129,27 @@ public CompletionStage psetex(Map keyAndValue, long 
exp connection.flushCommands(); connection.setAutoFlushCommands(true); - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_v -> true); + return lettucePool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))); }); } @Nonnull @Override public CompletionStage del(byte[] key) { - return commands.del(key); + return lettuceCommands.del(key); } @Nonnull @Override public CompletionStage del(byte[][] keys) { - return commands.del(keys); + return lettuceCommands.del(keys); } @Nonnull @Override - public CompletionStage flushAll() { - return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + public CompletionStage flushAll() { + return lettuceCommands.flushall(FlushMode.SYNC).thenApply(r -> null); } @Override @@ -169,9 +166,7 @@ public void init() { .testOnRelease(false) .build(); - this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); - this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); - this.commands = this.connection.async(); + this.lettucePool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> lettuceClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); } @@ -181,9 +176,7 @@ public void release() { logger.debug("Redis Client (Lettuce) stopping..."); final long started = TimeUtils.started(); - this.pool.close(); - this.connection.close(); - this.redisClient.shutdown(); + this.lettucePool.close(); logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); } diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java 
deleted file mode 100644 index adda5d4f8..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterRedisCacheClient.java +++ /dev/null @@ -1,190 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.FlushMode; -import io.lettuce.core.GetExArgs; -import io.lettuce.core.KeyValue; -import io.lettuce.core.Value; -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; -import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; -import io.lettuce.core.codec.ByteArrayCodec; -import io.lettuce.core.support.AsyncConnectionPoolSupport; -import io.lettuce.core.support.BoundedAsyncPool; -import io.lettuce.core.support.BoundedPoolConfig; -import jakarta.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.util.TimeUtils; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.stream.Collectors; - -final class LettuceClusterRedisCacheClient implements RedisCacheClient, Lifecycle { - - private static final Logger logger = LoggerFactory.getLogger(LettuceClusterRedisCacheClient.class); - - private final RedisClusterClient redisClient; - - // use for pipeline commands only cause lettuce have bad performance when using pool - private BoundedAsyncPool> pool; - private StatefulRedisClusterConnection connection; - - // always use async cause sync uses JDK Proxy wrapped async impl - private RedisAdvancedClusterAsyncCommands commands; - - LettuceClusterRedisCacheClient(RedisClusterClient redisClient) { - this.redisClient = redisClient; - } - - @Nonnull - @Override - public CompletionStage get(byte[] key) { - return 
commands.get(key); - } - - @Nonnull - @Override - public CompletionStage> mget(byte[][] keys) { - return commands.mget(keys) - .thenApply(r -> r.stream() - .filter(Value::hasValue) - .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); - } - - @Nonnull - @Override - public CompletionStage getex(byte[] key, long expireAfterMillis) { - return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); - } - - @SuppressWarnings("unchecked") - @Nonnull - @Override - public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> futures = new ArrayList<>(); - - var async = connection.async(); - for (byte[] key : keys) { - var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) - .thenApply(v -> (v == null) ? null : Map.entry(key, v)) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_void -> futures.stream() - .map(f -> f.getNow(null)) - .filter(Objects::nonNull) - .map(v -> ((Map.Entry) v)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); - }); - } - - @Nonnull - @Override - public CompletionStage set(byte[] key, byte[] value) { - return commands.set(key, value).thenApply(r -> true); - } - - @Override - public CompletionStage mset(Map keyAndValue) { - return commands.mset(keyAndValue).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); - } - - @Nonnull - @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { - connection.setAutoFlushCommands(false); - - List> 
futures = new ArrayList<>(); - - var async = connection.async(); - for (Map.Entry entry : keyAndValue.entrySet()) { - var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) - .toCompletableFuture(); - - futures.add(future); - } - - connection.flushCommands(); - connection.setAutoFlushCommands(true); - - return pool.release(connection) - .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) - .thenApply(_v -> true); - }); - } - - @Nonnull - @Override - public CompletionStage del(byte[] key) { - return commands.del(key); - } - - @Nonnull - @Override - public CompletionStage del(byte[][] keys) { - return commands.del(keys); - } - - @Nonnull - @Override - public CompletionStage flushAll() { - return commands.flushall(FlushMode.SYNC).thenApply(r -> true); - } - - @Override - public void init() { - logger.debug("Redis Client (Lettuce) starting..."); - final long started = TimeUtils.started(); - - final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() - .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) - .minIdle(0) - .testOnAcquire(false) - .testOnCreate(false) - .testOnRelease(false) - .build(); - - this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); - this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); - this.commands = this.connection.async(); - - logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); - } - - @Override - public void release() { - logger.debug("Redis Client (Lettuce) stopping..."); - final long started = TimeUtils.started(); - - this.pool.close(); - this.connection.close(); - this.redisClient.shutdown(); - - logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); - } -} diff --git 
a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java deleted file mode 100644 index 25cb53904..000000000 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java +++ /dev/null @@ -1,36 +0,0 @@ -package ru.tinkoff.kora.cache.redis.lettuce; - -import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; -import ru.tinkoff.kora.common.DefaultComponent; -import ru.tinkoff.kora.config.common.Config; -import ru.tinkoff.kora.config.common.ConfigValue; -import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; - -import java.time.Duration; - -public interface LettuceModule { - - default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { - var value = config.get("lettuce"); - return extractor.extract(value); - } - - default LettuceClientFactory lettuceClientFactory() { - return new LettuceClientFactory(); - } - - @DefaultComponent - default RedisCacheClient lettuceRedisClient(LettuceClientFactory factory, LettuceClientConfig config) { - var redisClient = factory.build(config); - if (redisClient instanceof io.lettuce.core.RedisClient rc) { - return new LettuceRedisCacheClient(rc, config); - } else if (redisClient instanceof RedisClusterClient rcc) { - return new LettuceClusterRedisCacheClient(rcc); - } else { - throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); - } - } -} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceSingleCacheAsyncClient.java similarity index 65% rename from 
cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java rename to cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceSingleCacheAsyncClient.java index cdb95be7c..6b957b036 100644 --- a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceRedisCacheClient.java +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceSingleCacheAsyncClient.java @@ -2,7 +2,7 @@ import io.lettuce.core.*; import io.lettuce.core.api.StatefulRedisConnection; -import io.lettuce.core.api.async.RedisAsyncCommands; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; import io.lettuce.core.codec.ByteArrayCodec; import io.lettuce.core.support.AsyncConnectionPoolSupport; import io.lettuce.core.support.BoundedAsyncPool; @@ -11,63 +11,60 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; import ru.tinkoff.kora.common.util.TimeUtils; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; +import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.stream.Collectors; -final class LettuceRedisCacheClient implements RedisCacheClient, Lifecycle { +final class LettuceSingleCacheAsyncClient implements RedisCacheAsyncClient, Lifecycle { - private static final Logger logger = LoggerFactory.getLogger(LettuceRedisCacheClient.class); + private static final Logger logger = LoggerFactory.getLogger(LettuceSingleCacheAsyncClient.class); + // always use async cause sync uses JDK Proxy wrapped async impl + private final RedisClusterAsyncCommands lettuceCommands; + private final RedisClient lettuceClient; private final RedisURI redisURI; - private final RedisClient 
redisClient; // use for pipeline commands only cause lettuce have bad performance when using pool - private BoundedAsyncPool> pool; - private StatefulRedisConnection connection; - - // always use async cause sync uses JDK Proxy wrapped async impl - private RedisAsyncCommands commands; - - LettuceRedisCacheClient(RedisClient redisClient, LettuceClientConfig config) { - this.redisClient = redisClient; - final List redisURIs = LettuceClientFactory.buildRedisURI(config); - this.redisURI = redisURIs.size() == 1 ? redisURIs.get(0) : null; + private BoundedAsyncPool> lettucePool; + + LettuceSingleCacheAsyncClient(RedisClient lettuceClient, + RedisClusterAsyncCommands lettuceCommands, + RedisURI redisURI) { + this.lettuceClient = lettuceClient; + this.lettuceCommands = lettuceCommands; + this.redisURI = redisURI; } @Nonnull @Override public CompletionStage get(byte[] key) { - return commands.get(key); + return lettuceCommands.get(key); } @Nonnull @Override public CompletionStage> mget(byte[][] keys) { - return commands.mget(keys) + return lettuceCommands.mget(keys) .thenApply(r -> r.stream() .filter(Value::hasValue) - .collect(Collectors.toMap(KeyValue::getKey, Value::getValue))); + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue, (x, y) -> x, LinkedHashMap::new))); } @Nonnull @Override public CompletionStage getex(byte[] key, long expireAfterMillis) { - return commands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + return lettuceCommands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); } @SuppressWarnings("unchecked") @Nonnull @Override public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { + return lettucePool.acquire().thenCompose(connection -> { connection.setAutoFlushCommands(false); List> futures = new ArrayList<>(); @@ -84,37 +81,37 @@ public CompletionStage> getex(byte[][] keys, long expireAfte connection.flushCommands(); connection.setAutoFlushCommands(true); - return 
pool.release(connection) + return lettucePool.release(connection) .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) .thenApply(_void -> futures.stream() .map(f -> f.getNow(null)) .filter(Objects::nonNull) .map(v -> ((Map.Entry) v)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (x, y) -> x, LinkedHashMap::new))); }); } @Nonnull @Override - public CompletionStage set(byte[] key, byte[] value) { - return commands.set(key, value).thenApply(r -> true); + public CompletionStage set(byte[] key, byte[] value) { + return lettuceCommands.set(key, value).thenApply(r -> null); } @Override - public CompletionStage mset(Map keyAndValue) { - return commands.mset(keyAndValue).thenApply(r -> true); + public CompletionStage mset(Map keyAndValue) { + return lettuceCommands.mset(keyAndValue).thenApply(r -> null); } @Nonnull @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - return commands.psetex(key, expireAfterMillis, value).thenApply(r -> true); + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return lettuceCommands.psetex(key, expireAfterMillis, value).thenApply(r -> null); } @Nonnull @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { - return pool.acquire().thenCompose(connection -> { + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return lettucePool.acquire().thenCompose(connection -> { connection.setAutoFlushCommands(false); List> futures = new ArrayList<>(); @@ -132,27 +129,26 @@ public CompletionStage psetex(Map keyAndValue, long exp connection.setAutoFlushCommands(true); return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) - .thenApply(_void -> true) - .whenComplete((s, throwable) -> pool.release(connection)); + .whenComplete((s, throwable) -> lettucePool.release(connection)); }); } 
@Nonnull @Override public CompletionStage del(byte[] key) { - return commands.del(key); + return lettuceCommands.del(key); } @Nonnull @Override public CompletionStage del(byte[][] keys) { - return commands.del(keys); + return lettuceCommands.del(keys); } @Nonnull @Override - public CompletionStage flushAll() { - return commands.flushall(FlushMode.SYNC).thenApply(r -> true); + public CompletionStage flushAll() { + return lettuceCommands.flushall(FlushMode.SYNC).thenApply(r -> null); } @Override @@ -169,9 +165,7 @@ public void init() { .testOnRelease(false) .build(); - this.pool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> redisClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); - this.connection = redisClient.connect(ByteArrayCodec.INSTANCE); - this.commands = this.connection.async(); + this.lettucePool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> lettuceClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); } @@ -181,9 +175,7 @@ public void release() { logger.debug("Redis Client (Lettuce) stopping..."); final long started = TimeUtils.started(); - this.pool.close(); - this.connection.close(); - this.redisClient.shutdown(); + this.lettucePool.close(); logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); } diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java deleted file mode 100644 index 209ce6ecb..000000000 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java +++ /dev/null @@ -1,252 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; - -import java.util.List; -import java.util.Map; -import 
java.util.Set; -import java.util.concurrent.CompletableFuture; - -abstract class AbstractAsyncCacheTests extends CacheRunner { - - protected DummyCache cache = null; - - @Test - void getWhenCacheEmpty() { - // given - var key = "1"; - - // when - assertNull(cache.getAsync(key).toCompletableFuture().join()); - } - - @Test - void getWhenCacheFilled() { - // given - var key = "1"; - var value = "1"; - - // when - cache.putAsync(key, value).toCompletableFuture().join(); - - // then - final String fromCache = cache.getAsync(key).toCompletableFuture().join(); - assertEquals(value, fromCache); - } - - @Test - void getMultiWhenCacheEmpty() { - // given - List keys = List.of("1", "2"); - - // when - Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); - assertTrue(keyToValue.isEmpty()); - } - - @Test - void getMultiWhenCacheFilledPartly() { - // given - List keys = List.of("1"); - for (String key : keys) { - cache.putAsync(key, key).toCompletableFuture().join(); - } - - // when - Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); - assertEquals(1, keyToValue.size()); - keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); - } - - @Test - void getMultiWhenCacheFilled() { - // given - List keys = List.of("1", "2"); - for (String key : keys) { - cache.putAsync(key, key).toCompletableFuture().join(); - } - - // when - Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); - assertEquals(2, keyToValue.size()); - keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); - } - - @Test - void computeIfAbsentWhenCacheEmpty() { - // given - - // when - assertNull(cache.getAsync("1").toCompletableFuture().join()); - final String valueComputed = cache.computeIfAbsent("1", k -> "1"); - assertEquals("1", valueComputed); - - // then - final String cached = cache.getAsync("1").toCompletableFuture().join(); - assertEquals(valueComputed, cached); - } 
- - @Test - void computeIfAbsentMultiWhenCacheEmpty() { - // given - List keys = List.of("1", "2"); - for (String key : keys) { - assertNull(cache.getAsync(key).toCompletableFuture().join()); - } - - // when - final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { - if (keysCompute.size() == 2) { - return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); - } else if ("1".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("1", "1")); - } else if ("2".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("2", "2")); - } - - throw new IllegalStateException("Should not happen"); - }).toCompletableFuture().join(); - assertEquals(2, valueComputed.size()); - assertEquals(Set.copyOf(keys), valueComputed.keySet()); - assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.getAsync(keys).toCompletableFuture().join(); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiOneWhenCachePartly() { - // given - List keys = List.of("1"); - for (String key : keys) { - assertNull(cache.getAsync(key).toCompletableFuture().join()); - cache.putAsync(key, key).toCompletableFuture().join(); - } - - // when - final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { - if (keysCompute.size() == 2) { - return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); - } else if ("1".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("1", "1")); - } else if ("2".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("2", "2")); - } - - throw new IllegalStateException("Should not happen"); - }).toCompletableFuture().join(); - assertEquals(1, valueComputed.size()); - assertEquals(Set.copyOf(keys), valueComputed.keySet()); - assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); - - // then - final 
Map cached = cache.getAsync(keys).toCompletableFuture().join(); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiAllWhenCachePartly() { - // given - List keys = List.of("1"); - for (String key : keys) { - assertNull(cache.getAsync(key).toCompletableFuture().join()); - cache.putAsync(key, key).toCompletableFuture().join(); - } - - // when - final Map valueComputed = cache.computeIfAbsentAsync(Set.of("1", "2"), keysCompute -> { - if (keysCompute.size() == 2) { - return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); - } else if ("1".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("1", "1")); - } else if ("2".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("2", "2")); - } - - throw new IllegalStateException("Should not happen"); - }).toCompletableFuture().join(); - assertEquals(2, valueComputed.size()); - assertEquals(Set.of("1", "2"), valueComputed.keySet()); - assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.getAsync(Set.of("1", "2")).toCompletableFuture().join(); - assertEquals(valueComputed, cached); - } - - @Test - void computeIfAbsentMultiWhenCacheFilled() { - // given - List keys = List.of("1", "2"); - for (String key : keys) { - assertNull(cache.getAsync(key).toCompletableFuture().join()); - cache.putAsync(key, key).toCompletableFuture().join(); - } - - // when - final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { - if (keysCompute.size() == 2) { - return CompletableFuture.completedFuture(Map.of("1", "???", "2", "???")); - } else if ("1".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("1", "???")); - } else if ("2".equals(keysCompute.iterator().next())) { - return CompletableFuture.completedFuture(Map.of("2", "???")); - } - - throw new IllegalStateException("Should not happen"); - 
}).toCompletableFuture().join(); - assertEquals(2, valueComputed.size()); - assertEquals(Set.copyOf(keys), valueComputed.keySet()); - assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); - - // then - final Map cached = cache.getAsync(keys).toCompletableFuture().join(); - assertEquals(valueComputed, cached); - } - - @Test - void getWrongKeyWhenCacheFilled() { - // given - var key = "1"; - var value = "1"; - - // when - cache.putAsync(key, value).toCompletableFuture().join(); - - // then - final String fromCache = cache.getAsync("2").toCompletableFuture().join(); - assertNull(fromCache); - } - - @Test - void getWhenCacheInvalidate() { - // given - var key = "1"; - var value = "1"; - cache.putAsync(key, value).toCompletableFuture().join(); - - // when - cache.invalidate(key); - - // then - final String fromCache = cache.getAsync(key).toCompletableFuture().join(); - assertNull(fromCache); - } - - @Test - void getFromCacheWhenCacheInvalidateAll() { - // given - var key = "1"; - var value = "1"; - cache.putAsync(key, value).toCompletableFuture().join(); - - // when - cache.invalidateAll(); - - // then - final String fromCache = cache.getAsync(key).toCompletableFuture().join(); - assertNull(fromCache); - } -} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java deleted file mode 100644 index 3509176e2..000000000 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java +++ /dev/null @@ -1,85 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import jakarta.annotation.Nullable; -import org.junit.jupiter.api.Assertions; -import ru.tinkoff.kora.application.graph.Lifecycle; -import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; -import ru.tinkoff.kora.test.redis.RedisParams; - -import java.time.Duration; - -abstract class CacheRunner 
extends Assertions implements RedisCacheModule { - - public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, - @Nullable Duration expireRead) { - return new RedisCacheConfig() { - - @Override - public String keyPrefix() { - return "pref"; - } - - @Nullable - @Override - public Duration expireAfterWrite() { - return expireWrite; - } - - @Nullable - @Override - public Duration expireAfterAccess() { - return expireRead; - } - }; - } - - private RedisCacheClient createLettuce(RedisParams redisParams) throws Exception { - var lettuceClientFactory = lettuceClientFactory(); - var lettuceClientConfig = new LettuceClientConfig() { - @Override - public String uri() { - return redisParams.uri().toString(); - } - - @Override - public Integer database() { - return null; - } - - @Override - public String user() { - return null; - } - - @Override - public String password() { - return null; - } - }; - - var lettuceClient = lettuceRedisClient(lettuceClientFactory, lettuceClientConfig); - if (lettuceClient instanceof Lifecycle lc) { - lc.init(); - } - return lettuceClient; - } - - private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { - var lettuceClient = createLettuce(redisParams); - return new DummyCache(getConfig(expireWrite, expireRead), lettuceClient, redisCacheTelemetry(null, null), - stringRedisKeyMapper(), stringRedisValueMapper()); - } - - protected DummyCache createCache(RedisParams redisParams) throws Exception { - return createDummyCache(redisParams, null, null); - } - - protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { - return createDummyCache(redisParams, expireWrite, null); - } - - protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception { - return createDummyCache(redisParams, null, expireRead); - } -} diff --git 
a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java deleted file mode 100644 index bb8818bbc..000000000 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java +++ /dev/null @@ -1,20 +0,0 @@ -package ru.tinkoff.kora.cache.redis; - -import io.lettuce.core.FlushMode; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.TestInstance; -import ru.tinkoff.kora.test.redis.RedisParams; -import ru.tinkoff.kora.test.redis.RedisTestContainer; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@RedisTestContainer -class SyncCacheTests extends AbstractSyncCacheTests { - - @BeforeEach - void setup(RedisParams redisParams) throws Exception { - redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); - if (cache == null) { - cache = createCache(redisParams); - } - } -} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractAsyncCacheTests.java similarity index 98% rename from cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractAsyncCacheTests.java index 209ce6ecb..832a91e0d 100644 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractAsyncCacheTests.java @@ -1,7 +1,7 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git 
a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractSyncCacheTests.java similarity index 98% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractSyncCacheTests.java index f5ec3aa9d..0488159b2 100644 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractSyncCacheTests.java @@ -1,7 +1,7 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireReadTests.java similarity index 93% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireReadTests.java index 3f7764c9a..7c3acecbe 100644 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireReadTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireReadTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git 
a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireWriteTests.java similarity index 93% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireWriteTests.java index faa5acb1a..789c987bd 100644 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheExpireWriteTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireWriteTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheTests.java similarity index 92% rename from cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheTests.java index 6f95bee4a..7af17f562 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/AsyncCacheTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java new file mode 100644 index 000000000..e8ea0251e --- /dev/null +++ 
b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java @@ -0,0 +1,129 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.RedisClient; +import io.lettuce.core.RedisURI; +import io.lettuce.core.api.StatefulConnection; +import io.lettuce.core.codec.ByteArrayCodec; +import jakarta.annotation.Nullable; +import org.junit.jupiter.api.Assertions; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheConfig; +import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.redis.lettuce.LettuceConfig; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; +import ru.tinkoff.kora.test.redis.RedisParams; + +import java.time.Duration; +import java.util.List; + +abstract class CacheRunner extends Assertions implements RedisCacheMapperModule, LettuceCacheModule { + + public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, + @Nullable Duration expireRead) { + return new RedisCacheConfig() { + + @Override + public String keyPrefix() { + return "pref"; + } + + @Nullable + @Override + public Duration expireAfterWrite() { + return expireWrite; + } + + @Nullable + @Override + public Duration expireAfterAccess() { + return expireRead; + } + + @Override + public TelemetryConfig telemetry() { + return null; + } + }; + } + + private RedisCacheAsyncClient createLettuce(RedisParams redisParams) throws Exception { + var lettuceClientConfig = new LettuceConfig() { + @Override + public List uri() { + return List.of(redisParams.uri().toString()); + } + + @Override + public Integer database() { + return null; + } + + @Override + public String user() { + return null; + } + 
+ @Override + public String password() { + return null; + } + + @Override + public PoolConfig pool() { + return null; + } + }; + + var lettuceClient = lettuceClient(lettuceClientConfig); + if (lettuceClient instanceof Lifecycle lc) { + lc.init(); + } + + if (!(lettuceClient instanceof RedisClient rc)) { + throw new IllegalStateException(); + } + + Wrapped> statefulConnectionWrapped = lettuceStatefulConnection(lettuceClient, ByteArrayCodec.INSTANCE); + if(statefulConnectionWrapped instanceof Lifecycle l) { + l.init(); + } + var commands = lettuceRedisClusterAsyncCommands(statefulConnectionWrapped.value()); + LettuceSingleCacheAsyncClient lettuceSingleCacheAsyncClient = new LettuceSingleCacheAsyncClient(rc, commands, RedisURI.create(redisParams.uri())); + lettuceSingleCacheAsyncClient.init(); + return lettuceSingleCacheAsyncClient; + } + + private RedisCacheClient createSyncLettuce(RedisCacheAsyncClient asyncClient) { + return new LettuceCacheSyncClient(asyncClient); + } + + private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { + var lettuceClient = createLettuce(redisParams); + var lettuceSyncClient = createSyncLettuce(lettuceClient); + return new DummyCache(getConfig(expireWrite, expireRead), lettuceSyncClient, lettuceClient, + (telemetryConfig, args) -> operationName -> new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(Object valueFromCache) {} + @Override + public void recordFailure(Throwable throwable) {} + }, + stringRedisKeyMapper(), stringRedisValueMapper()); + } + + protected DummyCache createCache(RedisParams redisParams) throws Exception { + return createDummyCache(redisParams, null, null); + } + + protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { + return createDummyCache(redisParams, expireWrite, null); + } + + protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration 
expireRead) throws Exception { + return createDummyCache(redisParams, null, expireRead); + } +} diff --git a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireReadTests.java similarity index 93% rename from cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireReadTests.java index 43008e371..e36676e23 100644 --- a/cache/cache-redis-common/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireReadTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireReadTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireWriteTests.java similarity index 93% rename from cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireWriteTests.java index 72feb88f6..39db66914 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheExpireWriteTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireWriteTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java 
b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheTests.java similarity index 92% rename from cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheTests.java index bb8818bbc..e6fb15f0b 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/SyncCacheTests.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheTests.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis; +package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.FlushMode; import org.junit.jupiter.api.BeforeEach; diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java similarity index 50% rename from cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java rename to cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java index 4d098b5ff..6321fe1a3 100644 --- a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java @@ -1,14 +1,16 @@ -package ru.tinkoff.kora.cache.redis.testdata; +package ru.tinkoff.kora.cache.redis.lettuce.testdata; import ru.tinkoff.kora.cache.redis.*; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; public final class DummyCache extends AbstractRedisCache { public DummyCache(RedisCacheConfig config, RedisCacheClient redisClient, - RedisCacheTelemetry telemetry, + RedisCacheAsyncClient redisAsyncClient, + CacheTelemetryFactory telemetryFactory, RedisCacheKeyMapper keyMapper, RedisCacheValueMapper valueMapper) { - super("dummy", config, redisClient, telemetry, 
keyMapper, valueMapper); + super("dummy", config, redisClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper); } } diff --git a/cache/cache-redis/README.md b/cache/cache-redis/README.md new file mode 100644 index 000000000..e917a758e --- /dev/null +++ b/cache/cache-redis/README.md @@ -0,0 +1,3 @@ +# DEPRECATED + +Use `cache-redis-lettuce` instead diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java index 941591ac8..3ac3678b2 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -13,6 +13,10 @@ import java.util.function.Function; import java.util.stream.Collectors; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public abstract class AbstractRedisCache implements AsyncCache { private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java index 75a932976..45b17e929 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -2,6 +2,10 @@ import ru.tinkoff.kora.cache.AsyncCache; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public interface RedisCache extends AsyncCache { } diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java index a5b995bd6..9711c9d83 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java +++ 
b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -5,6 +5,10 @@ import java.util.Map; import java.util.concurrent.CompletionStage; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public interface RedisCacheClient { @Nonnull diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java index 120bd511a..8404c59ec 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -6,6 +6,10 @@ import java.time.Duration; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated @ConfigValueExtractor public interface RedisCacheConfig { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java index f6edc71ad..ae8584c94 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -7,7 +7,9 @@ /** * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. 
+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce */ +@Deprecated public interface RedisCacheKeyMapper extends Function { /** diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java index 81e48e005..86fd8225f 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -14,6 +14,10 @@ import java.nio.charset.StandardCharsets; import java.util.UUID; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public interface RedisCacheMapperModule extends JsonCommonModule { @DefaultComponent diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java index fe07914b0..b8553aa2c 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -2,6 +2,10 @@ import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { } diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java index af84dfdd5..7d9e44c8c 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java @@ -8,6 +8,10 @@ import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; import ru.tinkoff.kora.cache.telemetry.CacheTracer; +/** + * Use dependency - 
ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public final class RedisCacheTelemetry { private static final String ORIGIN = "redis"; diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java index cf2037f42..7e4b16919 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -2,7 +2,9 @@ /** * Converts cache value into serializer value to store in cache. + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce */ +@Deprecated public interface RedisCacheValueMapper { /** diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java index 6fb2ee3e9..5eec5745d 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java @@ -35,9 +35,13 @@ default Duration commandTimeout() { enum Protocol { - /** Redis 2 to Redis 5 */ + /** + * Redis 2 to Redis 5 + */ RESP2, - /** Redis 6+ */ + /** + * Redis 6+ + */ RESP3 } } diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java index fb29c05e2..7d8c87c7e 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java @@ -11,6 +11,10 @@ import java.time.Duration; import java.util.List; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public final class 
LettuceClientFactory { @Nonnull diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java index 25cb53904..ad773cc8a 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java @@ -11,6 +11,10 @@ import java.time.Duration; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public interface LettuceModule { default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java index 209ce6ecb..2252e340a 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java @@ -1,7 +1,7 @@ package ru.tinkoff.kora.cache.redis; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java index f5ec3aa9d..cbc7b6479 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java @@ -1,7 +1,7 @@ package ru.tinkoff.kora.cache.redis; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import 
java.util.List; import java.util.Map; diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java index 3509176e2..fb68a5d8b 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java @@ -4,7 +4,7 @@ import org.junit.jupiter.api.Assertions; import ru.tinkoff.kora.application.graph.Lifecycle; import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import ru.tinkoff.kora.test.redis.RedisParams; import java.time.Duration; diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java similarity index 89% rename from cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java rename to cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java index 4d098b5ff..92728f10d 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis.testdata; +package ru.tinkoff.kora.cache.redis.lettuce.testdata; import ru.tinkoff.kora.cache.redis.*; diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java deleted file mode 100644 index 4d098b5ff..000000000 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java +++ /dev/null @@ -1,14 +0,0 @@ -package ru.tinkoff.kora.cache.redis.testdata; - 
-import ru.tinkoff.kora.cache.redis.*; - -public final class DummyCache extends AbstractRedisCache { - - public DummyCache(RedisCacheConfig config, - RedisCacheClient redisClient, - RedisCacheTelemetry telemetry, - RedisCacheKeyMapper keyMapper, - RedisCacheValueMapper valueMapper) { - super("dummy", config, redisClient, telemetry, keyMapper, valueMapper); - } -} diff --git a/cache/cache-symbol-processor/build.gradle b/cache/cache-symbol-processor/build.gradle index 475f658f6..b654f0273 100644 --- a/cache/cache-symbol-processor/build.gradle +++ b/cache/cache-symbol-processor/build.gradle @@ -10,7 +10,7 @@ dependencies { testImplementation libs.prometheus.collector.caffeine testImplementation project(":internal:test-logging") testImplementation project(":cache:cache-caffeine") - testImplementation project(":cache:cache-redis") + testImplementation project(":cache:cache-redis-lettuce") testImplementation project(":json:json-common") testImplementation project(":config:config-common") testImplementation testFixtures(project(":symbol-processor-common")) diff --git a/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt b/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt index f947d2e99..8e595bd1c 100644 --- a/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt +++ b/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt @@ -26,7 +26,6 @@ import ru.tinkoff.kora.ksp.common.KspCommonUtils.generated import ru.tinkoff.kora.ksp.common.KspCommonUtils.toTypeName import ru.tinkoff.kora.ksp.common.TagUtils.parseTags import ru.tinkoff.kora.ksp.common.TagUtils.toTagAnnotation -import ru.tinkoff.kora.ksp.common.exception.ProcessingErrorException class CacheSymbolProcessor( private val environment: SymbolProcessorEnvironment @@ -35,17 +34,24 @@ class 
CacheSymbolProcessor( companion object { private val ANNOTATION_CACHE = ClassName("ru.tinkoff.kora.cache.annotation", "Cache") - private val CAFFEINE_TELEMETRY = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheTelemetry") + private val CACHE_TELEMETRY_FACTORY = ClassName("ru.tinkoff.kora.cache.telemetry", "CacheTelemetryFactory") + private val CAFFEINE_CACHE = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCache") private val CAFFEINE_CACHE_FACTORY = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheFactory") private val CAFFEINE_CACHE_CONFIG = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheConfig") private val CAFFEINE_CACHE_IMPL = ClassName("ru.tinkoff.kora.cache.caffeine", "AbstractCaffeineCache") + @Deprecated(message = "deprecated redis dependency") private val REDIS_TELEMETRY = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheTelemetry") + + @Deprecated(message = "deprecated redis dependency") + private val REDIS_CACHE_OLD_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheClient") + private val REDIS_CACHE = ClassName("ru.tinkoff.kora.cache.redis", "RedisCache") private val REDIS_CACHE_IMPL = ClassName("ru.tinkoff.kora.cache.redis", "AbstractRedisCache") private val REDIS_CACHE_CONFIG = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheConfig") - private val REDIS_CACHE_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheClient") + private val REDIS_CACHE_SYNC_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheClient") + private val REDIS_CACHE_ASYNC_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheAsyncClient") private val REDIS_CACHE_MAPPER_KEY = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheKeyMapper") private val REDIS_CACHE_MAPPER_VALUE = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheValueMapper") } @@ -195,58 +201,122 @@ class CacheSymbolProcessor( .build() ) .addParameter("factory", CAFFEINE_CACHE_FACTORY) - .addParameter("telemetry", CAFFEINE_TELEMETRY) - 
.addStatement("return %T(config, factory, telemetry)", cacheImplName) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) + .addStatement("return %T(config, factory, telemetryFactory)", cacheImplName) .returns(cacheTypeName) .build() } REDIS_CACHE -> { - val keyType = cacheContract.typeArguments[0] - val valueType = cacheContract.typeArguments[1] - val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) - val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) - - val cacheContractType = cacheClass.getAllSuperTypes() - .filter { i -> i.toTypeName() == cacheContract } - .first() - - val keyMapperBuilder = ParameterSpec.builder("keyMapper", keyMapperType) - val keyTags = cacheContractType.arguments[0].parseTags() - if (keyTags.isNotEmpty()) { - keyMapperBuilder.addAnnotation(keyTags.toTagAnnotation()) + if (cacheContract.annotations.any { it.typeName == Deprecated::class.asTypeName() }) { + return getRedisDeprecatedFunc(cacheClass, cacheContract, cacheImplName, cacheTypeName, methodName) + } else { + return getRedisFunc(cacheClass, cacheContract, cacheImplName, cacheTypeName, methodName) } + } - val valueMapperBuilder = ParameterSpec.builder("valueMapper", valueMapperType) - val valueTags = cacheContractType.arguments[1].parseTags() - if (valueTags.isNotEmpty()) { - valueMapperBuilder.addAnnotation(valueTags.toTagAnnotation()) - } + else -> { + throw IllegalArgumentException("Unknown cache type impl: ${cacheContract.rawType}") + } + } + } - FunSpec.builder(methodName) - .addModifiers(KModifier.PUBLIC) - .addParameter( - ParameterSpec.builder("config", REDIS_CACHE_CONFIG) - .addAnnotation( - AnnotationSpec.builder(CommonClassNames.tag) - .addMember("%T::class", cacheTypeName) - .build() - ) + private fun getRedisFunc( + cacheClass: KSClassDeclaration, + cacheContract: ParameterizedTypeName, + cacheImplName: ClassName, + cacheTypeName: TypeName, + methodName: String + ): FunSpec { + val keyType = cacheContract.typeArguments[0] + 
val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + + val cacheContractType = cacheClass.getAllSuperTypes() + .filter { i -> i.toTypeName() == cacheContract } + .first() + + val keyMapperBuilder = ParameterSpec.builder("keyMapper", keyMapperType) + val keyTags = cacheContractType.arguments[0].parseTags() + if (keyTags.isNotEmpty()) { + keyMapperBuilder.addAnnotation(keyTags.toTagAnnotation()) + } + + val valueMapperBuilder = ParameterSpec.builder("valueMapper", valueMapperType) + val valueTags = cacheContractType.arguments[1].parseTags() + if (valueTags.isNotEmpty()) { + valueMapperBuilder.addAnnotation(valueTags.toTagAnnotation()) + } + + return FunSpec.builder(methodName) + .addModifiers(KModifier.PUBLIC) + .addParameter( + ParameterSpec.builder("config", REDIS_CACHE_CONFIG) + .addAnnotation( + AnnotationSpec.builder(CommonClassNames.tag) + .addMember("%T::class", cacheTypeName) .build() ) - .addParameter("redisClient", REDIS_CACHE_CLIENT) - .addParameter("telemetry", REDIS_TELEMETRY) - .addParameter(keyMapperBuilder.build()) - .addParameter(valueMapperBuilder.build()) - .addStatement("return %L(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) - .returns(cacheTypeName) .build() - } + ) + .addParameter("redisSyncClient", REDIS_CACHE_SYNC_CLIENT) + .addParameter("redisAsyncClient", REDIS_CACHE_ASYNC_CLIENT) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) + .addParameter(keyMapperBuilder.build()) + .addParameter(valueMapperBuilder.build()) + .addStatement("return %L(config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper)", cacheImplName) + .returns(cacheTypeName) + .build() + } - else -> { - throw IllegalArgumentException("Unknown cache type: ${cacheContract.rawType}") - } + @Deprecated(message = "deprecated redis dependency") + private fun 
getRedisDeprecatedFunc( + cacheClass: KSClassDeclaration, + cacheContract: ParameterizedTypeName, + cacheImplName: ClassName, + cacheTypeName: TypeName, + methodName: String + ): FunSpec { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + + val cacheContractType = cacheClass.getAllSuperTypes() + .filter { i -> i.toTypeName() == cacheContract } + .first() + + val keyMapperBuilder = ParameterSpec.builder("keyMapper", keyMapperType) + val keyTags = cacheContractType.arguments[0].parseTags() + if (keyTags.isNotEmpty()) { + keyMapperBuilder.addAnnotation(keyTags.toTagAnnotation()) } + + val valueMapperBuilder = ParameterSpec.builder("valueMapper", valueMapperType) + val valueTags = cacheContractType.arguments[1].parseTags() + if (valueTags.isNotEmpty()) { + valueMapperBuilder.addAnnotation(valueTags.toTagAnnotation()) + } + + return FunSpec.builder(methodName) + .addModifiers(KModifier.PUBLIC) + .addParameter( + ParameterSpec.builder("config", REDIS_CACHE_CONFIG) + .addAnnotation( + AnnotationSpec.builder(CommonClassNames.tag) + .addMember("%T::class", cacheTypeName) + .build() + ) + .build() + ) + .addParameter("redisClient", REDIS_CACHE_OLD_CLIENT) + .addParameter("telemetry", REDIS_TELEMETRY) + .addParameter(keyMapperBuilder.build()) + .addParameter(valueMapperBuilder.build()) + .addStatement("return %L(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) + .returns(cacheTypeName) + .build() } private fun getCacheConstructor(cacheContract: ParameterizedTypeName): FunSpec { @@ -255,22 +325,37 @@ class CacheSymbolProcessor( FunSpec.constructorBuilder() .addParameter("config", CAFFEINE_CACHE_CONFIG) .addParameter("factory", CAFFEINE_CACHE_FACTORY) - .addParameter("telemetry", CAFFEINE_TELEMETRY) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) 
.build() } REDIS_CACHE -> { - val keyType = cacheContract.typeArguments[0] - val valueType = cacheContract.typeArguments[1] - val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) - val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) - FunSpec.constructorBuilder() - .addParameter("config", REDIS_CACHE_CONFIG) - .addParameter("redisClient", REDIS_CACHE_CLIENT) - .addParameter("telemetry", REDIS_TELEMETRY) - .addParameter("keyMapper", keyMapperType) - .addParameter("valueMapper", valueMapperType) - .build() + if (cacheContract.annotations.any { it.typeName == Deprecated::class.asTypeName() }) { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + FunSpec.constructorBuilder() + .addParameter("config", REDIS_CACHE_CONFIG) + .addParameter("redisClient", REDIS_CACHE_SYNC_CLIENT) + .addParameter("telemetry", REDIS_TELEMETRY) + .addParameter("keyMapper", keyMapperType) + .addParameter("valueMapper", valueMapperType) + .build() + } else { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + FunSpec.constructorBuilder() + .addParameter("config", REDIS_CACHE_CONFIG) + .addParameter("redisSyncClient", REDIS_CACHE_SYNC_CLIENT) + .addParameter("redisAsyncClient", REDIS_CACHE_ASYNC_CLIENT) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) + .addParameter("keyMapper", keyMapperType) + .addParameter("valueMapper", valueMapperType) + .build() + } } else -> { @@ -366,8 +451,15 @@ class CacheSymbolProcessor( ?.findValueNoDefault("value")!! 
return when (cacheType.rawType) { - CAFFEINE_CACHE -> CodeBlock.of("%S, config, factory, telemetry", configPath) - REDIS_CACHE -> CodeBlock.of("%S, config, redisClient, telemetry, keyMapper, valueMapper", configPath) + CAFFEINE_CACHE -> CodeBlock.of("%S, config, factory, telemetryFactory", configPath) + REDIS_CACHE -> { + if (cacheContract.annotations.any { it.annotationType.toTypeName() == Deprecated::class.asTypeName() }) { + CodeBlock.of("%S, config, redisClient, telemetry, keyMapper, valueMapper", configPath) + } else { + CodeBlock.of("%S, config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper", configPath) + } + } + else -> throw IllegalArgumentException("Unknown cache type: ${cacheType.rawType}") } } diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt index 1849578f4..e05e769d3 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt @@ -3,8 +3,14 @@ package ru.tinkoff.kora.cache.symbol.processor import kotlinx.coroutines.future.await import kotlinx.coroutines.runBlocking import ru.tinkoff.kora.cache.caffeine.CaffeineCacheConfig -import ru.tinkoff.kora.cache.redis.RedisCacheConfig +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient import ru.tinkoff.kora.cache.redis.RedisCacheClient +import ru.tinkoff.kora.cache.redis.RedisCacheConfig +import ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_ConfigValueExtractor`.TelemetryConfig_Impl +import ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_LogConfig_ConfigValueExtractor`.LogConfig_Impl +import ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_MetricsConfig_ConfigValueExtractor`.MetricsConfig_Impl +import 
ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_TracingConfig_ConfigValueExtractor`.TracingConfig_Impl +import ru.tinkoff.kora.telemetry.common.TelemetryConfig import java.nio.ByteBuffer import java.time.Duration import java.util.* @@ -28,6 +34,10 @@ class CacheRunner { override fun initialSize(): Int? { return null; } + + override fun telemetry(): TelemetryConfig { + return TelemetryConfig_Impl(LogConfig_Impl(false), TracingConfig_Impl(false), MetricsConfig_Impl(false, doubleArrayOf())) + } } } @@ -39,11 +49,15 @@ class CacheRunner { override fun expireAfterWrite(): Duration? = null override fun expireAfterAccess(): Duration? = null + + override fun telemetry(): TelemetryConfig { + return TelemetryConfig_Impl(LogConfig_Impl(false), TracingConfig_Impl(false), MetricsConfig_Impl(false, doubleArrayOf())) + } } } - fun lettuceClient(cache: MutableMap): RedisCacheClient { - return object : RedisCacheClient { + fun lettuceAsyncClient(cache: MutableMap): RedisCacheAsyncClient { + return object : RedisCacheAsyncClient { override fun get(key: ByteArray): CompletionStage { val r = cache[ByteBuffer.wrap(key)] return CompletableFuture.completedFuture(r?.array()) @@ -67,22 +81,22 @@ class CacheRunner { return mget(keys) } - override fun set(key: ByteArray, value: ByteArray) : CompletionStage { + override fun set(key: ByteArray, value: ByteArray): CompletionStage { cache[ByteBuffer.wrap(key)] = ByteBuffer.wrap(value) - return CompletableFuture.completedFuture(true) + return CompletableFuture.completedFuture(null) } - override fun mset(keyAndValue: MutableMap) : CompletionStage { + override fun mset(keyAndValue: MutableMap): CompletionStage { keyAndValue.forEach { (k, v) -> set(k, v) } - return CompletableFuture.completedFuture(true) + return CompletableFuture.completedFuture(null) } - override fun psetex(keyAndValue: MutableMap, expireAfterMillis: Long): CompletionStage { + override fun psetex(keyAndValue: MutableMap, expireAfterMillis: Long): CompletionStage { 
mset(keyAndValue) - return CompletableFuture.completedFuture(true) + return CompletableFuture.completedFuture(null) } - override fun psetex(key: ByteArray, value: ByteArray, expireAfterMillis: Long): CompletionStage { + override fun psetex(key: ByteArray, value: ByteArray, expireAfterMillis: Long): CompletionStage { return set(key, value) } @@ -99,9 +113,69 @@ class CacheRunner { return CompletableFuture.completedFuture(counter.toLong()) } - override fun flushAll() : CompletionStage { + override fun flushAll(): CompletionStage { + cache.clear() + return CompletableFuture.completedFuture(null) + } + } + } + + fun lettuceSyncClient(cache: MutableMap): RedisCacheClient { + return object : RedisCacheClient { + override fun get(key: ByteArray): ByteArray? { + val r = cache[ByteBuffer.wrap(key)] + return r?.array() + } + + override fun mget(keys: Array): Map { + val result: MutableMap = HashMap() + for (key in keys) { + Optional.ofNullable(cache[ByteBuffer.wrap(key)]).ifPresent { r: ByteBuffer -> + result[key] = r.array() + } + } + return result + } + + override fun getex(key: ByteArray, expireAfterMillis: Long): ByteArray? 
{ + return get(key) + } + + override fun getex(keys: Array, expireAfterMillis: Long): Map { + return mget(keys) + } + + override fun set(key: ByteArray, value: ByteArray) { + cache[ByteBuffer.wrap(key)] = ByteBuffer.wrap(value) + } + + override fun mset(keyAndValue: MutableMap) { + keyAndValue.forEach { (k, v) -> set(k, v) } + } + + override fun psetex(keyAndValue: MutableMap, expireAfterMillis: Long) { + mset(keyAndValue) + } + + override fun psetex(key: ByteArray, value: ByteArray, expireAfterMillis: Long) { + return set(key, value) + } + + override fun del(key: ByteArray): Long { + val res = if (cache.remove(ByteBuffer.wrap(key)) == null) 0L else 1L + return res + } + + override fun del(keys: Array): Long { + var counter = 0L + for (key in keys) { + counter += runBlocking { del(key) } + } + return counter + } + + override fun flushAll() { cache.clear() - return CompletableFuture.completedFuture(true) } } } diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt index 2372ddd5e..db912ed68 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt @@ -41,7 +41,7 @@ class SuspendCacheAopTests : CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val serviceClass = classLoader.loadClass(SERVICE_CLASS) diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt index 
306e6a2ae..330fec07b 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt @@ -48,15 +48,16 @@ class SuspendCacheManyAopTests : CaffeineCacheModule, RedisCacheMapperModule { cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") val cache = mutableMapOf() cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), RedisCacheKeyMapper { key -> val k1 = key.k1.toByteArray(StandardCharsets.UTF_8) val k2 = key.k2.toString().toByteArray(StandardCharsets.UTF_8) diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt index 2c108a2fc..5113184ef 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt @@ -39,7 +39,7 @@ class SuspendCacheOneAopTests : CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val serviceClass = 
classLoader.loadClass(SERVICE_CLASS) ?: throw IllegalArgumentException("Expected class not found: $SERVICE_CLASS") diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt index 75f93dfb4..587eed0e2 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt @@ -45,15 +45,16 @@ class SuspendCacheOneManyAopTests : CaffeineCacheModule, RedisCacheMapperModule cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") val cache = mutableMapOf() cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper() ) as DummyCache12 diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt index e1368ae19..cd2a800ad 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt @@ -41,7 +41,7 @@ class SyncCacheAopTests : 
CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val serviceClass = classLoader.loadClass(SERVICE_CLASS) ?: throw IllegalArgumentException("Expected class not found: $SERVICE_CLASS") diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt index 999c7cccd..bbbc1e4be 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt @@ -47,15 +47,16 @@ class SyncCacheManyAopTests : CaffeineCacheModule, RedisCacheMapperModule { cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") val cache = mutableMapOf() cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), RedisCacheKeyMapper { key -> val k1 = key.k1.toByteArray(StandardCharsets.UTF_8) val k2 = key.k2.toString().toByteArray(StandardCharsets.UTF_8) diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt index 
fb5946eb4..5ad2095f9 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt @@ -38,7 +38,7 @@ class SyncCacheOneAopTests : CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val serviceClass = classLoader.loadClass(SERVICE_CLASS) ?: throw IllegalArgumentException("Expected class not found: $SERVICE_CLASS") diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt index d2fe13cb0..574026a0d 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt @@ -44,15 +44,16 @@ class SyncCacheOneManyAopTests : CaffeineCacheModule, RedisCacheMapperModule { cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val cache = mutableMapOf() val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper() ) as 
DummyCache12 diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java index 9af7caf18..3e7cb81e8 100644 --- a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java @@ -10,6 +10,7 @@ import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; import ru.tinkoff.kora.http.server.common.HttpServerConfig; import ru.tinkoff.kora.micrometer.module.cache.MicrometerCacheMetrics; +import ru.tinkoff.kora.micrometer.module.cache.MicrometerCacheMetricsFactory; import ru.tinkoff.kora.micrometer.module.cache.caffeine.MicrometerCaffeineCacheMetricCollector; import ru.tinkoff.kora.micrometer.module.camunda.engine.bpmn.MicrometerCamundaEngineBpmnMetricsFactory; import ru.tinkoff.kora.micrometer.module.camunda.rest.MicrometerCamundaRestMetricsFactory; @@ -132,11 +133,17 @@ default MicrometerTimeoutMetrics micrometerTimeoutMetrics(MeterRegistry meterReg return new MicrometerTimeoutMetrics(meterRegistry); } + @Deprecated @DefaultComponent default MicrometerCacheMetrics micrometerCacheMetrics(MeterRegistry meterRegistry) { return new MicrometerCacheMetrics(meterRegistry); } + @DefaultComponent + default MicrometerCacheMetricsFactory micrometerCacheMetricsFactory(MeterRegistry meterRegistry) { + return new MicrometerCacheMetricsFactory(meterRegistry); + } + @DefaultComponent default MicrometerCaffeineCacheMetricCollector micrometerCaffeineCacheMetricsCollector(MeterRegistry meterRegistry) { return new MicrometerCaffeineCacheMetricCollector(meterRegistry); diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java 
index feffbb01f..1437f0cc4 100644 --- a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java @@ -12,6 +12,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; +@Deprecated public final class MicrometerCacheMetrics implements CacheMetrics { record Key(String cacheName, String origin, String operationName, String status) {} diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java new file mode 100644 index 000000000..a462b0540 --- /dev/null +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java @@ -0,0 +1,27 @@ +package ru.tinkoff.kora.micrometer.module.cache; + +import io.micrometer.core.instrument.MeterRegistry; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheMetricsFactory; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Objects; + +public final class MicrometerCacheMetricsFactory implements CacheMetricsFactory { + + private final MeterRegistry meterRegistry; + + public MicrometerCacheMetricsFactory(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } + + @Override + public CacheMetrics get(TelemetryConfig.MetricsConfig config, CacheTelemetryArgs args) { + if (Objects.requireNonNullElse(config.enabled(), true)) { + return new Opentelemetry120CacheMetrics(meterRegistry, config); + } else { + return null; + } + } +} diff --git 
a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java new file mode 100644 index 000000000..96cb840b5 --- /dev/null +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java @@ -0,0 +1,131 @@ +package ru.tinkoff.kora.micrometer.module.cache; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.DistributionSummary; +import io.micrometer.core.instrument.MeterRegistry; +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public final class Opentelemetry120CacheMetrics implements CacheMetrics { + + record DurationKey(String cacheName, String origin, String operationName, String status) {} + + record RatioKey(String cacheName, String origin, String type) {} + + record OpKey(String cacheName, String origin) {} + + private static final String METRIC_CACHE_DURATION = "cache.duration"; + private static final String METRIC_CACHE_RATIO = "cache.ratio"; + private static final String METRIC_CACHE_HIT = "cache.hit"; + private static final String METRIC_CACHE_MISS = "cache.miss"; + + private static final String TAG_OPERATION = "operation"; + private static final String TAG_CACHE_NAME = "cache"; + private static final String TAG_ORIGIN = "origin"; + private static final String TAG_STATUS = "status"; + private static final String TAG_TYPE = "type"; + + private static final String STATUS_SUCCESS = "success"; + private static final String STATUS_FAILED = "failed"; + + private static final String TYPE_HIT = 
"hit"; + private static final String TYPE_MISS = "miss"; + + private final ConcurrentHashMap durations = new ConcurrentHashMap<>(); + private final ConcurrentHashMap counters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap missCounters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap hitCounters = new ConcurrentHashMap<>(); + + private final MeterRegistry meterRegistry; + private final TelemetryConfig.MetricsConfig config; + + public Opentelemetry120CacheMetrics(MeterRegistry meterRegistry, TelemetryConfig.MetricsConfig config) { + this.meterRegistry = meterRegistry; + this.config = config; + } + + @Override + public void recordSuccess(@Nonnull CacheTelemetryOperation op, long durationInNanos, @Nullable Object valueFromCache) { + final DurationKey key = new DurationKey(op.cacheName(), op.origin(), op.name(), STATUS_SUCCESS); + durations.computeIfAbsent(key, k -> duration(key, null)) + .record((double) durationInNanos / 1_000_000); + + + if ("GET".startsWith(op.name())) { + final String ratioType; + var operationKey = new OpKey(op.cacheName(), op.origin()); + if (valueFromCache == null + || valueFromCache instanceof Collection vc && !vc.isEmpty() + || valueFromCache instanceof Map mc && !mc.isEmpty()) { + ratioType = TYPE_MISS; + + var counter = missCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_MISS) + .description("!!! DEPRECATED !!! Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } else { + ratioType = TYPE_HIT; + + var counter = hitCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_HIT) + .description("!!! DEPRECATED !!! 
Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + + final RatioKey ratioKey = new RatioKey(op.cacheName(), op.origin(), ratioType); + var counter = counters.computeIfAbsent(ratioKey, k -> { + var builder = Counter.builder(METRIC_CACHE_RATIO) + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()) + .tag(TAG_TYPE, ratioType); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + } + + @Override + public void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception) { + final DurationKey key = new DurationKey(operation.cacheName(), operation.origin(), operation.name(), STATUS_FAILED); + durations.computeIfAbsent(key, k -> duration(key, exception)) + .record((double) durationInNanos / 1_000_000); + } + + private DistributionSummary duration(DurationKey key, @Nullable Throwable exception) { + var builder = DistributionSummary.builder(METRIC_CACHE_DURATION) + .serviceLevelObjectives(this.config.slo(TelemetryConfig.MetricsConfig.OpentelemetrySpec.V120)) + .baseUnit("milliseconds") + .tag(TAG_CACHE_NAME, key.cacheName()) + .tag(TAG_OPERATION, key.operationName()) + .tag(TAG_ORIGIN, key.origin()) + .tag(TAG_STATUS, key.status()); + + if (exception != null) { + builder.tag("error", exception.getClass().getCanonicalName()); + } else { + builder.tag("error", ""); + } + + return builder.register(meterRegistry); + } +} diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java new file mode 100644 index 000000000..5cbaa7b9c --- /dev/null +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java @@ -0,0 +1,133 
@@ +package ru.tinkoff.kora.micrometer.module.cache; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.DistributionSummary; +import io.micrometer.core.instrument.MeterRegistry; +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public final class Opentelemetry123CacheMetrics implements CacheMetrics { + + record DurationKey(String cacheName, String origin, String operationName, String status) { + } + + record RatioKey(String cacheName, String origin, String type) { + } + + record OpKey(String cacheName, String origin) { + } + + private static final String METRIC_CACHE_DURATION = "cache.duration"; + private static final String METRIC_CACHE_RATIO = "cache.ratio"; + private static final String METRIC_CACHE_HIT = "cache.hit"; + private static final String METRIC_CACHE_MISS = "cache.miss"; + + private static final String TAG_OPERATION = "operation"; + private static final String TAG_CACHE_NAME = "cache"; + private static final String TAG_ORIGIN = "origin"; + private static final String TAG_STATUS = "status"; + private static final String TAG_TYPE = "type"; + + private static final String STATUS_SUCCESS = "success"; + private static final String STATUS_FAILED = "failed"; + + private static final String TYPE_HIT = "hit"; + private static final String TYPE_MISS = "miss"; + + private final ConcurrentHashMap durations = new ConcurrentHashMap<>(); + private final ConcurrentHashMap counters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap missCounters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap hitCounters = new ConcurrentHashMap<>(); + + 
private final MeterRegistry meterRegistry; + private final TelemetryConfig.MetricsConfig config; + + public Opentelemetry123CacheMetrics(MeterRegistry meterRegistry, TelemetryConfig.MetricsConfig config) { + this.meterRegistry = meterRegistry; + this.config = config; + } + + @Override + public void recordSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache) { + final DurationKey key = new DurationKey(operation.cacheName(), operation.origin(), operation.name(), STATUS_SUCCESS); + durations.computeIfAbsent(key, k -> duration(key, null)) + .record((double) durationInNanos / 1_000_000_000); + + if ("GET".startsWith(operation.name())) { + final String ratioType; + var operationKey = new OpKey(operation.cacheName(), operation.origin()); + if (valueFromCache == null + || valueFromCache instanceof Collection vc && !vc.isEmpty() + || valueFromCache instanceof Map mc && !mc.isEmpty()) { + ratioType = TYPE_MISS; + + var counter = missCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_MISS) + .description("!!! DEPRECATED !!! Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } else { + ratioType = TYPE_HIT; + + var counter = hitCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_HIT) + .description("!!! DEPRECATED !!! 
Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + + final RatioKey ratioKey = new RatioKey(operation.cacheName(), operation.origin(), ratioType); + var counter = counters.computeIfAbsent(ratioKey, k -> { + var builder = Counter.builder(METRIC_CACHE_RATIO) + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()) + .tag(TAG_TYPE, ratioType); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + } + + @Override + public void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception) { + final DurationKey key = new DurationKey(operation.cacheName(), operation.origin(), operation.name(), STATUS_FAILED); + durations.computeIfAbsent(key, k -> duration(key, exception)) + .record((double) durationInNanos / 1_000_000_000); + } + + private DistributionSummary duration(DurationKey key, @Nullable Throwable exception) { + var builder = DistributionSummary.builder(METRIC_CACHE_DURATION) + .serviceLevelObjectives(this.config.slo(TelemetryConfig.MetricsConfig.OpentelemetrySpec.V123)) + .baseUnit("s") + .tag(TAG_CACHE_NAME, key.cacheName()) + .tag(TAG_OPERATION, key.operationName()) + .tag(TAG_ORIGIN, key.origin()) + .tag(TAG_STATUS, key.status()); + + if (exception != null) { + builder.tag("error", exception.getClass().getCanonicalName()); + } else { + builder.tag("error", ""); + } + + return builder.register(meterRegistry); + } +} diff --git a/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java index 118e45921..e7c0eb3a0 100644 --- a/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java +++ 
b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java @@ -3,6 +3,7 @@ import io.opentelemetry.api.trace.Tracer; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.opentelemetry.module.cache.OpentelementryCacheTracer; +import ru.tinkoff.kora.opentelemetry.module.cache.OpentelementryCacheTracerFactory; import ru.tinkoff.kora.opentelemetry.module.camunda.engine.bpmn.OpentelemetryCamundaEngineBpmnTracerFactory; import ru.tinkoff.kora.opentelemetry.module.camunda.rest.OpentelemetryCamundaRestTracerFactory; import ru.tinkoff.kora.opentelemetry.module.camunda.zeebe.worker.OpentelemetryZeebeWorkerTracerFactory; @@ -71,11 +72,17 @@ default OpentelemetrySchedulingTracerFactory opentelemetrySchedulingTracerFactor return new OpentelemetrySchedulingTracerFactory(tracer); } + @Deprecated @DefaultComponent default OpentelementryCacheTracer opentelemetryCacheTracer(Tracer tracer) { return new OpentelementryCacheTracer(tracer); } + @DefaultComponent + default OpentelementryCacheTracerFactory opentelemetryCacheTracerFactory(Tracer tracer) { + return new OpentelementryCacheTracerFactory(tracer); + } + @DefaultComponent default OpentelemetryS3ClientTracerFactory opentelemetryS3ClientTracerFactory(Tracer tracer) { return new OpentelemetryS3ClientTracerFactory(tracer); diff --git a/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java new file mode 100644 index 000000000..461c67178 --- /dev/null +++ b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java @@ -0,0 +1,29 @@ +package ru.tinkoff.kora.opentelemetry.module.cache; + +import io.opentelemetry.api.trace.Tracer; +import jakarta.annotation.Nullable; +import 
ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.cache.telemetry.CacheTracerFactory; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Objects; + +public final class OpentelementryCacheTracerFactory implements CacheTracerFactory { + + private final Tracer tracer; + + public OpentelementryCacheTracerFactory(Tracer tracer) { + this.tracer = tracer; + } + + @Nullable + @Override + public CacheTracer get(TelemetryConfig.TracingConfig tracing, CacheTelemetryArgs args) { + if (Objects.requireNonNullElse(tracing.enabled(), true)) { + return new OpentelementryCacheTracer(tracer); + } else { + return null; + } + } +} diff --git a/redis/redis-jedis/build.gradle b/redis/redis-jedis/build.gradle index 667fb10bc..06ae3be25 100644 --- a/redis/redis-jedis/build.gradle +++ b/redis/redis-jedis/build.gradle @@ -1,8 +1,9 @@ dependencies { - annotationProcessor project(':config:config-annotation-processor') + annotationProcessor project(":config:config-annotation-processor") + + api libs.redis.jedis implementation project(":config:config-common") - implementation libs.redis.jedis testImplementation project(":internal:test-logging") testImplementation project(":internal:test-redis") diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java index 761c4507d..8584ea367 100644 --- a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java @@ -11,6 +11,8 @@ final class JedisFactory { + private JedisFactory() { } + @Nonnull static UnifiedJedis build(JedisConfig config) { return (config.uri().size() == 1) diff --git a/redis/redis-lettuce/build.gradle b/redis/redis-lettuce/build.gradle index 7ca016699..ba982f2ac 100644 --- a/redis/redis-lettuce/build.gradle +++ 
b/redis/redis-lettuce/build.gradle @@ -1,8 +1,8 @@ dependencies { - annotationProcessor project(':config:config-annotation-processor') + annotationProcessor project(":config:config-annotation-processor") - implementation project(":config:config-common") - implementation(libs.redis.lettuce) { + api libs.apache.pool + api(libs.redis.lettuce) { exclude group: 'io.projectreactor', module: 'reactor-core' exclude group: 'io.netty', module: 'netty-common' exclude group: 'io.netty', module: 'netty-handler' @@ -13,6 +13,8 @@ dependencies { implementation libs.netty.handlers implementation libs.netty.transports + implementation project(":config:config-common") + testImplementation project(":internal:test-logging") testImplementation project(":internal:test-redis") } diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java new file mode 100644 index 000000000..dd7635c0a --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java @@ -0,0 +1,37 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; + +import java.nio.ByteBuffer; + +final class LettuceByteBufferCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceByteBufferCodec(); + + @Override + public ByteBuffer decodeKey(ByteBuffer bytes) { + return copy(bytes); + } + + @Override + public ByteBuffer decodeValue(ByteBuffer bytes) { + return copy(bytes); + } + + @Override + public ByteBuffer encodeKey(ByteBuffer key) { + return copy(key); + } + + @Override + public ByteBuffer encodeValue(ByteBuffer value) { + return copy(value); + } + + private static ByteBuffer copy(ByteBuffer source) { + ByteBuffer copy = ByteBuffer.allocate(source.remaining()); + copy.put(source); + copy.flip(); + return copy; + } +} diff --git 
a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java new file mode 100644 index 000000000..890343159 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java @@ -0,0 +1,39 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.internal.LettuceAssert; + +import java.nio.ByteBuffer; + +final class LettuceCompositeRedisCodec implements RedisCodec { + + private final RedisCodec keyCodec; + private final RedisCodec valueCodec; + + LettuceCompositeRedisCodec(RedisCodec keyCodec, RedisCodec valueCodec) { + LettuceAssert.notNull(keyCodec, "Key codec must not be null"); + LettuceAssert.notNull(valueCodec, "Value codec must not be null"); + this.keyCodec = keyCodec; + this.valueCodec = valueCodec; + } + + @Override + public K decodeKey(ByteBuffer bytes) { + return keyCodec.decodeKey(bytes); + } + + @Override + public V decodeValue(ByteBuffer bytes) { + return valueCodec.decodeValue(bytes); + } + + @Override + public ByteBuffer encodeKey(K key) { + return keyCodec.encodeKey(key); + } + + @Override + public ByteBuffer encodeValue(V value) { + return valueCodec.encodeValue(value); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java index 43a461f10..34d01f62a 100644 --- a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java @@ -33,11 +33,45 @@ default Duration commandTimeout() { return Duration.ofSeconds(20); } + PoolConfig pool(); + enum Protocol { - /** Redis 2 to Redis 5 */ + /** + * Redis 2 to Redis 5 + */ RESP2, - /** Redis 6+ */ + /** + * Redis 6+ + */ RESP3 } + + 
@ConfigValueExtractor + interface PoolConfig { + + default int maxTotal() { + return Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4; + } + + default int maxIdle() { + return Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4; + } + + default int minIdle() { + return 0; + } + + default boolean validateOnAcquire() { + return false; + } + + default boolean validateOnCreate() { + return false; + } + + default boolean validateOnRelease() { + return false; + } + } } diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java index 3aafeac4d..7ce991a55 100644 --- a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java @@ -13,6 +13,8 @@ final class LettuceFactory { + private LettuceFactory() { } + @Nonnull static AbstractRedisClient build(LettuceConfig config) { final Duration commandTimeout = config.commandTimeout(); @@ -105,7 +107,7 @@ private static RedisClient buildRedisClientInternal(RedisURI redisURI, return client; } - private static List buildRedisURI(LettuceConfig config) { + static List buildRedisURI(LettuceConfig config) { final Integer database = config.database(); final String user = config.user(); final String password = config.password(); diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java new file mode 100644 index 000000000..9e075d82e --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java @@ -0,0 +1,32 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.codec.StringCodec; + +import java.nio.ByteBuffer; + +final class LettuceIntegerCodec implements 
RedisCodec { + + static final RedisCodec INSTANCE = new LettuceIntegerCodec(); + + @Override + public Integer decodeKey(ByteBuffer bytes) { + String s = StringCodec.ASCII.decodeKey(bytes); + return s == null ? null : Integer.valueOf(s); + } + + @Override + public Integer decodeValue(ByteBuffer bytes) { + return decodeKey(bytes); + } + + @Override + public ByteBuffer encodeKey(Integer key) { + return StringCodec.ASCII.encodeKey(key == null ? null : key.toString()); + } + + @Override + public ByteBuffer encodeValue(Integer value) { + return encodeKey(value); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java new file mode 100644 index 000000000..742c972eb --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java @@ -0,0 +1,53 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.api.StatefulConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.util.TimeUtils; + +final class LettuceLifecycleConnectionWrapper implements Lifecycle, Wrapped> { + + private static final Logger logger = LoggerFactory.getLogger(LettuceFactory.class); + + private final ConnectionProvider provider; + + private volatile StatefulConnection connection; + + @FunctionalInterface + interface ConnectionProvider { + StatefulConnection create() throws Exception; + } + + LettuceLifecycleConnectionWrapper(ConnectionProvider provider) { + this.provider = provider; + } + + @Override + public StatefulConnection value() { + return this.connection; + } + + @Override + public void init() throws Exception { + logger.debug("Lettuce Redis connection starting..."); + final long started = TimeUtils.started(); + + 
this.connection = provider.create(); + + logger.info("Lettuce Redis connection started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + if (this.connection != null) { + logger.debug("Lettuce Redis connection stopping..."); + final long stopping = TimeUtils.started(); + + this.connection.close(); + + logger.info("Lettuce Redis connection stopped in {}", TimeUtils.tookForLogging(stopping)); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java new file mode 100644 index 000000000..7a9e63f0b --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java @@ -0,0 +1,54 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.api.StatefulConnection; +import io.lettuce.core.support.AsyncPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.util.TimeUtils; + +final class LettuceLifecyclePoolAsyncWrapper implements Lifecycle, Wrapped>> { + + private static final Logger logger = LoggerFactory.getLogger(LettuceFactory.class); + + private final PoolProvider provider; + + private volatile AsyncPool> connection; + + @FunctionalInterface + interface PoolProvider { + AsyncPool> create() throws Exception; + } + + LettuceLifecyclePoolAsyncWrapper(PoolProvider provider) { + this.provider = provider; + } + + @Override + public AsyncPool> value() { + return this.connection; + } + + @Override + public void init() throws Exception { + logger.debug("Lettuce Redis async pool starting..."); + final long started = TimeUtils.started(); + + this.connection = provider.create(); + + logger.info("Lettuce Redis async pool started in {}", 
TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + if (this.connection != null) { + logger.debug("Lettuce Redis async pool stopping..."); + final long stopping = TimeUtils.started(); + + this.connection.close(); + + logger.info("Lettuce Redis async pool stopped in {}", TimeUtils.tookForLogging(stopping)); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java new file mode 100644 index 000000000..fcf5294a3 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java @@ -0,0 +1,54 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.api.StatefulConnection; +import org.apache.commons.pool2.ObjectPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.util.TimeUtils; + +final class LettuceLifecyclePoolSyncWrapper implements Lifecycle, Wrapped>> { + + private static final Logger logger = LoggerFactory.getLogger(LettuceFactory.class); + + private final PoolProvider provider; + + private volatile ObjectPool> pool; + + @FunctionalInterface + interface PoolProvider { + ObjectPool> create() throws Exception; + } + + LettuceLifecyclePoolSyncWrapper(PoolProvider provider) { + this.provider = provider; + } + + @Override + public ObjectPool> value() { + return this.pool; + } + + @Override + public void init() throws Exception { + logger.debug("Lettuce Redis sync pool starting..."); + final long started = TimeUtils.started(); + + this.pool = provider.create(); + + logger.info("Lettuce Redis sync pool started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + if (this.pool != null) { + logger.debug("Lettuce Redis sync pool 
stopping..."); + final long stopping = TimeUtils.started(); + + this.pool.close(); + + logger.info("Lettuce Redis sync pool stopped in {}", TimeUtils.tookForLogging(stopping)); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java new file mode 100644 index 000000000..0190aef74 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java @@ -0,0 +1,32 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.codec.StringCodec; + +import java.nio.ByteBuffer; + +final class LettuceLongCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceLongCodec(); + + @Override + public Long decodeKey(ByteBuffer bytes) { + String s = StringCodec.ASCII.decodeKey(bytes); + return s == null ? null : Long.valueOf(s); + } + + @Override + public Long decodeValue(ByteBuffer bytes) { + return decodeKey(bytes); + } + + @Override + public ByteBuffer encodeKey(Long key) { + return StringCodec.ASCII.encodeKey(key == null ? 
null : key.toString()); + } + + @Override + public ByteBuffer encodeValue(Long value) { + return encodeKey(value); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java index 1f740dcb2..97f61f33e 100644 --- a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java @@ -1,9 +1,31 @@ package ru.tinkoff.kora.redis.lettuce; import io.lettuce.core.AbstractRedisClient; +import io.lettuce.core.RedisURI; +import io.lettuce.core.api.StatefulConnection; +import io.lettuce.core.api.StatefulRedisConnection; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; +import io.lettuce.core.cluster.api.reactive.RedisClusterReactiveCommands; +import io.lettuce.core.cluster.api.sync.RedisClusterCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.codec.StringCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.AsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import io.lettuce.core.support.ConnectionPoolSupport; +import org.apache.commons.pool2.ObjectPool; +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.config.common.Config; import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; +import java.nio.ByteBuffer; +import java.util.List; + public interface LettuceModule { default LettuceConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { @@ -14,4 +36,126 @@ default LettuceConfig lettuceConfig(Config config, 
ConfigValueExtractor lettuceRedisVoidCodec() { + return LettuceVoidCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisByteArrayCodec() { + return ByteArrayCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisByteBufferCodec() { + return LettuceByteBufferCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisStringCodec() { + return StringCodec.UTF8; + } + + @DefaultComponent + default RedisCodec lettuceRedisLongCodec() { + return LettuceLongCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisIntegerCodec() { + return LettuceIntegerCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisCompositeCodec(RedisCodec keyCodec, + RedisCodec valueCodec) { + return new LettuceCompositeRedisCodec<>(keyCodec, valueCodec); + } + + default Wrapped> lettuceStatefulConnection(AbstractRedisClient redisClient, + RedisCodec codec) { + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + return new LettuceLifecycleConnectionWrapper<>(() -> rc.connect(codec)); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceLifecycleConnectionWrapper<>(() -> rcc.connect(codec)); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + default Wrapped>> lettuceSyncConnectionPool(AbstractRedisClient redisClient, + LettuceConfig lettuceConfig, + RedisCodec codec) { + final GenericObjectPoolConfig> poolConfig = new GenericObjectPoolConfig<>(); + poolConfig.setMaxTotal(lettuceConfig.pool().maxTotal()); + poolConfig.setMaxIdle(lettuceConfig.pool().maxIdle()); + poolConfig.setMinIdle(lettuceConfig.pool().minIdle()); + poolConfig.setTestOnBorrow(lettuceConfig.pool().validateOnAcquire()); + poolConfig.setTestOnCreate(lettuceConfig.pool().validateOnCreate()); + poolConfig.setTestOnReturn(lettuceConfig.pool().validateOnRelease()); + + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + final 
List redisURIs = LettuceFactory.buildRedisURI(lettuceConfig); + var redisURI = redisURIs.size() == 1 ? redisURIs.get(0) : null; + return new LettuceLifecyclePoolSyncWrapper<>(() -> ConnectionPoolSupport.createGenericObjectPool(() -> rc.connect(codec, redisURI), poolConfig)); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceLifecyclePoolSyncWrapper<>(() -> ConnectionPoolSupport.createGenericObjectPool(() -> rcc.connect(codec), poolConfig, false)); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + default Wrapped>> lettuceAsyncConnectionPool(AbstractRedisClient redisClient, + LettuceConfig lettuceConfig, + RedisCodec codec) { + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(lettuceConfig.pool().maxTotal()) + .maxIdle(lettuceConfig.pool().maxIdle()) + .minIdle(lettuceConfig.pool().minIdle()) + .testOnAcquire(lettuceConfig.pool().validateOnAcquire()) + .testOnCreate(lettuceConfig.pool().validateOnCreate()) + .testOnRelease(lettuceConfig.pool().validateOnRelease()) + .build(); + + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + final List redisURIs = LettuceFactory.buildRedisURI(lettuceConfig); + var redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; + return new LettuceLifecyclePoolAsyncWrapper<>(() -> AsyncConnectionPoolSupport.createBoundedObjectPool(() -> rc.connectAsync(codec, redisURI).thenApply(v -> v), poolConfig)); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceLifecyclePoolAsyncWrapper<>(() -> AsyncConnectionPoolSupport.createBoundedObjectPool(() -> rcc.connectAsync(codec).thenApply(v -> v), poolConfig, false)); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + default RedisClusterCommands lettuceRedisClusterSyncCommands(StatefulConnection connection) { + if (connection instanceof StatefulRedisConnection rc) { + return rc.sync(); + } else if (connection instanceof StatefulRedisClusterConnection rcc) { + return rcc.sync(); + } else { + throw new UnsupportedOperationException("Unknown Redis Connection: " + connection.getClass()); + } + } + + default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands(StatefulConnection connection) { + if (connection instanceof StatefulRedisConnection rc) { + return rc.async(); + } else if (connection instanceof StatefulRedisClusterConnection rcc) { + return rcc.async(); + } else { + throw new UnsupportedOperationException("Unknown Redis Connection: " + connection.getClass()); + } + } + + default RedisClusterReactiveCommands lettuceRedisClusterReactiveCommands(StatefulConnection connection) { + if (connection instanceof StatefulRedisConnection rc) { + return rc.reactive(); + } else if (connection instanceof StatefulRedisClusterConnection rcc) { + return rcc.reactive(); + } else { + throw new UnsupportedOperationException("Unknown Redis Connection: " + connection.getClass()); + } + } } diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java new file mode 100644 index 000000000..7fae76835 --- /dev/null +++ 
b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java @@ -0,0 +1,30 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; + +import java.nio.ByteBuffer; + +final class LettuceVoidCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceVoidCodec(); + + @Override + public Void decodeKey(ByteBuffer bytes) { + return null; + } + + @Override + public Void decodeValue(ByteBuffer bytes) { + return null; + } + + @Override + public ByteBuffer encodeKey(Void key) { + return null; + } + + @Override + public ByteBuffer encodeValue(Void value) { + return null; + } +} From e95a77f1d3f3bde27f4f645cb9fc4aff6d15e7b8 Mon Sep 17 00:00:00 2001 From: Anton Kurako Date: Wed, 5 Mar 2025 09:22:14 +0300 Subject: [PATCH 3/5] Redis Deprecated cache module support fixed JedisCacheStubAsyncClient for fake async client added JedisCacheAsyncClient uses user provided executor for async operations --- .../processor/CacheAnnotationProcessor.java | 23 ++- .../kora/cache/caffeine/CaffeineCache.java | 1 - .../ru/tinkoff/kora/cache/AsyncCache.java | 1 - .../kora/cache/AsyncFacadeCacheBuilder.java | 1 - .../java/ru/tinkoff/kora/cache/Cache.java | 2 - .../redis/jedis/JedisCacheAsyncClient.java | 81 ++++------- .../cache/redis/jedis/JedisCacheModule.java | 29 +++- .../jedis/JedisCacheStubAsyncClient.java | 133 ++++++++++++++++++ .../kora/cache/redis/jedis/CacheRunner.java | 4 +- .../kora/cache/redis/RedisCacheModule.java | 2 +- .../cache/redis/lettuce/LettuceModule.java | 5 - .../symbol/processor/CacheSymbolProcessor.kt | 16 ++- .../tinkoff/kora/redis/jedis/JedisConfig.java | 8 +- .../kora/redis/jedis/JedisFactory.java | 2 +- .../kora/redis/lettuce/LettuceFactory.java | 2 +- .../kora/redis/lettuce/LettuceModule.java | 35 ++++- 16 files changed, 251 insertions(+), 94 deletions(-) create mode 100644 cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java diff --git 
a/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java b/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java index ec5305dc2..802275897 100644 --- a/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java +++ b/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java @@ -87,10 +87,10 @@ public boolean process(Set annotations, RoundEnvironment var cacheImplBase = getCacheImplBase(cacheContract, cacheContractType); var implSpec = TypeSpec.classBuilder(getCacheImpl(cacheContract)) - .addModifiers(Modifier.FINAL) + .addModifiers(Modifier.PUBLIC, Modifier.FINAL) .addAnnotation(AnnotationSpec.builder(CommonClassNames.koraGenerated) .addMember("value", CodeBlock.of("$S", CacheAnnotationProcessor.class.getCanonicalName())).build()) - .addMethod(getCacheConstructor(configPath, cacheContractType)) + .addMethod(getCacheConstructor(configPath, cacheContractType, cacheContract)) .superclass(cacheImplBase) .addSuperinterface(cacheContract.asType()) .build(); @@ -151,6 +151,16 @@ private ParameterizedTypeName getCacheSuperType(TypeElement candidate) { return null; } + private boolean isRedisDeprecated(TypeElement cacheContract) { + return cacheContract.getInterfaces().stream() + .filter(a -> a instanceof DeclaredType) + .map(a -> ((DeclaredType) a)) + .map(a -> ((TypeElement) a.asElement())) + .filter(a -> ClassName.get(a).equals(REDIS_CACHE)) + .flatMap(a -> a.getAnnotationMirrors().stream()) + .anyMatch(a -> TypeName.get(a.getAnnotationType()).equals(TypeName.get(Deprecated.class))); + } + private TypeName getCacheImplBase(TypeElement cacheContract, ParameterizedTypeName cacheType) { if (cacheType.rawType.equals(CAFFEINE_CACHE)) { return ParameterizedTypeName.get(CAFFEINE_CACHE_IMPL, cacheType.typeArguments.get(0), 
cacheType.typeArguments.get(1)); @@ -289,7 +299,7 @@ private MethodSpec getCacheMethodImpl(TypeElement cacheContract, ParameterizedTy } if (cacheType.rawType.equals(REDIS_CACHE)) { - if (cacheType.annotations.stream().anyMatch(a -> a.type.equals(TypeName.get(Deprecated.class)))) { + if (isRedisDeprecated(cacheContract)) { return getCacheRedisDeprecatedMethod(cacheContract, cacheType, cacheImplName, methodName); } else { return getCacheRedisMethod(cacheContract, cacheType, cacheImplName, methodName); @@ -387,9 +397,10 @@ private MethodSpec getCacheRedisDeprecatedMethod(TypeElement cacheContract, .build(); } - private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName cacheContract) { + private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName cacheContract, TypeElement cacheElement) { if (cacheContract.rawType.equals(CAFFEINE_CACHE)) { return MethodSpec.constructorBuilder() + .addModifiers(Modifier.PUBLIC) .addParameter(CAFFEINE_CACHE_CONFIG, "config") .addParameter(CAFFEINE_CACHE_FACTORY, "factory") .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") @@ -398,12 +409,13 @@ private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName } if (cacheContract.rawType.equals(REDIS_CACHE)) { - if (cacheContract.annotations.stream().anyMatch(a -> a.type.equals(TypeName.get(Deprecated.class)))) { + if (isRedisDeprecated(cacheElement)) { var keyType = cacheContract.typeArguments.get(0); var valueType = cacheContract.typeArguments.get(1); var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); return MethodSpec.constructorBuilder() + .addModifiers(Modifier.PUBLIC) .addParameter(REDIS_CACHE_CONFIG, "config") .addParameter(REDIS_CACHE_OLD_CLIENT, "redisClient") .addParameter(REDIS_TELEMETRY, "telemetry") @@ -417,6 +429,7 @@ private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName 
var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); return MethodSpec.constructorBuilder() + .addModifiers(Modifier.PUBLIC) .addParameter(REDIS_CACHE_CONFIG, "config") .addParameter(REDIS_CACHE_SYNC_CLIENT, "redisSyncClient") .addParameter(REDIS_CACHE_ASYNC_CLIENT, "redisAsyncClient") diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java index a1274eddd..2b59a6710 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java @@ -1,7 +1,6 @@ package ru.tinkoff.kora.cache.caffeine; import jakarta.annotation.Nonnull; -import ru.tinkoff.kora.cache.AsyncCache; import ru.tinkoff.kora.cache.Cache; import java.util.Map; diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java index da0d0839c..23386eade 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java @@ -9,7 +9,6 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.function.Function; -import java.util.stream.Collectors; /** * Represents Async Cache contract. 
diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java index 3f51c8e93..23cab16a2 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java @@ -6,7 +6,6 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.function.Function; -import java.util.stream.Collectors; final class AsyncFacadeCacheBuilder implements AsyncCache.Builder { diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java index ed16caa33..8ef24c718 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java @@ -7,9 +7,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Set; -import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import java.util.stream.Collectors; /** * Represents Synchronous Cache contract. 
diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java index c53533d42..330f101e5 100644 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java @@ -7,127 +7,96 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.concurrent.Executor; final class JedisCacheAsyncClient implements RedisCacheAsyncClient { private final RedisCacheClient syncClient; + private final Executor executor; - JedisCacheAsyncClient(RedisCacheClient syncClient) { + JedisCacheAsyncClient(RedisCacheClient syncClient, Executor executor) { this.syncClient = syncClient; + this.executor = executor; } @Nonnull @Override public CompletionStage get(byte[] key) { - try { - return CompletableFuture.completedFuture(syncClient.get(key)); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return CompletableFuture.supplyAsync(() -> syncClient.get(key), executor); } @Nonnull @Override public CompletionStage> mget(byte[][] keys) { - try { - return CompletableFuture.completedFuture(syncClient.mget(keys)); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return CompletableFuture.supplyAsync(() -> syncClient.mget(keys), executor); } @Nonnull @Override public CompletionStage getex(byte[] key, long expireAfterMillis) { - try { - return CompletableFuture.completedFuture(syncClient.getex(key, expireAfterMillis)); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return CompletableFuture.supplyAsync(() -> syncClient.getex(key, expireAfterMillis), executor); } @Nonnull @Override public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { - try { - 
return CompletableFuture.completedFuture(syncClient.getex(keys, expireAfterMillis)); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return CompletableFuture.supplyAsync(() -> syncClient.getex(keys, expireAfterMillis), executor); } @Nonnull @Override public CompletionStage set(byte[] key, byte[] value) { - try { + return CompletableFuture.supplyAsync(() -> { syncClient.set(key, value); - return CompletableFuture.completedFuture(null); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return null; + }, executor); } @Nonnull @Override public CompletionStage mset(@Nonnull Map keyAndValue) { - try { + return CompletableFuture.supplyAsync(() -> { syncClient.mset(keyAndValue); - return CompletableFuture.completedFuture(null); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return null; + }, executor); } @Nonnull @Override public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { - try { + return CompletableFuture.supplyAsync(() -> { syncClient.psetex(key, value, expireAfterMillis); - return CompletableFuture.completedFuture(null); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return null; + }, executor); } @Nonnull @Override public CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis) { - try { + return CompletableFuture.supplyAsync(() -> { syncClient.psetex(keyAndValue, expireAfterMillis); - return CompletableFuture.completedFuture(null); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return null; + }, executor); } @Nonnull @Override public CompletionStage del(byte[] key) { - try { - return CompletableFuture.completedFuture(syncClient.del(key)); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return CompletableFuture.supplyAsync(() -> syncClient.del(key), executor); } @Nonnull @Override public CompletionStage del(byte[][] keys) { - try { - return 
CompletableFuture.completedFuture(syncClient.del(keys)); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return CompletableFuture.supplyAsync(() -> syncClient.del(keys), executor); } @Nonnull @Override public CompletionStage flushAll() { - try { + return CompletableFuture.supplyAsync(() -> { syncClient.flushAll(); - return CompletableFuture.completedFuture(null); - } catch (Exception e) { - return CompletableFuture.failedFuture(e); - } + return null; + }, executor); } } diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java index fb5f69920..064bf17e0 100644 --- a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java @@ -1,21 +1,42 @@ package ru.tinkoff.kora.cache.redis.jedis; +import jakarta.annotation.Nullable; +import redis.clients.jedis.Jedis; import redis.clients.jedis.UnifiedJedis; +import ru.tinkoff.kora.application.graph.internal.loom.VirtualThreadExecutorHolder; import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; import ru.tinkoff.kora.cache.redis.RedisCacheClient; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.common.Tag; import ru.tinkoff.kora.redis.jedis.JedisModule; +import java.util.concurrent.Executor; +import java.util.concurrent.ForkJoinPool; + public interface JedisCacheModule extends RedisCacheModule, JedisModule { + @Tag(Jedis.class) @DefaultComponent - default RedisCacheClient lettuceRedisClient(UnifiedJedis jedis) { + default Executor jedisRedisCacheAsyncExecutor() { + var virtualExecutor = VirtualThreadExecutorHolder.executor(); + if (virtualExecutor == null) { + return ForkJoinPool.commonPool(); + } else { + return virtualExecutor; + } + } + + 
default RedisCacheClient jedisRedisCacheSyncClient(UnifiedJedis jedis) { return new JedisCacheSyncClient(jedis); } - @DefaultComponent - default RedisCacheAsyncClient lettuceRedisAsyncClient(RedisCacheClient redisCacheClient) { - return new JedisCacheAsyncClient(redisCacheClient); + default RedisCacheAsyncClient jedisRedisCacheAsyncClient(RedisCacheClient redisCacheClient, + @Tag(Jedis.class) @Nullable Executor executor) { + if (executor == null) { + return new JedisCacheStubAsyncClient(redisCacheClient); + } else { + return new JedisCacheAsyncClient(redisCacheClient, executor); + } } } diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java new file mode 100644 index 000000000..82d92d449 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java @@ -0,0 +1,133 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nonnull; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +final class JedisCacheStubAsyncClient implements RedisCacheAsyncClient { + + private final RedisCacheClient syncClient; + + JedisCacheStubAsyncClient(RedisCacheClient syncClient) { + this.syncClient = syncClient; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + try { + return CompletableFuture.completedFuture(syncClient.get(key)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + try { + return CompletableFuture.completedFuture(syncClient.mget(keys)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + 
@Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + try { + return CompletableFuture.completedFuture(syncClient.getex(key, expireAfterMillis)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + try { + return CompletableFuture.completedFuture(syncClient.getex(keys, expireAfterMillis)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + try { + syncClient.set(key, value); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage mset(@Nonnull Map keyAndValue) { + try { + syncClient.mset(keyAndValue); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + try { + syncClient.psetex(key, value, expireAfterMillis); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis) { + try { + syncClient.psetex(keyAndValue, expireAfterMillis); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + try { + return CompletableFuture.completedFuture(syncClient.del(key)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + try { + return CompletableFuture.completedFuture(syncClient.del(keys)); + } catch 
(Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage flushAll() { + try { + syncClient.flushAll(); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java index 58d6a08f9..2866b50da 100644 --- a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java @@ -72,8 +72,8 @@ public String password() { return new JedisCacheSyncClient(jedis); } - private RedisCacheAsyncClient createAsyncJedis(RedisCacheClient cacheClient) throws Exception { - return new JedisCacheAsyncClient(cacheClient); + private RedisCacheAsyncClient createAsyncJedis(RedisCacheClient cacheClient) { + return new JedisCacheStubAsyncClient(cacheClient); } private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java index b8553aa2c..15928f40e 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -3,7 +3,7 @@ import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; /** - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND LettuceCacheModule */ @Deprecated public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { diff --git 
a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java index ad773cc8a..9e5e4e366 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java @@ -1,16 +1,11 @@ package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nullable; import ru.tinkoff.kora.cache.redis.RedisCacheClient; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.config.common.Config; -import ru.tinkoff.kora.config.common.ConfigValue; import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; -import java.time.Duration; - /** * Use dependency - ru.tinkoff.kora:cache-redis-lettuce */ diff --git a/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt b/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt index 8e595bd1c..e3198ea7c 100644 --- a/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt +++ b/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt @@ -76,7 +76,7 @@ class CacheSymbolProcessor( val cacheImplBase = getCacheImplBase(cacheContractType) val implSpec = TypeSpec.classBuilder(getCacheImpl(cacheContract)) .generated(CacheSymbolProcessor::class) - .primaryConstructor(getCacheConstructor(cacheContractType)) + .primaryConstructor(getCacheConstructor(cacheContractType, cacheContract)) .addSuperclassConstructorParameter(getCacheSuperConstructorCall(cacheContract, cacheContractType)) .superclass(cacheImplBase) .addSuperinterface(cacheContract.toTypeName()) @@ -208,7 +208,7 @@ class 
CacheSymbolProcessor( } REDIS_CACHE -> { - if (cacheContract.annotations.any { it.typeName == Deprecated::class.asTypeName() }) { + if (isRedisDeprecated(cacheClass)) { return getRedisDeprecatedFunc(cacheClass, cacheContract, cacheImplName, cacheTypeName, methodName) } else { return getRedisFunc(cacheClass, cacheContract, cacheImplName, cacheTypeName, methodName) @@ -319,7 +319,7 @@ class CacheSymbolProcessor( .build() } - private fun getCacheConstructor(cacheContract: ParameterizedTypeName): FunSpec { + private fun getCacheConstructor(cacheContract: ParameterizedTypeName, cacheClass: KSClassDeclaration): FunSpec { return when (cacheContract.rawType) { CAFFEINE_CACHE -> { FunSpec.constructorBuilder() @@ -330,7 +330,7 @@ class CacheSymbolProcessor( } REDIS_CACHE -> { - if (cacheContract.annotations.any { it.typeName == Deprecated::class.asTypeName() }) { + if (isRedisDeprecated(cacheClass)) { val keyType = cacheContract.typeArguments[0] val valueType = cacheContract.typeArguments[1] val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) @@ -364,6 +364,12 @@ class CacheSymbolProcessor( } } + private fun isRedisDeprecated(cacheContract: KSClassDeclaration): Boolean { + return cacheContract.getAllSuperTypes() + .filter { a -> a.toClassName() == REDIS_CACHE } + .any { a -> a.declaration.annotations.any { it.annotationType.toTypeName() == Deprecated::class.asTypeName() } } + } + private fun getCacheRedisKeyMapperForData(keyType: KSClassDeclaration): FunSpec { val prefix = keyType.toClassName().simpleNames.joinToString("_") val methodName = "${prefix}_RedisKeyMapper" @@ -453,7 +459,7 @@ class CacheSymbolProcessor( return when (cacheType.rawType) { CAFFEINE_CACHE -> CodeBlock.of("%S, config, factory, telemetryFactory", configPath) REDIS_CACHE -> { - if (cacheContract.annotations.any { it.annotationType.toTypeName() == Deprecated::class.asTypeName() }) { + if (isRedisDeprecated(cacheContract)) { CodeBlock.of("%S, config, redisClient, telemetry, keyMapper, 
valueMapper", configPath) } else { CodeBlock.of("%S, config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper", configPath) diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java index 93fe0d4b2..800dbfd85 100644 --- a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java @@ -34,9 +34,13 @@ default Duration commandTimeout() { enum Protocol { - /** Redis 2 to Redis 5 */ + /** + * Redis 2 to Redis 5 + */ RESP2, - /** Redis 6+ */ + /** + * Redis 6+ + */ RESP3 } } diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java index 8584ea367..10239a82e 100644 --- a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java @@ -11,7 +11,7 @@ final class JedisFactory { - private JedisFactory() { } + private JedisFactory() {} @Nonnull static UnifiedJedis build(JedisConfig config) { diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java index 7ce991a55..35cac642a 100644 --- a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java @@ -13,7 +13,7 @@ final class LettuceFactory { - private LettuceFactory() { } + private LettuceFactory() {} @Nonnull static AbstractRedisClient build(LettuceConfig config) { diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java 
index 97f61f33e..7d61a6ddf 100644 --- a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java @@ -18,6 +18,7 @@ import io.lettuce.core.support.ConnectionPoolSupport; import org.apache.commons.pool2.ObjectPool; import org.apache.commons.pool2.impl.GenericObjectPoolConfig; +import ru.tinkoff.kora.application.graph.TypeRef; import ru.tinkoff.kora.application.graph.Wrapped; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.config.common.Config; @@ -69,12 +70,17 @@ default RedisCodec lettuceRedisIntegerCodec() { @DefaultComponent default RedisCodec lettuceRedisCompositeCodec(RedisCodec keyCodec, - RedisCodec valueCodec) { + RedisCodec valueCodec, + TypeRef keyRef, + TypeRef valueRef) { return new LettuceCompositeRedisCodec<>(keyCodec, valueCodec); } + @DefaultComponent default Wrapped> lettuceStatefulConnection(AbstractRedisClient redisClient, - RedisCodec codec) { + RedisCodec codec, + TypeRef keyRef, + TypeRef valueRef) { if (redisClient instanceof io.lettuce.core.RedisClient rc) { return new LettuceLifecycleConnectionWrapper<>(() -> rc.connect(codec)); } else if (redisClient instanceof RedisClusterClient rcc) { @@ -84,9 +90,12 @@ default Wrapped> lettuceStatefulConnection(Abstr } } + @DefaultComponent default Wrapped>> lettuceSyncConnectionPool(AbstractRedisClient redisClient, LettuceConfig lettuceConfig, - RedisCodec codec) { + RedisCodec codec, + TypeRef keyRef, + TypeRef valueRef) { final GenericObjectPoolConfig> poolConfig = new GenericObjectPoolConfig<>(); poolConfig.setMaxTotal(lettuceConfig.pool().maxTotal()); poolConfig.setMaxIdle(lettuceConfig.pool().maxIdle()); @@ -106,9 +115,12 @@ default Wrapped>> lettuceSyncConnecti } } + @DefaultComponent default Wrapped>> lettuceAsyncConnectionPool(AbstractRedisClient redisClient, LettuceConfig lettuceConfig, - RedisCodec codec) { + RedisCodec codec, + TypeRef keyRef, + TypeRef 
valueRef) { final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() .maxTotal(lettuceConfig.pool().maxTotal()) .maxIdle(lettuceConfig.pool().maxIdle()) @@ -129,7 +141,10 @@ default Wrapped>> lettuceAsyncConnecti } } - default RedisClusterCommands lettuceRedisClusterSyncCommands(StatefulConnection connection) { + @DefaultComponent + default RedisClusterCommands lettuceRedisClusterSyncCommands(StatefulConnection connection, + TypeRef keyRef, + TypeRef valueRef) { if (connection instanceof StatefulRedisConnection rc) { return rc.sync(); } else if (connection instanceof StatefulRedisClusterConnection rcc) { @@ -139,7 +154,10 @@ default RedisClusterCommands lettuceRedisClusterSyncCommands(Statef } } - default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands(StatefulConnection connection) { + @DefaultComponent + default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands(StatefulConnection connection, + TypeRef keyRef, + TypeRef valueRef) { if (connection instanceof StatefulRedisConnection rc) { return rc.async(); } else if (connection instanceof StatefulRedisClusterConnection rcc) { @@ -149,7 +167,10 @@ default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands( } } - default RedisClusterReactiveCommands lettuceRedisClusterReactiveCommands(StatefulConnection connection) { + @DefaultComponent + default RedisClusterReactiveCommands lettuceRedisClusterReactiveCommands(StatefulConnection connection, + TypeRef keyRef, + TypeRef valueRef) { if (connection instanceof StatefulRedisConnection rc) { return rc.reactive(); } else if (connection instanceof StatefulRedisClusterConnection rcc) { From a62fbeac9d3c587a0008c98f6a75c728d12d3fc2 Mon Sep 17 00:00:00 2001 From: Anton Kurako Date: Tue, 11 Mar 2025 10:31:15 +0300 Subject: [PATCH 4/5] Javadoc improved --- .../ru/tinkoff/kora/cache/redis/AbstractRedisCache.java | 6 +++++- .../main/java/ru/tinkoff/kora/cache/redis/RedisCache.java | 6 +++++- 
.../java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java | 6 +++++- .../java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java | 6 +++++- .../ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java | 7 ++++++- .../tinkoff/kora/cache/redis/RedisCacheMapperModule.java | 6 +++++- .../java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java | 4 ++++ .../ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java | 7 ++++++- 8 files changed, 41 insertions(+), 7 deletions(-) diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java index 3ac3678b2..97a50013b 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -14,7 +14,11 @@ import java.util.stream.Collectors; /** - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCacheAsyncClient + *

+ * Check documentation for more information */ @Deprecated public abstract class AbstractRedisCache implements AsyncCache { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java index 45b17e929..8c46d888e 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -3,7 +3,11 @@ import ru.tinkoff.kora.cache.AsyncCache; /** - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCache + *

+ * Check documentation for more information */ @Deprecated public interface RedisCache extends AsyncCache { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java index 9711c9d83..9f692ccb6 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -6,7 +6,11 @@ import java.util.concurrent.CompletionStage; /** - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCacheAsyncClient + *

+ * Check documentation for more information */ @Deprecated public interface RedisCacheClient { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java index 8404c59ec..a1a3ccc8d 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -7,7 +7,11 @@ import java.time.Duration; /** - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCacheConfig + *

+ * Check documentation for more information */ @Deprecated @ConfigValueExtractor diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java index ae8584c94..ff31832ee 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -6,8 +6,13 @@ import java.util.function.Function; /** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCacheKeyMapper + *

+ * Check documentation for more information + *

* Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce */ @Deprecated public interface RedisCacheKeyMapper extends Function { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java index 86fd8225f..ab8c47e7a 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -15,7 +15,11 @@ import java.util.UUID; /** - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCacheMapperModule + *

+ * Check documentation for more information */ @Deprecated public interface RedisCacheMapperModule extends JsonCommonModule { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java index 15928f40e..06bf53637 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -3,7 +3,11 @@ import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; /** + * This module is no longer maintained, it was replaced with new one. + *

* Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND LettuceCacheModule + *

+ * Check documentation for more information */ @Deprecated public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java index 7e4b16919..472ec3069 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -1,8 +1,13 @@ package ru.tinkoff.kora.cache.redis; /** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use dependency - ru.tinkoff.kora:cache-redis-lettuce AND RedisCacheValueMapper + *

+ * Check documentation for more information + *

* Converts cache value into serializer value to store in cache. - * Use dependency - ru.tinkoff.kora:cache-redis-lettuce */ @Deprecated public interface RedisCacheValueMapper { From a1409163c3ff8543267ef574cf9dab7dbe9c0f11 Mon Sep 17 00:00:00 2001 From: Anton Kurako Date: Tue, 11 Mar 2025 10:50:55 +0300 Subject: [PATCH 5/5] CacheOptionalTests updated --- .../processor/CacheOptionalTests.java | 26 ++++++++--------- .../kora/redis/lettuce/LettuceModule.java | 29 +++++-------------- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java index fff753ce4..7fefae37a 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java @@ -40,7 +40,7 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache = newObject("$DummyCacheImpl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache); @@ -83,10 +83,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), 
defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -123,7 +123,7 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache = newObject("$DummyCacheImpl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache); @@ -166,10 +166,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -206,7 +206,7 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache = newObject("$DummyCacheImpl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache); @@ -249,10 +249,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = 
newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -295,10 +295,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -341,10 +341,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java index 7d61a6ddf..26157d8ec 100644 --- a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java +++ 
b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java @@ -18,7 +18,6 @@ import io.lettuce.core.support.ConnectionPoolSupport; import org.apache.commons.pool2.ObjectPool; import org.apache.commons.pool2.impl.GenericObjectPoolConfig; -import ru.tinkoff.kora.application.graph.TypeRef; import ru.tinkoff.kora.application.graph.Wrapped; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.config.common.Config; @@ -70,17 +69,13 @@ default RedisCodec lettuceRedisIntegerCodec() { @DefaultComponent default RedisCodec lettuceRedisCompositeCodec(RedisCodec keyCodec, - RedisCodec valueCodec, - TypeRef keyRef, - TypeRef valueRef) { + RedisCodec valueCodec) { return new LettuceCompositeRedisCodec<>(keyCodec, valueCodec); } @DefaultComponent default Wrapped> lettuceStatefulConnection(AbstractRedisClient redisClient, - RedisCodec codec, - TypeRef keyRef, - TypeRef valueRef) { + RedisCodec codec) { if (redisClient instanceof io.lettuce.core.RedisClient rc) { return new LettuceLifecycleConnectionWrapper<>(() -> rc.connect(codec)); } else if (redisClient instanceof RedisClusterClient rcc) { @@ -93,9 +88,7 @@ default Wrapped> lettuceStatefulConnection(Abstr @DefaultComponent default Wrapped>> lettuceSyncConnectionPool(AbstractRedisClient redisClient, LettuceConfig lettuceConfig, - RedisCodec codec, - TypeRef keyRef, - TypeRef valueRef) { + RedisCodec codec) { final GenericObjectPoolConfig> poolConfig = new GenericObjectPoolConfig<>(); poolConfig.setMaxTotal(lettuceConfig.pool().maxTotal()); poolConfig.setMaxIdle(lettuceConfig.pool().maxIdle()); @@ -118,9 +111,7 @@ default Wrapped>> lettuceSyncConnecti @DefaultComponent default Wrapped>> lettuceAsyncConnectionPool(AbstractRedisClient redisClient, LettuceConfig lettuceConfig, - RedisCodec codec, - TypeRef keyRef, - TypeRef valueRef) { + RedisCodec codec) { final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() .maxTotal(lettuceConfig.pool().maxTotal()) 
.maxIdle(lettuceConfig.pool().maxIdle()) @@ -142,9 +133,7 @@ default Wrapped>> lettuceAsyncConnecti } @DefaultComponent - default RedisClusterCommands lettuceRedisClusterSyncCommands(StatefulConnection connection, - TypeRef keyRef, - TypeRef valueRef) { + default RedisClusterCommands lettuceRedisClusterSyncCommands(StatefulConnection connection) { if (connection instanceof StatefulRedisConnection rc) { return rc.sync(); } else if (connection instanceof StatefulRedisClusterConnection rcc) { @@ -155,9 +144,7 @@ default RedisClusterCommands lettuceRedisClusterSyncCommands(Statef } @DefaultComponent - default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands(StatefulConnection connection, - TypeRef keyRef, - TypeRef valueRef) { + default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands(StatefulConnection connection) { if (connection instanceof StatefulRedisConnection rc) { return rc.async(); } else if (connection instanceof StatefulRedisClusterConnection rcc) { @@ -168,9 +155,7 @@ default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands( } @DefaultComponent - default RedisClusterReactiveCommands lettuceRedisClusterReactiveCommands(StatefulConnection connection, - TypeRef keyRef, - TypeRef valueRef) { + default RedisClusterReactiveCommands lettuceRedisClusterReactiveCommands(StatefulConnection connection) { if (connection instanceof StatefulRedisConnection rc) { return rc.reactive(); } else if (connection instanceof StatefulRedisClusterConnection rcc) {