diff --git a/cache/cache-annotation-processor/build.gradle b/cache/cache-annotation-processor/build.gradle index 3365658e8..923d650be 100644 --- a/cache/cache-annotation-processor/build.gradle +++ b/cache/cache-annotation-processor/build.gradle @@ -8,7 +8,7 @@ dependencies { testImplementation testFixtures(project(":annotation-processor-common")) testImplementation project(":internal:test-logging") testImplementation project(":cache:cache-caffeine") - testImplementation project(":cache:cache-redis") + testImplementation project(":cache:cache-redis-lettuce") testImplementation project(":json:json-common") testImplementation project(":config:config-common") } diff --git a/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java b/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java index 9e30e4312..802275897 100644 --- a/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java +++ b/cache/cache-annotation-processor/src/main/java/ru/tinkoff/kora/cache/annotation/processor/CacheAnnotationProcessor.java @@ -29,17 +29,23 @@ public class CacheAnnotationProcessor extends AbstractKoraProcessor { private static final ClassName ANNOTATION_CACHE = ClassName.get("ru.tinkoff.kora.cache.annotation", "Cache"); - private static final ClassName CAFFEINE_TELEMETRY = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheTelemetry"); + private static final ClassName CACHE_TELEMETRY_FACTORY = ClassName.get("ru.tinkoff.kora.cache.telemetry", "CacheTelemetryFactory"); + private static final ClassName CAFFEINE_CACHE = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCache"); private static final ClassName CAFFEINE_CACHE_FACTORY = ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheFactory"); private static final ClassName CAFFEINE_CACHE_CONFIG = 
ClassName.get("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheConfig"); private static final ClassName CAFFEINE_CACHE_IMPL = ClassName.get("ru.tinkoff.kora.cache.caffeine", "AbstractCaffeineCache"); + @Deprecated private static final ClassName REDIS_TELEMETRY = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheTelemetry"); + @Deprecated + private static final ClassName REDIS_CACHE_OLD_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheClient"); + private static final ClassName REDIS_CACHE = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCache"); private static final ClassName REDIS_CACHE_IMPL = ClassName.get("ru.tinkoff.kora.cache.redis", "AbstractRedisCache"); private static final ClassName REDIS_CACHE_CONFIG = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheConfig"); - private static final ClassName REDIS_CACHE_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheClient"); + private static final ClassName REDIS_CACHE_SYNC_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheClient"); + private static final ClassName REDIS_CACHE_ASYNC_CLIENT = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheAsyncClient"); private static final ClassName REDIS_CACHE_MAPPER_KEY = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheKeyMapper"); private static final ClassName REDIS_CACHE_MAPPER_VALUE = ClassName.get("ru.tinkoff.kora.cache.redis", "RedisCacheValueMapper"); @@ -81,10 +87,10 @@ public boolean process(Set annotations, RoundEnvironment var cacheImplBase = getCacheImplBase(cacheContract, cacheContractType); var implSpec = TypeSpec.classBuilder(getCacheImpl(cacheContract)) - .addModifiers(Modifier.FINAL) + .addModifiers(Modifier.PUBLIC, Modifier.FINAL) .addAnnotation(AnnotationSpec.builder(CommonClassNames.koraGenerated) .addMember("value", CodeBlock.of("$S", CacheAnnotationProcessor.class.getCanonicalName())).build()) - .addMethod(getCacheConstructor(configPath, cacheContractType)) + 
.addMethod(getCacheConstructor(configPath, cacheContractType, cacheContract)) .superclass(cacheImplBase) .addSuperinterface(cacheContract.asType()) .build(); @@ -145,6 +151,16 @@ private ParameterizedTypeName getCacheSuperType(TypeElement candidate) { return null; } + private boolean isRedisDeprecated(TypeElement cacheContract) { + return cacheContract.getInterfaces().stream() + .filter(a -> a instanceof DeclaredType) + .map(a -> ((DeclaredType) a)) + .map(a -> ((TypeElement) a.asElement())) + .filter(a -> ClassName.get(a).equals(REDIS_CACHE)) + .flatMap(a -> a.getAnnotationMirrors().stream()) + .anyMatch(a -> TypeName.get(a.getAnnotationType()).equals(TypeName.get(Deprecated.class))); + } + private TypeName getCacheImplBase(TypeElement cacheContract, ParameterizedTypeName cacheType) { if (cacheType.rawType.equals(CAFFEINE_CACHE)) { return ParameterizedTypeName.get(CAFFEINE_CACHE_IMPL, cacheType.typeArguments.get(0), cacheType.typeArguments.get(1)); @@ -217,8 +233,8 @@ private MethodSpec getCacheRedisKeyMapperForRecord(DeclaredType keyType) { var keyName = "_key" + (i + 1); keyBuilder.addStatement("var $L = $L.apply($T.requireNonNull(key.$L(), $S))", - keyName, mapperName, Objects.class, recordField.getSimpleName().toString(), - "Cache key '%s' field '%s' must be non null".formatted(keyType.asElement().toString(), recordField.getSimpleName().toString())); + keyName, mapperName, Objects.class, recordField.getSimpleName().toString(), + "Cache key '%s' field '%s' must be non null".formatted(keyType.asElement().toString(), recordField.getSimpleName().toString())); if (i == 0) { compositeKeyBuilder.add("var _compositeKey = new byte["); @@ -276,76 +292,153 @@ private MethodSpec getCacheMethodImpl(TypeElement cacheContract, ParameterizedTy .build()) .build()) .addParameter(CAFFEINE_CACHE_FACTORY, "factory") - .addParameter(CAFFEINE_TELEMETRY, "telemetry") - .addStatement("return new $T(config, factory, telemetry)", cacheImplName) + .addParameter(CACHE_TELEMETRY_FACTORY, 
"telemetryFactory") + .addStatement("return new $T(config, factory, telemetryFactory)", cacheImplName) .returns(TypeName.get(cacheContract.asType())) .build(); } + if (cacheType.rawType.equals(REDIS_CACHE)) { - var keyType = cacheType.typeArguments.get(0); - var valueType = cacheType.typeArguments.get(1); - var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); - var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); - - final DeclaredType cacheDeclaredType = cacheContract.getInterfaces().stream() - .filter(i -> ClassName.get(i).equals(cacheType)) - .map(i -> (DeclaredType) i) - .findFirst() - .orElseThrow(); - - var valueParamBuilder = ParameterSpec.builder(valueMapperType, "valueMapper"); - final Set valueTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(1)); - if (!valueTags.isEmpty()) { - valueParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(valueTags)); + if (isRedisDeprecated(cacheContract)) { + return getCacheRedisDeprecatedMethod(cacheContract, cacheType, cacheImplName, methodName); + } else { + return getCacheRedisMethod(cacheContract, cacheType, cacheImplName, methodName); } + } - var keyParamBuilder = ParameterSpec.builder(keyMapperType, "keyMapper"); - final Set keyTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(0)); - if (!keyTags.isEmpty()) { - keyParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(keyTags)); - } + throw new IllegalArgumentException("Unknown cache implementation type: " + cacheType.rawType); + } - return MethodSpec.methodBuilder(methodName) - .addModifiers(Modifier.DEFAULT, Modifier.PUBLIC) - .addParameter(ParameterSpec.builder(REDIS_CACHE_CONFIG, "config") - .addAnnotation(AnnotationSpec.builder(CommonClassNames.tag) - .addMember("value", "$T.class", cacheContract) - .build()) + private MethodSpec getCacheRedisMethod(TypeElement cacheContract, + ParameterizedTypeName cacheType, + ClassName cacheImplName, + String methodName) { 
+ var keyType = cacheType.typeArguments.get(0); + var valueType = cacheType.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + + final DeclaredType cacheDeclaredType = cacheContract.getInterfaces().stream() + .filter(i -> ClassName.get(i).equals(cacheType)) + .map(i -> (DeclaredType) i) + .findFirst() + .orElseThrow(); + + var valueParamBuilder = ParameterSpec.builder(valueMapperType, "valueMapper"); + final Set valueTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(1)); + if (!valueTags.isEmpty()) { + valueParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(valueTags)); + } + + var keyParamBuilder = ParameterSpec.builder(keyMapperType, "keyMapper"); + final Set keyTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(0)); + if (!keyTags.isEmpty()) { + keyParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(keyTags)); + } + + return MethodSpec.methodBuilder(methodName) + .addModifiers(Modifier.DEFAULT, Modifier.PUBLIC) + .addParameter(ParameterSpec.builder(REDIS_CACHE_CONFIG, "config") + .addAnnotation(AnnotationSpec.builder(CommonClassNames.tag) + .addMember("value", "$T.class", cacheContract) .build()) - .addParameter(REDIS_CACHE_CLIENT, "redisClient") - .addParameter(REDIS_TELEMETRY, "telemetry") - .addParameter(keyParamBuilder.build()) - .addParameter(valueParamBuilder.build()) - .addStatement("return new $T(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) - .returns(TypeName.get(cacheContract.asType())) - .build(); + .build()) + .addParameter(REDIS_CACHE_SYNC_CLIENT, "redisSyncClient") + .addParameter(REDIS_CACHE_ASYNC_CLIENT, "redisAsyncClient") + .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") + .addParameter(keyParamBuilder.build()) + .addParameter(valueParamBuilder.build()) + .addStatement("return new $T(config, redisSyncClient, 
redisAsyncClient, telemetryFactory, keyMapper, valueMapper)", cacheImplName) + .returns(TypeName.get(cacheContract.asType())) + .build(); + } + + @Deprecated + private MethodSpec getCacheRedisDeprecatedMethod(TypeElement cacheContract, + ParameterizedTypeName cacheType, + ClassName cacheImplName, + String methodName) { + var keyType = cacheType.typeArguments.get(0); + var valueType = cacheType.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + + final DeclaredType cacheDeclaredType = cacheContract.getInterfaces().stream() + .filter(i -> ClassName.get(i).equals(cacheType)) + .map(i -> (DeclaredType) i) + .findFirst() + .orElseThrow(); + + var valueParamBuilder = ParameterSpec.builder(valueMapperType, "valueMapper"); + final Set valueTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(1)); + if (!valueTags.isEmpty()) { + valueParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(valueTags)); + } + + var keyParamBuilder = ParameterSpec.builder(keyMapperType, "keyMapper"); + final Set keyTags = TagUtils.parseTagValue(cacheDeclaredType.getTypeArguments().get(0)); + if (!keyTags.isEmpty()) { + keyParamBuilder.addAnnotation(TagUtils.makeAnnotationSpec(keyTags)); } - throw new IllegalArgumentException("Unknown cache type: " + cacheType.rawType); + + return MethodSpec.methodBuilder(methodName) + .addModifiers(Modifier.DEFAULT, Modifier.PUBLIC) + .addParameter(ParameterSpec.builder(REDIS_CACHE_CONFIG, "config") + .addAnnotation(AnnotationSpec.builder(CommonClassNames.tag) + .addMember("value", "$T.class", cacheContract) + .build()) + .build()) + .addParameter(REDIS_CACHE_OLD_CLIENT, "redisClient") + .addParameter(REDIS_TELEMETRY, "telemetry") + .addParameter(keyParamBuilder.build()) + .addParameter(valueParamBuilder.build()) + .addStatement("return new $T(config, redisClient, telemetry, keyMapper, valueMapper)", 
cacheImplName) + .returns(TypeName.get(cacheContract.asType())) + .build(); } - private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName cacheContract) { + private MethodSpec getCacheConstructor(String configPath, ParameterizedTypeName cacheContract, TypeElement cacheElement) { if (cacheContract.rawType.equals(CAFFEINE_CACHE)) { return MethodSpec.constructorBuilder() + .addModifiers(Modifier.PUBLIC) .addParameter(CAFFEINE_CACHE_CONFIG, "config") .addParameter(CAFFEINE_CACHE_FACTORY, "factory") - .addParameter(CAFFEINE_TELEMETRY, "telemetry") - .addStatement("super($S, config, factory, telemetry)", configPath) + .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") + .addStatement("super($S, config, factory, telemetryFactory)", configPath) .build(); } if (cacheContract.rawType.equals(REDIS_CACHE)) { - var keyType = cacheContract.typeArguments.get(0); - var valueType = cacheContract.typeArguments.get(1); - var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); - var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); - return MethodSpec.constructorBuilder() - .addParameter(REDIS_CACHE_CONFIG, "config") - .addParameter(REDIS_CACHE_CLIENT, "redisClient") - .addParameter(REDIS_TELEMETRY, "telemetry") - .addParameter(keyMapperType, "keyMapper") - .addParameter(valueMapperType, "valueMapper") - .addStatement("super($S, config, redisClient, telemetry, keyMapper, valueMapper)", configPath) - .build(); + if (isRedisDeprecated(cacheElement)) { + var keyType = cacheContract.typeArguments.get(0); + var valueType = cacheContract.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + return MethodSpec.constructorBuilder() + .addModifiers(Modifier.PUBLIC) + .addParameter(REDIS_CACHE_CONFIG, "config") + .addParameter(REDIS_CACHE_OLD_CLIENT, "redisClient") + 
.addParameter(REDIS_TELEMETRY, "telemetry") + .addParameter(keyMapperType, "keyMapper") + .addParameter(valueMapperType, "valueMapper") + .addStatement("super($S, config, redisClient, telemetry, keyMapper, valueMapper)", configPath) + .build(); + } else { + var keyType = cacheContract.typeArguments.get(0); + var valueType = cacheContract.typeArguments.get(1); + var keyMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_KEY, keyType); + var valueMapperType = ParameterizedTypeName.get(REDIS_CACHE_MAPPER_VALUE, valueType); + return MethodSpec.constructorBuilder() + .addModifiers(Modifier.PUBLIC) + .addParameter(REDIS_CACHE_CONFIG, "config") + .addParameter(REDIS_CACHE_SYNC_CLIENT, "redisSyncClient") + .addParameter(REDIS_CACHE_ASYNC_CLIENT, "redisAsyncClient") + .addParameter(CACHE_TELEMETRY_FACTORY, "telemetryFactory") + .addParameter(keyMapperType, "keyMapper") + .addParameter(valueMapperType, "valueMapper") + .addStatement("super($S, config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper)", configPath) + .build(); + } } throw new IllegalArgumentException("Unknown cache type: " + cacheContract.rawType); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java index c72dcbb3e..ffd8fcace 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheAopTests.java @@ -41,7 +41,7 @@ private CacheableAsync getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + 
caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java index 441b28392..252a14863 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyAopTests.java @@ -10,7 +10,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -51,7 +50,7 @@ private CacheableAsyncMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -62,7 +61,7 @@ private CacheableAsyncMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key 
-> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java index e0ca38474..65368c2ee 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheManyOptionalAopTests.java @@ -7,7 +7,6 @@ import ru.tinkoff.kora.aop.annotation.processor.AopAnnotationProcessor; import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache21; import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache22; -import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncMany; import ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncManyOptional; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; @@ -51,7 +50,7 @@ private CacheableAsyncManyOptional getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -62,7 +61,7 @@ private CacheableAsyncManyOptional getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), 
redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java index 3a293d91a..8ba607af3 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneAopTests.java @@ -41,7 +41,7 @@ private CacheableAsyncOne getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache11) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java index cd01760bf..57b6098d5 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManyAopTests.java @@ -9,7 +9,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache12; import 
ru.tinkoff.kora.cache.annotation.processor.testdata.async.CacheableAsyncOneMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -49,7 +48,7 @@ private CacheableAsyncOneMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +59,7 @@ private CacheableAsyncOneMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache12) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper()); var serviceClass = classLoader.loadClass(CACHED_SERVICE); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java index 7247ac7cf..25f3fa13c 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOneManySyncAopTests.java @@ -50,7 +50,7 @@ private CacheableAsyncOneManySync getService() { final Constructor cacheConstructor1 = 
cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +60,7 @@ private CacheableAsyncOneManySync getService() { final Constructor cacheConstructor2 = cacheClass2.getDeclaredConstructors()[0]; cacheConstructor2.setAccessible(true); cache2 = (DummyCache13) cacheConstructor2.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java index bfcb32c37..b7963a7f2 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/AsyncCacheOptionalAopTests.java @@ -42,7 +42,7 @@ private CacheableAsyncOptional getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git 
a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java index fff753ce4..7fefae37a 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheOptionalTests.java @@ -40,7 +40,7 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache = newObject("$DummyCacheImpl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache); @@ -83,10 +83,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -123,7 +123,7 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache = newObject("$DummyCacheImpl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache); @@ -166,10 +166,10 @@ public 
void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -206,7 +206,7 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache = newObject("$DummyCacheImpl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache); @@ -249,10 +249,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -295,10 +295,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), 
defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); @@ -341,10 +341,10 @@ public void evictValue(String arg1) { compileResult.assertSuccess(); var cache1 = newObject("$DummyCache1Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache1).isNotNull(); var cache2 = newObject("$DummyCache2Impl", CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); assertThat(cache2).isNotNull(); var service = newObject("$CacheableSync__AopProxy", cache1, cache2); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java index 6d5b868a8..3eed2dc0b 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/CacheRunner.java @@ -3,8 +3,10 @@ import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheConfig; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; import ru.tinkoff.kora.cache.redis.RedisCacheClient; import ru.tinkoff.kora.cache.redis.RedisCacheConfig; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.nio.ByteBuffer; import java.time.Duration; @@ -37,6 
+39,11 @@ public Duration expireAfterAccess() { public Integer initialSize() { return null; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } @@ -59,11 +66,16 @@ public Duration expireAfterWrite() { public Duration expireAfterAccess() { return null; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } - public static RedisCacheClient lettuceClient(final Map cache) { - return new RedisCacheClient() { + public static RedisCacheAsyncClient lettuceAsyncClient(final Map cache) { + return new RedisCacheAsyncClient() { @Override public CompletionStage get(byte[] key) { var r = cache.get(ByteBuffer.wrap(key)); @@ -94,24 +106,24 @@ public CompletionStage> getex(byte[][] keys, long expireAfte } @Override - public CompletionStage set(byte[] key, byte[] value) { + public CompletionStage set(byte[] key, byte[] value) { cache.put(ByteBuffer.wrap(key), ByteBuffer.wrap(value)); - return CompletableFuture.completedFuture(true); + return CompletableFuture.completedFuture(null); } @Override - public CompletionStage mset(Map keyAndValue) { + public CompletionStage mset(Map keyAndValue) { keyAndValue.forEach((k, v) -> cache.put(ByteBuffer.wrap(k), ByteBuffer.wrap(v))); - return CompletableFuture.completedFuture(true); + return CompletableFuture.completedFuture(null); } @Override - public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { return mset(keyAndValue); } @Override - public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { return set(key, value); } @@ -130,9 +142,81 @@ public CompletionStage del(byte[][] keys) { } @Override - public CompletionStage flushAll() { + public CompletionStage flushAll() { + cache.clear(); + return CompletableFuture.completedFuture(null); + } + }; + } + + public static RedisCacheClient 
lettuceSyncClient(final Map cache) { + return new RedisCacheClient() { + @Override + public byte[] get(byte[] key) { + var r = cache.get(ByteBuffer.wrap(key)); + return (r == null) + ? null + : r.array(); + } + + @Nonnull + @Override + public Map mget(byte[][] keys) { + final Map result = new HashMap<>(); + for (byte[] key : keys) { + Optional.ofNullable(cache.get(ByteBuffer.wrap(key))).ifPresent(r -> result.put(key, r.array())); + } + return result; + } + + @Override + public byte[] getex(byte[] key, long expireAfterMillis) { + return get(key); + } + + @Nonnull + @Override + public Map getex(byte[][] keys, long expireAfterMillis) { + return mget(keys); + } + + @Override + public void set(byte[] key, byte[] value) { + cache.put(ByteBuffer.wrap(key), ByteBuffer.wrap(value)); + } + + @Override + public void mset(Map keyAndValue) { + keyAndValue.forEach((k, v) -> cache.put(ByteBuffer.wrap(k), ByteBuffer.wrap(v))); + } + + @Override + public void psetex(Map keyAndValue, long expireAfterMillis) { + mset(keyAndValue); + } + + @Override + public void psetex(byte[] key, byte[] value, long expireAfterMillis) { + set(key, value); + } + + @Override + public long del(byte[] key) { + return cache.remove(ByteBuffer.wrap(key)) == null ? 
0L : 1L; + } + + @Override + public long del(byte[][] keys) { + long counter = 0; + for (byte[] key : keys) { + counter += del(key); + } + return counter; + } + + @Override + public void flushAll() { cache.clear(); - return CompletableFuture.completedFuture(true); } }; } diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java index 3faed39ac..e24eb47ab 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheAopTests.java @@ -48,7 +48,7 @@ private CacheableMono getService() { cacheConstructor.setAccessible(true); final Map cacheBuf = new HashMap<>(); cache = (DummyCache22) cacheConstructor.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cacheBuf), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cacheBuf), CacheRunner.lettuceAsyncClient(cacheBuf), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java index be38e6b67..bb8aa68bd 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyAopTests.java @@ -10,7 +10,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoMany; 
import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -53,7 +52,7 @@ private CacheableMonoMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -64,7 +63,7 @@ private CacheableMonoMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java index 692b5d6e5..97c3d41d4 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheManyOptionalAopTests.java @@ -7,7 +7,6 @@ import ru.tinkoff.kora.aop.annotation.processor.AopAnnotationProcessor; import 
ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache21; import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache22; -import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoMany; import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoManyOptional; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; @@ -52,7 +51,7 @@ private CacheableMonoManyOptional getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -63,7 +62,7 @@ private CacheableMonoManyOptional getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java index 1b16ba04d..69a1a15db 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java +++ 
b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneAopTests.java @@ -42,7 +42,7 @@ private CacheableMonoOne getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache11) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java index 178e5b1e5..e56303708 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManyAopTests.java @@ -9,7 +9,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache12; import ru.tinkoff.kora.cache.annotation.processor.testdata.reactive.mono.CacheableMonoOneMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -50,7 +49,7 @@ private CacheableMonoOneMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = 
classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -61,7 +60,7 @@ private CacheableMonoOneMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache12) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper()); var serviceClass = classLoader.loadClass(CACHED_SERVICE); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java index 245b68962..504c10048 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOneManySyncAopTests.java @@ -51,7 +51,7 @@ private CacheableMonoOneManySync getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -61,7 +61,7 @@ private CacheableMonoOneManySync getService() { final Constructor cacheConstructor2 = cacheClass2.getDeclaredConstructors()[0]; cacheConstructor2.setAccessible(true); cache2 = (DummyCache13) cacheConstructor2.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + 
caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java index 6cb14402d..597212fb2 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/MonoCacheOptionalAopTests.java @@ -43,7 +43,7 @@ private CacheableMonoOptional getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java index ddfcce1bd..45e9ee5f6 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheAopTests.java @@ -41,7 +41,7 @@ private CacheableSync getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache21) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - 
caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java index ee57a589f..7bbcf6cc9 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheManyAopTests.java @@ -10,7 +10,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testdata.sync.CacheableSyncMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; import ru.tinkoff.kora.cache.redis.RedisCacheKeyMapper; -import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -51,7 +50,7 @@ private CacheableSyncMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache21) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -62,7 +61,7 @@ private CacheableSyncMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache22) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), 
defaultCacheTelemetryFactory(null, null, null), (RedisCacheKeyMapper) key -> { var _key1 = key.k1().getBytes(StandardCharsets.UTF_8); var _key2 = key.k2().toString().getBytes(StandardCharsets.UTF_8); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java index b3d969882..da00238dd 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneAopTests.java @@ -41,7 +41,7 @@ private CacheableSyncOne getService() { final Constructor cacheConstructor = cacheClass.getDeclaredConstructors()[0]; cacheConstructor.setAccessible(true); cache = (DummyCache11) cacheConstructor.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java index 6cb067969..a38e5c72b 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManyAopTests.java @@ -9,7 +9,6 @@ import ru.tinkoff.kora.cache.annotation.processor.testcache.DummyCache12; import ru.tinkoff.kora.cache.annotation.processor.testdata.sync.CacheableSyncOneMany; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheModule; -import 
ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; import ru.tinkoff.kora.cache.redis.RedisCacheModule; import java.lang.reflect.Constructor; @@ -49,7 +48,7 @@ private CacheableSyncOneMany getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +59,7 @@ private CacheableSyncOneMany getService() { cacheConstructor2.setAccessible(true); final Map cache = new HashMap<>(); cache2 = (DummyCache12) cacheConstructor2.newInstance(CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), CacheRunner.lettuceAsyncClient(cache), defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper()); var serviceClass = classLoader.loadClass(CACHED_SERVICE); diff --git a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java index 724d72db4..21ab9844a 100644 --- a/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java +++ b/cache/cache-annotation-processor/src/test/java/ru/tinkoff/kora/cache/annotation/processor/SyncCacheOneManySyncAopTests.java @@ -50,7 +50,7 @@ private CacheableSyncOneManySync getService() { final Constructor cacheConstructor1 = cacheClass1.getDeclaredConstructors()[0]; cacheConstructor1.setAccessible(true); cache1 = (DummyCache11) cacheConstructor1.newInstance(CacheRunner.getCaffeineConfig(), - 
caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var cacheClass2 = classLoader.loadClass(CACHED_IMPL_2); if (cacheClass2 == null) { @@ -60,7 +60,7 @@ private CacheableSyncOneManySync getService() { final Constructor cacheConstructor2 = cacheClass2.getDeclaredConstructors()[0]; cacheConstructor2.setAccessible(true); cache2 = (DummyCache13) cacheConstructor2.newInstance(CacheRunner.getCaffeineConfig(), - caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + caffeineCacheFactory(null), defaultCacheTelemetryFactory(null, null, null)); var serviceClass = classLoader.loadClass(CACHED_SERVICE); if (serviceClass == null) { diff --git a/cache/cache-caffeine/build.gradle b/cache/cache-caffeine/build.gradle index 79fccd71d..6ae40f58f 100644 --- a/cache/cache-caffeine/build.gradle +++ b/cache/cache-caffeine/build.gradle @@ -3,9 +3,9 @@ dependencies { compileOnly libs.reactor.core api project(":cache:cache-common") + api libs.caffeine implementation project(":config:config-common") - implementation libs.caffeine testImplementation libs.reactor.core testImplementation testFixtures(project(":annotation-processor-common")) diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java index 934dc69c9..499f7a7cb 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/AbstractCaffeineCache.java @@ -1,28 +1,68 @@ package ru.tinkoff.kora.cache.caffeine; import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; import 
java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; import java.util.function.Function; public abstract class AbstractCaffeineCache implements CaffeineCache { - private final String name; + private static final String ORIGIN = "caffeine"; + private final com.github.benmanes.caffeine.cache.Cache caffeine; - private final CaffeineCacheTelemetry telemetry; + private final CacheTelemetry telemetry; + @Deprecated protected AbstractCaffeineCache(String name, CaffeineCacheConfig config, CaffeineCacheFactory factory, CaffeineCacheTelemetry telemetry) { - this.name = name; this.caffeine = factory.build(name, config); - this.telemetry = telemetry; + this.telemetry = operationName -> { + var telemetryContext = telemetry.create(operationName, name); + return new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (valueFromCache == null) { + telemetryContext.recordSuccess(); + } else { + telemetryContext.recordSuccess(valueFromCache); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + telemetryContext.recordFailure(throwable); + } + }; + }; + } + + protected AbstractCaffeineCache(String name, + CaffeineCacheConfig config, + CaffeineCacheFactory factory, + CacheTelemetryFactory telemetry) { + this.caffeine = factory.build(name, config); + this.telemetry = telemetry.get(config.telemetry(), new CacheTelemetryArgs() { + @Nonnull + @Override + public String cacheName() { + return name; + } + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + }); } @Override @@ -31,7 +71,7 @@ public V get(@Nonnull K key) { return null; } - var telemetryContext = telemetry.create("GET", name); + var telemetryContext = telemetry.get("GET"); var value = caffeine.getIfPresent(key); telemetryContext.recordSuccess(value); return value; @@ -44,18 
+84,18 @@ public Map get(@Nonnull Collection keys) { return Collections.emptyMap(); } - var telemetryContext = telemetry.create("GET_MANY", name); + var telemetryContext = telemetry.get("GET_MANY"); var values = caffeine.getAllPresent(keys); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(values); return values; } @Nonnull @Override public Map getAll() { - var telemetryContext = telemetry.create("GET_ALL", name); + var telemetryContext = telemetry.get("GET_ALL"); var values = Collections.unmodifiableMap(caffeine.asMap()); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return values; } @@ -65,9 +105,9 @@ public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction return mappingFunction.apply(key); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT"); var value = caffeine.get(key, mappingFunction); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } @@ -79,9 +119,9 @@ public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function< return mappingFunction.apply(Collections.emptySet()); } - var telemetryContext = telemetry.create("COMPUTE_IF_ABSENT_MANY", name); + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT_MANY"); var value = caffeine.getAll(keys, ks -> mappingFunction.apply((Set) ks)); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } @@ -91,9 +131,9 @@ public V put(@Nonnull K key, @Nonnull V value) { return value; } - var telemetryContext = telemetry.create("PUT", name); + var telemetryContext = telemetry.get("PUT"); caffeine.put(key, value); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return value; } @@ -104,34 +144,34 @@ public Map put(@Nonnull Map keyAndValues) { return Collections.emptyMap(); } - var telemetryContext = telemetry.create("PUT_MANY", name); + var telemetryContext = 
telemetry.get("PUT_MANY"); caffeine.putAll(keyAndValues); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); return keyAndValues; } @Override public void invalidate(@Nonnull K key) { if (key != null) { - var telemetryContext = telemetry.create("INVALIDATE", name); + var telemetryContext = telemetry.get("INVALIDATE"); caffeine.invalidate(key); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); } } @Override public void invalidate(@Nonnull Collection keys) { if (keys != null && !keys.isEmpty()) { - var telemetryContext = telemetry.create("INVALIDATE_MANY", name); + var telemetryContext = telemetry.get("INVALIDATE_MANY"); caffeine.invalidateAll(keys); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); } } @Override public void invalidateAll() { - var telemetryContext = telemetry.create("INVALIDATE_ALL", name); + var telemetryContext = telemetry.get("INVALIDATE_ALL"); caffeine.invalidateAll(); - telemetryContext.recordSuccess(); + telemetryContext.recordSuccess(null); } } diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java index a1274eddd..2b59a6710 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCache.java @@ -1,7 +1,6 @@ package ru.tinkoff.kora.cache.caffeine; import jakarta.annotation.Nonnull; -import ru.tinkoff.kora.cache.AsyncCache; import ru.tinkoff.kora.cache.Cache; import java.util.Map; diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java index 2119fa2f5..68f46947c 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java +++ 
b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheConfig.java @@ -3,6 +3,7 @@ import jakarta.annotation.Nullable; import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.time.Duration; @@ -21,4 +22,6 @@ default Long maximumSize() { @Nullable Integer initialSize(); + + TelemetryConfig telemetry(); } diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java index 9904ad4ee..d0ad0f6d8 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheModule.java @@ -4,14 +4,38 @@ import com.github.benmanes.caffeine.cache.Caffeine; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; -import ru.tinkoff.kora.cache.telemetry.CacheMetrics; -import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.cache.CacheModule; +import ru.tinkoff.kora.cache.telemetry.*; import ru.tinkoff.kora.common.DefaultComponent; -public interface CaffeineCacheModule { +public interface CaffeineCacheModule extends CacheModule { + @Deprecated(forRemoval = true) @DefaultComponent - default CaffeineCacheTelemetry caffeineCacheTelemetry(@Nullable CacheMetrics metrics, @Nullable CacheTracer tracer) { + default CaffeineCacheTelemetry caffeineCacheTelemetry(@Nullable CacheMetricsFactory metricsFactory, + @Nullable CacheTracerFactory tracerFactory, + CaffeineCacheConfig config) { + var args = new CacheTelemetryArgs() { + @Nonnull + @Override + public String cacheName() { + return ""; + } + + @Nonnull + @Override + public String origin() { + return "caffeine"; + } + }; + + CacheMetrics metrics = (metricsFactory == null) + ? 
null + : metricsFactory.get(config.telemetry().metrics(), args); + CacheTracer tracer = (tracerFactory == null) + ? null + : tracerFactory.get(config.telemetry().tracing(), args); + return new CaffeineCacheTelemetry(metrics, tracer); } diff --git a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java index 972e55a64..70efdec02 100644 --- a/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java +++ b/cache/cache-caffeine/src/main/java/ru/tinkoff/kora/cache/caffeine/CaffeineCacheTelemetry.java @@ -8,6 +8,7 @@ import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; import ru.tinkoff.kora.cache.telemetry.CacheTracer; +@Deprecated public final class CaffeineCacheTelemetry { private static final String ORIGIN = "caffeine"; diff --git a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java index 4c3b50a31..180d9b0c9 100644 --- a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java +++ b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/CacheRunner.java @@ -3,6 +3,7 @@ import jakarta.annotation.Nullable; import org.junit.jupiter.api.Assertions; import ru.tinkoff.kora.cache.caffeine.testdata.DummyCache; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; import java.time.Duration; @@ -27,12 +28,19 @@ public Duration expireAfterAccess() { public Integer initialSize() { return null; } + + @Override + public TelemetryConfig telemetry() { + return null; + } }; } protected DummyCache createCache() { try { - return new DummyCache(getConfig(), caffeineCacheFactory(null), caffeineCacheTelemetry(null, null)); + return new DummyCache(getConfig(), + caffeineCacheFactory(null), + defaultCacheTelemetryFactory(null, null, null)); } catch 
(Exception e) { throw new IllegalStateException(e); } diff --git a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java index b56c13c15..6bfe9b9c2 100644 --- a/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java +++ b/cache/cache-caffeine/src/test/java/ru/tinkoff/kora/cache/caffeine/testdata/DummyCache.java @@ -3,11 +3,11 @@ import ru.tinkoff.kora.cache.caffeine.AbstractCaffeineCache; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheConfig; import ru.tinkoff.kora.cache.caffeine.CaffeineCacheFactory; -import ru.tinkoff.kora.cache.caffeine.CaffeineCacheTelemetry; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; public final class DummyCache extends AbstractCaffeineCache { - public DummyCache(CaffeineCacheConfig config, CaffeineCacheFactory factory, CaffeineCacheTelemetry telemetry) { - super("dummy", config, factory, telemetry); + public DummyCache(CaffeineCacheConfig config, CaffeineCacheFactory factory, CacheTelemetryFactory telemetryFactory) { + super("dummy", config, factory, telemetryFactory); } } diff --git a/cache/cache-common/build.gradle b/cache/cache-common/build.gradle index 2920f3043..67c895fcd 100644 --- a/cache/cache-common/build.gradle +++ b/cache/cache-common/build.gradle @@ -1,5 +1,7 @@ dependencies { api project(":common") + api project(":logging:logging-common") + api project(":telemetry:telemetry-common") testImplementation project(":internal:test-logging") } diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java index da0d0839c..23386eade 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncCache.java @@ -9,7 +9,6 @@ import java.util.concurrent.CompletableFuture; 
import java.util.concurrent.CompletionStage; import java.util.function.Function; -import java.util.stream.Collectors; /** * Represents Async Cache contract. diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java index 3f51c8e93..23cab16a2 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/AsyncFacadeCacheBuilder.java @@ -6,7 +6,6 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.function.Function; -import java.util.stream.Collectors; final class AsyncFacadeCacheBuilder implements AsyncCache.Builder { diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java index ed16caa33..8ef24c718 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/Cache.java @@ -7,9 +7,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Set; -import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import java.util.stream.Collectors; /** * Represents Synchronous Cache contract. 
diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java new file mode 100644 index 000000000..43c2b5eb1 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/CacheModule.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.*; +import ru.tinkoff.kora.common.DefaultComponent; + +public interface CacheModule { + + @DefaultComponent + default CacheLoggerFactory defaultCacheLoggerFactory() { + return new Sl4fjCacheLoggerFactory(); + } + + @DefaultComponent + default CacheTelemetryFactory defaultCacheTelemetryFactory(@Nullable CacheLoggerFactory loggerFactory, + @Nullable CacheMetricsFactory metricsFactory, + @Nullable CacheTracerFactory tracerFactory) { + return new DefaultCacheTelemetryFactory(loggerFactory, metricsFactory, tracerFactory); + } +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java index 2aace26ec..bba677580 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/LoadableCacheImpl.java @@ -23,7 +23,7 @@ final class LoadableCacheImpl implements LoadableCache { public V get(@Nonnull K key) { return cache.computeIfAbsent(key, k -> { final Map result = cacheLoader.apply(Set.of(k)); - if(result.isEmpty()) { + if (result.isEmpty()) { return null; } else { return result.values().iterator().next(); diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java new file mode 100644 index 000000000..cb90ed9cb --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLogger.java @@ -0,0 +1,13 @@ +package 
ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; + +public interface CacheLogger { + + void logStart(@Nonnull CacheTelemetryOperation operation); + + void logSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache); + + void logFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java new file mode 100644 index 000000000..dfeb3326f --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheLoggerFactory.java @@ -0,0 +1,10 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheLoggerFactory { + + @Nullable + CacheLogger get(TelemetryConfig.LogConfig logging, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java index eac708ea4..d2e733ed2 100644 --- a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetrics.java @@ -7,5 +7,5 @@ public interface CacheMetrics { void recordSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache); - void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable throwable); + void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception); } diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java 
b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java new file mode 100644 index 000000000..a4b69308b --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheMetricsFactory.java @@ -0,0 +1,10 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheMetricsFactory { + + @Nullable + CacheMetrics get(TelemetryConfig.MetricsConfig config, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java new file mode 100644 index 000000000..ddee56206 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetry.java @@ -0,0 +1,16 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; + +public interface CacheTelemetry { + + interface CacheTelemetryContext { + + void recordSuccess(@Nullable Object valueFromCache); + + void recordFailure(@Nullable Throwable throwable); + } + + CacheTelemetryContext get(@Nonnull String operationName); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java new file mode 100644 index 000000000..050ca3dbf --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryArgs.java @@ -0,0 +1,12 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; + +public interface CacheTelemetryArgs { + + @Nonnull + String cacheName(); + + @Nonnull + String origin(); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java 
b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java new file mode 100644 index 000000000..0fadbb6d5 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTelemetryFactory.java @@ -0,0 +1,8 @@ +package ru.tinkoff.kora.cache.telemetry; + +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheTelemetryFactory { + + CacheTelemetry get(TelemetryConfig telemetryConfig, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java new file mode 100644 index 000000000..eeedf9a50 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/CacheTracerFactory.java @@ -0,0 +1,10 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public interface CacheTracerFactory { + + @Nullable + CacheTracer get(TelemetryConfig.TracingConfig tracing, CacheTelemetryArgs args); +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java new file mode 100644 index 000000000..3282cdad5 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/DefaultCacheTelemetryFactory.java @@ -0,0 +1,109 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +public final class DefaultCacheTelemetryFactory implements CacheTelemetryFactory { + + private static final CacheTelemetry.CacheTelemetryContext EMPTY_CONTEXT = new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(@Nullable Object 
valueFromCache) { + + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + + } + }; + private static final CacheTelemetry EMPTY_TELEMETRY = operationName -> EMPTY_CONTEXT; + + @Nullable + private final CacheLoggerFactory loggerFactory; + @Nullable + private final CacheTracerFactory tracingFactory; + @Nullable + private final CacheMetricsFactory metricsFactory; + + public DefaultCacheTelemetryFactory(@Nullable CacheLoggerFactory loggerFactory, + @Nullable CacheMetricsFactory metricsFactory, + @Nullable CacheTracerFactory tracingFactory) { + this.loggerFactory = loggerFactory; + this.metricsFactory = metricsFactory; + this.tracingFactory = tracingFactory; + } + + @Override + public CacheTelemetry get(TelemetryConfig config, CacheTelemetryArgs args) { + var tracing = this.tracingFactory == null ? null : this.tracingFactory.get(config.tracing(), args); + var metrics = this.metricsFactory == null ? null : this.metricsFactory.get(config.metrics(), args); + var logger = this.loggerFactory == null ? 
null : this.loggerFactory.get(config.logging(), args); + if (tracing == null && metrics == null && logger == null) { + return EMPTY_TELEMETRY; + } + + return new DefaultCacheTelemetry(args, tracing, metrics, logger); + } + + private record Operation(String name, String cacheName, String origin) implements CacheTelemetryOperation {} + + private static final class DefaultCacheTelemetry implements CacheTelemetry { + + private final CacheTelemetryArgs args; + @Nullable + private final CacheTracer tracer; + @Nullable + private final CacheMetrics metrics; + @Nullable + private final CacheLogger logger; + + public DefaultCacheTelemetry(CacheTelemetryArgs args, + @Nullable CacheTracer tracer, + @Nullable CacheMetrics metrics, + @Nullable CacheLogger logger) { + this.args = args; + this.tracer = tracer; + this.metrics = metrics; + this.logger = logger; + } + + @Override + public CacheTelemetryContext get(@Nonnull String operationName) { + var operation = new Operation(operationName, args.cacheName(), args.origin()); + + var startedInNanos = System.nanoTime(); + if (logger != null) { + logger.logStart(operation); + } + + final CacheTracer.CacheSpan span = (tracer != null) + ? 
tracer.trace(operation) + : null; + + return new CacheTelemetryContext() { + @Override + public void recordSuccess(@Nullable Object valueFromCache) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordSuccess(operation, durationInNanos, valueFromCache); + } + if (span != null) { + span.recordSuccess(); + } + } + + @Override + public void recordFailure(@Nullable Throwable throwable) { + if (metrics != null) { + final long durationInNanos = System.nanoTime() - startedInNanos; + metrics.recordFailure(operation, durationInNanos, throwable); + } + if (span != null) { + span.recordFailure(throwable); + } + } + }; + } + } +} diff --git a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java new file mode 100644 index 000000000..803e815f4 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLogger.java @@ -0,0 +1,80 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import org.slf4j.Logger; +import ru.tinkoff.kora.logging.common.arg.StructuredArgument; + +public class Sl4fjCacheLogger implements CacheLogger { + + private final Logger startLogger; + private final Logger finishLogger; + + public Sl4fjCacheLogger(Logger requestLogger, Logger finishLogger) { + this.startLogger = requestLogger; + this.finishLogger = finishLogger; + } + + @Override + public void logStart(@Nonnull CacheTelemetryOperation operation) { + var marker = StructuredArgument.marker("cacheOperation", gen -> { + gen.writeStartObject(); + gen.writeStringField("name", operation.name()); + gen.writeStringField("cache", operation.cacheName()); + gen.writeStringField("origin", operation.origin()); + gen.writeEndObject(); + }); + + startLogger.debug(marker, "Operation '{}' for cache '{}' started", + operation.name(), 
operation.cacheName()); + } + + @Override + public void logSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache) { + var marker = StructuredArgument.marker("cacheOperation", gen -> { + gen.writeStartObject(); + gen.writeStringField("name", operation.name()); + gen.writeStringField("cache", operation.cacheName()); + gen.writeStringField("origin", operation.origin()); + gen.writeNumberField("processingTime", durationInNanos / 1_000_000); + gen.writeEndObject(); + }); + + if (operation.name().startsWith("GET")) { + if (valueFromCache == null) { + finishLogger.debug(marker, "Operation '{}' for cache '{}' didn't retried value", + operation.name(), operation.cacheName()); + } else { + finishLogger.debug(marker, "Operation '{}' for cache '{}' retried value", + operation.name(), operation.cacheName()); + } + } else { + finishLogger.debug(marker, "Operation '{}' for cache '{}' completed", + operation.name(), operation.cacheName()); + } + } + + @Override + public void logFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception) { + var marker = StructuredArgument.marker("cacheOperation", gen -> { + gen.writeStartObject(); + gen.writeStringField("name", operation.name()); + gen.writeStringField("cache", operation.cacheName()); + gen.writeStringField("origin", operation.origin()); + gen.writeNumberField("processingTime", durationInNanos / 1_000_000); + if (exception != null) { + var exceptionType = exception.getClass().getCanonicalName(); + gen.writeStringField("exceptionType", exceptionType); + } + gen.writeEndObject(); + }); + + if (exception != null) { + finishLogger.warn(marker, "Operation '{}' failed for cache '{}' with message: {}", + operation.name(), operation.cacheName(), exception.getMessage()); + } else { + finishLogger.warn(marker, "Operation '{}' failed for cache '{}'", + operation.name(), operation.cacheName()); + } + } +} diff --git 
a/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java new file mode 100644 index 000000000..c28408756 --- /dev/null +++ b/cache/cache-common/src/main/java/ru/tinkoff/kora/cache/telemetry/Sl4fjCacheLoggerFactory.java @@ -0,0 +1,23 @@ +package ru.tinkoff.kora.cache.telemetry; + +import jakarta.annotation.Nullable; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.Cache; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Objects; + +public final class Sl4fjCacheLoggerFactory implements CacheLoggerFactory { + + @Nullable + @Override + public CacheLogger get(TelemetryConfig.LogConfig logging, CacheTelemetryArgs args) { + if (Objects.requireNonNullElse(logging.enabled(), false)) { + var startLogger = LoggerFactory.getLogger(Cache.class.getPackageName() + ".start." + args.origin() + "." + args.cacheName()); + var finishLogger = LoggerFactory.getLogger(Cache.class.getPackageName() + ".finish." + args.origin() + "." 
+ args.cacheName()); + return new Sl4fjCacheLogger(startLogger, finishLogger); + } else { + return null; + } + } +} diff --git a/cache/cache-redis-common/build.gradle b/cache/cache-redis-common/build.gradle new file mode 100644 index 000000000..c6218e579 --- /dev/null +++ b/cache/cache-redis-common/build.gradle @@ -0,0 +1,13 @@ +dependencies { + annotationProcessor project(':config:config-annotation-processor') + + api project(":cache:cache-common") + + implementation project(":json:json-common") + implementation project(":config:config-common") + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java new file mode 100644 index 000000000..1ac359ce9 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -0,0 +1,678 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.cache.AsyncCache; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; + +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import java.util.stream.Collectors; + +public abstract class AbstractRedisCache implements AsyncCache { + + private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); + + private static final String ORIGIN = "redis"; + + private final RedisCacheClient redisClient; + 
private final RedisCacheAsyncClient redisAsyncClient; + private final CacheTelemetry telemetry; + private final byte[] keyPrefix; + + private final RedisCacheKeyMapper keyMapper; + private final RedisCacheValueMapper valueMapper; + + private final Long expireAfterAccessMillis; + private final Long expireAfterWriteMillis; + + protected AbstractRedisCache(String name, + RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheAsyncClient redisAsyncClient, + CacheTelemetryFactory telemetryFactory, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + this.redisClient = redisClient; + this.redisAsyncClient = redisAsyncClient; + this.telemetry = telemetryFactory.get(config.telemetry(), new CacheTelemetryArgs() { + @Nonnull + @Override + public String cacheName() { + return name; + } + + @Nonnull + @Override + public String origin() { + return ORIGIN; + } + }); + this.keyMapper = keyMapper; + this.valueMapper = valueMapper; + this.expireAfterAccessMillis = (config.expireAfterAccess() == null) + ? null + : config.expireAfterAccess().toMillis(); + this.expireAfterWriteMillis = (config.expireAfterWrite() == null) + ? null + : config.expireAfterWrite().toMillis(); + + if (config.keyPrefix().isEmpty()) { + this.keyPrefix = null; + } else { + var prefixRaw = config.keyPrefix().getBytes(StandardCharsets.UTF_8); + this.keyPrefix = new byte[prefixRaw.length + RedisCacheKeyMapper.DELIMITER.length]; + System.arraycopy(prefixRaw, 0, this.keyPrefix, 0, prefixRaw.length); + System.arraycopy(RedisCacheKeyMapper.DELIMITER, 0, this.keyPrefix, prefixRaw.length, RedisCacheKeyMapper.DELIMITER.length); + } + } + + @Override + public V get(@Nonnull K key) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.get("GET"); + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map get(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.get("GET_MANY"); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); + + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return Collections.emptyMap(); + } catch (Exception e) { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + } + } + + @Nonnull + @Override + public V put(@Nonnull K key, @Nonnull V value) { + if (key == null || value == null) { + return null; + } + + var telemetryContext = telemetry.get("PUT"); + + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + } + telemetryContext.recordSuccess(null); + return 
value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return value; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return value; + } + } + + @Nonnull + @Override + public Map put(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.get("PUT_MANY"); + + try { + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + redisClient.mset(keyAndValuesAsBytes); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); + } + + telemetryContext.recordSuccess(null); + return keyAndValues; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return keyAndValues; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return keyAndValues; + } + } + + @Override + public V computeIfAbsent(@Nonnull K key, @Nonnull Function mappingFunction) { + if (key == null) { + return null; + } + + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT"); + + V fromCache = null; + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] jsonAsBytes = (expireAfterAccessMillis == null) + ? 
redisClient.get(keyAsBytes) + : redisClient.getex(keyAsBytes, expireAfterAccessMillis); + + fromCache = valueMapper.read(jsonAsBytes); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache != null) { + telemetryContext.recordSuccess(null); + return fromCache; + } + + try { + var value = mappingFunction.apply(key); + if (value != null) { + try { + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + if (expireAfterWriteMillis == null) { + redisClient.set(keyAsBytes, valueAsBytes); + } else { + redisClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(null); + return value; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return null; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return null; + } + } + + @Nonnull + @Override + public Map computeIfAbsent(@Nonnull Collection keys, @Nonnull Function, Map> mappingFunction) { + if (keys == null || keys.isEmpty()) { + return Collections.emptyMap(); + } + + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT_MANY"); + + final Map fromCache = new HashMap<>(); + try { + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + final Map valueByKeys = (expireAfterAccessMillis == null) + ? 
redisClient.mget(keysByBytes) + : redisClient.getex(keysByBytes, expireAfterAccessMillis); + + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (fromCache.size() == keys.size()) { + telemetryContext.recordSuccess(null); + return fromCache; + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + try { + var values = mappingFunction.apply(missingKeys); + if (!values.isEmpty()) { + try { + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + if (expireAfterWriteMillis == null) { + redisClient.mset(keyAndValuesAsBytes); + } else { + redisClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + telemetryContext.recordSuccess(null); + fromCache.putAll(values); + return fromCache; + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + return fromCache; + } catch (Exception e) { + telemetryContext.recordFailure(e); + return fromCache; + } + } + + @Override + public void invalidate(@Nonnull K key) { + if (key != null) { + final byte[] keyAsBytes = mapKey(key); + var telemetryContext = telemetry.get("INVALIDATE"); + + try { + redisClient.del(keyAsBytes); + telemetryContext.recordSuccess(null); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidate(@Nonnull Collection keys) { + if (keys != null && !keys.isEmpty()) { + var telemetryContext = 
telemetry.get("INVALIDATE_MANY"); + + try { + final byte[][] keysAsBytes = keys.stream() + .map(this::mapKey) + .toArray(byte[][]::new); + + redisClient.del(keysAsBytes); + telemetryContext.recordSuccess(null); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + } + + @Override + public void invalidateAll() { + var telemetryContext = telemetry.get("INVALIDATE_ALL"); + + try { + redisClient.flushAll(); + telemetryContext.recordSuccess(null); + } catch (CompletionException e) { + telemetryContext.recordFailure(e.getCause()); + } catch (Exception e) { + telemetryContext.recordFailure(e); + } + } + + @Nonnull + @Override + public CompletionStage getAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.get("GET"); + final byte[] keyAsBytes = mapKey(key); + + CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? redisAsyncClient.get(keyAsBytes) + : redisAsyncClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(jsonAsBytes -> { + final V value = valueMapper.read(jsonAsBytes); + telemetryContext.recordSuccess(value); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> getAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.get("GET_MANY"); + var keysByKeyByte = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + var keysAsBytes = keysByKeyByte.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? 
redisAsyncClient.mget(keysAsBytes) + : redisAsyncClient.getex(keysAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valuesByKeys -> { + final Map keyToValue = new HashMap<>(); + for (var entry : keysByKeyByte.entrySet()) { + valuesByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + keyToValue.put(entry.getKey(), value); + } + }); + } + telemetryContext.recordSuccess(keyToValue); + return keyToValue; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage putAsync(@Nonnull K key, @Nonnull V value) { + if (key == null) { + return CompletableFuture.completedFuture(value); + } + + var telemetryContext = telemetry.get("PUT"); + final byte[] keyAsBytes = mapKey(key); + final byte[] valueAsBytes = valueMapper.write(value); + var responseCompletionStage = (expireAfterWriteMillis == null) + ? redisAsyncClient.set(keyAsBytes, valueAsBytes) + : redisAsyncClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(null); + return value; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return value; + }); + } + + @Nonnull + @Override + public CompletionStage> putAsync(@Nonnull Map keyAndValues) { + if (keyAndValues == null || keyAndValues.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.get("PUT_MANY"); + var keyAndValuesAsBytes = new HashMap(); + keyAndValues.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + }); + + var responseCompletionStage = (expireAfterWriteMillis == null) + ? 
redisAsyncClient.mset(keyAndValuesAsBytes) + : redisAsyncClient.psetex(keyAndValuesAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(r -> { + telemetryContext.recordSuccess(null); + return keyAndValues; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return keyAndValues; + }); + } + + @Override + public CompletionStage computeIfAbsentAsync(@Nonnull K key, @Nonnull Function> mappingFunction) { + if (key == null) { + return CompletableFuture.completedFuture(null); + } + + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT"); + final byte[] keyAsBytes = mapKey(key); + final CompletionStage responseCompletionStage = (expireAfterAccessMillis == null) + ? redisAsyncClient.get(keyAsBytes) + : redisAsyncClient.getex(keyAsBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueMapper::read) + .thenCompose(fromCache -> { + if (fromCache != null) { + return CompletableFuture.completedFuture(fromCache); + } + + return mappingFunction.apply(key) + .thenCompose(value -> { + if (value == null) { + return CompletableFuture.completedFuture(null); + } + + final byte[] valueAsBytes = valueMapper.write(value); + var putFutureResponse = (expireAfterWriteMillis == null) + ? 
redisAsyncClient.set(keyAsBytes, valueAsBytes) + : redisAsyncClient.psetex(keyAsBytes, valueAsBytes, expireAfterWriteMillis); + + return putFutureResponse + .thenApply(v -> { + telemetryContext.recordSuccess(null); + return value; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return null; + }); + } + + @Nonnull + @Override + public CompletionStage> computeIfAbsentAsync(@Nonnull Collection keys, @Nonnull Function, CompletionStage>> mappingFunction) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(Collections.emptyMap()); + } + + var telemetryContext = telemetry.get("COMPUTE_IF_ABSENT_MANY"); + final Map keysByKeyBytes = keys.stream() + .collect(Collectors.toMap(k -> k, this::mapKey, (v1, v2) -> v2)); + + final byte[][] keysByBytes = keysByKeyBytes.values().toArray(byte[][]::new); + var responseCompletionStage = (expireAfterAccessMillis == null) + ? redisAsyncClient.mget(keysByBytes) + : redisAsyncClient.getex(keysByBytes, expireAfterAccessMillis); + + return responseCompletionStage + .thenApply(valueByKeys -> { + final Map fromCache = new HashMap<>(); + for (var entry : keysByKeyBytes.entrySet()) { + valueByKeys.forEach((k, v) -> { + if (Arrays.equals(entry.getValue(), k)) { + var value = valueMapper.read(v); + fromCache.put(entry.getKey(), value); + } + }); + } + + return fromCache; + }) + .thenCompose(fromCache -> { + if (fromCache.size() == keys.size()) { + return CompletableFuture.completedFuture(fromCache); + } + + var missingKeys = keys.stream() + .filter(k -> !fromCache.containsKey(k)) + .collect(Collectors.toSet()); + + return mappingFunction.apply(missingKeys) + .thenCompose(values -> { + if (values.isEmpty()) { + return CompletableFuture.completedFuture(fromCache); + } + + var keyAndValuesAsBytes = new HashMap(); + values.forEach((k, v) -> { + final byte[] keyAsBytes = mapKey(k); + final byte[] valueAsBytes = valueMapper.write(v); + keyAndValuesAsBytes.put(keyAsBytes, valueAsBytes); + 
}); + + var putCompletionStage = (expireAfterWriteMillis == null) + ? redisAsyncClient.mset(keyAndValuesAsBytes) + : redisAsyncClient.psetex(keyAndValuesAsBytes, expireAfterWriteMillis); + + return putCompletionStage + .thenApply(v -> { + telemetryContext.recordSuccess(null); + fromCache.putAll(values); + return fromCache; + }); + }); + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return Collections.emptyMap(); + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAsync(@Nonnull K key) { + if (key == null) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.get("INVALIDATE"); + final byte[] keyAsBytes = mapKey(key); + return redisAsyncClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(null); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Override + public CompletionStage invalidateAsync(@Nonnull Collection keys) { + if (keys == null || keys.isEmpty()) { + return CompletableFuture.completedFuture(false); + } + + var telemetryContext = telemetry.get("INVALIDATE_MANY"); + final byte[][] keyAsBytes = keys.stream() + .distinct() + .map(this::mapKey) + .toArray(byte[][]::new); + + return redisAsyncClient.del(keyAsBytes) + .thenApply(r -> { + telemetryContext.recordSuccess(null); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + @Nonnull + @Override + public CompletionStage invalidateAllAsync() { + var telemetryContext = telemetry.get("INVALIDATE_ALL"); + return redisAsyncClient.flushAll() + .thenApply(r -> { + telemetryContext.recordSuccess(null); + return true; + }) + .exceptionally(e -> { + telemetryContext.recordFailure(e); + return false; + }); + } + + private byte[] mapKey(K key) { + final byte[] suffixAsBytes = keyMapper.apply(key); + if (this.keyPrefix == null) { + return suffixAsBytes; + } else { + var keyAsBytes = new 
byte[keyPrefix.length + suffixAsBytes.length]; + System.arraycopy(this.keyPrefix, 0, keyAsBytes, 0, this.keyPrefix.length); + System.arraycopy(suffixAsBytes, 0, keyAsBytes, this.keyPrefix.length, suffixAsBytes.length); + + return keyAsBytes; + } + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java new file mode 100644 index 000000000..75a932976 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.AsyncCache; + +public interface RedisCache extends AsyncCache { + +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java new file mode 100644 index 000000000..0af4506e8 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheAsyncClient.java @@ -0,0 +1,42 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; + +import java.util.Map; +import java.util.concurrent.CompletionStage; + +public interface RedisCacheAsyncClient { + + @Nonnull + CompletionStage get(byte[] key); + + @Nonnull + CompletionStage> mget(byte[][] keys); + + @Nonnull + CompletionStage getex(byte[] key, long expireAfterMillis); + + @Nonnull + CompletionStage> getex(byte[][] keys, long expireAfterMillis); + + @Nonnull + CompletionStage set(byte[] key, byte[] value); + + @Nonnull + CompletionStage mset(@Nonnull Map keyAndValue); + + @Nonnull + CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis); + + @Nonnull + CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + + @Nonnull + CompletionStage del(byte[] key); + + @Nonnull + CompletionStage del(byte[][] keys); + + @Nonnull + CompletionStage 
flushAll(); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java new file mode 100644 index 000000000..9edfbb1c9 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -0,0 +1,34 @@ +package ru.tinkoff.kora.cache.redis; + +import jakarta.annotation.Nonnull; + +import java.util.Map; + +public interface RedisCacheClient { + + @Nonnull + byte[] get(byte[] key); + + @Nonnull + Map mget(byte[][] keys); + + @Nonnull + byte[] getex(byte[] key, long expireAfterMillis); + + @Nonnull + Map getex(byte[][] keys, long expireAfterMillis); + + void set(byte[] key, byte[] value); + + void mset(@Nonnull Map keyAndValue); + + void psetex(byte[] key, byte[] value, long expireAfterMillis); + + void psetex(@Nonnull Map keyAndValue, long expireAfterMillis); + + long del(byte[] key); + + long del(byte[][] keys); + + void flushAll(); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java new file mode 100644 index 000000000..d94dfdc31 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -0,0 +1,27 @@ +package ru.tinkoff.kora.cache.redis; + + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.time.Duration; + +@ConfigValueExtractor +public interface RedisCacheConfig { + + /** + * Key prefix allow to avoid key collision in single Redis database between multiple caches + * + * @return Redis Cache key prefix, if empty string means that prefix will NOT be applied + */ + String keyPrefix(); + + @Nullable + Duration expireAfterWrite(); + + @Nullable + Duration 
expireAfterAccess(); + + TelemetryConfig telemetry(); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java new file mode 100644 index 000000000..f6edc71ad --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.cache.CacheKeyMapper; + +import java.nio.charset.StandardCharsets; +import java.util.function.Function; + +/** + * Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. + */ +public interface RedisCacheKeyMapper extends Function { + + /** + * Is used to delimiter composite key such as {@link CacheKeyMapper} + */ + byte[] DELIMITER = ":".getBytes(StandardCharsets.UTF_8); +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java new file mode 100644 index 000000000..d3ea8846d --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -0,0 +1,167 @@ +package ru.tinkoff.kora.cache.redis; + +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.json.common.JsonCommonModule; +import ru.tinkoff.kora.json.common.JsonReader; +import ru.tinkoff.kora.json.common.JsonWriter; +import ru.tinkoff.kora.json.common.annotation.Json; + +import java.io.IOException; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.util.UUID; + +public interface RedisCacheMapperModule extends JsonCommonModule { + + @Json + @DefaultComponent + default RedisCacheValueMapper jsonRedisValueMapper(JsonWriter jsonWriter, JsonReader jsonReader) { + return new RedisCacheValueMapper<>() { + 
@Override + public byte[] write(V value) { + try { + return jsonWriter.toByteArray(value); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage(), e); + } + } + + @Override + public V read(byte[] serializedValue) { + try { + return (serializedValue == null) ? null : jsonReader.read(serializedValue); + } catch (IOException e) { + throw new IllegalStateException(e.getMessage(), e); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper stringRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(String value) { + return value.getBytes(StandardCharsets.UTF_8); + } + + @Override + public String read(byte[] serializedValue) { + return (serializedValue == null) ? null : new String(serializedValue, StandardCharsets.UTF_8); + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper bytesRedisValueMapper() { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(byte[] value) { + return value; + } + + @Override + public byte[] read(byte[] serializedValue) { + return serializedValue; + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper intRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Integer value) { + return keyMapper.apply(value); + } + + @Override + public Integer read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Integer.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper longRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(Long value) { + return keyMapper.apply(value); + } + + @Override + public Long read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return Long.valueOf(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + 
} + + @DefaultComponent + default RedisCacheValueMapper bigIntRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + @Override + public byte[] write(BigInteger value) { + return keyMapper.apply(value); + } + + @Override + public BigInteger read(byte[] serializedValue) { + if (serializedValue == null) { + return null; + } else { + return new BigInteger(new String(serializedValue, StandardCharsets.UTF_8)); + } + } + }; + } + + @DefaultComponent + default RedisCacheValueMapper uuidRedisValueMapper(RedisCacheKeyMapper keyMapper) { + return new RedisCacheValueMapper<>() { + + @Override + public byte[] write(UUID value) { + return keyMapper.apply(value); + } + + @Override + public UUID read(byte[] serializedValue) { + return (serializedValue == null) ? null : UUID.fromString(new String(serializedValue, StandardCharsets.UTF_8)); + } + }; + } + + @DefaultComponent + default RedisCacheKeyMapper intRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper longRedisKeyMapper() { + return c -> String.valueOf(c).getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper bigIntRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper uuidRedisKeyMapper() { + return c -> c.toString().getBytes(StandardCharsets.UTF_8); + } + + @DefaultComponent + default RedisCacheKeyMapper stringRedisKeyMapper() { + return c -> c.getBytes(StandardCharsets.UTF_8); + } +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java new file mode 100644 index 000000000..84c9675d8 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -0,0 +1,7 @@ +package ru.tinkoff.kora.cache.redis; + +import 
ru.tinkoff.kora.cache.CacheModule; + +public interface RedisCacheModule extends CacheModule, RedisCacheMapperModule { + +} diff --git a/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java new file mode 100644 index 000000000..cf2037f42 --- /dev/null +++ b/cache/cache-redis-common/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -0,0 +1,19 @@ +package ru.tinkoff.kora.cache.redis; + +/** + * Converts cache value into serializer value to store in cache. + */ +public interface RedisCacheValueMapper { + + /** + * @param value to serialize + * @return value serialized + */ + byte[] write(V value); + + /** + * @param serializedValue to deserialize + * @return value deserialized + */ + V read(byte[] serializedValue); +} diff --git a/cache/cache-redis-jedis/build.gradle b/cache/cache-redis-jedis/build.gradle new file mode 100644 index 000000000..8ef7e5d61 --- /dev/null +++ b/cache/cache-redis-jedis/build.gradle @@ -0,0 +1,13 @@ +dependencies { + annotationProcessor project(":config:config-annotation-processor") + + api project(":cache:cache-redis-common") + api project(":redis:redis-jedis") + + implementation project(":config:config-common") + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java new file mode 100644 index 000000000..330f101e5 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheAsyncClient.java @@ -0,0 +1,102 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nonnull; +import 
ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.Executor; + +final class JedisCacheAsyncClient implements RedisCacheAsyncClient { + + private final RedisCacheClient syncClient; + private final Executor executor; + + JedisCacheAsyncClient(RedisCacheClient syncClient, Executor executor) { + this.syncClient = syncClient; + this.executor = executor; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return CompletableFuture.supplyAsync(() -> syncClient.get(key), executor); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return CompletableFuture.supplyAsync(() -> syncClient.mget(keys), executor); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return CompletableFuture.supplyAsync(() -> syncClient.getex(key, expireAfterMillis), executor); + } + + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return CompletableFuture.supplyAsync(() -> syncClient.getex(keys, expireAfterMillis), executor); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return CompletableFuture.supplyAsync(() -> { + syncClient.set(key, value); + return null; + }, executor); + } + + @Nonnull + @Override + public CompletionStage mset(@Nonnull Map keyAndValue) { + return CompletableFuture.supplyAsync(() -> { + syncClient.mset(keyAndValue); + return null; + }, executor); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return CompletableFuture.supplyAsync(() -> { + syncClient.psetex(key, value, expireAfterMillis); + return null; + }, executor); + } + + @Nonnull + @Override + public CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis) 
{ + return CompletableFuture.supplyAsync(() -> { + syncClient.psetex(keyAndValue, expireAfterMillis); + return null; + }, executor); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return CompletableFuture.supplyAsync(() -> syncClient.del(key), executor); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return CompletableFuture.supplyAsync(() -> syncClient.del(keys), executor); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return CompletableFuture.supplyAsync(() -> { + syncClient.flushAll(); + return null; + }, executor); + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java new file mode 100644 index 000000000..064bf17e0 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheModule.java @@ -0,0 +1,42 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nullable; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.UnifiedJedis; +import ru.tinkoff.kora.application.graph.internal.loom.VirtualThreadExecutorHolder; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheModule; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.common.Tag; +import ru.tinkoff.kora.redis.jedis.JedisModule; + +import java.util.concurrent.Executor; +import java.util.concurrent.ForkJoinPool; + +public interface JedisCacheModule extends RedisCacheModule, JedisModule { + + @Tag(Jedis.class) + @DefaultComponent + default Executor jedisRedisCacheAsyncExecutor() { + var virtualExecutor = VirtualThreadExecutorHolder.executor(); + if (virtualExecutor == null) { + return ForkJoinPool.commonPool(); + } else { + return virtualExecutor; + } + } + + default 
RedisCacheClient jedisRedisCacheSyncClient(UnifiedJedis jedis) { + return new JedisCacheSyncClient(jedis); + } + + default RedisCacheAsyncClient jedisRedisCacheAsyncClient(RedisCacheClient redisCacheClient, + @Tag(Jedis.class) @Nullable Executor executor) { + if (executor == null) { + return new JedisCacheStubAsyncClient(redisCacheClient); + } else { + return new JedisCacheAsyncClient(redisCacheClient, executor); + } + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java new file mode 100644 index 000000000..82d92d449 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheStubAsyncClient.java @@ -0,0 +1,133 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nonnull; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +final class JedisCacheStubAsyncClient implements RedisCacheAsyncClient { + + private final RedisCacheClient syncClient; + + JedisCacheStubAsyncClient(RedisCacheClient syncClient) { + this.syncClient = syncClient; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + try { + return CompletableFuture.completedFuture(syncClient.get(key)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + try { + return CompletableFuture.completedFuture(syncClient.mget(keys)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + try { + return CompletableFuture.completedFuture(syncClient.getex(key, 
expireAfterMillis)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + try { + return CompletableFuture.completedFuture(syncClient.getex(keys, expireAfterMillis)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + try { + syncClient.set(key, value); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage mset(@Nonnull Map keyAndValue) { + try { + syncClient.mset(keyAndValue); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + try { + syncClient.psetex(key, value, expireAfterMillis); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage psetex(@Nonnull Map keyAndValue, long expireAfterMillis) { + try { + syncClient.psetex(keyAndValue, expireAfterMillis); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + try { + return CompletableFuture.completedFuture(syncClient.del(key)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + try { + return CompletableFuture.completedFuture(syncClient.del(keys)); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } + + @Nonnull + @Override + public CompletionStage flushAll() { + try { + 
syncClient.flushAll(); + return CompletableFuture.completedFuture(null); + } catch (Exception e) { + return CompletableFuture.failedFuture(e); + } + } +} diff --git a/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java new file mode 100644 index 000000000..c831ea9b0 --- /dev/null +++ b/cache/cache-redis-jedis/src/main/java/ru/tinkoff/kora/cache/redis/jedis/JedisCacheSyncClient.java @@ -0,0 +1,119 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nonnull; +import redis.clients.jedis.Response; +import redis.clients.jedis.UnifiedJedis; +import redis.clients.jedis.params.GetExParams; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +final class JedisCacheSyncClient implements RedisCacheClient { + + private final UnifiedJedis jedis; + + JedisCacheSyncClient(UnifiedJedis jedis) { + this.jedis = jedis; + } + + @Nonnull + @Override + public byte[] get(byte[] key) { + return jedis.get(key); + } + + @Nonnull + @Override + public Map mget(byte[][] keys) { + List values = jedis.mget(keys); + + int i = 0; + Map keysAndValues = new LinkedHashMap<>(values.size() + 1); + for (byte[] key : keys) { + byte[] value = values.get(i); + if (value != null) { + keysAndValues.put(key, value); + } + i++; + } + + return keysAndValues; + } + + @Nonnull + @Override + public byte[] getex(byte[] key, long expireAfterMillis) { + return jedis.getEx(key, GetExParams.getExParams().px(expireAfterMillis)); + } + + @Nonnull + @Override + public Map getex(byte[][] keys, long expireAfterMillis) { + try (var tx = jedis.pipelined()) { + final Map> responses = new LinkedHashMap<>(); + for (byte[] key : keys) { + var response = tx.getEx(key, GetExParams.getExParams().px(expireAfterMillis)); + responses.put(key, response); + 
} + tx.sync(); + + final Map values = new LinkedHashMap<>(); + responses.forEach((k, r) -> { + byte[] value = r.get(); + if (value != null) { + values.put(k, value); + } + }); + + return values; + } + } + + @Override + public void set(byte[] key, byte[] value) { + jedis.set(key, value); + } + + @Override + public void mset(Map keyAndValue) { + var keysAndValues = new ArrayList(keyAndValue.size() * 2); + for (var entry : keyAndValue.entrySet()) { + keysAndValues.add(entry.getKey()); + keysAndValues.add(entry.getValue()); + } + jedis.mset(keysAndValues.toArray(new byte[][]{})); + } + + @Override + public void psetex(byte[] key, byte[] value, long expireAfterMillis) { + jedis.psetex(key, expireAfterMillis, value); + } + + @Override + public void psetex(Map keyAndValue, long expireAfterMillis) { + try (var pipeline = jedis.pipelined()) { + for (var entry : keyAndValue.entrySet()) { + pipeline.psetex(entry.getKey(), expireAfterMillis, entry.getValue()); + } + pipeline.sync(); + } + } + + @Override + public long del(byte[] key) { + return jedis.del(key); + } + + @Override + public long del(byte[][] keys) { + return jedis.del(keys); + } + + @Override + public void flushAll() { + jedis.flushAll(); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractAsyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractAsyncCacheTests.java new file mode 100644 index 000000000..e5cf29368 --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractAsyncCacheTests.java @@ -0,0 +1,252 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.jedis.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; + +abstract class AbstractAsyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + 
void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.getAsync("1").toCompletableFuture().join()); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.getAsync("1").toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + 
assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void 
computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(Set.of("1", "2"), keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(Set.of("1", "2")).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "???", "2", "???")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "???")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "???")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + 
assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync("2").toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractSyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractSyncCacheTests.java new file mode 100644 index 000000000..28c17516a --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AbstractSyncCacheTests.java @@ -0,0 +1,229 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.jedis.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +abstract class AbstractSyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.get(key)); + } + + @Test + void getWhenCacheFilled() { + // given + var key = 
"1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get(key); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.get(keys); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.get("1")); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.get("1"); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return Map.of("1", "1", "2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + 
assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(Set.of("1", "2"), keysCompute -> { + if ("2".equals(keysCompute.iterator().next())) { + return Map.of("2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(Set.of("1", "2")); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key 
= "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get("2"); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireReadTests.java new file mode 100644 index 000000000..f335110dd --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireReadTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireWriteTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireWriteTests.java new file mode 100644 index 000000000..1bffc28fd --- 
/dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireWriteTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheTests.java new file mode 100644 index 000000000..085682a7a --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/AsyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java new file 
mode 100644 index 000000000..2866b50da --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/CacheRunner.java @@ -0,0 +1,103 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import jakarta.annotation.Nullable; +import org.junit.jupiter.api.Assertions; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheConfig; +import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; +import ru.tinkoff.kora.cache.redis.jedis.testdata.DummyCache; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.redis.jedis.JedisConfig; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; +import ru.tinkoff.kora.test.redis.RedisParams; + +import java.time.Duration; +import java.util.List; + +abstract class CacheRunner extends Assertions implements RedisCacheMapperModule, JedisCacheModule { + + public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, + @Nullable Duration expireRead) { + return new RedisCacheConfig() { + + @Override + public String keyPrefix() { + return "pref"; + } + + @Nullable + @Override + public Duration expireAfterWrite() { + return expireWrite; + } + + @Nullable + @Override + public Duration expireAfterAccess() { + return expireRead; + } + + @Override + public TelemetryConfig telemetry() { + return null; + } + }; + } + + private RedisCacheClient createJedis(RedisParams redisParams) throws Exception { + var jedisConfig = new JedisConfig() { + @Override + public List uri() { + return List.of(redisParams.uri().toString()); + } + + @Override + public Integer database() { + return null; + } + + @Override + public String user() { + return null; + } + + @Override + public String password() { + return null; + } + }; + + var jedis = jedisClient(jedisConfig); + return new JedisCacheSyncClient(jedis); + } + + private RedisCacheAsyncClient createAsyncJedis(RedisCacheClient cacheClient) { + 
return new JedisCacheStubAsyncClient(cacheClient); + } + + private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { + var syncClient = createJedis(redisParams); + var asyncClient = createAsyncJedis(syncClient); + return new DummyCache(getConfig(expireWrite, expireRead), syncClient, asyncClient, + (telemetryConfig, args) -> operationName -> new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(Object valueFromCache) {} + @Override + public void recordFailure(Throwable throwable) {} + }, + stringRedisKeyMapper(), stringRedisValueMapper()); + } + + protected DummyCache createCache(RedisParams redisParams) throws Exception { + return createDummyCache(redisParams, null, null); + } + + protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { + return createDummyCache(redisParams, expireWrite, null); + } + + protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception { + return createDummyCache(redisParams, null, expireRead); + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireReadTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireReadTests.java new file mode 100644 index 000000000..75fd7c1d4 --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireReadTests extends AbstractSyncCacheTests { + + @BeforeEach + 
void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireWriteTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireWriteTests.java new file mode 100644 index 000000000..7d7319ecc --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireWriteTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheTests.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheTests.java new file mode 100644 index 000000000..01ca94c4d --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/SyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis.jedis; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) 
+@RedisTestContainer +class SyncCacheTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/testdata/DummyCache.java b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/testdata/DummyCache.java new file mode 100644 index 000000000..b1e2badc7 --- /dev/null +++ b/cache/cache-redis-jedis/src/test/java/ru/tinkoff/kora/cache/redis/jedis/testdata/DummyCache.java @@ -0,0 +1,16 @@ +package ru.tinkoff.kora.cache.redis.jedis.testdata; + +import ru.tinkoff.kora.cache.redis.*; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; + +public final class DummyCache extends AbstractRedisCache { + + public DummyCache(RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheAsyncClient redisAsyncClient, + CacheTelemetryFactory telemetryFactory, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + super("dummy", config, redisClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper); + } +} diff --git a/cache/cache-redis-lettuce/build.gradle b/cache/cache-redis-lettuce/build.gradle new file mode 100644 index 000000000..29c1f9046 --- /dev/null +++ b/cache/cache-redis-lettuce/build.gradle @@ -0,0 +1,13 @@ +dependencies { + annotationProcessor project(":config:config-annotation-processor") + + api project(":cache:cache-redis-common") + api project(":redis:redis-lettuce") + + implementation project(":config:config-common") + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java 
b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java new file mode 100644 index 000000000..da353f3b2 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheModule.java @@ -0,0 +1,58 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.AbstractRedisClient; +import io.lettuce.core.RedisURI; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheModule; +import ru.tinkoff.kora.redis.lettuce.LettuceConfig; +import ru.tinkoff.kora.redis.lettuce.LettuceModule; + +import java.net.URI; +import java.util.List; + +public interface LettuceCacheModule extends RedisCacheModule, LettuceModule { + + default RedisCacheAsyncClient lettuceRedisCacheAsyncClient(AbstractRedisClient redisClient, + RedisClusterAsyncCommands lettuceCommands, + LettuceConfig lettuceConfig) { + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + final Integer database = lettuceConfig.database(); + final String user = lettuceConfig.user(); + final String password = lettuceConfig.password(); + + final List redisURIs = lettuceConfig.uri().stream() + .flatMap(uri -> RedisClusterURIUtil.toRedisURIs(URI.create(uri)).stream()) + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(lettuceConfig.commandTimeout()) + .build(); + }) + .toList(); + + var redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; + return new LettuceSingleCacheAsyncClient(rc, lettuceCommands, redisURI); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceClusterCacheAsyncClient(rcc, lettuceCommands); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + default RedisCacheClient lettuceRedisCacheSyncClient(RedisCacheAsyncClient redisCacheAsyncClient) { + return new LettuceCacheSyncClient(redisCacheAsyncClient); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java new file mode 100644 index 000000000..bc620c1fe --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceCacheSyncClient.java @@ -0,0 +1,75 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import jakarta.annotation.Nonnull; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; + +import java.util.Map; + +final class LettuceCacheSyncClient implements RedisCacheClient { + + private final RedisCacheAsyncClient redisAsyncClient; + + LettuceCacheSyncClient(RedisCacheAsyncClient redisAsyncClient) { + this.redisAsyncClient = redisAsyncClient; + } + + @Nonnull + @Override + public byte[] get(byte[] key) { + return this.redisAsyncClient.get(key).toCompletableFuture().join(); + } + + @Nonnull + @Override + public Map mget(byte[][] keys) { + return this.redisAsyncClient.mget(keys).toCompletableFuture().join(); + } + + @Nonnull + @Override + public byte[] getex(byte[] key, long expireAfterMillis) { + return this.redisAsyncClient.getex(key, expireAfterMillis).toCompletableFuture().join(); + } + + @Nonnull + @Override + public Map getex(byte[][] keys, long expireAfterMillis) { + return this.redisAsyncClient.getex(keys, 
expireAfterMillis).toCompletableFuture().join(); + } + + @Override + public void set(byte[] key, byte[] value) { + this.redisAsyncClient.set(key, value).toCompletableFuture().join(); + } + + @Override + public void mset(@Nonnull Map keyAndValue) { + this.redisAsyncClient.mset(keyAndValue).toCompletableFuture().join(); + } + + @Override + public void psetex(byte[] key, byte[] value, long expireAfterMillis) { + this.redisAsyncClient.psetex(key, value, expireAfterMillis).toCompletableFuture().join(); + } + + @Override + public void psetex(@Nonnull Map keyAndValue, long expireAfterMillis) { + this.redisAsyncClient.psetex(keyAndValue, expireAfterMillis).toCompletableFuture().join(); + } + + @Override + public long del(byte[] key) { + return this.redisAsyncClient.del(key).toCompletableFuture().join(); + } + + @Override + public long del(byte[][] keys) { + return this.redisAsyncClient.del(keys).toCompletableFuture().join(); + } + + @Override + public void flushAll() { + this.redisAsyncClient.flushAll().toCompletableFuture().join(); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java new file mode 100644 index 000000000..9edbef77f --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClusterCacheAsyncClient.java @@ -0,0 +1,183 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import io.lettuce.core.GetExArgs; +import io.lettuce.core.KeyValue; +import io.lettuce.core.Value; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import 
io.lettuce.core.support.BoundedAsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceClusterCacheAsyncClient implements RedisCacheAsyncClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceClusterCacheAsyncClient.class); + + // always use async cause sync uses JDK Proxy wrapped async impl + private final RedisClusterAsyncCommands lettuceCommands; + private final RedisClusterClient lettuceClient; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> lettucePool; + + + LettuceClusterCacheAsyncClient(RedisClusterClient lettuceClient, + RedisClusterAsyncCommands lettuceCommands) { + this.lettuceClient = lettuceClient; + this.lettuceCommands = lettuceCommands; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return lettuceCommands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return lettuceCommands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue, (x, y) -> x, LinkedHashMap::new))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return lettuceCommands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return lettucePool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> 
futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return lettucePool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (x, y) -> x, LinkedHashMap::new))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return lettuceCommands.set(key, value).thenApply(r -> null); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return lettuceCommands.mset(keyAndValue).thenApply(r -> null); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return lettuceCommands.psetex(key, expireAfterMillis, value).thenApply(r -> null); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return lettucePool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return lettucePool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))); + }); + } + + @Nonnull + @Override + public CompletionStage del(byte[] key) { + return lettuceCommands.del(key); + } + + @Nonnull 
+ @Override + public CompletionStage del(byte[][] keys) { + return lettuceCommands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return lettuceCommands.flushall(FlushMode.SYNC).thenApply(r -> null); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.lettucePool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> lettuceClient.connectAsync(ByteArrayCodec.INSTANCE), poolConfig, false); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.lettucePool.close(); + + logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceSingleCacheAsyncClient.java b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceSingleCacheAsyncClient.java new file mode 100644 index 000000000..6b957b036 --- /dev/null +++ b/cache/cache-redis-lettuce/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceSingleCacheAsyncClient.java @@ -0,0 +1,182 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.api.StatefulRedisConnection; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.BoundedAsyncPool; +import 
io.lettuce.core.support.BoundedPoolConfig; +import jakarta.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.common.util.TimeUtils; + +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; + +final class LettuceSingleCacheAsyncClient implements RedisCacheAsyncClient, Lifecycle { + + private static final Logger logger = LoggerFactory.getLogger(LettuceSingleCacheAsyncClient.class); + + // always use async cause sync uses JDK Proxy wrapped async impl + private final RedisClusterAsyncCommands lettuceCommands; + private final RedisClient lettuceClient; + private final RedisURI redisURI; + + // use for pipeline commands only cause lettuce have bad performance when using pool + private BoundedAsyncPool> lettucePool; + + LettuceSingleCacheAsyncClient(RedisClient lettuceClient, + RedisClusterAsyncCommands lettuceCommands, + RedisURI redisURI) { + this.lettuceClient = lettuceClient; + this.lettuceCommands = lettuceCommands; + this.redisURI = redisURI; + } + + @Nonnull + @Override + public CompletionStage get(byte[] key) { + return lettuceCommands.get(key); + } + + @Nonnull + @Override + public CompletionStage> mget(byte[][] keys) { + return lettuceCommands.mget(keys) + .thenApply(r -> r.stream() + .filter(Value::hasValue) + .collect(Collectors.toMap(KeyValue::getKey, Value::getValue, (x, y) -> x, LinkedHashMap::new))); + } + + @Nonnull + @Override + public CompletionStage getex(byte[] key, long expireAfterMillis) { + return lettuceCommands.getex(key, GetExArgs.Builder.px(expireAfterMillis)); + } + + @SuppressWarnings("unchecked") + @Nonnull + @Override + public CompletionStage> getex(byte[][] keys, long expireAfterMillis) { + return lettucePool.acquire().thenCompose(connection -> { + 
connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (byte[] key : keys) { + var future = async.getex(key, GetExArgs.Builder.px(expireAfterMillis)) + .thenApply(v -> (v == null) ? null : Map.entry(key, v)) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return lettucePool.release(connection) + .thenCompose(_v -> CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new))) + .thenApply(_void -> futures.stream() + .map(f -> f.getNow(null)) + .filter(Objects::nonNull) + .map(v -> ((Map.Entry) v)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (x, y) -> x, LinkedHashMap::new))); + }); + } + + @Nonnull + @Override + public CompletionStage set(byte[] key, byte[] value) { + return lettuceCommands.set(key, value).thenApply(r -> null); + } + + @Override + public CompletionStage mset(Map keyAndValue) { + return lettuceCommands.mset(keyAndValue).thenApply(r -> null); + } + + @Nonnull + @Override + public CompletionStage psetex(byte[] key, byte[] value, long expireAfterMillis) { + return lettuceCommands.psetex(key, expireAfterMillis, value).thenApply(r -> null); + } + + @Nonnull + @Override + public CompletionStage psetex(Map keyAndValue, long expireAfterMillis) { + return lettucePool.acquire().thenCompose(connection -> { + connection.setAutoFlushCommands(false); + + List> futures = new ArrayList<>(); + + var async = connection.async(); + for (Map.Entry entry : keyAndValue.entrySet()) { + var future = async.psetex(entry.getKey(), expireAfterMillis, entry.getValue()) + .thenApply(v -> true) + .toCompletableFuture(); + + futures.add(future); + } + + connection.flushCommands(); + connection.setAutoFlushCommands(true); + + return CompletableFuture.allOf(futures.toArray(CompletableFuture[]::new)) + .whenComplete((s, throwable) -> lettucePool.release(connection)); + }); + } + + @Nonnull + @Override + 
public CompletionStage del(byte[] key) { + return lettuceCommands.del(key); + } + + @Nonnull + @Override + public CompletionStage del(byte[][] keys) { + return lettuceCommands.del(keys); + } + + @Nonnull + @Override + public CompletionStage flushAll() { + return lettuceCommands.flushall(FlushMode.SYNC).thenApply(r -> null); + } + + @Override + public void init() { + logger.debug("Redis Client (Lettuce) starting..."); + final long started = TimeUtils.started(); + + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .maxIdle(Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4) + .minIdle(0) + .testOnAcquire(false) + .testOnCreate(false) + .testOnRelease(false) + .build(); + + this.lettucePool = AsyncConnectionPoolSupport.createBoundedObjectPool(() -> lettuceClient.connectAsync(ByteArrayCodec.INSTANCE, redisURI), poolConfig); + + logger.info("Redis Client (Lettuce) started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + logger.debug("Redis Client (Lettuce) stopping..."); + final long started = TimeUtils.started(); + + this.lettucePool.close(); + + logger.info("Redis Client (Lettuce) stopped in {}", TimeUtils.tookForLogging(started)); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractAsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractAsyncCacheTests.java new file mode 100644 index 000000000..832a91e0d --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractAsyncCacheTests.java @@ -0,0 +1,252 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; + +abstract class 
AbstractAsyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + Map keyToValue = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.getAsync("1").toCompletableFuture().join()); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.getAsync("1").toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = 
List.of("1", "2"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + 
assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(Set.of("1", "2"), keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "1", "2", "2")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "1")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "2")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(Set.of("1", "2")).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.getAsync(key).toCompletableFuture().join()); + cache.putAsync(key, key).toCompletableFuture().join(); + } + + // when + final Map valueComputed = cache.computeIfAbsentAsync(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return CompletableFuture.completedFuture(Map.of("1", "???", "2", "???")); + } else if ("1".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("1", "???")); + } else if ("2".equals(keysCompute.iterator().next())) { + return CompletableFuture.completedFuture(Map.of("2", "???")); + } + + throw new IllegalStateException("Should not happen"); + }).toCompletableFuture().join(); + assertEquals(2, valueComputed.size()); + 
assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.getAsync(keys).toCompletableFuture().join(); + assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.putAsync(key, value).toCompletableFuture().join(); + + // then + final String fromCache = cache.getAsync("2").toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.putAsync(key, value).toCompletableFuture().join(); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.getAsync(key).toCompletableFuture().join(); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractSyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractSyncCacheTests.java new file mode 100644 index 000000000..0488159b2 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AbstractSyncCacheTests.java @@ -0,0 +1,229 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import org.junit.jupiter.api.Test; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +abstract class AbstractSyncCacheTests extends CacheRunner { + + protected DummyCache cache = null; + + @Test + void getWhenCacheEmpty() { + // given + var key = "1"; + + // when + 
assertNull(cache.get(key)); + } + + @Test + void getWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get(key); + assertEquals(value, fromCache); + } + + @Test + void getMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + + // when + Map keyToValue = cache.get(keys); + assertTrue(keyToValue.isEmpty()); + } + + @Test + void getMultiWhenCacheFilledPartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(1, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void getMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + Map keyToValue = cache.get(keys); + assertEquals(2, keyToValue.size()); + keyToValue.forEach((k, v) -> assertTrue(keys.stream().anyMatch(key -> key.equals(k) && key.equals(v)))); + } + + @Test + void computeIfAbsentWhenCacheEmpty() { + // given + + // when + assertNull(cache.get("1")); + final String valueComputed = cache.computeIfAbsent("1", k -> "1"); + assertEquals("1", valueComputed); + + // then + final String cached = cache.get("1"); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheEmpty() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + if (keysCompute.size() == 2) { + return Map.of("1", "1", "2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), 
Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiOneWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(1, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiAllWhenCachePartly() { + // given + List keys = List.of("1"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(Set.of("1", "2"), keysCompute -> { + if ("2".equals(keysCompute.iterator().next())) { + return Map.of("2", "2"); + } + + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.of("1", "2"), valueComputed.keySet()); + assertEquals(Set.of("1", "2"), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(Set.of("1", "2")); + assertEquals(valueComputed, cached); + } + + @Test + void computeIfAbsentMultiWhenCacheFilled() { + // given + List keys = List.of("1", "2"); + for (String key : keys) { + assertNull(cache.get(key)); + cache.put(key, key); + } + + // when + final Map valueComputed = cache.computeIfAbsent(keys, keysCompute -> { + throw new IllegalStateException("Should not happen"); + }); + assertEquals(2, valueComputed.size()); + assertEquals(Set.copyOf(keys), valueComputed.keySet()); + assertEquals(Set.copyOf(keys), Set.copyOf(valueComputed.values())); + + // then + final Map cached = cache.get(keys); + 
assertEquals(valueComputed, cached); + } + + @Test + void getWrongKeyWhenCacheFilled() { + // given + var key = "1"; + var value = "1"; + + // when + cache.put(key, value); + + // then + final String fromCache = cache.get("2"); + assertNull(fromCache); + } + + @Test + void getWhenCacheInvalidate() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidate(key); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } + + @Test + void getFromCacheWhenCacheInvalidateAll() { + // given + var key = "1"; + var value = "1"; + cache.put(key, value); + + // when + cache.invalidateAll(); + + // then + final String fromCache = cache.get(key); + assertNull(fromCache); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireReadTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireReadTests.java new file mode 100644 index 000000000..7c3acecbe --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireReadTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireWriteTests.java 
b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireWriteTests.java new file mode 100644 index 000000000..789c987bd --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheExpireWriteTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheTests.java new file mode 100644 index 000000000..7af17f562 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/AsyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class AsyncCacheTests extends AbstractAsyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git 
a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java new file mode 100644 index 000000000..e8ea0251e --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/CacheRunner.java @@ -0,0 +1,129 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.RedisClient; +import io.lettuce.core.RedisURI; +import io.lettuce.core.api.StatefulConnection; +import io.lettuce.core.codec.ByteArrayCodec; +import jakarta.annotation.Nullable; +import org.junit.jupiter.api.Assertions; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient; +import ru.tinkoff.kora.cache.redis.RedisCacheClient; +import ru.tinkoff.kora.cache.redis.RedisCacheConfig; +import ru.tinkoff.kora.cache.redis.RedisCacheMapperModule; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetry; +import ru.tinkoff.kora.redis.lettuce.LettuceConfig; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; +import ru.tinkoff.kora.test.redis.RedisParams; + +import java.time.Duration; +import java.util.List; + +abstract class CacheRunner extends Assertions implements RedisCacheMapperModule, LettuceCacheModule { + + public static RedisCacheConfig getConfig(@Nullable Duration expireWrite, + @Nullable Duration expireRead) { + return new RedisCacheConfig() { + + @Override + public String keyPrefix() { + return "pref"; + } + + @Nullable + @Override + public Duration expireAfterWrite() { + return expireWrite; + } + + @Nullable + @Override + public Duration expireAfterAccess() { + return expireRead; + } + + @Override + public TelemetryConfig telemetry() { + return null; + } + }; + } + + private RedisCacheAsyncClient createLettuce(RedisParams redisParams) throws Exception { + 
var lettuceClientConfig = new LettuceConfig() { + @Override + public List uri() { + return List.of(redisParams.uri().toString()); + } + + @Override + public Integer database() { + return null; + } + + @Override + public String user() { + return null; + } + + @Override + public String password() { + return null; + } + + @Override + public PoolConfig pool() { + return null; + } + }; + + var lettuceClient = lettuceClient(lettuceClientConfig); + if (lettuceClient instanceof Lifecycle lc) { + lc.init(); + } + + if (!(lettuceClient instanceof RedisClient rc)) { + throw new IllegalStateException(); + } + + Wrapped> statefulConnectionWrapped = lettuceStatefulConnection(lettuceClient, ByteArrayCodec.INSTANCE); + if(statefulConnectionWrapped instanceof Lifecycle l) { + l.init(); + } + var commands = lettuceRedisClusterAsyncCommands(statefulConnectionWrapped.value()); + LettuceSingleCacheAsyncClient lettuceSingleCacheAsyncClient = new LettuceSingleCacheAsyncClient(rc, commands, RedisURI.create(redisParams.uri())); + lettuceSingleCacheAsyncClient.init(); + return lettuceSingleCacheAsyncClient; + } + + private RedisCacheClient createSyncLettuce(RedisCacheAsyncClient asyncClient) { + return new LettuceCacheSyncClient(asyncClient); + } + + private DummyCache createDummyCache(RedisParams redisParams, Duration expireWrite, Duration expireRead) throws Exception { + var lettuceClient = createLettuce(redisParams); + var lettuceSyncClient = createSyncLettuce(lettuceClient); + return new DummyCache(getConfig(expireWrite, expireRead), lettuceSyncClient, lettuceClient, + (telemetryConfig, args) -> operationName -> new CacheTelemetry.CacheTelemetryContext() { + @Override + public void recordSuccess(Object valueFromCache) {} + @Override + public void recordFailure(Throwable throwable) {} + }, + stringRedisKeyMapper(), stringRedisValueMapper()); + } + + protected DummyCache createCache(RedisParams redisParams) throws Exception { + return createDummyCache(redisParams, null, null); + } + + 
protected DummyCache createCacheExpireWrite(RedisParams redisParams, Duration expireWrite) throws Exception { + return createDummyCache(redisParams, expireWrite, null); + } + + protected DummyCache createCacheExpireRead(RedisParams redisParams, Duration expireRead) throws Exception { + return createDummyCache(redisParams, null, expireRead); + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireReadTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireReadTests.java new file mode 100644 index 000000000..e36676e23 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireReadTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireReadTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireRead(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireWriteTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireWriteTests.java new file mode 100644 index 000000000..39db66914 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheExpireWriteTests.java @@ -0,0 +1,22 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import 
org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +import java.time.Duration; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheExpireWriteTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCacheExpireWrite(redisParams, Duration.ofSeconds(1)); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheTests.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheTests.java new file mode 100644 index 000000000..e6fb15f0b --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/SyncCacheTests.java @@ -0,0 +1,20 @@ +package ru.tinkoff.kora.cache.redis.lettuce; + +import io.lettuce.core.FlushMode; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInstance; +import ru.tinkoff.kora.test.redis.RedisParams; +import ru.tinkoff.kora.test.redis.RedisTestContainer; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@RedisTestContainer +class SyncCacheTests extends AbstractSyncCacheTests { + + @BeforeEach + void setup(RedisParams redisParams) throws Exception { + redisParams.execute(cmd -> cmd.flushall(FlushMode.SYNC)); + if (cache == null) { + cache = createCache(redisParams); + } + } +} diff --git a/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java new file mode 100644 index 000000000..6321fe1a3 --- /dev/null +++ b/cache/cache-redis-lettuce/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java @@ -0,0 +1,16 @@ +package ru.tinkoff.kora.cache.redis.lettuce.testdata; + 
+import ru.tinkoff.kora.cache.redis.*; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryFactory; + +public final class DummyCache extends AbstractRedisCache { + + public DummyCache(RedisCacheConfig config, + RedisCacheClient redisClient, + RedisCacheAsyncClient redisAsyncClient, + CacheTelemetryFactory telemetryFactory, + RedisCacheKeyMapper keyMapper, + RedisCacheValueMapper valueMapper) { + super("dummy", config, redisClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper); + } +} diff --git a/cache/cache-redis/README.md b/cache/cache-redis/README.md new file mode 100644 index 000000000..e917a758e --- /dev/null +++ b/cache/cache-redis/README.md @@ -0,0 +1,3 @@ +# DEPRECATED + +Use `cache-redis-lettuce` instead diff --git a/cache/cache-redis/build.gradle b/cache/cache-redis/build.gradle index 748493949..44c085705 100644 --- a/cache/cache-redis/build.gradle +++ b/cache/cache-redis/build.gradle @@ -5,7 +5,7 @@ dependencies { implementation project(":json:json-common") implementation project(":config:config-common") - implementation(libs.lettuce.core) { + implementation(libs.redis.lettuce) { exclude group: 'io.projectreactor', module: 'reactor-core' exclude group: 'io.netty', module: 'netty-common' exclude group: 'io.netty', module: 'netty-handler' diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java index 941591ac8..97a50013b 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/AbstractRedisCache.java @@ -13,6 +13,14 @@ import java.util.function.Function; import java.util.stream.Collectors; +/** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCacheAsyncClient instead. + *

+ * Check documentation for more information + */ +@Deprecated public abstract class AbstractRedisCache implements AsyncCache { private static final Logger logger = LoggerFactory.getLogger(RedisCache.class); diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java index 75a932976..8c46d888e 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCache.java @@ -2,6 +2,14 @@ import ru.tinkoff.kora.cache.AsyncCache; +/** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCache instead. + *

+ * Check documentation for more information + */ +@Deprecated public interface RedisCache extends AsyncCache { } diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java index a5b995bd6..9f692ccb6 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheClient.java @@ -5,6 +5,14 @@ import java.util.Map; import java.util.concurrent.CompletionStage; +/** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCacheAsyncClient instead. + *

+ * Check documentation for more information + */ +@Deprecated public interface RedisCacheClient { @Nonnull diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java index 120bd511a..a1a3ccc8d 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheConfig.java @@ -6,6 +6,14 @@ import java.time.Duration; +/** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCacheConfig instead. + *

+ * Check documentation for more information + */ +@Deprecated @ConfigValueExtractor public interface RedisCacheConfig { diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java index f6edc71ad..ff31832ee 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheKeyMapper.java @@ -6,8 +6,15 @@ import java.util.function.Function; /** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCacheKeyMapper instead. + *

+ * See the documentation for more information. + *

* Contract for converting method arguments {@link CacheKeyMapper} into the final key that will be used in Cache implementation. */ +@Deprecated public interface RedisCacheKeyMapper extends Function { /** diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java index 81e48e005..ab8c47e7a 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheMapperModule.java @@ -14,6 +14,14 @@ import java.nio.charset.StandardCharsets; import java.util.UUID; +/** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCacheMapperModule instead. + *

+ * Check documentation for more information + */ +@Deprecated public interface RedisCacheMapperModule extends JsonCommonModule { @DefaultComponent diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java index fe07914b0..06bf53637 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheModule.java @@ -2,6 +2,14 @@ import ru.tinkoff.kora.cache.redis.lettuce.LettuceModule; +/** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and LettuceCacheModule instead. + *

+ * Check documentation for more information + */ +@Deprecated public interface RedisCacheModule extends RedisCacheMapperModule, LettuceModule { } diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java index af84dfdd5..7d9e44c8c 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheTelemetry.java @@ -8,6 +8,10 @@ import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; import ru.tinkoff.kora.cache.telemetry.CacheTracer; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public final class RedisCacheTelemetry { private static final String ORIGIN = "redis"; diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java index cf2037f42..472ec3069 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/RedisCacheValueMapper.java @@ -1,8 +1,15 @@ package ru.tinkoff.kora.cache.redis; /** + * This module is no longer maintained, it was replaced with new one. + *

+ * Use the ru.tinkoff.kora:cache-redis-lettuce dependency and RedisCacheValueMapper instead. + *

+ * Check documentation for more information + *

* Converts cache value into serializer value to store in cache. */ +@Deprecated public interface RedisCacheValueMapper { /** diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java index 6fb2ee3e9..5eec5745d 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientConfig.java @@ -35,9 +35,13 @@ default Duration commandTimeout() { enum Protocol { - /** Redis 2 to Redis 5 */ + /** + * Redis 2 to Redis 5 + */ RESP2, - /** Redis 6+ */ + /** + * Redis 6+ + */ RESP3 } } diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java index fb29c05e2..7d8c87c7e 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceClientFactory.java @@ -11,6 +11,10 @@ import java.time.Duration; import java.util.List; +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public final class LettuceClientFactory { @Nonnull diff --git a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java index 25cb53904..9e5e4e366 100644 --- a/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java +++ b/cache/cache-redis/src/main/java/ru/tinkoff/kora/cache/redis/lettuce/LettuceModule.java @@ -1,16 +1,15 @@ package ru.tinkoff.kora.cache.redis.lettuce; import io.lettuce.core.cluster.RedisClusterClient; -import io.lettuce.core.protocol.ProtocolVersion; -import jakarta.annotation.Nullable; 
import ru.tinkoff.kora.cache.redis.RedisCacheClient; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.config.common.Config; -import ru.tinkoff.kora.config.common.ConfigValue; import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; -import java.time.Duration; - +/** + * Use dependency - ru.tinkoff.kora:cache-redis-lettuce + */ +@Deprecated public interface LettuceModule { default LettuceClientConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java index 209ce6ecb..2252e340a 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractAsyncCacheTests.java @@ -1,7 +1,7 @@ package ru.tinkoff.kora.cache.redis; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java index f5ec3aa9d..cbc7b6479 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/AbstractSyncCacheTests.java @@ -1,7 +1,7 @@ package ru.tinkoff.kora.cache.redis; import org.junit.jupiter.api.Test; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import java.util.List; import java.util.Map; diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java 
b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java index 3509176e2..fb68a5d8b 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/CacheRunner.java @@ -4,7 +4,7 @@ import org.junit.jupiter.api.Assertions; import ru.tinkoff.kora.application.graph.Lifecycle; import ru.tinkoff.kora.cache.redis.lettuce.LettuceClientConfig; -import ru.tinkoff.kora.cache.redis.testdata.DummyCache; +import ru.tinkoff.kora.cache.redis.lettuce.testdata.DummyCache; import ru.tinkoff.kora.test.redis.RedisParams; import java.time.Duration; diff --git a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java similarity index 89% rename from cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java rename to cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java index 4d098b5ff..92728f10d 100644 --- a/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/testdata/DummyCache.java +++ b/cache/cache-redis/src/test/java/ru/tinkoff/kora/cache/redis/lettuce/testdata/DummyCache.java @@ -1,4 +1,4 @@ -package ru.tinkoff.kora.cache.redis.testdata; +package ru.tinkoff.kora.cache.redis.lettuce.testdata; import ru.tinkoff.kora.cache.redis.*; diff --git a/cache/cache-symbol-processor/build.gradle b/cache/cache-symbol-processor/build.gradle index 475f658f6..b654f0273 100644 --- a/cache/cache-symbol-processor/build.gradle +++ b/cache/cache-symbol-processor/build.gradle @@ -10,7 +10,7 @@ dependencies { testImplementation libs.prometheus.collector.caffeine testImplementation project(":internal:test-logging") testImplementation project(":cache:cache-caffeine") - testImplementation project(":cache:cache-redis") + testImplementation project(":cache:cache-redis-lettuce") testImplementation 
project(":json:json-common") testImplementation project(":config:config-common") testImplementation testFixtures(project(":symbol-processor-common")) diff --git a/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt b/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt index f947d2e99..e3198ea7c 100644 --- a/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt +++ b/cache/cache-symbol-processor/src/main/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheSymbolProcessor.kt @@ -26,7 +26,6 @@ import ru.tinkoff.kora.ksp.common.KspCommonUtils.generated import ru.tinkoff.kora.ksp.common.KspCommonUtils.toTypeName import ru.tinkoff.kora.ksp.common.TagUtils.parseTags import ru.tinkoff.kora.ksp.common.TagUtils.toTagAnnotation -import ru.tinkoff.kora.ksp.common.exception.ProcessingErrorException class CacheSymbolProcessor( private val environment: SymbolProcessorEnvironment @@ -35,17 +34,24 @@ class CacheSymbolProcessor( companion object { private val ANNOTATION_CACHE = ClassName("ru.tinkoff.kora.cache.annotation", "Cache") - private val CAFFEINE_TELEMETRY = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheTelemetry") + private val CACHE_TELEMETRY_FACTORY = ClassName("ru.tinkoff.kora.cache.telemetry", "CacheTelemetryFactory") + private val CAFFEINE_CACHE = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCache") private val CAFFEINE_CACHE_FACTORY = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheFactory") private val CAFFEINE_CACHE_CONFIG = ClassName("ru.tinkoff.kora.cache.caffeine", "CaffeineCacheConfig") private val CAFFEINE_CACHE_IMPL = ClassName("ru.tinkoff.kora.cache.caffeine", "AbstractCaffeineCache") + @Deprecated(message = "deprecated redis dependency") private val REDIS_TELEMETRY = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheTelemetry") + + @Deprecated(message = "deprecated 
redis dependency") + private val REDIS_CACHE_OLD_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheClient") + private val REDIS_CACHE = ClassName("ru.tinkoff.kora.cache.redis", "RedisCache") private val REDIS_CACHE_IMPL = ClassName("ru.tinkoff.kora.cache.redis", "AbstractRedisCache") private val REDIS_CACHE_CONFIG = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheConfig") - private val REDIS_CACHE_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheClient") + private val REDIS_CACHE_SYNC_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheClient") + private val REDIS_CACHE_ASYNC_CLIENT = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheAsyncClient") private val REDIS_CACHE_MAPPER_KEY = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheKeyMapper") private val REDIS_CACHE_MAPPER_VALUE = ClassName("ru.tinkoff.kora.cache.redis", "RedisCacheValueMapper") } @@ -70,7 +76,7 @@ class CacheSymbolProcessor( val cacheImplBase = getCacheImplBase(cacheContractType) val implSpec = TypeSpec.classBuilder(getCacheImpl(cacheContract)) .generated(CacheSymbolProcessor::class) - .primaryConstructor(getCacheConstructor(cacheContractType)) + .primaryConstructor(getCacheConstructor(cacheContractType, cacheContract)) .addSuperclassConstructorParameter(getCacheSuperConstructorCall(cacheContract, cacheContractType)) .superclass(cacheImplBase) .addSuperinterface(cacheContract.toTypeName()) @@ -195,82 +201,161 @@ class CacheSymbolProcessor( .build() ) .addParameter("factory", CAFFEINE_CACHE_FACTORY) - .addParameter("telemetry", CAFFEINE_TELEMETRY) - .addStatement("return %T(config, factory, telemetry)", cacheImplName) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) + .addStatement("return %T(config, factory, telemetryFactory)", cacheImplName) .returns(cacheTypeName) .build() } REDIS_CACHE -> { - val keyType = cacheContract.typeArguments[0] - val valueType = cacheContract.typeArguments[1] - val keyMapperType = 
REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) - val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) - - val cacheContractType = cacheClass.getAllSuperTypes() - .filter { i -> i.toTypeName() == cacheContract } - .first() - - val keyMapperBuilder = ParameterSpec.builder("keyMapper", keyMapperType) - val keyTags = cacheContractType.arguments[0].parseTags() - if (keyTags.isNotEmpty()) { - keyMapperBuilder.addAnnotation(keyTags.toTagAnnotation()) + if (isRedisDeprecated(cacheClass)) { + return getRedisDeprecatedFunc(cacheClass, cacheContract, cacheImplName, cacheTypeName, methodName) + } else { + return getRedisFunc(cacheClass, cacheContract, cacheImplName, cacheTypeName, methodName) } + } - val valueMapperBuilder = ParameterSpec.builder("valueMapper", valueMapperType) - val valueTags = cacheContractType.arguments[1].parseTags() - if (valueTags.isNotEmpty()) { - valueMapperBuilder.addAnnotation(valueTags.toTagAnnotation()) - } + else -> { + throw IllegalArgumentException("Unknown cache type impl: ${cacheContract.rawType}") + } + } + } - FunSpec.builder(methodName) - .addModifiers(KModifier.PUBLIC) - .addParameter( - ParameterSpec.builder("config", REDIS_CACHE_CONFIG) - .addAnnotation( - AnnotationSpec.builder(CommonClassNames.tag) - .addMember("%T::class", cacheTypeName) - .build() - ) + private fun getRedisFunc( + cacheClass: KSClassDeclaration, + cacheContract: ParameterizedTypeName, + cacheImplName: ClassName, + cacheTypeName: TypeName, + methodName: String + ): FunSpec { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + + val cacheContractType = cacheClass.getAllSuperTypes() + .filter { i -> i.toTypeName() == cacheContract } + .first() + + val keyMapperBuilder = ParameterSpec.builder("keyMapper", keyMapperType) + val keyTags = 
cacheContractType.arguments[0].parseTags() + if (keyTags.isNotEmpty()) { + keyMapperBuilder.addAnnotation(keyTags.toTagAnnotation()) + } + + val valueMapperBuilder = ParameterSpec.builder("valueMapper", valueMapperType) + val valueTags = cacheContractType.arguments[1].parseTags() + if (valueTags.isNotEmpty()) { + valueMapperBuilder.addAnnotation(valueTags.toTagAnnotation()) + } + + return FunSpec.builder(methodName) + .addModifiers(KModifier.PUBLIC) + .addParameter( + ParameterSpec.builder("config", REDIS_CACHE_CONFIG) + .addAnnotation( + AnnotationSpec.builder(CommonClassNames.tag) + .addMember("%T::class", cacheTypeName) .build() ) - .addParameter("redisClient", REDIS_CACHE_CLIENT) - .addParameter("telemetry", REDIS_TELEMETRY) - .addParameter(keyMapperBuilder.build()) - .addParameter(valueMapperBuilder.build()) - .addStatement("return %L(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) - .returns(cacheTypeName) .build() - } + ) + .addParameter("redisSyncClient", REDIS_CACHE_SYNC_CLIENT) + .addParameter("redisAsyncClient", REDIS_CACHE_ASYNC_CLIENT) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) + .addParameter(keyMapperBuilder.build()) + .addParameter(valueMapperBuilder.build()) + .addStatement("return %L(config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper)", cacheImplName) + .returns(cacheTypeName) + .build() + } - else -> { - throw IllegalArgumentException("Unknown cache type: ${cacheContract.rawType}") - } + @Deprecated(message = "deprecated redis dependency") + private fun getRedisDeprecatedFunc( + cacheClass: KSClassDeclaration, + cacheContract: ParameterizedTypeName, + cacheImplName: ClassName, + cacheTypeName: TypeName, + methodName: String + ): FunSpec { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = 
REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + + val cacheContractType = cacheClass.getAllSuperTypes() + .filter { i -> i.toTypeName() == cacheContract } + .first() + + val keyMapperBuilder = ParameterSpec.builder("keyMapper", keyMapperType) + val keyTags = cacheContractType.arguments[0].parseTags() + if (keyTags.isNotEmpty()) { + keyMapperBuilder.addAnnotation(keyTags.toTagAnnotation()) + } + + val valueMapperBuilder = ParameterSpec.builder("valueMapper", valueMapperType) + val valueTags = cacheContractType.arguments[1].parseTags() + if (valueTags.isNotEmpty()) { + valueMapperBuilder.addAnnotation(valueTags.toTagAnnotation()) } + + return FunSpec.builder(methodName) + .addModifiers(KModifier.PUBLIC) + .addParameter( + ParameterSpec.builder("config", REDIS_CACHE_CONFIG) + .addAnnotation( + AnnotationSpec.builder(CommonClassNames.tag) + .addMember("%T::class", cacheTypeName) + .build() + ) + .build() + ) + .addParameter("redisClient", REDIS_CACHE_OLD_CLIENT) + .addParameter("telemetry", REDIS_TELEMETRY) + .addParameter(keyMapperBuilder.build()) + .addParameter(valueMapperBuilder.build()) + .addStatement("return %L(config, redisClient, telemetry, keyMapper, valueMapper)", cacheImplName) + .returns(cacheTypeName) + .build() } - private fun getCacheConstructor(cacheContract: ParameterizedTypeName): FunSpec { + private fun getCacheConstructor(cacheContract: ParameterizedTypeName, cacheClass: KSClassDeclaration): FunSpec { return when (cacheContract.rawType) { CAFFEINE_CACHE -> { FunSpec.constructorBuilder() .addParameter("config", CAFFEINE_CACHE_CONFIG) .addParameter("factory", CAFFEINE_CACHE_FACTORY) - .addParameter("telemetry", CAFFEINE_TELEMETRY) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) .build() } REDIS_CACHE -> { - val keyType = cacheContract.typeArguments[0] - val valueType = cacheContract.typeArguments[1] - val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) - val valueMapperType = 
REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) - FunSpec.constructorBuilder() - .addParameter("config", REDIS_CACHE_CONFIG) - .addParameter("redisClient", REDIS_CACHE_CLIENT) - .addParameter("telemetry", REDIS_TELEMETRY) - .addParameter("keyMapper", keyMapperType) - .addParameter("valueMapper", valueMapperType) - .build() + if (isRedisDeprecated(cacheClass)) { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + FunSpec.constructorBuilder() + .addParameter("config", REDIS_CACHE_CONFIG) + .addParameter("redisClient", REDIS_CACHE_SYNC_CLIENT) + .addParameter("telemetry", REDIS_TELEMETRY) + .addParameter("keyMapper", keyMapperType) + .addParameter("valueMapper", valueMapperType) + .build() + } else { + val keyType = cacheContract.typeArguments[0] + val valueType = cacheContract.typeArguments[1] + val keyMapperType = REDIS_CACHE_MAPPER_KEY.parameterizedBy(keyType) + val valueMapperType = REDIS_CACHE_MAPPER_VALUE.parameterizedBy(valueType) + FunSpec.constructorBuilder() + .addParameter("config", REDIS_CACHE_CONFIG) + .addParameter("redisSyncClient", REDIS_CACHE_SYNC_CLIENT) + .addParameter("redisAsyncClient", REDIS_CACHE_ASYNC_CLIENT) + .addParameter("telemetryFactory", CACHE_TELEMETRY_FACTORY) + .addParameter("keyMapper", keyMapperType) + .addParameter("valueMapper", valueMapperType) + .build() + } } else -> { @@ -279,6 +364,12 @@ class CacheSymbolProcessor( } } + private fun isRedisDeprecated(cacheContract: KSClassDeclaration): Boolean { + return cacheContract.getAllSuperTypes() + .filter { a -> a.toClassName() == REDIS_CACHE } + .any { a -> a.declaration.annotations.any { it.annotationType.toTypeName() == Deprecated::class.asTypeName() } } + } + private fun getCacheRedisKeyMapperForData(keyType: KSClassDeclaration): FunSpec { val prefix = 
keyType.toClassName().simpleNames.joinToString("_") val methodName = "${prefix}_RedisKeyMapper" @@ -366,8 +457,15 @@ class CacheSymbolProcessor( ?.findValueNoDefault("value")!! return when (cacheType.rawType) { - CAFFEINE_CACHE -> CodeBlock.of("%S, config, factory, telemetry", configPath) - REDIS_CACHE -> CodeBlock.of("%S, config, redisClient, telemetry, keyMapper, valueMapper", configPath) + CAFFEINE_CACHE -> CodeBlock.of("%S, config, factory, telemetryFactory", configPath) + REDIS_CACHE -> { + if (isRedisDeprecated(cacheContract)) { + CodeBlock.of("%S, config, redisClient, telemetry, keyMapper, valueMapper", configPath) + } else { + CodeBlock.of("%S, config, redisSyncClient, redisAsyncClient, telemetryFactory, keyMapper, valueMapper", configPath) + } + } + else -> throw IllegalArgumentException("Unknown cache type: ${cacheType.rawType}") } } diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt index 1849578f4..e05e769d3 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/CacheRunner.kt @@ -3,8 +3,14 @@ package ru.tinkoff.kora.cache.symbol.processor import kotlinx.coroutines.future.await import kotlinx.coroutines.runBlocking import ru.tinkoff.kora.cache.caffeine.CaffeineCacheConfig -import ru.tinkoff.kora.cache.redis.RedisCacheConfig +import ru.tinkoff.kora.cache.redis.RedisCacheAsyncClient import ru.tinkoff.kora.cache.redis.RedisCacheClient +import ru.tinkoff.kora.cache.redis.RedisCacheConfig +import ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_ConfigValueExtractor`.TelemetryConfig_Impl +import ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_LogConfig_ConfigValueExtractor`.LogConfig_Impl +import 
ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_MetricsConfig_ConfigValueExtractor`.MetricsConfig_Impl +import ru.tinkoff.kora.telemetry.common.`$TelemetryConfig_TracingConfig_ConfigValueExtractor`.TracingConfig_Impl +import ru.tinkoff.kora.telemetry.common.TelemetryConfig import java.nio.ByteBuffer import java.time.Duration import java.util.* @@ -28,6 +34,10 @@ class CacheRunner { override fun initialSize(): Int? { return null; } + + override fun telemetry(): TelemetryConfig { + return TelemetryConfig_Impl(LogConfig_Impl(false), TracingConfig_Impl(false), MetricsConfig_Impl(false, doubleArrayOf())) + } } } @@ -39,11 +49,15 @@ class CacheRunner { override fun expireAfterWrite(): Duration? = null override fun expireAfterAccess(): Duration? = null + + override fun telemetry(): TelemetryConfig { + return TelemetryConfig_Impl(LogConfig_Impl(false), TracingConfig_Impl(false), MetricsConfig_Impl(false, doubleArrayOf())) + } } } - fun lettuceClient(cache: MutableMap): RedisCacheClient { - return object : RedisCacheClient { + fun lettuceAsyncClient(cache: MutableMap): RedisCacheAsyncClient { + return object : RedisCacheAsyncClient { override fun get(key: ByteArray): CompletionStage { val r = cache[ByteBuffer.wrap(key)] return CompletableFuture.completedFuture(r?.array()) @@ -67,22 +81,22 @@ class CacheRunner { return mget(keys) } - override fun set(key: ByteArray, value: ByteArray) : CompletionStage { + override fun set(key: ByteArray, value: ByteArray): CompletionStage { cache[ByteBuffer.wrap(key)] = ByteBuffer.wrap(value) - return CompletableFuture.completedFuture(true) + return CompletableFuture.completedFuture(null) } - override fun mset(keyAndValue: MutableMap) : CompletionStage { + override fun mset(keyAndValue: MutableMap): CompletionStage { keyAndValue.forEach { (k, v) -> set(k, v) } - return CompletableFuture.completedFuture(true) + return CompletableFuture.completedFuture(null) } - override fun psetex(keyAndValue: MutableMap, expireAfterMillis: Long): 
CompletionStage { + override fun psetex(keyAndValue: MutableMap, expireAfterMillis: Long): CompletionStage { mset(keyAndValue) - return CompletableFuture.completedFuture(true) + return CompletableFuture.completedFuture(null) } - override fun psetex(key: ByteArray, value: ByteArray, expireAfterMillis: Long): CompletionStage { + override fun psetex(key: ByteArray, value: ByteArray, expireAfterMillis: Long): CompletionStage { return set(key, value) } @@ -99,9 +113,69 @@ class CacheRunner { return CompletableFuture.completedFuture(counter.toLong()) } - override fun flushAll() : CompletionStage { + override fun flushAll(): CompletionStage { + cache.clear() + return CompletableFuture.completedFuture(null) + } + } + } + + fun lettuceSyncClient(cache: MutableMap): RedisCacheClient { + return object : RedisCacheClient { + override fun get(key: ByteArray): ByteArray? { + val r = cache[ByteBuffer.wrap(key)] + return r?.array() + } + + override fun mget(keys: Array): Map { + val result: MutableMap = HashMap() + for (key in keys) { + Optional.ofNullable(cache[ByteBuffer.wrap(key)]).ifPresent { r: ByteBuffer -> + result[key] = r.array() + } + } + return result + } + + override fun getex(key: ByteArray, expireAfterMillis: Long): ByteArray? 
{ + return get(key) + } + + override fun getex(keys: Array, expireAfterMillis: Long): Map { + return mget(keys) + } + + override fun set(key: ByteArray, value: ByteArray) { + cache[ByteBuffer.wrap(key)] = ByteBuffer.wrap(value) + } + + override fun mset(keyAndValue: MutableMap) { + keyAndValue.forEach { (k, v) -> set(k, v) } + } + + override fun psetex(keyAndValue: MutableMap, expireAfterMillis: Long) { + mset(keyAndValue) + } + + override fun psetex(key: ByteArray, value: ByteArray, expireAfterMillis: Long) { + return set(key, value) + } + + override fun del(key: ByteArray): Long { + val res = if (cache.remove(ByteBuffer.wrap(key)) == null) 0L else 1L + return res + } + + override fun del(keys: Array): Long { + var counter = 0L + for (key in keys) { + counter += runBlocking { del(key) } + } + return counter + } + + override fun flushAll() { cache.clear() - return CompletableFuture.completedFuture(true) } } } diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt index 2372ddd5e..db912ed68 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheAopTests.kt @@ -41,7 +41,7 @@ class SuspendCacheAopTests : CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val serviceClass = classLoader.loadClass(SERVICE_CLASS) diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt index 
306e6a2ae..330fec07b 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheManyAopTests.kt @@ -48,15 +48,16 @@ class SuspendCacheManyAopTests : CaffeineCacheModule, RedisCacheMapperModule { cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") val cache = mutableMapOf() cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), RedisCacheKeyMapper { key -> val k1 = key.k1.toByteArray(StandardCharsets.UTF_8) val k2 = key.k2.toString().toByteArray(StandardCharsets.UTF_8) diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt index 2c108a2fc..5113184ef 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneAopTests.kt @@ -39,7 +39,7 @@ class SuspendCacheOneAopTests : CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val serviceClass = 
classLoader.loadClass(SERVICE_CLASS) ?: throw IllegalArgumentException("Expected class not found: $SERVICE_CLASS") diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt index 75f93dfb4..587eed0e2 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SuspendCacheOneManyAopTests.kt @@ -45,15 +45,16 @@ class SuspendCacheOneManyAopTests : CaffeineCacheModule, RedisCacheMapperModule cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") val cache = mutableMapOf() cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper() ) as DummyCache12 diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt index e1368ae19..cd2a800ad 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheAopTests.kt @@ -41,7 +41,7 @@ class SyncCacheAopTests : 
CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val serviceClass = classLoader.loadClass(SERVICE_CLASS) ?: throw IllegalArgumentException("Expected class not found: $SERVICE_CLASS") diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt index 999c7cccd..bbbc1e4be 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheManyAopTests.kt @@ -47,15 +47,16 @@ class SyncCacheManyAopTests : CaffeineCacheModule, RedisCacheMapperModule { cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache21 val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") val cache = mutableMapOf() cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), RedisCacheKeyMapper { key -> val k1 = key.k1.toByteArray(StandardCharsets.UTF_8) val k2 = key.k2.toString().toByteArray(StandardCharsets.UTF_8) diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt index 
fb5946eb4..5ad2095f9 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneAopTests.kt @@ -38,7 +38,7 @@ class SyncCacheOneAopTests : CaffeineCacheModule { cache = cacheClass.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val serviceClass = classLoader.loadClass(SERVICE_CLASS) ?: throw IllegalArgumentException("Expected class not found: $SERVICE_CLASS") diff --git a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt index d2fe13cb0..574026a0d 100644 --- a/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt +++ b/cache/cache-symbol-processor/src/test/kotlin/ru/tinkoff/kora/cache/symbol/processor/SyncCacheOneManyAopTests.kt @@ -44,15 +44,16 @@ class SyncCacheOneManyAopTests : CaffeineCacheModule, RedisCacheMapperModule { cache1 = cache1Class.constructors[0].newInstance( CacheRunner.getCaffeineConfig(), caffeineCacheFactory(null), - caffeineCacheTelemetry(null, null) + defaultCacheTelemetryFactory(null, null, null) ) as DummyCache11 val cache = mutableMapOf() val cache2Class = classLoader.loadClass(CACHE2_CLASS) ?: throw IllegalArgumentException("Expected class not found: $CACHE2_CLASS") cache2 = cache2Class.constructors[0].newInstance( CacheRunner.getRedisConfig(), - CacheRunner.lettuceClient(cache), - redisCacheTelemetry(null, null), + CacheRunner.lettuceSyncClient(cache), + CacheRunner.lettuceAsyncClient(cache), + defaultCacheTelemetryFactory(null, null, null), stringRedisKeyMapper(), stringRedisValueMapper() ) as 
DummyCache12 diff --git a/dependencies.gradle b/dependencies.gradle index 061357c80..25a68d0db 100644 --- a/dependencies.gradle +++ b/dependencies.gradle @@ -120,7 +120,8 @@ dependencyResolutionManagement { DependencyResolutionManagement it -> library("javapoet", "com.squareup", "javapoet").version("1.13.0") library("classgraph", "io.github.classgraph", "classgraph").version("4.8.170") - library('lettuce-core', 'io.lettuce', 'lettuce-core').version('6.5.2.RELEASE') + library('redis-jedis', 'redis.clients', 'jedis').version('5.2.0') + library('redis-lettuce', 'io.lettuce', 'lettuce-core').version('6.5.2.RELEASE') library('apache-pool', 'org.apache.commons', 'commons-pool2').version('2.12.1') library('quartz', 'org.quartz-scheduler', 'quartz').version('2.3.2') diff --git a/internal/test-redis/build.gradle b/internal/test-redis/build.gradle index 3adee5f5a..08f074261 100644 --- a/internal/test-redis/build.gradle +++ b/internal/test-redis/build.gradle @@ -1,6 +1,6 @@ dependencies { api libs.testcontainers.core - api(libs.lettuce.core) { + api(libs.redis.lettuce) { exclude group: 'io.projectreactor', module: 'reactor-core' exclude group: 'io.netty', module: 'netty-common' exclude group: 'io.netty', module: 'netty-handler' diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java index 9af7caf18..3e7cb81e8 100644 --- a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/MetricsModule.java @@ -10,6 +10,7 @@ import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; import ru.tinkoff.kora.http.server.common.HttpServerConfig; import ru.tinkoff.kora.micrometer.module.cache.MicrometerCacheMetrics; +import ru.tinkoff.kora.micrometer.module.cache.MicrometerCacheMetricsFactory; import 
ru.tinkoff.kora.micrometer.module.cache.caffeine.MicrometerCaffeineCacheMetricCollector; import ru.tinkoff.kora.micrometer.module.camunda.engine.bpmn.MicrometerCamundaEngineBpmnMetricsFactory; import ru.tinkoff.kora.micrometer.module.camunda.rest.MicrometerCamundaRestMetricsFactory; @@ -132,11 +133,17 @@ default MicrometerTimeoutMetrics micrometerTimeoutMetrics(MeterRegistry meterReg return new MicrometerTimeoutMetrics(meterRegistry); } + @Deprecated @DefaultComponent default MicrometerCacheMetrics micrometerCacheMetrics(MeterRegistry meterRegistry) { return new MicrometerCacheMetrics(meterRegistry); } + @DefaultComponent + default MicrometerCacheMetricsFactory micrometerCacheMetricsFactory(MeterRegistry meterRegistry) { + return new MicrometerCacheMetricsFactory(meterRegistry); + } + @DefaultComponent default MicrometerCaffeineCacheMetricCollector micrometerCaffeineCacheMetricsCollector(MeterRegistry meterRegistry) { return new MicrometerCaffeineCacheMetricCollector(meterRegistry); diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java index feffbb01f..1437f0cc4 100644 --- a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetrics.java @@ -12,6 +12,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; +@Deprecated public final class MicrometerCacheMetrics implements CacheMetrics { record Key(String cacheName, String origin, String operationName, String status) {} diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java 
b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java new file mode 100644 index 000000000..a462b0540 --- /dev/null +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/MicrometerCacheMetricsFactory.java @@ -0,0 +1,27 @@ +package ru.tinkoff.kora.micrometer.module.cache; + +import io.micrometer.core.instrument.MeterRegistry; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheMetricsFactory; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Objects; + +public final class MicrometerCacheMetricsFactory implements CacheMetricsFactory { + + private final MeterRegistry meterRegistry; + + public MicrometerCacheMetricsFactory(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } + + @Override + public CacheMetrics get(TelemetryConfig.MetricsConfig config, CacheTelemetryArgs args) { + if (Objects.requireNonNullElse(config.enabled(), true)) { + return new Opentelemetry120CacheMetrics(meterRegistry, config); + } else { + return null; + } + } +} diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java new file mode 100644 index 000000000..96cb840b5 --- /dev/null +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry120CacheMetrics.java @@ -0,0 +1,131 @@ +package ru.tinkoff.kora.micrometer.module.cache; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.DistributionSummary; +import io.micrometer.core.instrument.MeterRegistry; +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; 
+import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public final class Opentelemetry120CacheMetrics implements CacheMetrics { + + record DurationKey(String cacheName, String origin, String operationName, String status) {} + + record RatioKey(String cacheName, String origin, String type) {} + + record OpKey(String cacheName, String origin) {} + + private static final String METRIC_CACHE_DURATION = "cache.duration"; + private static final String METRIC_CACHE_RATIO = "cache.ratio"; + private static final String METRIC_CACHE_HIT = "cache.hit"; + private static final String METRIC_CACHE_MISS = "cache.miss"; + + private static final String TAG_OPERATION = "operation"; + private static final String TAG_CACHE_NAME = "cache"; + private static final String TAG_ORIGIN = "origin"; + private static final String TAG_STATUS = "status"; + private static final String TAG_TYPE = "type"; + + private static final String STATUS_SUCCESS = "success"; + private static final String STATUS_FAILED = "failed"; + + private static final String TYPE_HIT = "hit"; + private static final String TYPE_MISS = "miss"; + + private final ConcurrentHashMap durations = new ConcurrentHashMap<>(); + private final ConcurrentHashMap counters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap missCounters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap hitCounters = new ConcurrentHashMap<>(); + + private final MeterRegistry meterRegistry; + private final TelemetryConfig.MetricsConfig config; + + public Opentelemetry120CacheMetrics(MeterRegistry meterRegistry, TelemetryConfig.MetricsConfig config) { + this.meterRegistry = meterRegistry; + this.config = config; + } + + @Override + public void recordSuccess(@Nonnull CacheTelemetryOperation op, long 
durationInNanos, @Nullable Object valueFromCache) { + final DurationKey key = new DurationKey(op.cacheName(), op.origin(), op.name(), STATUS_SUCCESS); + durations.computeIfAbsent(key, k -> duration(key, null)) + .record((double) durationInNanos / 1_000_000); + + + if ("GET".startsWith(op.name())) { + final String ratioType; + var operationKey = new OpKey(op.cacheName(), op.origin()); + if (valueFromCache == null + || valueFromCache instanceof Collection vc && !vc.isEmpty() + || valueFromCache instanceof Map mc && !mc.isEmpty()) { + ratioType = TYPE_MISS; + + var counter = missCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_MISS) + .description("!!! DEPRECATED !!! Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } else { + ratioType = TYPE_HIT; + + var counter = hitCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_HIT) + .description("!!! DEPRECATED !!! 
Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + + final RatioKey ratioKey = new RatioKey(op.cacheName(), op.origin(), ratioType); + var counter = counters.computeIfAbsent(ratioKey, k -> { + var builder = Counter.builder(METRIC_CACHE_RATIO) + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()) + .tag(TAG_TYPE, ratioType); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + } + + @Override + public void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception) { + final DurationKey key = new DurationKey(operation.cacheName(), operation.origin(), operation.name(), STATUS_FAILED); + durations.computeIfAbsent(key, k -> duration(key, exception)) + .record((double) durationInNanos / 1_000_000); + } + + private DistributionSummary duration(DurationKey key, @Nullable Throwable exception) { + var builder = DistributionSummary.builder(METRIC_CACHE_DURATION) + .serviceLevelObjectives(this.config.slo(TelemetryConfig.MetricsConfig.OpentelemetrySpec.V120)) + .baseUnit("milliseconds") + .tag(TAG_CACHE_NAME, key.cacheName()) + .tag(TAG_OPERATION, key.operationName()) + .tag(TAG_ORIGIN, key.origin()) + .tag(TAG_STATUS, key.status()); + + if (exception != null) { + builder.tag("error", exception.getClass().getCanonicalName()); + } else { + builder.tag("error", ""); + } + + return builder.register(meterRegistry); + } +} diff --git a/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java new file mode 100644 index 000000000..5cbaa7b9c --- /dev/null +++ b/micrometer/micrometer-module/src/main/java/ru/tinkoff/kora/micrometer/module/cache/Opentelemetry123CacheMetrics.java @@ -0,0 +1,133 
@@ +package ru.tinkoff.kora.micrometer.module.cache; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.DistributionSummary; +import io.micrometer.core.instrument.MeterRegistry; +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.cache.telemetry.CacheMetrics; +import ru.tinkoff.kora.cache.telemetry.CacheTelemetryOperation; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public final class Opentelemetry123CacheMetrics implements CacheMetrics { + + record DurationKey(String cacheName, String origin, String operationName, String status) { + } + + record RatioKey(String cacheName, String origin, String type) { + } + + record OpKey(String cacheName, String origin) { + } + + private static final String METRIC_CACHE_DURATION = "cache.duration"; + private static final String METRIC_CACHE_RATIO = "cache.ratio"; + private static final String METRIC_CACHE_HIT = "cache.hit"; + private static final String METRIC_CACHE_MISS = "cache.miss"; + + private static final String TAG_OPERATION = "operation"; + private static final String TAG_CACHE_NAME = "cache"; + private static final String TAG_ORIGIN = "origin"; + private static final String TAG_STATUS = "status"; + private static final String TAG_TYPE = "type"; + + private static final String STATUS_SUCCESS = "success"; + private static final String STATUS_FAILED = "failed"; + + private static final String TYPE_HIT = "hit"; + private static final String TYPE_MISS = "miss"; + + private final ConcurrentHashMap durations = new ConcurrentHashMap<>(); + private final ConcurrentHashMap counters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap missCounters = new ConcurrentHashMap<>(); + @Deprecated(forRemoval = true) + private final ConcurrentHashMap hitCounters = new ConcurrentHashMap<>(); + + 
private final MeterRegistry meterRegistry; + private final TelemetryConfig.MetricsConfig config; + + public Opentelemetry123CacheMetrics(MeterRegistry meterRegistry, TelemetryConfig.MetricsConfig config) { + this.meterRegistry = meterRegistry; + this.config = config; + } + + @Override + public void recordSuccess(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Object valueFromCache) { + final DurationKey key = new DurationKey(operation.cacheName(), operation.origin(), operation.name(), STATUS_SUCCESS); + durations.computeIfAbsent(key, k -> duration(key, null)) + .record((double) durationInNanos / 1_000_000_000); + + if ("GET".startsWith(operation.name())) { + final String ratioType; + var operationKey = new OpKey(operation.cacheName(), operation.origin()); + if (valueFromCache == null + || valueFromCache instanceof Collection vc && !vc.isEmpty() + || valueFromCache instanceof Map mc && !mc.isEmpty()) { + ratioType = TYPE_MISS; + + var counter = missCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_MISS) + .description("!!! DEPRECATED !!! Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } else { + ratioType = TYPE_HIT; + + var counter = hitCounters.computeIfAbsent(operationKey, k -> { + var builder = Counter.builder(METRIC_CACHE_HIT) + .description("!!! DEPRECATED !!! 
Please use cache.ratio metric") + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + + final RatioKey ratioKey = new RatioKey(operation.cacheName(), operation.origin(), ratioType); + var counter = counters.computeIfAbsent(ratioKey, k -> { + var builder = Counter.builder(METRIC_CACHE_RATIO) + .tag(TAG_CACHE_NAME, k.cacheName()) + .tag(TAG_ORIGIN, k.origin()) + .tag(TAG_TYPE, ratioType); + + return builder.register(meterRegistry); + }); + counter.increment(); + } + } + + @Override + public void recordFailure(@Nonnull CacheTelemetryOperation operation, long durationInNanos, @Nullable Throwable exception) { + final DurationKey key = new DurationKey(operation.cacheName(), operation.origin(), operation.name(), STATUS_FAILED); + durations.computeIfAbsent(key, k -> duration(key, exception)) + .record((double) durationInNanos / 1_000_000_000); + } + + private DistributionSummary duration(DurationKey key, @Nullable Throwable exception) { + var builder = DistributionSummary.builder(METRIC_CACHE_DURATION) + .serviceLevelObjectives(this.config.slo(TelemetryConfig.MetricsConfig.OpentelemetrySpec.V123)) + .baseUnit("s") + .tag(TAG_CACHE_NAME, key.cacheName()) + .tag(TAG_OPERATION, key.operationName()) + .tag(TAG_ORIGIN, key.origin()) + .tag(TAG_STATUS, key.status()); + + if (exception != null) { + builder.tag("error", exception.getClass().getCanonicalName()); + } else { + builder.tag("error", ""); + } + + return builder.register(meterRegistry); + } +} diff --git a/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java index 118e45921..e7c0eb3a0 100644 --- a/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java +++ 
b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/OpentelemetryModule.java @@ -3,6 +3,7 @@ import io.opentelemetry.api.trace.Tracer; import ru.tinkoff.kora.common.DefaultComponent; import ru.tinkoff.kora.opentelemetry.module.cache.OpentelementryCacheTracer; +import ru.tinkoff.kora.opentelemetry.module.cache.OpentelementryCacheTracerFactory; import ru.tinkoff.kora.opentelemetry.module.camunda.engine.bpmn.OpentelemetryCamundaEngineBpmnTracerFactory; import ru.tinkoff.kora.opentelemetry.module.camunda.rest.OpentelemetryCamundaRestTracerFactory; import ru.tinkoff.kora.opentelemetry.module.camunda.zeebe.worker.OpentelemetryZeebeWorkerTracerFactory; @@ -71,11 +72,17 @@ default OpentelemetrySchedulingTracerFactory opentelemetrySchedulingTracerFactor return new OpentelemetrySchedulingTracerFactory(tracer); } + @Deprecated @DefaultComponent default OpentelementryCacheTracer opentelemetryCacheTracer(Tracer tracer) { return new OpentelementryCacheTracer(tracer); } + @DefaultComponent + default OpentelementryCacheTracerFactory opentelemetryCacheTracerFactory(Tracer tracer) { + return new OpentelementryCacheTracerFactory(tracer); + } + @DefaultComponent default OpentelemetryS3ClientTracerFactory opentelemetryS3ClientTracerFactory(Tracer tracer) { return new OpentelemetryS3ClientTracerFactory(tracer); diff --git a/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java new file mode 100644 index 000000000..461c67178 --- /dev/null +++ b/opentelemetry/opentelemetry-module/src/main/java/ru/tinkoff/kora/opentelemetry/module/cache/OpentelementryCacheTracerFactory.java @@ -0,0 +1,29 @@ +package ru.tinkoff.kora.opentelemetry.module.cache; + +import io.opentelemetry.api.trace.Tracer; +import jakarta.annotation.Nullable; +import 
ru.tinkoff.kora.cache.telemetry.CacheTelemetryArgs; +import ru.tinkoff.kora.cache.telemetry.CacheTracer; +import ru.tinkoff.kora.cache.telemetry.CacheTracerFactory; +import ru.tinkoff.kora.telemetry.common.TelemetryConfig; + +import java.util.Objects; + +public final class OpentelementryCacheTracerFactory implements CacheTracerFactory { + + private final Tracer tracer; + + public OpentelementryCacheTracerFactory(Tracer tracer) { + this.tracer = tracer; + } + + @Nullable + @Override + public CacheTracer get(TelemetryConfig.TracingConfig tracing, CacheTelemetryArgs args) { + if (Objects.requireNonNullElse(tracing.enabled(), true)) { + return new OpentelementryCacheTracer(tracer); + } else { + return null; + } + } +} diff --git a/redis/redis-jedis/build.gradle b/redis/redis-jedis/build.gradle new file mode 100644 index 000000000..06ae3be25 --- /dev/null +++ b/redis/redis-jedis/build.gradle @@ -0,0 +1,12 @@ +dependencies { + annotationProcessor project(":config:config-annotation-processor") + + api libs.redis.jedis + + implementation project(":config:config-common") + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java new file mode 100644 index 000000000..800dbfd85 --- /dev/null +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisConfig.java @@ -0,0 +1,46 @@ +package ru.tinkoff.kora.redis.jedis; + +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; +import java.util.List; + +@ConfigValueExtractor +public interface JedisConfig { + + List uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() 
{ + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(10); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(20); + } + + enum Protocol { + + /** + * Redis 2 to Redis 5 + */ + RESP2, + /** + * Redis 6+ + */ + RESP3 + } +} diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java new file mode 100644 index 000000000..10239a82e --- /dev/null +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisFactory.java @@ -0,0 +1,96 @@ +package ru.tinkoff.kora.redis.jedis; + +import jakarta.annotation.Nonnull; +import redis.clients.jedis.*; +import redis.clients.jedis.util.JedisURIHelper; + +import java.net.URI; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +final class JedisFactory { + + private JedisFactory() {} + + @Nonnull + static UnifiedJedis build(JedisConfig config) { + return (config.uri().size() == 1) + ? 
buildRedisClient(config) + : buildRedisClusterClient(config); + } + + @Nonnull + private static JedisPooled buildRedisClient(JedisConfig config) { + URI uri = URI.create(config.uri().get(0)); + + var jedisConfigBuilder = DefaultJedisClientConfig.builder() + .user(JedisURIHelper.getUser(uri)) + .password(JedisURIHelper.getPassword(uri)) + .database(JedisURIHelper.getDBIndex(uri)) + .ssl(JedisURIHelper.isRedisSSLScheme(uri)); + + var protocol = switch (config.protocol()) { + case RESP3 -> RedisProtocol.RESP3; + case RESP2 -> RedisProtocol.RESP2; + }; + jedisConfigBuilder = jedisConfigBuilder.protocol(protocol); + + var uriProtocol = JedisURIHelper.getRedisProtocol(uri); + if (uriProtocol != null) { + jedisConfigBuilder = jedisConfigBuilder.protocol(uriProtocol); + } + if (config.database() != null) { + jedisConfigBuilder = jedisConfigBuilder.database(config.database()); + } + if (config.user() != null) { + jedisConfigBuilder = jedisConfigBuilder.user(config.user()); + } + if (config.password() != null) { + jedisConfigBuilder = jedisConfigBuilder.password(config.password()); + } + + return new JedisPooled(JedisURIHelper.getHostAndPort(uri), jedisConfigBuilder.build()); + } + + @Nonnull + private static JedisCluster buildRedisClusterClient(JedisConfig config) { + List uris = config.uri().stream() + .map(URI::create) + .toList(); + + Set hostAndPorts = uris.stream() + .map(JedisURIHelper::getHostAndPort) + .collect(Collectors.toSet()); + + URI uri = uris.get(0); + var jedisConfigBuilder = DefaultJedisClientConfig.builder() + .user(JedisURIHelper.getUser(uri)) + .password(JedisURIHelper.getPassword(uri)) + .database(JedisURIHelper.getDBIndex(uri)) + .protocol(JedisURIHelper.getRedisProtocol(uri)) + .ssl(JedisURIHelper.isRedisSSLScheme(uri)); + + var protocol = switch (config.protocol()) { + case RESP3 -> RedisProtocol.RESP3; + case RESP2 -> RedisProtocol.RESP2; + }; + jedisConfigBuilder = jedisConfigBuilder.protocol(protocol); + + var uriProtocol = 
JedisURIHelper.getRedisProtocol(uri); + if (uriProtocol != null) { + jedisConfigBuilder = jedisConfigBuilder.protocol(uriProtocol); + } + if (config.database() != null) { + jedisConfigBuilder = jedisConfigBuilder.database(config.database()); + } + if (config.user() != null) { + jedisConfigBuilder = jedisConfigBuilder.user(config.user()); + } + if (config.password() != null) { + jedisConfigBuilder = jedisConfigBuilder.password(config.password()); + } + + return new JedisCluster(hostAndPorts, jedisConfigBuilder.build()); + } +} diff --git a/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java new file mode 100644 index 000000000..4079b3f02 --- /dev/null +++ b/redis/redis-jedis/src/main/java/ru/tinkoff/kora/redis/jedis/JedisModule.java @@ -0,0 +1,17 @@ +package ru.tinkoff.kora.redis.jedis; + +import redis.clients.jedis.UnifiedJedis; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +public interface JedisModule { + + default JedisConfig jedisConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("jedis"); + return extractor.extract(value); + } + + default UnifiedJedis jedisClient(JedisConfig config) { + return JedisFactory.build(config); + } +} diff --git a/redis/redis-lettuce/build.gradle b/redis/redis-lettuce/build.gradle new file mode 100644 index 000000000..ba982f2ac --- /dev/null +++ b/redis/redis-lettuce/build.gradle @@ -0,0 +1,22 @@ +dependencies { + annotationProcessor project(":config:config-annotation-processor") + + api libs.apache.pool + api(libs.redis.lettuce) { + exclude group: 'io.projectreactor', module: 'reactor-core' + exclude group: 'io.netty', module: 'netty-common' + exclude group: 'io.netty', module: 'netty-handler' + exclude group: 'io.netty', module: 'netty-transport' + } + implementation libs.reactor.core + implementation libs.netty.common + 
implementation libs.netty.handlers + implementation libs.netty.transports + + implementation project(":config:config-common") + + testImplementation project(":internal:test-logging") + testImplementation project(":internal:test-redis") +} + +apply from: "${project.rootDir}/gradle/in-test-generated.gradle" diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java new file mode 100644 index 000000000..dd7635c0a --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceByteBufferCodec.java @@ -0,0 +1,37 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; + +import java.nio.ByteBuffer; + +final class LettuceByteBufferCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceByteBufferCodec(); + + @Override + public ByteBuffer decodeKey(ByteBuffer bytes) { + return copy(bytes); + } + + @Override + public ByteBuffer decodeValue(ByteBuffer bytes) { + return copy(bytes); + } + + @Override + public ByteBuffer encodeKey(ByteBuffer key) { + return copy(key); + } + + @Override + public ByteBuffer encodeValue(ByteBuffer value) { + return copy(value); + } + + private static ByteBuffer copy(ByteBuffer source) { + ByteBuffer copy = ByteBuffer.allocate(source.remaining()); + copy.put(source); + copy.flip(); + return copy; + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java new file mode 100644 index 000000000..890343159 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceCompositeRedisCodec.java @@ -0,0 +1,39 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.internal.LettuceAssert; + +import 
java.nio.ByteBuffer; + +final class LettuceCompositeRedisCodec implements RedisCodec { + + private final RedisCodec keyCodec; + private final RedisCodec valueCodec; + + LettuceCompositeRedisCodec(RedisCodec keyCodec, RedisCodec valueCodec) { + LettuceAssert.notNull(keyCodec, "Key codec must not be null"); + LettuceAssert.notNull(valueCodec, "Value codec must not be null"); + this.keyCodec = keyCodec; + this.valueCodec = valueCodec; + } + + @Override + public K decodeKey(ByteBuffer bytes) { + return keyCodec.decodeKey(bytes); + } + + @Override + public V decodeValue(ByteBuffer bytes) { + return valueCodec.decodeValue(bytes); + } + + @Override + public ByteBuffer encodeKey(K key) { + return keyCodec.encodeKey(key); + } + + @Override + public ByteBuffer encodeValue(V value) { + return valueCodec.encodeValue(value); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java new file mode 100644 index 000000000..34d01f62a --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceConfig.java @@ -0,0 +1,77 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.SocketOptions; +import jakarta.annotation.Nullable; +import ru.tinkoff.kora.config.common.annotation.ConfigValueExtractor; + +import java.time.Duration; +import java.util.List; + +@ConfigValueExtractor +public interface LettuceConfig { + + List uri(); + + @Nullable + Integer database(); + + @Nullable + String user(); + + @Nullable + String password(); + + default Protocol protocol() { + return Protocol.RESP3; + } + + default Duration socketTimeout() { + return Duration.ofSeconds(SocketOptions.DEFAULT_CONNECT_TIMEOUT); + } + + default Duration commandTimeout() { + return Duration.ofSeconds(20); + } + + PoolConfig pool(); + + enum Protocol { + + /** + * Redis 2 to Redis 5 + */ + RESP2, + /** + * Redis 6+ + */ + RESP3 + } + + 
@ConfigValueExtractor + interface PoolConfig { + + default int maxTotal() { + return Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4; + } + + default int maxIdle() { + return Math.max(Runtime.getRuntime().availableProcessors(), 1) * 4; + } + + default int minIdle() { + return 0; + } + + default boolean validateOnAcquire() { + return false; + } + + default boolean validateOnCreate() { + return false; + } + + default boolean validateOnRelease() { + return false; + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java new file mode 100644 index 000000000..35cac642a --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceFactory.java @@ -0,0 +1,134 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.*; +import io.lettuce.core.cluster.ClusterClientOptions; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.RedisClusterURIUtil; +import io.lettuce.core.protocol.ProtocolVersion; +import jakarta.annotation.Nonnull; + +import java.net.URI; +import java.time.Duration; +import java.util.List; + +final class LettuceFactory { + + private LettuceFactory() {} + + @Nonnull + static AbstractRedisClient build(LettuceConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + + final List mappedRedisUris = buildRedisURI(config); + + return (mappedRedisUris.size() == 1) + ? 
buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion) + : buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClient(LettuceConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClusterClientInternal(mappedRedisUris, commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClient buildRedisClient(LettuceConfig config) { + final Duration commandTimeout = config.commandTimeout(); + final Duration socketTimeout = config.socketTimeout(); + final ProtocolVersion protocolVersion = switch (config.protocol()) { + case RESP2 -> ProtocolVersion.RESP2; + case RESP3 -> ProtocolVersion.RESP3; + }; + final List mappedRedisUris = buildRedisURI(config); + return buildRedisClientInternal(mappedRedisUris.get(0), commandTimeout, socketTimeout, protocolVersion); + } + + @Nonnull + private static RedisClusterClient buildRedisClusterClientInternal(List redisURIs, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClusterClient client = RedisClusterClient.create(redisURIs); + client.setOptions(ClusterClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.DEFAULT) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + 
.build()) + .build()); + + return client; + } + + @Nonnull + private static RedisClient buildRedisClientInternal(RedisURI redisURI, + Duration commandTimeout, + Duration socketTimeout, + ProtocolVersion protocolVersion) { + final RedisClient client = RedisClient.create(redisURI); + client.setOptions(ClientOptions.builder() + .autoReconnect(true) + .publishOnScheduler(true) + .suspendReconnectOnProtocolFailure(false) + .disconnectedBehavior(ClientOptions.DisconnectedBehavior.REJECT_COMMANDS) + .protocolVersion(protocolVersion) + .timeoutOptions(TimeoutOptions.builder() + .connectionTimeout() + .fixedTimeout(commandTimeout) + .timeoutCommands(true) + .build()) + .socketOptions(SocketOptions.builder() + .keepAlive(true) + .connectTimeout(socketTimeout) + .build()) + .build()); + + return client; + } + + static List buildRedisURI(LettuceConfig config) { + final Integer database = config.database(); + final String user = config.user(); + final String password = config.password(); + + return config.uri().stream() + .flatMap(uri -> RedisClusterURIUtil.toRedisURIs(URI.create(uri)).stream()) + .map(redisURI -> { + RedisURI.Builder builder = RedisURI.builder(redisURI); + if (database != null) { + builder = builder.withDatabase(database); + } + if (user != null && password != null) { + builder = builder.withAuthentication(user, password); + } else if (password != null) { + builder = builder.withPassword(((CharSequence) password)); + } + + return builder + .withTimeout(config.commandTimeout()) + .build(); + }) + .toList(); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java new file mode 100644 index 000000000..9e075d82e --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceIntegerCodec.java @@ -0,0 +1,32 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; 
+import io.lettuce.core.codec.StringCodec; + +import java.nio.ByteBuffer; + +final class LettuceIntegerCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceIntegerCodec(); + + @Override + public Integer decodeKey(ByteBuffer bytes) { + String s = StringCodec.ASCII.decodeKey(bytes); + return s == null ? null : Integer.valueOf(s); + } + + @Override + public Integer decodeValue(ByteBuffer bytes) { + return decodeKey(bytes); + } + + @Override + public ByteBuffer encodeKey(Integer key) { + return StringCodec.ASCII.encodeKey(key == null ? null : key.toString()); + } + + @Override + public ByteBuffer encodeValue(Integer value) { + return encodeKey(value); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java new file mode 100644 index 000000000..742c972eb --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecycleConnectionWrapper.java @@ -0,0 +1,53 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.api.StatefulConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.util.TimeUtils; + +final class LettuceLifecycleConnectionWrapper implements Lifecycle, Wrapped> { + + private static final Logger logger = LoggerFactory.getLogger(LettuceFactory.class); + + private final ConnectionProvider provider; + + private volatile StatefulConnection connection; + + @FunctionalInterface + interface ConnectionProvider { + StatefulConnection create() throws Exception; + } + + LettuceLifecycleConnectionWrapper(ConnectionProvider provider) { + this.provider = provider; + } + + @Override + public StatefulConnection value() { + return this.connection; + } + + @Override + public void init() 
throws Exception { + logger.debug("Lettuce Redis connection starting..."); + final long started = TimeUtils.started(); + + this.connection = provider.create(); + + logger.info("Lettuce Redis connection started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + if (this.connection != null) { + logger.debug("Lettuce Redis connection stopping..."); + final long stopping = TimeUtils.started(); + + this.connection.close(); + + logger.info("Lettuce Redis connection stopped in {}", TimeUtils.tookForLogging(stopping)); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java new file mode 100644 index 000000000..7a9e63f0b --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolAsyncWrapper.java @@ -0,0 +1,54 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.api.StatefulConnection; +import io.lettuce.core.support.AsyncPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.util.TimeUtils; + +final class LettuceLifecyclePoolAsyncWrapper implements Lifecycle, Wrapped>> { + + private static final Logger logger = LoggerFactory.getLogger(LettuceFactory.class); + + private final PoolProvider provider; + + private volatile AsyncPool> connection; + + @FunctionalInterface + interface PoolProvider { + AsyncPool> create() throws Exception; + } + + LettuceLifecyclePoolAsyncWrapper(PoolProvider provider) { + this.provider = provider; + } + + @Override + public AsyncPool> value() { + return this.connection; + } + + @Override + public void init() throws Exception { + logger.debug("Lettuce Redis async pool starting..."); + final long started = TimeUtils.started(); + + 
this.connection = provider.create(); + + logger.info("Lettuce Redis async pool started in {}", TimeUtils.tookForLogging(started)); + } + + @Override + public void release() { + if (this.connection != null) { + logger.debug("Lettuce Redis async pool stopping..."); + final long stopping = TimeUtils.started(); + + this.connection.close(); + + logger.info("Lettuce Redis async pool stopped in {}", TimeUtils.tookForLogging(stopping)); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java new file mode 100644 index 000000000..fcf5294a3 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLifecyclePoolSyncWrapper.java @@ -0,0 +1,54 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.api.StatefulConnection; +import org.apache.commons.pool2.ObjectPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ru.tinkoff.kora.application.graph.Lifecycle; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.util.TimeUtils; + +final class LettuceLifecyclePoolSyncWrapper implements Lifecycle, Wrapped>> { + + private static final Logger logger = LoggerFactory.getLogger(LettuceFactory.class); + + private final PoolProvider provider; + + private volatile ObjectPool> pool; + + @FunctionalInterface + interface PoolProvider { + ObjectPool> create() throws Exception; + } + + LettuceLifecyclePoolSyncWrapper(PoolProvider provider) { + this.provider = provider; + } + + @Override + public ObjectPool> value() { + return this.pool; + } + + @Override + public void init() throws Exception { + logger.debug("Lettuce Redis sync pool starting..."); + final long started = TimeUtils.started(); + + this.pool = provider.create(); + + logger.info("Lettuce Redis sync pool started in {}", TimeUtils.tookForLogging(started)); + } + + @Override 
+ public void release() { + if (this.pool != null) { + logger.debug("Lettuce Redis sync pool stopping..."); + final long stopping = TimeUtils.started(); + + this.pool.close(); + + logger.info("Lettuce Redis sync pool stopped in {}", TimeUtils.tookForLogging(stopping)); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java new file mode 100644 index 000000000..0190aef74 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceLongCodec.java @@ -0,0 +1,32 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.codec.StringCodec; + +import java.nio.ByteBuffer; + +final class LettuceLongCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceLongCodec(); + + @Override + public Long decodeKey(ByteBuffer bytes) { + String s = StringCodec.ASCII.decodeKey(bytes); + return s == null ? null : Long.valueOf(s); + } + + @Override + public Long decodeValue(ByteBuffer bytes) { + return decodeKey(bytes); + } + + @Override + public ByteBuffer encodeKey(Long key) { + return StringCodec.ASCII.encodeKey(key == null ? 
null : key.toString()); + } + + @Override + public ByteBuffer encodeValue(Long value) { + return encodeKey(value); + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java new file mode 100644 index 000000000..26157d8ec --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceModule.java @@ -0,0 +1,167 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.AbstractRedisClient; +import io.lettuce.core.RedisURI; +import io.lettuce.core.api.StatefulConnection; +import io.lettuce.core.api.StatefulRedisConnection; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; +import io.lettuce.core.cluster.api.reactive.RedisClusterReactiveCommands; +import io.lettuce.core.cluster.api.sync.RedisClusterCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.codec.RedisCodec; +import io.lettuce.core.codec.StringCodec; +import io.lettuce.core.support.AsyncConnectionPoolSupport; +import io.lettuce.core.support.AsyncPool; +import io.lettuce.core.support.BoundedPoolConfig; +import io.lettuce.core.support.ConnectionPoolSupport; +import org.apache.commons.pool2.ObjectPool; +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; +import ru.tinkoff.kora.application.graph.Wrapped; +import ru.tinkoff.kora.common.DefaultComponent; +import ru.tinkoff.kora.config.common.Config; +import ru.tinkoff.kora.config.common.extractor.ConfigValueExtractor; + +import java.nio.ByteBuffer; +import java.util.List; + +public interface LettuceModule { + + default LettuceConfig lettuceConfig(Config config, ConfigValueExtractor extractor) { + var value = config.get("lettuce"); + return extractor.extract(value); + } + + default AbstractRedisClient 
lettuceClient(LettuceConfig config) { + return LettuceFactory.build(config); + } + + @DefaultComponent + default RedisCodec lettuceRedisVoidCodec() { + return LettuceVoidCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisByteArrayCodec() { + return ByteArrayCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisByteBufferCodec() { + return LettuceByteBufferCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisStringCodec() { + return StringCodec.UTF8; + } + + @DefaultComponent + default RedisCodec lettuceRedisLongCodec() { + return LettuceLongCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisIntegerCodec() { + return LettuceIntegerCodec.INSTANCE; + } + + @DefaultComponent + default RedisCodec lettuceRedisCompositeCodec(RedisCodec keyCodec, + RedisCodec valueCodec) { + return new LettuceCompositeRedisCodec<>(keyCodec, valueCodec); + } + + @DefaultComponent + default Wrapped> lettuceStatefulConnection(AbstractRedisClient redisClient, + RedisCodec codec) { + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + return new LettuceLifecycleConnectionWrapper<>(() -> rc.connect(codec)); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceLifecycleConnectionWrapper<>(() -> rcc.connect(codec)); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + @DefaultComponent + default Wrapped>> lettuceSyncConnectionPool(AbstractRedisClient redisClient, + LettuceConfig lettuceConfig, + RedisCodec codec) { + final GenericObjectPoolConfig> poolConfig = new GenericObjectPoolConfig<>(); + poolConfig.setMaxTotal(lettuceConfig.pool().maxTotal()); + poolConfig.setMaxIdle(lettuceConfig.pool().maxIdle()); + poolConfig.setMinIdle(lettuceConfig.pool().minIdle()); + poolConfig.setTestOnBorrow(lettuceConfig.pool().validateOnAcquire()); + poolConfig.setTestOnCreate(lettuceConfig.pool().validateOnCreate()); + 
poolConfig.setTestOnReturn(lettuceConfig.pool().validateOnRelease()); + + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + final List redisURIs = LettuceFactory.buildRedisURI(lettuceConfig); + var redisURI = redisURIs.size() == 1 ? redisURIs.get(0) : null; + return new LettuceLifecyclePoolSyncWrapper<>(() -> ConnectionPoolSupport.createGenericObjectPool(() -> rc.connect(codec, redisURI), poolConfig)); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceLifecyclePoolSyncWrapper<>(() -> ConnectionPoolSupport.createGenericObjectPool(() -> rcc.connect(codec), poolConfig, false)); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + @DefaultComponent + default Wrapped>> lettuceAsyncConnectionPool(AbstractRedisClient redisClient, + LettuceConfig lettuceConfig, + RedisCodec codec) { + final BoundedPoolConfig poolConfig = BoundedPoolConfig.builder() + .maxTotal(lettuceConfig.pool().maxTotal()) + .maxIdle(lettuceConfig.pool().maxIdle()) + .minIdle(lettuceConfig.pool().minIdle()) + .testOnAcquire(lettuceConfig.pool().validateOnAcquire()) + .testOnCreate(lettuceConfig.pool().validateOnCreate()) + .testOnRelease(lettuceConfig.pool().validateOnRelease()) + .build(); + + if (redisClient instanceof io.lettuce.core.RedisClient rc) { + final List redisURIs = LettuceFactory.buildRedisURI(lettuceConfig); + var redisURI = redisURIs.size() == 1 ? 
redisURIs.get(0) : null; + return new LettuceLifecyclePoolAsyncWrapper<>(() -> AsyncConnectionPoolSupport.createBoundedObjectPool(() -> rc.connectAsync(codec, redisURI).thenApply(v -> v), poolConfig)); + } else if (redisClient instanceof RedisClusterClient rcc) { + return new LettuceLifecyclePoolAsyncWrapper<>(() -> AsyncConnectionPoolSupport.createBoundedObjectPool(() -> rcc.connectAsync(codec).thenApply(v -> v), poolConfig, false)); + } else { + throw new UnsupportedOperationException("Unknown Redis Client: " + redisClient.getClass()); + } + } + + @DefaultComponent + default RedisClusterCommands lettuceRedisClusterSyncCommands(StatefulConnection connection) { + if (connection instanceof StatefulRedisConnection rc) { + return rc.sync(); + } else if (connection instanceof StatefulRedisClusterConnection rcc) { + return rcc.sync(); + } else { + throw new UnsupportedOperationException("Unknown Redis Connection: " + connection.getClass()); + } + } + + @DefaultComponent + default RedisClusterAsyncCommands lettuceRedisClusterAsyncCommands(StatefulConnection connection) { + if (connection instanceof StatefulRedisConnection rc) { + return rc.async(); + } else if (connection instanceof StatefulRedisClusterConnection rcc) { + return rcc.async(); + } else { + throw new UnsupportedOperationException("Unknown Redis Connection: " + connection.getClass()); + } + } + + @DefaultComponent + default RedisClusterReactiveCommands lettuceRedisClusterReactiveCommands(StatefulConnection connection) { + if (connection instanceof StatefulRedisConnection rc) { + return rc.reactive(); + } else if (connection instanceof StatefulRedisClusterConnection rcc) { + return rcc.reactive(); + } else { + throw new UnsupportedOperationException("Unknown Redis Connection: " + connection.getClass()); + } + } +} diff --git a/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java new file 
mode 100644 index 000000000..7fae76835 --- /dev/null +++ b/redis/redis-lettuce/src/main/java/ru/tinkoff/kora/redis/lettuce/LettuceVoidCodec.java @@ -0,0 +1,30 @@ +package ru.tinkoff.kora.redis.lettuce; + +import io.lettuce.core.codec.RedisCodec; + +import java.nio.ByteBuffer; + +final class LettuceVoidCodec implements RedisCodec { + + static final RedisCodec INSTANCE = new LettuceVoidCodec(); + + @Override + public Void decodeKey(ByteBuffer bytes) { + return null; + } + + @Override + public Void decodeValue(ByteBuffer bytes) { + return null; + } + + @Override + public ByteBuffer encodeKey(Void key) { + return null; + } + + @Override + public ByteBuffer encodeValue(Void value) { + return null; + } +} diff --git a/settings.gradle b/settings.gradle index 18f037917..45113761d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -96,7 +96,10 @@ include( 'cache:cache-annotation-processor', 'cache:cache-symbol-processor', 'cache:cache-caffeine', - 'cache:cache-redis', + 'cache:cache-redis', // deprecated + 'cache:cache-redis-common', + 'cache:cache-redis-lettuce', + 'cache:cache-redis-jedis', 'validation:validation-common', 'validation:validation-module', 'validation:validation-annotation-processor', @@ -104,6 +107,8 @@ include( 'test:test-junit5', 'mapstruct:mapstruct-java-extension', 'mapstruct:mapstruct-ksp-extension', + 'redis:redis-lettuce', + 'redis:redis-jedis', 'experimental:s3-client-annotation-processor', 'experimental:s3-client-symbol-processor', 'experimental:s3-client-common',