Commit 8e1d85d

Replaced kafka-streams with a custom consumer -> db -> processor solution
1 parent a854132 commit 8e1d85d

32 files changed, +713 −430 lines changed
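The commit message summarizes the new shape of the application: instead of a Kafka Streams topology, a plain Kafka consumer persists incoming records to the database and a separate reader task processes them afterwards. The sketch below only illustrates that consumer -> db -> processor shape; RecordStore, nextBatch and the simulated records are invented for the example and are not classes from this repository.

// Hypothetical sketch of the consumer -> db -> processor pipeline (Kotlin).
import java.util.concurrent.ConcurrentLinkedQueue

// In-memory stand-in for the database table the consumer writes to.
class RecordStore {
    private val rows = ConcurrentLinkedQueue<String>()
    fun insert(row: String) { rows.add(row) }
    fun nextBatch(max: Int): List<String> =
        generateSequence { rows.poll() }.take(max).toList()
}

fun main() {
    val store = RecordStore()
    // 1) "consumer" step: poll Kafka (simulated here) and persist the raw records.
    val consumerThread = Thread { (1..10).forEach { store.insert("record-$it") } }
    consumerThread.start()
    consumerThread.join()
    // 2) "processor" step: read the persisted records back and do the actual work.
    while (true) {
        val batch = store.nextBatch(5)
        if (batch.isEmpty()) break
        batch.forEach { println("processing $it") }
    }
}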

Diff for: apps/kafka-key-maintenance/nais/nais-prod.yaml

+2 −2
@@ -9,8 +9,8 @@ spec:
   image: {{ image }}
   port: 8080
   replicas:
-    min: 1
-    max: 1
+    min: 0
+    max: 0
   resources:
     limits:
       memory: 4096Mi
@@ -1,71 +1,109 @@
 package no.nav.paw.kafkakeymaintenance

+import arrow.core.partially1
 import io.micrometer.prometheusmetrics.PrometheusConfig
 import io.micrometer.prometheusmetrics.PrometheusMeterRegistry
+import no.nav.paw.arbeidssokerregisteret.intern.v1.HendelseSerializer
 import no.nav.paw.config.hoplite.loadNaisOrLocalConfiguration
-import no.nav.paw.config.kafka.*
-import no.nav.paw.health.model.HealthStatus
-import no.nav.paw.health.model.LivenessHealthIndicator
-import no.nav.paw.health.model.ReadinessHealthIndicator
+import no.nav.paw.config.kafka.KAFKA_CONFIG_WITH_SCHEME_REG
+import no.nav.paw.config.kafka.KafkaFactory
 import no.nav.paw.health.repository.HealthIndicatorRepository
 import no.nav.paw.kafkakeygenerator.client.createKafkaKeyGeneratorClient
 import no.nav.paw.kafkakeymaintenance.db.DatabaseConfig
 import no.nav.paw.kafkakeymaintenance.db.dataSource
 import no.nav.paw.kafkakeymaintenance.db.migrateDatabase
-import no.nav.paw.kafkakeymaintenance.kafka.Topic
-import no.nav.paw.kafkakeymaintenance.kafka.initHwm
-import no.nav.paw.kafkakeymaintenance.kafka.txContext
-import no.nav.paw.kafkakeymaintenance.pdlprocessor.AktorTopologyConfig
+import no.nav.paw.kafkakeymaintenance.kafka.topic
+import no.nav.paw.kafkakeymaintenance.pdlprocessor.AktorConfig
+import no.nav.paw.kafkakeymaintenance.pdlprocessor.DbReaderContext
+import no.nav.paw.kafkakeymaintenance.pdlprocessor.DbReaderTask
 import no.nav.paw.kafkakeymaintenance.pdlprocessor.functions.hentAlias
-import no.nav.paw.kafkakeymaintenance.perioder.consume
+import no.nav.paw.kafkakeymaintenance.pdlprocessor.sendSync
 import no.nav.paw.kafkakeymaintenance.perioder.dbPerioder
+import org.apache.kafka.common.serialization.LongSerializer
 import org.jetbrains.exposed.sql.Database
-import org.jetbrains.exposed.sql.transactions.transaction
 import org.slf4j.LoggerFactory
-import java.util.concurrent.CompletableFuture.runAsync
+import java.time.Duration
+import java.util.concurrent.LinkedBlockingQueue
+import java.util.concurrent.ThreadPoolExecutor
+import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicBoolean
+import kotlin.system.exitProcess

 fun main() {
     val applicationContext = ApplicationContext(
-        consumerVersion = PERIODE_CONSUMER_GROUP_VERSION,
+        periodeConsumerVersion = PERIODE_CONSUMER_GROUP_VERSION,
+        aktorConsumerVersion = AKTOR_CONSUMER_GROUP_VERSION,
         logger = LoggerFactory.getLogger("app"),
         meterRegistry = PrometheusMeterRegistry(PrometheusConfig.DEFAULT),
         shutdownCalled = AtomicBoolean(false)
     )
-    Runtime.getRuntime().addShutdownHook( Thread { applicationContext.shutdownCalled.set(true) })
+    Runtime.getRuntime().addShutdownHook(Thread { applicationContext.eventOccured(ShutdownSignal("Shutdown hook")) })
     val healthIndicatorRepository = HealthIndicatorRepository()
     with(loadNaisOrLocalConfiguration<DatabaseConfig>("database_configuration.toml").dataSource()) {
         migrateDatabase(this)
         Database.connect(this)
     }
-    val (hwmRebalacingListener, periodeSequence) = with(KafkaFactory(loadNaisOrLocalConfiguration(KAFKA_CONFIG_WITH_SCHEME_REG))) {
-        initPeriodeConsumer(
-            periodeTopic = PERIODE_TOPIC,
-            applicationContext = applicationContext
-        )
-    }
-    val consumerLivenessHealthIndicator = healthIndicatorRepository.addLivenessIndicator(
-        LivenessHealthIndicator(HealthStatus.UNHEALTHY)
+    val kafkaFactory = KafkaFactory(loadNaisOrLocalConfiguration(KAFKA_CONFIG_WITH_SCHEME_REG))
+    val periodeConsumer = kafkaFactory.initPeriodeConsumer(
+        healthIndicatorRepository = healthIndicatorRepository,
+        periodeTopic = PERIODE_TOPIC,
+        applicationContext = applicationContext
     )
-    val consumerReadinessHealthIndicator = healthIndicatorRepository.addReadinessIndicator(ReadinessHealthIndicator())
-    transaction {
-        txContext(applicationContext)().initHwm(Topic(PERIODE_TOPIC), 6)
-    }
-    runAsync {
-        consumerReadinessHealthIndicator.setHealthy()
-        consumerLivenessHealthIndicator.setHealthy()
-        periodeSequence.consume(txContext(applicationContext))
-    }.handle { _, throwable ->
-        throwable?.also { applicationContext.logger.error("Consumer task failed", throwable) }
-        applicationContext.shutdownCalled.set(true)
-        consumerReadinessHealthIndicator.setUnhealthy()
-        consumerLivenessHealthIndicator.setUnhealthy()
-    }
-    applicationContext.logger.info("Applikasjonen er startet, consumer: {}", hwmRebalacingListener.currentlyAssignedPartitions)
+    val aktorConsumer = kafkaFactory.initAktorConsumer(
+        healthIndicatorRepository = healthIndicatorRepository,
+        aktorTopic = AKTOR_TOPIC,
+        applicationContext = applicationContext
+    )
+    val producer = kafkaFactory.createProducer(
+        clientId = "key-maintenance-producer",
+        keySerializer = LongSerializer::class,
+        valueSerializer = HendelseSerializer::class
+    )
+    val executor = ThreadPoolExecutor(4, 4, 10L, TimeUnit.SECONDS, LinkedBlockingQueue())
+    val periodeTask = periodeConsumer.run(executor)
+    val aktorTask = aktorConsumer.run(executor)
+    val aktorConfig = loadNaisOrLocalConfiguration<AktorConfig>(AktorConfig.configFile)
+    val antallHendelsePartisjoner = producer.partitionsFor(aktorConfig.hendelseloggTopic).size
+    val kafkaKeysClient = createKafkaKeyGeneratorClient()
+    val dbReaderTask = DbReaderTask(
+        healthIndicatorRepository = healthIndicatorRepository,
+        applicationContext = applicationContext,
+        dbReaderContext = DbReaderContext(
+            aktorConfig = aktorConfig,
+            receiver = producer::sendSync.partially1(topic(aktorConfig.aktorTopic)),
+            perioder = dbPerioder(applicationContext),
+            hentAlias = kafkaKeysClient::hentAlias.partially1(antallHendelsePartisjoner),
+            aktorDeSerializer = kafkaFactory.kafkaAvroDeSerializer()
+        )
+    ).run(executor)
+
+    applicationContext.logger.info("Applikasjonen er startet")
     initKtor(
         healthIndicatorRepository = healthIndicatorRepository,
         prometheusMeterRegistry = applicationContext.meterRegistry
-    ).start(wait = true)
-    applicationContext.shutdownCalled.set(true)
-    applicationContext.logger.info("Applikasjonen er stoppet")
+    ).start(wait = false)
+    awaitShutdownSignalOrError(applicationContext)
+}
+
+fun awaitShutdownSignalOrError(applicationContext: ApplicationContext) {
+    while (!applicationContext.shutdownCalled.get() && !Thread.currentThread().isInterrupted) {
+        applicationContext.pollMessage(Duration.ofSeconds(2)).let {
+            when (it) {
+                is ErrorOccurred -> {
+                    applicationContext.shutdownCalled.set(true)
+                    Thread.sleep(Duration.ofSeconds(2))
+                    applicationContext.logger.error("Error occurred", it.throwable)
+                    exitProcess(1)
+                }
+
+                is ShutdownSignal -> {
+                    applicationContext.shutdownCalled.set(true)
+                    applicationContext.logger.info("Shutdown signal received from ${it.source}")
+                    exitProcess(0)
+                }
+
+                is Noop -> {}
+            }
+        }
+    }
 }
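The new wiring in main uses Arrow's partially1 (imported above as arrow.core.partially1) to pre-bind the first argument of producer::sendSync and kafkaKeysClient::hentAlias before handing them to DbReaderContext. Below is a minimal, stand-alone illustration of what partially1 does; the send function is invented for the example and is not part of this repository.

import arrow.core.partially1

// Two-argument function standing in for something like producer::sendSync(topic, record).
fun send(topic: String, record: String): String = "sent $record to $topic"

fun main() {
    // Bind the first argument once; keep a one-argument function for later use.
    val sendToAktor: (String) -> String = ::send.partially1("aktor-topic")
    println(sendToAktor("record-1")) // prints: sent record-1 to aktor-topic
}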
@@ -1,13 +1,42 @@
 package no.nav.paw.kafkakeymaintenance

 import io.micrometer.prometheusmetrics.PrometheusMeterRegistry
+import no.nav.paw.kafkakeymaintenance.kafka.TransactionContext
+import org.jetbrains.exposed.sql.Transaction
 import org.slf4j.Logger
+import java.time.Duration
+import java.util.concurrent.BlockingQueue
+import java.util.concurrent.LinkedBlockingQueue
+import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicBoolean

-@JvmRecord
 data class ApplicationContext(
-    val consumerVersion: Int,
+    val periodeConsumerVersion: Int,
+    val aktorConsumerVersion: Int,
     val logger: Logger,
     val meterRegistry: PrometheusMeterRegistry,
     val shutdownCalled: AtomicBoolean = AtomicBoolean(false),
-)
+) {
+    private val messages: BlockingQueue<Message> = LinkedBlockingQueue()
+
+    fun eventOccured(message: Message) {
+        this.messages.add(message)
+    }
+
+    fun pollMessage(timeout: Duration = Duration.ofSeconds(1)): Message =
+        messages.poll(timeout.toMillis(), TimeUnit.MILLISECONDS) ?: Noop
+
+}
+
+val ApplicationContext.periodeTxContext: Transaction.() -> TransactionContext get() = {
+    TransactionContext(periodeConsumerVersion, this)
+}
+
+val ApplicationContext.aktorTxContext: Transaction.() -> TransactionContext get() = {
+    TransactionContext(aktorConsumerVersion, this)
+}
+
+sealed interface Message
+data object Noop: Message
+data class ErrorOccurred(val throwable: Throwable): Message
+data class ShutdownSignal(val source: String): Message
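The message queue added to ApplicationContext is what lets the consumer tasks, the shutdown hook and awaitShutdownSignalOrError in main communicate. A hedged usage sketch follows, assuming the classes from this diff are on the classpath; the failing worker and the demo main are invented for illustration.

import io.micrometer.prometheusmetrics.PrometheusConfig
import io.micrometer.prometheusmetrics.PrometheusMeterRegistry
import no.nav.paw.kafkakeymaintenance.ApplicationContext
import no.nav.paw.kafkakeymaintenance.ErrorOccurred
import no.nav.paw.kafkakeymaintenance.Noop
import no.nav.paw.kafkakeymaintenance.ShutdownSignal
import org.slf4j.LoggerFactory
import java.time.Duration
import kotlin.concurrent.thread

fun main() {
    val ctx = ApplicationContext(
        periodeConsumerVersion = 1,
        aktorConsumerVersion = 1,
        logger = LoggerFactory.getLogger("demo"),
        meterRegistry = PrometheusMeterRegistry(PrometheusConfig.DEFAULT)
    )
    // A worker that fails reports the error instead of terminating the process itself.
    thread {
        try {
            error("simulated consumer failure")
        } catch (e: Exception) {
            ctx.eventOccured(ErrorOccurred(e))
        }
    }
    // The main thread reacts to whatever arrives first, mirroring awaitShutdownSignalOrError.
    when (val msg = ctx.pollMessage(Duration.ofSeconds(5))) {
        is ErrorOccurred -> ctx.logger.error("worker failed", msg.throwable)
        is ShutdownSignal -> ctx.logger.info("shutdown from ${msg.source}")
        is Noop -> ctx.logger.info("nothing happened")
    }
}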
@@ -0,0 +1,49 @@
+package no.nav.paw.kafkakeymaintenance
+
+import no.nav.paw.config.kafka.KafkaFactory
+import no.nav.paw.health.repository.HealthIndicatorRepository
+import no.nav.paw.kafkakeymaintenance.kafka.*
+import no.nav.paw.kafkakeymaintenance.pdlprocessor.lagring.lagreAktorMelding
+import org.apache.kafka.common.serialization.ByteArrayDeserializer
+import org.apache.kafka.common.serialization.StringDeserializer
+import org.jetbrains.exposed.sql.transactions.transaction
+import java.time.Duration
+
+fun KafkaFactory.initAktorConsumer(
+    healthIndicatorRepository: HealthIndicatorRepository,
+    aktorTopic: Topic,
+    applicationContext: ApplicationContext
+): HwmConsumer<String, ByteArray> {
+
+    val aktorConsumer = createConsumer(
+        groupId = "kafka-key-maintenance-aktor-v${applicationContext.periodeConsumerVersion}",
+        clientId = "kafka-key-maintenance-aktor-client-v${applicationContext.periodeConsumerVersion}",
+        keyDeserializer = StringDeserializer::class,
+        valueDeserializer = ByteArrayDeserializer::class,
+        autoCommit = false,
+        autoOffsetReset = "earliest",
+        maxPollrecords = 1000
+    )
+    val reblancingListener = HwmRebalanceListener(
+        contextFactory = applicationContext.aktorTxContext,
+        context = applicationContext,
+        consumer = aktorConsumer
+    )
+    transaction {
+        txContext(applicationContext.aktorConsumerVersion)().initHwm(
+            aktorTopic,
+            aktorConsumer.partitionsFor(aktorTopic.value).count()
+        )
+    }
+    aktorConsumer.subscribe(listOf(aktorTopic.value), reblancingListener)
+    return HwmConsumer(
+        name = "${aktorTopic}-consumer",
+        healthIndicatorRepository = healthIndicatorRepository,
+        applicationContext = applicationContext,
+        contextFactory = { tx -> txContext(aktorConsumerVersion)(tx) },
+        consumer = aktorConsumer,
+        function = lagreAktorMelding,
+        pollTimeout = Duration.ofMillis(1000)
+    )
+}
+

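The initHwm, HwmRebalanceListener and HwmConsumer calls above suggest that offsets are tracked as per-partition high-water marks in the database rather than committed to Kafka, so a record is skipped when its offset is at or below the stored mark. The following is only a rough sketch of that idea; HwmStore, IncomingRecord and processWithHwm are invented stand-ins, not the actual classes in no.nav.paw.kafkakeymaintenance.kafka.

// Invented stand-ins for the hwm table and an incoming Kafka record; illustration only.
data class IncomingRecord(val partition: Int, val offset: Long, val value: String)

class HwmStore {
    private val hwm = mutableMapOf<Int, Long>()
    fun get(partition: Int): Long = hwm[partition] ?: -1L
    fun update(partition: Int, offset: Long) { hwm[partition] = offset }
}

// Skip anything at or below the stored high-water mark, otherwise handle it and advance the mark.
fun processWithHwm(store: HwmStore, records: List<IncomingRecord>, handle: (IncomingRecord) -> Unit) {
    for (record in records) {
        if (record.offset <= store.get(record.partition)) continue
        handle(record)
        store.update(record.partition, record.offset)
    }
}

fun main() {
    val store = HwmStore()
    val records = listOf(
        IncomingRecord(partition = 0, offset = 0, value = "a"),
        IncomingRecord(partition = 0, offset = 1, value = "b"),
        IncomingRecord(partition = 0, offset = 1, value = "b") // duplicate delivery is skipped
    )
    processWithHwm(store, records) { println("handling ${it.value} at ${it.partition}:${it.offset}") }
}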
Diff for: apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/InitKafkaStreams.kt

-95
This file was deleted.
