Skip to content

Commit 8093120

Browse files
committed
Endret til å benytte individuelle JDBC env vars
1 parent 0cf530e commit 8093120

File tree

12 files changed

+99
-72
lines changed

12 files changed

+99
-72
lines changed

apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Application.kt

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import no.nav.paw.kafkakeygenerator.config.KAFKA_TOPOLOGY_CONFIG
2121
import no.nav.paw.kafkakeygenerator.config.KafkaTopologyConfig
2222
import no.nav.paw.kafkakeygenerator.config.PDL_CLIENT_CONFIG
2323
import no.nav.paw.kafkakeygenerator.config.PdlClientConfig
24-
import no.nav.paw.kafkakeygenerator.database.createDataSource
24+
import no.nav.paw.kafkakeygenerator.utils.createDataSource
2525
import no.nav.paw.kafkakeygenerator.merge.MergeDetector
2626
import no.nav.paw.kafkakeygenerator.plugin.configSerialization
2727
import no.nav.paw.kafkakeygenerator.plugin.configureAuthentication

apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseConfig.kt

+8-2
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,13 @@ package no.nav.paw.kafkakeygenerator.config
33
const val DATABASE_CONFIG = "database_config.toml"
44

55
data class DatabaseConfig(
6-
val jdbcUrl: String,
6+
val host: String,
7+
val port: Int,
8+
val database: String,
9+
val username: String,
10+
val password: String,
711
val driverClassName: String,
812
val autoCommit: Boolean
9-
)
13+
) {
14+
val jdbcUrl = "jdbc:postgresql://$host:$port/$database?user=$username&password=$password"
15+
}

apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Kafka.kt

+2-2
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ import org.apache.kafka.clients.consumer.ConsumerRecords
77
import org.apache.kafka.clients.consumer.KafkaConsumer
88

99
fun <K, V> Application.configureKafka(
10-
consumeFunction: ((Sequence<ConsumerRecords<K, V>>) -> Unit),
11-
successFunction: ((Unit) -> Unit)? = null,
10+
consumeFunction: ((ConsumerRecords<K, V>) -> Unit),
11+
successFunction: ((ConsumerRecords<K, V>) -> Unit)? = null,
1212
errorFunction: ((throwable: Throwable) -> Unit),
1313
kafkaConsumer: KafkaConsumer<K, V>,
1414
kafkaTopics: List<String>

apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/KafkaConsumerPlugin.kt

+30-19
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@ import io.ktor.util.KtorDsl
1212
import kotlinx.coroutines.Dispatchers
1313
import kotlinx.coroutines.Job
1414
import kotlinx.coroutines.launch
15-
import no.nav.paw.config.kafka.asSequence
1615
import no.nav.paw.kafkakeygenerator.listener.NoopConsumerRebalanceListener
1716
import no.nav.paw.kafkakeygenerator.utils.buildApplicationLogger
1817
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener
@@ -21,12 +20,13 @@ import org.apache.kafka.clients.consumer.KafkaConsumer
2120
import java.time.Duration
2221
import java.util.concurrent.atomic.AtomicBoolean
2322

23+
private val logger = buildApplicationLogger
2424
val KafkaConsumerReady: EventDefinition<Application> = EventDefinition()
2525

2626
@KtorDsl
27-
class KafkaConsumerPluginConfig<K, V> {
28-
var consumeFunction: ((Sequence<ConsumerRecords<K, V>>) -> Unit)? = null
29-
var successFunction: ((Unit) -> Unit)? = null
27+
class KafkaConsumerPluginConfig<K, V, R> {
28+
var consumeFunction: ((ConsumerRecords<K, V>) -> Unit)? = null
29+
var successFunction: ((ConsumerRecords<K, V>) -> Unit)? = null
3030
var errorFunction: ((throwable: Throwable) -> Unit)? = null
3131
var kafkaConsumer: KafkaConsumer<K, V>? = null
3232
var kafkaTopics: Collection<String>? = null
@@ -40,15 +40,26 @@ class KafkaConsumerPluginConfig<K, V> {
4040
}
4141
}
4242

43-
fun <K, V> kafkaConsumerPlugin(): ApplicationPlugin<KafkaConsumerPluginConfig<K, V>> =
43+
private fun <K, V> KafkaConsumer<K, V>.defaultSuccessFunction(records: ConsumerRecords<K, V>) {
44+
if (!records.isEmpty) {
45+
logger.debug("Kafka Consumer success. {} records processed", records.count())
46+
this.commitSync()
47+
}
48+
}
49+
50+
private fun defaultErrorFunction(throwable: Throwable) {
51+
logger.error("Kafka Consumer failed", throwable)
52+
throw throwable
53+
}
54+
55+
fun <K, V> kafkaConsumerPlugin(): ApplicationPlugin<KafkaConsumerPluginConfig<K, V, Unit>> =
4456
createApplicationPlugin(KafkaConsumerPluginConfig.PLUGIN_NAME, ::KafkaConsumerPluginConfig) {
4557
application.log.info("Oppretter {}", KafkaConsumerPluginConfig.PLUGIN_NAME)
46-
val logger = buildApplicationLogger
47-
val consumeFunction = requireNotNull(pluginConfig.consumeFunction) { "ConsumeFunction er null" }
48-
val successFunction = pluginConfig.successFunction ?: { logger.debug("Kafka Consumer poll fullførte") }
49-
val errorFunction = pluginConfig.errorFunction ?: { logger.error("Kafka Consumer poll feilet") }
50-
val kafkaConsumer = requireNotNull(pluginConfig.kafkaConsumer) { "KafkaConsumer er null" }
5158
val kafkaTopics = requireNotNull(pluginConfig.kafkaTopics) { "KafkaTopics er null" }
59+
val kafkaConsumer = requireNotNull(pluginConfig.kafkaConsumer) { "KafkaConsumer er null" }
60+
val consumeFunction = requireNotNull(pluginConfig.consumeFunction) { "ConsumeFunction er null" }
61+
val successFunction = pluginConfig.successFunction ?: kafkaConsumer::defaultSuccessFunction
62+
val errorFunction = pluginConfig.errorFunction ?: ::defaultErrorFunction
5263
val pollTimeout = pluginConfig.pollTimeout ?: Duration.ofMillis(100)
5364
val closeTimeout = pluginConfig.closeTimeout ?: Duration.ofSeconds(1)
5465
val rebalanceListener = pluginConfig.rebalanceListener ?: NoopConsumerRebalanceListener()
@@ -72,15 +83,15 @@ fun <K, V> kafkaConsumerPlugin(): ApplicationPlugin<KafkaConsumerPluginConfig<K,
7283
on(MonitoringEvent(KafkaConsumerReady)) { application ->
7384
consumeJob = application.launch(Dispatchers.IO) {
7485
logger.info("Kafka Consumer starter")
75-
kafkaConsumer
76-
.asSequence(
77-
stop = shutdownFlag,
78-
pollTimeout = pollTimeout,
79-
closeTimeout = closeTimeout
80-
)
81-
.runCatching(consumeFunction)
82-
.mapCatching { kafkaConsumer.commitSync() }
83-
.fold(onSuccess = successFunction, onFailure = errorFunction)
86+
while (!shutdownFlag.get()) {
87+
try {
88+
val records = kafkaConsumer.poll(pollTimeout)
89+
consumeFunction(records)
90+
successFunction(records)
91+
} catch (throwable: Throwable) {
92+
errorFunction(throwable)
93+
}
94+
}
8495
logger.info("Kafka Consumer avsluttet")
8596
}
8697
}

apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerService.kt

+31-37
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ import no.nav.paw.kafkakeygenerator.repository.IdentitetRepository
1414
import no.nav.paw.kafkakeygenerator.repository.KafkaKeysAuditRepository
1515
import no.nav.paw.kafkakeygenerator.utils.buildErrorLogger
1616
import no.nav.paw.kafkakeygenerator.utils.buildLogger
17-
import no.nav.paw.kafkakeygenerator.utils.countKafkaFailed
1817
import no.nav.paw.kafkakeygenerator.utils.countKafkaIgnored
1918
import no.nav.paw.kafkakeygenerator.utils.countKafkaInserted
2019
import no.nav.paw.kafkakeygenerator.utils.countKafkaProcessed
@@ -44,30 +43,38 @@ class KafkaConsumerService(
4443
.addReadinessIndicator(ReadinessHealthIndicator(HealthStatus.HEALTHY))
4544

4645
@WithSpan
47-
fun handleRecords(sequence: Sequence<ConsumerRecords<Long, Hendelse>>) {
48-
sequence.forEach { records ->
49-
records
50-
.map { it.value() }
51-
.onEach {
52-
meterRegistry.countKafkaReceived()
53-
if (it is IdentitetsnummerSammenslaatt) {
54-
logger.debug("Prosesserer hendelse av type {}", it.hendelseType)
55-
meterRegistry.countKafkaProcessed()
56-
} else {
57-
logger.debug("Ignorerer hendelse av type {}", it.hendelseType)
58-
meterRegistry.countKafkaIgnored()
59-
}
60-
}
61-
.filterIsInstance<IdentitetsnummerSammenslaatt>()
62-
.forEach { hendelse ->
63-
logger.info("Mottok hendelse om sammenslåing av Identitetsnummer")
64-
val identitetsnummer = hendelse.alleIdentitetsnummer
65-
.map { Identitetsnummer(it) } + Identitetsnummer(hendelse.identitetsnummer)
66-
val fraArbeidssoekerId = ArbeidssoekerId(hendelse.id)
67-
val tilArbeidssoekerId = ArbeidssoekerId(hendelse.flyttetTilArbeidssoekerId)
68-
updateIdentiteter(HashSet(identitetsnummer), fraArbeidssoekerId, tilArbeidssoekerId)
46+
fun handleRecords(
47+
records: ConsumerRecords<Long, Hendelse>
48+
) {
49+
records
50+
.onEach { record ->
51+
logger.debug(
52+
"Mottok melding på topic: {}, partition: {}, offset {}",
53+
record.topic(),
54+
record.partition(),
55+
record.offset()
56+
)
57+
}
58+
.map { it.value() }
59+
.onEach { event ->
60+
meterRegistry.countKafkaReceived()
61+
if (event is IdentitetsnummerSammenslaatt) {
62+
logger.debug("Prosesserer hendelse av type {}", event.hendelseType)
63+
meterRegistry.countKafkaProcessed()
64+
} else {
65+
logger.debug("Ignorerer hendelse av type {}", event.hendelseType)
66+
meterRegistry.countKafkaIgnored()
6967
}
70-
}
68+
}
69+
.filterIsInstance<IdentitetsnummerSammenslaatt>()
70+
.forEach { event ->
71+
logger.info("Mottok hendelse om sammenslåing av Identitetsnummer")
72+
val identitetsnummer = event.alleIdentitetsnummer
73+
.map { Identitetsnummer(it) } + Identitetsnummer(event.identitetsnummer)
74+
val fraArbeidssoekerId = ArbeidssoekerId(event.id)
75+
val tilArbeidssoekerId = ArbeidssoekerId(event.flyttetTilArbeidssoekerId)
76+
updateIdentiteter(HashSet(identitetsnummer), fraArbeidssoekerId, tilArbeidssoekerId)
77+
}
7178
}
7279

7380
private fun updateIdentiteter(
@@ -76,19 +83,6 @@ class KafkaConsumerService(
7683
tilArbeidssoekerId: ArbeidssoekerId
7784
) {
7885
transaction(database) {
79-
identitetRepository.find(fraArbeidssoekerId).let {
80-
if (it == null) {
81-
meterRegistry.countKafkaFailed()
82-
throw IllegalStateException("ArbeidssøkerId ikke funnet")
83-
}
84-
}
85-
identitetRepository.find(tilArbeidssoekerId).let {
86-
if (it == null) {
87-
meterRegistry.countKafkaFailed()
88-
throw IllegalStateException("ArbeidssøkerId ikke funnet")
89-
}
90-
}
91-
9286
identitetsnummerSet.forEach { identitetsnummer ->
9387
val kafkaKey = identitetRepository.find(identitetsnummer)
9488
if (kafkaKey != null) {

apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/DataSource.kt → apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/DataSourceFactory.kt (renamed)

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
package no.nav.paw.kafkakeygenerator.database
1+
package no.nav.paw.kafkakeygenerator.utils
22

33
import com.zaxxer.hikari.HikariConfig
44
import com.zaxxer.hikari.HikariDataSource
[Filename header missing in extraction — judging by the localhost/admin defaults, this hunk appears to be the local-profile database_config.toml resource; verify against the repository.]
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,7 @@
1-
jdbcUrl = "jdbc:postgresql://localhost:5432/pawkafkakeys?user=admin&password=admin"
1+
host = "localhost"
2+
port = 5432
3+
database = "pawkafkakeys"
4+
username = "admin"
5+
password = "admin"
26
driverClassName = "org.postgresql.Driver"
37
autoCommit = false

apps/kafka-key-generator/src/main/resources/logback.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222
</root>
2323
<logger name="org.apache.kafka.clients.consumer.ConsumerConfig" level="WARN" />
2424
<logger name="org.apache.kafka.clients.producer.ProducerConfig" level="WARN" />
25-
<logger name="no.nav.paw.kafkakeygenerator" level="DEBUG" />
25+
<logger name="no.nav.paw" level="DEBUG" />
2626
</else>
2727
</if>
2828
</configuration>
[Filename header missing in extraction — judging by the NAIS_DATABASE_* environment variables, this hunk appears to be the NAIS-profile database_config.toml resource; verify against the repository.]
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,7 @@
1-
jdbcUrl = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_URL}"
1+
host = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_HOST}"
2+
port = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PORT}"
3+
database = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_DATABASE}"
4+
username = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_USERNAME}"
5+
password = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PASSWORD}"
26
driverClassName = "org.postgresql.Driver"
37
autoCommit = false

apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerServiceTest.kt

+4-4
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ import no.nav.paw.kafkakeygenerator.repository.IdentitetRepository
1212
import no.nav.paw.kafkakeygenerator.repository.KafkaKeysAuditRepository
1313
import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository
1414
import no.nav.paw.kafkakeygenerator.test.TestData
15-
import no.nav.paw.kafkakeygenerator.test.asConsumerRecordsSequence
15+
import no.nav.paw.kafkakeygenerator.test.asConsumerRecords
1616
import no.nav.paw.kafkakeygenerator.test.initTestDatabase
1717
import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId
1818
import no.nav.paw.kafkakeygenerator.vo.Failure
@@ -60,7 +60,7 @@ class KafkaConsumerServiceTest : FreeSpec({
6060
TestData.getIdentitetsnummerOpphoert(identitetsnummer, arbeidssoekerId)
6161
)
6262

63-
kafkaConsumerService.handleRecords(hendelser.asConsumerRecordsSequence())
63+
kafkaConsumerService.handleRecords(hendelser.asConsumerRecords())
6464

6565
val keyResult = kafkaKeysRepository.hent(identitetsnummer)
6666
val auditResult = kafkaKeysAuditRepository.find(identitetsnummer)
@@ -80,7 +80,7 @@ class KafkaConsumerServiceTest : FreeSpec({
8080
)
8181

8282
shouldThrow<IllegalStateException> {
83-
kafkaConsumerService.handleRecords(hendelser.asConsumerRecordsSequence())
83+
kafkaConsumerService.handleRecords(hendelser.asConsumerRecords())
8484
}
8585

8686
val keyResult = kafkaKeysRepository.hent(identitetsnummer)
@@ -110,7 +110,7 @@ class KafkaConsumerServiceTest : FreeSpec({
110110
)
111111
)
112112

113-
kafkaConsumerService.handleRecords(hendelser.asConsumerRecordsSequence())
113+
kafkaConsumerService.handleRecords(hendelser.asConsumerRecords())
114114

115115
val keyResult1 = kafkaKeysRepository.hent(identitetsnummer1)
116116
val keyResult2 = kafkaKeysRepository.hent(identitetsnummer2)

apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestData.kt

+5-1
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,11 @@ fun MockRequestHandleScope.genererResponse(it: HttpRequestData): HttpResponseDat
143143
)
144144
}
145145

146-
fun List<Hendelse>.asConsumerRecordsSequence(): Sequence<ConsumerRecords<Long, Hendelse>> =
146+
fun List<Hendelse>.asConsumerRecords(): ConsumerRecords<Long, Hendelse> =
147+
this.map { TestData.getConsumerRecord(nextLong(), it) }
148+
.let { TestData.getConsumerRecords(it) }
149+
150+
fun List<Hendelse>.asConsumerSequence(): Sequence<ConsumerRecords<Long, Hendelse>> =
147151
this.map { TestData.getConsumerRecord(nextLong(), it) }
148152
.let { TestData.getConsumerRecords(it) }
149153
.let { sequenceOf(it) }

apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestDatabase.kt

+6-2
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,19 @@
11
package no.nav.paw.kafkakeygenerator.test
22

33
import no.nav.paw.kafkakeygenerator.config.DatabaseConfig
4-
import no.nav.paw.kafkakeygenerator.database.createDataSource
4+
import no.nav.paw.kafkakeygenerator.utils.createDataSource
55
import org.testcontainers.containers.PostgreSQLContainer
66
import org.testcontainers.containers.wait.strategy.Wait
77
import javax.sql.DataSource
88

99
fun initTestDatabase(): DataSource {
1010
val config = postgreSQLContainer().let {
1111
DatabaseConfig(
12-
jdbcUrl = "jdbc:postgresql://${it.host}:${it.firstMappedPort}/${it.databaseName}?user=${it.username}&password=${it.password}",
12+
host = it.host,
13+
port = it.firstMappedPort,
14+
database = it.databaseName,
15+
username = it.username,
16+
password = it.password,
1317
driverClassName = "org.postgresql.Driver",
1418
autoCommit = false
1519
)

0 commit comments

Comments (0)