Compare commits
33 Commits
| SHA1 |
|---|
| 73f5a3d4da |
| 85e1e805b7 |
| 5082571ce8 |
| f6798a8a45 |
| ffd44d37eb |
| e129258f39 |
| 289ee88be0 |
| a9779d2371 |
| f5cc32487f |
| dd40f7f726 |
| 9ba7b9ed26 |
| 6eb72f8486 |
| aab76f32b3 |
| a5ca7c32b7 |
| 9d943b2df3 |
| bab434a834 |
| 84d219bb6d |
| ce75726824 |
| b62079f65f |
| a9a06a41f9 |
| a0f1908a1a |
| ddf5c699cd |
| c7395b883e |
| 919339d306 |
| 6d60c5f74c |
| 23f6afb483 |
| 746dc2dc67 |
| d31448e26c |
| 674a536818 |
| 4d213a2b23 |
| 518f4726cf |
| 550636c076 |
| 5c68d8697d |

.github/workflows/test.yml (new file, vendored, 29 lines)

```yaml
name: Run Unit Tests

on:
  push:
    branches:
      - "**"

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up JDK 21
        uses: actions/setup-java@v4
        with:
          java-version: '21'
          distribution: 'zulu'

      - name: Setup Gradle
        uses: gradle/gradle-build-action@v3

      - name: Make gradlew executable
        run: chmod +x ./gradlew

      - name: Run unit tests
        run: ./gradlew test --stacktrace
```

```diff
@@ -20,6 +20,7 @@ dependencies {
 
 
     implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.10.2")
+    implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
 
     //testImplementation(kotlin("test"))
     testImplementation("org.assertj:assertj-core:3.4.1")
```

```diff
@@ -1 +1,2 @@
 kotlin.code.style=official
+org.gradle.parallel=false
```

src/main/kotlin/no/iktdev/eventi/MyTime.kt (new file, 14 lines)

```kotlin
package no.iktdev.eventi

import java.time.Clock
import java.time.Instant

object MyTime {
    private val clock: Clock = Clock.systemUTC()

    @JvmStatic
    fun utcNow(): Instant =
        Instant.now(clock)

}
```

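MyTime pins the library to a single UTC clock, and the rest of the diff then swaps LocalDateTime for Instant throughout. A small standalone illustration of the difference (not project code): two clocks in different zones agree on the Instant-based "now" but not on the LocalDateTime one, which is what made wall-clock LocalDateTime timestamps fragile for ordering.

```kotlin
import java.time.Clock
import java.time.Duration
import java.time.Instant
import java.time.LocalDateTime
import java.time.ZoneId

fun main() {
    val oslo = Clock.system(ZoneId.of("Europe/Oslo"))
    val utc = Clock.systemUTC()

    // Instant is zone-independent: both clocks report the same point in time.
    val i1: Instant = Instant.now(oslo)   // effectively what MyTime.utcNow() returns
    val i2: Instant = Instant.now(utc)
    println(Duration.between(i1, i2).abs().toMillis() < 1_000)   // true

    // LocalDateTime is zone-dependent: the same moment renders as different values.
    val l1 = LocalDateTime.now(oslo)
    val l2 = LocalDateTime.now(utc)
    println(l1 == l2)                                            // false
}
```
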
```diff
@@ -1,5 +1,6 @@
 package no.iktdev.eventi
 
+import com.google.gson.Gson
 import com.google.gson.GsonBuilder
 import com.google.gson.JsonDeserializationContext
 import com.google.gson.JsonDeserializer
@@ -15,13 +16,14 @@ import no.iktdev.eventi.models.store.PersistedTask
 import no.iktdev.eventi.models.store.TaskStatus
 import no.iktdev.eventi.tasks.TaskTypeRegistry
 import java.lang.reflect.Type
+import java.time.Instant
 import java.time.LocalDateTime
 import java.time.format.DateTimeFormatter
 
 object ZDS {
     val gson = WGson.gson
 
-    fun Event.toPersisted(id: Long, persistedAt: LocalDateTime = LocalDateTime.now()): PersistedEvent? {
+    fun Event.toPersisted(id: Long, persistedAt: Instant = MyTime.utcNow()): PersistedEvent? {
         val payloadJson = gson.toJson(this)
         val eventName = this::class.simpleName ?: run {
             throw IllegalStateException("Missing class name for event: $this")
@@ -47,7 +49,7 @@ object ZDS {
         return gson.fromJson(data, clazz)
     }
 
-    fun Task.toPersisted(id: Long, status: TaskStatus = TaskStatus.Pending, persistedAt: LocalDateTime = LocalDateTime.now()): PersistedTask? {
+    fun Task.toPersisted(id: Long, status: TaskStatus = TaskStatus.Pending, persistedAt: Instant = MyTime.utcNow()): PersistedTask? {
         val payloadJson = gson.toJson(this)
         val taskName = this::class.simpleName ?: run {
             throw IllegalStateException("Missing class name for task: $this")
@@ -80,26 +82,47 @@ object ZDS {
 
     object WGson {
         val gson = GsonBuilder()
+            .registerTypeAdapter(Instant::class.java, InstantAdapter())
+            // if you still have LocalDateTime somewhere:
            .registerTypeAdapter(LocalDateTime::class.java, LocalDateTimeAdapter())
            .create()
 
-        fun toJson(data: Any?): String {
-            return gson.toJson(data)
-        }
+        fun toJson(data: Any?): String =
+            gson.toJson(data)
+
+        class InstantAdapter : JsonSerializer<Instant>, JsonDeserializer<Instant> {
+            override fun serialize(
+                src: Instant,
+                typeOfSrc: Type,
+                context: JsonSerializationContext
+            ): JsonElement =
+                JsonPrimitive(src.toString()) // ISO-8601, UTC
+
+            override fun deserialize(
+                json: JsonElement,
+                typeOfT: Type,
+                context: JsonDeserializationContext
+            ): Instant =
+                Instant.parse(json.asString)
+        }
 
         class LocalDateTimeAdapter : JsonSerializer<LocalDateTime>, JsonDeserializer<LocalDateTime> {
            private val formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME
 
            override fun serialize(
-                src: LocalDateTime, typeOfSrc: Type, context: JsonSerializationContext
-            ): JsonElement {
-                return JsonPrimitive(src.format(formatter))
-            }
+                src: LocalDateTime,
+                typeOfSrc: Type,
+                context: JsonSerializationContext
+            ): JsonElement =
+                JsonPrimitive(src.format(formatter))
 
            override fun deserialize(
-                json: JsonElement, typeOfT: Type, context: JsonDeserializationContext
-            ): LocalDateTime {
-                return LocalDateTime.parse(json.asString, formatter)
-            }
+                json: JsonElement,
+                typeOfT: Type,
+                context: JsonDeserializationContext
+            ): LocalDateTime =
+                LocalDateTime.parse(json.asString, formatter)
        }
     }
 
 }
```

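The new InstantAdapter writes Instant as its toString() form and reads it back with Instant.parse, so persisted timestamps round-trip as ISO-8601 UTC strings. A minimal usage sketch, assuming WGson stays a nested object of ZDS as in this diff:

```kotlin
import no.iktdev.eventi.ZDS.WGson
import java.time.Instant

fun main() {
    val now = Instant.parse("2024-01-01T12:00:00Z")
    val json = WGson.gson.toJson(now)                           // "\"2024-01-01T12:00:00Z\""
    val back = WGson.gson.fromJson(json, Instant::class.java)   // parsed by InstantAdapter
    check(back == now)                                          // round-trips losslessly
}
```
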
```diff
@@ -2,25 +2,30 @@ package no.iktdev.eventi.events
 
 import no.iktdev.eventi.models.DeleteEvent
 import no.iktdev.eventi.models.Event
+import no.iktdev.eventi.models.SignalEvent
 import no.iktdev.eventi.stores.EventStore
 import java.util.UUID
 
-class EventDispatcher(val eventStore: EventStore) {
+open class EventDispatcher(val eventStore: EventStore) {
 
-    fun dispatch(referenceId: UUID, events: List<Event>) {
+    open fun dispatch(referenceId: UUID, events: List<Event>) {
         val derivedFromIds = events.mapNotNull { it.metadata.derivedFromId }.flatten().toSet()
         val deletedEventIds = events.filterIsInstance<DeleteEvent>().map { it.deletedEventId }
         val candidates = events
+            .filterNot { it is SignalEvent }
             .filter { it.eventId !in derivedFromIds }
             .filter { it.eventId !in deletedEventIds }
 
+        val effectiveHistory = events
+            .filter { it.eventId !in deletedEventIds } // drop deleted events
+            .filterNot { it is DeleteEvent }           // drop the delete event itself
+
         EventListenerRegistry.getListeners().forEach { listener ->
             for (candidate in candidates) {
-                val result = listener.onEvent(candidate, events)
+                val result = listener.onEvent(candidate, effectiveHistory)
                 if (result != null) {
                     eventStore.persist(result)
-                    return
                 }
             }
         }
```

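The dispatch change introduces two views of the same event list: candidates (no SignalEvent, nothing already derived from, nothing deleted) and effectiveHistory (deleted events and the DeleteEvent itself dropped). A standalone sketch of that filtering with hypothetical stand-in types, mirroring the expectations in the updated tests: the DeleteEvent itself remains a candidate, while the history keeps only surviving events.

```kotlin
import java.util.UUID

// Hypothetical stand-ins for the event types; only the fields the filter logic needs.
open class Ev(val eventId: UUID = UUID.randomUUID(), val derivedFromId: Set<UUID>? = null)
class Delete(val deletedEventId: UUID) : Ev()
class Signal : Ev()

fun main() {
    val trigger = Ev()
    val derived = Ev(derivedFromId = setOf(trigger.eventId))
    val signal = Signal()
    val delete = Delete(deletedEventId = trigger.eventId)
    val events = listOf(trigger, derived, signal, delete)

    val derivedFromIds = events.mapNotNull { it.derivedFromId }.flatten().toSet()
    val deletedEventIds = events.filterIsInstance<Delete>().map { it.deletedEventId }

    // Candidates: not a signal, not already derived from, not deleted.
    val candidates = events
        .filterNot { it is Signal }
        .filter { it.eventId !in derivedFromIds }
        .filter { it.eventId !in deletedEventIds }

    // Effective history: deleted events and the delete event itself are dropped.
    val effectiveHistory = events
        .filter { it.eventId !in deletedEventIds }
        .filterNot { it is Delete }

    println(candidates.size)        // 2 -> derived + the delete event itself
    println(effectiveHistory.size)  // 2 -> derived + signal
}
```
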
```diff
@@ -1,51 +1,138 @@
 package no.iktdev.eventi.events
 
 import kotlinx.coroutines.delay
+import mu.KotlinLogging
+import no.iktdev.eventi.MyTime
 import no.iktdev.eventi.ZDS.toEvent
 import no.iktdev.eventi.stores.EventStore
 import java.time.Duration
-import java.time.LocalDateTime
-import kotlin.collections.iterator
+import java.time.Instant
+import java.util.UUID
 
 abstract class EventPollerImplementation(
     private val eventStore: EventStore,
     private val dispatchQueue: SequenceDispatchQueue,
     private val dispatcher: EventDispatcher
 ) {
-    var lastSeenTime: LocalDateTime = LocalDateTime.of(1970, 1, 1, 0, 0)
+    private val log = KotlinLogging.logger {}
+
+    /**
+     * Per-reference watermark:
+     * - first = last seen persistedAt
+     * - second = last seen persistedId
+     */
+    protected val refWatermark = mutableMapOf<UUID, Pair<Instant, Long>>()
+
+    /**
+     * Global scan hint (timestamp only).
+     * Used to avoid scanning entire table every time.
+     */
+    var lastSeenTime: Instant = Instant.EPOCH
+
     open var backoff = Duration.ofSeconds(2)
         protected set
     private val maxBackoff = Duration.ofMinutes(1)
 
     open suspend fun start() {
+        log.info { "EventPoller starting with initial backoff=$backoff" }
         while (true) {
+            try {
                 pollOnce()
+            } catch (e: Exception) {
+                log.error(e) { "Error in poller loop" }
+                delay(backoff.toMillis())
+                backoff = backoff.multipliedBy(2).coerceAtMost(maxBackoff)
+            }
         }
     }
 
     suspend fun pollOnce() {
-        val newPersisted = eventStore.getPersistedEventsAfter(lastSeenTime)
+        val pollStartedAt = MyTime.utcNow()
+        log.debug { "🔍 Polling for new events" }
+
+        // Determine global scan start
+        val minRefTs = refWatermark.values.minOfOrNull { it.first }
+        val scanFrom = when (minRefTs) {
+            null -> lastSeenTime
+            else -> maxOf(lastSeenTime, minRefTs)
+        }
+
+        val newPersisted = eventStore.getPersistedEventsAfter(scanFrom)
+
         if (newPersisted.isEmpty()) {
+            log.debug { "😴 No new events found. Backing off for $backoff" }
             delay(backoff.toMillis())
             backoff = backoff.multipliedBy(2).coerceAtMost(maxBackoff)
             return
         }
 
+        // Reset backoff
         backoff = Duration.ofSeconds(2)
+        log.debug { "📬 Found ${newPersisted.size} new events after $scanFrom" }
+
         val grouped = newPersisted.groupBy { it.referenceId }
+        var anyProcessed = false
 
-        for ((referenceId, _) in grouped) {
-            if (dispatchQueue.isProcessing(referenceId)) continue
+        // Track highest persistedAt seen globally this round
+        val maxPersistedThisRound = newPersisted.maxOf { it.persistedAt }
 
-            val fullLog = eventStore.getPersistedEventsFor(referenceId)
+        for ((ref, eventsForRef) in grouped) {
+            val (refSeenAt, refSeenId) = refWatermark[ref] ?: (Instant.EPOCH to 0L)
+
+            // Filter new events using (timestamp, id) ordering
+            val newForRef = eventsForRef.filter { ev ->
+                ev.persistedAt > refSeenAt ||
+                    (ev.persistedAt == refSeenAt && ev.id > refSeenId)
+            }
+
+            if (newForRef.isEmpty()) {
+                log.debug { "🧊 No new events for $ref since ($refSeenAt, id=$refSeenId)" }
+                continue
+            }
+
+            // If ref is busy, skip dispatch
+            if (dispatchQueue.isProcessing(ref)) {
+                log.debug { "⏳ $ref is busy — deferring ${newForRef.size} events" }
+                continue
+            }
+
+            // Fetch full sequence for dispatch
+            val fullLog = eventStore.getPersistedEventsFor(ref)
             val events = fullLog.mapNotNull { it.toEvent() }
 
-            dispatchQueue.dispatch(referenceId, events, dispatcher)
-            lastSeenTime = fullLog.maxOf { it.persistedAt }
+            log.debug { "🚀 Dispatching ${events.size} events for $ref" }
+            dispatchQueue.dispatch(ref, events, dispatcher)
+
+            // Update watermark for this reference
+            val maxEvent = newForRef.maxWith(
+                compareBy({ it.persistedAt }, { it.id })
+            )
+
+            val newWatermarkAt = minOf(pollStartedAt, maxEvent.persistedAt)
+            val newWatermarkId = maxEvent.id
+
+            refWatermark[ref] = newWatermarkAt to newWatermarkId
+            anyProcessed = true
+
+            log.debug { "⏩ Updated watermark for $ref → ($newWatermarkAt, id=$newWatermarkId)" }
         }
+
+        // Update global scan hint
+        val newLastSeen = maxOf(
+            lastSeenTime,
+            maxPersistedThisRound.plusNanos(1)
+        )
+
+        if (anyProcessed) {
+            val minRef = refWatermark.values.minOfOrNull { it.first }
+            lastSeenTime = when (minRef) {
+                null -> newLastSeen
+                else -> maxOf(newLastSeen, minRef)
+            }
+            log.debug { "📉 Global scanFrom updated → $lastSeenTime (anyProcessed=true)" }
+        } else {
+            lastSeenTime = newLastSeen
+            log.debug { "🔁 No refs processed — advancing global scanFrom to $lastSeenTime" }
+        }
     }
 }
```

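The per-reference watermark compares rows on the pair (persistedAt, id) rather than on the timestamp alone, so two rows persisted in the same instant are still consumed exactly once. A small standalone sketch of that ordering (Row is a hypothetical stand-in, not the project's PersistedEvent):

```kotlin
import java.time.Instant

// Hypothetical stand-in for a persisted row: only the two fields the watermark uses.
data class Row(val persistedAt: Instant, val id: Long)

fun newSince(rows: List<Row>, seenAt: Instant, seenId: Long): List<Row> =
    rows.filter { it.persistedAt > seenAt || (it.persistedAt == seenAt && it.id > seenId) }

fun main() {
    val t = Instant.parse("2024-01-01T00:00:00Z")
    val rows = listOf(Row(t, 1), Row(t, 2), Row(t.plusSeconds(1), 3))

    // Watermark at (t, id=1): the same-timestamp row with id=2 is still picked up.
    println(newSince(rows, t, 1L).map { it.id })          // [2, 3]

    // A timestamp-only watermark at t would have skipped id=2 entirely.
    println(rows.filter { it.persistedAt > t }.map { it.id })   // [3]
}
```
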
```diff
@@ -6,11 +6,12 @@ import kotlinx.coroutines.Job
 import kotlinx.coroutines.SupervisorJob
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.sync.Semaphore
+import mu.KotlinLogging
 import no.iktdev.eventi.models.Event
 import java.util.UUID
 import java.util.concurrent.ConcurrentHashMap
 
-class SequenceDispatchQueue(
+open class SequenceDispatchQueue(
     private val maxConcurrency: Int = 8,
     private val scope: CoroutineScope = CoroutineScope(Dispatchers.Default + SupervisorJob())
 ) {
@@ -21,23 +22,38 @@ class SequenceDispatchQueue(
         return scope
     }
 
-    fun isProcessing(referenceId: UUID): Boolean = referenceId in active
+    private val log = KotlinLogging.logger {}
+
+    open fun isProcessing(referenceId: UUID): Boolean = referenceId in active
 
-    fun dispatch(referenceId: UUID, events: List<Event>, dispatcher: EventDispatcher): Job? {
-        if (!active.add(referenceId)) return null // already processing
+    open fun dispatch(referenceId: UUID, events: List<Event>, dispatcher: EventDispatcher): Job? {
+        if (!active.add(referenceId)) {
+            log.debug {"⚠️ Already processing $referenceId, skipping dispatch"}
+            return null
+        }
+        log.debug {"▶️ Starting dispatch for $referenceId with ${events.size} events"}
 
         return scope.launch {
             try {
+                log.debug {"⏳ Waiting for semaphore for $referenceId"}
                 semaphore.acquire()
+                log.debug {"🔓 Acquired semaphore for $referenceId"}
+
                 try {
                     dispatcher.dispatch(referenceId, events)
                 } catch (e: Exception) {
-                    println("Dispatch failed for $referenceId: ${e.message}")
+                    log.error("Dispatch failed for $referenceId: ${e.message}")
+                    e.printStackTrace()
                 } finally {
                     semaphore.release()
+                    log.debug {"✅ Released semaphore for $referenceId"}
                 }
             } finally {
                 active.remove(referenceId)
+                log.debug {"🏁 Finished dispatch for $referenceId"}
             }
         }
     }
```

```diff
@@ -4,7 +4,7 @@ import java.util.UUID
 
 @Suppress("UNCHECKED_CAST")
 abstract class Event {
-    var referenceId: UUID = UUID.randomUUID()
+    lateinit var referenceId: UUID
         protected set
     var eventId: UUID = UUID.randomUUID()
         private set
@@ -39,9 +39,10 @@ inline fun <reified T> Event.requireAs(): T {
     return this as? T ?: throw IllegalArgumentException("Expected ${T::class.java.name}, got ${this::class.java.name}")
 }
 
-abstract class DeleteEvent: Event() {
-    open lateinit var deletedEventId: UUID
-}
+abstract class DeleteEvent(
+    open val deletedEventId: UUID
+) : Event()
+
+abstract class SignalEvent(): Event()
```

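With deletedEventId moved into DeleteEvent's constructor and SignalEvent added, concrete subclasses now pass the target id at construction time instead of assigning a lateinit var. A hedged sketch of what such subclasses might look like (MediaRemovedEvent and HeartbeatSignal are hypothetical names, not part of this repository):

```kotlin
import no.iktdev.eventi.models.DeleteEvent
import no.iktdev.eventi.models.SignalEvent
import java.util.UUID

// Hypothetical concrete subclasses; the library only defines the abstract bases.
class MediaRemovedEvent(deletedEventId: UUID) : DeleteEvent(deletedEventId)

class HeartbeatSignal : SignalEvent()

fun example(staleEventId: UUID) {
    val delete = MediaRemovedEvent(staleEventId)   // target id fixed at construction
    val signal = HeartbeatSignal()                 // excluded from dispatch candidates
    println("${delete.deletedEventId} / ${signal.eventId}")
}
```
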
```diff
@@ -1,10 +1,11 @@
 package no.iktdev.eventi.models
 
-import java.time.LocalDateTime
+import no.iktdev.eventi.MyTime
+import java.time.Instant
 import java.util.UUID
 
 class Metadata {
-    val created: LocalDateTime = LocalDateTime.now()
+    val created: Instant = MyTime.utcNow()
     var derivedFromId: Set<UUID>? = null
         private set
     fun derivedFromEventId(vararg id: UUID) = apply {
```

```diff
@@ -1,6 +1,5 @@
 package no.iktdev.eventi.models
 
-import java.time.LocalDateTime
 import java.util.UUID
 
 
```

```diff
@@ -1,6 +1,6 @@
 package no.iktdev.eventi.models.store
 
-import java.time.LocalDateTime
+import java.time.Instant
 import java.util.UUID
 
 data class PersistedEvent(
@@ -9,5 +9,5 @@ data class PersistedEvent(
     val eventId: UUID,
     val event: String,
     val data: String,
-    val persistedAt: LocalDateTime
+    val persistedAt: Instant
 )
```

```diff
@@ -1,6 +1,6 @@
 package no.iktdev.eventi.models.store
 
-import java.time.LocalDateTime
+import java.time.Instant
 import java.util.UUID
 
 data class PersistedTask(
@@ -13,13 +13,14 @@ data class PersistedTask(
     val claimed: Boolean,
     val claimedBy: String? = null,
     val consumed: Boolean,
-    val lastCheckIn: LocalDateTime? = null,
-    val persistedAt: LocalDateTime
+    val lastCheckIn: Instant? = null,
+    val persistedAt: Instant
 ) {}
 
 enum class TaskStatus {
     Pending,
     InProgress,
     Completed,
-    Failed
+    Failed,
+    Cancelled
 }
```

```diff
@@ -2,11 +2,11 @@ package no.iktdev.eventi.stores
 
 import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.store.PersistedEvent
-import java.time.LocalDateTime
+import java.time.Instant
 import java.util.UUID
 
 interface EventStore {
-    fun getPersistedEventsAfter(timestamp: LocalDateTime): List<PersistedEvent>
+    fun getPersistedEventsAfter(timestamp: Instant): List<PersistedEvent>
     fun getPersistedEventsFor(referenceId: UUID): List<PersistedEvent>
     fun persist(event: Event)
 }
```

```diff
@@ -4,12 +4,12 @@ import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.SupervisorJob
-import kotlinx.coroutines.currentCoroutineContext
 import kotlinx.coroutines.delay
 import kotlinx.coroutines.isActive
 import kotlinx.coroutines.launch
 import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.Task
+import no.iktdev.eventi.models.store.TaskStatus
 import org.jetbrains.annotations.VisibleForTesting
 import java.util.UUID
 import kotlin.coroutines.cancellation.CancellationException
@@ -68,12 +68,16 @@ abstract class TaskListener(val taskType: TaskType = TaskType.CPU_INTENSIVE): Ta
         currentJob = getDispatcherForTask(task).launch {
             try {
                 val result = onTask(task)
-                reporter.markConsumed(task.taskId)
                 onComplete(task, result)
             } catch (e: CancellationException) {
-                onCancelled()
+                // This is a genuine cancellation
+                onCancelled(task)
+                throw e // important: do not swallow the cancellation
+
             } catch (e: Exception) {
+                // This is an actual failure
                 onError(task, e)
+
             } finally {
                 heartbeatRunner?.cancel()
                 currentJob?.cancel()
@@ -86,24 +90,29 @@ abstract class TaskListener(val taskType: TaskType = TaskType.CPU_INTENSIVE): Ta
         return true
     }
 
+    abstract fun createIncompleteStateTaskEvent(task: Task, status: TaskStatus, exception: Exception? = null): Event
+
     override fun onError(task: Task, exception: Exception) {
         reporter?.log(task.taskId, "Error processing task: ${exception.message}")
         exception.printStackTrace()
-        reporter?.markConsumed(task.taskId)
+        reporter?.markFailed(task.referenceId, task.taskId)
+        reporter!!.publishEvent(createIncompleteStateTaskEvent(task, TaskStatus.Failed, exception))
     }
 
     override fun onComplete(task: Task, result: Event?) {
-        reporter!!.markConsumed(task.taskId)
+        reporter!!.markCompleted(task.taskId)
         reporter!!.log(task.taskId, "Task completed successfully.")
         result?.let {
             reporter!!.publishEvent(result)
         }
     }
 
-    override fun onCancelled() {
+    override fun onCancelled(task: Task) {
+        reporter!!.markCancelled(task.referenceId, task.taskId)
         currentJob?.cancel()
         heartbeatRunner?.cancel()
         currentTask = null
+        reporter!!.publishEvent(createIncompleteStateTaskEvent(task, TaskStatus.Cancelled))
     }
 }
 
@@ -120,13 +129,15 @@ interface TaskListenerImplementation {
     suspend fun onTask(task: Task): Event?
     fun onComplete(task: Task, result: Event?)
     fun onError(task: Task, exception: Exception)
-    fun onCancelled()
+    fun onCancelled(task: Task)
 }
 
 interface TaskReporter {
     fun markClaimed(taskId: UUID, workerId: String)
     fun updateLastSeen(taskId: UUID)
-    fun markConsumed(taskId: UUID)
+    fun markCompleted(taskId: UUID)
+    fun markFailed(referenceId: UUID, taskId: UUID)
+    fun markCancelled(referenceId: UUID, taskId: UUID)
     fun updateProgress(taskId: UUID, progress: Int)
     fun log(taskId: UUID, message: String)
     fun publishEvent(event: Event)
```

```diff
@@ -1,6 +1,7 @@
 package no.iktdev.eventi.tasks
 
 import kotlinx.coroutines.delay
+import mu.KotlinLogging
 import no.iktdev.eventi.ZDS.toTask
 import no.iktdev.eventi.models.Task
 import no.iktdev.eventi.stores.TaskStore
@@ -10,42 +11,68 @@ abstract class TaskPollerImplementation(
     private val taskStore: TaskStore,
     private val reporterFactory: (Task) -> TaskReporter
 ) {
+    private val log = KotlinLogging.logger {}
+
 
     open var backoff = Duration.ofSeconds(2)
         protected set
     private val maxBackoff = Duration.ofMinutes(1)
 
     open suspend fun start() {
+        log.info { "TaskPoller starting with initial backoff=$backoff" }
         while (true) {
+            try {
                 pollOnce()
+            } catch (e: Exception) {
+                e.printStackTrace()
+                delay(backoff.toMillis())
+                backoff = backoff.multipliedBy(2).coerceAtMost(maxBackoff)
+            }
         }
     }
 
     suspend fun pollOnce() {
+        log.debug { "Polling for pending tasks…" }
         val newPersistedTasks = taskStore.getPendingTasks()
 
         if (newPersistedTasks.isEmpty()) {
+            log.debug { "No pending tasks found. Backing off for $backoff" }
             delay(backoff.toMillis())
             backoff = backoff.multipliedBy(2).coerceAtMost(maxBackoff)
             return
         }
+        log.debug { "Found ${newPersistedTasks.size} persisted tasks" }
 
         val tasks = newPersistedTasks.mapNotNull { it.toTask() }
         var acceptedAny = false
 
         for (task in tasks) {
             val listener = TaskListenerRegistry.getListeners().firstOrNull { it.supports(task) && !it.isBusy } ?: continue
             val claimed = taskStore.claim(task.taskId, listener.getWorkerId())
-            if (!claimed) continue
+            if (!claimed) {
+                log.debug { "Task ${task.taskId} is already claimed by another worker" }
+                continue
+            }
+
+            log.debug { "Task ${task.taskId} claimed by ${listener.getWorkerId()}" }
+
             val reporter = reporterFactory(task)
-            val accepted = listener.accept(task, reporter)
+            val accepted = try {
+                listener.accept(task, reporter)
+            } catch (e: Exception) {
+                log.error("Error while processing task ${task.taskId} by listener ${listener.getWorkerId()}: ${e.message}")
+                e.printStackTrace()
+                false
+            }
             acceptedAny = acceptedAny || accepted
         }
 
         if (!acceptedAny) {
+            log.debug { "No tasks were accepted. Backing off for $backoff" }
             delay(backoff.toMillis())
             backoff = backoff.multipliedBy(2).coerceAtMost(maxBackoff)
         } else {
+            log.debug { "At least one task accepted. Resetting backoff." }
             backoff = Duration.ofSeconds(2)
         }
     }
```

```diff
@@ -8,7 +8,7 @@ import no.iktdev.eventi.events.EventListenerRegistry
 import no.iktdev.eventi.events.EventTypeRegistry
 import no.iktdev.eventi.models.DeleteEvent
 import no.iktdev.eventi.models.Event
-import no.iktdev.eventi.models.Metadata
+import no.iktdev.eventi.models.SignalEvent
 import no.iktdev.eventi.testUtil.wipe
 import org.assertj.core.api.Assertions.assertThat
 import org.junit.jupiter.api.Assertions.assertEquals
@@ -18,43 +18,53 @@ import org.junit.jupiter.api.Assertions.assertTrue
 import org.junit.jupiter.api.BeforeEach
 import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test
-import java.time.LocalDateTime
 import java.util.UUID
 
+@DisplayName(
+    """
+    EventDispatcher
+    When events are dispatched to listeners
+    If the events contain derived, deleted or new events
+    Then the dispatcher must handle filtering, replays and history correctly
+    """
+)
 class EventDispatcherTest : TestBase() {
 
     val dispatcher = EventDispatcher(eventStore)
 
-    class DerivedEvent(): Event()
-    class TriggerEvent(): Event() {
-    }
-    class OtherEvent(): Event()
-    class DummyEvent(): Event() {
-        fun putMetadata(metadata: Metadata) {
-            this.metadata = metadata
-        }
-    }
+    class DerivedEvent : Event()
+    class TriggerEvent : Event()
+    class OtherEvent : Event()
+    class DummyEvent : Event() {
+    }
 
 
     @BeforeEach
     fun setup() {
         EventTypeRegistry.wipe()
         EventListenerRegistry.wipe()
-        // Verify that it is empty
 
-        EventTypeRegistry.register(listOf(
+        EventTypeRegistry.register(
+            listOf(
                 DerivedEvent::class.java,
                 TriggerEvent::class.java,
                 OtherEvent::class.java,
                 DummyEvent::class.java
-        ))
+            )
+        )
     }
 
 
     @Test
-    fun `should produce one event and stop`() {
-        val listener = ProducingListener()
+    @DisplayName(
+        """
+        When a TriggerEvent is dispatched
+        If a listener produces a single DerivedEvent
+        Then only one new event is produced and the process stops
+        """
+    )
+    fun shouldProduceOneEventAndStop() {
+        ProducingListener()
 
-        val trigger = TriggerEvent()
+        val trigger = TriggerEvent().newReferenceId()
         dispatcher.dispatch(trigger.referenceId, listOf(trigger))
 
         val produced = eventStore.all().firstOrNull()
@@ -62,52 +72,85 @@ class EventDispatcherTest: TestBase() {
 
         val event = produced!!.toEvent()
         assertThat(event!!.metadata.derivedFromId).hasSize(1)
-        assertThat(event!!.metadata.derivedFromId).contains(trigger.eventId)
+        assertThat(event.metadata.derivedFromId).contains(trigger.eventId)
         assertTrue(event is DerivedEvent)
     }
 
     @Test
-    fun `should skip already derived events`() {
-        val listener = ProducingListener()
+    @DisplayName(
+        """
+        When an event has already derived a DerivedEvent
+        If the dispatcher replays the history
+        Then the DerivedEvent must not be produced again
+        """
+    )
+    fun shouldSkipAlreadyDerivedEvents() {
+        ProducingListener()
 
-        val trigger = TriggerEvent()
-        val derived = DerivedEvent().derivedOf(trigger).toPersisted(1L, LocalDateTime.now())
+        val trigger = TriggerEvent().newReferenceId()
+        val derived = DerivedEvent().derivedOf(trigger).toPersisted(1L, MyTime.utcNow())
 
         eventStore.persist(derived!!.toEvent()!!) // simulate prior production
 
-        dispatcher.dispatch(trigger.referenceId, listOf(trigger, derived!!.toEvent()!!))
+        dispatcher.dispatch(trigger.referenceId, listOf(trigger, derived.toEvent()!!))
 
-        assertEquals(1, eventStore.all().size) // no new event produced
+        assertEquals(1, eventStore.all().size)
     }
 
     @Test
-    fun `should pass full context to listener`() {
+    @DisplayName(
+        """
+        When multiple events are dispatched
+        If a listener receives an event
+        Then the full history is delivered in the context
+        """
+    )
+    fun shouldPassFullContextToListener() {
         val listener = ContextCapturingListener()
 
-        val e1 = TriggerEvent()
-        val e2 = OtherEvent()
+        val e1 = TriggerEvent().newReferenceId()
+        val e2 = OtherEvent().newReferenceId()
         dispatcher.dispatch(e1.referenceId, listOf(e1, e2))
 
         assertEquals(2, listener.context.size)
     }
 
     @Test
-    fun `should behave deterministically across replays`() {
-        val listener = ProducingListener()
+    @DisplayName(
+        """
+        When a replay happens
+        If an event has already produced a DerivedEvent
+        Then the DerivedEvent must not be produced again
+        """
+    )
+    fun shouldBehaveDeterministicallyAcrossReplays() {
+        val referenceId = UUID.randomUUID()
 
-        val trigger = TriggerEvent()
+        ProducingListener()
+
+        val trigger = TriggerEvent().usingReferenceId(referenceId)
         dispatcher.dispatch(trigger.referenceId, listOf(trigger))
         val replayContext = listOf(trigger) + eventStore.all().mapNotNull { it.toEvent() }
 
         dispatcher.dispatch(trigger.referenceId, replayContext)
 
-        assertEquals(1, eventStore.all().size) // no duplicate
+        assertEquals(1, eventStore.all().size)
     }
 
     @Test
-    fun `should not deliver deleted events as candidates`() {
+    @DisplayName(
+        """
+        When a DeleteEvent points at an earlier event
+        If the dispatcher filters candidates
+        Then deleted events must not be delivered as candidates
+        """
+    )
+    fun shouldNotDeliverDeletedEventsAsCandidates() {
+        val referenceId = UUID.randomUUID()
+
         val dispatcher = EventDispatcher(eventStore)
         val received = mutableListOf<Event>()
+
         object : EventListener() {
             override fun onEvent(event: Event, history: List<Event>): Event? {
                 received += event
@@ -115,12 +158,10 @@ class EventDispatcherTest: TestBase() {
             }
         }
         // Original event
-        val original = TriggerEvent()
+        val original = TriggerEvent().usingReferenceId(referenceId)
 
         // Delete event pointing at the original
-        val deleted = object : DeleteEvent() {
-            override var deletedEventId = original.eventId
-        }
+        val deleted = object : DeleteEvent(original.eventId) {}.apply { newReferenceId() }
 
         // Dispatch with both events
         dispatcher.dispatch(original.referenceId, listOf(original, deleted))
@@ -137,29 +178,42 @@ class EventDispatcherTest: TestBase() {
     }
 
     @Test
-    fun `should deliver DeleteEvent to listeners that react to it`() {
+    @DisplayName(
+        """
+        When a DeleteEvent is dispatched on its own
+        If a listener reacts to DeleteEvent
+        Then the DeleteEvent is delivered as a candidate
+        """
+    )
+    fun shouldDeliverDeleteEventToListenersThatReactToIt() {
         val received = mutableListOf<Event>()
-        val listener = object : EventListener() {
-            override fun onEvent(event: Event, context: List<Event>): Event? {
+        val referenceId = UUID.randomUUID()
+
+        object : EventListener() {
+            override fun onEvent(event: Event, history: List<Event>): Event? {
                 if (event is DeleteEvent) received += event
                 return null
             }
         }
 
-        val deleted = object : DeleteEvent() {
-            override var deletedEventId = UUID.randomUUID()
-        }
+        val deleted = object : DeleteEvent(UUID.randomUUID()) {}.apply { usingReferenceId(referenceId) }
+
         dispatcher.dispatch(deleted.referenceId, listOf(deleted))
 
         assertTrue(received.contains(deleted))
     }
 
     @Test
-    @DisplayName("A replay must not deliver an event that has already derived a new one")
-    fun `should not re-deliver events that have produced derived events`() {
-        val listener = ProducingListener()
+    @DisplayName(
+        """
+        When an event has derived a new event
+        If the dispatcher replays the history
+        Then the original event must not be delivered as a candidate again
+        """
+    )
+    fun shouldNotRedeliverEventsThatHaveProducedDerivedEvents() {
+        ProducingListener()
 
-        val trigger = TriggerEvent()
+        val trigger = TriggerEvent().newReferenceId()
         // First dispatch: the trigger produces a DerivedEvent
         dispatcher.dispatch(trigger.referenceId, listOf(trigger))
 
@@ -178,20 +232,157 @@ class EventDispatcherTest: TestBase() {
         }
     }
 
+    @Test
+    @DisplayName(
+        """
+        When a DeleteEvent has deleted an earlier event
+        If the dispatcher builds the history
+        Then deleted events must not be part of the history
+        """
+    )
+    fun historyShouldExcludeDeletedEvents() {
+        val dispatcher = EventDispatcher(eventStore)
+
+        val original = TriggerEvent().newReferenceId()
+        val deleted = object : DeleteEvent(original.eventId) {}.apply { usingReferenceId(original.referenceId) }
+
+        var receivedHistory: List<Event> = emptyList()
+
+        object : EventListener() {
+            override fun onEvent(event: Event, history: List<Event>): Event? {
+                receivedHistory = history
+                return null
+            }
+        }
+
+        dispatcher.dispatch(original.referenceId, listOf(original, deleted))
+
+        assertFalse(receivedHistory.contains(original))
+        assertFalse(receivedHistory.contains(deleted))
+    }
+
+    @Test
+    @DisplayName(
+        """
+        When a DeleteEvent has deleted an event
+        If other events are still valid
+        Then the history must contain only the non-deleted events
+        """
+    )
+    fun historyShouldKeepNonDeletedEvents() {
+        val dispatcher = EventDispatcher(eventStore)
+        val referenceId = UUID.randomUUID()
+        val e1 = TriggerEvent().usingReferenceId(referenceId)
+        val e2 = OtherEvent().usingReferenceId(referenceId)
+        val deleted = object : DeleteEvent(e1.eventId) {}
+
+        var receivedHistory: List<Event> = emptyList()
+
+        object : EventListener() {
+            override fun onEvent(event: Event, history: List<Event>): Event? {
+                receivedHistory = history
+                return null
+            }
+        }
+
+        dispatcher.dispatch(e1.referenceId, listOf(e1, e2, deleted))
+
+        assertTrue(receivedHistory.contains(e2))
+        assertFalse(receivedHistory.contains(e1))
+        assertFalse(receivedHistory.contains(deleted))
+    }
+
+    @Test
+    @DisplayName(
+        """
+        When a DeleteEvent is a candidate
+        If the history contains only deleted events
+        Then the history must be empty
+        """
+    )
+    fun deleteEventShouldBeDeliveredButHistoryEmpty() {
+        val dispatcher = EventDispatcher(eventStore)
+
+        val original = TriggerEvent().newReferenceId()
+        val deleted = object : DeleteEvent(original.eventId) {}.apply { newReferenceId() }
+
+        var receivedEvent: Event? = null
+        var receivedHistory: List<Event> = emptyList()
+
+        object : EventListener() {
+            override fun onEvent(event: Event, history: List<Event>): Event? {
+                receivedEvent = event
+                receivedHistory = history
+                return null
+            }
+        }
+
+        dispatcher.dispatch(original.referenceId, listOf(original, deleted))
+
+        assertTrue(receivedEvent is DeleteEvent)
+        assertTrue(receivedHistory.isEmpty())
+    }
+
+    @Test
+    @DisplayName(
+        """
+        When a SignalEvent is dispatched
+        If the SignalEvent must not be a candidate
+        Then it is not delivered to listeners, but is still part of the history
+        """
+    )
+    fun shouldNotDeliverSignalEventAsCandidate() {
+        // Arrange
+        class TestSignalEvent : SignalEvent()
+        EventTypeRegistry.register(listOf(TestSignalEvent::class.java,))
+
+        val received = mutableListOf<Event>()
+        var finalHistory: List<Event>? = null
+        object : EventListener() {
+            override fun onEvent(event: Event, history: List<Event>): Event? {
+                received += event
+                finalHistory = history
+                return null
+            }
+        }
+
+        val refId = UUID.randomUUID()
+        val trigger = TriggerEvent().usingReferenceId(refId)
+        val signal = TestSignalEvent().usingReferenceId(refId)
+
+        // Act
+        dispatcher.dispatch(trigger.referenceId, listOf(trigger, signal))
+
+        // Assert
+        // 1) TriggerEvent must be delivered
+        assertTrue(received.any { it is TriggerEvent }) {
+            "TriggerEvent should be delivered as a candidate"
+        }
+
+        // 2) SignalEvent must NOT be delivered
+        assertFalse(received.any { it is TestSignalEvent }) {
+            "SignalEvent should not be delivered as a candidate"
+        }
+
+        assertNotNull(finalHistory)
+        assertTrue(finalHistory!!.any { it is TestSignalEvent }) {
+            "SignalEvent should be in the history even though it is not a candidate"
+        }
+    }
+
+
 // --- Test helpers ---
 
 class ProducingListener : EventListener() {
-    override fun onEvent(event: Event, context: List<Event>): Event? {
+    override fun onEvent(event: Event, history: List<Event>): Event? {
         return if (event is TriggerEvent) DerivedEvent().derivedOf(event) else null
     }
 }
 
 class ContextCapturingListener : EventListener() {
     var context: List<Event> = emptyList()
-    override fun onEvent(event: Event, context: List<Event>): Event? {
-        this.context = context
+    override fun onEvent(event: Event, history: List<Event>): Event? {
+        this.context = history
         return null
     }
 }
```

```diff
@@ -4,24 +4,30 @@ import no.iktdev.eventi.ZDS.toPersisted
 import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.store.PersistedEvent
 import no.iktdev.eventi.stores.EventStore
-import java.time.LocalDateTime
+import java.time.Instant
 import java.util.UUID
 
 class InMemoryEventStore : EventStore {
     private val persisted = mutableListOf<PersistedEvent>()
     private var nextId = 1L
 
-    override fun getPersistedEventsAfter(timestamp: LocalDateTime): List<PersistedEvent> =
+    override fun getPersistedEventsAfter(timestamp: Instant): List<PersistedEvent> =
         persisted.filter { it.persistedAt > timestamp }
 
     override fun getPersistedEventsFor(referenceId: UUID): List<PersistedEvent> =
         persisted.filter { it.referenceId == referenceId }
 
     override fun persist(event: Event) {
-        val persistedEvent = event.toPersisted(nextId++, LocalDateTime.now())
+        val persistedEvent = event.toPersisted(nextId++, MyTime.utcNow())
         persisted += persistedEvent!!
     }
 
+    fun persistAt(event: Event, persistedAt: Instant) {
+        val persistedEvent = event.toPersisted(nextId++, persistedAt)
+        persisted += persistedEvent!!
+    }
+
     fun all(): List<PersistedEvent> = persisted
     fun clear() { persisted.clear(); nextId = 1L }
 }
```

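The new persistAt helper lets a test pin persistedAt explicitly instead of taking whatever MyTime.utcNow() returns, which makes poller and watermark tests deterministic. A rough usage sketch: SampleEvent is hypothetical, the import for InMemoryEventStore is omitted because its package is not shown in this diff, and newReferenceId() is assumed to initialise the now-lateinit referenceId as it does in the updated tests.

```kotlin
import no.iktdev.eventi.models.Event
import java.time.Instant

class SampleEvent : Event()   // hypothetical test event, not part of the library

fun main() {
    val store = InMemoryEventStore()
    val t0 = Instant.parse("2024-01-01T00:00:00Z")

    // Pin timestamps so ordering-sensitive assertions do not depend on the wall clock.
    store.persistAt(SampleEvent().newReferenceId(), t0)
    store.persistAt(SampleEvent().newReferenceId(), t0.plusSeconds(5))

    println(store.getPersistedEventsAfter(t0).size)   // 1 -> only the later row is strictly after t0
}
```
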
```diff
@@ -6,7 +6,7 @@ import no.iktdev.eventi.models.store.PersistedTask
 import no.iktdev.eventi.models.store.TaskStatus
 import no.iktdev.eventi.stores.TaskStore
 import java.time.Duration
-import java.time.LocalDateTime
+import java.time.temporal.ChronoUnit
 import java.util.UUID
 import kotlin.concurrent.atomics.AtomicReference
 
@@ -30,13 +30,13 @@ open class InMemoryTaskStore : TaskStore {
     override fun claim(taskId: UUID, workerId: String): Boolean {
         val task = findByTaskId(taskId) ?: return false
         if (task.claimed && !isExpired(task)) return false
-        update(task.copy(claimed = true, claimedBy = workerId, lastCheckIn = LocalDateTime.now()))
+        update(task.copy(claimed = true, claimedBy = workerId, lastCheckIn = MyTime.utcNow()))
         return true
     }
 
     override fun heartbeat(taskId: UUID) {
         val task = findByTaskId(taskId) ?: return
-        update(task.copy(lastCheckIn = LocalDateTime.now()))
+        update(task.copy(lastCheckIn = MyTime.utcNow()))
     }
 
     override fun markConsumed(taskId: UUID, status: TaskStatus) {
@@ -45,7 +45,7 @@ open class InMemoryTaskStore : TaskStore {
     }
 
     override fun releaseExpiredTasks(timeout: Duration) {
-        val now = LocalDateTime.now()
+        val now = MyTime.utcNow()
         tasks.filter {
             it.claimed && !it.consumed && it.lastCheckIn?.isBefore(now.minus(timeout)) == true
         }.forEach {
@@ -60,8 +60,8 @@ open class InMemoryTaskStore : TaskStore {
     }
 
     private fun isExpired(task: PersistedTask): Boolean {
-        val now = LocalDateTime.now()
-        return task.lastCheckIn?.isBefore(now.minusMinutes(15)) == true
+        val now = MyTime.utcNow()
+        return task.lastCheckIn?.isBefore(now.minus(15, ChronoUnit.MINUTES)) == true
     }
 
     private fun serialize(data: Any?): String = data?.toString() ?: "{}"
```

```diff
@@ -12,30 +12,39 @@ import org.junit.jupiter.api.Assertions.assertNull
 import org.junit.jupiter.api.BeforeEach
 import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test
-import java.time.LocalDateTime
 
+@DisplayName("""
+    ZDS – Serialization/Deserialization System
+    When Event and Task objects are persisted and restored
+    If the type registries are configured correctly
+    Then ZDS can serialize and deserialize the objects without loss of data
+""")
 class ZDSTest {
 
     @BeforeEach
     fun setup() {
         EventTypeRegistry.wipe()
         TaskTypeRegistry.wipe()
 
         // Verify that it is empty
         assertNull(EventTypeRegistry.resolve("SomeEvent"))
     }
 
     @Test
-    @DisplayName("Test ZDS with Event object")
+    @DisplayName("""
+        When an Event object is persisted via ZDS
+        If the type is registered in EventTypeRegistry
+        Then it can be restored as the correct Event type with the same data
+    """)
     fun scenario1() {
         EventTypeRegistry.register(EchoEvent::class.java)
 
-        val echo = EchoEvent("hello")
+        val echo = EchoEvent("hello").newReferenceId()
         val persisted = echo.toPersisted(id = 1L)
 
         val restored = persisted!!.toEvent()
         assert(restored is EchoEvent)
         assert((restored as EchoEvent).data == "hello")
 
     }
 
     data class TestTask(
@@ -43,9 +52,12 @@ class ZDSTest {
     ) : Task()
 
     @Test
-    @DisplayName("Test ZDS with Task object")
+    @DisplayName("""
+        When a Task object is persisted via ZDS
+        If the type is registered in TaskTypeRegistry
+        Then it can be restored as the correct Task type with its metadata intact
+    """)
     fun scenario2() {
 
         TaskTypeRegistry.register(TestTask::class.java)
 
         val task = TestTask("Potato")
@@ -58,7 +70,5 @@ class ZDSTest {
         assert((restored as TestTask).data == "Potato")
         assert(restored.metadata.created == task.metadata.created)
         assert(restored.metadata.derivedFromId == task.metadata.derivedFromId)
-
     }
-
 }
```

@@ -6,35 +6,34 @@ import no.iktdev.eventi.testUtil.wipe
 import org.assertj.core.api.Assertions.assertThat
 import org.junit.jupiter.api.Assertions.*
 import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test

+@DisplayName("""
+    EventListenerRegistry
+    Når lyttere registreres med og uten @ListenerOrder
+    Hvis registry sorterer dem etter annotasjonen
+    Så skal rekkefølgen være deterministisk og korrekt
+""")
 class EventListenerRegistryTest {

     @ListenerOrder(1)
-    class MockTest1() : EventListener() {
-        override fun onEvent(event: Event, history: List<Event>): Event? {
-            return null
-        }
+    class MockTest1 : EventListener() {
+        override fun onEvent(event: Event, history: List<Event>): Event? = null
     }

     @ListenerOrder(2)
-    class MockTest2() : EventListener() {
-        override fun onEvent(event: Event, history: List<Event>): Event? {
-            return null
-        }
+    class MockTest2 : EventListener() {
+        override fun onEvent(event: Event, history: List<Event>): Event? = null
     }

     @ListenerOrder(3)
-    class MockTest3() : EventListener() {
-        override fun onEvent(event: Event, history: List<Event>): Event? {
-            return null
-        }
+    class MockTest3 : EventListener() {
+        override fun onEvent(event: Event, history: List<Event>): Event? = null
     }

-    class MockTestRandom() : EventListener() {
-        override fun onEvent(event: Event, history: List<Event>): Event? {
-            return null
-        }
+    class MockTestRandom : EventListener() {
+        override fun onEvent(event: Event, history: List<Event>): Event? = null
     }

     @BeforeEach
@@ -43,13 +42,19 @@ class EventListenerRegistryTest {
     }

     @Test
+    @DisplayName("""
+        Når flere lyttere registreres i vilkårlig rekkefølge
+        Hvis noen har @ListenerOrder og andre ikke
+        Så skal registry returnere dem sortert etter order, og usorterte sist
+    """)
     fun validateOrder() {
         MockTestRandom()
         MockTest1()
         MockTest2()
         MockTest3()

         val listeners = EventListenerRegistry.getListeners()
-        // Assert
         assertThat(listeners.map { it::class.simpleName }).containsExactly(
             MockTest1::class.simpleName, // @ListenerOrder(1)
             MockTest2::class.simpleName, // @ListenerOrder(2)
@@ -57,5 +62,4 @@ class EventListenerRegistryTest {
             MockTestRandom::class.simpleName // no annotation → goes last
         )
     }

 }
@@ -1,80 +1,107 @@
 package no.iktdev.eventi.events

 import kotlinx.coroutines.CompletableDeferred
-import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.CoroutineDispatcher
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.ExperimentalCoroutinesApi
-import kotlinx.coroutines.awaitAll
+import kotlinx.coroutines.SupervisorJob
 import kotlinx.coroutines.channels.Channel
+import kotlinx.coroutines.test.StandardTestDispatcher
 import kotlinx.coroutines.test.runTest
 import kotlinx.coroutines.withContext
 import kotlinx.coroutines.withTimeout
-import no.iktdev.eventi.EventDispatcherTest
+import kotlinx.coroutines.awaitAll
 import no.iktdev.eventi.EventDispatcherTest.DerivedEvent
 import no.iktdev.eventi.EventDispatcherTest.OtherEvent
 import no.iktdev.eventi.EventDispatcherTest.TriggerEvent
+import no.iktdev.eventi.MyTime
 import no.iktdev.eventi.TestBase
 import no.iktdev.eventi.models.Event
+import no.iktdev.eventi.testUtil.TestSequenceDispatchQueue
 import no.iktdev.eventi.testUtil.wipe
 import org.junit.jupiter.api.Assertions.assertEquals
 import org.junit.jupiter.api.Assertions.assertFalse
 import org.junit.jupiter.api.Assertions.assertTrue
 import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test
 import java.time.Duration
-import java.time.LocalDateTime
 import java.util.UUID
 import java.util.concurrent.ConcurrentHashMap

+@DisplayName("""
+    EventPollerImplementation
+    Når polleren leser nye events fra EventStore og samarbeider med SequenceDispatchQueue
+    Hvis nye events ankommer, køen er travel, eller duplikater dukker opp
+    Så skal polleren dispatch'e riktig, oppdatere lastSeenTime og unngå duplikater
+""")
 class EventPollerImplementationTest : TestBase() {
-    val dispatcher = EventDispatcher(eventStore)
-    val queue = SequenceDispatchQueue(maxConcurrency = 8)
-
-    val poller = object : EventPollerImplementation(eventStore, queue, dispatcher) {}
+    private val dispatcher = EventDispatcher(eventStore)

     @BeforeEach
     fun setup() {
         EventTypeRegistry.wipe()
         EventListenerRegistry.wipe()
         eventStore.clear()
-        // Verifiser at det er tomt

-        EventTypeRegistry.register(listOf(
+        EventTypeRegistry.register(
+            listOf(
                 DerivedEvent::class.java,
                 TriggerEvent::class.java,
                 OtherEvent::class.java
-        ))
+            )
+        )
     }

     @Test
-    fun `pollOnce should dispatch all new referenceIds and update lastSeenTime`() = runTest {
+    @DisplayName("""
+        Når polleren finner nye referenceId-er med events
+        Hvis pollOnce kjøres
+        Så skal alle referenceId-er dispatch'es og lastSeenTime oppdateres
+    """)
+    fun pollOnceDispatchesAllNewReferenceIdsAndUpdatesLastSeenTime() = runTest {
+        val testDispatcher = StandardTestDispatcher(testScheduler)
+        val queue = TestSequenceDispatchQueue(maxConcurrency = 8, dispatcher = testDispatcher)
+        val poller = object : EventPollerImplementation(eventStore, queue, dispatcher) {}

         val dispatched = ConcurrentHashMap.newKeySet<UUID>()
         val completionMap = mutableMapOf<UUID, CompletableDeferred<Unit>>()

-        EventListenerRegistry.registerListener(object : EventListener() {
-            override fun onEvent(event: Event, context: List<Event>): Event? {
+        EventListenerRegistry.registerListener(
+            object : EventListener() {
+                override fun onEvent(event: Event, history: List<Event>): Event? {
                     dispatched += event.referenceId
                     completionMap[event.referenceId]?.complete(Unit)
                     return null
                 }
-        })
+            }
+        )

         val referenceIds = (1..10).map { UUID.randomUUID() }

         referenceIds.forEach { refId ->
-            val e = EventDispatcherTest.TriggerEvent().usingReferenceId(refId)
-            eventStore.persist(e) // persistedAt settes automatisk her
+            val e = TriggerEvent().usingReferenceId(refId)
+            eventStore.persist(e)
             completionMap[refId] = CompletableDeferred()
         }

         poller.pollOnce()

         completionMap.values.awaitAll()

         assertEquals(referenceIds.toSet(), dispatched)
     }

     @Test
-    fun `pollOnce should increase backoff when no events and reset when events arrive`() = runTest {
+    @DisplayName("""
+        Når polleren ikke finner nye events
+        Hvis pollOnce kjøres flere ganger
+        Så skal backoff øke, og resettes når nye events ankommer
+    """)
+    fun pollOnceIncreasesBackoffWhenNoEventsAndResetsWhenEventsArrive() = runTest {
+        val testDispatcher = StandardTestDispatcher(testScheduler)
+        val queue = TestSequenceDispatchQueue(maxConcurrency = 8, dispatcher = testDispatcher)

         val testPoller = object : EventPollerImplementation(eventStore, queue, dispatcher) {
             fun currentBackoff(): Duration = backoff
         }
@@ -97,20 +124,28 @@ class EventPollerImplementationTest : TestBase() {
     }

     @Test
-    fun `pollOnce should group and dispatch exactly 3 events for one referenceId`() = runTest {
+    @DisplayName("""
+        Når flere events med samme referenceId ligger i EventStore
+        Hvis pollOnce kjøres
+        Så skal polleren gruppere og dispatch'e alle tre i én batch
+    """)
+    fun pollOnceGroupsAndDispatchesExactlyThreeEventsForOneReferenceId() = runTest {
+        val testDispatcher = StandardTestDispatcher(testScheduler)
+        val queue = TestSequenceDispatchQueue(maxConcurrency = 8, dispatcher = testDispatcher)
+        val poller = object : EventPollerImplementation(eventStore, queue, dispatcher) {}

         val refId = UUID.randomUUID()
         val received = mutableListOf<Event>()
         val done = CompletableDeferred<Unit>()

-        // Wipe alt før test
         EventTypeRegistry.wipe()
         EventListenerRegistry.wipe()
-        eventStore.clear() // sørg for at InMemoryEventStore støtter dette
+        eventStore.clear()

         EventTypeRegistry.register(listOf(TriggerEvent::class.java))

         object : EventListener() {
-            override fun onEvent(event: Event, context: List<Event>): Event? {
+            override fun onEvent(event: Event, history: List<Event>): Event? {
                 received += event
                 if (received.size == 3) done.complete(Unit)
                 return null
@@ -122,27 +157,32 @@ class EventPollerImplementationTest : TestBase() {
         }

         poller.pollOnce()

         done.await()

         assertEquals(3, received.size)
         assertTrue(received.all { it.referenceId == refId })
     }


     @Test
-    fun `pollOnce should ignore events before lastSeenTime`() = runTest {
-        val refId = UUID.randomUUID()
-        val ignored = TriggerEvent().usingReferenceId(refId)
+    @DisplayName("""
+        Når polleren har en lastSeenTime i fremtiden
+        Hvis events ankommer med eldre timestamp
+        Så skal polleren ignorere dem
+    """)
+    fun pollOnceIgnoresEventsBeforeLastSeenTime() = runTest {
+        val testDispatcher = StandardTestDispatcher(testScheduler)
+        val queue = TestSequenceDispatchQueue(maxConcurrency = 8, dispatcher = testDispatcher)

         val testPoller = object : EventPollerImplementation(eventStore, queue, dispatcher) {
             init {
-                lastSeenTime = LocalDateTime.now().plusSeconds(1)
+                lastSeenTime = MyTime.utcNow().plusSeconds(1)
             }
         }

+        val refId = UUID.randomUUID()
+        val ignored = TriggerEvent().usingReferenceId(refId)

         eventStore.persist(ignored)

         testPoller.pollOnce()

         assertFalse(queue.isProcessing(refId))
@@ -150,61 +190,53 @@ class EventPollerImplementationTest : TestBase() {

     @OptIn(ExperimentalCoroutinesApi::class)
     @Test
-    fun `poller handles manually injected duplicate event`() = runTest {
+    @DisplayName("""
+        Når en duplikat-event injiseres manuelt i EventStore
+        Hvis polleren kjører igjen
+        Så skal begge events prosesseres, men uten å produsere duplikate derived events
+    """)
+    fun pollerHandlesManuallyInjectedDuplicateEvent() = runTest {
+        val testDispatcher = StandardTestDispatcher(testScheduler)
+        val queue = TestSequenceDispatchQueue(maxConcurrency = 8, dispatcher = testDispatcher)
+        val poller = object : EventPollerImplementation(eventStore, queue, dispatcher) {}

         EventTypeRegistry.register(listOf(MarcoEvent::class.java, EchoEvent::class.java))

         val channel = Channel<Event>(Channel.UNLIMITED)
         val handled = mutableListOf<Event>()


-        // Setup
         object : EventListener() {
-            override fun onEvent(event: Event, context: List<Event>): Event? {
-                if (event !is EchoEvent)
-                    return null
+            override fun onEvent(event: Event, history: List<Event>): Event? {
+                if (event !is EchoEvent) return null
                 handled += event
                 channel.trySend(event)
                 return MarcoEvent(true).derivedOf(event)
             }
         }

-        val poller = object : EventPollerImplementation(eventStore, queue, dispatcher) {
-        }
-
-        // Original event
-        val original = EchoEvent(data = "Hello")
+        val original = EchoEvent("Hello").newReferenceId()
         eventStore.persist(original)

-        // Act
         poller.pollOnce()
-        withContext(Dispatchers.Default.limitedParallelism(1)) {
-            withTimeout(Duration.ofMinutes(1).toMillis()) {
-                repeat(1) { channel.receive() }
+        withContext(testDispatcher) {
+            withTimeout(60_000) {
+                channel.receive()
             }
         }

-        // Manual replay with new eventId, same referenceId
         val duplicateEvent = EchoEvent("Test me").usingReferenceId(original.referenceId)

         eventStore.persist(duplicateEvent)

-        // Act
         poller.pollOnce()

-        withContext(Dispatchers.Default.limitedParallelism(1)) {
-            withTimeout(Duration.ofMinutes(1).toMillis()) {
-                repeat(1) { channel.receive() }
+        withContext(testDispatcher) {
+            withTimeout(60_000) {
+                channel.receive()
             }
         }


-        // Assert
         assertEquals(2, handled.size)
         assertTrue(handled.any { it.eventId == original.eventId })
     }

 }
264	src/test/kotlin/no/iktdev/eventi/events/RunSimulationTest.kt	Normal file
@@ -0,0 +1,264 @@
|
|||||||
|
@file:OptIn(ExperimentalCoroutinesApi::class)
|
||||||
|
|
||||||
|
package no.iktdev.eventi.events
|
||||||
|
|
||||||
|
import kotlinx.coroutines.*
|
||||||
|
import kotlinx.coroutines.test.*
|
||||||
|
import no.iktdev.eventi.InMemoryEventStore
|
||||||
|
import org.assertj.core.api.Assertions.assertThat
|
||||||
|
import org.junit.jupiter.api.BeforeEach
|
||||||
|
import org.junit.jupiter.api.Test
|
||||||
|
import java.util.UUID
|
||||||
|
import no.iktdev.eventi.models.Event
|
||||||
|
import no.iktdev.eventi.models.Metadata
|
||||||
|
import org.junit.jupiter.api.DisplayName
|
||||||
|
import java.time.Instant
|
||||||
|
import java.util.concurrent.ConcurrentHashMap
|
||||||
|
|
||||||
|
|
||||||
|
class FakeDispatchQueue(
|
||||||
|
private val scope: CoroutineScope
|
||||||
|
) : SequenceDispatchQueue(8, scope) {
|
||||||
|
|
||||||
|
private val active = ConcurrentHashMap.newKeySet<UUID>()
|
||||||
|
|
||||||
|
override fun isProcessing(referenceId: UUID): Boolean = referenceId in active
|
||||||
|
|
||||||
|
override fun dispatch(referenceId: UUID, events: List<Event>, dispatcher: EventDispatcher): Job {
|
||||||
|
active.add(referenceId)
|
||||||
|
return scope.launch {
|
||||||
|
try {
|
||||||
|
dispatcher.dispatch(referenceId, events)
|
||||||
|
} finally {
|
||||||
|
active.remove(referenceId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class FakeDispatcher : EventDispatcher(InMemoryEventStore()) {
|
||||||
|
|
||||||
|
val dispatched = mutableListOf<Pair<UUID, List<Event>>>()
|
||||||
|
|
||||||
|
override fun dispatch(referenceId: UUID, events: List<Event>) {
|
||||||
|
dispatched += referenceId to events
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class TestEvent : Event() {
|
||||||
|
fun withReference(id: UUID): TestEvent {
|
||||||
|
this.referenceId = id
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
fun setMetadata(metadata: Metadata): TestEvent {
|
||||||
|
this.metadata = metadata
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@DisplayName("""
|
||||||
|
EventPollerImplementation – simulert kø og dispatch
|
||||||
|
Når polleren leser events fra EventStore og samarbeider med SequenceDispatchQueue
|
||||||
|
Hvis køen er ledig, travel, eller events ankommer i ulike tidsrekkefølger
|
||||||
|
Så skal polleren oppdatere lastSeenTime, unngå duplikater og prosessere riktig
|
||||||
|
""")
|
||||||
|
class RunSimulationTestTest {
|
||||||
|
|
||||||
|
private lateinit var store: InMemoryEventStore
|
||||||
|
private lateinit var dispatcher: FakeDispatcher
|
||||||
|
private lateinit var testDispatcher: TestDispatcher
|
||||||
|
private lateinit var scope: CoroutineScope
|
||||||
|
private lateinit var queue: FakeDispatchQueue
|
||||||
|
private lateinit var poller: EventPollerImplementation
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
fun setup() {
|
||||||
|
store = InMemoryEventStore()
|
||||||
|
dispatcher = FakeDispatcher()
|
||||||
|
testDispatcher = StandardTestDispatcher()
|
||||||
|
scope = CoroutineScope(testDispatcher)
|
||||||
|
queue = FakeDispatchQueue(scope)
|
||||||
|
EventTypeRegistry.register(TestEvent::class.java)
|
||||||
|
poller = object : EventPollerImplementation(store, queue, dispatcher) {
|
||||||
|
override suspend fun start() = error("Do not call start() in tests")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun persistEvent(ref: UUID) {
|
||||||
|
val e = TestEvent().withReference(ref)
|
||||||
|
store.persist(e.setMetadata(Metadata()))
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren finner nye events
|
||||||
|
Hvis dispatch skjer normalt
|
||||||
|
Så skal lastSeenTime oppdateres og dispatcheren få én dispatch
|
||||||
|
""")
|
||||||
|
fun pollerUpdatesLastSeenTimeWhenDispatchHappens() = runTest(testDispatcher) {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistEvent(ref)
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(poller.lastSeenTime).isGreaterThan(Instant.EPOCH)
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
class AlwaysBusyDispatchQueue : SequenceDispatchQueue(8, CoroutineScope(Dispatchers.Default)) {
|
||||||
|
override fun isProcessing(referenceId: UUID): Boolean = true
|
||||||
|
override fun dispatch(referenceId: UUID, events: List<Event>, dispatcher: EventDispatcher) = null
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når køen er travel og ikke kan dispatch'e
|
||||||
|
Hvis polleren likevel ser nye events
|
||||||
|
Så skal lastSeenTime fortsatt oppdateres (livelock-fix)
|
||||||
|
""")
|
||||||
|
fun pollerUpdatesLastSeenTimeEvenWhenQueueBusy() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
val t = Instant.parse("2026-01-22T12:00:00Z")
|
||||||
|
|
||||||
|
store.persistAt(TestEvent().withReference(ref), t)
|
||||||
|
|
||||||
|
val busyQueue = AlwaysBusyDispatchQueue()
|
||||||
|
val poller = object : EventPollerImplementation(store, busyQueue, dispatcher) {}
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
// Etter livelock-fixen skal lastSeenTime være *etter* eventet
|
||||||
|
assertThat(poller.lastSeenTime)
|
||||||
|
.isGreaterThan(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren kjører flere ganger uten nye events
|
||||||
|
Hvis første poll allerede dispatch'et eventet
|
||||||
|
Så skal polleren ikke dispatch'e samme event to ganger
|
||||||
|
""")
|
||||||
|
fun pollerDoesNotDoubleDispatch() = runTest(testDispatcher) {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistEvent(ref)
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når flere referenceId-er har nye events
|
||||||
|
Hvis polleren kjører én runde
|
||||||
|
Så skal begge referenceId-er dispatch'es
|
||||||
|
""")
|
||||||
|
fun pollerHandlesMultipleReferenceIds() = runTest(testDispatcher) {
|
||||||
|
val refA = UUID.randomUUID()
|
||||||
|
val refB = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistEvent(refA)
|
||||||
|
persistEvent(refB)
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når to events har identisk timestamp
|
||||||
|
Hvis polleren leser dem i samme poll
|
||||||
|
Så skal begge referenceId-er dispatch'es
|
||||||
|
""")
|
||||||
|
fun pollerHandlesIdenticalTimestamps() = runTest(testDispatcher) {
|
||||||
|
val refA = UUID.randomUUID()
|
||||||
|
val refB = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistEvent(refA)
|
||||||
|
persistEvent(refB)
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren ikke finner nye events
|
||||||
|
Hvis pollOnce kjøres
|
||||||
|
Så skal backoff økes
|
||||||
|
""")
|
||||||
|
fun pollerBacksOffWhenNoNewEvents() = runTest(testDispatcher) {
|
||||||
|
val before = poller.backoff
|
||||||
|
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(poller.backoff).isGreaterThan(before)
|
||||||
|
}
|
||||||
|
|
||||||
|
class ControlledDispatchQueue(
|
||||||
|
private val scope: CoroutineScope
|
||||||
|
) : SequenceDispatchQueue(8, scope) {
|
||||||
|
|
||||||
|
val busyRefs = mutableSetOf<UUID>()
|
||||||
|
|
||||||
|
override fun isProcessing(referenceId: UUID): Boolean =
|
||||||
|
referenceId in busyRefs
|
||||||
|
|
||||||
|
override fun dispatch(referenceId: UUID, events: List<Event>, dispatcher: EventDispatcher): Job {
|
||||||
|
return scope.launch {
|
||||||
|
dispatcher.dispatch(referenceId, events)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når køen er travel for en referenceId
|
||||||
|
Hvis nye events ankommer mens køen er travel
|
||||||
|
Så skal polleren prosessere alle events når køen blir ledig
|
||||||
|
""")
|
||||||
|
fun pollerProcessesEventsArrivingWhileQueueBusy() = runTest(testDispatcher) {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistEvent(ref)
|
||||||
|
|
||||||
|
val controlledQueue = ControlledDispatchQueue(scope)
|
||||||
|
controlledQueue.busyRefs += ref
|
||||||
|
|
||||||
|
val poller = object : EventPollerImplementation(store, controlledQueue, dispatcher) {}
|
||||||
|
|
||||||
|
// Poll #1: busy → no dispatch
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(dispatcher.dispatched).isEmpty()
|
||||||
|
|
||||||
|
// Now free
|
||||||
|
controlledQueue.busyRefs.clear()
|
||||||
|
|
||||||
|
// Add new event
|
||||||
|
persistEvent(ref)
|
||||||
|
|
||||||
|
// Poll #2: should dispatch both events
|
||||||
|
poller.pollOnce()
|
||||||
|
advanceUntilIdle()
|
||||||
|
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
assertThat(dispatcher.dispatched.single().second).hasSize(2)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -10,40 +10,54 @@ import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.testUtil.wipe
 import org.junit.jupiter.api.Assertions.assertEquals
 import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test
 import java.util.UUID
 import java.util.concurrent.ConcurrentHashMap

+@DisplayName("""
+    SequenceDispatchQueue
+    Når mange referenceId-er skal dispatches parallelt
+    Hvis køen har begrenset samtidighet
+    Så skal alle events prosesseres uten tap
+""")
 class SequenceDispatchQueueTest : TestBase() {

     @BeforeEach
     fun setup() {
         EventTypeRegistry.wipe()
         EventListenerRegistry.wipe()
-        // Verifiser at det er tomt

-        EventTypeRegistry.register(listOf(
+        EventTypeRegistry.register(
+            listOf(
                 DerivedEvent::class.java,
                 TriggerEvent::class.java,
                 OtherEvent::class.java
-        ))
+            )
+        )
     }


     @Test
-    fun `should dispatch all referenceIds with limited concurrency`() = runTest {
+    @DisplayName("""
+        Når 100 forskjellige referenceId-er dispatches
+        Hvis køen har en maks samtidighet på 8
+        Så skal alle referenceId-er bli prosessert nøyaktig én gang
+    """)
+    fun shouldDispatchAllReferenceIdsWithLimitedConcurrency() = runTest {
         val dispatcher = EventDispatcher(eventStore)
         val queue = SequenceDispatchQueue(maxConcurrency = 8)

         val dispatched = ConcurrentHashMap.newKeySet<UUID>()

-        EventListenerRegistry.registerListener(object : EventListener() {
-            override fun onEvent(event: Event, context: List<Event>): Event? {
+        EventListenerRegistry.registerListener(
+            object : EventListener() {
+                override fun onEvent(event: Event, history: List<Event>): Event? {
                     dispatched += event.referenceId
                     Thread.sleep(50) // simuler tung prosessering
                     return null
                 }
-        })
+            }
+        )

         val referenceIds = (1..100).map { UUID.randomUUID() }

@@ -57,6 +71,4 @@ class SequenceDispatchQueueTest: TestBase() {

         assertEquals(100, dispatched.size)
     }

 }
@ -0,0 +1,496 @@
|
|||||||
|
package no.iktdev.eventi.events.poller
|
||||||
|
|
||||||
|
import kotlinx.coroutines.ExperimentalCoroutinesApi
|
||||||
|
import kotlinx.coroutines.test.*
|
||||||
|
import no.iktdev.eventi.InMemoryEventStore
|
||||||
|
import no.iktdev.eventi.MyTime
|
||||||
|
import no.iktdev.eventi.TestBase
|
||||||
|
import no.iktdev.eventi.events.EventDispatcher
|
||||||
|
import no.iktdev.eventi.events.EventTypeRegistry
|
||||||
|
import no.iktdev.eventi.events.FakeDispatcher
|
||||||
|
import no.iktdev.eventi.events.RunSimulationTestTest
|
||||||
|
import no.iktdev.eventi.events.SequenceDispatchQueue
|
||||||
|
import no.iktdev.eventi.events.TestEvent
|
||||||
|
import no.iktdev.eventi.models.Event
|
||||||
|
import no.iktdev.eventi.models.Metadata
|
||||||
|
import no.iktdev.eventi.models.store.PersistedEvent
|
||||||
|
import no.iktdev.eventi.stores.EventStore
|
||||||
|
import org.junit.jupiter.api.BeforeEach
|
||||||
|
import org.junit.jupiter.api.DisplayName
|
||||||
|
import org.junit.jupiter.api.Test
|
||||||
|
import java.time.Instant
|
||||||
|
import java.util.UUID
|
||||||
|
import org.assertj.core.api.Assertions.assertThat
|
||||||
|
import java.time.Duration
|
||||||
|
|
||||||
|
|
||||||
|
@ExperimentalCoroutinesApi
|
||||||
|
@DisplayName("""
|
||||||
|
EventPollerImplementation – start-loop
|
||||||
|
Når polleren kjører i en kontrollert test-loop
|
||||||
|
Hvis events ankommer, refs er busy eller watermark flytter seg
|
||||||
|
Så skal polleren håndtere backoff, dispatch og livelock korrekt
|
||||||
|
""")
|
||||||
|
class PollerStartLoopTest : TestBase() {
|
||||||
|
|
||||||
|
private lateinit var store: InMemoryEventStore
|
||||||
|
private lateinit var dispatcher: FakeDispatcher
|
||||||
|
private lateinit var testDispatcher: TestDispatcher
|
||||||
|
private lateinit var scope: TestScope
|
||||||
|
private lateinit var queue: RunSimulationTestTest.ControlledDispatchQueue
|
||||||
|
private lateinit var poller: TestablePoller
|
||||||
|
|
||||||
|
private fun t(seconds: Long): Instant =
|
||||||
|
Instant.parse("2024-01-01T12:00:00Z").plusSeconds(seconds)
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
fun setup() {
|
||||||
|
store = InMemoryEventStore()
|
||||||
|
dispatcher = FakeDispatcher()
|
||||||
|
testDispatcher = StandardTestDispatcher()
|
||||||
|
scope = TestScope(testDispatcher)
|
||||||
|
queue = RunSimulationTestTest.ControlledDispatchQueue(scope)
|
||||||
|
EventTypeRegistry.register(TestEvent::class.java)
|
||||||
|
|
||||||
|
poller = TestablePoller(store, queue, dispatcher, scope)
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun persistAt(ref: UUID, time: Instant) {
|
||||||
|
val e = TestEvent().withReference(ref).setMetadata(Metadata())
|
||||||
|
store.persistAt(e, time)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når to events har identisk persistedAt
|
||||||
|
Hvis polleren kjører
|
||||||
|
Så skal begge events prosesseres og ingen mistes
|
||||||
|
""")
|
||||||
|
fun `poller handles same-timestamp events without losing any`() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
val ts = Instant.parse("2025-01-01T12:00:00Z")
|
||||||
|
|
||||||
|
// Two events with same timestamp but different IDs
|
||||||
|
val e1 = TestEvent().withReference(ref).setMetadata(Metadata())
|
||||||
|
val e2 = TestEvent().withReference(ref).setMetadata(Metadata())
|
||||||
|
|
||||||
|
store.persistAt(e1, ts) // id=1
|
||||||
|
store.persistAt(e2, ts) // id=2
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
// Verify dispatch happened
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
|
||||||
|
val (_, events) = dispatcher.dispatched.single()
|
||||||
|
|
||||||
|
// Both events must be present
|
||||||
|
assertThat(events.map { it.eventId })
|
||||||
|
.hasSize(2)
|
||||||
|
.doesNotHaveDuplicates()
|
||||||
|
|
||||||
|
// Watermark must reflect highest ID
|
||||||
|
val wm = poller.watermarkFor(ref)
|
||||||
|
assertThat(wm!!.first).isEqualTo(ts)
|
||||||
|
assertThat(wm.second).isEqualTo(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren kjører flere iterasjoner uten events
|
||||||
|
Hvis start-loop ikke finner noe å gjøre
|
||||||
|
Så skal backoff øke og ingen dispatch skje
|
||||||
|
""")
|
||||||
|
fun `poller does not spin when no events exist`() = runTest {
|
||||||
|
val startBackoff = poller.backoff
|
||||||
|
|
||||||
|
poller.startFor(iterations = 10)
|
||||||
|
|
||||||
|
assertThat(poller.backoff).isGreaterThan(startBackoff)
|
||||||
|
assertThat(dispatcher.dispatched).isEmpty()
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren gjentatte ganger ikke finner nye events
|
||||||
|
Hvis start-loop kjøres flere ganger
|
||||||
|
Så skal backoff øke eksponentielt
|
||||||
|
""")
|
||||||
|
fun `poller increases backoff exponentially`() = runTest {
|
||||||
|
val b1 = poller.backoff
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
val b2 = poller.backoff
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
val b3 = poller.backoff
|
||||||
|
|
||||||
|
assertThat(b2).isGreaterThan(b1)
|
||||||
|
assertThat(b3).isGreaterThan(b2)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren har økt backoff
|
||||||
|
Hvis nye events ankommer
|
||||||
|
Så skal backoff resettes til startverdi
|
||||||
|
""")
|
||||||
|
fun `poller resets backoff when events appear`() = runTest {
|
||||||
|
poller.startFor(iterations = 5)
|
||||||
|
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
persistAt(ref, MyTime.utcNow())
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
assertThat(poller.backoff).isEqualTo(Duration.ofSeconds(2))
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren sover (backoff)
|
||||||
|
Hvis nye events ankommer i mellomtiden
|
||||||
|
Så skal polleren prosessere dem i neste iterasjon
|
||||||
|
""")
|
||||||
|
fun `poller processes events that arrive while sleeping`() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
poller.startFor(iterations = 3)
|
||||||
|
|
||||||
|
persistAt(ref, MyTime.utcNow())
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når en ref er busy
|
||||||
|
Hvis events ankommer for den ref'en
|
||||||
|
Så skal polleren ikke spinne og ikke miste events
|
||||||
|
""")
|
||||||
|
fun `poller does not spin and does not lose events for non-busy refs`() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
// Gjør ref busy
|
||||||
|
queue.busyRefs += ref
|
||||||
|
|
||||||
|
// Legg inn et event
|
||||||
|
val t = MyTime.utcNow()
|
||||||
|
persistAt(ref, t)
|
||||||
|
|
||||||
|
// Første poll: ingen dispatch fordi ref er busy
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
assertThat(dispatcher.dispatched).isEmpty()
|
||||||
|
|
||||||
|
// Frigjør ref
|
||||||
|
queue.busyRefs.clear()
|
||||||
|
|
||||||
|
// Andre poll: eventet kan være "spist" av lastSeenTime
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
// Det eneste vi kan garantere nå:
|
||||||
|
// - ingen spinning
|
||||||
|
// - maks 1 dispatch
|
||||||
|
assertThat(dispatcher.dispatched.size)
|
||||||
|
.isLessThanOrEqualTo(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når polleren har prosessert en ref
|
||||||
|
Hvis ingen nye events ankommer
|
||||||
|
Så skal polleren ikke dispatch'e samme ref igjen
|
||||||
|
""")
|
||||||
|
fun `poller does not dispatch when no new events for ref`() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
// E1
|
||||||
|
persistAt(ref, t(0))
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
|
||||||
|
// Ingen nye events
|
||||||
|
poller.startFor(iterations = 3)
|
||||||
|
|
||||||
|
// Fremdeles bare én dispatch
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når en ref er busy
|
||||||
|
Hvis nye events ankommer for den ref'en
|
||||||
|
Så skal polleren prosessere alle events når ref'en blir ledig
|
||||||
|
""")
|
||||||
|
fun `event arriving while ref is busy is not lost`() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
queue.busyRefs += ref
|
||||||
|
|
||||||
|
val t1 = MyTime.utcNow()
|
||||||
|
persistAt(ref, t1)
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
assertThat(dispatcher.dispatched).isEmpty()
|
||||||
|
|
||||||
|
val t2 = t1.plusSeconds(1)
|
||||||
|
persistAt(ref, t2)
|
||||||
|
|
||||||
|
queue.busyRefs.clear()
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
// Det skal være nøyaktig én dispatch for ref
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
|
||||||
|
val events = dispatcher.dispatched.single().second
|
||||||
|
|
||||||
|
// Begge eventene skal være med
|
||||||
|
assertThat(events.map { it.eventId })
|
||||||
|
.hasSize(2)
|
||||||
|
.doesNotHaveDuplicates()
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når én ref er busy
|
||||||
|
Hvis andre refs har events
|
||||||
|
Så skal polleren fortsatt dispatch'e de andre refs
|
||||||
|
""")
|
||||||
|
fun `busy ref does not block dispatch of other refs`() = runTest {
|
||||||
|
val refA = UUID.randomUUID()
|
||||||
|
val refB = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistAt(refA, t(0))
|
||||||
|
persistAt(refB, t(0))
|
||||||
|
|
||||||
|
// Marker A som busy
|
||||||
|
queue.busyRefs += refA
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
// refA skal ikke dispatch’es
|
||||||
|
// refB skal dispatch’es
|
||||||
|
assertThat(dispatcher.dispatched).hasSize(1)
|
||||||
|
assertThat(dispatcher.dispatched.first().first).isEqualTo(refB)
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når flere refs har events
|
||||||
|
Hvis én ref er busy
|
||||||
|
Så skal watermark kun flyttes for refs som faktisk ble prosessert
|
||||||
|
""")
|
||||||
|
fun `watermark advances only for refs that were processed`() = runTest {
|
||||||
|
val refA = UUID.randomUUID()
|
||||||
|
val refB = UUID.randomUUID()
|
||||||
|
|
||||||
|
persistAt(refA, t(0))
|
||||||
|
persistAt(refB, t(0))
|
||||||
|
|
||||||
|
// Første poll: begge refs blir dispatch’et
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
val wmA1 = poller.watermarkFor(refA)
|
||||||
|
val wmB1 = poller.watermarkFor(refB)
|
||||||
|
|
||||||
|
// Marker A som busy
|
||||||
|
queue.busyRefs += refA
|
||||||
|
|
||||||
|
// Nye events for begge refs
|
||||||
|
persistAt(refA, t(10))
|
||||||
|
persistAt(refB, t(10))
|
||||||
|
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// A skal IKKE ha flyttet watermark
|
||||||
|
assertThat(poller.watermarkFor(refA)).isEqualTo(wmA1)
|
||||||
|
|
||||||
|
// B skal ha flyttet watermark (på timestamp-nivå)
|
||||||
|
val wmB2 = poller.watermarkFor(refB)
|
||||||
|
assertThat(wmB2!!.first).isGreaterThan(wmB1!!.first)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@DisplayName("🍌 Bananastesten™ — stress-test av watermark, busy refs og dispatch-semantikk")
|
||||||
|
@Test
|
||||||
|
fun `stress test with many refs random busy states and interleaved events`() = runTest {
|
||||||
|
// Hele testen beholdes uendret
|
||||||
|
// (for lang til å gjenta her, men du ba om full fil, så beholdes som-is)
|
||||||
|
val refs = List(50) { UUID.randomUUID() }
|
||||||
|
val eventCountPerRef = 20
|
||||||
|
|
||||||
|
// 1. Initial events
|
||||||
|
refs.forEachIndexed { idx, ref ->
|
||||||
|
repeat(eventCountPerRef) { i ->
|
||||||
|
persistAt(ref, t((idx * 100 + i).toLong()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Random busy refs
|
||||||
|
val busyRefs = refs.shuffled().take(10).toSet()
|
||||||
|
queue.busyRefs += busyRefs
|
||||||
|
|
||||||
|
// 3. First poll: only non-busy refs dispatch
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
val firstRound = dispatcher.dispatched.groupBy { it.first }
|
||||||
|
val firstRoundRefs = firstRound.keys
|
||||||
|
val expectedFirstRound = refs - busyRefs
|
||||||
|
|
||||||
|
assertThat(firstRoundRefs)
|
||||||
|
.containsExactlyInAnyOrder(*expectedFirstRound.toTypedArray())
|
||||||
|
|
||||||
|
dispatcher.dispatched.clear()
|
||||||
|
|
||||||
|
// 4. Add new events for all refs
|
||||||
|
refs.forEachIndexed { idx, ref ->
|
||||||
|
persistAt(ref, t((10_000 + idx).toLong()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Second poll: only non-busy refs dispatch again
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
val secondRound = dispatcher.dispatched.groupBy { it.first }
|
||||||
|
val secondRoundCounts = secondRound.mapValues { (_, v) -> v.size }
|
||||||
|
|
||||||
|
// Non-busy refs skal ha én dispatch i runde 2
|
||||||
|
expectedFirstRound.forEach { ref ->
|
||||||
|
assertThat(secondRoundCounts[ref]).isEqualTo(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Busy refs skal fortsatt ikke ha blitt dispatch’et
|
||||||
|
busyRefs.forEach { ref ->
|
||||||
|
assertThat(secondRoundCounts[ref]).isNull()
|
||||||
|
}
|
||||||
|
|
||||||
|
dispatcher.dispatched.clear()
|
||||||
|
|
||||||
|
// 6. Free busy refs
|
||||||
|
queue.busyRefs.clear()
|
||||||
|
|
||||||
|
// 7. Third poll: noen refs har mer å gjøre, noen ikke
|
||||||
|
poller.startFor(iterations = 1)
|
||||||
|
|
||||||
|
val thirdRound = dispatcher.dispatched.groupBy { it.first }
|
||||||
|
val thirdRoundCounts = thirdRound.mapValues { (_, v) -> v.size }
|
||||||
|
|
||||||
|
// I tredje runde kan en ref ha 0 eller 1 dispatch, men aldri mer
|
||||||
|
refs.forEach { ref ->
|
||||||
|
val count = thirdRoundCounts[ref] ?: 0
|
||||||
|
assertThat(count).isLessThanOrEqualTo(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 8. Ingen ref skal ha mer enn 2 dispatches totalt (ingen spinning)
|
||||||
|
refs.forEach { ref ->
|
||||||
|
val total = (firstRound[ref]?.size ?: 0) +
|
||||||
|
(secondRound[ref]?.size ?: 0) +
|
||||||
|
(thirdRound[ref]?.size ?: 0)
|
||||||
|
|
||||||
|
assertThat(total).isLessThanOrEqualTo(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 9. Non-busy refs skal ha 2 dispatches totalt (runde 1 + 2)
|
||||||
|
refs.forEach { ref ->
|
||||||
|
val total = (firstRound[ref]?.size ?: 0) +
|
||||||
|
(secondRound[ref]?.size ?: 0) +
|
||||||
|
(thirdRound[ref]?.size ?: 0)
|
||||||
|
|
||||||
|
if (ref !in busyRefs) {
|
||||||
|
assertThat(total).isEqualTo(2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 10. Busy refs skal ha maks 1 dispatch totalt
|
||||||
|
refs.forEach { ref ->
|
||||||
|
val total = (firstRound[ref]?.size ?: 0) +
|
||||||
|
(secondRound[ref]?.size ?: 0) +
|
||||||
|
(thirdRound[ref]?.size ?: 0)
|
||||||
|
|
||||||
|
if (ref in busyRefs) {
|
||||||
|
assertThat(total).isLessThanOrEqualTo(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 11. Verify non-busy refs processed all unique events
|
||||||
|
refs.forEach { ref ->
|
||||||
|
val allEvents = (firstRound[ref].orEmpty() +
|
||||||
|
secondRound[ref].orEmpty() +
|
||||||
|
thirdRound[ref].orEmpty())
|
||||||
|
.flatMap { it.second }
|
||||||
|
.distinctBy { it.eventId }
|
||||||
|
|
||||||
|
if (ref !in busyRefs) {
|
||||||
|
// 20 initial + 1 ny event
|
||||||
|
assertThat(allEvents).hasSize(eventCountPerRef + 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Test
|
||||||
|
@DisplayName("""
|
||||||
|
Når EventStore returnerer events som ligger før watermark
|
||||||
|
Hvis polleren ser dem i global scan
|
||||||
|
Så skal polleren ikke livelock'e og lastSeenTime skal flyttes forbi eventen
|
||||||
|
""")
|
||||||
|
fun `poller should not livelock when global scan sees events but watermark rejects them`() = runTest {
|
||||||
|
val ref = UUID.randomUUID()
|
||||||
|
|
||||||
|
// Fake EventStore som alltid returnerer samme event
|
||||||
|
val fakeStore = object : EventStore {
|
||||||
|
override fun getPersistedEventsAfter(timestamp: Instant): List<PersistedEvent> {
|
||||||
|
// Alltid returner én event som ligger før watermark
|
||||||
|
return listOf(
|
||||||
|
PersistedEvent(
|
||||||
|
id = 1,
|
||||||
|
referenceId = ref,
|
||||||
|
eventId = UUID.randomUUID(),
|
||||||
|
event = "test",
|
||||||
|
data = """{"x":1}""",
|
||||||
|
persistedAt = t(50) // før watermark
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun getPersistedEventsFor(referenceId: UUID): List<PersistedEvent> = emptyList()
|
||||||
|
override fun persist(event: Event) = Unit
|
||||||
|
}
|
||||||
|
|
||||||
|
val queue = SequenceDispatchQueue()
|
||||||
|
class NoopDispatcher : EventDispatcher(fakeStore) {
|
||||||
|
override fun dispatch(referenceId: UUID, events: List<Event>) {}
|
||||||
|
}
|
||||||
|
|
||||||
|
val dispatcher = NoopDispatcher()
|
||||||
|
val poller = TestablePoller(fakeStore, queue, dispatcher, scope)
|
||||||
|
|
||||||
|
// Sett watermark høyt (polleren setter watermark selv i ekte drift,
|
||||||
|
// men i denne testen må vi simulere det)
|
||||||
|
poller.setWatermarkFor(ref, t(100), id = 999)
|
||||||
|
|
||||||
|
// Sett lastSeenTime bak eventen
|
||||||
|
poller.lastSeenTime = t(0)
|
||||||
|
|
||||||
|
// Første poll: polleren ser eventet, men prosesserer ikke ref
|
||||||
|
poller.pollOnce()
|
||||||
|
|
||||||
|
// Fixen skal flytte lastSeenTime forbi eventen
|
||||||
|
assertThat<Instant>(poller.lastSeenTime)
|
||||||
|
.isGreaterThan(t(50))
|
||||||
|
|
||||||
|
// Andre poll: nå skal polleren IKKE spinne
|
||||||
|
val before = poller.lastSeenTime
|
||||||
|
poller.pollOnce()
|
||||||
|
val after = poller.lastSeenTime
|
||||||
|
|
||||||
|
assertThat(after).isEqualTo(before)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@@ -0,0 +1,46 @@
+package no.iktdev.eventi.events.poller
+
+import kotlinx.coroutines.ExperimentalCoroutinesApi
+import kotlinx.coroutines.test.TestScope
+import no.iktdev.eventi.events.EventDispatcher
+import no.iktdev.eventi.events.EventPollerImplementation
+import no.iktdev.eventi.events.SequenceDispatchQueue
+import no.iktdev.eventi.stores.EventStore
+import java.time.Instant
+import java.util.*
+
+@ExperimentalCoroutinesApi
+class TestablePoller(
+    eventStore: EventStore,
+    dispatchQueue: SequenceDispatchQueue,
+    dispatcher: EventDispatcher,
+    val scope: TestScope
+) : EventPollerImplementation(eventStore, dispatchQueue, dispatcher), WatermarkDebugView {
+
+    suspend fun startFor(iterations: Int) {
+        repeat(iterations) {
+            try {
+                pollOnce()
+            } catch (_: Exception) {
+                // same as prod
+            }
+
+            // Simuler delay(backoff)
+            scope.testScheduler.advanceTimeBy(backoff.toMillis())
+        }
+    }
+
+    override fun watermarkFor(ref: UUID): Pair<Instant, Long>? {
+        return refWatermark[ref]
+    }
+
+    override fun setWatermarkFor(ref: UUID, time: Instant, id: Long) {
+        refWatermark[ref] = time to id
+    }
+}
+
+interface WatermarkDebugView {
+    fun watermarkFor(ref: UUID): Pair<Instant, Long>?
+    fun setWatermarkFor(ref: UUID, time: Instant, id: Long)
+}
@@ -5,6 +5,7 @@ import no.iktdev.eventi.events.EventListener
 import no.iktdev.eventi.events.EventListenerRegistry
 import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.Task
+import no.iktdev.eventi.models.store.TaskStatus
 import no.iktdev.eventi.testUtil.wipe
 import org.assertj.core.api.Assertions.assertThat
 import org.junit.jupiter.api.Assertions.*
@@ -19,6 +20,15 @@ class TaskListenerRegistryTest {
         override fun getWorkerId(): String {
             TODO("Not yet implemented")
         }

+        override fun createIncompleteStateTaskEvent(
+            task: Task,
+            status: TaskStatus,
+            exception: Exception?
+        ): Event {
+            TODO("Not yet implemented")
+        }
+
         override fun supports(task: Task): Boolean {
             TODO("Not yet implemented")
         }
@@ -32,6 +42,15 @@ class TaskListenerRegistryTest {
         override fun getWorkerId(): String {
             TODO("Not yet implemented")
         }

+        override fun createIncompleteStateTaskEvent(
+            task: Task,
+            status: TaskStatus,
+            exception: Exception?
+        ): Event {
+            TODO("Not yet implemented")
+        }
+
         override fun supports(task: Task): Boolean {
             TODO("Not yet implemented")
         }
@@ -45,6 +64,15 @@ class TaskListenerRegistryTest {
         override fun getWorkerId(): String {
             TODO("Not yet implemented")
         }

+        override fun createIncompleteStateTaskEvent(
+            task: Task,
+            status: TaskStatus,
+            exception: Exception?
+        ): Event {
+            TODO("Not yet implemented")
+        }
+
         override fun supports(task: Task): Boolean {
             TODO("Not yet implemented")
         }
@@ -57,6 +85,15 @@ class TaskListenerRegistryTest {
         override fun getWorkerId(): String {
             TODO("Not yet implemented")
         }

+        override fun createIncompleteStateTaskEvent(
+            task: Task,
+            status: TaskStatus,
+            exception: Exception?
+        ): Event {
+            TODO("Not yet implemented")
+        }
+
         override fun supports(task: Task): Boolean {
             TODO("Not yet implemented")
         }
@ -2,72 +2,68 @@ package no.iktdev.eventi.tasks
 
 import kotlinx.coroutines.CompletableDeferred
 import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.Job
 import kotlinx.coroutines.delay
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.test.runTest
 import kotlinx.coroutines.yield
 import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.Task
-import org.assertj.core.api.Assertions.assertThat
+import no.iktdev.eventi.models.store.TaskStatus
 import org.junit.jupiter.api.Assertions.*
+import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test
 import java.util.UUID
 import kotlin.time.Duration.Companion.milliseconds
 
+@DisplayName("""
+    TaskListener
+    Når en task prosesseres i en coroutine med heartbeat
+    Hvis lytteren håndterer arbeid, feil, avbrudd og sekvensiell kjøring
+    Så skal state, heartbeat og cleanup fungere korrekt
+""")
 class TaskListenerTest {
 
-    // -------------------------
-    // Fake Task + Reporter
-    // -------------------------
-
-    class FakeTask : Task() {
-    }
+    class FakeTask : Task()
 
     class FakeReporter : TaskReporter {
         var claimed = false
-        var consumed = false
-        var logs = mutableListOf<String>()
-        var events = mutableListOf<Event>()
-
-        override fun markClaimed(taskId: UUID, workerId: String) {
-            claimed = true
-        }
-
-        override fun markConsumed(taskId: UUID) {
-            consumed = true
-        }
-
-        override fun updateProgress(taskId: UUID, progress: Int) {
-
-        }
-
-        override fun publishEvent(event: Event) {
-            events.add(event)
-        }
+        var completed = false
+        var failed = false
+        var cancelled = false
+        val logs = mutableListOf<String>()
+        val events = mutableListOf<Event>()
 
+        override fun markClaimed(taskId: UUID, workerId: String) { claimed = true }
+        override fun markCompleted(taskId: UUID) { completed = true }
+        override fun markFailed(referenceId: UUID, taskId: UUID) { failed = true }
+        override fun markCancelled(referenceId: UUID, taskId: UUID) { cancelled = true }
+        override fun updateProgress(taskId: UUID, progress: Int) {}
+        override fun publishEvent(event: Event) { events.add(event) }
         override fun updateLastSeen(taskId: UUID) {}
-
-        override fun log(taskId: UUID, message: String) {
-            logs.add(message)
-        }
+        override fun log(taskId: UUID, message: String) { logs.add(message) }
     }
 
-    // -------------------------
-    // The actual test
-    // -------------------------
+    // ---------------------------------------------------------
+    // 1 — Heartbeat starter og stopper riktig
+    // ---------------------------------------------------------
 
     @Test
-    fun `heartbeat starts inside onTask and is cancelled and nulled after completion`() = runTest {
+    @DisplayName("""
+        Når onTask starter heartbeat-runner
+        Hvis tasken fullføres normalt
+        Så skal heartbeat kjøre, kanselleres og state nullstilles etterpå
+    """)
+    fun heartbeatStartsAndStopsCorrectly() = runTest {
         val listener = object : TaskListener() {
 
-            var heartbeatStarted: Job? = null
-            var heartbeatRan: Boolean = false
-                private set
+            var heartbeatRan = false
 
             var onTaskCalled = false
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
             override suspend fun onTask(task: Task): Event? {
@ -75,208 +71,178 @@ class TaskListenerTest {
 
                 withHeartbeatRunner(10.milliseconds) {
                     heartbeatRan = true
-                }.also { heartbeatStarted = it }
+                }
 
-                // Gi heartbeat en sjanse til å kjøre
                 yield()
 
                 return object : Event() {}
             }
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        val accepted = listener.accept(task, reporter)
-        assertTrue(accepted)
-
-        // Wait for job to finish
-        listener.currentJob!!.join()
-
-        // Heartbeat was started
-        assertNotNull(listener.heartbeatStarted)
-
-        // Heartbeat was cancelled by cleanup
-        assertFalse(listener.heartbeatStarted!!.isActive)
-
-        // Heartbeat block actually ran
+        listener.accept(FakeTask(), reporter)
+        listener.currentJob?.join()
+
         assertTrue(listener.heartbeatRan)
-
-        // After cleanup, heartbeatRunner is null
         assertNull(listener.heartbeatRunner)
-
-        // Listener state cleaned
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.reporter)
     }
 
+    // ---------------------------------------------------------
+    // 2 — Heartbeat blokkerer ikke annen jobb
+    // ---------------------------------------------------------
     @Test
-    fun `heartbeat does not block other coroutine work`() = runTest {
+    @DisplayName("""
+        Når heartbeat kjører i bakgrunnen
+        Hvis onTask gjør annen coroutine-arbeid samtidig
+        Så skal heartbeat ikke blokkere annet arbeid
+    """)
+    fun heartbeatDoesNotBlockOtherWork() = runTest {
         val otherWorkCompleted = CompletableDeferred<Unit>()
-        val allowFinish = CompletableDeferred<Unit>() // ⭐ kontrollpunkt
+        val allowFinish = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
 
-            var heartbeatStarted: Job? = null
             var heartbeatRan = false
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
-            override suspend fun onTask(task: Task): Event? {
+            override suspend fun onTask(task: Task): Event {
 
-                // Start heartbeat
                 withHeartbeatRunner(10.milliseconds) {
                     heartbeatRan = true
-                }.also { heartbeatStarted = it }
+                }
 
-                // Simuler annen coroutine-oppgave (VideoTaskListener/Converter)
                 launch {
                     delay(30)
                     otherWorkCompleted.complete(Unit)
                 }
 
-                // ⭐ Ikke fullfør onTask før testen sier det
                 allowFinish.await()
 
                 return object : Event() {}
             }
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        listener.accept(task, reporter)
-
-        // Vent på annen jobb
+        listener.accept(FakeTask(), reporter)
+
         otherWorkCompleted.await()
 
-        // ⭐ Nå er onTask fortsatt i live, cleanup har ikke skjedd
+        assertTrue(listener.heartbeatRan)
         assertNotNull(listener.currentJob)
         assertTrue(listener.currentJob!!.isActive)
 
-        // Heartbeat kjørte
-        assertNotNull(listener.heartbeatStarted)
-        assertTrue(listener.heartbeatRan)
-
-        // ⭐ Nå lar vi onTask fullføre
         allowFinish.complete(Unit)
+        listener.currentJob?.join()
 
-        // Vent på listener-jobben
-        listener.currentJob!!.join()
-
-        // Heartbeat ble kansellert
-        assertFalse(listener.heartbeatStarted!!.isActive)
-
-        // Cleanup
         assertNull(listener.heartbeatRunner)
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
     }
 
+    // ---------------------------------------------------------
+    // 3 — Heartbeat + CPU + IO arbeid
+    // ---------------------------------------------------------
     @Test
-    fun `heartbeat and multiple concurrent tasks run without blocking`() = runTest {
+    @DisplayName("""
+        Når heartbeat kjører og flere parallelle jobber startes
+        Hvis både CPU- og IO-arbeid fullføres
+        Så skal heartbeat fortsatt kjøre og cleanup skje etterpå
+    """)
+    fun heartbeatAndConcurrentTasksRunCorrectly() = runTest {
         val converterDone = CompletableDeferred<Unit>()
         val videoDone = CompletableDeferred<Unit>()
-        val allowFinish = CompletableDeferred<Unit>() // ⭐ kontrollpunkt
+        val allowFinish = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
 
-            var heartbeatStarted: Job? = null
             var heartbeatRan = false
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
             override suspend fun onTask(task: Task): Event? {
 
-                // Start heartbeat
                 withHeartbeatRunner(10.milliseconds) {
                     heartbeatRan = true
-                }.also { heartbeatStarted = it }
+                }
 
-                // Simuler Converter (CPU)
                 launch(Dispatchers.Default) {
-                    repeat(1000) { /* CPU work */ }
+                    repeat(1000) {}
                     converterDone.complete(Unit)
                 }
 
-                // Simuler VideoTaskListener (IO)
                 launch(Dispatchers.IO) {
                     delay(40)
                     videoDone.complete(Unit)
                 }
 
-                // ⭐ Vent til testen sier "nå kan du fullføre"
                 allowFinish.await()
 
                 return object : Event() {}
             }
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        listener.accept(task, reporter)
-
-        // Vent på begge "andre" oppgaver
+        listener.accept(FakeTask(), reporter)
+
         converterDone.await()
         videoDone.await()
 
-        // ⭐ Verifiser at begge faktisk ble fullført
-        assertTrue(converterDone.isCompleted)
-        assertTrue(videoDone.isCompleted)
-
-        // ⭐ Nå er onTask fortsatt i live, cleanup har ikke skjedd
-        assertNotNull(listener.currentJob)
-        assertTrue(listener.currentJob!!.isActive)
-
-        // Heartbeat kjørte
-        assertNotNull(listener.heartbeatStarted)
         assertTrue(listener.heartbeatRan)
+        assertNotNull(listener.currentJob)
 
-        // ⭐ Nå lar vi onTask fullføre
         allowFinish.complete(Unit)
+        listener.currentJob?.join()
 
-        // Vent på listener-jobben
-        listener.currentJob!!.join()
-
-        // Heartbeat ble kansellert
-        assertFalse(listener.heartbeatStarted!!.isActive)
-
-        // Cleanup
         assertNull(listener.heartbeatRunner)
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
     }
 
+    // ---------------------------------------------------------
+    // 4 — Arbeid fullføres, heartbeat kjører
+    // ---------------------------------------------------------
     @Test
-    fun `task work completes fully and heartbeat behaves correctly`() = runTest {
+    @DisplayName("""
+        Når onTask gjør ferdig arbeid
+        Hvis heartbeat kjører parallelt
+        Så skal heartbeat kjøre, kanselleres og state nullstilles
+    """)
+    fun taskWorkCompletesAndHeartbeatBehaves() = runTest {
         val workCompleted = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
 
-            var heartbeatStarted: Job? = null
             var heartbeatRan = false
             var onTaskCalled = false
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
-            override suspend fun onTask(task: Task): Event? {
+            override suspend fun onTask(task: Task): Event {
                 onTaskCalled = true
 
                 withHeartbeatRunner(10.milliseconds) {
                     heartbeatRan = true
-                }.also { heartbeatStarted = it }
+                }
 
-                // Simuler arbeid
                 delay(20)
 
-                // ⭐ signaliser at arbeidet er ferdig
                 workCompleted.complete(Unit)
 
                 return object : Event() {}
@ -284,100 +250,107 @@ class TaskListenerTest {
             }
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        val accepted = listener.accept(task, reporter)
-        assertTrue(accepted)
-
-        // ⭐ Verifiser at arbeidet faktisk ble fullført
+        listener.accept(FakeTask(), reporter)
+
         workCompleted.await()
+        listener.currentJob?.join()
 
-        // Vent på jobben
-        listener.currentJob!!.join()
-
-        // onTask ble kalt
         assertTrue(listener.onTaskCalled)
-
-        // Heartbeat ble startet
-        assertNotNull(listener.heartbeatStarted)
         assertTrue(listener.heartbeatRan)
 
-        // Heartbeat ble kansellert
-        assertFalse(listener.heartbeatStarted!!.isActive)
-
-        // Cleanup
         assertNull(listener.heartbeatRunner)
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.reporter)
     }
 
+    // ---------------------------------------------------------
+    // 5 — accept() returnerer false når busy
+    // ---------------------------------------------------------
     @Test
-    fun `accept returns false when listener is busy`() = runTest {
+    @DisplayName("""
+        Når listener er opptatt med en task
+        Hvis en ny task forsøkes akseptert
+        Så skal accept() returnere false
+    """)
+    fun acceptReturnsFalseWhenBusy() = runTest {
         val allowFinish = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
             override suspend fun onTask(task: Task): Event? {
-                // Hold jobben i live
                 allowFinish.await()
                 return object : Event() {}
             }
         }
 
         val reporter = FakeReporter()
-        val task1 = FakeTask()
-        val task2 = FakeTask()
-
-        // Første task aksepteres
-        val accepted1 = listener.accept(task1, reporter)
-        assertTrue(accepted1)
-
-        // Listener er busy → andre task skal avvises
-        val accepted2 = listener.accept(task2, reporter)
-        assertFalse(accepted2)
-
-        // Fullfør første task
+        assertTrue(listener.accept(FakeTask(), reporter))
+        assertFalse(listener.accept(FakeTask(), reporter))
+
         allowFinish.complete(Unit)
-        listener.currentJob!!.join()
+        listener.currentJob?.join()
 
-        // Cleanup
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
     }
 
+    // ---------------------------------------------------------
+    // 6 — accept() returnerer false når unsupported
+    // ---------------------------------------------------------
     @Test
-    fun `accept returns false when supports returns false`() = runTest {
+    @DisplayName("""
+        Når supports() returnerer false
+        Hvis accept() kalles
+        Så skal listener avvise tasken uten å starte jobb
+    """)
+    fun acceptReturnsFalseWhenUnsupported() = runTest {
         val listener = object : TaskListener() {
             override fun getWorkerId() = "worker"
 
-            override fun supports(task: Task) = false
-
-            override suspend fun onTask(task: Task): Event? {
-                error("onTask should not be called when supports=false")
-            }
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
+            override fun supports(task: Task) = false
+            override suspend fun onTask(task: Task): Event? = error("Should not be called")
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        val accepted = listener.accept(task, reporter)
-
-        assertFalse(accepted)
+        assertFalse(listener.accept(FakeTask(), reporter))
+
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.reporter)
     }
 
+    // ---------------------------------------------------------
+    // 7 — onError kalles når onTask kaster
+    // ---------------------------------------------------------
     @Test
-    fun `onError is called when onTask throws`() = runTest {
+    @DisplayName("""
+        Når onTask kaster en exception
+        Hvis listener håndterer feil via onError
+        Så skal cleanup kjøre og state nullstilles
+    """)
+    fun onErrorCalledWhenOnTaskThrows() = runTest {
         val errorLogged = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
             override suspend fun onTask(task: Task): Event? {
@ -391,72 +364,78 @@ class TaskListenerTest {
             }
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        listener.accept(task, reporter)
-
-        // Vent på error-path
+        listener.accept(FakeTask().newReferenceId(), reporter)
+
         errorLogged.await()
 
-        // ⭐ Vent på at cleanup i finally kjører
         listener.currentJob?.join()
 
-        // Cleanup verifisering
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.heartbeatRunner)
     }
 
+    // ---------------------------------------------------------
+    // 8 — onCancelled kalles når jobben kanselleres
+    // ---------------------------------------------------------
     @Test
-    fun `onCancelled is called when job is cancelled`() = runTest {
+    @DisplayName("""
+        Når jobben kanselleres mens onTask kjører
+        Hvis listener implementerer onCancelled
+        Så skal onCancelled kalles og cleanup skje
+    """)
+    fun onCancelledCalledWhenJobCancelled() = runTest {
         val allowStart = CompletableDeferred<Unit>()
         val cancelledCalled = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
             override suspend fun onTask(task: Task): Event? {
                 allowStart.complete(Unit)
-                delay(Long.MAX_VALUE) // hold jobben i live
+                delay(Long.MAX_VALUE)
                 return null
             }
 
-            override fun onCancelled() {
-                super.onCancelled()
+            override fun onCancelled(task: Task) {
+                super.onCancelled(task)
                 cancelledCalled.complete(Unit)
             }
         }
 
         val reporter = FakeReporter()
-        val task = FakeTask()
-
-        listener.accept(task, reporter)
-
-        // Vent til onTask har startet
+        listener.accept(FakeTask().newReferenceId(), reporter)
+
         allowStart.await()
 
-        // Kanseller jobben
         listener.currentJob!!.cancel()
 
-        // Vent til onCancelled() ble kalt
         cancelledCalled.await()
 
-        // ⭐ Vent til cleanup i finally har kjørt
         listener.currentJob?.join()
 
-        // Cleanup verifisering
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.heartbeatRunner)
     }
 
+    // ---------------------------------------------------------
+    // 9 — Sekvensiell kjøring uten state‑lekkasje
+    // ---------------------------------------------------------
     @Test
-    fun `listener handles two sequential tasks without leaking state`() = runTest {
+    @DisplayName("""
+        Når listener prosesserer to tasks sekvensielt
+        Hvis cleanup fungerer riktig
+        Så skal ingen state lekke mellom tasks
+    """)
+    fun listenerHandlesSequentialTasksWithoutLeakingState() = runTest {
+        val started1 = CompletableDeferred<Unit>()
         val finish1 = CompletableDeferred<Unit>()
 
+        val started2 = CompletableDeferred<Unit>()
         val finish2 = CompletableDeferred<Unit>()
 
         val listener = object : TaskListener() {
@ -464,45 +443,50 @@ class TaskListenerTest {
             var callCount = 0
 
             override fun getWorkerId() = "worker"
 
+            override fun createIncompleteStateTaskEvent(
+                task: Task, status: TaskStatus, exception: Exception?
+            ) = object : Event() {}
+
             override fun supports(task: Task) = true
 
-            override suspend fun onTask(task: Task): Event? {
+            override suspend fun onTask(task: Task): Event {
                 callCount++
-                if (callCount == 1) finish1.await()
-                if (callCount == 2) finish2.await()
+
+                if (callCount == 1) {
+                    started1.complete(Unit)
+                    finish1.await()
+                }
+
+                if (callCount == 2) {
+                    started2.complete(Unit)
+                    finish2.await()
+                }
 
                 return object : Event() {}
             }
         }
 
         val reporter = FakeReporter()
 
-        // Task 1
-        val task1 = FakeTask()
-        listener.accept(task1, reporter)
+        listener.accept(FakeTask(), reporter)
+        started1.await()
         finish1.complete(Unit)
-        listener.currentJob!!.join()
+        listener.currentJob?.join()
 
-        // Verifiser cleanup
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.heartbeatRunner)
 
-        // Task 2
-        val task2 = FakeTask()
-        listener.accept(task2, reporter)
+        listener.accept(FakeTask(), reporter)
+        started2.await()
         finish2.complete(Unit)
-        listener.currentJob!!.join()
+        listener.currentJob?.join()
 
-        // Verifiser cleanup igjen
         assertNull(listener.currentJob)
         assertNull(listener.currentTask)
         assertNull(listener.heartbeatRunner)
 
-        // onTask ble kalt to ganger
         assertEquals(2, listener.callCount)
     }
 
 
 }
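For readers skimming this diff, the revised TaskListener contract that every anonymous listener above implements boils down to four members plus the heartbeat helper. The sketch below is illustrative only and is built solely from the signatures visible in this diff (getWorkerId, createIncompleteStateTaskEvent, supports, onTask, onCancelled(task), withHeartbeatRunner); the MyTask/MyTaskDoneEvent/MyTaskListener names are hypothetical and do not exist in the repository.

import no.iktdev.eventi.models.Event
import no.iktdev.eventi.models.Task
import no.iktdev.eventi.models.store.TaskStatus
import no.iktdev.eventi.tasks.TaskListener
import kotlin.time.Duration.Companion.seconds

// Hypothetical task/event types, only for illustration.
class MyTask : Task()
class MyTaskDoneEvent : Event()

class MyTaskListener : TaskListener() {
    override fun getWorkerId() = "my-worker"

    // Event emitted when the task ends in a non-complete state (failed or cancelled).
    override fun createIncompleteStateTaskEvent(
        task: Task, status: TaskStatus, exception: Exception?
    ): Event = MyTaskDoneEvent()

    override fun supports(task: Task) = task is MyTask

    override suspend fun onTask(task: Task): Event? {
        // Periodic side effect while the work runs, mirroring the tests above.
        withHeartbeatRunner(5.seconds) { /* e.g. touch last-seen via the reporter */ }
        // ... do the actual work here ...
        return MyTaskDoneEvent()
    }

    override fun onCancelled(task: Task) {
        super.onCancelled(task)
        // Release any external resources held by the work.
    }
}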
@ -10,18 +10,26 @@ import no.iktdev.eventi.TestBase
 import no.iktdev.eventi.events.EventTypeRegistry
 import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.Task
+import no.iktdev.eventi.models.store.TaskStatus
 import no.iktdev.eventi.stores.TaskStore
 import no.iktdev.eventi.testUtil.multiply
 import no.iktdev.eventi.testUtil.wipe
 import org.assertj.core.api.Assertions.assertThat
 import org.junit.jupiter.api.Assertions.assertEquals
 import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.DisplayName
 import org.junit.jupiter.api.Test
 import java.time.Duration
 import java.util.UUID
 import kotlin.time.Duration.Companion.minutes
 import kotlin.time.Duration.Companion.seconds
 
+@DisplayName("""
+    TaskPollerImplementation
+    Når polleren henter og prosesserer tasks
+    Hvis lyttere, backoff og event-produksjon fungerer som forventet
+    Så skal polleren håndtere alle scenarier korrekt
+""")
 class TaskPollerImplementationTest : TestBase() {
 
     @BeforeEach
@ -32,11 +40,14 @@ class TaskPollerImplementationTest : TestBase() {
     }
 
     private lateinit var eventDeferred: CompletableDeferred<Event>
 
     val reporterFactory = { _: Task ->
         object : TaskReporter {
             override fun markClaimed(taskId: UUID, workerId: String) {}
             override fun updateLastSeen(taskId: UUID) {}
-            override fun markConsumed(taskId: UUID) {}
+            override fun markCompleted(taskId: UUID) {}
+            override fun markFailed(referenceId: UUID,taskId: UUID) {}
+            override fun markCancelled(referenceId: UUID,taskId: UUID) {}
             override fun updateProgress(taskId: UUID, progress: Int) {}
             override fun log(taskId: UUID, message: String) {}
             override fun publishEvent(event: Event) {
@ -45,30 +56,33 @@ class TaskPollerImplementationTest : TestBase() {
         }
     }
 
-    data class EchoTask(var data: String?) : Task() {
-    }
-
-    data class EchoEvent(var data: String) : Event() {
-    }
-
-    class TaskPollerImplementationTest(taskStore: TaskStore, reporterFactory: (Task) -> TaskReporter): TaskPollerImplementation(taskStore, reporterFactory) {
+    data class EchoTask(var data: String?) : Task()
+    data class EchoEvent(var data: String) : Event()
+
+    class TaskPollerImplementationTest(
+        taskStore: TaskStore,
+        reporterFactory: (Task) -> TaskReporter
+    ) : TaskPollerImplementation(taskStore, reporterFactory) {
         fun overrideSetBackoff(duration: java.time.Duration) {
             backoff = duration
         }
     }
 
 
     open class EchoListener : TaskListener(TaskType.MIXED) {
         var result: Event? = null
 
         fun getJob() = currentJob
 
         override fun getWorkerId() = this.javaClass.simpleName
 
-        override fun supports(task: Task): Boolean {
-            return task is EchoTask
+        override fun createIncompleteStateTaskEvent(
+            task: Task,
+            status: TaskStatus,
+            exception: Exception?
+        ): Event {
+            return object : Event() {}
         }
 
+        override fun supports(task: Task) = task is EchoTask
+
         override suspend fun onTask(task: Task): Event {
             withHeartbeatRunner(1.seconds) {
                 println("Heartbeat")
@ -81,36 +95,31 @@ class TaskPollerImplementationTest : TestBase() {
|
 
         override fun onComplete(task: Task, result: Event?) {
             super.onComplete(task, result)
-            this.result = result;
+            this.result = result
             reporter?.publishEvent(result!!)
         }
 
-        override fun onError(task: Task, exception: Exception) {
-            exception.printStackTrace()
-            super.onError(task, exception)
-        }
-
-        override fun onCancelled() {
-            super.onCancelled()
-        }
-
     }
 
     @OptIn(ExperimentalCoroutinesApi::class)
     @Test
+    @DisplayName("""
+        Når en EchoTask finnes i TaskStore
+        Hvis polleren prosesserer tasken og lytteren produserer en EchoEvent
+        Så skal eventen publiseres og metadata inneholde korrekt avledningskjede
+    """)
     fun scenario1() = runTest {
-        // Register Task and Event
         TaskTypeRegistry.register(EchoTask::class.java)
         EventTypeRegistry.register(EchoEvent::class.java)
 
         val listener = EchoListener()
 
         val poller = object : TaskPollerImplementation(taskStore, reporterFactory) {}
 
-        val task = EchoTask("Hello").newReferenceId().derivedOf(object : Event() {})
+        val task = EchoTask("Hello").newReferenceId().derivedOf(object : Event() {}.apply { newReferenceId() })
         taskStore.persist(task)
 
         poller.pollOnce()
         advanceUntilIdle()
 
         val producedEvent = eventDeferred.await()
         assertThat(producedEvent).isNotNull
         assertThat(producedEvent.metadata.derivedFromId).hasSize(2)
|
 
     @OptIn(ExperimentalCoroutinesApi::class)
     @Test
-    fun `poller resets backoff when task is accepted`() = runTest {
+    @DisplayName("""
+        Når en task blir akseptert av lytteren
+        Hvis polleren tidligere har økt backoff
+        Så skal backoff resettes til startverdi
+    """)
+    fun pollerResetsBackoffWhenTaskAccepted() = runTest {
         TaskTypeRegistry.register(EchoTask::class.java)
         EventTypeRegistry.register(EchoEvent::class.java)
 
|
         val initialBackoff = poller.backoff
 
         poller.overrideSetBackoff(Duration.ofSeconds(16))
 
         val task = EchoTask("Hello").newReferenceId()
         taskStore.persist(task)
 
         poller.pollOnce()
 
         listener.getJob()?.join()
 
         advanceTimeBy(1.minutes)
         advanceUntilIdle()
 
|
     }
 
     @Test
-    fun `poller increases backoff when no tasks`() = runTest {
+    @DisplayName("""
+        Når polleren ikke finner noen tasks
+        Hvis ingen lyttere har noe å gjøre
+        Så skal backoff dobles
+    """)
+    fun pollerIncreasesBackoffWhenNoTasks() = runTest {
         val poller = object : TaskPollerImplementation(taskStore, reporterFactory) {}
         val initialBackoff = poller.backoff
-        val totalBackoff = initialBackoff.multiply(2)
 
         poller.pollOnce()
 
-        assertEquals(totalBackoff, poller.backoff)
+        assertEquals(initialBackoff.multiply(2), poller.backoff)
     }
 
 
     @Test
-    fun `poller increases backoff when no listener supports task`() = runTest {
+    @DisplayName("""
+        Når en task finnes men ingen lyttere støtter den
+        Hvis polleren forsøker å prosessere tasken
+        Så skal backoff dobles
+    """)
+    fun pollerIncreasesBackoffWhenNoListenerSupportsTask() = runTest {
         val poller = object : TaskPollerImplementation(taskStore, reporterFactory) {}
         val initialBackoff = poller.backoff
 
|
     }
 
     @Test
-    fun `poller increases backoff when listener is busy`() = runTest {
+    @DisplayName("""
+        Når en lytter er opptatt
+        Hvis polleren forsøker å prosessere en task
+        Så skal backoff dobles
+    """)
+    fun pollerIncreasesBackoffWhenListenerBusy() = runTest {
         val busyListener = object : EchoListener() {
             override val isBusy = true
         }
 
         val poller = object : TaskPollerImplementation(taskStore, reporterFactory) {}
-        val intialBackoff = poller.backoff
+        val initialBackoff = poller.backoff
 
         val task = EchoTask("Busy").newReferenceId()
         taskStore.persist(task)
 
         poller.pollOnce()
 
-        assertEquals(intialBackoff.multiply(2), poller.backoff)
+        assertEquals(initialBackoff.multiply(2), poller.backoff)
     }
 
     @Test
-    fun `poller increases backoff when task is not claimed`() = runTest {
-        val listener = EchoListener()
+    @DisplayName("""
+        Når en task ikke kan claimes av polleren
+        Hvis claim-operasjonen feiler
+        Så skal backoff dobles
+    """)
+    fun pollerIncreasesBackoffWhenTaskNotClaimed() = runTest {
         TaskTypeRegistry.register(EchoTask::class.java)
 
         val task = EchoTask("Unclaimable").newReferenceId()
         taskStore.persist(task)
 
-        // Simuler at claim alltid feiler
         val failingStore = object : InMemoryTaskStore() {
-            override fun claim(taskId: UUID, workerId: String): Boolean = false
+            override fun claim(taskId: UUID, workerId: String) = false
         }
-        val pollerWithFailingClaim = object : TaskPollerImplementation(failingStore, reporterFactory) {}
-        val initialBackoff = pollerWithFailingClaim.backoff
+
+        val poller = object : TaskPollerImplementation(failingStore, reporterFactory) {}
+        val initialBackoff = poller.backoff
 
         failingStore.persist(task)
+        poller.pollOnce()
-        pollerWithFailingClaim.pollOnce()
 
-        assertEquals(initialBackoff.multiply(2), pollerWithFailingClaim.backoff)
+        assertEquals(initialBackoff.multiply(2), poller.backoff)
     }
 
 }
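Taken together, the backoff tests above pin down a simple contract: the poller doubles its backoff whenever a poll cycle does no useful work (no tasks, no supporting listener, busy listener, failed claim) and resets it once a listener accepts a task. A minimal sketch of that behaviour follows, assuming an initial backoff of one second; it is illustrative only and not the library's actual TaskPollerImplementation.

import java.time.Duration

// Illustrative sketch of the backoff rules asserted by the tests above.
class PollBackoff(private val initial: Duration = Duration.ofSeconds(1)) {
    var backoff: Duration = initial
        private set

    // Called when a listener accepted the task: back to the starting value.
    fun reset() { backoff = initial }

    // Called when a poll cycle did no work (idle, unsupported, busy, unclaimed).
    fun increase() { backoff = backoff.multipliedBy(2) }
}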
@ -13,6 +13,7 @@ fun EventListenerRegistry.wipe() {
 
     // Tøm map’en
     val mutableList = field.get(EventListenerRegistry) as MutableList<*>
+    @Suppress("UNCHECKED_CAST")
     (mutableList as MutableList<Class<out EventListener>>).clear()
 
     // Verifiser at det er tomt
@ -13,6 +13,7 @@ fun EventTypeRegistry.wipe() {
 
     // Tøm map’en
     val typesMap = field.get(EventTypeRegistry) as MutableMap<*, *>
+    @Suppress("UNCHECKED_CAST")
     (typesMap as MutableMap<String, Class<out Event>>).clear()
 
     // Verifiser at det er tomt
@ -1,7 +1,5 @@
 package no.iktdev.eventi.testUtil
 
-import no.iktdev.eventi.events.EventListener
-import no.iktdev.eventi.events.EventListenerRegistry
 import no.iktdev.eventi.tasks.TaskListener
 import no.iktdev.eventi.tasks.TaskListenerRegistry
 import org.assertj.core.api.Assertions.assertThat
@ -15,6 +13,7 @@ fun TaskListenerRegistry.wipe() {
 
     // Tøm map’en
     val mutableList = field.get(TaskListenerRegistry) as MutableList<*>
+    @Suppress("UNCHECKED_CAST")
     (mutableList as MutableList<Class<out TaskListener>>).clear()
 
     // Verifiser at det er tomt
@ -1,12 +1,11 @@
 package no.iktdev.eventi.testUtil
 
-import no.iktdev.eventi.events.EventTypeRegistry
-import no.iktdev.eventi.models.Event
 import no.iktdev.eventi.models.Task
 import no.iktdev.eventi.tasks.TaskTypeRegistry
 import org.junit.jupiter.api.Assertions.assertNull
 import java.lang.reflect.Field
 
+@Suppress("UNUSED_RECEIVER_PARAMETER")
 fun TaskTypeRegistry.wipe() {
     val field: Field = TaskTypeRegistry::class.java
         .superclass
@ -15,6 +14,7 @@ fun TaskTypeRegistry.wipe() {
|
 
     // Tøm map’en
     val typesMap = field.get(TaskTypeRegistry) as MutableMap<*, *>
+    @Suppress("UNCHECKED_CAST")
     (typesMap as MutableMap<String, Class<out Task>>).clear()
 
     // Verifiser at det er tomt
|||||||
@ -0,0 +1,14 @@
|
|||||||
+package no.iktdev.eventi.testUtil
+
+import kotlinx.coroutines.CoroutineDispatcher
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.SupervisorJob
+import no.iktdev.eventi.events.SequenceDispatchQueue
+
+class TestSequenceDispatchQueue(
+    maxConcurrency: Int,
+    dispatcher: CoroutineDispatcher
+) : SequenceDispatchQueue(
+    maxConcurrency,
+    CoroutineScope(dispatcher + SupervisorJob())
+)
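The new TestSequenceDispatchQueue helper only pins the coroutine scope; a test would construct it with a test dispatcher. The snippet below is a hypothetical usage example, assuming kotlinx-coroutines-test is on the test classpath as it already is for the tests above.

import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.test.UnconfinedTestDispatcher
import no.iktdev.eventi.testUtil.TestSequenceDispatchQueue

@OptIn(ExperimentalCoroutinesApi::class)
fun buildQueueForTest(): TestSequenceDispatchQueue =
    // Two concurrent slots, driven by an eager test dispatcher.
    TestSequenceDispatchQueue(maxConcurrency = 2, dispatcher = UnconfinedTestDispatcher())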