Updates
parent 6090c2e8c0
commit f0a8e14aaa
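This commit replaces the split PersistentDataReader / PersistentDataStore pair with a single PersistentEventManager, renames the KafkaEvents constants it touches from SCREAMING_SNAKE_CASE to PascalCase, and threads a derivedFromEventId through produced payloads. The manager class itself is not part of this diff; inferring only from the call sites in the hunks below, its surface looks roughly like the following sketch (hypothetical reconstruction, not the actual declaration):

```kotlin
// Hypothetical sketch of the PersistentEventManager surface implied by the
// call sites in this diff; the real class lives in
// no.iktdev.mediaprocessing.shared.common.persistance and is not shown here.
interface PersistentEventManagerSurface {
    // Kafka-event table (Coordinator side)
    fun setEvent(event: KafkaEvents, message: Message<out MessageDataWrapper>): Boolean
    fun getEventsWith(referenceId: String): List<PersistentMessage>
    fun getEventsUncompleted(): List<PersistentMessage>

    // Process-event table (worker side)
    fun setProcessEvent(event: KafkaEvents, message: Message<out MessageDataWrapper>): Boolean
    fun getProcessEventsClaimable(): List<PersistentProcessDataMessage>
    fun getProcessEventWith(referenceId: String, eventId: String): PersistentProcessDataMessage?
    fun getProcessEventsWithExpiredClaim(): List<PersistentProcessDataMessage>

    // Claim lifecycle
    fun isProcessEventClaimed(referenceId: String, eventId: String): Boolean
    fun setProcessEventClaim(referenceId: String, eventId: String, claimer: String): Boolean
    fun deleteProcessEventClaim(referenceId: String, eventId: String): Boolean
    fun setProcessEventCompleted(referenceId: String, eventId: String): Boolean
    fun isProcessEventCompleted(referenceId: String, eventId: String): Boolean
}
```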
@@ -18,13 +18,12 @@ class ClaimsService() {
 
     @Scheduled(fixedDelay = (300_000))
     fun validateClaims() {
-        val expiredClaims = persistentReader.getExpiredClaimsProcessEvents()
+        val expiredClaims = eventManager.getProcessEventsWithExpiredClaim()
         expiredClaims.forEach {
             log.info { "Found event with expired claim: ${it.referenceId}::${it.eventId}::${it.event}" }
         }
-        val store = persistentWriter
         expiredClaims.forEach {
-            val result = store.releaseProcessEventClaim(referenceId = it.referenceId, eventId = it.eventId)
+            val result = eventManager.deleteProcessEventClaim(referenceId = it.referenceId, eventId = it.eventId)
             if (result) {
                 log.info { "Released claim on ${it.referenceId}::${it.eventId}::${it.event}" }
             } else {
@@ -4,6 +4,7 @@ import no.iktdev.mediaprocessing.shared.common.DatabaseEnvConfig
 import no.iktdev.mediaprocessing.shared.common.datasource.MySqlDataSource
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataReader
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataStore
+import no.iktdev.mediaprocessing.shared.common.persistance.PersistentEventManager
 import no.iktdev.mediaprocessing.shared.common.persistance.processerEvents
 import no.iktdev.mediaprocessing.shared.common.toEventsDatabase
 import org.springframework.boot.autoconfigure.SpringBootApplication
@@ -20,8 +21,7 @@ fun getContext(): ApplicationContext? {
 }
 
 
-lateinit var persistentReader: PersistentDataReader
-lateinit var persistentWriter: PersistentDataStore
+lateinit var eventManager: PersistentEventManager
 
 private lateinit var eventsDatabase: MySqlDataSource
 fun getEventsDatabase(): MySqlDataSource {
@@ -33,8 +33,7 @@ fun main(args: Array<String>) {
     eventsDatabase.createDatabase()
     eventsDatabase.createTables(processerEvents)
 
-    persistentReader = PersistentDataReader(eventsDatabase)
-    persistentWriter = PersistentDataStore(eventsDatabase)
+    eventManager = PersistentEventManager(eventsDatabase)
 
     context = runApplication<ConvertApplication>(*args)
 }
@@ -40,14 +40,14 @@ class ConverterCoordinator() : CoordinatorBase<PersistentProcessDataMessage, Per
 
 
     override fun onMessageReceived(event: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>) {
-        if (event.key == KafkaEvents.EVENT_WORK_CONVERT_CREATED) {
-            val success = persistentWriter.storeProcessDataMessage(event.key.event, event.value)
+        if (event.key == KafkaEvents.EventWorkConvertCreated) {
+            val success = eventManager.setProcessEvent(event.key, event.value)
             if (!success) {
                 log.error { "Unable to store message: ${event.key.event} in database ${getEventsDatabase().database}!" }
             } else {
                 readAllMessagesFor(event.value.referenceId, event.value.eventId)
             }
-        } else if (event.key == KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED) {
+        } else if (event.key == KafkaEvents.EventWorkExtractPerformed) {
             readAllInQueue()
         } else {
             log.debug { "Skipping ${event.key}" }
@@ -55,7 +55,7 @@ class ConverterCoordinator() : CoordinatorBase<PersistentProcessDataMessage, Per
     }
 
     fun readAllInQueue() {
-        val messages = persistentReader.getAvailableProcessEvents()
+        val messages = eventManager.getProcessEventsClaimable()// persistentReader.getAvailableProcessEvents()
         io.launch {
             messages.forEach {
                 delay(1000)
@@ -65,7 +65,7 @@ class ConverterCoordinator() : CoordinatorBase<PersistentProcessDataMessage, Per
     }
 
     fun readAllMessagesFor(referenceId: String, eventId: String) {
-        val messages = persistentReader.getAvailableProcessEvents()
+        val messages = eventManager.getProcessEventsClaimable() // persistentReader.getAvailableProcessEvents()
         createTasksBasedOnEventsAndPersistence(referenceId, eventId, messages)
     }
 
@@ -3,11 +3,8 @@ package no.iktdev.mediaprocessing.converter.tasks
 import kotlinx.coroutines.delay
 import kotlinx.coroutines.runBlocking
 import mu.KotlinLogging
-import no.iktdev.mediaprocessing.converter.ConverterCoordinator
-import no.iktdev.mediaprocessing.converter.TaskCreator
+import no.iktdev.mediaprocessing.converter.*
 import no.iktdev.mediaprocessing.converter.convert.Converter
-import no.iktdev.mediaprocessing.converter.persistentReader
-import no.iktdev.mediaprocessing.converter.persistentWriter
 import no.iktdev.mediaprocessing.shared.common.getComputername
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentProcessDataMessage
 import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
@@ -37,18 +34,18 @@ class ConvertService(@Autowired override var coordinator: ConverterCoordinator)
 
     override val listensForEvents: List<KafkaEvents>
         get() = listOf(
-            KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED,
-            KafkaEvents.EVENT_WORK_CONVERT_CREATED
+            KafkaEvents.EventWorkExtractPerformed,
+            KafkaEvents.EventWorkConvertCreated
         )
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_WORK_CONVERT_PERFORMED
+        get() = KafkaEvents.EventWorkConvertPerformed
 
 
     fun getRequiredExtractProcessForContinuation(
         referenceId: String,
         requiresEventId: String
     ): PersistentProcessDataMessage? {
-        return persistentReader.getProcessEvent(referenceId, requiresEventId)
+        return eventManager.getProcessEventWith(referenceId, requiresEventId)
     }
 
     fun canConvert(extract: PersistentProcessDataMessage?): Boolean {
@@ -61,7 +58,7 @@ class ConvertService(@Autowired override var coordinator: ConverterCoordinator)
         events: List<PersistentProcessDataMessage>
     ): MessageDataWrapper? {
         val convertEvent =
-            events.find { it.event == KafkaEvents.EVENT_WORK_CONVERT_CREATED && it.data is ConvertWorkerRequest }
+            events.find { it.event == KafkaEvents.EventWorkConvertCreated && it.data is ConvertWorkerRequest }
         if (convertEvent == null) {
             // No convert here..
             return null
@@ -94,17 +91,16 @@ class ConvertService(@Autowired override var coordinator: ConverterCoordinator)
             }
         }
 
-        val isAlreadyClaimed =
-            persistentReader.isProcessEventAlreadyClaimed(referenceId = event.referenceId, eventId = event.eventId)
+        val isAlreadyClaimed = eventManager.isProcessEventClaimed(referenceId = event.referenceId, eventId = event.eventId)
         if (isAlreadyClaimed) {
             log.warn { "Process is already claimed!" }
             return null
         }
 
-        val setClaim = persistentWriter.setProcessEventClaim(
+        val setClaim = eventManager.setProcessEventClaim(
             referenceId = event.referenceId,
             eventId = event.eventId,
-            claimedBy = serviceId
+            claimer = serviceId
         )
         if (!setClaim) {
             return null
@@ -133,20 +129,19 @@ class ConvertService(@Autowired override var coordinator: ConverterCoordinator)
         }
 
         val consumedIsSuccessful =
-            persistentWriter.setProcessEventCompleted(event.referenceId, event.eventId, serviceId)
+            eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
         runBlocking {
             delay(1000)
             if (!consumedIsSuccessful) {
-                persistentWriter.setProcessEventCompleted(event.referenceId, event.eventId, serviceId)
+                eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
             }
             delay(1000)
-            var readbackIsSuccess =
-                persistentReader.isProcessEventDefinedAsConsumed(event.referenceId, event.eventId, serviceId)
+            var readbackIsSuccess = eventManager.isProcessEventCompleted(event.referenceId, event.eventId)
 
             while (!readbackIsSuccess) {
                 delay(1000)
                 readbackIsSuccess =
-                    persistentReader.isProcessEventDefinedAsConsumed(event.referenceId, event.eventId, serviceId)
+                    eventManager.isProcessEventCompleted(event.referenceId, event.eventId)
             }
         }
         return result
@@ -208,13 +203,13 @@ class ConvertService(@Autowired override var coordinator: ConverterCoordinator)
             }
 
         } catch (e: Exception) {
-            persistentWriter.setProcessEventCompleted(referenceId, event.eventId, serviceId)
+            eventManager.setProcessEventCompleted(referenceId, event.eventId)
             failed.add(event)
             log.error { "Canceling event ${event.eventId}\n\t by declaring it as consumed." }
             producer.sendMessage(
                 referenceId = referenceId,
                 event = producesEvent,
-                data = SimpleMessageData(Status.SKIPPED, "Required event: ${ce?.requiresEventId} is not found. Skipping convert work for referenceId: ${referenceId}")
+                data = SimpleMessageData(Status.SKIPPED, "Required event: ${ce?.requiresEventId} is not found. Skipping convert work for referenceId: ${referenceId}", derivedFromEventId = event.eventId)
             )
         }
     }
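The ConvertService hunks above exercise the full claim lifecycle against the new manager. A condensed, hypothetical usage sketch (method names taken from the diff; the error handling and delay(1000) polling of the real code are simplified):

```kotlin
// Hypothetical worker flow built from the eventManager calls seen above.
fun claimAndComplete(event: PersistentProcessDataMessage, serviceId: String): Boolean {
    // Skip work another instance already claimed.
    if (eventManager.isProcessEventClaimed(event.referenceId, event.eventId)) return false
    // Claim; a false return means another instance won the race.
    if (!eventManager.setProcessEventClaim(event.referenceId, event.eventId, claimer = serviceId)) return false
    // ... perform the actual convert work here ...
    // Mark completed, then read back until the write is visible
    // (the diff retries and polls with delay(1000) between attempts).
    var completed = eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
    while (!eventManager.isProcessEventCompleted(event.referenceId, event.eventId)) {
        completed = eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
    }
    return completed
}
```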
@@ -7,16 +7,12 @@ import no.iktdev.exfl.coroutines.Coroutines
 import no.iktdev.mediaprocessing.coordinator.coordination.PersistentEventBasedMessageListener
 import no.iktdev.mediaprocessing.shared.common.CoordinatorBase
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
-import no.iktdev.mediaprocessing.shared.common.persistance.isOfEvent
-import no.iktdev.mediaprocessing.shared.common.persistance.isSuccess
 import no.iktdev.mediaprocessing.shared.contract.ProcessType
 import no.iktdev.mediaprocessing.shared.contract.dto.ProcessStartOperationEvents
 import no.iktdev.mediaprocessing.shared.contract.dto.RequestStartOperationEvents
 import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.*
 import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.*
-import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.work.ProcesserEncodeWorkPerformed
-import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.work.ProcesserExtractWorkPerformed
 import org.springframework.stereotype.Service
 import java.io.File
 import java.util.UUID
@@ -30,13 +26,10 @@ class Coordinator() : CoordinatorBase<PersistentMessage, PersistentEventBasedMes
     }
 
     override fun onMessageReceived(event: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>) {
-        val success = persistentWriter.storeEventDataMessage(event.key.event, event.value)
+        val success = eventManager.setEvent(event.key, event.value)
         if (!success) {
             log.error { "Unable to store message: ${event.key.event} in database ${getEventsDatabase().config.databaseName}" }
         } else {
-            deleteOlderEventsIfSuperseded(event.key, event.value)
-
-
             io.launch {
                 delay(1000) // Give the database a few sec to update
                 readAllMessagesFor(event.value.referenceId, event.value.eventId)
@@ -78,7 +71,7 @@ class Coordinator() : CoordinatorBase<PersistentMessage, PersistentEventBasedMes
             file = file.absolutePath,
             type = type
         )
-        producer.sendMessage(UUID.randomUUID().toString(), KafkaEvents.EVENT_MEDIA_PROCESS_STARTED, processStartEvent)
+        producer.sendMessage(UUID.randomUUID().toString(), KafkaEvents.EventMediaProcessStarted, processStartEvent)
 
     }
 
@@ -96,14 +89,14 @@ class Coordinator() : CoordinatorBase<PersistentMessage, PersistentEventBasedMes
     fun permitWorkToProceedOn(referenceId: String, message: String) {
         producer.sendMessage(
             referenceId = referenceId,
-            KafkaEvents.EVENT_MEDIA_WORK_PROCEED_PERMITTED,
-            SimpleMessageData(Status.COMPLETED, message)
+            KafkaEvents.EventMediaWorkProceedPermitted,
+            SimpleMessageData(Status.COMPLETED, message, null)
         )
     }
 
 
     fun readAllUncompletedMessagesInQueue() {
-        val messages = persistentReader.getUncompletedMessages()
+        val messages = eventManager.getEventsUncompleted()
         io.launch {
             messages.forEach {
                 delay(1000)
@@ -117,17 +110,17 @@ class Coordinator() : CoordinatorBase<PersistentMessage, PersistentEventBasedMes
     }
 
     fun readAllMessagesFor(referenceId: String, eventId: String) {
-        val messages = persistentReader.getMessagesFor(referenceId)
+        val messages = eventManager.getEventsWith(referenceId)
         if (messages.find { it.eventId == eventId && it.referenceId == referenceId } == null) {
             log.warn { "EventId ($eventId) for ReferenceId ($referenceId) has not been made available in the database yet." }
             io.launch {
                 val fixedDelay = 1000L
                 delay(fixedDelay)
                 var delayed = 0L
-                var msc = persistentReader.getMessagesFor(referenceId)
+                var msc = eventManager.getEventsWith(referenceId)
                 while (msc.find { it.eventId == eventId } != null || delayed < 1000 * 60) {
                     delayed += fixedDelay
-                    msc = persistentReader.getMessagesFor(referenceId)
+                    msc = eventManager.getEventsWith(referenceId)
                 }
                 operationToRunOnMessages(referenceId, eventId, msc)
             }
@@ -145,63 +138,7 @@ class Coordinator() : CoordinatorBase<PersistentMessage, PersistentEventBasedMes
     }
 
     fun getProcessStarted(messages: List<PersistentMessage>): MediaProcessStarted? {
-        return messages.find { it.event == KafkaEvents.EVENT_MEDIA_PROCESS_STARTED }?.data as MediaProcessStarted
-    }
-
-
-    fun deleteOlderEventsIfSuperseded(event: KafkaEvents, value: Message<out MessageDataWrapper>) {
-        var existingMessages = persistentReader.getMessagesFor(value.referenceId)
-
-        if (!KafkaEvents.isOfWork(event)) {
-            val superseded = existingMessages.filter { it.event == event && it.eventId != value.eventId }
-            superseded.forEach {
-                persistentWriter.deleteStoredEventDataMessage(
-                    referenceId = it.referenceId,
-                    eventId = it.eventId,
-                    event = it.event
-                )
-            }
-        }
-
-        existingMessages = persistentReader.getMessagesFor(value.referenceId)
-        val workItems = existingMessages.filter { KafkaEvents.isOfWork(it.event) }
-        for (item: PersistentMessage in workItems) {
-            val originatorId = if (item.isOfEvent(KafkaEvents.EVENT_WORK_ENCODE_CREATED) ||
-                item.isOfEvent(KafkaEvents.EVENT_WORK_EXTRACT_CREATED)
-            ) {
-                val ec = item.data as FfmpegWorkRequestCreated
-                ec.derivedFromEventId
-            } else if (item.isOfEvent(KafkaEvents.EVENT_WORK_ENCODE_PERFORMED)) {
-                try {
-                    (item.data as ProcesserEncodeWorkPerformed).derivedFromEventId
-                } catch (e: Exception) {
-                    null
-                }
-            } else if (item.isOfEvent(KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED)) {
-                try {
-                    (item.data as ProcesserExtractWorkPerformed).derivedFromEventId
-                } catch (e: Exception) {
-                    null
-                }
-            } else null
-
-            originatorId?.let { originator ->
-                deleteEventsIfNoOriginator(item.referenceId, item.eventId, item.event, originator, existingMessages)
-            }
-        }
-    }
-
-    private fun deleteEventsIfNoOriginator(
-        referenceId: String,
-        eventId: String,
-        event: KafkaEvents,
-        originatorId: String,
-        existingMessages: List<PersistentMessage>
-    ) {
-        val originator = existingMessages.find { it.eventId == originatorId }
-        if (originator == null) {
-            persistentWriter.deleteStoredEventDataMessage(referenceId, eventId, event)
-        }
+        return messages.find { it.event == KafkaEvents.EventMediaProcessStarted }?.data as MediaProcessStarted
     }
 
 }
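Worth noting in the Coordinator hunks above: deleteOlderEventsIfSuperseded and deleteEventsIfNoOriginator are deleted outright rather than ported, along with the imports they needed (isOfEvent, ProcesserEncodeWorkPerformed, ProcesserExtractWorkPerformed). The diff does not show where, or whether, that superseded-event cleanup is reimplemented; presumably it now happens inside PersistentEventManager.setEvent, which takes the KafkaEvents key directly instead of event.key.event.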
@@ -9,6 +9,7 @@ import no.iktdev.mediaprocessing.shared.common.SharedConfig
 import no.iktdev.mediaprocessing.shared.common.datasource.MySqlDataSource
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataReader
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataStore
+import no.iktdev.mediaprocessing.shared.common.persistance.PersistentEventManager
 import no.iktdev.mediaprocessing.shared.common.persistance.events
 import no.iktdev.mediaprocessing.shared.common.toEventsDatabase
 import no.iktdev.mediaprocessing.shared.common.toStoredDatabase
@@ -46,8 +47,7 @@ fun getEventsDatabase(): MySqlDataSource {
     return eventsDatabase
 }
 
-lateinit var persistentReader: PersistentDataReader
-lateinit var persistentWriter: PersistentDataStore
+lateinit var eventManager: PersistentEventManager
 
 fun main(args: Array<String>) {
     Coroutines.addListener(listener = object: Observables.ObservableValue.ValueListener<Throwable> {
@@ -57,16 +57,19 @@ fun main(args: Array<String>) {
     })
 
     eventsDatabase = DatabaseEnvConfig.toEventsDatabase()
-    storeDatabase = DatabaseEnvConfig.toStoredDatabase()
 
     eventsDatabase.createDatabase()
 
+    storeDatabase = DatabaseEnvConfig.toStoredDatabase()
     storeDatabase.createDatabase()
+
+
+    eventManager = PersistentEventManager(eventsDatabase)
+
+
 
     val kafkaTables = listOf(
         events, // For kafka
     )
-
-    eventsDatabase.createTables(*kafkaTables.toTypedArray())
 
     val tables = arrayOf(
         catalog,
@@ -83,9 +86,8 @@ fun main(args: Array<String>) {
     )
     storeDatabase.createTables(*tables)
 
-    persistentReader = PersistentDataReader(eventsDatabase)
-    persistentWriter = PersistentDataStore(eventsDatabase)
 
+    eventsDatabase.createTables(*kafkaTables.toTypedArray())
     context = runApplication<CoordinatorApplication>(*args)
     printSharedConfig()
 }
@@ -2,8 +2,7 @@ package no.iktdev.mediaprocessing.coordinator.controller
 
 import com.google.gson.Gson
 import no.iktdev.mediaprocessing.coordinator.Coordinator
-import no.iktdev.mediaprocessing.coordinator.persistentReader
-import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataReader
+import no.iktdev.mediaprocessing.coordinator.eventManager
 import no.iktdev.mediaprocessing.shared.contract.dto.RequestWorkProceed
 import org.springframework.beans.factory.annotation.Autowired
 import org.springframework.http.HttpStatus
@@ -20,7 +19,7 @@ class ActionEventController(@Autowired var coordinator: Coordinator) {
     @RequestMapping("/flow/proceed")
     fun permitRunOnSequence(@RequestBody data: RequestWorkProceed): ResponseEntity<String> {
 
-        val set = persistentReader.getMessagesFor(data.referenceId)
+        val set = eventManager.getEventsWith(data.referenceId)
         if (set.isEmpty()) {
             return ResponseEntity.status(HttpStatus.NO_CONTENT).body(Gson().toJson(data))
         }
@@ -30,10 +30,10 @@ class ProcessMapping(val events: List<PersistentMessage>) {
 
 
     fun waitsForEncode(): Boolean {
-        val arguments = events.filter { it.event == KafkaEvents.EVENT_MEDIA_ENCODE_PARAMETER_CREATED }
-        val created = events.filter { it.event == KafkaEvents.EVENT_WORK_ENCODE_CREATED}
+        val arguments = events.filter { it.event == KafkaEvents.EventMediaParameterEncodeCreated }
+        val created = events.filter { it.event == KafkaEvents.EventWorkEncodeCreated}
 
-        val performed = events.filter { it.event == KafkaEvents.EVENT_WORK_ENCODE_PERFORMED }
+        val performed = events.filter { it.event == KafkaEvents.EventWorkEncodePerformed }
         val isSkipped = events.filter { it.isSkipped() }
 
         return (arguments.isNotEmpty() && created.isEmpty()) || created.size > performed.size + isSkipped.size
@@ -41,18 +41,18 @@ class ProcessMapping(val events: List<PersistentMessage>) {
 
     fun waitsForExtract(): Boolean {
         // Check if message is declared as skipped with statis
-        val arguments = events.filter { it.event == KafkaEvents.EVENT_MEDIA_EXTRACT_PARAMETER_CREATED }.filter { it.data.isSuccess() }
-        val created = events.filter { it.event == KafkaEvents.EVENT_WORK_EXTRACT_CREATED }
+        val arguments = events.filter { it.event == KafkaEvents.EventMediaParameterExtractCreated }.filter { it.data.isSuccess() }
+        val created = events.filter { it.event == KafkaEvents.EventWorkExtractCreated }
 
-        val performed = events.filter { it.event == KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED }
+        val performed = events.filter { it.event == KafkaEvents.EventWorkExtractPerformed }
         val isSkipped = events.filter { it.isSkipped() }
 
         return (arguments.isNotEmpty() && created.isEmpty()) || created.size > performed.size + isSkipped.size
     }
 
     fun waitsForConvert(): Boolean {
-        val created = events.filter { it.event == KafkaEvents.EVENT_WORK_CONVERT_CREATED }
-        val performed = events.filter { it.event == KafkaEvents.EVENT_WORK_CONVERT_PERFORMED }
+        val created = events.filter { it.event == KafkaEvents.EventWorkConvertCreated }
+        val performed = events.filter { it.event == KafkaEvents.EventWorkConvertPerformed }
         val isSkipped = events.filter { it.isSkipped() }
 
         return created.size > performed.size + isSkipped.size
@@ -21,9 +21,9 @@ class BaseInfoFromFile(@Autowired override var coordinator: Coordinator) : TaskC
     val log = KotlinLogging.logger {}
 
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED
+        get() = KafkaEvents.EventMediaReadBaseInfoPerformed
 
-    override val requiredEvents: List<KafkaEvents> = listOf(KafkaEvents.EVENT_MEDIA_PROCESS_STARTED)
+    override val requiredEvents: List<KafkaEvents> = listOf(KafkaEvents.EventMediaProcessStarted)
 
 
     override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
@@ -34,22 +34,23 @@ class BaseInfoFromFile(@Autowired override var coordinator: Coordinator) : TaskC
 
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
         log.info { "${event.referenceId} triggered by ${event.event}" }
-        val selected = events.lastOrSuccessOf(KafkaEvents.EVENT_MEDIA_PROCESS_STARTED) ?: return null
-        return readFileInfo(selected.data as MediaProcessStarted)
+        val selected = events.lastOrSuccessOf(KafkaEvents.EventMediaProcessStarted) ?: return null
+        return readFileInfo(selected.data as MediaProcessStarted, event.eventId)
     }
 
-    fun readFileInfo(started: MediaProcessStarted): MessageDataWrapper {
+    fun readFileInfo(started: MediaProcessStarted, eventId: String): MessageDataWrapper {
         val result = try {
             val fileName = File(started.file).nameWithoutExtension
             val fileNameParser = FileNameParser(fileName)
             BaseInfoPerformed(
                 Status.COMPLETED,
                 title = fileNameParser.guessDesiredTitle(),
-                sanitizedName = fileNameParser.guessDesiredFileName()
+                sanitizedName = fileNameParser.guessDesiredFileName(),
+                derivedFromEventId = eventId
             )
         } catch (e: Exception) {
             e.printStackTrace()
-            SimpleMessageData(Status.ERROR, e.message ?: "Unable to obtain proper info from file")
+            SimpleMessageData(Status.ERROR, e.message ?: "Unable to obtain proper info from file", eventId)
         }
         return result
     }
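From the BaseInfoFromFile hunks above onward, nearly every produced payload gains a derivedFromEventId argument so a result can be traced back to the event that triggered it. The DTO declarations are not in this diff; the call sites imply roughly this shape (hypothetical reconstruction):

```kotlin
// Hypothetical reconstruction from call sites; the real classes live in
// no.iktdev.mediaprocessing.shared.kafka.dto and are not shown in this commit.
open class MessageDataWrapper(
    val status: Status,
    val derivedFromEventId: String? = null, // eventId of the triggering message
)

class SimpleMessageData(
    status: Status,
    val message: String? = null,
    derivedFromEventId: String? = null,
) : MessageDataWrapper(status, derivedFromEventId)
```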
@@ -3,6 +3,7 @@ package no.iktdev.mediaprocessing.coordinator.tasks.event
 import mu.KotlinLogging
 import no.iktdev.mediaprocessing.coordinator.Coordinator
 import no.iktdev.mediaprocessing.coordinator.TaskCreator
+import no.iktdev.mediaprocessing.coordinator.getStoreDatabase
 import no.iktdev.mediaprocessing.coordinator.mapping.ProcessMapping
 import no.iktdev.mediaprocessing.shared.common.datasource.executeOrException
 import no.iktdev.mediaprocessing.shared.common.datasource.executeWithStatus
@@ -32,7 +33,7 @@ class CollectAndStoreTask(@Autowired override var coordinator: Coordinator) : Ta
     override val producesEvent: KafkaEvents = KafkaEvents.EVENT_COLLECT_AND_STORE
 
     override val requiredEvents: List<KafkaEvents> = listOf(
-        EVENT_MEDIA_PROCESS_STARTED,
+        EventMediaProcessStarted,
         EVENT_MEDIA_PROCESS_COMPLETED
     )
     override val listensForEvents: List<KafkaEvents> = KafkaEvents.entries
@@ -40,7 +41,7 @@ class CollectAndStoreTask(@Autowired override var coordinator: Coordinator) : Ta
 
 
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
-        val started = events.lastOrSuccessOf(EVENT_MEDIA_PROCESS_STARTED) ?: return null
+        val started = events.lastOrSuccessOf(EventMediaProcessStarted) ?: return null
         val completed = events.lastOrSuccessOf(EVENT_MEDIA_PROCESS_COMPLETED) ?: return null
         if (!started.data.isSuccess() || !completed.data.isSuccess() && completed.data.status != Status.SKIPPED) {
             return null
@@ -65,20 +66,20 @@ class CollectAndStoreTask(@Autowired override var coordinator: Coordinator) : Ta
                 null
             else
                 storeCatalog(metadata = meta,genres = genres, videoFile = videoFile, videoDetails = videoInfo)
-        } ?: return SimpleMessageData(Status.ERROR, "Unable to store catalog when metadata is null")
+        } ?: return SimpleMessageData(Status.ERROR, "Unable to store catalog when metadata is null", event.eventId)
 
         mapped.metadata?.let {
             storeMetadata(catalogId = catalogId, metadata = it)
         }
 
-        return SimpleMessageData(Status.COMPLETED)
+        return SimpleMessageData(Status.COMPLETED, derivedFromEventId = event.eventId)
     }
 
     private fun storeSubtitles(collection: String, subtitles: List<String>): Boolean {
         val result = subtitles.map { subtitle ->
             val subtitleFile = File(subtitle)
             val language = subtitleFile.parentFile.name
-            subtitle to executeWithStatus {
+            subtitle to executeWithStatus(getStoreDatabase()) {
                 SubtitleQuery(
                     collection = collection,
                     associatedWithVideo = subtitleFile.nameWithoutExtension,
@@ -93,7 +94,7 @@ class CollectAndStoreTask(@Autowired override var coordinator: Coordinator) : Ta
 
     private fun storeMetadata(catalogId: Int, metadata: MetadataDto) {
         metadata.summary.forEach {
-            withTransaction {
+            withTransaction(getStoreDatabase()) {
                 SummaryQuery(
                     cid = catalogId,
                     language = it.language,
@@ -104,7 +105,7 @@ class CollectAndStoreTask(@Autowired override var coordinator: Coordinator) : Ta
     }
 
     private fun storeAndGetGenres(genres: List<String>): String? {
-        return withTransaction {
+        return withTransaction(getStoreDatabase()) {
             val gq = GenreQuery( *genres.toTypedArray() )
             gq.insertAndGetIds()
             gq.getIds().joinToString(",")
@@ -141,7 +142,7 @@ class CollectAndStoreTask(@Autowired override var coordinator: Coordinator) : Ta
         }
         val ignoreException = result?.cause is SQLIntegrityConstraintViolationException && (result as ExposedSQLException).errorCode == 1062
         return if (result == null || ignoreException ) {
-            return withTransaction {
+            return withTransaction(getStoreDatabase()) {
                 precreatedCatalogQuery.getId()
             }
         } else null
@@ -22,16 +22,16 @@ class CompleteMediaTask(@Autowired override var coordinator: Coordinator) : Task
     override val producesEvent: KafkaEvents = KafkaEvents.EVENT_MEDIA_PROCESS_COMPLETED
 
     override val requiredEvents: List<KafkaEvents> = listOf(
-        EVENT_MEDIA_PROCESS_STARTED,
-        EVENT_MEDIA_READ_BASE_INFO_PERFORMED,
-        EVENT_MEDIA_READ_OUT_NAME_AND_TYPE
+        EventMediaProcessStarted,
+        EventMediaReadBaseInfoPerformed,
+        EventMediaReadOutNameAndType
     )
     override val listensForEvents: List<KafkaEvents> = KafkaEvents.entries
 
 
 
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
-        val started = events.lastOrSuccessOf(EVENT_MEDIA_PROCESS_STARTED) ?: return null
+        val started = events.lastOrSuccessOf(EventMediaProcessStarted) ?: return null
         if (!started.data.isSuccess()) {
             return null
         }
@@ -40,9 +40,9 @@ class CompleteMediaTask(@Autowired override var coordinator: Coordinator) : Task
         // TODO: Add filter in case a metadata request was performed or a cover download was performed. for now, for base functionality, it requires a performed event.
 
         val requiresOneOf = listOf(
-            EVENT_WORK_CONVERT_PERFORMED,
-            EVENT_WORK_EXTRACT_PERFORMED,
-            EVENT_WORK_ENCODE_PERFORMED
+            EventWorkConvertPerformed,
+            EventWorkExtractPerformed,
+            EventWorkEncodePerformed
        )
 
        if (requiresOneOf.none { it in receivedEvents }) {
@@ -56,7 +56,7 @@ class CompleteMediaTask(@Autowired override var coordinator: Coordinator) : Task
 
         val mapper = ProcessMapping(events)
         if (mapper.canCollect()) {
-            return ProcessCompleted(Status.COMPLETED)
+            return ProcessCompleted(Status.COMPLETED, event.eventId)
         }
         return null
     }
@@ -39,9 +39,9 @@ class CompleteRequestTask(@Autowired override var coordinator: Coordinator) : Ta
         // TODO: Add filter in case a metadata request was performed or a cover download was performed. for now, for base functionality, it requires a performed event.
 
         val requiresOneOf = listOf(
-            EVENT_WORK_CONVERT_PERFORMED,
-            EVENT_WORK_EXTRACT_PERFORMED,
-            EVENT_WORK_ENCODE_PERFORMED
+            EventWorkConvertPerformed,
+            EventWorkExtractPerformed,
+            EventWorkEncodePerformed
        )
 
        if (requiresOneOf.none { it in receivedEvents }) {
@@ -55,7 +55,7 @@ class CompleteRequestTask(@Autowired override var coordinator: Coordinator) : Ta
 
         val mapper = ProcessMapping(events)
         if (mapper.canCollect()) {
-            return ProcessCompleted(Status.COMPLETED)
+            return ProcessCompleted(Status.COMPLETED, event.eventId)
         }
         return null
     }
@@ -16,11 +16,11 @@ import java.io.File
 @Service
 class CreateConvertWorkTask(@Autowired override var coordinator: Coordinator) : TaskCreator(coordinator) {
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_WORK_CONVERT_CREATED
+        get() = KafkaEvents.EventWorkConvertCreated
 
     override val requiredEvents: List<KafkaEvents>
         get() = listOf(
-            KafkaEvents.EVENT_WORK_EXTRACT_CREATED
+            KafkaEvents.EventWorkExtractCreated
             // TODO: Add event for request as well
         )
 
@@ -30,7 +30,7 @@ class CreateConvertWorkTask(@Autowired override var coordinator: Coordinator) :
         }
         val eventData = event.data as FfmpegWorkRequestCreated? ?: return null
 
-        val requiredEventId = if (event.event == KafkaEvents.EVENT_WORK_EXTRACT_CREATED) {
+        val requiredEventId = if (event.event == KafkaEvents.EventWorkExtractCreated) {
             event.eventId
         } else null;
 
@@ -41,7 +41,8 @@ class CreateConvertWorkTask(@Autowired override var coordinator: Coordinator) :
             inputFile = eventData.outFile,
             allowOverwrite = true,
             outFileBaseName = outFile.nameWithoutExtension,
-            outDirectory = outFile.parentFile.absolutePath
+            outDirectory = outFile.parentFile.absolutePath,
+            derivedFromEventId = event.eventId
         )
 
     }
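The remaining hunks are mostly the mechanical KafkaEvents rename. Note that the parameter events also swap word order (EVENT_MEDIA_ENCODE_PARAMETER_CREATED becomes EventMediaParameterEncodeCreated), and that constants this commit does not touch, such as EVENT_COLLECT_AND_STORE and EVENT_MEDIA_PROCESS_COMPLETED, still appear under their old names in unchanged context lines. Reconstructed only from names visible in this diff (the full enum is not shown):

```kotlin
// Assumed post-commit state of the enum, limited to entries seen in this diff.
enum class KafkaEvents {
    EventMediaProcessStarted,
    EventMediaReadBaseInfoPerformed,
    EventMediaReadStreamPerformed,
    EventMediaParseStreamPerformed,
    EventMediaReadOutNameAndType,
    EventMediaReadOutCover,
    EventMediaMetadataSearchPerformed,
    EventMediaParameterEncodeCreated,
    EventMediaParameterExtractCreated,
    EventMediaWorkProceedPermitted,
    EventWorkEncodeCreated,
    EventWorkExtractCreated,
    EventWorkConvertCreated,
    EventWorkEncodePerformed,
    EventWorkExtractPerformed,
    EventWorkConvertPerformed,
    EventWorkDownloadCoverPerformed,

    // Still on the old naming in this commit (seen only in context lines):
    EVENT_MEDIA_PROCESS_COMPLETED,
    EVENT_COLLECT_AND_STORE,
}
```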
@@ -9,9 +9,9 @@ import org.springframework.stereotype.Service
 @Service
 class CreateEncodeWorkTask(@Autowired override var coordinator: Coordinator) : CreateProcesserWorkTask(coordinator) {
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_WORK_ENCODE_CREATED
+        get() = KafkaEvents.EventWorkEncodeCreated
 
     override val requiredEvents: List<KafkaEvents>
-        get() = listOf(KafkaEvents.EVENT_MEDIA_ENCODE_PARAMETER_CREATED)
+        get() = listOf(KafkaEvents.EventMediaParameterEncodeCreated)
 
 }
@@ -9,8 +9,8 @@ import org.springframework.stereotype.Service
 @Service
 class CreateExtractWorkTask(@Autowired override var coordinator: Coordinator) : CreateProcesserWorkTask(coordinator) {
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_WORK_EXTRACT_CREATED
+        get() = KafkaEvents.EventWorkExtractCreated
 
     override val requiredEvents: List<KafkaEvents>
-        get() = listOf(KafkaEvents.EVENT_MEDIA_EXTRACT_PARAMETER_CREATED)
+        get() = listOf(KafkaEvents.EventMediaParameterExtractCreated)
 }
@@ -24,13 +24,13 @@ class DownloadAndStoreCoverTask(@Autowired override var coordinator: Coordinator
 
     val serviceId = "${getComputername()}::${this.javaClass.simpleName}::${UUID.randomUUID()}"
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_WORK_DOWNLOAD_COVER_PERFORMED
+        get() = KafkaEvents.EventWorkDownloadCoverPerformed
 
     override val requiredEvents: List<KafkaEvents>
         get() = listOf(
-            KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED,
-            KafkaEvents.EVENT_MEDIA_READ_OUT_COVER,
-            KafkaEvents.EVENT_WORK_ENCODE_PERFORMED
+            KafkaEvents.EventMediaMetadataSearchPerformed,
+            KafkaEvents.EventMediaReadOutCover,
+            KafkaEvents.EventWorkEncodePerformed
         )
     override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
         return super.prerequisitesRequired(events) + listOf {
@@ -39,14 +39,14 @@ class DownloadAndStoreCoverTask(@Autowired override var coordinator: Coordinator
     }
 
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
-        val cover = events.find { it.event == KafkaEvents.EVENT_MEDIA_READ_OUT_COVER }
+        val cover = events.find { it.event == KafkaEvents.EventMediaReadOutCover }
         if (cover == null || cover.data !is CoverInfoPerformed) {
-            return SimpleMessageData(Status.ERROR, "Wrong type triggered and caused an execution for $serviceId")
+            return SimpleMessageData(Status.ERROR, "Wrong type triggered and caused an execution for $serviceId", event.eventId)
         }
         val coverData = cover.data as CoverInfoPerformed
         val outDir = File(coverData.outDir)
         if (!outDir.exists())
-            return SimpleMessageData(Status.ERROR, "Check for output directory for cover storage failed for $serviceId")
+            return SimpleMessageData(Status.ERROR, "Check for output directory for cover storage failed for $serviceId", event.eventId)
 
         val client = DownloadClient(coverData.url, File(coverData.outDir), coverData.outFileBaseName)
 
@@ -67,10 +67,10 @@ class DownloadAndStoreCoverTask(@Autowired override var coordinator: Coordinator
         }
 
         return if (result == null) {
-            SimpleMessageData(Status.ERROR, "Could not download cover, check logs")
+            SimpleMessageData(Status.ERROR, "Could not download cover, check logs", event.eventId)
         } else {
             val status = if (result.exists() && result.canRead()) Status.COMPLETED else Status.ERROR
-            CoverDownloadWorkPerformed(status = status, message = message, coverFile = result.absolutePath)
+            CoverDownloadWorkPerformed(status = status, message = message, coverFile = result.absolutePath, event.eventId)
         }
     }
 }
@@ -20,12 +20,12 @@ class MetadataAndBaseInfoToCoverTask(@Autowired override var coordinator: Coordi
 
 
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_MEDIA_READ_OUT_COVER
+        get() = KafkaEvents.EventMediaReadOutCover
 
     override val requiredEvents: List<KafkaEvents> = listOf(
-        KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED,
-        KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE,
-        KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED
+        KafkaEvents.EventMediaReadBaseInfoPerformed,
+        KafkaEvents.EventMediaReadOutNameAndType,
+        KafkaEvents.EventMediaMetadataSearchPerformed
     )
 
     override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
@@ -50,7 +50,8 @@ class MetadataAndBaseInfoToCoverTask(@Autowired override var coordinator: Coordi
             status = Status.COMPLETED,
             url = coverUrl,
             outFileBaseName = baseInfo.title,
-            outDir = fileOut.outDirectory
+            outDir = fileOut.outDirectory,
+            derivedFromEventId = event.eventId
         )
     }
 }
@ -39,23 +39,23 @@ class MetadataAndBaseInfoToFileOut(@Autowired override var coordinator: Coordina
|
|||||||
val metadataTimeout = KafkaEnv.metadataTimeoutMinutes * 60
|
val metadataTimeout = KafkaEnv.metadataTimeoutMinutes * 60
|
||||||
|
|
||||||
override val producesEvent: KafkaEvents
|
override val producesEvent: KafkaEvents
|
||||||
get() = KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE
|
get() = KafkaEvents.EventMediaReadOutNameAndType
|
||||||
|
|
||||||
val waitingProcessesForMeta: MutableMap<String, LocalDateTime> = mutableMapOf()
|
val waitingProcessesForMeta: MutableMap<String, MetadataTriggerData> = mutableMapOf()
|
||||||
|
|
||||||
override val listensForEvents: List<KafkaEvents> = listOf(
|
override val listensForEvents: List<KafkaEvents> = listOf(
|
||||||
KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED,
|
KafkaEvents.EventMediaReadBaseInfoPerformed,
|
||||||
KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED
|
KafkaEvents.EventMediaMetadataSearchPerformed
|
||||||
)
|
)
|
||||||
|
|
||||||
override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
|
override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
|
||||||
log.info { "${event.referenceId} triggered by ${event.event}" }
|
log.info { "${event.referenceId} triggered by ${event.event}" }
|
||||||
|
|
||||||
val baseInfo = events.lastOrSuccessOf(KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED) { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed?
|
+        val baseInfo = events.lastOrSuccessOf(KafkaEvents.EventMediaReadBaseInfoPerformed) { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed?
-        val meta = events.lastOrSuccessOf(KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED) { it.data is MetadataPerformed }?.data as MetadataPerformed?
+        val meta = events.lastOrSuccessOf(KafkaEvents.EventMediaMetadataSearchPerformed) { it.data is MetadataPerformed }?.data as MetadataPerformed?
 
         // Only Return here as both baseInfo events are required to continue
-        if (!baseInfo.isSuccess() || !baseInfo.hasValidData() || events.any { it.event == KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE }) {
+        if (!baseInfo.isSuccess() || !baseInfo.hasValidData() || events.any { it.event == KafkaEvents.EventMediaReadOutNameAndType }) {
             return null
         }
         if (baseInfo.isSuccess() && meta == null) {
@@ -65,7 +65,7 @@ class MetadataAndBaseInfoToFileOut(@Autowired override var coordinator: Coordina
         val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm", Locale.ENGLISH)
         log.info { "Sending ${baseInfo?.title} to waiting queue. Expiry ${dateTime.format(formatter)}" }
         if (!waitingProcessesForMeta.containsKey(event.referenceId)) {
-            waitingProcessesForMeta[event.referenceId] = LocalDateTime.now()
+            waitingProcessesForMeta[event.referenceId] = MetadataTriggerData(event.eventId, LocalDateTime.now())
         }
         return null
     }
@@ -92,9 +92,9 @@ class MetadataAndBaseInfoToFileOut(@Autowired override var coordinator: Coordina
 
         val vi = fileDeterminate.getDeterminedVideoInfo()?.toJsonObject()
         return if (vi != null) {
-            VideoInfoPerformed(Status.COMPLETED, vi, outDirectory = outputDirectory.absolutePath)
+            VideoInfoPerformed(Status.COMPLETED, vi, outDirectory = outputDirectory.absolutePath, event.eventId)
         } else {
-            SimpleMessageData(Status.ERROR, "No VideoInfo found...")
+            SimpleMessageData(Status.ERROR, "No VideoInfo found...", event.eventId)
         }
     }
 
@@ -103,13 +103,15 @@ class MetadataAndBaseInfoToFileOut(@Autowired override var coordinator: Coordina
     @Scheduled(fixedDelay = (1_000))
     fun sendErrorMessageForMetadata() {
         val expired = waitingProcessesForMeta.filter {
-            LocalDateTime.now().toEpochSeconds() > (it.value.toEpochSeconds() + metadataTimeout)
+            LocalDateTime.now().toEpochSeconds() > (it.value.executed.toEpochSeconds() + metadataTimeout)
         }
         expired.forEach {
             log.info { "Producing timeout for ${it.key} ${LocalDateTime.now()}" }
-            producer.sendMessage(it.key, KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED, MetadataPerformed(status = Status.ERROR, "Timed Out by: ${this@MetadataAndBaseInfoToFileOut::class.simpleName}"))
+            producer.sendMessage(it.key, KafkaEvents.EventMediaMetadataSearchPerformed, MetadataPerformed(status = Status.ERROR, "Timed Out by: ${this@MetadataAndBaseInfoToFileOut::class.simpleName}", derivedFromEventId = it.value.eventId))
             waitingProcessesForMeta.remove(it.key)
         }
     }
 
+    data class MetadataTriggerData(val eventId: String, val executed: LocalDateTime)
+
 }
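Note: the waiting queue now stores a MetadataTriggerData instead of a bare timestamp, so the timeout producer can stamp derivedFromEventId onto the error message. A minimal, self-contained sketch of that expiry logic (the real code compares epoch seconds via a toEpochSeconds() extension; plain java.time is used here, and the timeout value is an assumption):

import java.time.LocalDateTime

data class MetadataTriggerData(val eventId: String, val executed: LocalDateTime)

fun main() {
    val waiting = mutableMapOf<String, MetadataTriggerData>()
    waiting["ref-1"] = MetadataTriggerData("evt-42", LocalDateTime.now().minusMinutes(10))

    val metadataTimeoutSeconds = 300L // hypothetical timeout value
    val now = LocalDateTime.now()
    val expired = waiting.filter { (_, trigger) ->
        trigger.executed.plusSeconds(metadataTimeoutSeconds).isBefore(now)
    }
    expired.forEach { (referenceId, trigger) ->
        // The timeout message can now carry the originating eventId.
        println("Timed out $referenceId, derivedFromEventId=${trigger.eventId}")
        waiting.remove(referenceId)
    }
}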
@@ -25,10 +25,10 @@ class ParseVideoFileStreams(@Autowired override var coordinator: Coordinator) :
 
 
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_MEDIA_PARSE_STREAM_PERFORMED
+        get() = KafkaEvents.EventMediaParseStreamPerformed
 
     override val requiredEvents: List<KafkaEvents> = listOf(
-        KafkaEvents.EVENT_MEDIA_READ_STREAM_PERFORMED
+        KafkaEvents.EventMediaReadStreamPerformed
     )
 
     override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
@@ -39,11 +39,11 @@ class ParseVideoFileStreams(@Autowired override var coordinator: Coordinator) :
 
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
         log.info { "${event.referenceId} triggered by ${event.event}" }
-        val desiredEvent = events.lastOrSuccessOf(KafkaEvents.EVENT_MEDIA_READ_STREAM_PERFORMED) ?: return null
-        return parseStreams(desiredEvent.data as ReaderPerformed)
+        val desiredEvent = events.lastOrSuccessOf(KafkaEvents.EventMediaReadStreamPerformed) ?: return null
+        return parseStreams(desiredEvent.data as ReaderPerformed, desiredEvent.eventId)
     }
 
-    fun parseStreams(data: ReaderPerformed): MessageDataWrapper {
+    fun parseStreams(data: ReaderPerformed, eventId: String): MessageDataWrapper {
         val gson = Gson()
         return try {
             val jStreams = data.output.getAsJsonArray("streams")
@@ -71,11 +71,11 @@ class ParseVideoFileStreams(@Autowired override var coordinator: Coordinator) :
                 audioStream = audioStreams,
                 subtitleStream = subtitleStreams
             )
-            MediaStreamsParsePerformed(Status.COMPLETED, parsedStreams)
+            MediaStreamsParsePerformed(Status.COMPLETED, parsedStreams, eventId)
 
         } catch (e: Exception) {
             e.printStackTrace()
-            SimpleMessageData(Status.ERROR, message = e.message)
+            SimpleMessageData(Status.ERROR, message = e.message, eventId)
         }
 
     }
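The same change repeats across the tasks above and below: the eventId of the triggering message is threaded into the worker function so the produced result records its origin. A simplified, self-contained sketch of the pattern (the DTO names here are stand-ins for the shared kafka types):

enum class Status { COMPLETED, ERROR }

// Stand-in for MessageDataWrapper: every result now carries derivedFromEventId.
open class Result(val status: Status, val derivedFromEventId: String?)
class Parsed(val payload: String, eventId: String) : Result(Status.COMPLETED, eventId)

fun parseStreams(raw: String, eventId: String): Result =
    if (raw.isNotBlank()) Parsed(raw.trim(), eventId)
    else Result(Status.ERROR, eventId)

fun main() {
    val result = parseStreams("{ \"streams\": [] }", eventId = "evt-123")
    println(result.derivedFromEventId) // evt-123 -- lineage is preserved end to end
}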
@@ -26,10 +26,10 @@ class ReadVideoFileStreams(@Autowired override var coordinator: Coordinator) : T
 
 
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_MEDIA_READ_STREAM_PERFORMED
+        get() = KafkaEvents.EventMediaReadStreamPerformed
 
     override val requiredEvents: List<KafkaEvents> = listOf(
-        KafkaEvents.EVENT_MEDIA_PROCESS_STARTED
+        KafkaEvents.EventMediaProcessStarted
     )
 
 
@@ -43,18 +43,18 @@ class ReadVideoFileStreams(@Autowired override var coordinator: Coordinator) : T
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
         log.info { "${event.referenceId} triggered by ${event.event}" }
         val desiredEvent = events.find { it.data is MediaProcessStarted } ?: return null
-        return runBlocking { fileReadStreams(desiredEvent.data as MediaProcessStarted) }
+        return runBlocking { fileReadStreams(desiredEvent.data as MediaProcessStarted, desiredEvent.eventId) }
     }
 
-    suspend fun fileReadStreams(started: MediaProcessStarted): MessageDataWrapper {
+    suspend fun fileReadStreams(started: MediaProcessStarted, eventId: String): MessageDataWrapper {
         val file = File(started.file)
         return if (file.exists() && file.isFile) {
             val result = readStreams(file)
             val joined = result.output.joinToString(" ")
             val jsoned = Gson().fromJson(joined, JsonObject::class.java)
-            ReaderPerformed(Status.COMPLETED, file = started.file, output = jsoned)
+            ReaderPerformed(Status.COMPLETED, file = started.file, output = jsoned, derivedFromEventId = eventId)
         } else {
-            SimpleMessageData(Status.ERROR, "File in data is not a file or does not exist")
+            SimpleMessageData(Status.ERROR, "File in data is not a file or does not exist", eventId)
         }
     }
 
@@ -12,12 +12,11 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkReques
 import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkerArgumentsCreated
 import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
 import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess
-import org.springframework.beans.factory.annotation.Autowired
 
 abstract class CreateProcesserWorkTask(override var coordinator: Coordinator) : TaskCreator(coordinator) {
 
     override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
-        val started = events.findLast { it.event == KafkaEvents.EVENT_MEDIA_PROCESS_STARTED }?.data as MediaProcessStarted?
+        val started = events.findLast { it.event == KafkaEvents.EventMediaProcessStarted }?.data as MediaProcessStarted?
         if (started == null) {
             return null
         }
@@ -26,7 +25,7 @@ abstract class CreateProcesserWorkTask(override var coordinator: Coordinator) :
             return null
         }
 
-        val proceed = events.find { it.event == KafkaEvents.EVENT_MEDIA_WORK_PROCEED_PERMITTED }
+        val proceed = events.find { it.event == KafkaEvents.EventMediaWorkProceedPermitted }
         if (proceed == null && started.type == ProcessType.MANUAL) {
             log.warn { "${event.referenceId} waiting for Proceed event due to Manual process" }
             return null
@@ -24,14 +24,14 @@ class EncodeArgumentCreatorTask(@Autowired override var coordinator: Coordinator
 
     val preference = Preference.getPreference()
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_MEDIA_ENCODE_PARAMETER_CREATED
+        get() = KafkaEvents.EventMediaParameterEncodeCreated
 
     override val requiredEvents: List<KafkaEvents> =
         listOf(
-            KafkaEvents.EVENT_MEDIA_PROCESS_STARTED,
-            KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED,
-            KafkaEvents.EVENT_MEDIA_PARSE_STREAM_PERFORMED,
-            KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE
+            KafkaEvents.EventMediaProcessStarted,
+            KafkaEvents.EventMediaReadBaseInfoPerformed,
+            KafkaEvents.EventMediaParseStreamPerformed,
+            KafkaEvents.EventMediaReadOutNameAndType
         )
 
     override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
@@ -61,7 +61,7 @@ class EncodeArgumentCreatorTask(@Autowired override var coordinator: Coordinator
         }
 
         if (videoInfoWrapper == null || videoInfo == null) {
-            log.error { "${KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE} result is read as null" }
+            log.error { "${KafkaEvents.EventMediaReadOutNameAndType} result is read as null" }
             return null
         }
 
@@ -74,7 +74,8 @@ class EncodeArgumentCreatorTask(@Autowired override var coordinator: Coordinator
             outDir = File(videoInfoWrapper.outDirectory),
             preference = preference.encodePreference,
             baseInfo = baseInfo,
-            serializedParsedStreams = serializedParsedStreams
+            serializedParsedStreams = serializedParsedStreams,
+            eventId = event.eventId
         )
     }
 
@@ -84,7 +85,8 @@ class EncodeArgumentCreatorTask(@Autowired override var coordinator: Coordinator
         outDir: File,
         preference: EncodingPreference,
         baseInfo: BaseInfoPerformed,
-        serializedParsedStreams: ParsedMediaStreams
+        serializedParsedStreams: ParsedMediaStreams,
+        eventId: String
     ): MessageDataWrapper {
         val outVideoFile = outDir.using("${outFullName}.mp4").absolutePath
 
@@ -97,7 +99,7 @@ class EncodeArgumentCreatorTask(@Autowired override var coordinator: Coordinator
 
         val vaArgs = toFfmpegWorkerArguments(vArg, aArg)
         return if (vaArgs.isEmpty()) {
-            SimpleMessageData(Status.ERROR, message = "Unable to produce arguments")
+            SimpleMessageData(Status.ERROR, message = "Unable to produce arguments", derivedFromEventId = eventId)
         } else {
             FfmpegWorkerArgumentsCreated(
                 status = Status.COMPLETED,
@@ -107,7 +109,8 @@ class EncodeArgumentCreatorTask(@Autowired override var coordinator: Coordinator
                     outputFile = outVideoFile,
                     arguments = vaArgs
                 )
-            )
+                ),
+                derivedFromEventId = eventId
             )
         }
     }
@@ -28,13 +28,13 @@ class ExtractArgumentCreatorTask(@Autowired override var coordinator: Coordinato
     val preference = Preference.getPreference()
 
     override val producesEvent: KafkaEvents
-        get() = KafkaEvents.EVENT_MEDIA_EXTRACT_PARAMETER_CREATED
+        get() = KafkaEvents.EventMediaParameterExtractCreated
 
     override val requiredEvents: List<KafkaEvents> = listOf(
-        KafkaEvents.EVENT_MEDIA_PROCESS_STARTED,
-        KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED,
-        KafkaEvents.EVENT_MEDIA_PARSE_STREAM_PERFORMED,
-        KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE
+        KafkaEvents.EventMediaProcessStarted,
+        KafkaEvents.EventMediaReadBaseInfoPerformed,
+        KafkaEvents.EventMediaParseStreamPerformed,
+        KafkaEvents.EventMediaReadOutNameAndType
     )
 
 
@@ -64,7 +64,7 @@ class ExtractArgumentCreatorTask(@Autowired override var coordinator: Coordinato
         val videoInfo = videoInfoWrapper?.toValueObject()
 
         if (videoInfoWrapper == null || videoInfo == null) {
-            log.error { "${KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE} result is read as null" }
+            log.error { "${KafkaEvents.EventMediaReadOutNameAndType} result is read as null" }
             return null
         }
 
@@ -73,7 +73,8 @@ class ExtractArgumentCreatorTask(@Autowired override var coordinator: Coordinato
             outFullName = videoInfo.fullName,
             outDir = File(videoInfoWrapper.outDirectory),
             baseInfo = baseInfo,
-            serializedParsedStreams = serializedParsedStreams
+            serializedParsedStreams = serializedParsedStreams,
+            eventId = event.eventId
         )
     }
 
@@ -82,7 +83,8 @@ class ExtractArgumentCreatorTask(@Autowired override var coordinator: Coordinato
         outFullName: String,
         outDir: File,
         baseInfo: BaseInfoPerformed,
-        serializedParsedStreams: ParsedMediaStreams
+        serializedParsedStreams: ParsedMediaStreams,
+        eventId: String
     ): MessageDataWrapper? {
         val subRootDir = outDir.using("sub")
         val sArg = SubtitleArguments(serializedParsedStreams.subtitleStream).getSubtitleArguments()
@@ -94,12 +96,13 @@ class ExtractArgumentCreatorTask(@Autowired override var coordinator: Coordinato
             )
         }
         if (entries.isEmpty()) {
-            return SimpleMessageData(status = Status.SKIPPED, "No entries found!")
+            return SimpleMessageData(status = Status.SKIPPED, "No entries found!", derivedFromEventId = eventId)
         }
         return FfmpegWorkerArgumentsCreated(
             status = Status.COMPLETED,
             inputFile = inputFile,
-            entries = entries
+            entries = entries,
+            derivedFromEventId = eventId
         )
     }
 
@@ -53,10 +53,17 @@ dependencies {
     implementation(project(mapOf("path" to ":shared:kafka")))
 
 
 
 
     testImplementation(platform("org.junit:junit-bom:5.9.1"))
     testImplementation("org.junit.jupiter:junit-jupiter")
 
+    testImplementation("io.mockk:mockk:1.12.0")
+    testImplementation("com.h2database:h2:1.4.200")
+    testImplementation("org.assertj:assertj-core:3.4.1")
 
+    testImplementation("org.junit.jupiter:junit-jupiter-api:5.7.2")
+    testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.7.2")
+    testImplementation("io.kotlintest:kotlintest-assertions:3.3.2")
+    testImplementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.3.0")
     implementation(kotlin("stdlib-jdk8"))
 }
 
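A hedged sketch of what the added test dependencies enable together (JUnit 5 + MockK + AssertJ); the ClaimStore interface is hypothetical, purely to demonstrate the wiring:

import io.mockk.every
import io.mockk.mockk
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test

// Hypothetical collaborator used only to show mocking and assertions.
interface ClaimStore {
    fun isClaimed(eventId: String): Boolean
}

class ClaimStoreTest {
    @Test
    fun `reports a claimed event as claimed`() {
        val store = mockk<ClaimStore>()
        every { store.isClaimed("evt-1") } returns true

        assertThat(store.isClaimed("evt-1")).isTrue()
    }
}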
@@ -11,6 +11,7 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.DeserializedConsumerRecord
 import no.iktdev.mediaprocessing.shared.kafka.dto.Message
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
+import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.NotificationOfDeletionPerformed
 import org.springframework.scheduling.annotation.EnableScheduling
 import org.springframework.scheduling.annotation.Scheduled
 import org.springframework.stereotype.Service
@@ -22,6 +23,15 @@ class Coordinator(): CoordinatorBase<PersistentProcessDataMessage, PersistentEve
     val io = Coroutines.io()
     override val listeners = PersistentEventProcessBasedMessageListener()
 
+    private val coordinatorEventListeners: MutableList<CoordinatorEvents> = mutableListOf()
+    fun getRegisteredEventListeners() = coordinatorEventListeners.toList()
+    fun addCoordinatorEventListener(listener: CoordinatorEvents) {
+        coordinatorEventListeners.add(listener)
+    }
+    fun removeCoordinatorEventListener(listener: CoordinatorEvents) {
+        coordinatorEventListeners.remove(listener)
+    }
+
     override fun createTasksBasedOnEventsAndPersistence(
         referenceId: String,
         eventId: String,
@@ -40,16 +50,18 @@ class Coordinator(): CoordinatorBase<PersistentProcessDataMessage, PersistentEve
     }
 
     override fun onMessageReceived(event: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>) {
-        if (!processKafkaEvents.contains(event.key)) {
+        if (!acceptEvents.contains(event.key)) {
+            return
+        }
+        if (event.key == KafkaEvents.EventNotificationOfWorkItemRemoval) {
+            handleDeletionOfEvents(event)
             return
         }
 
-        val success = persistentWriter.storeProcessDataMessage(event.key.event, event.value)
+        val success = eventManager.setProcessEvent(event.key, event.value)
         if (!success) {
             log.error { "Unable to store message: ${event.key.event} in database ${getEventsDatabase().database}" }
         } else {
-            deleteOlderEventsIfSuperseded(event.key, event.value)
-
             io.launch {
                 delay(500)
                 readAllMessagesFor(event.value.referenceId, event.value.eventId)
@@ -57,29 +69,20 @@ class Coordinator(): CoordinatorBase<PersistentProcessDataMessage, PersistentEve
         }
     }
 
-    fun deleteOlderEventsIfSuperseded(event: KafkaEvents, value: Message<out MessageDataWrapper>) {
-        val existingMessages = persistentReader.getMessagesFor(value.referenceId)
-        val workItems = existingMessages.filter { KafkaEvents.isOfWork(it.event) }
-        if (KafkaEvents.isOfWork(event)) {
-            // Here i would need to list all of the work events, then proceed to check which one of the derivedId does not correspond to a entry
-            // Nonmatching has been superseded
-
-            val superseded = existingMessages.filter { it.event == event && it.eventId != value.eventId }
-            superseded.forEach {
-                persistentWriter.deleteStoredEventDataMessage(referenceId = it.referenceId, eventId = it.eventId, event= it.event )
+    private fun handleDeletionOfEvents(kafkaPayload: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>) {
+        if (kafkaPayload.value.data is NotificationOfDeletionPerformed) {
+            val data = kafkaPayload.value.data as NotificationOfDeletionPerformed
+            if (data.deletedEvent in processKafkaEvents) {
+                coordinatorEventListeners.forEach { it.onCancelOrStopProcess(data.deletedEventId) }
+                eventManager.deleteProcessEvent(kafkaPayload.value.referenceId, data.deletedEventId)
             }
+        } else {
+            log.warn { "Deletion handling was triggered with wrong data" }
         }
     }
 
     fun readAllAvailableInQueue() {
-        val messages = persistentReader.getAvailableProcessEvents()
+        val messages = eventManager.getProcessEventsClaimable()
         io.launch {
             messages.forEach {
                 delay(1000)
@@ -89,15 +92,19 @@ class Coordinator(): CoordinatorBase<PersistentProcessDataMessage, PersistentEve
     }
 
     fun readAllMessagesFor(referenceId: String, eventId: String) {
-        val messages = persistentReader.getAvailableProcessEvents()
+        val messages = eventManager.getProcessEventsClaimable()
         createTasksBasedOnEventsAndPersistence(referenceId, eventId, messages)
     }
 
-    val processKafkaEvents = listOf(
-        KafkaEvents.EVENT_WORK_ENCODE_CREATED,
-        KafkaEvents.EVENT_WORK_EXTRACT_CREATED,
+    private final val processKafkaEvents = listOf(
+        KafkaEvents.EventWorkEncodeCreated,
+        KafkaEvents.EventWorkExtractCreated,
     )
 
+    private final val acceptEvents = listOf(
+        KafkaEvents.EventNotificationOfWorkItemRemoval
+    ) + processKafkaEvents
+
     @Scheduled(fixedDelay = (5_000))
     fun checkForWork() {
@@ -105,4 +112,8 @@ class Coordinator(): CoordinatorBase<PersistentProcessDataMessage, PersistentEve
         readAllAvailableInQueue()
     }
 
+    interface CoordinatorEvents {
+        fun onCancelOrStopProcess(eventId: String)
+    }
 
 }
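For illustration, a sketch of how a worker service can hook into the new cancellation path (this mirrors the EncodeService wiring further down in this diff):

// Assumes the Coordinator and its nested CoordinatorEvents interface added above.
class CancellationAwareWorker(private val coordinator: Coordinator) {
    private val coordinatorEvents = object : Coordinator.CoordinatorEvents {
        override fun onCancelOrStopProcess(eventId: String) {
            // Stop the matching job and release any held claim here.
            println("Cancel requested for $eventId")
        }
    }

    fun register() = coordinator.addCoordinatorEventListener(coordinatorEvents)
    fun unregister() = coordinator.removeCoordinatorEventListener(coordinatorEvents)
}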
@@ -5,6 +5,7 @@ import no.iktdev.mediaprocessing.shared.common.DatabaseEnvConfig
 import no.iktdev.mediaprocessing.shared.common.datasource.MySqlDataSource
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataReader
 import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataStore
+import no.iktdev.mediaprocessing.shared.common.persistance.PersistentEventManager
 import no.iktdev.mediaprocessing.shared.common.persistance.processerEvents
 import no.iktdev.mediaprocessing.shared.common.socket.SocketImplementation
 import no.iktdev.mediaprocessing.shared.common.toEventsDatabase
@@ -25,16 +26,17 @@ fun getEventsDatabase(): MySqlDataSource {
     return eventsDatabase
 }
 
-lateinit var persistentReader: PersistentDataReader
-lateinit var persistentWriter: PersistentDataStore
+lateinit var eventManager: PersistentEventManager
 
 fun main(args: Array<String>) {
     eventsDatabase = DatabaseEnvConfig.toEventsDatabase()
     eventsDatabase.createDatabase()
     eventsDatabase.createTables(processerEvents)
 
-    persistentReader = PersistentDataReader(eventsDatabase)
-    persistentWriter = PersistentDataStore(eventsDatabase)
+    eventManager = PersistentEventManager(eventsDatabase)
 
 
     val context = runApplication<ProcesserApplication>(*args)
 }
@@ -0,0 +1,23 @@
+package no.iktdev.mediaprocessing.processer.controller
+
+import no.iktdev.mediaprocessing.processer.Coordinator
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.http.HttpStatus
+import org.springframework.http.ResponseEntity
+import org.springframework.stereotype.Controller
+import org.springframework.web.bind.annotation.RequestBody
+import org.springframework.web.bind.annotation.RequestMapping
+
+@Controller
+class CancelController(@Autowired var coordinator: Coordinator) {
+
+    @RequestMapping(path = ["/cancel"])
+    fun cancelProcess(@RequestBody eventId: String? = null): ResponseEntity<String> {
+        if (eventId.isNullOrBlank()) {
+            return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("No eventId provided!")
+        }
+        coordinator.getRegisteredEventListeners().forEach { it.onCancelOrStopProcess(eventId) }
+        return ResponseEntity.ok(null)
+    }
+
+}
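The endpoint takes the eventId as the raw request body. A sketch of calling it with Java's built-in HttpClient; host and port are assumptions:

import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

fun main() {
    // Assumes the processer application listens on localhost:8080.
    val request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/cancel"))
        .POST(HttpRequest.BodyPublishers.ofString("evt-123")) // eventId as plain body
        .build()
    val response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString())
    println("HTTP ${response.statusCode()}") // 200 on dispatch, 400 when the body is blank
}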
@@ -2,17 +2,30 @@ package no.iktdev.mediaprocessing.processer.ffmpeg
 
 import com.github.pgreze.process.Redirect
 import com.github.pgreze.process.process
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.withContext
+import kotlinx.coroutines.*
 import mu.KotlinLogging
 import no.iktdev.exfl.coroutines.Coroutines
 import no.iktdev.exfl.using
 import no.iktdev.mediaprocessing.processer.ProcesserEnv
+import no.iktdev.mediaprocessing.processer.eventManager
 import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkRequestCreated
 import java.io.File
+import java.time.Duration
 
-class FfmpegWorker(val referenceId: String, val eventId: String, val info: FfmpegWorkRequestCreated, val listener: FfmpegWorkerEvents, val logDir: File) {
-    val scope = Coroutines.io()
+class FfmpegWorker(
+    val referenceId: String,
+    val eventId: String,
+    val info: FfmpegWorkRequestCreated,
+    val listener: FfmpegWorkerEvents,
+    val logDir: File
+) {
+    private val scope = Coroutines.io()
+    private var job: Job? = null
+
+    fun isWorking(): Boolean {
+        return job != null && (job?.isCompleted != true) && scope.isActive
+    }
+
     val decoder = FfmpegProgressDecoder()
     private val outputCache = mutableListOf<String>()
     private val log = KotlinLogging.logger {}
@@ -44,20 +57,41 @@ class FfmpegWorker(val referenceId: String, val eventId: String, val info: Ffmpe
         }
     }
 
-    suspend fun run() {
+    fun run() {
         val args = FfmpegWorkerArgumentsBuilder().using(info).build()
-        execute(args)
+        job = scope.launch {
+            execute(args)
+        }
     }
 
-    suspend fun runWithProgress() {
+    fun runWithProgress() {
         val args = FfmpegWorkerArgumentsBuilder().using(info).buildWithProgress()
-        execute(args)
+        job = scope.launch {
+            execute(args)
+        }
     }
 
+    private suspend fun startIAmAlive() {
+        scope.launch {
+            while (scope.isActive && job?.isCompleted != true) {
+                delay(Duration.ofMinutes(5).toMillis())
+                listener.onIAmAlive(referenceId, eventId)
+            }
+        }
+    }
+
+    fun cancel(message: String = "Work was interrupted as requested") {
+        job?.cancel()
+        scope.cancel(message)
+        listener.onError(referenceId, eventId, info, message)
+    }
+
     private suspend fun execute(args: List<String>) {
         withContext(Dispatchers.IO) {
             logFile.createNewFile()
         }
+        startIAmAlive()
         listener.onStarted(referenceId, eventId, info)
         val processOp = process(
             ProcesserEnv.ffmpeg, *args.toTypedArray(),
@@ -67,7 +101,8 @@ class FfmpegWorker(val referenceId: String, val eventId: String, val info: Ffmpe
                 //log.info { it }
                 onOutputChanged(it)
             },
-            destroyForcibly = true)
+            destroyForcibly = true
+        )
 
         val result = processOp
         onOutputChanged("Received exit code: ${result.resultCode}")
@@ -86,7 +121,7 @@ class FfmpegWorker(val referenceId: String, val eventId: String, val info: Ffmpe
         decoder.parseVideoProgress(outputCache.toList())?.let { decoded ->
             try {
                 val _progress = decoder.getProgress(decoded)
-                if (progress == null || _progress.progress > (progress?.progress ?: -1) ) {
+                if (progress == null || _progress.progress > (progress?.progress ?: -1)) {
                     progress = _progress
                     listener.onProgressChanged(referenceId, eventId, info, _progress)
                 }
@@ -107,8 +142,14 @@ class FfmpegWorker(val referenceId: String, val eventId: String, val info: Ffmpe
 }
 
 interface FfmpegWorkerEvents {
-    fun onStarted(referenceId: String, eventId: String, info: FfmpegWorkRequestCreated,)
+    fun onStarted(referenceId: String, eventId: String, info: FfmpegWorkRequestCreated)
     fun onCompleted(referenceId: String, eventId: String, info: FfmpegWorkRequestCreated)
     fun onError(referenceId: String, eventId: String, info: FfmpegWorkRequestCreated, errorMessage: String)
-    fun onProgressChanged(referenceId: String, eventId: String, info: FfmpegWorkRequestCreated, progress: FfmpegDecodedProgress)
+    fun onProgressChanged(
+        referenceId: String,
+        eventId: String,
+        info: FfmpegWorkRequestCreated,
+        progress: FfmpegDecodedProgress
+    )
+    fun onIAmAlive(referenceId: String, eventId: String) {}
 }
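With this refactor the worker owns its coroutine: run() and runWithProgress() return immediately and launch onto an internal job, so callers check isWorking() and call cancel() rather than wrapping the call in their own scope. A sketch of the intended call pattern, using the FfmpegWorker declared above (construction of FfmpegWorkRequestCreated omitted):

fun startOrCancel(worker: FfmpegWorker, cancelRequested: Boolean) {
    worker.runWithProgress() // non-blocking: work runs on the worker's own job

    if (cancelRequested && worker.isWorking()) {
        // cancel() stops the job, cancels the scope and notifies listener.onError
        worker.cancel("Cancelled by operator request")
    }
    // Every ~5 minutes the worker also calls listener.onIAmAlive, which
    // EncodeService uses below to refresh its claim in the events database.
}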
@@ -2,10 +2,7 @@ package no.iktdev.mediaprocessing.processer.services
 
 import mu.KotlinLogging
 import no.iktdev.mediaprocessing.processer.Coordinator
-import no.iktdev.mediaprocessing.processer.persistentReader
-import no.iktdev.mediaprocessing.processer.persistentWriter
-import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataReader
-import no.iktdev.mediaprocessing.shared.common.persistance.PersistentDataStore
+import no.iktdev.mediaprocessing.processer.eventManager
 import org.springframework.beans.factory.annotation.Autowired
 import org.springframework.scheduling.annotation.EnableScheduling
 import org.springframework.scheduling.annotation.Scheduled
@@ -21,13 +18,12 @@ class ClaimsService() {
 
     @Scheduled(fixedDelay = (300_000))
     fun validateClaims() {
-        val expiredClaims = persistentReader.getExpiredClaimsProcessEvents()
+        val expiredClaims = eventManager.getProcessEventsWithExpiredClaim()
         expiredClaims.forEach {
             log.info { "Found event with expired claim: ${it.referenceId}::${it.eventId}::${it.event}" }
         }
-        val store = persistentWriter
         expiredClaims.forEach {
-            val result = store.releaseProcessEventClaim(referenceId = it.referenceId, eventId = it.eventId)
+            val result = eventManager.deleteProcessEventClaim(referenceId = it.referenceId, eventId = it.eventId)
            if (result) {
                log.info { "Released claim on ${it.referenceId}::${it.eventId}::${it.event}" }
            } else {
@@ -28,19 +28,29 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
     private val log = KotlinLogging.logger {}
     private val logDir = ProcesserEnv.encodeLogDirectory
 
-    override val producesEvent = KafkaEvents.EVENT_WORK_ENCODE_PERFORMED
+    override val producesEvent = KafkaEvents.EventWorkEncodePerformed
     override val requiredEvents: List<KafkaEvents> = listOf(
-        KafkaEvents.EVENT_WORK_ENCODE_CREATED
+        KafkaEvents.EventWorkEncodeCreated
     )
 
-    val scope = Coroutines.io()
     private var runner: FfmpegWorker? = null
-    private var runnerJob: Job? = null
     val serviceId = "${getComputername()}::${this.javaClass.simpleName}::${UUID.randomUUID()}"
 
+    private final val coordinatorEvents = object: Coordinator.CoordinatorEvents {
+        override fun onCancelOrStopProcess(eventId: String) {
+            cancelWorkIfRunning(eventId)
+        }
+    }
+
     init {
         log.info { "Starting with id: $serviceId" }
     }
 
+    override fun attachListener() {
+        super.attachListener()
+        coordinator.addCoordinatorEventListener(listener = coordinatorEvents)
+    }
+
+
     override fun prerequisitesRequired(events: List<PersistentProcessDataMessage>): List<() -> Boolean> {
         return super.prerequisitesRequired(events) + listOf {
@@ -53,16 +63,16 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
             return null
         }
         if (event.data !is FfmpegWorkRequestCreated) {
-            return SimpleMessageData(status = Status.ERROR, message = "Invalid data (${event.data.javaClass.name}) passed for ${event.event.event}")
+            return SimpleMessageData(status = Status.ERROR, message = "Invalid data (${event.data.javaClass.name}) passed for ${event.event.event}", event.eventId)
         }
 
-        val isAlreadyClaimed = persistentReader.isProcessEventAlreadyClaimed(referenceId = event.referenceId, eventId = event.eventId)
+        val isAlreadyClaimed = eventManager.isProcessEventClaimed(referenceId = event.referenceId, eventId = event.eventId)
         if (isAlreadyClaimed) {
             log.warn { "Process is already claimed!" }
             return null
         }
 
-        if (runnerJob?.isActive != true) {
+        if (runner?.isWorking() != true) {
             startEncode(event)
         } else {
             log.warn { "Worker is already running.." }
@@ -78,19 +88,17 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
             logDir.mkdirs()
         }
 
-        val setClaim = persistentWriter.setProcessEventClaim(referenceId = event.referenceId, eventId = event.eventId, claimedBy = serviceId)
+        val setClaim = eventManager.setProcessEventClaim(referenceId = event.referenceId, eventId = event.eventId, claimer = serviceId)
         if (setClaim) {
             log.info { "Claim successful for ${event.referenceId} encode" }
             runner = FfmpegWorker(event.referenceId, event.eventId, info = ffwrc, logDir = logDir, listener = ffmpegWorkerEvents )
             if (File(ffwrc.outFile).exists() && ffwrc.arguments.firstOrNull() != "-y") {
                 ffmpegWorkerEvents.onError(event.referenceId, event.eventId, ffwrc, "${this::class.java.simpleName} identified the file as already existing, either allow overwrite or delete the offending file: ${ffwrc.outFile}")
                 // Setting consumed to prevent spamming
-                persistentWriter.setProcessEventCompleted(event.referenceId, event.eventId, serviceId)
+                eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
                 return
             }
-            runnerJob = scope.launch {
-                runner!!.runWithProgress()
-            }
+            runner?.runWithProgress()
 
         } else {
             log.error { "Failed to set claim on referenceId: ${event.referenceId} on event ${event.event}" }
@@ -105,7 +113,7 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
             return
         }
         log.info { "Encode started for ${runner.referenceId}" }
-        persistentWriter.setProcessEventClaim(runner.referenceId, runner.eventId, serviceId)
+        eventManager.setProcessEventClaim(runner.referenceId, runner.eventId, serviceId)
         sendProgress(referenceId, eventId, status = WorkStatus.Started, info, FfmpegDecodedProgress(
             progress = 0,
             time = "Unkown",
@@ -113,13 +121,6 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
             speed = "0",
             )
         )
-
-        scope.launch {
-            while (runnerJob?.isActive == true) {
-                delay(java.time.Duration.ofMinutes(5).toMillis())
-                persistentWriter.updateCurrentProcessEventClaim(runner.referenceId, runner.eventId, serviceId)
-            }
-        }
     }
 
     override fun onCompleted(referenceId: String, eventId: String, info: FfmpegWorkRequestCreated) {
@@ -129,18 +130,18 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
             return
         }
         log.info { "Encode completed for ${runner.referenceId}" }
-        val consumedIsSuccessful = persistentWriter.setProcessEventCompleted(runner.referenceId, runner.eventId, serviceId)
+        val consumedIsSuccessful = eventManager.setProcessEventCompleted(runner.referenceId, runner.eventId)
         runBlocking {
             delay(1000)
             if (!consumedIsSuccessful) {
-                persistentWriter.setProcessEventCompleted(runner.referenceId, runner.eventId, serviceId)
+                eventManager.setProcessEventCompleted(runner.referenceId, runner.eventId)
             }
             delay(1000)
-            var readbackIsSuccess = persistentReader.isProcessEventDefinedAsConsumed(runner.referenceId, runner.eventId, serviceId)
+            var readbackIsSuccess = eventManager.isProcessEventCompleted(runner.referenceId, runner.eventId)
 
             while (!readbackIsSuccess) {
                 delay(1000)
-                readbackIsSuccess = persistentReader.isProcessEventDefinedAsConsumed(runner.referenceId, runner.eventId, serviceId)
+                readbackIsSuccess = eventManager.isProcessEventCompleted(runner.referenceId, runner.eventId)
             }
             producer.sendMessage(referenceId = runner.referenceId, event = producesEvent,
                 data = ProcesserEncodeWorkPerformed(status = Status.COMPLETED, producedBy = serviceId, derivedFromEventId = runner.eventId, outFile = runner.info.outFile)
@@ -179,6 +180,10 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
             sendProgress(referenceId, eventId, WorkStatus.Working, info, progress)
         }
 
+        override fun onIAmAlive(referenceId: String, eventId: String) {
+            super.onIAmAlive(referenceId, eventId)
+            eventManager.setProcessEventClaimRefresh(referenceId, eventId, serviceId)
+        }
     }
 
     fun sendProgress(referenceId: String, eventId: String, status: WorkStatus, info: FfmpegWorkRequestCreated, progress: FfmpegDecodedProgress? = null) {
@@ -195,13 +200,20 @@ class EncodeService(@Autowired override var coordinator: Coordinator, @Autowired
 
 
     fun clearWorker() {
-        this.runner?.scope?.cancel()
         this.runner = null
     }
 
     @PreDestroy
     fun shutdown() {
-        scope.cancel()
-        runner?.scope?.cancel("Stopping application")
+        runner?.cancel("Stopping application")
     }
 
+    fun cancelWorkIfRunning(eventId: String) {
+        if (runner?.eventId == eventId) {
+            runner?.cancel()
+        }
+    }
+
+
+
 }
@@ -30,12 +30,11 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
     private val logDir = ProcesserEnv.extractLogDirectory
 
 
-    override val producesEvent = KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED
+    override val producesEvent = KafkaEvents.EventWorkExtractPerformed
 
     val scope = Coroutines.io()
 
     private var runner: FfmpegWorker? = null
-    private var runnerJob: Job? = null
 
     val serviceId = "${getComputername()}::${this.javaClass.simpleName}::${UUID.randomUUID()}"
     init {
@@ -43,7 +42,7 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
     }
 
     override val requiredEvents: List<KafkaEvents>
-        get() = listOf(KafkaEvents.EVENT_WORK_EXTRACT_CREATED)
+        get() = listOf(KafkaEvents.EventWorkExtractCreated)
 
     override fun prerequisitesRequired(events: List<PersistentProcessDataMessage>): List<() -> Boolean> {
         return super.prerequisitesRequired(events) + listOf {
@@ -56,16 +55,16 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
             return null
         }
         if (event.data !is FfmpegWorkRequestCreated) {
-            return SimpleMessageData(status = Status.ERROR, message = "Invalid data (${event.data.javaClass.name}) passed for ${event.event.event}")
+            return SimpleMessageData(status = Status.ERROR, message = "Invalid data (${event.data.javaClass.name}) passed for ${event.event.event}", event.eventId)
         }
 
-        val isAlreadyClaimed = persistentReader.isProcessEventAlreadyClaimed(referenceId = event.referenceId, eventId = event.eventId)
+        val isAlreadyClaimed = eventManager.isProcessEventClaimed(referenceId = event.referenceId, eventId = event.eventId)
         if (isAlreadyClaimed) {
             log.warn { "Process is already claimed!" }
             return null
         }
 
-        if (runnerJob?.isActive != true) {
+        if (runner?.isWorking() != true) {
             startExtract(event)
         } else {
             log.warn { "Worker is already running.." }
@@ -82,7 +81,7 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
         }
 
 
-        val setClaim = persistentWriter.setProcessEventClaim(referenceId = event.referenceId, eventId = event.eventId, claimedBy = serviceId)
+        val setClaim = eventManager.setProcessEventClaim(referenceId = event.referenceId, eventId = event.eventId, claimer = serviceId)
         if (setClaim) {
             log.info { "Claim successful for ${event.referenceId} extract" }
             runner = FfmpegWorker(event.referenceId, event.eventId, info = ffwrc, logDir = logDir, listener = ffmpegWorkerEvents)
@@ -90,12 +89,10 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
             if (File(ffwrc.outFile).exists() && ffwrc.arguments.firstOrNull() != "-y") {
                 ffmpegWorkerEvents.onError(event.referenceId, event.eventId, ffwrc, "${this::class.java.simpleName} identified the file as already existing, either allow overwrite or delete the offending file: ${ffwrc.outFile}")
                 // Setting consumed to prevent spamming
-                persistentWriter.setProcessEventCompleted(event.referenceId, event.eventId, serviceId)
+                eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
                 return
             }
-            runnerJob = scope.launch {
-                runner!!.run()
-            }
+            runner!!.run()
         } else {
             log.error { "Failed to set claim on referenceId: ${event.referenceId} on event ${event.event}" }
         }
@@ -110,7 +107,7 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
             return
         }
         log.info { "Extract started for ${runner.referenceId}" }
-        persistentWriter.setProcessEventClaim(runner.referenceId, runner.eventId, serviceId)
+        eventManager.setProcessEventClaim(runner.referenceId, runner.eventId, serviceId)
         sendProgress(referenceId, eventId, WorkStatus.Started, info)
     }
 
@@ -121,12 +118,12 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
             return
         }
         log.info { "Extract completed for ${runner.referenceId}" }
-        var consumedIsSuccessful = persistentWriter.setProcessEventCompleted(runner.referenceId, runner.eventId, serviceId)
+        var consumedIsSuccessful = eventManager.setProcessEventCompleted(runner.referenceId, runner.eventId)
         runBlocking {
 
             delay(1000)
             limitedWhile({!consumedIsSuccessful}, 1000 * 10, 1000) {
-                consumedIsSuccessful = persistentWriter.setProcessEventCompleted(runner.referenceId, runner.eventId, serviceId)
+                consumedIsSuccessful = eventManager.setProcessEventCompleted(runner.referenceId, runner.eventId)
             }
 
             log.info { "Database is reporting extract on ${runner.referenceId} as ${if (consumedIsSuccessful) "CONSUMED" else "NOT CONSUMED"}" }
@@ -134,9 +131,9 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
 
 
 
-            var readbackIsSuccess = persistentReader.isProcessEventDefinedAsConsumed(runner.referenceId, runner.eventId, serviceId)
+            var readbackIsSuccess = eventManager.isProcessEventCompleted(runner.referenceId, runner.eventId)
             limitedWhile({!readbackIsSuccess}, 1000 * 30, 1000) {
-                readbackIsSuccess = persistentReader.isProcessEventDefinedAsConsumed(runner.referenceId, runner.eventId, serviceId)
+                readbackIsSuccess = eventManager.isProcessEventCompleted(runner.referenceId, runner.eventId)
                 log.info { readbackIsSuccess }
             }
             log.info { "Database is reporting readback for extract on ${runner.referenceId} as ${if (readbackIsSuccess) "CONSUMED" else "NOT CONSUMED"}" }
@@ -189,13 +186,12 @@ class ExtractService(@Autowired override var coordinator: Coordinator, @Autowire
 
 
     fun clearWorker() {
-        this.runner?.scope?.cancel()
         this.runner = null
     }
 
     @PreDestroy
     fun shutdown() {
         scope.cancel()
-        runner?.scope?.cancel("Stopping application")
+        runner?.cancel("Stopping application")
     }
 }
@@ -0,0 +1,4 @@
+package no.iktdev.mediaprocessing.processer.services
+
+class EncodeServiceTest {
+}
@ -59,7 +59,7 @@ class Coordinator(@Autowired private val eventbasedTopic: EventbasedTopic) : Coo

private fun getCurrentState(events: List<PersistentMessage>, processes: Map<String, EventSummarySubItem>): SummaryState {
val stored = events.findLast { it.event == KafkaEvents.EVENT_COLLECT_AND_STORE }
val started = events.findLast { it.event == KafkaEvents.EVENT_MEDIA_PROCESS_STARTED }
val started = events.findLast { it.event == KafkaEvents.EventMediaProcessStarted }
val completedMediaEvent = events.findLast { it.event == KafkaEvents.EVENT_MEDIA_PROCESS_COMPLETED }
val completedRequestEvent = events.findLast { it.event == KafkaEvents.EVENT_REQUEST_PROCESS_COMPLETED }

@ -79,9 +79,9 @@ class Coordinator(@Autowired private val eventbasedTopic: EventbasedTopic) : Coo
}

val workPrepared = events.filter { it.event in listOf(
KafkaEvents.EVENT_WORK_EXTRACT_CREATED,
KafkaEvents.EventWorkExtractCreated,
KafkaEvents.EVENT_WORK_CONVERT_CREATED,
KafkaEvents.EventWorkConvertCreated,
KafkaEvents.EVENT_WORK_ENCODE_CREATED
KafkaEvents.EventWorkEncodeCreated
) }
if (workPrepared.isNotEmpty()) {
return SummaryState.Pending
@ -92,29 +92,29 @@ class Coordinator(@Autowired private val eventbasedTopic: EventbasedTopic) : Coo
}

val perparation = events.filter { it.event in listOf(
KafkaEvents.EVENT_MEDIA_EXTRACT_PARAMETER_CREATED,
KafkaEvents.EventMediaParameterExtractCreated,
KafkaEvents.EVENT_MEDIA_ENCODE_PARAMETER_CREATED,
KafkaEvents.EventMediaParameterEncodeCreated,
) }
if (perparation.isNotEmpty()) {
return SummaryState.Preparing
}

val analyzed2 = events.findLast { it.event in listOf(KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE) }
val analyzed2 = events.findLast { it.event in listOf(KafkaEvents.EventMediaReadOutNameAndType) }
if (analyzed2 != null) {
return SummaryState.Analyzing
}

val waitingForMeta = events.findLast { it.event == KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED }
val waitingForMeta = events.findLast { it.event == KafkaEvents.EventMediaMetadataSearchPerformed }
if (waitingForMeta != null) {
return SummaryState.Metadata
}

val analyzed = events.findLast { it.event in listOf(KafkaEvents.EVENT_MEDIA_PARSE_STREAM_PERFORMED, KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED, KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE) }
val analyzed = events.findLast { it.event in listOf(KafkaEvents.EventMediaParseStreamPerformed, KafkaEvents.EventMediaReadBaseInfoPerformed, KafkaEvents.EventMediaReadOutNameAndType) }
if (analyzed != null) {
return SummaryState.Analyzing
}

val readEvent = events.findLast { it.event == KafkaEvents.EVENT_MEDIA_READ_STREAM_PERFORMED }
val readEvent = events.findLast { it.event == KafkaEvents.EventMediaReadStreamPerformed }
if (readEvent != null) {
return SummaryState.Read
}
@ -133,10 +133,10 @@ class Coordinator(@Autowired private val eventbasedTopic: EventbasedTopic) : Coo
val processesStatuses = getCurrentStateFromProcesserEvents(procM)
val messageStatus = getCurrentState(it, processesStatuses)

val baseNameEvent = it.lastOrNull { ke -> ke.event == KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED }?.data.let { data ->
val baseNameEvent = it.lastOrNull { ke -> ke.event == KafkaEvents.EventMediaReadBaseInfoPerformed }?.data.let { data ->
if (data is BaseInfoPerformed) data else null
}
val mediaNameEvent = it.lastOrNull { ke -> ke.event == KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE }?.data.let { data ->
val mediaNameEvent = it.lastOrNull { ke -> ke.event == KafkaEvents.EventMediaReadOutNameAndType }?.data.let { data ->
if (data is VideoInfoPerformed) data else null
}

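getCurrentState checks the most advanced pipeline stage first and falls back step by step. A condensed, hypothetical restatement of that precedence, using only the states and renamed events visible in this hunk (illustrative, not the actual method):

// Hypothetical sketch: first matching branch (most advanced stage) wins.
fun precedenceSketch(events: List<PersistentMessage>): SummaryState? = when {
    events.any { it.event in listOf(
        KafkaEvents.EventWorkExtractCreated,
        KafkaEvents.EventWorkConvertCreated,
        KafkaEvents.EventWorkEncodeCreated) } -> SummaryState.Pending
    events.any { it.event in listOf(
        KafkaEvents.EventMediaParameterExtractCreated,
        KafkaEvents.EventMediaParameterEncodeCreated) } -> SummaryState.Preparing
    events.any { it.event == KafkaEvents.EventMediaReadOutNameAndType } -> SummaryState.Analyzing
    events.any { it.event == KafkaEvents.EventMediaMetadataSearchPerformed } -> SummaryState.Metadata
    events.any { it.event in listOf(
        KafkaEvents.EventMediaParseStreamPerformed,
        KafkaEvents.EventMediaReadBaseInfoPerformed) } -> SummaryState.Analyzing
    events.any { it.event == KafkaEvents.EventMediaReadStreamPerformed } -> SummaryState.Read
    else -> null // earlier and later branches of the real method omitted here
}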
@ -1,7 +1,7 @@
plugins {
id("java")
kotlin("jvm")
id("org.jetbrains.kotlin.plugin.serialization") version "1.5.0" // Add the Kotlin Serialization plugin
}

group = "no.iktdev.mediaprocessing.shared"
@ -48,9 +48,13 @@ dependencies {

testImplementation("io.mockk:mockk:1.12.0")
testImplementation("com.h2database:h2:1.4.200")
testImplementation("org.assertj:assertj-core:3.4.1")

testImplementation("org.junit.jupiter:junit-jupiter-api:5.7.2")
testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.7.2")
testImplementation("io.kotlintest:kotlintest-assertions:3.3.2")
testImplementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.3.0")
}

tasks.test {
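The new plugin plus the kotlinx-serialization-json test dependency enable @Serializable types, which the JSON-dump helper added later in this commit relies on. A minimal sketch of what they unlock (the data class here is illustrative, not part of the commit):

// Illustrative only; ClaimDump is a hypothetical type.
import kotlinx.serialization.Serializable
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json

@Serializable
data class ClaimDump(val referenceId: String, val eventId: String)

fun main() {
    val json = Json.encodeToString(ClaimDump("ref-1", "evt-1"))
    println(json) // {"referenceId":"ref-1","eventId":"evt-1"}
}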
@ -20,6 +20,8 @@ abstract class DataSource(val config: DatabaseConnectionConfig) {

abstract fun toConnectionUrl(): String

abstract fun toDatabaseConnectionUrl(database: String): String

fun toPortedAddress(): String {
var baseAddress = config.address
if (!config.port.isNullOrBlank()) {
@ -17,7 +17,7 @@ open class MySqlDataSource(conf: DatabaseConnectionConfig): DataSource(conf) {
override fun createDatabase(): Database? {
val ok = transaction(toDatabaseServerConnection()) {
val tmc = TransactionManager.current().connection
val query = "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${config.databaseName}'"
val query = "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${config.databaseName}';"
val stmt = tmc.prepareStatement(query, true)

val resultSet = stmt.executeQuery()
@ -52,7 +52,7 @@ open class MySqlDataSource(conf: DatabaseConnectionConfig): DataSource(conf) {
}

override fun createDatabaseStatement(): String {
return "CREATE DATABASE ${config.databaseName}"
return "CREATE DATABASE ${config.databaseName};"
}

protected fun toDatabaseServerConnection(): Database {
@ -66,7 +66,7 @@ open class MySqlDataSource(conf: DatabaseConnectionConfig): DataSource(conf) {

override fun toDatabase(): Database {
val database = Database.connect(
"${toConnectionUrl()}/${config.databaseName}",
toDatabaseConnectionUrl(config.databaseName),
user = config.username,
password = config.password
)
@ -74,6 +74,10 @@ open class MySqlDataSource(conf: DatabaseConnectionConfig): DataSource(conf) {
return database
}

override fun toDatabaseConnectionUrl(database: String): String {
return toConnectionUrl() + "/$database"
}

override fun toConnectionUrl(): String {
return "jdbc:mysql://${toPortedAddress()}"
}
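For orientation, the URLs these two methods now produce, assuming an illustrative config with address "db.local" and port "3306" (the host and port are examples, not from the commit):

// toConnectionUrl()                 == "jdbc:mysql://db.local:3306"
// toDatabaseConnectionUrl("events") == "jdbc:mysql://db.local:3306/events"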
@ -1,10 +1,12 @@
package no.iktdev.mediaprocessing.shared.common.datasource

import org.jetbrains.exposed.exceptions.ExposedSQLException
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.Table

import org.jetbrains.exposed.sql.transactions.transaction
import java.sql.Connection
import java.sql.SQLIntegrityConstraintViolationException

open class TableDefaultOperations<T : Table> {

@ -46,6 +48,11 @@ fun <T> withTransaction(db: Database? = null, block: () -> T): T? {
null
}
}
fun <T> withTransaction(db: DataSource? = null, block: () -> T): T? {
return withTransaction(db?.database, block)
}

fun <T> insertWithSuccess(db: Database? = null, block: () -> T): Boolean {
return try {
@ -125,6 +132,18 @@ fun <T> executeWithStatus(db: Database? = null, block: () -> T): Boolean {
}
}

fun <T> executeWithStatus(db: DataSource? = null, block: () -> T): Boolean {
return executeWithStatus(db?.database, block)
}

fun Exception.isExposedSqlException(): Boolean {
return this is ExposedSQLException
}

fun ExposedSQLException.isCausedByDuplicateError(): Boolean {
return if (this.cause is SQLIntegrityConstraintViolationException) {
return this.errorCode == 1062
} else false
}

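A usage sketch for the new DataSource overload and the duplicate-detection helpers; the call site, the `ds` value, and the log call are assumptions, while executeOrException is used elsewhere in this commit:

// Hypothetical call site; `ds` is any DataSource from this module.
val count = withTransaction(ds) {
    processerEvents.selectAll().count()
}

val error = executeOrException(ds.database) {
    processerEvents.insert { /* ... row with a duplicate key ... */ }
}
if (error != null && error.isExposedSqlException()
    && (error as ExposedSQLException).isCausedByDuplicateError()) {
    log.info { "Row already present, treating insert as a no-op" }
}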
@ -11,6 +11,7 @@ import java.time.LocalDateTime
class PersistentDataReader(var dataSource: DataSource) {
val dzz = DeserializingRegistry()

@Deprecated("Use PersistentEventManager.getAllEventsGrouped")
fun getAllMessages(): List<List<PersistentMessage>> {
val events = withTransaction(dataSource.database) {
events.selectAll()
@ -19,6 +20,7 @@ class PersistentDataReader(var dataSource: DataSource) {
return events?.mapNotNull { it.value.mapNotNull { v -> fromRowToPersistentMessage(v, dzz) } } ?: emptyList()
}

@Deprecated("Use PersistentEventManager.getEventsWith")
fun getMessagesFor(referenceId: String): List<PersistentMessage> {
return withTransaction(dataSource.database) {
events.select { events.referenceId eq referenceId }
@ -27,6 +29,7 @@ class PersistentDataReader(var dataSource: DataSource) {
} ?: emptyList()
}

@Deprecated("Use PersistentEventManager.getEventsUncompleted")
fun getUncompletedMessages(): List<List<PersistentMessage>> {
val result = withDirtyRead(dataSource.database) {
events.selectAll()
@ -37,6 +40,7 @@ class PersistentDataReader(var dataSource: DataSource) {
return result
}

@Deprecated(message = "Use PersistentEventManager.isProcessEventClaimed")
fun isProcessEventAlreadyClaimed(referenceId: String, eventId: String): Boolean {
val result = withDirtyRead(dataSource.database) {
processerEvents.select {
@ -47,6 +51,7 @@ class PersistentDataReader(var dataSource: DataSource) {
return result?.claimed ?: true
}

@Deprecated(message = "Use PersistentEventManager.isProcessEventCompleted")
fun isProcessEventDefinedAsConsumed(referenceId: String, eventId: String, claimedBy: String): Boolean {
return withDirtyRead(dataSource.database) {
processerEvents.select {
@ -57,6 +62,7 @@ class PersistentDataReader(var dataSource: DataSource) {
}?.singleOrNull()?.consumed ?: false
}

@Deprecated(message = "Use PersistentEventManager.getProcessEventsClaimable")
fun getAvailableProcessEvents(): List<PersistentProcessDataMessage> {
return withDirtyRead(dataSource.database) {
processerEvents.select {
@ -66,6 +72,7 @@ class PersistentDataReader(var dataSource: DataSource) {
} ?: emptyList()
}

@Deprecated("Use PersistentEventManager.getProcessEventsWithExpiredClaim")
fun getExpiredClaimsProcessEvents(): List<PersistentProcessDataMessage> {
val deadline = LocalDateTime.now()
val entries = withTransaction(dataSource.database) {
@ -77,6 +84,7 @@ class PersistentDataReader(var dataSource: DataSource) {
return entries.filter { it.lastCheckIn == null || it.lastCheckIn.plusMinutes(15) < deadline }
}

@Deprecated("Use PersistentEventManager.getProcessEventWith")
fun getProcessEvent(referenceId: String, eventId: String): PersistentProcessDataMessage? {
val message = withDirtyRead(dataSource.database) {
processerEvents.select {
@ -87,6 +95,7 @@ class PersistentDataReader(var dataSource: DataSource) {
return message
}

@Deprecated("Use PersistentEventManager.getAllProcessEvents")
fun getProcessEvents(): List<PersistentProcessDataMessage> {
return withTransaction(dataSource.database) {
processerEvents.selectAll()
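A before/after sketch for one of the deprecated pairs; the call site itself is assumed:

// Hypothetical call site migrating off the deprecated reader:
val before = persistentReader.getUncompletedMessages()   // deprecated
val after = eventManager.getEventsUncompleted()          // replacement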
@ -0,0 +1,292 @@
package no.iktdev.mediaprocessing.shared.common.persistance

import kotlinx.coroutines.launch
import mu.KotlinLogging
import no.iktdev.exfl.coroutines.Coroutines
import no.iktdev.mediaprocessing.shared.common.datasource.*
import no.iktdev.mediaprocessing.shared.kafka.core.DeserializingRegistry
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.Message
import org.jetbrains.exposed.exceptions.ExposedSQLException
import org.jetbrains.exposed.sql.*
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
import org.jetbrains.exposed.sql.javatime.CurrentDateTime
import java.sql.SQLIntegrityConstraintViolationException
import java.time.LocalDateTime
import javax.xml.crypto.Data
import kotlin.coroutines.coroutineContext

private val log = KotlinLogging.logger {}

class PersistentEventManager(private val dataSource: DataSource) {
val dzz = DeserializingRegistry()

/**
* Deletes the given superseded events from the events table.
*/
private fun deleteSupersededEvents(superseded: List<PersistentMessage>) {
withTransaction(dataSource) {
superseded.forEach { duplicate ->
events.deleteWhere {
(events.referenceId eq duplicate.referenceId) and
(events.eventId eq duplicate.eventId) and
(events.event eq duplicate.event.event)
}
}
}
}

/**
* @param referenceId Reference
* @param eventId Current eventId for the message, required to prevent deletion of itself
* @param event Current event for the message
*/
private fun deleteSupersededEvents(referenceId: String, eventId: String, event: KafkaEvents) {
val present = getEventsWith(referenceId).filter { it.eventId != eventId }

val superseded = present.filter { it.event == event && it.eventId != eventId }
val availableForRemoval = mutableListOf<PersistentMessage>()
val helper = PersistentMessageHelper(present)
superseded.forEach { availableForRemoval.addAll(helper.getCascadingFrom(it.eventId)) }

deleteSupersededEvents(availableForRemoval)
}

//region Database read

fun getEventsWith(referenceId: String): List<PersistentMessage> {
return withDirtyRead(dataSource.database) {
events.select {
(events.referenceId eq referenceId)
}
.orderBy(events.created, SortOrder.ASC)
.toPersistentMessage(dzz)
} ?: emptyList()
}

fun getProcessEventWith(referenceId: String, eventId: String): PersistentProcessDataMessage? {
return withDirtyRead(dataSource.database) {
processerEvents.select {
(processerEvents.referenceId eq referenceId) and
(processerEvents.eventId eq eventId)
}.toPersistentProcesserMessage(dzz)
}?.singleOrNull()
}

fun getAllEvents(): List<PersistentMessage> {
return withDirtyRead(dataSource.database) {
events.selectAll()
.toPersistentMessage(dzz)
} ?: emptyList()
}

fun getAllEventsGrouped(): List<List<PersistentMessage>> {
return getAllEvents().toGrouped()
}

fun getAllProcessEvents(): List<PersistentProcessDataMessage> {
return withDirtyRead(dataSource.database) {
processerEvents.selectAll()
.toPersistentProcesserMessage(dzz)
} ?: emptyList()
}

fun getEventsUncompleted(): List<List<PersistentMessage>> {
val identifiesAsCompleted = listOf(
KafkaEvents.EVENT_REQUEST_PROCESS_COMPLETED,
KafkaEvents.EVENT_MEDIA_PROCESS_COMPLETED,
KafkaEvents.EVENT_COLLECT_AND_STORE
)
val all = getAllEventsGrouped()
return all.filter { entry -> entry.none { it.event in identifiesAsCompleted } }
}

fun getProcessEventsUncompleted(): List<PersistentProcessDataMessage> {
return withTransaction(dataSource.database) {
processerEvents.select {
(processerEvents.consumed eq false)
}.toPersistentProcesserMessage(dzz)
} ?: emptyList()
}

fun getProcessEventsClaimable(): List<PersistentProcessDataMessage> {
return withTransaction(dataSource.database) {
processerEvents.select {
(processerEvents.consumed eq false) and
(processerEvents.claimed eq false)
}.toPersistentProcesserMessage(dzz)
} ?: emptyList()
}

fun getProcessEventsWithExpiredClaim(): List<PersistentProcessDataMessage> {
val deadline = LocalDateTime.now()
return getProcessEventsUncompleted()
.filter { it.claimed && if (it.lastCheckIn != null) it.lastCheckIn.plusMinutes(15) < deadline else true }
}

fun isProcessEventClaimed(referenceId: String, eventId: String): Boolean {
return getProcessEventWith(referenceId, eventId)?.claimed ?: false
}

fun isProcessEventCompleted(referenceId: String, eventId: String): Boolean {
return getProcessEventWith(referenceId, eventId)?.consumed ?: false
}

//endregion

//region Database write

/**
* Stores the kafka event and its data in the database as PersistentMessage
* @param event KafkaEvents
* @param message Kafka message object
*/
fun setEvent(event: KafkaEvents, message: Message<*>): Boolean {
val existing = getEventsWith(message.referenceId)
val derivedId = message.data?.derivedFromEventId
if (derivedId != null) {
val isNewEventOrphan = existing.none { it.eventId == derivedId }
if (isNewEventOrphan) {
log.warn { "Message not saved! ${message.referenceId} with eventId(${message.eventId}) has derivedEventId($derivedId) which does not exist!" }
return false
}
}

val exception = executeOrException(dataSource.database) {
events.insert {
it[referenceId] = message.referenceId
it[eventId] = message.eventId
it[events.event] = event.event
it[data] = message.dataAsJson()
}
}
val success = if (exception != null) {
if (exception.isExposedSqlException()) {
if ((exception as ExposedSQLException).isCausedByDuplicateError()) {
log.info { "Error is of SQLIntegrityConstraintViolationException" }
} else {
log.info { "Error code is: ${exception.errorCode}" }
exception.printStackTrace()
}
} else {
exception.printStackTrace()
}
false
} else {
true
}
if (success) {
deleteSupersededEvents(referenceId = message.referenceId, eventId = message.eventId, event = event)
}
return success
}

fun setProcessEvent(event: KafkaEvents, message: Message<*>): Boolean {
val exception = executeOrException(dataSource.database) {
processerEvents.insert {
it[processerEvents.referenceId] = message.referenceId
it[processerEvents.eventId] = message.eventId
it[processerEvents.event] = event.event
it[processerEvents.data] = message.dataAsJson()
}
}
return if (exception != null) {
if (exception.isExposedSqlException()) {
if ((exception as ExposedSQLException).isCausedByDuplicateError()) {
log.info { "Error is of SQLIntegrityConstraintViolationException" }
} else {
log.info { "Error code is: ${exception.errorCode}" }
exception.printStackTrace()
}
}
false
} else {
true
}
}

fun setProcessEventClaim(referenceId: String, eventId: String, claimer: String): Boolean {
return executeWithStatus(dataSource.database) {
processerEvents.update({
(processerEvents.referenceId eq referenceId) and
(processerEvents.eventId eq eventId) and
(processerEvents.claimed eq false) and
(processerEvents.consumed eq false)
}) {
it[claimedBy] = claimer
it[lastCheckIn] = CurrentDateTime
it[claimed] = true
}
}
}

fun setProcessEventCompleted(referenceId: String, eventId: String): Boolean {
return executeWithStatus(dataSource) {
processerEvents.update({
(processerEvents.referenceId eq referenceId) and
(processerEvents.eventId eq eventId)
}) {
it[consumed] = true
it[claimed] = true
}
}
}

fun setProcessEventClaimRefresh(referenceId: String, eventId: String, claimer: String): Boolean {
return executeWithStatus(dataSource) {
processerEvents.update({
(processerEvents.referenceId eq referenceId) and
(processerEvents.eventId eq eventId) and
(processerEvents.claimed eq true) and
(processerEvents.claimedBy eq claimer)
}) {
it[lastCheckIn] = CurrentDateTime
}
}
}

/**
* Removes the claim set on the process event
*/
fun deleteProcessEventClaim(referenceId: String, eventId: String): Boolean {
return executeWithStatus(dataSource) {
processerEvents.update({
(processerEvents.referenceId eq referenceId) and
(processerEvents.eventId eq eventId)
}) {
it[claimed] = false
it[claimedBy] = null
it[lastCheckIn] = null
}
}
}

fun deleteProcessEvent(referenceId: String, eventId: String): Boolean {
return executeWithStatus(dataSource) {
processerEvents.deleteWhere {
(processerEvents.referenceId eq referenceId) and
(processerEvents.eventId eq eventId)
}
}
}

//endregion

}

fun List<PersistentMessage>?.toGrouped(): List<List<PersistentMessage>> {
return this?.groupBy { it.referenceId }?.mapNotNull { it.value } ?: emptyList()
}

fun Query?.toPersistentMessage(dzz: DeserializingRegistry): List<PersistentMessage> {
return this?.mapNotNull { fromRowToPersistentMessage(it, dzz) } ?: emptyList()
}

fun Query?.toPersistentProcesserMessage(dzz: DeserializingRegistry): List<PersistentProcessDataMessage> {
return this?.mapNotNull { fromRowToPersistentProcessDataMessage(it, dzz) } ?: emptyList()
}
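A sketch of the claim lifecycle these methods are built for, composed only from the class's own API; the loop shape and the serviceId identifier are assumptions:

// Hypothetical worker loop; serviceId is an assumed worker identifier.
val claimable = eventManager.getProcessEventsClaimable()
claimable.firstOrNull()?.let { event ->
    if (eventManager.setProcessEventClaim(event.referenceId, event.eventId, serviceId)) {
        // ... perform the work, refreshing the claim so it does not expire ...
        eventManager.setProcessEventClaimRefresh(event.referenceId, event.eventId, serviceId)
        eventManager.setProcessEventCompleted(event.referenceId, event.eventId)
    }
}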
@ -38,7 +38,40 @@ fun PersistentMessage.isSkipped(): Boolean {
}
}

class PersistentMessageHelper(val messages: List<PersistentMessage>) {

fun findOrphanedEvents(): List<PersistentMessage> {
val withDerivedId = messages.filter { it.data.derivedFromEventId != null }
val idsFlat = messages.map { it.eventId }
return withDerivedId.filter { it.data.derivedFromEventId !in idsFlat }
}

fun getCascadingFrom(eventId: String): List<PersistentMessage> {
val triggered = messages.firstOrNull { it.eventId == eventId } ?: return emptyList()
val usableEvents = messages.filter { it.eventId != eventId && it.data.derivedFromEventId != null }

val derivedEventsMap = mutableMapOf<String, MutableList<String>>()
for (event in usableEvents) {
derivedEventsMap.getOrPut(event.data.derivedFromEventId!!) { mutableListOf() }.add(event.eventId)
}
val eventsToDelete = mutableSetOf<String>()

// Run a DFS to find all derived events that should be deleted
dfs(triggered.eventId, derivedEventsMap, eventsToDelete)

return messages.filter { it.eventId in eventsToDelete }
}

/**
* @param eventId Initial eventId
*/
fun dfs(eventId: String, derivedEventsMap: Map<String, List<String>>, eventsToDelete: MutableSet<String>) {
eventsToDelete.add(eventId)
derivedEventsMap[eventId]?.forEach { derivedEventId ->
dfs(derivedEventId, derivedEventsMap, eventsToDelete)
}
}
}

fun fromRowToPersistentMessage(row: ResultRow, dez: DeserializingRegistry): PersistentMessage? {
val kev = try {
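A worked example of the cascade: with three stored events where B derives from A and C derives from B, asking for the cascade from B returns B plus everything transitively derived from it, while A is untouched.

// Hypothetical chain: A <- B (derivedFromEventId = A.eventId) <- C (derivedFromEventId = B.eventId)
// PersistentMessageHelper(listOf(a, b, c)).getCascadingFrom(b.eventId)
//   returns [b, c]; a stays in place.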
@ -25,7 +25,7 @@ abstract class TaskCreatorImpl<C : CoordinatorBase<V, L>, V, L : EventBasedMessa
return Tasks(taskHandler = this, producesEvent = producesEvent, listensForEvents = reactableEvents)
}
@PostConstruct
fun attachListener() {
open fun attachListener() {
coordinator.listeners.add(getListener())
}

@ -11,9 +11,21 @@ import javax.sql.DataSource

class H2DataSource(private val jdbcDataSource: JdbcDataSource, databaseName: String) : DataSource, MySqlDataSource(
DatabaseConnectionConfig(
databaseName = databaseName, address = jdbcDataSource.getUrl(), username = jdbcDataSource.user, password = jdbcDataSource.password, port = null
databaseName = databaseName, address = jdbcDataSource.getUrl(), username = jdbcDataSource.user, password = "", port = null
)
) {

companion object {
val connectionUrl = "jdbc:h2:test;MODE=MySQL" //"jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;"
fun getDatasource(): JdbcDataSource {
val ds = JdbcDataSource()
ds.setUrl(connectionUrl)
ds.user = "test"
ds.password = ""
return ds
}
}

override fun getConnection(): Connection {
return jdbcDataSource.connection
}
@ -61,6 +73,6 @@ class H2DataSource(private val jdbcDataSource: JdbcDataSource, databaseName: Str
}

override fun toConnectionUrl(): String {
return "jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;"
return connectionUrl // "jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;"
}
}
@ -0,0 +1,23 @@
package no.iktdev.mediaprocessing.shared.common

import no.iktdev.mediaprocessing.shared.common.datasource.DatabaseConnectionConfig
import no.iktdev.mediaprocessing.shared.common.datasource.MySqlDataSource
import org.jetbrains.exposed.sql.Database

class H2DataSource2(conf: DatabaseConnectionConfig): MySqlDataSource(conf) {

override fun createDatabaseStatement(): String {
return "CREATE SCHEMA ${config.databaseName};"
}

override fun toDatabaseConnectionUrl(database: String): String {
return toConnectionUrl()
}
override fun toDatabase(): Database {
return super.toDatabase()
}
override fun toConnectionUrl(): String {
return "jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;CASE_INSENSITIVE_IDENTIFIERS=TRUE;"
}

}
@ -0,0 +1,56 @@
package no.iktdev.mediaprocessing.shared.common

import kotlinx.serialization.json.*
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.persistance.events
import no.iktdev.mediaprocessing.shared.kafka.core.DeserializingRegistry
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import org.json.JSONArray
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

class PersistentMessageFromJsonDump(events: String) {
private var data: JsonArray?

init {
val jsonArray = Json.parseToJsonElement(events) as JsonArray
data = jsonArray.firstOrNull { it.jsonObject["data"] != null }?.jsonObject?.get("data") as? JsonArray
}

fun getPersistentMessages(): List<PersistentMessage> {
return data?.mapNotNull {
try {
mapToPersistentMessage(it)
} catch (e: Exception) {
System.err.print(it.toString())
e.printStackTrace()
null
}
} ?: emptyList()
}

val dzz = DeserializingRegistry()
val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS")
private fun mapToPersistentMessage(e: JsonElement): PersistentMessage? {
val referenceId: String = e.jsonObject["referenceId"]?.jsonPrimitive?.content ?: throw RuntimeException("No ReferenceId found")
val eventId: String = e.jsonObject["eventId"]?.jsonPrimitive?.content ?: throw RuntimeException("No EventId")
val event: String = e.jsonObject["event"]?.jsonPrimitive?.content ?: throw RuntimeException("No Event")
val data: String = e.jsonObject["data"]?.jsonPrimitive?.content ?: throw RuntimeException("No data")
val created: String = e.jsonObject["created"]?.jsonPrimitive?.content ?: throw RuntimeException("No Created date time found")

val kev = KafkaEvents.toEvent(event) ?: throw RuntimeException("Not able to convert event to Enum")
val dzdata = dzz.deserializeData(kev, data)

return PersistentMessage(
referenceId = referenceId,
eventId = eventId,
event = kev,
data = dzdata,
created = LocalDateTime.parse(created, formatter)
)

}

}
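A usage sketch for the dump reader; the file name is an assumption, and the dump is expected to match the JSON shape the init block parses:

// Hypothetical entry point; "events-dump.json" is illustrative.
import java.io.File

fun main() {
    val dump = File("events-dump.json").readText()
    val restored = PersistentMessageFromJsonDump(dump).getPersistentMessages()
    println("Restored ${restored.size} persistent messages")
}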
@ -0,0 +1,257 @@
package no.iktdev.mediaprocessing.shared.common.tests

import no.iktdev.mediaprocessing.shared.common.H2DataSource2
import no.iktdev.mediaprocessing.shared.common.datasource.DatabaseConnectionConfig
import no.iktdev.mediaprocessing.shared.common.datasource.withTransaction
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentEventManager
import no.iktdev.mediaprocessing.shared.common.persistance.events
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.Message
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import org.junit.jupiter.api.Test
import java.util.UUID
import org.assertj.core.api.Assertions.assertThat
import org.jetbrains.exposed.sql.deleteAll

class PersistentEventMangerTest {
val defaultReferenceId = UUID.randomUUID().toString()
val dataSource = H2DataSource2(DatabaseConnectionConfig(
address = "",
username = "",
password = "",
databaseName = "test",
port = null
))
val eventManager: PersistentEventManager = PersistentEventManager(dataSource)

init {
val kafkaTables = listOf(
events, // For kafka
)
dataSource.createDatabase()
dataSource.createTables(*kafkaTables.toTypedArray())
}

@Test
fun testDatabaseIsCreated() {
val success = dataSource.createDatabase()
assertThat(success).isNotNull()
}

@Test
fun testDatabaseInit() {
val referenceId = UUID.randomUUID().toString()
val mStart = Message<MediaProcessStarted>(
referenceId = referenceId,
eventId = UUID.randomUUID().toString(),
data = MediaProcessStarted(
status = Status.COMPLETED,
file = "Nan"
)
)
eventManager.setEvent(KafkaEvents.EventMediaProcessStarted, mStart)
val stored = eventManager.getEventsWith(referenceId)
assertThat(stored).isNotEmpty()
}

@Test
fun testSuperseded1() {
val startEvent = EventToMessage(KafkaEvents.EventMediaProcessStarted, createMessage())
val oldStack = listOf(
EventToMessage(KafkaEvents.EventMediaReadStreamPerformed,
createMessage(eventId = "48c72454-6c7b-406b-b598-fc0a961dabde", derivedFromEventId = startEvent.message.eventId)),
EventToMessage(KafkaEvents.EventMediaParseStreamPerformed,
createMessage(eventId = "1d8d995d-a7e4-4d6e-a501-fe82f521cf72", derivedFromEventId = "48c72454-6c7b-406b-b598-fc0a961dabde")),
EventToMessage(KafkaEvents.EventMediaReadBaseInfoPerformed,
createMessage(eventId = "f6cae204-7c8e-4003-b598-f7b4e566d03e", derivedFromEventId = "1d8d995d-a7e4-4d6e-a501-fe82f521cf72")),
EventToMessage(KafkaEvents.EventMediaMetadataSearchPerformed,
createMessage(eventId = "cbb1e871-e9a5-496d-a655-db719ac4903c", derivedFromEventId = "f6cae204-7c8e-4003-b598-f7b4e566d03e")),
EventToMessage(KafkaEvents.EventMediaReadOutNameAndType,
createMessage(eventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c")),
EventToMessage(KafkaEvents.EventMediaReadOutCover,
createMessage(eventId = "98a39721-41ff-4d79-905e-ced260478524", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c")),

EventToMessage(KafkaEvents.EventMediaParameterEncodeCreated,
createMessage(eventId = "9e8f2e04-4950-437f-a203-cfd566203078", derivedFromEventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9")),
EventToMessage(KafkaEvents.EventMediaParameterExtractCreated,
createMessage(eventId = "af7f2519-0f1d-4679-82bd-0314d1b97b68", derivedFromEventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9")),
)
eventManager.setEvent(startEvent.event, startEvent.message)
for (entry in oldStack) {
eventManager.setEvent(entry.event, entry.message)
}
val currentTableWithOldStack = eventManager.getEventsWith(defaultReferenceId)
assertThat(currentTableWithOldStack).hasSize(oldStack.size + 1)

val supersedingStack = listOf(
EventToMessage(KafkaEvents.EventMediaReadOutNameAndType,
createMessage(eventId = "2c3a40bb-2225-4dd4-a8c3-32c6356f8764", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c"))
).forEach { entry -> eventManager.setEvent(entry.event, entry.message) }

// Final check

val result = eventManager.getEventsWith(defaultReferenceId)
val idsThatShouldBeRemoved = listOf(
"9e8f2e04-4950-437f-a203-cfd566203078",
"af7f2519-0f1d-4679-82bd-0314d1b97b68"
)
val search = result.filter { it.eventId in idsThatShouldBeRemoved }
assertThat(search).isEmpty()

val expectedInList = listOf(
startEvent.message.eventId,
"48c72454-6c7b-406b-b598-fc0a961dabde",
"1d8d995d-a7e4-4d6e-a501-fe82f521cf72",
"f6cae204-7c8e-4003-b598-f7b4e566d03e",
"cbb1e871-e9a5-496d-a655-db719ac4903c",
"98a39721-41ff-4d79-905e-ced260478524",
"2c3a40bb-2225-4dd4-a8c3-32c6356f8764"
)
val searchForExpected = result.map { it.eventId }
assertThat(expectedInList).isEqualTo(searchForExpected)
withTransaction(dataSource) {
events.deleteAll()
}
}

@Test
fun testSuperseded2() {
val startEvent = EventToMessage(KafkaEvents.EventMediaProcessStarted, createMessage()).also {
eventManager.setEvent(it.event, it.message)
}
val keepStack = listOf(
EventToMessage(KafkaEvents.EventMediaReadStreamPerformed,
createMessage(eventId = "48c72454-6c7b-406b-b598-fc0a961dabde", derivedFromEventId = startEvent.message.eventId)),
EventToMessage(KafkaEvents.EventMediaParseStreamPerformed,
createMessage(eventId = "1d8d995d-a7e4-4d6e-a501-fe82f521cf72", derivedFromEventId = "48c72454-6c7b-406b-b598-fc0a961dabde")),
EventToMessage(KafkaEvents.EventMediaReadBaseInfoPerformed,
createMessage(eventId = "f6cae204-7c8e-4003-b598-f7b4e566d03e", derivedFromEventId = "1d8d995d-a7e4-4d6e-a501-fe82f521cf72")),
EventToMessage(KafkaEvents.EventMediaMetadataSearchPerformed,
createMessage(eventId = "cbb1e871-e9a5-496d-a655-db719ac4903c", derivedFromEventId = "f6cae204-7c8e-4003-b598-f7b4e566d03e")),
EventToMessage(KafkaEvents.EventMediaReadOutCover,
createMessage(eventId = "98a39721-41ff-4d79-905e-ced260478524", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c")),
).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }

val toBeReplaced = listOf(
EventToMessage(KafkaEvents.EventMediaReadOutNameAndType,
createMessage(eventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c")),
EventToMessage(KafkaEvents.EventMediaParameterEncodeCreated,
createMessage(eventId = "9e8f2e04-4950-437f-a203-cfd566203078", derivedFromEventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9")),
EventToMessage(KafkaEvents.EventMediaParameterExtractCreated,
createMessage(eventId = "af7f2519-0f1d-4679-82bd-0314d1b97b68", derivedFromEventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9")),
).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }

val currentTableWithOldStack = eventManager.getEventsWith(defaultReferenceId)
assertThat(currentTableWithOldStack).hasSize(keepStack.size + toBeReplaced.size + 1)

val supersedingStack = listOf(
EventToMessage(KafkaEvents.EventMediaReadOutNameAndType,
createMessage(eventId = "2c3a40bb-2225-4dd4-a8c3-32c6356f8764", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c"))
).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }

// Final check

val result = eventManager.getEventsWith(defaultReferenceId)

val idsRemoved = toBeReplaced.map { it.message.eventId }
val search = result.filter { it.eventId in idsRemoved }
assertThat(search).isEmpty()

val expectedInList = listOf(startEvent.message.eventId) + keepStack.map { it.message.eventId } + supersedingStack.map { it.message.eventId }
val searchForExpected = result.map { it.eventId }
assertThat(expectedInList).isEqualTo(searchForExpected)

withTransaction(dataSource) {
events.deleteAll()
}
}

@Test
fun testSuperseded3() {
val startEvent = EventToMessage(KafkaEvents.EventMediaProcessStarted, createMessage()).also {
eventManager.setEvent(it.event, it.message)
}
val keepStack = listOf(
EventToMessage(KafkaEvents.EventMediaReadStreamPerformed,
createMessage(eventId = "48c72454-6c7b-406b-b598-fc0a961dabde", derivedFromEventId = startEvent.message.eventId)),

).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }

val toBeReplaced = listOf(
EventToMessage(KafkaEvents.EventMediaParseStreamPerformed,
createMessage(eventId = "1d8d995d-a7e4-4d6e-a501-fe82f521cf72", derivedFromEventId = "48c72454-6c7b-406b-b598-fc0a961dabde")),
EventToMessage(KafkaEvents.EventMediaReadBaseInfoPerformed,
createMessage(eventId = "f6cae204-7c8e-4003-b598-f7b4e566d03e", derivedFromEventId = "1d8d995d-a7e4-4d6e-a501-fe82f521cf72")),
EventToMessage(KafkaEvents.EventMediaMetadataSearchPerformed,
createMessage(eventId = "cbb1e871-e9a5-496d-a655-db719ac4903c", derivedFromEventId = "f6cae204-7c8e-4003-b598-f7b4e566d03e")),
EventToMessage(KafkaEvents.EventMediaReadOutCover,
createMessage(eventId = "98a39721-41ff-4d79-905e-ced260478524", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c")),
EventToMessage(KafkaEvents.EventMediaReadOutNameAndType,
createMessage(eventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9", derivedFromEventId = "cbb1e871-e9a5-496d-a655-db719ac4903c")),
EventToMessage(KafkaEvents.EventMediaParameterEncodeCreated,
createMessage(eventId = "9e8f2e04-4950-437f-a203-cfd566203078", derivedFromEventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9")),
EventToMessage(KafkaEvents.EventMediaParameterExtractCreated,
createMessage(eventId = "af7f2519-0f1d-4679-82bd-0314d1b97b68", derivedFromEventId = "3f376b72-f55a-4dd7-af87-fb1755ba4ad9")),
).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }

val currentTableWithOldStack = eventManager.getEventsWith(defaultReferenceId)
assertThat(currentTableWithOldStack).hasSize(keepStack.size + toBeReplaced.size + 1)

val supersedingStack = listOf(
EventToMessage(KafkaEvents.EventMediaParseStreamPerformed,
createMessage(eventId = "2c3a40bb-2225-4dd4-a8c3-32c6356f8764", derivedFromEventId = "48c72454-6c7b-406b-b598-fc0a961dabde"))
).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }

// Final check

val result = eventManager.getEventsWith(defaultReferenceId)

val idsRemoved = toBeReplaced.map { it.message.eventId }
val search = result.filter { it.eventId in idsRemoved }
assertThat(search).isEmpty()

val expectedInList = listOf(startEvent.message.eventId) + keepStack.map { it.message.eventId } + supersedingStack.map { it.message.eventId }
val searchForExpected = result.map { it.eventId }
assertThat(expectedInList).isEqualTo(searchForExpected)

withTransaction(dataSource) {
events.deleteAll()
}
}

@Test
fun testDerivedOrphanNotInserted() {
val startEvent = EventToMessage(KafkaEvents.EventMediaProcessStarted, createMessage()).also {
eventManager.setEvent(it.event, it.message)
}
val result = eventManager.setEvent(KafkaEvents.EventMediaReadStreamPerformed,
createMessage(derivedFromEventId = UUID.randomUUID().toString()))
assertThat(result).isFalse()
}

data class EventToMessage(val event: KafkaEvents, val message: Message<*>)

private fun createMessage(referenceId: String = defaultReferenceId, eventId: String = UUID.randomUUID().toString(), derivedFromEventId: String? = null): Message<SimpleMessageData> {
return Message<SimpleMessageData>(
referenceId = referenceId,
eventId = eventId,
data = SimpleMessageData(
status = Status.COMPLETED,
message = "Potato",
derivedFromEventId = derivedFromEventId
)
)
}

}
@ -15,30 +15,29 @@ class DeserializingRegistry {

companion object {
val deserializables = mutableMapOf(
KafkaEvents.EVENT_MEDIA_PROCESS_STARTED to MediaProcessStarted::class.java,
KafkaEvents.EventMediaProcessStarted to MediaProcessStarted::class.java,
KafkaEvents.EVENT_MEDIA_READ_STREAM_PERFORMED to ReaderPerformed::class.java,
KafkaEvents.EventMediaReadStreamPerformed to ReaderPerformed::class.java,
KafkaEvents.EVENT_MEDIA_PARSE_STREAM_PERFORMED to MediaStreamsParsePerformed::class.java,
KafkaEvents.EventMediaParseStreamPerformed to MediaStreamsParsePerformed::class.java,
KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED to BaseInfoPerformed::class.java,
KafkaEvents.EventMediaReadBaseInfoPerformed to BaseInfoPerformed::class.java,
KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED to MetadataPerformed::class.java,
KafkaEvents.EventMediaMetadataSearchPerformed to MetadataPerformed::class.java,
KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE to VideoInfoPerformed::class.java,
KafkaEvents.EventMediaReadOutNameAndType to VideoInfoPerformed::class.java,
KafkaEvents.EVENT_MEDIA_READ_OUT_COVER to CoverInfoPerformed::class.java,
KafkaEvents.EventMediaReadOutCover to CoverInfoPerformed::class.java,
KafkaEvents.EVENT_MEDIA_ENCODE_PARAMETER_CREATED to FfmpegWorkerArgumentsCreated::class.java,
KafkaEvents.EventMediaParameterEncodeCreated to FfmpegWorkerArgumentsCreated::class.java,
KafkaEvents.EVENT_MEDIA_EXTRACT_PARAMETER_CREATED to FfmpegWorkerArgumentsCreated::class.java,
KafkaEvents.EventMediaParameterExtractCreated to FfmpegWorkerArgumentsCreated::class.java,
KafkaEvents.EVENT_MEDIA_CONVERT_PARAMETER_CREATED to null,
KafkaEvents.EventMediaParameterConvertCreated to null,
KafkaEvents.EVENT_MEDIA_DOWNLOAD_COVER_PARAMETER_CREATED to null,
KafkaEvents.EventMediaParameterDownloadCoverCreated to null,

KafkaEvents.EVENT_WORK_ENCODE_CREATED to FfmpegWorkRequestCreated::class.java,
KafkaEvents.EventNotificationOfWorkItemRemoval to NotificationOfDeletionPerformed::class.java,
KafkaEvents.EVENT_WORK_EXTRACT_CREATED to FfmpegWorkRequestCreated::class.java,
KafkaEvents.EVENT_WORK_CONVERT_CREATED to ConvertWorkerRequest::class.java,

KafkaEvents.EVENT_WORK_ENCODE_PERFORMED to ProcesserEncodeWorkPerformed::class.java,
KafkaEvents.EventWorkEncodeCreated to FfmpegWorkRequestCreated::class.java,
KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED to ProcesserExtractWorkPerformed::class.java,
KafkaEvents.EventWorkExtractCreated to FfmpegWorkRequestCreated::class.java,
KafkaEvents.EVENT_WORK_CONVERT_PERFORMED to ConvertWorkPerformed::class.java,
KafkaEvents.EventWorkConvertCreated to ConvertWorkerRequest::class.java,
KafkaEvents.EVENT_WORK_DOWNLOAD_COVER_PERFORMED to CoverDownloadWorkPerformed::class.java,
KafkaEvents.EventWorkEncodePerformed to ProcesserEncodeWorkPerformed::class.java,
KafkaEvents.EventWorkExtractPerformed to ProcesserExtractWorkPerformed::class.java,
KafkaEvents.EventWorkConvertPerformed to ConvertWorkPerformed::class.java,
KafkaEvents.EventWorkDownloadCoverPerformed to CoverDownloadWorkPerformed::class.java,

KafkaEvents.EVENT_WORK_ENCODE_SKIPPED to null,
KafkaEvents.EVENT_WORK_EXTRACT_SKIPPED to null,
KafkaEvents.EVENT_WORK_CONVERT_SKIPPED to null,

KafkaEvents.EVENT_MEDIA_PROCESS_COMPLETED to ProcessCompleted::class.java
)
@ -1,31 +1,34 @@
package no.iktdev.mediaprocessing.shared.kafka.core

enum class KafkaEvents(val event: String) {
EVENT_MEDIA_PROCESS_STARTED("event:media-process:started"),
EventMediaProcessStarted("event:media-process:started"),
EVENT_REQUEST_PROCESS_STARTED("event:request-process:started"),

EVENT_MEDIA_READ_STREAM_PERFORMED("event:media-read-stream:performed"),
EventMediaReadStreamPerformed("event:media-read-stream:performed"),
EVENT_MEDIA_PARSE_STREAM_PERFORMED("event:media-parse-stream:performed"),
EventMediaParseStreamPerformed("event:media-parse-stream:performed"),
EVENT_MEDIA_READ_BASE_INFO_PERFORMED("event:media-read-base-info:performed"),
EventMediaReadBaseInfoPerformed("event:media-read-base-info:performed"),
EVENT_MEDIA_METADATA_SEARCH_PERFORMED("event:media-metadata-search:performed"),
EventMediaMetadataSearchPerformed("event:media-metadata-search:performed"),
EVENT_MEDIA_READ_OUT_NAME_AND_TYPE("event:media-read-out-name-and-type:performed"),
EventMediaReadOutNameAndType("event:media-read-out-name-and-type:performed"),
EVENT_MEDIA_READ_OUT_COVER("event:media-read-out-cover:performed"),
EventMediaReadOutCover("event:media-read-out-cover:performed"),

EVENT_MEDIA_ENCODE_PARAMETER_CREATED("event:media-encode-parameter:created"),
EventMediaParameterEncodeCreated("event:media-encode-parameter:created"),
EVENT_MEDIA_EXTRACT_PARAMETER_CREATED("event:media-extract-parameter:created"),
EventMediaParameterExtractCreated("event:media-extract-parameter:created"),
EVENT_MEDIA_CONVERT_PARAMETER_CREATED("event:media-convert-parameter:created"),
EventMediaParameterConvertCreated("event:media-convert-parameter:created"),
EVENT_MEDIA_DOWNLOAD_COVER_PARAMETER_CREATED("event:media-download-cover-parameter:created"),
EventMediaParameterDownloadCoverCreated("event:media-download-cover-parameter:created"),

EVENT_MEDIA_WORK_PROCEED_PERMITTED("event:media-work-proceed:permitted"),
EventMediaWorkProceedPermitted("event:media-work-proceed:permitted"),

EVENT_WORK_ENCODE_CREATED("event:work-encode:created"),
// This event is used to communicate across all apps that an event has been removed, and to terminate existing events
EVENT_WORK_EXTRACT_CREATED("event:work-extract:created"),
EventNotificationOfWorkItemRemoval("event:notification-work-item-removal"),
EVENT_WORK_CONVERT_CREATED("event:work-convert:created"),

EVENT_WORK_ENCODE_PERFORMED("event:work-encode:performed"),
EventWorkEncodeCreated("event:work-encode:created"),
EVENT_WORK_EXTRACT_PERFORMED("event:work-extract:performed"),
EventWorkExtractCreated("event:work-extract:created"),
EVENT_WORK_CONVERT_PERFORMED("event:work-convert:performed"),
EventWorkConvertCreated("event:work-convert:created"),
EVENT_WORK_DOWNLOAD_COVER_PERFORMED("event:work-download-cover:performed"),
EventWorkEncodePerformed("event:work-encode:performed"),
EventWorkExtractPerformed("event:work-extract:performed"),
EventWorkConvertPerformed("event:work-convert:performed"),
EventWorkDownloadCoverPerformed("event:work-download-cover:performed"),

EVENT_STORE_VIDEO_PERFORMED("event:store-video:performed"),
@ -45,13 +48,13 @@ enum class KafkaEvents(val event: String) {
fun isOfWork(event: KafkaEvents): Boolean {
return event in listOf(
EVENT_WORK_CONVERT_CREATED,
EventWorkConvertCreated,
EVENT_WORK_EXTRACT_CREATED,
EventWorkExtractCreated,
EVENT_WORK_ENCODE_CREATED,
EventWorkEncodeCreated,
EVENT_WORK_ENCODE_PERFORMED,
EventWorkEncodePerformed,
EVENT_WORK_CONVERT_PERFORMED,
EventWorkConvertPerformed,
EVENT_WORK_EXTRACT_PERFORMED
EventWorkExtractPerformed
)
}
}
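The companion's toEvent(...) lookup used elsewhere in this commit is not part of this hunk; a plausible shape for it, assuming it simply resolves the wire string back to the enum:

// Assumed implementation of KafkaEvents.toEvent; not shown in this diff.
fun toEvent(name: String): KafkaEvents? =
    KafkaEvents.values().firstOrNull { it.event == name }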
||||||
@@ -3,15 +3,17 @@ package no.iktdev.mediaprocessing.shared.kafka.dto
 
 abstract class MessageDataWrapper(
     @Transient open val status: Status = Status.ERROR,
-    @Transient open val message: String? = null
+    @Transient open val message: String? = null,
+    @Transient open val derivedFromEventId: String? = null
 )
 
 
 data class SimpleMessageData(
     override val status: Status,
-    override val message: String? = null
-) : MessageDataWrapper(status, message)
+    override val message: String? = null,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, message, derivedFromEventId)
 
 fun MessageDataWrapper?.isSuccess(): Boolean {
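
As a quick illustration of the new field, a sketch of building a result message that records which event it was derived from. The ID and message text are made up, and isSuccess() is assumed to check the status as its name suggests:

import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess

fun main() {
    // derivedFromEventId lets a coordinator walk a result back to the
    // event that triggered it, instead of matching on referenceId alone.
    val result = SimpleMessageData(
        status = Status.COMPLETED,
        message = "converted subtitles",     // made-up message
        derivedFromEventId = "3f9c-example"  // made-up event id
    )
    println("derived from ${result.derivedFromEventId}, success=${result.isSuccess()}")
}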
@@ -5,12 +5,13 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_READ_BASE_INFO_PERFORMED)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaReadBaseInfoPerformed)
 data class BaseInfoPerformed(
     override val status: Status,
     val title: String,
-    val sanitizedName: String
-) : MessageDataWrapper(status)
+    val sanitizedName: String,
+    override val derivedFromEventId: String
+) : MessageDataWrapper(status = status, derivedFromEventId = derivedFromEventId)
 
 fun BaseInfoPerformed?.hasValidData(): Boolean {
     return this != null && this.title.isNotBlank() && this.sanitizedName.isNotBlank()
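
A small usage sketch for the hasValidData guard; the handler function is hypothetical and assumed to live in the same package as BaseInfoPerformed, so imports are omitted:

// Hypothetical consumer-side guard: only derive further work when the
// base-info result actually carries usable data.
fun onBaseInfoPerformed(performed: BaseInfoPerformed?) {
    if (!performed.hasValidData()) return
    // hasValidData() already null-checked, so !! is safe here
    println("Base info ready for ${performed!!.sanitizedName}")
}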
@@ -5,11 +5,11 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_WORK_CONVERT_PERFORMED)
+@KafkaBelongsToEvent(KafkaEvents.EventWorkConvertPerformed)
 data class ConvertWorkPerformed(
     override val status: Status,
     override val message: String? = null,
     val producedBy: String,
-    val derivedFromEventId: String,
+    override val derivedFromEventId: String,
     val outFiles: List<String> = listOf()
-): MessageDataWrapper(status, message)
+): MessageDataWrapper(status, message, derivedFromEventId)
@@ -6,13 +6,14 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_WORK_CONVERT_CREATED)
+@KafkaBelongsToEvent(KafkaEvents.EventWorkConvertCreated)
 data class ConvertWorkerRequest(
     override val status: Status,
     val requiresEventId: String? = null,
+    override val derivedFromEventId: String? = null,
     val inputFile: String,
     val allowOverwrite: Boolean,
     val outFileBaseName: String,
     val outDirectory: String,
     val outFormats: List<SubtitleFormats> = listOf()
-): MessageDataWrapper(status)
+): MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
@@ -5,9 +5,10 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_WORK_DOWNLOAD_COVER_PERFORMED)
+@KafkaBelongsToEvent(KafkaEvents.EventWorkDownloadCoverPerformed)
 data class CoverDownloadWorkPerformed(
     override val status: Status,
     override val message: String? = null,
-    val coverFile: String
-): MessageDataWrapper(status, message)
+    val coverFile: String,
+    override val derivedFromEventId: String?
+): MessageDataWrapper(status, message, derivedFromEventId = derivedFromEventId)
@@ -5,11 +5,11 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_READ_OUT_COVER)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaReadOutCover)
 data class CoverInfoPerformed(
     override val status: Status,
     val url: String,
     val outDir: String,
-    val outFileBaseName: String
-)
-    : MessageDataWrapper(status)
+    val outFileBaseName: String,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
@@ -6,13 +6,13 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
 @KafkaBelongsToEvent(
-    KafkaEvents.EVENT_WORK_ENCODE_CREATED,
-    KafkaEvents.EVENT_WORK_EXTRACT_CREATED
+    KafkaEvents.EventWorkEncodeCreated,
+    KafkaEvents.EventWorkExtractCreated
 )
 data class FfmpegWorkRequestCreated(
     override val status: Status,
-    val derivedFromEventId: String,
     val inputFile: String,
     val arguments: List<String>,
-    val outFile: String
-): MessageDataWrapper(status)
+    val outFile: String,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
@@ -12,15 +12,15 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.Status
  * @param arguments Required arguments; instructions for what ffmpeg should do
  */
 @KafkaBelongsToEvent(
-    KafkaEvents.EVENT_MEDIA_ENCODE_PARAMETER_CREATED,
-    KafkaEvents.EVENT_MEDIA_EXTRACT_PARAMETER_CREATED
+    KafkaEvents.EventMediaParameterEncodeCreated,
+    KafkaEvents.EventMediaParameterExtractCreated
 )
 data class FfmpegWorkerArgumentsCreated(
     override val status: Status,
     val inputFile: String, // absolutePath
-    val entries: List<FfmpegWorkerArgument>
-):
-    MessageDataWrapper(status)
+    val entries: List<FfmpegWorkerArgument>,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
 
 data class FfmpegWorkerArgument(
     val outputFile: String,
@@ -7,7 +7,7 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_PROCESS_STARTED)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaProcessStarted)
 data class MediaProcessStarted(
     override val status: Status,
     val type: ProcessType = ProcessType.FLOW,
@@ -6,9 +6,9 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_PARSE_STREAM_PERFORMED)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaParseStreamPerformed)
 data class MediaStreamsParsePerformed(
     override val status: Status,
-    val streams: ParsedMediaStreams
-): MessageDataWrapper(status)
+    val streams: ParsedMediaStreams,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
@@ -5,12 +5,13 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_METADATA_SEARCH_PERFORMED)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaMetadataSearchPerformed)
 data class MetadataPerformed(
     override val status: Status,
     override val message: String? = null,
-    val data: pyMetadata? = null
-) : MessageDataWrapper(status, message)
+    val data: pyMetadata? = null,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, message, derivedFromEventId)
 
 data class pyMetadata(
     val title: String,
@@ -0,0 +1,15 @@
+package no.iktdev.mediaprocessing.shared.kafka.dto.events_result
+
+import no.iktdev.mediaprocessing.shared.kafka.core.KafkaBelongsToEvent
+import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
+import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
+import no.iktdev.mediaprocessing.shared.kafka.dto.Status
+
+@KafkaBelongsToEvent(KafkaEvents.EventNotificationOfWorkItemRemoval)
+data class NotificationOfDeletionPerformed(
+    override val status: Status = Status.COMPLETED,
+    override val message: String? = null,
+    override val derivedFromEventId: String? = null, // Must never be set; this event is not derived from another
+    val deletedEventId: String,
+    val deletedEvent: KafkaEvents
+): MessageDataWrapper()
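
A minimal sketch of constructing the new notification; the helper name is hypothetical, and the producer side is omitted since only the DTO is defined in this commit:

import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.NotificationOfDeletionPerformed

// Hypothetical helper: announce that a work item was removed so every app
// can terminate in-flight work tied to the deleted event.
fun workItemRemoved(deletedEventId: String, deletedEvent: KafkaEvents) =
    NotificationOfDeletionPerformed(
        deletedEventId = deletedEventId,
        deletedEvent = deletedEvent
        // status defaults to COMPLETED; derivedFromEventId is deliberately left null
    )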
@@ -7,6 +7,6 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
 @KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_PROCESS_COMPLETED)
 data class ProcessCompleted(
-    override val status: Status
-) : MessageDataWrapper(status) {
-}
+    override val status: Status,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
@@ -6,9 +6,10 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_READ_STREAM_PERFORMED)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaReadStreamPerformed)
 data class ReaderPerformed(
     override val status: Status,
     val file: String, //AbsolutePath
-    val output: JsonObject
-) : MessageDataWrapper(status)
+    val output: JsonObject,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId)
@@ -7,13 +7,13 @@ import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
 import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
 import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaReadOutNameAndType)
 data class VideoInfoPerformed(
     override val status: Status,
     val info: JsonObject,
-    val outDirectory: String
-)
-    : MessageDataWrapper(status) {
+    val outDirectory: String,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, derivedFromEventId = derivedFromEventId) {
     fun toValueObject(): VideoInfo? {
         val type = info.get("type").asString
         return when (type) {
@@ -46,7 +46,7 @@ data class SubtitleInfo(
     val language: String
 )
 
-@KafkaBelongsToEvent(KafkaEvents.EVENT_MEDIA_READ_OUT_NAME_AND_TYPE)
+@KafkaBelongsToEvent(KafkaEvents.EventMediaReadOutNameAndType)
 open class VideoInfo(
     @Transient open val type: String,
     @Transient open val title: String,
@@ -7,12 +7,12 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
 // Derived from ffmpeg work
 @KafkaBelongsToEvent(
-    KafkaEvents.EVENT_WORK_ENCODE_PERFORMED
+    KafkaEvents.EventWorkEncodePerformed
 )
 data class ProcesserEncodeWorkPerformed(
     override val status: Status,
     override val message: String? = null,
     val producedBy: String,
-    val derivedFromEventId: String,
-    val outFile: String? = null
-): MessageDataWrapper(status, message)
+    val outFile: String? = null,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, message, derivedFromEventId)
@@ -7,12 +7,12 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.Status
 
 // Derived from ffmpeg work
 @KafkaBelongsToEvent(
-    KafkaEvents.EVENT_WORK_EXTRACT_PERFORMED
+    KafkaEvents.EventWorkExtractPerformed
 )
 data class ProcesserExtractWorkPerformed(
     override val status: Status,
     override val message: String? = null,
     val producedBy: String,
-    val derivedFromEventId: String,
-    val outFile: String? = null
-): MessageDataWrapper(status, message)
+    val outFile: String? = null,
+    override val derivedFromEventId: String?
+) : MessageDataWrapper(status, message, derivedFromEventId)
@@ -28,7 +28,7 @@ class SerializationTest {
 
         val json = gson.toJson(message)
         val deserializer = DeserializingRegistry()
-        val result = deserializer.deserialize(KafkaEvents.EVENT_MEDIA_PROCESS_STARTED, json)
+        val result = deserializer.deserialize(KafkaEvents.EventMediaProcessStarted, json)
         assertThat(result.data).isInstanceOf(MediaProcessStarted::class.java)
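
Following the same pattern as the test above, a hedged sketch of a round-trip check that derivedFromEventId survives serialization; it reuses the gson and DeserializingRegistry setup visible in the test, and fills ConvertWorkPerformed with made-up values:

// Sketch only: field values are illustrative, not from the real pipeline.
val performed = ConvertWorkPerformed(
    status = Status.COMPLETED,
    producedBy = "converter-test",            // made-up producer name
    derivedFromEventId = "parent-event-id",   // made-up event id
    outFiles = listOf("/tmp/out.srt")
)
val roundTripped = DeserializingRegistry()
    .deserialize(KafkaEvents.EventWorkConvertPerformed, gson.toJson(performed))
assertThat((roundTripped.data as ConvertWorkPerformed).derivedFromEventId)
    .isEqualTo("parent-event-id")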