V3 - Moved to database polling

This commit is contained in:
bskjon 2024-07-11 23:26:09 +02:00
parent fe3c238adb
commit c58b00a236
112 changed files with 3336 additions and 2064 deletions

295
.github/workflows/v3.yml vendored Normal file
View File

@ -0,0 +1,295 @@
# CI for the v3 branch: path-filters per module, builds shared code first,
# then builds and pushes a Docker image per changed module.
name: Build V3
on:
  push:
    branches:
      - v3
  pull_request:
    branches:
      - v3
  workflow_dispatch:

jobs:
  pre-check:
    runs-on: ubuntu-latest
    outputs:
      pyMetadata: ${{ steps.filter.outputs.pyMetadata }}
      coordinator: ${{ steps.filter.outputs.coordinator }}
      processer: ${{ steps.filter.outputs.processer }}
      converter: ${{ steps.filter.outputs.converter }}
      shared: ${{ steps.filter.outputs.shared }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          # Filter keys must match the output names read via
          # `steps.filter.outputs.*` above. The previous keys
          # (`apps/coordinator:` etc.) produced outputs named
          # `apps/coordinator`, leaving `coordinator` & co. always empty.
          filters: |
            pyMetadata:
              - 'apps/pyMetadata/**'
            coordinator:
              - 'apps/coordinator/**'
            processer:
              - 'apps/processer/**'
            converter:
              - 'apps/converter/**'
            shared:
              - 'shared/**'
      # `needs.*` is not available inside the job that produces the outputs;
      # read the step outputs directly.
      - name: Print Outputs from pre-check job
        run: |
          echo "Apps"
          echo "app:pyMetadata: ${{ steps.filter.outputs.pyMetadata }}"
          echo "app:coordinator: ${{ steps.filter.outputs.coordinator }}"
          echo "app:processer: ${{ steps.filter.outputs.processer }}"
          echo "app:converter: ${{ steps.filter.outputs.converter }}"
          echo "Shared"
          echo "shared: ${{ steps.filter.outputs.shared }}"
          echo "All outputs: ${{ toJSON(steps.filter.outputs) }}"

  build-shared:
    runs-on: ubuntu-latest
    needs: pre-check
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Cache Shared code Gradle dependencies
        id: cache-gradle
        uses: actions/cache@v4
        with:
          path: ~/.gradle/caches
          key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
      - name: Build Shared code
        if: steps.cache-gradle.outputs.cache-hit != 'true' || needs.pre-check.outputs.shared == 'true' || github.event_name == 'workflow_dispatch'
        run: |
          chmod +x ./gradlew
          ./gradlew :shared:build --stacktrace --info

  build-processer:
    # pre-check must be listed in `needs` for `needs.pre-check.outputs.*`
    # to resolve in the `if:` expression below.
    needs: [pre-check, build-shared]
    if: ${{ needs.pre-check.outputs.processer == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Cache Shared Gradle dependencies
        id: cache-gradle
        uses: actions/cache@v4
        with:
          path: ~/.gradle/caches
          key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
      - name: Extract version from build.gradle.kts
        id: extract_version
        run: |
          VERSION=$(grep '^version\s*=\s*".*"' ./apps/processer/build.gradle.kts | sed 's/^version\s*=\s*"\(.*\)"/\1/')
          echo "VERSION=$VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
      - name: Build Processer module
        id: build-processer
        run: |
          chmod +x ./gradlew
          ./gradlew :apps:processer:bootJar --info
          echo "Build completed"
      - name: Generate Docker image tag
        id: docker-tag
        # `::set-output` is deprecated; write to $GITHUB_OUTPUT instead.
        run: echo "tag=$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)" >> $GITHUB_OUTPUT
      - name: Docker login
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          username: ${{ secrets.DOCKER_HUB_NAME }}
          password: ${{ secrets.DOCKER_HUB_TOKEN }}
      - name: Build and push Docker image
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./dockerfiles/DebianJavaFfmpeg
          build-args: |
            MODULE_NAME=processer
            PASS_APP_VERSION=${{ env.VERSION }}
          push: true
          tags: |
            bskjon/mediaprocessing-processer:v3
            bskjon/mediaprocessing-processer:v3-${{ github.sha }}
            bskjon/mediaprocessing-processer:v3-${{ steps.docker-tag.outputs.tag }}

  build-converter:
    needs: [pre-check, build-shared]
    if: ${{ needs.pre-check.outputs.converter == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Cache Shared Gradle dependencies
        id: cache-gradle
        uses: actions/cache@v4
        with:
          path: ~/.gradle/caches
          key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
      - name: Extract version from build.gradle.kts
        id: extract_version
        run: |
          VERSION=$(grep '^version\s*=\s*".*"' ./apps/converter/build.gradle.kts | sed 's/^version\s*=\s*"\(.*\)"/\1/')
          echo "VERSION=$VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
      - name: Build Converter module
        id: build-converter
        run: |
          chmod +x ./gradlew
          ./gradlew :apps:converter:bootJar --info
          echo "Build completed"
      - name: Generate Docker image tag
        id: docker-tag
        run: echo "tag=$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)" >> $GITHUB_OUTPUT
      - name: Docker login
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          username: ${{ secrets.DOCKER_HUB_NAME }}
          password: ${{ secrets.DOCKER_HUB_TOKEN }}
      - name: Build and push Docker image
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./dockerfiles/DebianJava
          build-args: |
            MODULE_NAME=converter
            PASS_APP_VERSION=${{ env.VERSION }}
          push: true
          tags: |
            bskjon/mediaprocessing-converter:v3
            bskjon/mediaprocessing-converter:v3-${{ github.sha }}
            bskjon/mediaprocessing-converter:v3-${{ steps.docker-tag.outputs.tag }}

  build-coordinator:
    needs: [pre-check, build-shared]
    if: ${{ needs.pre-check.outputs.coordinator == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Cache Shared Gradle dependencies
        id: cache-gradle
        uses: actions/cache@v4
        with:
          path: ~/.gradle/caches
          key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
      - name: Extract version from build.gradle.kts
        id: extract_version
        run: |
          VERSION=$(grep '^version\s*=\s*".*"' ./apps/coordinator/build.gradle.kts | sed 's/^version\s*=\s*"\(.*\)"/\1/')
          echo "VERSION=$VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
      - name: Build Coordinator module
        id: build-coordinator
        run: |
          chmod +x ./gradlew
          ./gradlew :apps:coordinator:bootJar
          echo "Build completed"
      - name: Generate Docker image tag
        id: docker-tag
        run: echo "tag=$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)" >> $GITHUB_OUTPUT
      - name: Docker login
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          username: ${{ secrets.DOCKER_HUB_NAME }}
          password: ${{ secrets.DOCKER_HUB_TOKEN }}
      - name: Debug Check extracted version
        run: |
          echo "Extracted version: ${{ env.VERSION }}"
      - name: Build and push Docker image
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./dockerfiles/DebianJavaFfmpeg
          build-args: |
            MODULE_NAME=coordinator
            PASS_APP_VERSION=${{ env.VERSION }}
          push: true
          tags: |
            bskjon/mediaprocessing-coordinator:v3
            bskjon/mediaprocessing-coordinator:v3-${{ github.sha }}
            bskjon/mediaprocessing-coordinator:v3-${{ steps.docker-tag.outputs.tag }}

  build-pymetadata:
    needs: pre-check
    if: ${{ needs.pre-check.outputs.pyMetadata == 'true' || github.event_name == 'workflow_dispatch' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Build pyMetadata module
        id: build-pymetadata
        # The job-level `if:` already gates on the path filter; the previous
        # in-step check referenced `steps.check-pymetadata`, a step that does
        # not exist in this job.
        run: |
          cd apps/pyMetadata
          # Add the necessary build steps for your Python module here
          echo "Build completed"
      - name: Generate Docker image tag
        id: docker-tag
        run: echo "tag=$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)" >> $GITHUB_OUTPUT
      - name: Docker login
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          username: ${{ secrets.DOCKER_HUB_NAME }}
          password: ${{ secrets.DOCKER_HUB_TOKEN }}
      - name: Build and push Docker image
        uses: docker/build-push-action@v5.1.0
        with:
          context: .
          file: ./dockerfiles/Python
          build-args: |
            MODULE_NAME=pyMetadata
          push: true
          tags: |
            bskjon/mediaprocessing-pymetadata:v3
            bskjon/mediaprocessing-pymetadata:v3-${{ github.sha }}
            bskjon/mediaprocessing-pymetadata:v3-${{ steps.docker-tag.outputs.tag }}

1
.idea/gradle.xml generated
View File

@ -16,6 +16,7 @@
<option value="$PROJECT_DIR$/shared" /> <option value="$PROJECT_DIR$/shared" />
<option value="$PROJECT_DIR$/shared/common" /> <option value="$PROJECT_DIR$/shared/common" />
<option value="$PROJECT_DIR$/shared/contract" /> <option value="$PROJECT_DIR$/shared/contract" />
<option value="$PROJECT_DIR$/shared/eventi" />
<option value="$PROJECT_DIR$/shared/kafka" /> <option value="$PROJECT_DIR$/shared/kafka" />
</set> </set>
</option> </option>

7
.idea/kotlinc.xml generated
View File

@ -1,5 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="Kotlin2JvmCompilerArguments">
<option name="jvmTarget" value="1.8" />
</component>
<component name="KotlinCommonCompilerArguments">
<option name="apiVersion" value="1.9" />
<option name="languageVersion" value="1.9" />
</component>
<component name="KotlinJpsPluginSettings"> <component name="KotlinJpsPluginSettings">
<option name="version" value="1.9.20" /> <option name="version" value="1.9.20" />
</component> </component>

View File

@ -48,6 +48,7 @@ dependencies {
implementation(project(mapOf("path" to ":shared:contract"))) implementation(project(mapOf("path" to ":shared:contract")))
implementation(project(mapOf("path" to ":shared:common"))) implementation(project(mapOf("path" to ":shared:common")))
implementation(project(mapOf("path" to ":shared:eventi")))
implementation("org.jetbrains.exposed:exposed-core:$exposedVersion") implementation("org.jetbrains.exposed:exposed-core:$exposedVersion")
implementation("org.jetbrains.exposed:exposed-dao:$exposedVersion") implementation("org.jetbrains.exposed:exposed-dao:$exposedVersion")
@ -56,7 +57,7 @@ dependencies {
implementation ("mysql:mysql-connector-java:8.0.29") implementation ("mysql:mysql-connector-java:8.0.29")
implementation("org.jetbrains.kotlin:kotlin-stdlib")
implementation(kotlin("stdlib-jdk8")) implementation(kotlin("stdlib-jdk8"))
testImplementation("org.assertj:assertj-core:3.21.0") testImplementation("org.assertj:assertj-core:3.21.0")

View File

@ -13,38 +13,34 @@ import no.iktdev.streamit.library.db.tables.*
import no.iktdev.streamit.library.db.tables.helper.cast_errors import no.iktdev.streamit.library.db.tables.helper.cast_errors
import no.iktdev.streamit.library.db.tables.helper.data_audio import no.iktdev.streamit.library.db.tables.helper.data_audio
import no.iktdev.streamit.library.db.tables.helper.data_video import no.iktdev.streamit.library.db.tables.helper.data_video
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.transactions.transaction
import org.springframework.boot.autoconfigure.SpringBootApplication import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication import org.springframework.boot.runApplication
import org.springframework.context.ApplicationContext import org.springframework.context.annotation.Bean
val log = KotlinLogging.logger {} val log = KotlinLogging.logger {}
private lateinit var eventDatabase: EventsDatabase
private lateinit var eventsManager: EventsManager
@SpringBootApplication @SpringBootApplication
class CoordinatorApplication { class CoordinatorApplication {
@Bean
fun eventManager(): EventsManager {
return eventsManager
}
} }
private var context: ApplicationContext? = null
private lateinit var storeDatabase: MySqlDataSource private lateinit var storeDatabase: MySqlDataSource
val ioCoroutine = CoroutinesIO() val ioCoroutine = CoroutinesIO()
val defaultCoroutine = CoroutinesDefault() val defaultCoroutine = CoroutinesDefault()
@Suppress("unused")
fun getContext(): ApplicationContext? {
return context
}
fun getStoreDatabase(): MySqlDataSource { fun getStoreDatabase(): MySqlDataSource {
return storeDatabase return storeDatabase
} }
private lateinit var eventsDatabase: MySqlDataSource
fun getEventsDatabase(): MySqlDataSource {
return eventsDatabase
}
lateinit var eventManager: PersistentEventManager
lateinit var taskManager: TasksManager lateinit var taskManager: TasksManager
fun main(args: Array<String>) { fun main(args: Array<String>) {
@ -58,24 +54,17 @@ fun main(args: Array<String>) {
value.printStackTrace() value.printStackTrace()
} }
}) })
eventDatabase = EventsDatabase().also {
eventsManager = EventsManager(it.database)
}
eventsDatabase = DatabaseEnvConfig.toEventsDatabase()
eventsDatabase.createDatabase()
storeDatabase = DatabaseEnvConfig.toStoredDatabase() storeDatabase = DatabaseEnvConfig.toStoredDatabase()
storeDatabase.createDatabase() storeDatabase.createDatabase()
eventManager = PersistentEventManager(eventsDatabase) taskManager = TasksManager(eventDatabase.database)
taskManager = TasksManager(eventsDatabase)
val kafkaTables = listOf(
events, // For kafka
allEvents,
tasks,
runners
)
val tables = arrayOf( val tables = arrayOf(
@ -95,8 +84,7 @@ fun main(args: Array<String>) {
storeDatabase.createTables(*tables) storeDatabase.createTables(*tables)
eventsDatabase.createTables(*kafkaTables.toTypedArray()) runApplication<CoordinatorApplication>(*args)
context = runApplication<CoordinatorApplication>(*args)
log.info { "App Version: ${getAppVersion()}" } log.info { "App Version: ${getAppVersion()}" }
printSharedConfig() printSharedConfig()

View File

@ -0,0 +1,14 @@
package no.iktdev.mediaprocessing.coordinator
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.shared.contract.data.Event
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.ApplicationContext
/**
 * Spring-wired [EventCoordinator] specialization that binds the coordinator
 * application's [Event] payload type to its [EventsManager] implementation.
 *
 * Both overridden properties are injected by Spring via constructor autowiring.
 */
class Coordinator(
    @Autowired
    override var applicationContext: ApplicationContext,
    @Autowired
    override var eventManager: EventsManager
) : EventCoordinator<Event, EventsManager>()

View File

@ -0,0 +1,10 @@
package no.iktdev.mediaprocessing.coordinator
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
/**
 * Base class for coordinator-side event listeners.
 *
 * Subclasses declare the events they react to ([listensForEvents]) and the
 * single event they emit ([produceEvent]).
 */
abstract class CoordinatorEventListener(): EventsListenerContract<EventsManager, Coordinator>() {
    // Event emitted by this listener when it has handled its inputs.
    abstract override val produceEvent: Events
    // Events that trigger this listener.
    abstract override val listensForEvents: List<Events>
    // Owning coordinator; nullable. NOTE(review): confirm where/when this is assigned.
    abstract override var coordinator: Coordinator?
}

View File

@ -1,152 +0,0 @@
package no.iktdev.mediaprocessing.coordinator
import kotlinx.coroutines.delay
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.coordination.PersistentEventBasedMessageListener
import no.iktdev.mediaprocessing.shared.common.EventCoordinatorBase
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.contract.ProcessType
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.*
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.*
import org.springframework.scheduling.annotation.EnableScheduling
import org.springframework.scheduling.annotation.Scheduled
import org.springframework.stereotype.Service
import java.io.File
import java.util.UUID
@EnableScheduling
@Service
class EventCoordinator() : EventCoordinatorBase<PersistentMessage, PersistentEventBasedMessageListener>() {
override fun onCoordinatorReady() {
super.onCoordinatorReady()
readAllUncompletedMessagesInQueue()
}
override fun onMessageReceived(event: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>) {
val success = eventManager.setEvent(event.key, event.value)
if (!success) {
log.error { "Failed to store message event\nReferenceId: ${event.value.referenceId}\n\tEventId: ${event.value.eventId}\n\tEvent: ${event.key.event}\n\nData:\n${event.value.data}" }
} else {
ioCoroutine.launch {
readAllMessagesFor(event.value.referenceId, event.value.eventId, event.key.event)
}
}
}
override fun createTasksBasedOnEventsAndPersistence(
referenceId: String,
eventId: String,
messages: List<PersistentMessage>
) {
val triggered = messages.find { it.eventId == eventId }
if (triggered == null) {
log.error { "Could not find $eventId in provided messages" }
return
}
listeners.forwardEventMessageToListeners(triggered, messages)
}
private val log = KotlinLogging.logger {}
override val listeners = PersistentEventBasedMessageListener()
//private val forwarder = Forwarder()
public fun startProcess(file: File, type: ProcessType) {
val operations: List<StartOperationEvents> = listOf(
StartOperationEvents.ENCODE,
StartOperationEvents.EXTRACT,
StartOperationEvents.CONVERT
)
startProcess(file, type, operations)
}
fun startProcess(file: File, type: ProcessType, operations: List<StartOperationEvents>): UUID {
val referenceId: UUID = UUID.randomUUID()
val processStartEvent = MediaProcessStarted(
status = Status.COMPLETED,
file = file.absolutePath,
type = type,
operations = operations
)
producer.sendMessage(UUID.randomUUID().toString(), KafkaEvents.EventMediaProcessStarted, processStartEvent)
return referenceId
}
fun permitWorkToProceedOn(referenceId: String, message: String) {
producer.sendMessage(
referenceId = referenceId,
KafkaEvents.EventMediaWorkProceedPermitted,
SimpleMessageData(Status.COMPLETED, message, null)
)
}
fun readAllUncompletedMessagesInQueue() {
val messages = eventManager.getEventsUncompleted()
if (messages.isNotEmpty()) {
log.info { "Found ${messages.size} uncompleted items" }
}
messages.onEach {
it.firstOrNull()?.let {
log.info { "Found uncompleted: ${it.referenceId}" }
}
}
ioCoroutine.launch {
messages.forEach {
delay(1000)
try {
listeners.forwardBatchEventMessagesToListeners(it)
} catch (e: Exception) {
e.printStackTrace()
}
}
}
}
fun readAllMessagesFor(referenceId: String, eventId: String, event: String) {
val messages = eventManager.getEventsWith(referenceId)
if (messages.find { it.eventId == eventId && it.referenceId == referenceId } == null) {
log.warn { "EventId ($eventId) for ReferenceId ($referenceId) with event $event has not been made available in the database yet." }
ioCoroutine.launch {
val fixedDelay = 1000L
delay(fixedDelay)
var delayed = 0L
var msc = eventManager.getEventsWith(referenceId)
while (msc.find { it.eventId == eventId } != null || delayed < 1000 * 60) {
delayed += fixedDelay
msc = eventManager.getEventsWith(referenceId)
}
operationToRunOnMessages(referenceId, eventId, msc)
}
} else {
operationToRunOnMessages(referenceId, eventId, messages)
}
}
fun operationToRunOnMessages(referenceId: String, eventId: String, messages: List<PersistentMessage>) {
try {
createTasksBasedOnEventsAndPersistence(referenceId, eventId, messages)
} catch (e: Exception) {
e.printStackTrace()
}
}
fun getProcessStarted(messages: List<PersistentMessage>): MediaProcessStarted? {
return messages.find { it.event == KafkaEvents.EventMediaProcessStarted }?.data as MediaProcessStarted
}
@Scheduled(fixedDelay = (5*6_0000))
fun checkForWork() {
if (isReady()) {
log.info { "\n\nChecking if there is any uncompleted event sets\n\n" }
readAllUncompletedMessagesInQueue()
}
}
}

View File

@ -0,0 +1,25 @@
package no.iktdev.mediaprocessing.coordinator
import no.iktdev.mediaprocessing.shared.common.DatabaseEnvConfig
import no.iktdev.mediaprocessing.shared.common.persistance.allEvents
import no.iktdev.mediaprocessing.shared.common.persistance.events
import no.iktdev.mediaprocessing.shared.common.persistance.runners
import no.iktdev.mediaprocessing.shared.common.persistance.tasks
import no.iktdev.mediaprocessing.shared.common.toEventsDatabase
/**
 * Owns the events database connection and the table set used for event/task
 * bookkeeping (events, allEvents, tasks, runners).
 *
 * NOTE(review): construction is side-effectful — the database and all tables
 * are created in `init`, so merely instantiating this class performs I/O.
 */
class EventsDatabase() {
    // Connection configured from environment variables.
    val database = DatabaseEnvConfig.toEventsDatabase()
    // Tables created at construction time.
    val tables = listOf(
        events, // For kafka
        allEvents,
        tasks,
        runners
    )
    init {
        database.createDatabase()
        database.createTables(*tables.toTypedArray())
    }
}

View File

@ -0,0 +1,36 @@
package no.iktdev.mediaprocessing.coordinator
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.implementations.EventsManagerImpl
import no.iktdev.mediaprocessing.shared.common.datasource.DataSource
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.Event
/**
 * Database-backed [EventsManagerContract] for the coordinator.
 *
 * All three operations are unimplemented stubs: calling any of them throws
 * [NotImplementedError] via TODO(). They must be implemented before this
 * manager is usable at runtime.
 */
class EventsManager(dataSource: DataSource) : EventsManagerContract(dataSource) {
    override fun readAvailableEvents(): List<Event> {
        TODO("Not yet implemented")
    }

    override fun readAvailableEventsFor(referenceId: String): List<Event> {
        TODO("Not yet implemented")
    }

    override fun storeEvent(event: Event): Boolean {
        TODO("Not yet implemented")
    }
}
/**
 * In-memory [EventsManagerImpl] for tests: events live in a plain mutable
 * list and are never persisted through the supplied [DataSource].
 */
class MockEventManager(dataSource: DataSource) : EventsManagerImpl<EventImpl>(dataSource) {
    // Backing store; exposed so tests can assert on stored events.
    val events: MutableList<EventImpl> = mutableListOf()

    /** Snapshot of every stored event. */
    override fun readAvailableEvents(): List<EventImpl> = events.toList()

    /** Events whose metadata carries the given [referenceId]. */
    override fun readAvailableEventsFor(referenceId: String): List<EventImpl> =
        events.filter { it.metadata.referenceId == referenceId }

    /** Appends [event]; always returns true per the MutableList.add contract. */
    override fun storeEvent(event: EventImpl): Boolean = events.add(event)
}

View File

@ -9,9 +9,10 @@ import org.springframework.context.annotation.Import
@Configuration @Configuration
class SocketLocalInit: SocketImplementation() { class SocketLocalInit: SocketImplementation() {
} }
@Configuration @Configuration
@Import(CoordinatorProducer::class, DefaultMessageListener::class) @Import(CoordinatorProducer::class, DefaultMessageListener::class)
class KafkaLocalInit: KafkaImplementation() { class KafkaLocalInit: KafkaImplementation() {

View File

@ -1,5 +0,0 @@
package no.iktdev.mediaprocessing.coordinator
class RequestHandler {
}

View File

@ -1,69 +0,0 @@
package no.iktdev.mediaprocessing.coordinator
import no.iktdev.mediaprocessing.coordinator.coordination.PersistentEventBasedMessageListener
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.tasks.TaskCreatorImpl
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess
abstract class TaskCreator(coordinator: EventCoordinator):
TaskCreatorImpl<EventCoordinator, PersistentMessage, PersistentEventBasedMessageListener>(coordinator) {
override fun isPrerequisiteEventsOk(events: List<PersistentMessage>): Boolean {
val currentEvents = events.map { it.event }
return requiredEvents.all { currentEvents.contains(it) }
}
override fun isPrerequisiteDataPresent(events: List<PersistentMessage>): Boolean {
val failed = events.filter { e -> e.event in requiredEvents }.filter { !it.data.isSuccess() }
return failed.isEmpty()
}
override fun isEventOfSingle(event: PersistentMessage, singleOne: KafkaEvents): Boolean {
return event.event == singleOne
}
/*override fun getListener(): Tasks {
val eventListenerFilter = listensForEvents.ifEmpty { requiredEvents }
return Tasks(taskHandler = this, producesEvent = producesEvent, listensForEvents = eventListenerFilter)
}*/
override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
return listOf {
isPrerequisiteEventsOk(events)
}
}
override fun prerequisiteRequired(event: PersistentMessage): List<() -> Boolean> {
return listOf()
}
/**
* Will always return null
*/
open fun onProcessEventsAccepted(event: PersistentMessage, events: List<PersistentMessage>) {
val referenceId = event.referenceId
val eventIds = events.filter { it.event in requiredEvents + listensForEvents }.map { it.eventId }
val current = processedEvents[referenceId] ?: setOf()
current.toMutableSet().addAll(eventIds)
processedEvents[referenceId] = current
if (event.event == KafkaEvents.EventCollectAndStore) {
processedEvents.remove(referenceId)
}
}
override fun containsUnprocessedEvents(events: List<PersistentMessage>): Boolean {
val referenceId = events.firstOrNull()?.referenceId ?:return false
val preExistingEvents = processedEvents[referenceId]?: setOf()
val forwardedEvents = events.filter { it.event in (requiredEvents + listensForEvents) }.map { it.eventId }
val newEvents = forwardedEvents.filter { it !in preExistingEvents }
return newEvents.isNotEmpty()
}
}

View File

@ -1,7 +1,6 @@
package no.iktdev.mediaprocessing.coordinator.controller package no.iktdev.mediaprocessing.coordinator.controller
import com.google.gson.Gson import com.google.gson.Gson
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.eventManager import no.iktdev.mediaprocessing.coordinator.eventManager
import no.iktdev.mediaprocessing.shared.contract.dto.RequestWorkProceed import no.iktdev.mediaprocessing.shared.contract.dto.RequestWorkProceed
import org.springframework.beans.factory.annotation.Autowired import org.springframework.beans.factory.annotation.Autowired
@ -13,7 +12,7 @@ import org.springframework.web.bind.annotation.RequestMapping
@Controller @Controller
@RequestMapping(path = ["/action"]) @RequestMapping(path = ["/action"])
class ActionEventController(@Autowired var coordinator: EventCoordinator) { class ActionEventController(@Autowired var coordinator: EventCoordinatorDep) {
@RequestMapping("/flow/proceed") @RequestMapping("/flow/proceed")

View File

@ -1,7 +1,6 @@
package no.iktdev.mediaprocessing.coordinator.controller package no.iktdev.mediaprocessing.coordinator.controller
import com.google.gson.Gson import com.google.gson.Gson
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.shared.contract.ProcessType import no.iktdev.mediaprocessing.shared.contract.ProcessType
import no.iktdev.mediaprocessing.shared.contract.dto.EventRequest import no.iktdev.mediaprocessing.shared.contract.dto.EventRequest
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
@ -17,7 +16,7 @@ import java.io.File
@Controller @Controller
@RequestMapping(path = ["/request"]) @RequestMapping(path = ["/request"])
class RequestEventController(@Autowired var coordinator: EventCoordinator) { class RequestEventController(@Autowired var coordinator: EventCoordinatorDep) {
@PostMapping("/convert") @PostMapping("/convert")
@ResponseStatus(HttpStatus.OK) @ResponseStatus(HttpStatus.OK)

View File

@ -1,61 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf
import no.iktdev.mediaprocessing.shared.common.parsing.FileNameParser
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.BaseInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class BaseInfoFromFile(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
val log = KotlinLogging.logger {}
override val producesEvent: KafkaEvents
get() = KafkaEvents.EventMediaReadBaseInfoPerformed
override val requiredEvents: List<KafkaEvents> = listOf(KafkaEvents.EventMediaProcessStarted)
override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
return super.prerequisitesRequired(events) + listOf {
isPrerequisiteDataPresent(events)
}
}
override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
super.onProcessEventsAccepted(event, events)
log.info { "${event.referenceId} triggered by ${event.event}" }
val selected = events.lastOrSuccessOf(KafkaEvents.EventMediaProcessStarted) ?: return null
return readFileInfo(selected.data as MediaProcessStarted, event.eventId)
}
fun readFileInfo(started: MediaProcessStarted, eventId: String): MessageDataWrapper {
val result = try {
val fileName = File(started.file).nameWithoutExtension
val fileNameParser = FileNameParser(fileName)
BaseInfoPerformed(
Status.COMPLETED,
title = fileNameParser.guessDesiredTitle(),
sanitizedName = fileNameParser.guessDesiredFileName(),
searchTitles = fileNameParser.guessSearchableTitle(),
derivedFromEventId = eventId
)
} catch (e: Exception) {
e.printStackTrace()
SimpleMessageData(Status.ERROR, e.message ?: "Unable to obtain proper info from file", eventId)
}
return result
}
}

View File

@ -1,7 +1,6 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event package no.iktdev.mediaprocessing.coordinator.tasks.event
import mu.KotlinLogging import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.coordinator.getStoreDatabase import no.iktdev.mediaprocessing.coordinator.getStoreDatabase
import no.iktdev.mediaprocessing.coordinator.mapping.ProcessMapping import no.iktdev.mediaprocessing.coordinator.mapping.ProcessMapping
@ -29,7 +28,7 @@ import java.io.File
import java.sql.SQLIntegrityConstraintViolationException import java.sql.SQLIntegrityConstraintViolationException
@Service @Service
class CollectAndStoreTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) { class CollectAndStoreTask() : TaskCreator(null) {
val log = KotlinLogging.logger {} val log = KotlinLogging.logger {}

View File

@ -2,9 +2,7 @@ package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.Gson import com.google.gson.Gson
import mu.KotlinLogging import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.coordinator.mapping.ProcessMapping
import no.iktdev.mediaprocessing.coordinator.utils.isAwaitingPrecondition import no.iktdev.mediaprocessing.coordinator.utils.isAwaitingPrecondition
import no.iktdev.mediaprocessing.coordinator.utils.isAwaitingTask import no.iktdev.mediaprocessing.coordinator.utils.isAwaitingTask
import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf
@ -19,11 +17,10 @@ import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStar
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.ProcessCompleted import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.ProcessCompleted
import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess
import org.springframework.beans.factory.annotation.Autowired import org.springframework.beans.factory.annotation.Autowired
import org.springframework.scheduling.support.TaskUtils
import org.springframework.stereotype.Service import org.springframework.stereotype.Service
@Service @Service
class CompleteMediaTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) { class CompleteMediaTask() : TaskCreator(null) {
val log = KotlinLogging.logger {} val log = KotlinLogging.logger {}
override val producesEvent: KafkaEvents = KafkaEvents.EventMediaProcessCompleted override val producesEvent: KafkaEvents = KafkaEvents.EventMediaProcessCompleted

View File

@ -1,122 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.coordinator.taskManager
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.persistance.isOfEvent
import no.iktdev.mediaprocessing.shared.common.persistance.isSuccess
import no.iktdev.mediaprocessing.shared.common.persistance.lastOf
import no.iktdev.mediaprocessing.shared.common.task.ConvertTaskData
import no.iktdev.mediaprocessing.shared.common.task.TaskType
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.contract.dto.isOnly
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import no.iktdev.mediaprocessing.shared.kafka.dto.az
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.ConvertWorkerRequest
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkRequestCreated
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.work.ProcesserExtractWorkPerformed
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class CreateConvertWorkTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventWorkConvertCreated

    override val listensForEvents: List<KafkaEvents>
        get() = listOf(KafkaEvents.EventMediaProcessStarted, KafkaEvents.EventWorkExtractPerformed)

    /**
     * Creates a Convert task for the media file and produces a [ConvertWorkerRequest].
     *
     * The file to convert is taken either from the start event (when CONVERT is the
     * only requested operation) or from the output of a completed extract step
     * (when CONVERT was requested alongside other operations).
     *
     * @param event the event that triggered this task
     * @param events all persisted events for the same referenceId
     * @return a [ConvertWorkerRequest], a SKIPPED message when extraction failed,
     *         or null when there is no file to convert.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        val startedEventData = events.lastOf(KafkaEvents.EventMediaProcessStarted)?.data?.az<MediaProcessStarted>()

        // A failed extract means there is no produced file to convert; skip instead of erroring.
        if (event.event == KafkaEvents.EventWorkExtractPerformed && !event.isSuccess()) {
            return SimpleMessageData(status = Status.SKIPPED, "Extract failed, skipping..", derivedFromEventId = event.eventId)
        }

        // Resolve which file the convert operation should run on.
        val result = if (event.isOfEvent(KafkaEvents.EventMediaProcessStarted) &&
            event.data.az<MediaProcessStarted>()?.operations?.isOnly(StartOperationEvents.CONVERT) == true
        ) {
            // Convert-only flow: operate directly on the source file.
            startedEventData?.file
        } else if (event.isOfEvent(KafkaEvents.EventWorkExtractPerformed) && startedEventData?.operations?.contains(
                StartOperationEvents.CONVERT
            ) == true
        ) {
            // Extract-then-convert flow: operate on the extract output.
            val innerData = event.data.az<ProcesserExtractWorkPerformed>()
            innerData?.outFile
        } else null

        val convertFile = result?.let { File(it) }
        if (convertFile == null) {
            log.warn { "${event.referenceId} No file to perform convert on.." }
            return null
        }

        val taskData = ConvertTaskData(
            allowOverwrite = true,
            inputFile = convertFile.absolutePath,
            outFileBaseName = convertFile.nameWithoutExtension,
            outDirectory = convertFile.parentFile.absolutePath,
            outFormats = emptyList()
        )

        // Persist the task; the work request event is produced regardless, so a
        // persistence failure is only logged here.
        val status = taskManager.createTask(
            referenceId = event.referenceId,
            eventId = event.eventId,
            task = TaskType.Convert,
            derivedFromEventId = event.eventId,
            data = Gson().toJson(taskData)
        )
        if (!status) {
            log.error { "Failed to create Convert task on ${event.referenceId}@${event.eventId}" }
        }
        return produceConvertWorkRequest(convertFile, event.referenceId, event.eventId)
    }

    /** Builds the outgoing work request for the convert worker. */
    private fun produceConvertWorkRequest(
        file: File,
        requiresEventId: String?,
        derivedFromEventId: String?
    ): ConvertWorkerRequest {
        return ConvertWorkerRequest(
            status = Status.COMPLETED,
            requiresEventId = requiresEventId,
            inputFile = file.absolutePath,
            allowOverwrite = true,
            outFileBaseName = file.nameWithoutExtension,
            outDirectory = file.parentFile.absolutePath,
            derivedFromEventId = derivedFromEventId
        )
    }
    // Removed: private data class DerivedInfoObject — it was never referenced in this
    // class and contained an unchecked `as FfmpegWorkRequestCreated` cast.
}

View File

@ -1,83 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.taskManager
import no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg.CreateProcesserWorkTask
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.persistance.TasksManager
import no.iktdev.mediaprocessing.shared.common.persistance.isOfEvent
import no.iktdev.mediaprocessing.shared.common.persistance.isSuccess
import no.iktdev.mediaprocessing.shared.common.task.FfmpegTaskData
import no.iktdev.mediaprocessing.shared.common.task.TaskType
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.az
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkRequestCreated
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkerArgumentsCreated
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class CreateEncodeWorkTask(@Autowired override var coordinator: EventCoordinator) : CreateProcesserWorkTask(coordinator) {
    val log = KotlinLogging.logger {}

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventWorkEncodeCreated

    override val requiredEvents: List<KafkaEvents>
        get() = listOf(KafkaEvents.EventMediaParameterEncodeCreated)

    /**
     * Creates Encode tasks from the ffmpeg encode arguments event.
     *
     * For every ffmpeg work request derived from the argument event, a persisted
     * Encode task is created and the request is forwarded via onResult.
     *
     * @param event the event that triggered this task
     * @param events all persisted events for the same referenceId
     * @return always null; results are emitted through onResult per created task.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }

        if (events.lastOrNull { it.isOfEvent(KafkaEvents.EventMediaParameterEncodeCreated) }?.isSuccess() != true) {
            return null
        }
        // Bugfix: previously this only logged and then continued, ignoring the
        // gate enforced by isPermittedToCreateTasks (e.g. a manual process that has
        // not yet received its proceed event). Now it stops, as the message states.
        if (!isPermittedToCreateTasks(events)) {
            log.warn { "Cannot continue until permitted event is present" }
            return null
        }

        // The ffmpeg arguments live on the EventMediaParameterEncodeCreated event;
        // if we were triggered by a different event, swap to the argument event.
        val forwardEvent = if (event.event != KafkaEvents.EventMediaParameterEncodeCreated) {
            val sevent = events.findLast { it.event == KafkaEvents.EventMediaParameterEncodeCreated }
            if (sevent != null) {
                log.info { "${event.referenceId} ${event.event} is not of ${KafkaEvents.EventMediaParameterEncodeCreated}, swapping to found event" }
            } else {
                log.info { "${event.referenceId} ${event.event} is not of ${KafkaEvents.EventMediaParameterEncodeCreated}, could not find required event.." }
            }
            sevent ?: event
        } else event

        val batchEvents = createMessagesByArgs(forwardEvent)
        batchEvents.forEach { e ->
            val createdTask = if (e is FfmpegWorkRequestCreated) {
                FfmpegTaskData(
                    inputFile = e.inputFile,
                    outFile = e.outFile,
                    arguments = e.arguments
                ).let { task ->
                    // NOTE(review): unlike CreateExtractWorkTask, no eventId is passed
                    // here — presumably createTask supplies a default; confirm.
                    val status = taskManager.createTask(
                        referenceId = event.referenceId,
                        derivedFromEventId = event.eventId,
                        task = TaskType.Encode,
                        data = Gson().toJson(task))
                    if (!status) {
                        log.error { "Failed to create Encode task on ${forwardEvent.referenceId}@${forwardEvent.eventId}" }
                    }
                    status
                }
            } else false
            if (createdTask)
                onResult(e)
        }
        return null
    }
}

View File

@ -1,77 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.taskManager
import no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg.CreateProcesserWorkTask
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.persistance.isOfEvent
import no.iktdev.mediaprocessing.shared.common.persistance.isSuccess
import no.iktdev.mediaprocessing.shared.common.task.FfmpegTaskData
import no.iktdev.mediaprocessing.shared.common.task.TaskType
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.az
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkRequestCreated
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkerArgumentsCreated
import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.util.*
@Service
class CreateExtractWorkTask(@Autowired override var coordinator: EventCoordinator) : CreateProcesserWorkTask(coordinator) {
    val log = KotlinLogging.logger {}

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventWorkExtractCreated

    override val requiredEvents: List<KafkaEvents>
        get() = listOf(KafkaEvents.EventMediaParameterExtractCreated)

    /**
     * Creates Extract tasks from the ffmpeg extract arguments event.
     *
     * For every ffmpeg work request derived from the argument event, a persisted
     * Extract task is created and the request is forwarded via onResult.
     *
     * @param event the event that triggered this task
     * @param events all persisted events for the same referenceId
     * @return always null; results are emitted through onResult per created task.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }

        if (events.lastOrNull { it.isOfEvent(KafkaEvents.EventMediaParameterExtractCreated) }?.isSuccess() != true) {
            log.warn { "Last instance of ${KafkaEvents.EventMediaParameterExtractCreated} was unsuccessful or null. Skipping.." }
            return null
        }
        // Bugfix: previously this only logged and then continued, ignoring the
        // gate enforced by isPermittedToCreateTasks (e.g. a manual process that has
        // not yet received its proceed event). Now it stops, as the message states.
        if (!isPermittedToCreateTasks(events)) {
            log.warn { "Cannot continue until permitted event is present" }
            return null
        }

        // The ffmpeg arguments live on the EventMediaParameterExtractCreated event;
        // if we were triggered by a different event, swap to the argument event.
        val forwardEvent = if (event.event != KafkaEvents.EventMediaParameterExtractCreated) {
            val sevent = events.findLast { it.event == KafkaEvents.EventMediaParameterExtractCreated }
            if (sevent != null) {
                log.info { "${event.referenceId} ${event.event} is not of ${KafkaEvents.EventMediaParameterExtractCreated}, swapping to found event" }
            } else {
                log.info { "${event.referenceId} ${event.event} is not of ${KafkaEvents.EventMediaParameterExtractCreated}, could not find required event.." }
            }
            sevent ?: event
        } else event

        val batchEvents = createMessagesByArgs(forwardEvent)
        batchEvents.forEach { e ->
            val createdTask = if (e is FfmpegWorkRequestCreated) {
                FfmpegTaskData(
                    inputFile = e.inputFile,
                    outFile = e.outFile,
                    arguments = e.arguments
                ).let { task ->
                    val status = taskManager.createTask(referenceId = event.referenceId, eventId = UUID.randomUUID().toString(), derivedFromEventId = event.eventId, task = TaskType.Extract, data = Gson().toJson(task))
                    if (!status) {
                        log.error { "Failed to create Extract task on ${forwardEvent.referenceId}@${forwardEvent.eventId}" }
                    }
                    status
                }
            } else false
            if (createdTask)
                onResult(e)
        }
        return null
    }
}

View File

@ -1,90 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import kotlinx.coroutines.runBlocking
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.DownloadClient
import no.iktdev.mediaprocessing.shared.common.getComputername
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.CoverDownloadWorkPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.CoverInfoPerformed
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
import java.util.*
/**
 * Downloads the cover image described by a [CoverInfoPerformed] event and stores it
 * in the event's output directory, producing an [EventWorkDownloadCoverPerformed] result.
 *
 * Skips the download when the target file, or a cover in another supported image
 * format, already exists in the output directory.
 */
@Service
class DownloadAndStoreCoverTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    // Unique identity for this worker instance; only used in error messages.
    val serviceId = "${getComputername()}::${this.javaClass.simpleName}::${UUID.randomUUID()}"

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventWorkDownloadCoverPerformed

    override val requiredEvents: List<KafkaEvents>
        get() = listOf(
            KafkaEvents.EventMediaMetadataSearchPerformed,
            KafkaEvents.EventMediaReadOutCover,
            KafkaEvents.EventWorkEncodePerformed
        )

    override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
        return super.prerequisitesRequired(events) + listOf {
            isPrerequisiteDataPresent(events)
        }
    }

    /**
     * Resolves the cover URL/target from the EventMediaReadOutCover event and performs
     * the download, unless an existing cover makes the download unnecessary.
     *
     * @return ERROR message when prerequisites are missing or the download fails;
     *         otherwise a [CoverDownloadWorkPerformed] (COMPLETED, or SKIPPED when an
     *         existing file was reused).
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }

        // The cover event carries the URL, output directory and base file name.
        val cover = events.find { it.event == KafkaEvents.EventMediaReadOutCover }
        if (cover == null || cover.data !is CoverInfoPerformed) {
            return SimpleMessageData(Status.ERROR, "Wrong type triggered and caused an execution for $serviceId", event.eventId)
        }
        val coverData = cover.data as CoverInfoPerformed

        // The output directory must already exist; this task does not create it.
        val outDir = File(coverData.outDir)
        if (!outDir.exists())
            return SimpleMessageData(Status.ERROR, "Check for output directory for cover storage failed for $serviceId", event.eventId)

        val client = DownloadClient(coverData.url, File(coverData.outDir), coverData.outFileBaseName)
        // NOTE(review): getOutFile is suspending and blocked on here; presumably it derives
        // the target file name (and extension) from the URL/content type — confirm.
        val outFile = runBlocking {
            client.getOutFile()
        }
        // Covers already present in the directory with any extension the client recognizes.
        val coversInDifferentFormats = outDir.listFiles { it -> it.isFile && it.extension.lowercase() in client.contentTypeToExtension().values } ?: emptyArray()

        var message: String? = null
        var status = Status.COMPLETED
        // Resolution order: exact target exists -> reuse it (SKIPPED);
        // a cover in another format exists -> reuse a random one (SKIPPED);
        // otherwise download to the resolved target; null when no target could be resolved.
        val result = if (outFile?.exists() == true) {
            message = "${outFile.name} already exists"
            status = Status.SKIPPED
            outFile
        } else if (coversInDifferentFormats.isNotEmpty()) {
            status = Status.SKIPPED
            coversInDifferentFormats.random()
        } else if (outFile != null) {
            runBlocking {
                client.download(outFile)
            }
        } else {
            null
        }
        return if (result == null) {
            SimpleMessageData(Status.ERROR, "Could not download cover, check logs", event.eventId)
        } else {
            // Downgrade to ERROR if the resulting file is missing or unreadable,
            // but still report the path that was attempted.
            if (!result.exists() || !result.canRead()) {
                status = Status.ERROR
            }
            CoverDownloadWorkPerformed(status = status, message = message, coverFile = result.absolutePath, event.eventId)
        }
    }
}

View File

@ -1,67 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.parsing.NameHelper
import no.iktdev.mediaprocessing.shared.common.parsing.Regexes
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.BaseInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.CoverInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MetadataPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.VideoInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class MetadataAndBaseInfoToCoverTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventMediaReadOutCover

    override val requiredEvents: List<KafkaEvents> = listOf(
        KafkaEvents.EventMediaReadBaseInfoPerformed,
        KafkaEvents.EventMediaReadOutNameAndType,
        KafkaEvents.EventMediaMetadataSearchPerformed
    )

    override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
        return super.prerequisitesRequired(events) + listOf {
            isPrerequisiteDataPresent(events)
        }
    }

    /**
     * Combines base info, metadata and file-out info into a [CoverInfoPerformed]
     * describing where and under what name the cover should be stored.
     *
     * Title preference: metadata title, then video-info title, then base-info title;
     * the chosen title is sanitized before use as a file base name.
     *
     * @return a [CoverInfoPerformed], or null when required data or a cover URL is missing.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }

        // Bugfix: the base-info lookup previously used an unchecked cast
        // (`as BaseInfoPerformed`) that threw an NPE when no such event existed;
        // it now bails out with null like the metadata and file-out lookups below.
        val baseInfo = events.findLast { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed? ?: return null
        val meta = events.findLast { it.data is MetadataPerformed }?.data as MetadataPerformed? ?: return null
        val fileOut = events.findLast { it.data is VideoInfoPerformed }?.data as VideoInfoPerformed? ?: return null
        val videoInfo = fileOut.toValueObject()

        // Sanitize the title so it can be used as a file name.
        var coverTitle = meta.data?.title ?: videoInfo?.title ?: baseInfo.title
        coverTitle = Regexes.illegalCharacters.replace(coverTitle, " - ")
        coverTitle = Regexes.trimWhiteSpaces.replace(coverTitle, " ")

        val coverUrl = meta.data?.cover
        return if (coverUrl.isNullOrBlank()) {
            log.warn { "No cover available for ${baseInfo.title}" }
            null
        } else {
            CoverInfoPerformed(
                status = Status.COMPLETED,
                url = coverUrl,
                outFileBaseName = NameHelper.normalize(coverTitle),
                outDir = fileOut.outDirectory,
                derivedFromEventId = event.eventId
            )
        }
    }
}

View File

@ -1,199 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.JsonObject
import mu.KotlinLogging
import no.iktdev.exfl.using
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.SharedConfig
import no.iktdev.mediaprocessing.shared.common.datasource.toEpochSeconds
import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf
import no.iktdev.mediaprocessing.shared.common.parsing.FileNameDeterminate
import no.iktdev.mediaprocessing.shared.common.parsing.NameHelper
import no.iktdev.mediaprocessing.shared.common.parsing.Regexes
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEnv
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.*
import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.scheduling.annotation.EnableScheduling
import org.springframework.scheduling.annotation.Scheduled
import org.springframework.stereotype.Service
import java.io.FileFilter
import java.time.LocalDateTime
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.*
/**
 * Derives the output name, content type and output directory for a media file by
 * combining the base-info event with (optionally awaited) metadata search results.
 *
 * When base info is ready but metadata has not arrived yet, the referenceId is parked
 * in [waitingProcessesForMeta]; a scheduled sweep produces a timed-out
 * EventMediaMetadataSearchPerformed ERROR event once [metadataTimeout] has elapsed,
 * which re-triggers this task without metadata.
 */
@Service
@EnableScheduling
class MetadataAndBaseInfoToFileOut(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    // Timeout in seconds (configured value is in minutes).
    val metadataTimeout = KafkaEnv.metadataTimeoutMinutes * 60

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventMediaReadOutNameAndType

    // referenceId -> (eventId, time parked). NOTE(review): mutated from both the
    // event-processing path and the @Scheduled sweep; not synchronized — confirm
    // whether both run on the same thread, otherwise this needs a concurrent map.
    val waitingProcessesForMeta: MutableMap<String, MetadataTriggerData> = mutableMapOf()

    override val listensForEvents: List<KafkaEvents> = listOf(
        KafkaEvents.EventMediaReadBaseInfoPerformed,
        KafkaEvents.EventMediaMetadataSearchPerformed
    )

    /**
     * Produces a [VideoInfoPerformed] once both base info and metadata (or its timeout)
     * are available; returns null while still waiting or when output already exists.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }

        val baseInfo = events.lastOrSuccessOf(KafkaEvents.EventMediaReadBaseInfoPerformed) { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed? ?: return null
        val meta = events.lastOrSuccessOf(KafkaEvents.EventMediaMetadataSearchPerformed) { it.data is MetadataPerformed }?.data as MetadataPerformed?
        // Only Return here as both baseInfo events are required to continue
        if (!baseInfo.isSuccess() || !baseInfo.hasValidData() || events.any { it.event == KafkaEvents.EventMediaReadOutNameAndType }) {
            return null
        }

        // Base info is ready but metadata has not arrived: park this reference and
        // let the scheduled sweep time it out if metadata never shows up.
        if (baseInfo.isSuccess() && meta == null) {
            val estimatedTimeout = LocalDateTime.now().toEpochSeconds() + metadataTimeout
            val dateTime = LocalDateTime.ofEpochSecond(estimatedTimeout, 0, ZoneOffset.UTC)
            val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm", Locale.ENGLISH)
            log.info { "Sending ${baseInfo.title} to waiting queue. Expiry ${dateTime.format(formatter)}" }
            if (!waitingProcessesForMeta.containsKey(event.referenceId)) {
                waitingProcessesForMeta[event.referenceId] = MetadataTriggerData(event.eventId, LocalDateTime.now())
            }
            return null
        }
        if (!isPrerequisiteDataPresent(events)) {
            return null
        }
        // Metadata arrived (or timed out into an ERROR event): stop waiting.
        if (waitingProcessesForMeta.containsKey(event.referenceId)) {
            waitingProcessesForMeta.remove(event.referenceId)
        }

        val pm = ProcessMediaInfoAndMetadata(baseInfo, meta)
        val vi = pm.getVideoPayload()
        return if (vi != null) {
            VideoInfoPerformed(Status.COMPLETED, vi, outDirectory = pm.getOutputDirectory().absolutePath, event.eventId)
        } else {
            SimpleMessageData(Status.ERROR, "No VideoInfo found...", event.eventId)
        }
    }

    /**
     * Helper that resolves collection name, display title, content type and output
     * directory from base info plus optional metadata.
     */
    class ProcessMediaInfoAndMetadata(val baseInfo: BaseInfoPerformed, val metadata: MetadataPerformed? = null) {

        // Content type as claimed by the metadata provider; UNDEFINED when absent or unknown.
        var metadataDeterminedContentType: FileNameDeterminate.ContentType = metadata?.data?.type?.let { contentType ->
            when (contentType) {
                "serie", "tv" -> FileNameDeterminate.ContentType.SERIE
                "movie" -> FileNameDeterminate.ContentType.MOVIE
                else -> FileNameDeterminate.ContentType.UNDEFINED
            }
        } ?: FileNameDeterminate.ContentType.UNDEFINED

        // Primary title followed by any alternative titles from metadata.
        fun getTitlesFromMetadata(): List<String> {
            val titles: MutableList<String> = mutableListOf()
            metadata?.data?.title?.let { titles.add(it) }
            metadata?.data?.altTitle?.let { titles.addAll(it) }
            return titles
        }

        // Names of collection directories already present in the outgoing-content root.
        fun getExistingCollections() =
            SharedConfig.outgoingContent.listFiles(FileFilter { it.isDirectory })?.map { it.name } ?: emptyList()

        /**
         * Prefers a collection/title already used on disk, then a metadata title that
         * contains the base title, then any metadata title matching an existing
         * collection, falling back to the base-info title.
         */
        fun getAlreadyUsedForCollectionOrTitle(): String {
            val exisiting = getExistingCollections()
            val existingMatch = exisiting.find { it.contains(baseInfo.title) }
            if (existingMatch != null) {
                return existingMatch
            }
            val metaTitles = getTitlesFromMetadata()
            return metaTitles.firstOrNull { it.contains(baseInfo.title) }
                ?: (getTitlesFromMetadata().firstOrNull { it in exisiting } ?: getTitlesFromMetadata().firstOrNull()
                ?: baseInfo.title)
        }

        // Collection (parent directory) name, sanitized for filesystem use.
        fun getCollection(): String {
            val title = getAlreadyUsedForCollectionOrTitle()?: metadata?.data?.title ?: baseInfo.title
            var cleaned = Regexes.illegalCharacters.replace(title, " - ")
            cleaned = Regexes.trimWhiteSpaces.replace(cleaned, " ")
            return cleaned
        }

        // Display title, preferring metadata titles that match the base title, sanitized.
        fun getTitle(): String {
            val metaTitles = getTitlesFromMetadata()
            val metaTitle = metaTitles.filter { it.contains(baseInfo.title) || NameHelper.normalize(it).contains(baseInfo.title) }
            val title = metaTitle.firstOrNull() ?: metaTitles.firstOrNull() ?: baseInfo.title
            var cleaned = Regexes.illegalCharacters.replace(title, " - ")
            cleaned = Regexes.trimWhiteSpaces.replace(cleaned, " ")
            return cleaned
        }

        /**
         * Builds the video-info JSON payload. The file name is first classified without
         * a hint; only when both that classification and the metadata agree on MOVIE is
         * the title used as the file name, otherwise the sanitized name is kept with the
         * metadata-determined content type.
         */
        fun getVideoPayload(): JsonObject? {
            val defaultFnd = FileNameDeterminate(getTitle(), baseInfo.sanitizedName, FileNameDeterminate.ContentType.UNDEFINED)
            val determinedContentType = defaultFnd.getDeterminedVideoInfo().let { if (it is EpisodeInfo) FileNameDeterminate.ContentType.SERIE else if (it is MovieInfo) FileNameDeterminate.ContentType.MOVIE else FileNameDeterminate.ContentType.UNDEFINED }
            return if (determinedContentType == metadataDeterminedContentType && determinedContentType == FileNameDeterminate.ContentType.MOVIE) {
                FileNameDeterminate(getTitle(), getTitle(), FileNameDeterminate.ContentType.MOVIE).getDeterminedVideoInfo()?.toJsonObject()
            } else {
                FileNameDeterminate(getTitle(), baseInfo.sanitizedName, metadataDeterminedContentType).getDeterminedVideoInfo()?.toJsonObject()
            }
        }

        // Full output directory: outgoing-content root / normalized collection name.
        fun getOutputDirectory() = SharedConfig.outgoingContent.using(NameHelper.normalize(getCollection()))
    }

    // NOTE(review): appears unused within this class — candidate for removal.
    fun findNearestValue(list: List<String>, target: String): String? {
        return list.minByOrNull { it.distanceTo(target) }
    }

    // Levenshtein edit distance between this string and [other] (dynamic programming).
    fun String.distanceTo(other: String): Int {
        val distance = Array(length + 1) { IntArray(other.length + 1) }
        for (i in 0..length) {
            distance[i][0] = i
        }
        for (j in 0..other.length) {
            distance[0][j] = j
        }
        for (i in 1..length) {
            for (j in 1..other.length) {
                distance[i][j] = minOf(
                    distance[i - 1][j] + 1,
                    distance[i][j - 1] + 1,
                    distance[i - 1][j - 1] + if (this[i - 1] == other[j - 1]) 0 else 1
                )
            }
        }
        return distance[length][other.length]
    }

    /**
     * Scheduled sweep: any parked reference older than [metadataTimeout] gets a
     * synthetic ERROR metadata event so processing can continue without metadata.
     */
    //@Scheduled(fixedDelay = (60_000))
    // NOTE(review): 1s polling with the 60s variant commented out looks like a
    // debug leftover — confirm the intended interval before release.
    @Scheduled(fixedDelay = (1_000))
    fun sendErrorMessageForMetadata() {
        val expired = waitingProcessesForMeta.filter {
            LocalDateTime.now().toEpochSeconds() > (it.value.executed.toEpochSeconds() + metadataTimeout)
        }
        expired.forEach {
            log.info { "Producing timeout for ${it.key} ${LocalDateTime.now()}" }
            producer.sendMessage(it.key, KafkaEvents.EventMediaMetadataSearchPerformed, MetadataPerformed(status = Status.ERROR, "Timed Out by: ${this@MetadataAndBaseInfoToFileOut::class.simpleName}", derivedFromEventId = it.value.eventId))
            waitingProcessesForMeta.remove(it.key)
        }
    }

    // Bookkeeping for a parked reference: the triggering eventId and when it was parked.
    data class MetadataTriggerData(val eventId: String, val executed: LocalDateTime)
}

View File

@ -1,86 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.SubtitleStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoStream
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaStreamsParsePerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.ReaderPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class ParseVideoFileStreams(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventMediaParseStreamPerformed

    override val requiredEvents: List<KafkaEvents> = listOf(
        KafkaEvents.EventMediaReadStreamPerformed
    )

    override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
        return super.prerequisitesRequired(events) + listOf {
            isPrerequisiteDataPresent(events)
        }
    }

    /**
     * Picks the latest successful read-stream event and parses its ffprobe output
     * into typed stream lists.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }
        val sourceEvent = events.lastOrSuccessOf(KafkaEvents.EventMediaReadStreamPerformed) ?: return null
        return parseStreams(sourceEvent.data as ReaderPerformed, sourceEvent.eventId)
    }

    /**
     * Converts the raw ffprobe "streams" array into video/audio/subtitle stream lists.
     * Streams with codec_name "mjpeg" (embedded cover art) are ignored.
     *
     * @return [MediaStreamsParsePerformed] on success, or an ERROR message when
     *         the JSON cannot be interpreted.
     */
    fun parseStreams(data: ReaderPerformed, eventId: String): MessageDataWrapper {
        val gson = Gson()
        return try {
            val video = mutableListOf<VideoStream>()
            val audio = mutableListOf<AudioStream>()
            val subtitles = mutableListOf<SubtitleStream>()
            for (entry in data.output.getAsJsonArray("streams")) {
                val stream = entry.asJsonObject
                val codecType = stream.get("codec_type").asString
                // Skip mjpeg streams (typically attached pictures, not real media streams).
                if (stream.has("codec_name") && stream.get("codec_name").asString == "mjpeg") continue
                when (codecType) {
                    "video" -> video.add(gson.fromJson(stream, VideoStream::class.java))
                    "audio" -> audio.add(gson.fromJson(stream, AudioStream::class.java))
                    "subtitle" -> subtitles.add(gson.fromJson(stream, SubtitleStream::class.java))
                }
            }
            MediaStreamsParsePerformed(
                Status.COMPLETED,
                ParsedMediaStreams(
                    videoStream = video,
                    audioStream = audio,
                    subtitleStream = subtitles
                ),
                eventId
            )
        } catch (e: Exception) {
            e.printStackTrace()
            SimpleMessageData(Status.ERROR, message = e.message, eventId)
        }
    }
}

View File

@ -1,76 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event
import com.google.gson.Gson
import com.google.gson.JsonObject
import kotlinx.coroutines.runBlocking
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.SharedConfig
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.runner.CodeToOutput
import no.iktdev.mediaprocessing.shared.common.runner.getOutputUsing
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.ReaderPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class ReadVideoFileStreams(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    // Stream probing is only needed when one of these operations was requested.
    val requiredOperations = listOf(StartOperationEvents.ENCODE, StartOperationEvents.EXTRACT)

    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventMediaReadStreamPerformed

    override val requiredEvents: List<KafkaEvents> = listOf(
        KafkaEvents.EventMediaProcessStarted
    )

    override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
        return super.prerequisitesRequired(events) + listOf {
            isPrerequisiteDataPresent(events)
        }
    }

    /**
     * Runs ffprobe against the started media file when an encode or extract
     * operation was requested; returns null otherwise.
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }
        val startEvent = events.find { it.data is MediaProcessStarted } ?: return null
        val startData = startEvent.data as MediaProcessStarted
        if (startData.operations.none { it in requiredOperations }) {
            log.info { "${event.referenceId} does not contain a operation in ${requiredOperations.joinToString(",") { it.name }}" }
            return null
        }
        return runBlocking { fileReadStreams(startData, startEvent.eventId) }
    }

    /**
     * Probes the file's streams and wraps the JSON output in a [ReaderPerformed],
     * or an ERROR message when the path does not point to an existing file.
     */
    suspend fun fileReadStreams(started: MediaProcessStarted, eventId: String): MessageDataWrapper {
        val mediaFile = File(started.file)
        if (!mediaFile.exists() || !mediaFile.isFile) {
            return SimpleMessageData(Status.ERROR, "File in data is not a file or does not exist", eventId)
        }
        // ffprobe writes the JSON document across multiple output lines; rejoin before parsing.
        val probeJson = readStreams(mediaFile).output.joinToString(" ")
        val parsed = Gson().fromJson(probeJson, JsonObject::class.java)
        return ReaderPerformed(Status.COMPLETED, file = started.file, output = parsed, derivedFromEventId = eventId)
    }

    // Invokes ffprobe and captures its stdout.
    suspend fun readStreams(file: File): CodeToOutput = getOutputUsing(
        SharedConfig.ffprobe,
        "-v", "quiet", "-print_format", "json", "-show_streams", file.absolutePath
    )
}

View File

@ -1,63 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.contract.ProcessType
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkRequestCreated
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.FfmpegWorkerArgumentsCreated
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
/**
 * Base class for tasks that turn ffmpeg argument events into concrete work requests.
 */
abstract class CreateProcesserWorkTask(override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    private val log = KotlinLogging.logger {}

    /**
     * A task may be created once a start event exists. For MANUAL processes, an
     * additional [KafkaEvents.EventMediaWorkProceedPermitted] event is required.
     */
    open fun isPermittedToCreateTasks(events: List<PersistentMessage>): Boolean {
        val event = events.firstOrNull() ?: return false
        val started = events.findLast { it.event == KafkaEvents.EventMediaProcessStarted }?.data as MediaProcessStarted?
        if (started == null) {
            log.info { "${event.referenceId} couldn't find start event" }
            return false
        }
        if (started.type == ProcessType.MANUAL) {
            val proceed = events.find { it.event == KafkaEvents.EventMediaWorkProceedPermitted }
            if (proceed == null) {
                log.warn { "${event.referenceId} waiting for Proceed event due to Manual process" }
                return false
            }
            log.warn { "${event.referenceId} registered proceed permitted" }
        }
        return true
    }

    /**
     * Maps each ffmpeg argument entry on [event] to a [FfmpegWorkRequestCreated].
     *
     * @return one request per argument entry; empty when the payload is not a
     *         [FfmpegWorkerArgumentsCreated] or contains no entries.
     */
    fun createMessagesByArgs(event: PersistentMessage): List<MessageDataWrapper> {
        // Safe cast replaces the original is-check + cast + redundant null test.
        val args = event.data as? FfmpegWorkerArgumentsCreated
        if (args == null || args.entries.isEmpty()) {
            log.info { "${event.referenceId} ffargument is empty" }
            return emptyList()
        }
        return args.entries.map {
            FfmpegWorkRequestCreated(
                status = Status.COMPLETED,
                derivedFromEventId = event.eventId,
                inputFile = args.inputFile,
                arguments = it.arguments,
                outFile = it.outputFile
            )
        }.onEach {
            log.info { "${event.referenceId} creating work request based on ${it.derivedFromEventId}" }
        }
    }
}

View File

@ -1,231 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.exfl.using
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.shared.common.Preference
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.*
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.*
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class EncodeArgumentCreatorTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    // Encoding preferences (codec, language, thresholds) loaded once per task instance.
    val preference = Preference.getPreference()

    // Event emitted when encode arguments have been created.
    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventMediaParameterEncodeCreated

    // All of these events must be present before this task produces its result.
    override val requiredEvents: List<KafkaEvents> =
        listOf(
            KafkaEvents.EventMediaProcessStarted,
            KafkaEvents.EventMediaReadBaseInfoPerformed,
            KafkaEvents.EventMediaParseStreamPerformed,
            KafkaEvents.EventMediaReadOutNameAndType
        )

    override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
        return super.prerequisitesRequired(events) + listOf {
            isPrerequisiteDataPresent(events)
        }
    }

    /**
     * Collects the started / base-info / parsed-streams / out-name payloads and turns them
     * into ffmpeg encode arguments.
     *
     * Returns null when ENCODE was not among the requested operations or when stream or
     * out-name data is missing; otherwise the result of [getFfmpegVideoArguments].
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }
        // NOTE(review): unguarded cast — throws if no MediaProcessStarted payload exists.
        // requiredEvents presumably guarantees its presence, but `as?` would be safer.
        val started = events.find { it.data is MediaProcessStarted }?.data as MediaProcessStarted
        if (!started.operations.contains(StartOperationEvents.ENCODE)) {
            log.info { "Couldn't find operation event ${StartOperationEvents.ENCODE} in ${Gson().toJson(started.operations)}\n\tEncode Arguments will not be created" }
            return null
        }
        // NOTE(review): identical lookup to `started` above — the same payload fetched twice.
        val inputFile = events.find { it.data is MediaProcessStarted }?.data as MediaProcessStarted
        val baseInfo = events.findLast { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed
        val readStreamsEvent = events.find { it.data is MediaStreamsParsePerformed }?.data as MediaStreamsParsePerformed?
        val serializedParsedStreams = readStreamsEvent?.streams
        val videoInfoWrapper: VideoInfoPerformed? = events.findLast { it.data is VideoInfoPerformed }?.data as VideoInfoPerformed?
        val videoInfo = videoInfoWrapper?.toValueObject()
        if (serializedParsedStreams == null) {
            log.error { "Cant create encode arguments on a file without streams" }
            return null
        }
        if (videoInfoWrapper == null || videoInfo == null) {
            log.error { "${KafkaEvents.EventMediaReadOutNameAndType} result is read as null" }
            return null
        }
        //val outDir = SharedConfig.outgoingContent.using(baseInfo.title)
        return getFfmpegVideoArguments(
            inputFile = inputFile.file,
            outFullName = videoInfo.fullName,
            outDir = File(videoInfoWrapper.outDirectory),
            preference = preference.encodePreference,
            baseInfo = baseInfo,
            serializedParsedStreams = serializedParsedStreams,
            eventId = event.eventId
        )
    }

    /**
     * Builds the combined video + audio ffmpeg argument set for a single output file
     * ("<outFullName>.mp4" inside [outDir]).
     *
     * @return [FfmpegWorkerArgumentsCreated] on success, or an error [SimpleMessageData]
     *         when no arguments could be derived from the selected streams.
     */
    private fun getFfmpegVideoArguments(
        inputFile: String,
        outFullName: String,
        outDir: File,
        preference: EncodingPreference,
        baseInfo: BaseInfoPerformed,
        serializedParsedStreams: ParsedMediaStreams,
        eventId: String
    ): MessageDataWrapper {
        val outVideoFile = outDir.using("${outFullName}.mp4").absolutePath
        // Select which video and audio stream to operate on, then map each to its arguments.
        val vaas = VideoAndAudioSelector(serializedParsedStreams, preference)
        val vArg = vaas.getVideoStream()
            ?.let { VideoArguments(it, serializedParsedStreams, preference.video).getVideoArguments() }
        val aArg = vaas.getAudioStream()
            ?.let { AudioArguments(it, serializedParsedStreams, preference.audio).getAudioArguments() }
        val vaArgs = toFfmpegWorkerArguments(vArg, aArg)
        return if (vaArgs.isEmpty()) {
            SimpleMessageData(Status.ERROR, message = "Unable to produce arguments", derivedFromEventId = eventId)
        } else {
            FfmpegWorkerArgumentsCreated(
                status = Status.COMPLETED,
                inputFile = inputFile,
                entries = listOf(
                    FfmpegWorkerArgument(
                        outputFile = outVideoFile,
                        arguments = vaArgs
                    )
                ),
                derivedFromEventId = eventId
            )
        }
    }

    /**
     * Picks the default video and audio stream from the parsed media streams.
     * Preference order: the stream with the largest duration_ts; when no stream reports
     * a positive duration, the stream with the lowest index.
     */
    private class VideoAndAudioSelector(val mediaStreams: ParsedMediaStreams, val preference: EncodingPreference) {
        private var defaultVideoSelected: VideoStream? = mediaStreams.videoStream
            .filter { (it.duration_ts ?: 0) > 0 }
            .maxByOrNull { it.duration_ts ?: 0 } ?: mediaStreams.videoStream.minByOrNull { it.index }
        private var defaultAudioSelected: AudioStream? = mediaStreams.audioStream
            .filter { (it.duration_ts ?: 0) > 0 }
            .maxByOrNull { it.duration_ts ?: 0 } ?: mediaStreams.audioStream.minByOrNull { it.index }

        fun getVideoStream(): VideoStream? {
            return defaultVideoSelected
        }

        /**
         * Audio selection: prefer a stream in the preferred language with enough channels
         * and the preferred codec; otherwise the lowest-index stream in that language;
         * otherwise the default selected in the initializer above.
         */
        fun getAudioStream(): AudioStream? {
            val languageFiltered = mediaStreams.audioStream.filter { it.tags.language == preference.audio.language }
            val channeledAndCodec = languageFiltered.find {
                it.channels >= (preference.audio.channels ?: 2) && it.codec_name == preference.audio.codec.lowercase()
            }
            return channeledAndCodec ?: return languageFiltered.minByOrNull { it.index } ?: defaultAudioSelected
        }
    }

    /** Produces the ffmpeg video-codec arguments for one selected video stream. */
    private class VideoArguments(
        val videoStream: VideoStream,
        val allStreams: ParsedMediaStreams,
        val preference: VideoPreference
    ) {
        fun isVideoCodecEqual() = getCodec(videoStream.codec_name) == getCodec(preference.codec.lowercase())

        // Normalizes codec aliases to the ffmpeg encoder names (libx265 / libx264).
        protected fun getCodec(name: String): String {
            return when (name) {
                "hevc", "hevec", "h265", "h.265", "libx265"
                -> "libx265"
                "h.264", "h264", "libx264"
                -> "libx264"
                else -> name
            }
        }

        fun getVideoArguments(): VideoArgumentsDto {
            val optionalParams = mutableListOf<String>()
            // Convert pixel format unless the source format is whitelisted for passthrough.
            if (preference.pixelFormatPassthrough.none { it == videoStream.pix_fmt }) {
                optionalParams.addAll(listOf("-pix_fmt", preference.pixelFormat))
            }
            val codecParams = if (isVideoCodecEqual()) {
                // Same codec: stream-copy; libx265 content additionally gets the annexb bitstream filter.
                val default = mutableListOf("-c:v", "copy")
                if (getCodec(videoStream.codec_name) == "libx265") {
                    default.addAll(listOf("-vbsf", "hevc_mp4toannexb"))
                }
                default
            }
            else {
                // Re-encode with the preferred codec at the configured CRF threshold.
                optionalParams.addAll(listOf("-crf", preference.threshold.toString()))
                listOf("-c:v", getCodec(preference.codec.lowercase()))
            }
            return VideoArgumentsDto(
                index = allStreams.videoStream.indexOf(videoStream),
                codecParameters = codecParams,
                optionalParameters = optionalParams
            )
        }
    }

    /** Produces the ffmpeg audio-codec arguments for one selected audio stream. */
    class AudioArguments(
        val audioStream: AudioStream,
        val allStreams: ParsedMediaStreams,
        val preference: AudioPreference
    ) {
        fun isAudioCodecEqual() = audioStream.codec_name.lowercase() == preference.codec.lowercase()

        fun isSurroundButNotEAC3(): Boolean {
            return audioStream.channels > 2 && audioStream.codec_name.lowercase() != "eac3"
        }

        fun isSurroundAndEAC3(): Boolean {
            return audioStream.channels > 2 && audioStream.codec_name.lowercase() == "eac3"
        }

        fun isSurround(): Boolean {
            return audioStream.channels > 2
        }

        private fun shouldUseEAC3(): Boolean {
            return (preference.defaultToEAC3OnSurroundDetected && audioStream.channels > 2 && audioStream.codec_name.lowercase() != "eac3")
        }

        fun getAudioArguments(): AudioArgumentsDto {
            // NOTE(review): optionalParams is never populated in this method; kept only to fill the DTO.
            val optionalParams = mutableListOf<String>()
            val codecParams = if (isAudioCodecEqual() || isSurroundAndEAC3()) {
                listOf("-acodec", "copy")
            } else if (!isSurroundButNotEAC3() && shouldUseEAC3()) {
                // NOTE(review): this branch is unreachable — shouldUseEAC3() requires exactly the
                // condition (channels > 2 && codec != eac3) that !isSurroundButNotEAC3() negates.
                // The negation looks unintended; confirm before relying on EAC3 conversion.
                listOf("-c:a", "eac3")
            } else {
                // Re-encode with the preferred codec, downmixing to stereo unless channels are preserved.
                val codecSwap = mutableListOf("-c:a", preference.codec)
                if (audioStream.channels > 2 && !preference.preserveChannels) {
                    codecSwap.addAll(listOf("-ac", "2"))
                }
                codecSwap
            }
            return AudioArgumentsDto(
                index = allStreams.audioStream.indexOf(audioStream),
                codecParameters = codecParams,
                optionalParameters = optionalParams
            )
        }
    }
}

View File

@ -1,192 +0,0 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.exfl.using
import no.iktdev.mediaprocessing.coordinator.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.TaskCreator
import no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg.ExtractArgumentCreatorTask.SubtitleArguments.SubtitleType.*
import no.iktdev.mediaprocessing.shared.common.Preference
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.SubtitleArgumentsDto
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.SubtitleStream
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.SimpleMessageData
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.*
import no.iktdev.mediaprocessing.shared.kafka.dto.Status
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class ExtractArgumentCreatorTask(@Autowired override var coordinator: EventCoordinator) : TaskCreator(coordinator) {
    val log = KotlinLogging.logger {}

    // NOTE(review): preference is never referenced in this class — possibly kept for parity
    // with EncodeArgumentCreatorTask; confirm before removing.
    val preference = Preference.getPreference()

    // Event emitted when subtitle-extraction arguments have been created.
    override val producesEvent: KafkaEvents
        get() = KafkaEvents.EventMediaParameterExtractCreated

    // All of these events must be present before this task produces its result.
    override val requiredEvents: List<KafkaEvents> = listOf(
        KafkaEvents.EventMediaProcessStarted,
        KafkaEvents.EventMediaReadBaseInfoPerformed,
        KafkaEvents.EventMediaParseStreamPerformed,
        KafkaEvents.EventMediaReadOutNameAndType
    )

    override fun prerequisitesRequired(events: List<PersistentMessage>): List<() -> Boolean> {
        return super.prerequisitesRequired(events) + listOf {
            isPrerequisiteDataPresent(events)
        }
    }

    /**
     * Builds ffmpeg subtitle-extraction arguments from the accumulated events.
     *
     * Returns null when the triggering event is not one of [requiredEvents], when EXTRACT
     * was not among the requested operations, or when out-name data is missing; otherwise
     * the result of [getFfmpegSubtitleArguments].
     */
    override fun onProcessEvents(event: PersistentMessage, events: List<PersistentMessage>): MessageDataWrapper? {
        super.onProcessEventsAccepted(event, events)
        log.info { "${event.referenceId} triggered by ${event.event}" }
        if (!requiredEvents.contains(event.event)) {
            log.info { "Ignored ${event.event} @ ${event.eventId}" }
            return null
        }
        // NOTE(review): unguarded casts below (MediaProcessStarted, MediaStreamsParsePerformed)
        // throw when the payload is absent; requiredEvents presumably guarantees presence.
        val started = events.find { it.data is MediaProcessStarted }?.data as MediaProcessStarted
        if (!started.operations.contains(StartOperationEvents.EXTRACT)) {
            log.info { "Couldn't find operation event ${StartOperationEvents.EXTRACT} in ${Gson().toJson(started.operations)}\n\tExtract Arguments will not be created" }
            return null
        }
        // NOTE(review): identical lookup to `started` above — the same payload fetched twice.
        val inputFile = events.find { it.data is MediaProcessStarted }?.data as MediaProcessStarted
        val baseInfo = events.findLast { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed
        val readStreamsEvent = events.find { it.data is MediaStreamsParsePerformed }?.data as MediaStreamsParsePerformed
        val serializedParsedStreams = readStreamsEvent.streams
        val videoInfoWrapper: VideoInfoPerformed? = events.findLast { it.data is VideoInfoPerformed }?.data as VideoInfoPerformed?
        val videoInfo = videoInfoWrapper?.toValueObject()
        if (videoInfoWrapper == null || videoInfo == null) {
            log.error { "${KafkaEvents.EventMediaReadOutNameAndType} result is read as null" }
            return null
        }
        return getFfmpegSubtitleArguments(
            inputFile = inputFile.file,
            outFullName = videoInfo.fullName,
            outDir = File(videoInfoWrapper.outDirectory),
            baseInfo = baseInfo,
            serializedParsedStreams = serializedParsedStreams,
            eventId = event.eventId
        )
    }

    /**
     * Builds one [FfmpegWorkerArgument] per selected subtitle stream. Output path is
     * "<outDir>/sub/<language>/<outFullName>.<format>".
     *
     * @return [FfmpegWorkerArgumentsCreated] when at least one entry was produced,
     *         otherwise a SKIPPED [SimpleMessageData].
     */
    private fun getFfmpegSubtitleArguments(
        inputFile: String,
        outFullName: String,
        outDir: File,
        baseInfo: BaseInfoPerformed,
        serializedParsedStreams: ParsedMediaStreams,
        eventId: String
    ): MessageDataWrapper? {
        val subRootDir = outDir.using("sub")
        val sArg = SubtitleArguments(serializedParsedStreams.subtitleStream).getSubtitleArguments()
        val entries = sArg.map {
            FfmpegWorkerArgument(
                arguments = it.codecParameters + it.optionalParameters + listOf("-map", "0:s:${it.index}"),
                outputFile = subRootDir.using(it.language, "${outFullName}.${it.format}").absolutePath
            )
        }
        if (entries.isEmpty()) {
            return SimpleMessageData(status = Status.SKIPPED, "No entries found!", derivedFromEventId = eventId)
        }
        return FfmpegWorkerArgumentsCreated(
            status = Status.COMPLETED,
            inputFile = inputFile,
            entries = entries,
            derivedFromEventId = eventId
        )
    }

    /** Classifies subtitle streams by title keywords and picks the best stream per language. */
    private class SubtitleArguments(val subtitleStreams: List<SubtitleStream>) {
        /**
         * @property DEFAULT is default subtitle as dialog
         * @property CC is Closed-Captions
         * @property SHD is Hard of hearing
         * @property NON_DIALOGUE is for Signs or Song (as in lyrics)
         */
        private enum class SubtitleType {
            DEFAULT,
            CC,
            SHD,
            NON_DIALOGUE
        }

        private fun SubtitleStream.isCC(): Boolean {
            val title = this.tags.title?.lowercase() ?: return false
            val keywords = listOf("cc", "closed caption")
            return keywords.any { title.contains(it) }
        }

        private fun SubtitleStream.isSHD(): Boolean {
            val title = this.tags.title?.lowercase() ?: return false
            // NOTE(review): title is lowercased above, but "Hard-of-Hearing" / "Hard of Hearing"
            // contain uppercase letters and therefore can never match — likely should be lowercase.
            val keywords = listOf("shd", "hh", "Hard-of-Hearing", "Hard of Hearing")
            return keywords.any { title.contains(it) }
        }

        private fun SubtitleStream.isSignOrSong(): Boolean {
            val title = this.tags.title?.lowercase() ?: return false
            val keywords = listOf("song", "songs", "sign", "signs")
            return keywords.any { title.contains(it) }
        }

        // Classification precedence: NON_DIALOGUE, then SHD, then CC, else DEFAULT.
        private fun getSubtitleType(stream: SubtitleStream): SubtitleType {
            return if (stream.isSignOrSong())
                SubtitleType.NON_DIALOGUE
            else if (stream.isSHD()) {
                SubtitleType.SHD
            } else if (stream.isCC()) {
                SubtitleType.CC
            } else SubtitleType.DEFAULT
        }

        /**
         * Drops sign/song streams and streams with no extractable codec, then keeps — per
         * language (default "eng") — the single stream whose [SubtitleType] comes first in
         * declaration order (DEFAULT before CC before SHD).
         */
        fun getSubtitleArguments(): List<SubtitleArgumentsDto> {
            val acceptable = subtitleStreams.filter { !it.isSignOrSong() }
            val codecFiltered = acceptable.filter { getFormatToCodec(it.codec_name) != null }
            // NOTE(review): `it.first in SubtitleType.entries` is always true (every enum value
            // is in its entries) — this filter is a no-op.
            val mappedToType =
                codecFiltered.map { getSubtitleType(it) to it }.filter { it.first in SubtitleType.entries }
                    .groupBy { it.second.tags.language ?: "eng" }
                    .mapValues { entry ->
                        val languageStreams = entry.value
                        val sortedStreams = languageStreams.sortedBy { SubtitleType.entries.indexOf(it.first) }
                        sortedStreams.firstOrNull()?.second
                    }.mapNotNull { it.value }
            return mappedToType.mapNotNull { stream ->
                getFormatToCodec(stream.codec_name)?.let { format ->
                    SubtitleArgumentsDto(
                        // Index within the full subtitle-stream list, as required by "-map 0:s:<n>".
                        index = subtitleStreams.indexOf(stream),
                        language = stream.tags.language ?: "eng",
                        format = format
                    )
                }
            }
        }

        // Maps a subtitle codec name to an output file extension; null means "cannot extract".
        fun getFormatToCodec(codecName: String): String? {
            return when (codecName) {
                "ass" -> "ass"
                "subrip" -> "srt"
                "webvtt", "vtt" -> "vtt"
                "smi" -> "smi"
                "hdmv_pgs_subtitle" -> null  // explicitly unsupported here
                else -> null
            }
        }
    }
}

View File

@ -0,0 +1,29 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.implementations
import mu.KotlinLogging
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.ProcessType
import no.iktdev.mediaprocessing.shared.contract.data.Event
import no.iktdev.mediaprocessing.shared.contract.data.MediaProcessStartEvent
import no.iktdev.mediaprocessing.shared.contract.data.az
/**
 * Base class for listeners that create work tasks; provides the gate deciding
 * whether work may start for an incoming event.
 */
abstract class WorkTaskListener : CoordinatorEventListener() {
    private val log = KotlinLogging.logger {}

    /**
     * Work may start when the incoming event is an explicit proceed-permit, or when an
     * automatic (non-MANUAL) start event is present. MANUAL processes — and processes with
     * no start event at all — must wait for [Events.EventMediaWorkProceedPermitted].
     */
    fun canStart(incomingEvent: Event, events: List<Event>): Boolean {
        val autoStart = events.find { it.eventType == Events.EventMediaProcessStarted }
            ?.az<MediaProcessStartEvent>()?.data
        if (autoStart == null) {
            log.error { "Start event not found. Requiring permitt event" }
        }
        // An explicit permit always allows the task to start.
        if (incomingEvent.eventType == Events.EventMediaWorkProceedPermitted) {
            return true
        }
        // Fix: removed the redundant nested `return true` inside the original `return if`.
        if (autoStart == null || autoStart.type == ProcessType.MANUAL) {
            log.warn { "${incomingEvent.metadata.referenceId} waiting for Proceed event due to Manual process" }
            return false
        }
        return true
    }
}

View File

@ -0,0 +1,59 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.shared.common.parsing.FileNameParser
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.data.BaseInfo
import no.iktdev.mediaprocessing.shared.contract.data.BaseInfoEvent
import no.iktdev.mediaprocessing.shared.contract.data.Event
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class BaseInfoFromFileTaskListener() : CoordinatorEventListener() {
    @Autowired
    override var coordinator: Coordinator? = null
    val log = KotlinLogging.logger {}

    override val produceEvent: Events = Events.EventMediaReadBaseInfoPerformed
    override val listensForEvents: List<Events> = listOf(Events.EventMediaProcessStarted)

    /**
     * Derives [BaseInfo] from the started file's name and emits a Success event,
     * or a Failed event when parsing returns null or throws.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        val message = try {
            readFileInfo(incomingEvent.data as MediaProcessStarted, incomingEvent.metadata.eventId)?.let {
                BaseInfoEvent(metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success), data = it)
            } ?: BaseInfoEvent(metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed))
        } catch (e: Exception) {
            BaseInfoEvent(metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed))
        }
        onProduceEvent(message)
    }

    /**
     * Parses title and searchable names out of the started file's name.
     *
     * @throws Exception rethrown after logging when the file name cannot be parsed.
     */
    @Throws(Exception::class)
    fun readFileInfo(started: MediaProcessStarted, eventId: String): BaseInfo? {
        return try {
            val fileName = File(started.file).nameWithoutExtension
            val fileNameParser = FileNameParser(fileName)
            BaseInfo(
                title = fileNameParser.guessDesiredTitle(),
                sanitizedName = fileNameParser.guessDesiredFileName(),
                searchTitles = fileNameParser.guessSearchableTitle()
            )
        } catch (e: Exception) {
            // Fix: attach the throwable to the logger instead of e.printStackTrace(),
            // so the stack trace goes through the logging backend.
            log.error(e) { "Failed to read info from file\neventId: $eventId" }
            throw e
        }
    }
}

View File

@ -0,0 +1,84 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.coordinator.taskManager
import no.iktdev.mediaprocessing.coordinator.tasksV2.implementations.WorkTaskListener
import no.iktdev.mediaprocessing.shared.common.task.TaskType
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.contract.dto.isOnly
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class ConvertWorkTaskListener : WorkTaskListener() {
    val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventWorkConvertCreated
    override val listensForEvents: List<Events> = listOf(
        Events.EventWorkExtractPerformed
    )

    /**
     * Resolves the file to convert from the triggering event, registers a Convert task,
     * and emits a ConvertWorkCreatedEvent (Failed when the file cannot be resolved).
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        if (!canStart(incomingEvent, events)) {
            return
        }
        // NOTE(review): the EventMediaProcessStarted branch looks unreachable while
        // listensForEvents only registers EventWorkExtractPerformed — confirm intent.
        val sourcePath: String? = when (incomingEvent.eventType) {
            Events.EventWorkExtractPerformed ->
                incomingEvent.az<ExtractWorkPerformedEvent>()?.data?.outputFile
            Events.EventMediaProcessStarted -> {
                val startEvent = incomingEvent.az<MediaProcessStartEvent>()?.data
                if (startEvent?.operations?.isOnly(StartOperationEvents.CONVERT) == true) startEvent.file else null
            }
            else -> events.find { it.eventType == Events.EventWorkExtractPerformed }
                ?.az<ExtractWorkPerformedEvent>()?.data?.outputFile
        }

        val convertFile = sourcePath?.let { File(it) }
        if (convertFile == null || !convertFile.exists()) {
            onProduceEvent(
                ConvertWorkCreatedEvent(
                    metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
                )
            )
            return
        }

        val convertData = ConvertData(
            inputFile = convertFile.absolutePath,
            outputFileName = convertFile.nameWithoutExtension,
            outputDirectory = convertFile.parentFile.absolutePath,
            allowOverwrite = true
        )
        val created = taskManager.createTask(
            referenceId = incomingEvent.referenceId(),
            eventId = incomingEvent.eventId(),
            task = TaskType.Convert,
            derivedFromEventId = incomingEvent.eventId(),
            data = Gson().toJson(convertData),
            inputFile = convertFile.absolutePath
        )
        if (!created) {
            log.error { "Failed to create Convert task on ${incomingEvent.referenceId()}@${incomingEvent.eventId()}" }
            return
        }
        onProduceEvent(
            ConvertWorkCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = convertData
            )
        )
    }
}

View File

@ -0,0 +1,78 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import kotlinx.coroutines.runBlocking
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.shared.common.DownloadClient
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class CoverDownloadTaskListener : CoordinatorEventListener() {
    val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventWorkDownloadCoverPerformed
    override val listensForEvents: List<Events> = listOf(Events.EventMediaReadOutCover)

    /**
     * Downloads the cover referenced by the incoming [MediaCoverInfoReceivedEvent].
     * Emits Success with the stored file path; Failed when the payload is missing,
     * the output directory does not exist, or no readable cover could be obtained.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        val failedEventDefault = MediaCoverDownloadedEvent(
            metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
        )
        val data = incomingEvent.az<MediaCoverInfoReceivedEvent>()?.data
        if (data == null) {
            log.error { "No valid data for use to obtain cover" }
            onProduceEvent(failedEventDefault)
            return
        }
        val outDir = File(data.outDir)
        if (!outDir.exists()) {
            log.error { "Check for output directory for cover storage failed for ${incomingEvent.metadata.eventId} " }
            onProduceEvent(failedEventDefault)
            // Fix: previously fell through and attempted the download against a missing directory.
            return
        }
        val client = DownloadClient(data.url, File(data.outDir), data.outFileBaseName)
        val outFile = runBlocking {
            client.getOutFile()
        }
        // Accept an already-present cover in any extension the client recognizes.
        val coversInDifferentFormats = outDir.listFiles { file -> file.isFile && file.extension.lowercase() in client.contentTypeToExtension().values } ?: emptyArray()
        val result = when {
            outFile?.exists() == true -> outFile
            coversInDifferentFormats.isNotEmpty() -> coversInDifferentFormats.random()
            outFile != null -> runBlocking { client.download(outFile) }
            else -> null
        }
        if (result == null) {
            log.error { "Could not download cover, check logs ${incomingEvent.metadata.eventId} " }
            // Fix: emit the failure event so downstream listeners are not left waiting.
            onProduceEvent(failedEventDefault)
            return
        }
        if (!result.exists() || !result.canRead()) {
            onProduceEvent(failedEventDefault)
            return
        }
        onProduceEvent(
            MediaCoverDownloadedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = DownloadedCover(result.absolutePath)
            )
        )
    }
}

View File

@ -0,0 +1,57 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.shared.common.parsing.NameHelper
import no.iktdev.mediaprocessing.shared.common.parsing.Regexes
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class CoverFromMetadataTaskListener : CoordinatorEventListener() {
    val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventMediaReadOutCover
    override val listensForEvents: List<Events> = listOf(Events.EventMediaMetadataSearchPerformed)

    /**
     * Derives cover-download details (URL, normalized base name, output directory) from
     * metadata-search results; emits Skipped when no cover URL is available.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        val baseInfo = events.find { it.eventType == Events.EventMediaReadBaseInfoPerformed }
            ?.az<BaseInfoEvent>()?.data ?: return
        val metadata = events.findLast { it.eventType == Events.EventMediaMetadataSearchPerformed }
            ?.az<MediaMetadataReceivedEvent>()?.data ?: return
        val mediaOutInfo = events.find { it.eventType == Events.EventMediaReadOutNameAndType }
            ?.az<MediaOutInformationConstructedEvent>()?.data ?: return

        val videoInfo = mediaOutInfo.toValueObject()

        // Prefer the metadata title, falling back to parsed video info, then base info,
        // and sanitize it for filesystem use.
        val rawTitle = metadata.title ?: videoInfo?.title ?: baseInfo.title
        val coverTitle = rawTitle
            .let { Regexes.illegalCharacters.replace(it, " - ") }
            .let { Regexes.trimWhiteSpaces.replace(it, " ") }

        val coverUrl = metadata.cover
        val outcome = if (coverUrl.isNullOrBlank()) {
            log.warn { "No cover available for ${baseInfo.title}" }
            MediaCoverInfoReceivedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Skipped)
            )
        } else {
            MediaCoverInfoReceivedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = CoverDetails(
                    url = coverUrl,
                    outFileBaseName = NameHelper.normalize(coverTitle),
                    outDir = mediaOutInfo.outDirectory,
                )
            )
        }
        onProduceEvent(outcome)
    }
}

View File

@ -0,0 +1,71 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.EncodeWorkArgumentsMapping
import no.iktdev.mediaprocessing.shared.common.Preference
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class EncodeWorkArgumentsTaskListener : CoordinatorEventListener() {
    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventMediaParameterEncodeCreated
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaParseStreamPerformed,
        Events.EventMediaReadOutNameAndType
    )

    val preference = Preference.getPreference()

    /**
     * Builds encode arguments once both parsed streams and out-name information exist.
     * Silently returns when ENCODE was not requested or any prerequisite is missing;
     * otherwise emits an EncodeArgumentCreatedEvent (Failed when mapping yields nothing).
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        val startedWrapper = events.find { it.eventType == Events.EventMediaProcessStarted }
            ?.az<MediaProcessStartEvent>() ?: return
        // Only act when the started process actually requested an encode operation.
        if (startedWrapper.data == null || startedWrapper.data?.operations?.contains(StartOperationEvents.ENCODE) == false) {
            return
        }
        val parsedStreams = events.find { it.eventType == Events.EventMediaParseStreamPerformed }
            ?.az<MediaFileStreamsParsedEvent>()?.data ?: return
        val outInfoEvent = events.find { it.eventType == Events.EventMediaReadOutNameAndType }
            ?.az<MediaOutInformationConstructedEvent>() ?: return
        val outInfo = outInfoEvent.data ?: return
        val videoInfo = outInfo.toValueObject() ?: return
        val sourceFile = startedWrapper.data?.file ?: return
        val outDirectory = outInfo.outDirectory?.let { File(it) } ?: return

        val arguments = EncodeWorkArgumentsMapping(
            inputFile = sourceFile,
            outFileFullName = videoInfo.fullName,
            outFileAbsolutePathFile = outDirectory,
            streams = parsedStreams,
            preference = preference.encodePreference
        ).getArguments()

        val outcome = if (arguments == null) {
            EncodeArgumentCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
            )
        } else {
            EncodeArgumentCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = arguments
            )
        }
        onProduceEvent(outcome)
    }
}

View File

@ -0,0 +1,49 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.tasksV2.implementations.WorkTaskListener
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class EncodeWorkTaskListener : WorkTaskListener() {
    private val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventWorkEncodeCreated
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaParameterEncodeCreated,
        Events.EventMediaWorkProceedPermitted
    )

    /**
     * Emits an EncodeWorkCreatedEvent carrying the previously created encode arguments,
     * taken from the triggering event when possible, otherwise from event history.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        if (!canStart(incomingEvent, events)) {
            return
        }
        val argumentSource = when (incomingEvent.eventType) {
            Events.EventMediaParameterEncodeCreated -> incomingEvent
            else -> events.find { it.eventType == Events.EventMediaParameterEncodeCreated }
        }
        val encodeArguments = argumentSource?.az<EncodeArgumentCreatedEvent>()?.data
        if (encodeArguments == null) {
            log.error { "No Encode arguments found.. referenceId: ${incomingEvent.referenceId()}" }
            return
        }
        onProduceEvent(
            EncodeWorkCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = encodeArguments
            )
        )
    }
}

View File

@ -0,0 +1,67 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.ExtractWorkArgumentsMapping
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class ExtractWorkArgumentsTaskListener: CoordinatorEventListener() {
    val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    // Produces the extract-parameter event consumed by the extract work chain.
    override val produceEvent: Events = Events.EventMediaParameterExtractCreated
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaParseStreamPerformed,
        Events.EventMediaReadOutNameAndType
    )

    /**
     * Builds subtitle-extraction arguments once both stream parsing and output
     * naming/typing have completed for a process. Emits Skipped when the mapper
     * finds no extractable streams, Success otherwise; returns silently when a
     * prerequisite event is missing or EXTRACT was not requested.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        // A process-started event must exist and must request the EXTRACT operation.
        val started = events.find { it.eventType == Events.EventMediaProcessStarted }?.az<MediaProcessStartEvent>() ?: return
        if (started.data == null || started.data?.operations?.contains(StartOperationEvents.EXTRACT) == false) {
            return
        }
        // Parsed streams produced earlier in the chain.
        val streams = events.find { it.eventType == Events.EventMediaParseStreamPerformed }?.az<MediaFileStreamsParsedEvent>()?.data
        if (streams == null) {
            return
        }
        // Resolved output name/type for the media item.
        val mediaInfo = events.find { it.eventType == Events.EventMediaReadOutNameAndType }?.az<MediaOutInformationConstructedEvent>()
        if (mediaInfo?.data == null) {
            return
        }
        val mediaInfoData = mediaInfo.data?.toValueObject() ?: return
        val inputFile = started.data?.file ?: return
        val mapper = ExtractWorkArgumentsMapping(
            inputFile = inputFile,
            outFileFullName = mediaInfoData.fullName,
            outFileAbsolutePathFile = mediaInfo.data?.outDirectory?.let { File(it) } ?: return,
            streams = streams
        )
        val result = mapper.getArguments()
        if (result.isEmpty()) {
            // No extractable subtitle streams — mark Skipped so the chain can proceed.
            onProduceEvent(ExtractArgumentCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Skipped)
            ))
        } else {
            onProduceEvent(ExtractArgumentCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = result
            ))
        }
    }
}

View File

@ -0,0 +1,57 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.tasksV2.implementations.WorkTaskListener
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class ExtractWorkTaskListener: WorkTaskListener() {
    private val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    // FIX: this listener creates EXTRACT work but previously declared the
    // ENCODE events (copy-paste from EncodeWorkTaskListener). The body searches
    // for EventMediaParameterExtractCreated, which it never subscribed to, so
    // extract work could never actually be produced.
    override val produceEvent: Events = Events.EventWorkExtractCreated
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaParameterExtractCreated,
        Events.EventMediaWorkProceedPermitted
    )

    /**
     * Emits one ExtractWorkCreatedEvent per extract argument set once the
     * process is permitted to start. Produces a Failed event when the argument
     * list is empty, and logs + returns when no arguments are found at all.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        if (!canStart(incomingEvent, events)) {
            return
        }
        // Prefer arguments from the incoming event itself; fall back to history.
        val arguments = if (incomingEvent.eventType == Events.EventMediaParameterExtractCreated) {
            incomingEvent.az<ExtractArgumentCreatedEvent>()?.data
        } else {
            events.find { it.eventType == Events.EventMediaParameterExtractCreated }
                ?.az<ExtractArgumentCreatedEvent>()?.data
        }
        if (arguments == null) {
            log.error { "No Extract arguments found.. referenceId: ${incomingEvent.referenceId()}" }
            return
        }
        if (arguments.isEmpty()) {
            // FIX: the Failed event was previously constructed but never
            // produced, leaving the process without a terminal signal.
            onProduceEvent(
                ExtractWorkCreatedEvent(
                    metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
                )
            )
            return
        }
        // map (not mapNotNull): the lambda can never yield null.
        arguments.map {
            ExtractWorkCreatedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = it
            )
        }.forEach { event ->
            onProduceEvent(event)
        }
    }
}

View File

@ -0,0 +1,154 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import com.google.gson.JsonObject
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.exfl.using
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.coordinator.utils.log
import no.iktdev.mediaprocessing.shared.common.SharedConfig
import no.iktdev.mediaprocessing.shared.common.parsing.FileNameDeterminate
import no.iktdev.mediaprocessing.shared.common.parsing.NameHelper
import no.iktdev.mediaprocessing.shared.common.parsing.Regexes
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.*
import no.iktdev.mediaprocessing.shared.contract.data.EpisodeInfo
import no.iktdev.mediaprocessing.shared.contract.data.MovieInfo
import no.iktdev.mediaprocessing.shared.contract.data.pyMetadata
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.FileFilter
@Service
class MediaOutInformationTaskListener: CoordinatorEventListener() {
    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventMediaReadOutNameAndType
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaMetadataSearchPerformed
    )

    /**
     * Combines previously-read base file info with the (optional) metadata
     * search result to determine the output title, collection and directory,
     * then emits MediaOutInformationConstructedEvent — Failed when base info
     * is missing or no video payload could be determined.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        val metadataResult = incomingEvent.az<MediaMetadataReceivedEvent>()
        // Base info must have been produced earlier in the chain; use the latest one.
        val mediaBaseInfo = events.findLast { it.eventType == Events.EventMediaReadBaseInfoPerformed }?.az<BaseInfoEvent>()?.data
        if (mediaBaseInfo == null) {
            log.error { "Required event ${Events.EventMediaReadBaseInfoPerformed} is not present" }
            coordinator?.produceNewEvent(
                MediaOutInformationConstructedEvent(
                    metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
                )
            )
            return
        }
        val pm = ProcessMediaInfoAndMetadata(mediaBaseInfo, metadataResult?.data)
        val vi = pm.getVideoPayload()
        val result = if (vi != null) {
            MediaInfoReceived(
                outDirectory = pm.getOutputDirectory().absolutePath,
                info = vi
            ).let { MediaOutInformationConstructedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = it
            ) }
        } else {
            MediaOutInformationConstructedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
            )
        }
        onProduceEvent(result)
    }

    // Resolves output naming (title, collection, content type) from base file
    // info plus an optional pyMetadata search result.
    class ProcessMediaInfoAndMetadata(val baseInfo: BaseInfo, val metadata: pyMetadata? = null) {
        // Content type as claimed by the metadata provider; UNDEFINED when absent or unrecognized.
        var metadataDeterminedContentType: FileNameDeterminate.ContentType = metadata?.type?.let { contentType ->
            when (contentType) {
                "serie", "tv" -> FileNameDeterminate.ContentType.SERIE
                "movie" -> FileNameDeterminate.ContentType.MOVIE
                else -> FileNameDeterminate.ContentType.UNDEFINED
            }
        } ?: FileNameDeterminate.ContentType.UNDEFINED

        // Primary metadata title followed by any alternative titles, in that order.
        fun getTitlesFromMetadata(): List<String> {
            val titles: MutableList<String> = mutableListOf()
            metadata?.title?.let { titles.add(it) }
            metadata?.altTitle?.let { titles.addAll(it) }
            return titles
        }

        // Names of collection directories already present in the outgoing content folder.
        fun getExistingCollections() =
            SharedConfig.outgoingContent.listFiles(FileFilter { it.isDirectory })?.map { it.name } ?: emptyList()

        /**
         * Preference order: an on-disk collection containing the base title,
         * then a metadata title containing the base title, then any metadata
         * title matching an existing collection, then the first metadata
         * title, and finally the base title itself.
         */
        fun getAlreadyUsedForCollectionOrTitle(): String {
            val exisiting = getExistingCollections()
            val existingMatch = exisiting.find { it.contains(baseInfo.title) }
            if (existingMatch != null) {
                return existingMatch
            }
            val metaTitles = getTitlesFromMetadata()
            return metaTitles.firstOrNull { it.contains(baseInfo.title) }
                ?: (getTitlesFromMetadata().firstOrNull { it in exisiting } ?: getTitlesFromMetadata().firstOrNull()
                ?: baseInfo.title)
        }

        // Collection name with illegal characters replaced and whitespace collapsed.
        fun getCollection(): String {
            val title = getAlreadyUsedForCollectionOrTitle()?: metadata?.title ?: baseInfo.title
            var cleaned = Regexes.illegalCharacters.replace(title, " - ")
            cleaned = Regexes.trimWhiteSpaces.replace(cleaned, " ")
            return cleaned
        }

        // Title: prefer a metadata title that contains (possibly normalized) the base title.
        fun getTitle(): String {
            val metaTitles = getTitlesFromMetadata()
            val metaTitle = metaTitles.filter { it.contains(baseInfo.title) || NameHelper.normalize(it).contains(baseInfo.title) }
            val title = metaTitle.firstOrNull() ?: metaTitles.firstOrNull() ?: baseInfo.title
            var cleaned = Regexes.illegalCharacters.replace(title, " - ")
            cleaned = Regexes.trimWhiteSpaces.replace(cleaned, " ")
            return cleaned
        }

        /**
         * Determines the video info payload. When both the name-based guess and
         * the metadata agree on MOVIE, the file name is replaced by the title;
         * otherwise the sanitized name is kept and the metadata-determined
         * content type is applied. Returns null when nothing could be determined.
         */
        fun getVideoPayload(): JsonObject? {
            val defaultFnd = FileNameDeterminate(getTitle(), baseInfo.sanitizedName, FileNameDeterminate.ContentType.UNDEFINED)
            val determinedContentType = defaultFnd.getDeterminedVideoInfo().let { if (it is EpisodeInfo) FileNameDeterminate.ContentType.SERIE else if (it is MovieInfo) FileNameDeterminate.ContentType.MOVIE else FileNameDeterminate.ContentType.UNDEFINED }
            return if (determinedContentType == metadataDeterminedContentType && determinedContentType == FileNameDeterminate.ContentType.MOVIE) {
                FileNameDeterminate(getTitle(), getTitle(), FileNameDeterminate.ContentType.MOVIE).getDeterminedVideoInfo()?.toJsonObject()
            } else {
                FileNameDeterminate(getTitle(), baseInfo.sanitizedName, metadataDeterminedContentType).getDeterminedVideoInfo()?.toJsonObject()
            }
        }

        // Outgoing directory named after the normalized collection.
        fun getOutputDirectory() = SharedConfig.outgoingContent.using(NameHelper.normalize(getCollection()))
    }

    // Closest string by edit distance. NOTE(review): appears unreferenced
    // within this file — confirm external callers before removing.
    fun findNearestValue(list: List<String>, target: String): String? {
        return list.minByOrNull { it.distanceTo(target) }
    }

    // Standard dynamic-programming Levenshtein edit distance between two strings.
    fun String.distanceTo(other: String): Int {
        val distance = Array(length + 1) { IntArray(other.length + 1) }
        for (i in 0..length) {
            distance[i][0] = i
        }
        for (j in 0..other.length) {
            distance[0][j] = j
        }
        for (i in 1..length) {
            for (j in 1..other.length) {
                distance[i][j] = minOf(
                    distance[i - 1][j] + 1,
                    distance[i][j - 1] + 1,
                    distance[i - 1][j - 1] + if (this[i - 1] == other[j - 1]) 0 else 1
                )
            }
        }
        return distance[length][other.length]
    }
}

View File

@ -0,0 +1,94 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import java.time.LocalDateTime
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.*
import java.util.concurrent.ConcurrentHashMap
import mu.KotlinLogging
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.eventi.data.EventStatus
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.shared.common.datasource.toEpochSeconds
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.data.BaseInfoEvent
import no.iktdev.mediaprocessing.shared.contract.data.Event
import no.iktdev.mediaprocessing.shared.contract.data.MediaMetadataReceivedEvent
import no.iktdev.mediaprocessing.shared.contract.data.az
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEnv
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.scheduling.annotation.EnableScheduling
import org.springframework.scheduling.annotation.Scheduled
import org.springframework.stereotype.Service
@Service
@EnableScheduling
class MetadataWaitOrDefaultTaskListener() : CoordinatorEventListener() {
    @Autowired
    override var coordinator: Coordinator? = null
    val log = KotlinLogging.logger {}

    override val produceEvent: Events = Events.EventMediaMetadataSearchPerformed
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaReadBaseInfoPerformed,
        Events.EventMediaMetadataSearchPerformed
    )

    // Maximum time (seconds) a process may wait for a metadata search result.
    val metadataTimeout = KafkaEnv.metadataTimeoutMinutes * 60

    // FIX: ConcurrentHashMap instead of mutableMapOf(). This map is written by
    // the event dispatch path (onEventsReceived) and read/mutated by the Spring
    // scheduler thread (sendErrorMessageForMetadata), so it must be thread-safe.
    val waitingProcessesForMeta: MutableMap<String, MetadataTriggerData> = ConcurrentHashMap()

    /**
     * Registers a process in the waiting queue when base info arrives before
     * any metadata result, and removes it again once a metadata search result
     * is observed for that referenceId.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        if (incomingEvent.eventType == Events.EventMediaReadBaseInfoPerformed &&
            events.none { it.eventType == Events.EventMediaMetadataSearchPerformed }) {
            val baseInfo = incomingEvent.az<BaseInfoEvent>()?.data
            if (baseInfo == null) {
                log.error { "BaseInfoEvent is null for referenceId: ${incomingEvent.metadata.referenceId} on eventId: ${incomingEvent.metadata.eventId}" }
                return
            }
            val estimatedTimeout = LocalDateTime.now().toEpochSeconds() + metadataTimeout
            val dateTime = LocalDateTime.ofEpochSecond(estimatedTimeout, 0, ZoneOffset.UTC)
            val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm", Locale.ENGLISH)
            log.info { "Sending ${baseInfo.title} to waiting queue. Expiry ${dateTime.format(formatter)}" }
            // putIfAbsent: never reset the clock for a process already waiting.
            waitingProcessesForMeta.putIfAbsent(
                incomingEvent.metadata.referenceId,
                MetadataTriggerData(incomingEvent.metadata.eventId, LocalDateTime.now())
            )
        }
        if (incomingEvent.eventType == Events.EventMediaMetadataSearchPerformed) {
            // Metadata arrived in time; no timeout event needed. remove() is a
            // no-op when the key is absent, so no containsKey check is required.
            waitingProcessesForMeta.remove(incomingEvent.metadata.referenceId)
        }
    }

    /**
     * Scheduled sweep (every second): produces a Skipped metadata event for
     * every process whose wait exceeded [metadataTimeout], so the pipeline can
     * continue without metadata.
     */
    @Scheduled(fixedDelay = (1_000))
    fun sendErrorMessageForMetadata() {
        // filter() copies entries into a new map, so removing from the source
        // map inside the loop below is safe.
        val expired = waitingProcessesForMeta.filter {
            LocalDateTime.now().toEpochSeconds() > (it.value.executed.toEpochSeconds() + metadataTimeout)
        }
        expired.forEach {
            log.info { "Producing timeout for ${it.key} ${LocalDateTime.now()}" }
            coordinator?.produceNewEvent(
                MediaMetadataReceivedEvent(
                    metadata = EventMetadata(
                        referenceId = it.key,
                        derivedFromEventId = it.value.eventId,
                        status = EventStatus.Skipped
                    )
                )
            )
            waitingProcessesForMeta.remove(it.key)
        }
    }

    // Bookkeeping for a process awaiting metadata: the triggering eventId and
    // when the wait started.
    data class MetadataTriggerData(val eventId: String, val executed: LocalDateTime)
}

View File

@ -0,0 +1,94 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import com.google.gson.Gson
import com.google.gson.JsonObject
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.data.dataAs
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.Event
import no.iktdev.mediaprocessing.shared.contract.data.MediaFileStreamsParsedEvent
import no.iktdev.mediaprocessing.shared.contract.data.MediaFileStreamsReadEvent
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.SubtitleStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoStream
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class ParseMediaFileStreamsTaskListener() : CoordinatorEventListener() {
    val log = KotlinLogging.logger {}

    @Autowired
    override var coordinator: Coordinator? = null

    override val produceEvent: Events = Events.EventMediaParseStreamPerformed
    override val listensForEvents: List<Events> = listOf(
        Events.EventMediaReadStreamPerformed
    )

    /**
     * Parses the raw ffprobe json (produced by the read-streams listener) into
     * typed video/audio/subtitle stream lists and emits the result — Success
     * with data, or Failed when parsing throws.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        // NOTE(review): sibling listeners use az<EventType>()?.data here;
        // dataAs<MediaFileStreamsReadEvent>()?.data is kept as-is — confirm it
        // resolves to the same json payload.
        val readData = incomingEvent.dataAs<MediaFileStreamsReadEvent>()?.data
        val result = try {
            MediaFileStreamsParsedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                data = parseStreams(readData)
            )
        } catch (e: Exception) {
            e.printStackTrace()
            MediaFileStreamsParsedEvent(
                metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
            )
        }
        // FIX: the parsed result was previously constructed but never emitted,
        // so downstream listeners waiting for EventMediaParseStreamPerformed
        // would stall indefinitely.
        onProduceEvent(result)
    }

    /**
     * Converts the ffprobe "streams" json array into ParsedMediaStreams.
     * mjpeg streams (embedded cover art) are skipped entirely.
     * @throws Exception when data is null or not the expected json shape.
     */
    fun parseStreams(data: JsonObject?): ParsedMediaStreams {
        val gson = Gson()
        return try {
            val jStreams = data!!.getAsJsonArray("streams")
            val videoStreams = mutableListOf<VideoStream>()
            val audioStreams = mutableListOf<AudioStream>()
            val subtitleStreams = mutableListOf<SubtitleStream>()
            jStreams.forEach { streamJson ->
                val streamObject = streamJson.asJsonObject
                val codecType = streamObject.get("codec_type").asString
                // Skip mjpeg streams; otherwise dispatch on codec_type.
                if (streamObject.has("codec_name") && streamObject.get("codec_name").asString == "mjpeg") {
                } else {
                    when (codecType) {
                        "video" -> videoStreams.add(gson.fromJson(streamObject, VideoStream::class.java))
                        "audio" -> audioStreams.add(gson.fromJson(streamObject, AudioStream::class.java))
                        "subtitle" -> subtitleStreams.add(gson.fromJson(streamObject, SubtitleStream::class.java))
                    }
                }
            }
            ParsedMediaStreams(
                videoStream = videoStreams,
                audioStream = audioStreams,
                subtitleStream = subtitleStreams
            )
        } catch (e: Exception) {
            "Failed to parse data, its either not a valid json structure or expected and required fields are not present.".also {
                log.error { it }
            }
            throw e
        }
    }
}

View File

@ -0,0 +1,85 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.listeners
import com.google.gson.Gson
import com.google.gson.JsonObject
import kotlinx.coroutines.runBlocking
import mu.KotlinLogging
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.data.dataAs
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.mediaprocessing.coordinator.Coordinator
import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
import no.iktdev.mediaprocessing.shared.common.SharedConfig
import no.iktdev.mediaprocessing.shared.common.runner.CodeToOutput
import no.iktdev.mediaprocessing.shared.common.runner.getOutputUsing
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.EventsListenerContract
import no.iktdev.mediaprocessing.shared.contract.EventsManagerContract
import no.iktdev.mediaprocessing.shared.contract.data.Event
import no.iktdev.mediaprocessing.shared.contract.data.MediaFileStreamsReadEvent
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import java.io.File
@Service
class ReadMediaFileStreamsTaskListener() : CoordinatorEventListener() {
    @Autowired
    override var coordinator: Coordinator? = null
    val log = KotlinLogging.logger {}

    /** Operations whose presence requires probing the media file's streams. */
    val requiredOperations = listOf(StartOperationEvents.ENCODE, StartOperationEvents.EXTRACT)

    override val produceEvent: Events = Events.EventMediaReadStreamPerformed
    override val listensForEvents: List<Events> = listOf(Events.EventMediaProcessStarted)

    /**
     * Runs ffprobe against the started file and emits the raw stream json —
     * Success with the parsed JsonObject, or Failed when probing throws.
     */
    override fun onEventsReceived(incomingEvent: Event, events: List<Event>) {
        val startEvent = incomingEvent.dataAs<MediaProcessStarted>() ?: return
        val needsProbe = startEvent.operations.any { operation -> operation in requiredOperations }
        if (!needsProbe) {
            log.info { "${incomingEvent.metadata.referenceId} does not contain a operation in ${requiredOperations.joinToString(",") { it.name }}" }
            return
        }
        val produced = runBlocking {
            try {
                MediaFileStreamsReadEvent(
                    metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Success),
                    data = fileReadStreams(startEvent, incomingEvent.metadata.eventId)
                )
            } catch (e: Exception) {
                e.printStackTrace()
                MediaFileStreamsReadEvent(
                    metadata = incomingEvent.makeDerivedEventInfo(EventStatus.Failed)
                )
            }
        }
        onProduceEvent(produced)
    }

    /**
     * Probes the started file and returns ffprobe's output as a JsonObject.
     * @throws RuntimeException when the path does not point to an existing file.
     */
    suspend fun fileReadStreams(started: MediaProcessStarted, eventId: String): JsonObject? {
        val candidate = File(started.file)
        if (!candidate.exists() || !candidate.isFile) {
            val message = "File in data is not a file or does not exist".also {
                log.error { it }
            }
            throw RuntimeException(message)
        }
        val probe = readStreams(candidate)
        return Gson().fromJson(probe.output.joinToString(" "), JsonObject::class.java)
    }

    // Invokes ffprobe with json stream output for the given file.
    suspend fun readStreams(file: File): CodeToOutput =
        getOutputUsing(
            SharedConfig.ffprobe,
            "-v", "quiet", "-print_format", "json", "-show_streams", file.absolutePath
        )
}

View File

@ -0,0 +1,63 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.mapping
import no.iktdev.exfl.using
import no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams.AudioArguments
import no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams.VideoArguments
import no.iktdev.mediaprocessing.shared.contract.data.EncodeArgumentData
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.EncodingPreference
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoStream
import java.io.File
class EncodeWorkArgumentsMapping(
    val inputFile: String,
    val outFileFullName: String,
    val outFileAbsolutePathFile: File,
    val streams: ParsedMediaStreams,
    val preference: EncodingPreference
) {
    /**
     * Builds the ffmpeg encode argument set for the selected video/audio
     * streams, or null when no usable arguments could be derived.
     */
    fun getArguments(): EncodeArgumentData? {
        val outputAbsolutePath = outFileAbsolutePathFile.using("${outFileFullName}.mp4").absolutePath
        val selector = VideoAndAudioSelector(streams, preference)
        val videoArguments = selector.getVideoStream()?.let {
            VideoArguments(it, streams, preference.video).getVideoArguments()
        }
        val audioArguments = selector.getAudioStream()?.let {
            AudioArguments(it, streams, preference.audio).getAudioArguments()
        }
        val combined = toFfmpegWorkerArguments(videoArguments, audioArguments)
        if (combined.isEmpty()) {
            return null
        }
        return EncodeArgumentData(
            inputFile = inputFile,
            outputFile = outputAbsolutePath,
            arguments = combined
        )
    }

    private class VideoAndAudioSelector(val mediaStreams: ParsedMediaStreams, val preference: EncodingPreference) {
        // Default pick: the stream with the largest positive duration_ts, or the
        // lowest index when no stream reports a positive duration.
        private var defaultVideoSelected: VideoStream? =
            mediaStreams.videoStream
                .filter { (it.duration_ts ?: 0) > 0 }
                .maxByOrNull { it.duration_ts ?: 0 }
                ?: mediaStreams.videoStream.minByOrNull { it.index }

        private var defaultAudioSelected: AudioStream? =
            mediaStreams.audioStream
                .filter { (it.duration_ts ?: 0) > 0 }
                .maxByOrNull { it.duration_ts ?: 0 }
                ?: mediaStreams.audioStream.minByOrNull { it.index }

        fun getVideoStream(): VideoStream? = defaultVideoSelected

        /**
         * Audio pick: among streams in the preferred language, the first that
         * satisfies both channel count and codec preference; else the lowest
         * index in that language; else the default selection.
         */
        fun getAudioStream(): AudioStream? {
            val matchingLanguage = mediaStreams.audioStream.filter { it.tags.language == preference.audio.language }
            val preferred = matchingLanguage.find {
                it.channels >= (preference.audio.channels ?: 2) && it.codec_name == preference.audio.codec.lowercase()
            }
            return preferred
                ?: matchingLanguage.minByOrNull { it.index }
                ?: defaultAudioSelected
        }
    }
}

View File

@ -0,0 +1,31 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.mapping
import no.iktdev.exfl.using
import no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams.SubtitleArguments
import no.iktdev.mediaprocessing.shared.contract.data.ExtractArgumentData
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
import java.io.File
class ExtractWorkArgumentsMapping(
    val inputFile: String,
    val outFileFullName: String,
    val outFileAbsolutePathFile: File,
    val streams: ParsedMediaStreams
) {
    /**
     * Builds one ffmpeg extract argument set per selectable subtitle stream.
     * Output files land under "<out>/sub/<language>/<name>.<format>".
     */
    fun getArguments(): List<ExtractArgumentData> {
        val subtitleDirectory = outFileAbsolutePathFile.using("sub")
        return SubtitleArguments(streams.subtitleStream).getSubtitleArguments().map { subtitle ->
            ExtractArgumentData(
                inputFile = inputFile,
                arguments = subtitle.codecParameters + subtitle.optionalParameters + listOf("-map", "0:s:${subtitle.index}"),
                outputFile = subtitleDirectory.using(subtitle.language, "${outFileFullName}.${subtitle.format}").absolutePath
            )
        }
    }
}

View File

@ -1,4 +1,4 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg package no.iktdev.mediaprocessing.coordinator.tasksV2.mapping
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioArgumentsDto import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioArgumentsDto
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoArgumentsDto import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoArgumentsDto

View File

@ -0,0 +1,53 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioArgumentsDto
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioPreference
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
class AudioArguments(
    val audioStream: AudioStream,
    val allStreams: ParsedMediaStreams,
    val preference: AudioPreference
) {
    /** True when the stream already uses the preferred audio codec. */
    fun isAudioCodecEqual() = audioStream.codec_name.lowercase() == preference.codec.lowercase()

    /** Surround (more than two channels) but not already EAC3. */
    fun isSurroundButNotEAC3(): Boolean {
        return audioStream.channels > 2 && audioStream.codec_name.lowercase() != "eac3"
    }

    /** Surround and already EAC3 — can be stream-copied. */
    fun isSurroundAndEAC3(): Boolean {
        return audioStream.channels > 2 && audioStream.codec_name.lowercase() == "eac3"
    }

    fun isSurround(): Boolean {
        return audioStream.channels > 2
    }

    // Preference opted in to EAC3 for surround sources not already in EAC3.
    private fun shouldUseEAC3(): Boolean {
        return (preference.defaultToEAC3OnSurroundDetected && audioStream.channels > 2 && audioStream.codec_name.lowercase() != "eac3")
    }

    /**
     * Builds the audio codec arguments: stream-copy when the codec already
     * matches (or is surround EAC3), transcode to EAC3 when the preference
     * requests it for surround sources, otherwise transcode to the preferred
     * codec (downmixing to stereo unless channel preservation is requested).
     */
    fun getAudioArguments(): AudioArgumentsDto {
        val optionalParams = mutableListOf<String>()
        val codecParams = if (isAudioCodecEqual() || isSurroundAndEAC3()) {
            listOf("-acodec", "copy")
        } else if (shouldUseEAC3()) {
            // FIX: was `!isSurroundButNotEAC3() && shouldUseEAC3()`, which is a
            // contradiction — shouldUseEAC3() implies isSurroundButNotEAC3() —
            // so this branch was unreachable and surround streams were never
            // converted to EAC3 even when the preference asked for it.
            listOf("-c:a", "eac3")
        } else {
            val codecSwap = mutableListOf("-c:a", preference.codec)
            // Downmix to stereo unless the preference asks to keep channels.
            if (audioStream.channels > 2 && !preference.preserveChannels) {
                codecSwap.addAll(listOf("-ac", "2"))
            }
            codecSwap
        }
        return AudioArgumentsDto(
            index = allStreams.audioStream.indexOf(audioStream),
            codecParameters = codecParams,
            optionalParameters = optionalParams
        )
    }
}

View File

@ -0,0 +1,83 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.SubtitleArgumentsDto
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.SubtitleStream
class SubtitleArguments(val subtitleStreams: List<SubtitleStream>) {
    /**
     * @property DEFAULT is default subtitle as dialog
     * @property CC is Closed-Captions
     * @property SHD is Hard of hearing
     * @property NON_DIALOGUE is for Signs or Song (as in lyrics)
     */
    private enum class SubtitleType {
        DEFAULT,
        CC,
        SHD,
        NON_DIALOGUE
    }

    private fun SubtitleStream.isCC(): Boolean {
        val title = this.tags.title?.lowercase() ?: return false
        val keywords = listOf("cc", "closed caption")
        return keywords.any { title.contains(it) }
    }

    private fun SubtitleStream.isSHD(): Boolean {
        val title = this.tags.title?.lowercase() ?: return false
        // FIX: title is lowercased above, so the previous mixed-case keywords
        // ("Hard-of-Hearing", "Hard of Hearing") could never match.
        val keywords = listOf("shd", "hh", "hard-of-hearing", "hard of hearing")
        return keywords.any { title.contains(it) }
    }

    private fun SubtitleStream.isSignOrSong(): Boolean {
        val title = this.tags.title?.lowercase() ?: return false
        val keywords = listOf("song", "songs", "sign", "signs")
        return keywords.any { title.contains(it) }
    }

    // Classification priority: sign/song first, then SHD, then CC, else DEFAULT.
    private fun getSubtitleType(stream: SubtitleStream): SubtitleType {
        return if (stream.isSignOrSong())
            SubtitleType.NON_DIALOGUE
        else if (stream.isSHD()) {
            SubtitleType.SHD
        } else if (stream.isCC()) {
            SubtitleType.CC
        } else SubtitleType.DEFAULT
    }

    /**
     * Selects at most one subtitle stream per language (missing language tags
     * default to "eng"), preferring DEFAULT over CC over SHD, skipping
     * sign/song tracks and codecs without a supported text format.
     */
    fun getSubtitleArguments(): List<SubtitleArgumentsDto> {
        val acceptable = subtitleStreams.filter { !it.isSignOrSong() }
        val codecFiltered = acceptable.filter { getFormatToCodec(it.codec_name) != null }
        // (Removed a tautological `filter { it.first in SubtitleType.entries }`
        // — getSubtitleType always returns a SubtitleType entry.)
        val mappedToType = codecFiltered.map { getSubtitleType(it) to it }
            .groupBy { it.second.tags.language ?: "eng" }
            .mapValues { entry ->
                // Lowest enum ordinal wins: DEFAULT < CC < SHD < NON_DIALOGUE.
                entry.value.sortedBy { SubtitleType.entries.indexOf(it.first) }
                    .firstOrNull()?.second
            }.mapNotNull { it.value }
        return mappedToType.mapNotNull { stream ->
            getFormatToCodec(stream.codec_name)?.let { format ->
                SubtitleArgumentsDto(
                    // Index within the full subtitle list (used as ffmpeg 0:s:<index>).
                    index = subtitleStreams.indexOf(stream),
                    language = stream.tags.language ?: "eng",
                    format = format
                )
            }
        }
    }

    // Maps a source subtitle codec to a target file extension; null means the
    // codec is image-based or unsupported and must be skipped.
    fun getFormatToCodec(codecName: String): String? {
        return when (codecName) {
            "ass" -> "ass"
            "subrip" -> "srt"
            "webvtt", "vtt" -> "vtt"
            "smi" -> "smi"
            "hdmv_pgs_subtitle" -> null
            else -> null
        }
    }
}

View File

@ -0,0 +1,49 @@
package no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoArgumentsDto
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoPreference
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.VideoStream
class VideoArguments(
    val videoStream: VideoStream,
    val allStreams: ParsedMediaStreams,
    val preference: VideoPreference
) {
    /** True when the source codec normalizes to the same encoder as the preference. */
    fun isVideoCodecEqual() = getCodec(videoStream.codec_name) == getCodec(preference.codec.lowercase())

    // Normalizes codec aliases (including common misspellings) to encoder names.
    protected fun getCodec(name: String): String = when (name) {
        "hevc", "hevec", "h265", "h.265", "libx265" -> "libx265"
        "h.264", "h264", "libx264" -> "libx264"
        else -> name
    }

    /**
     * Builds the video codec arguments: stream-copy when the codec already
     * matches the preference (adding the hevc_mp4toannexb bitstream filter for
     * copied HEVC), otherwise transcode with the preferred codec and CRF
     * threshold. A pixel-format override is added unless the source format is
     * in the passthrough list.
     */
    fun getVideoArguments(): VideoArgumentsDto {
        val extraParams = mutableListOf<String>()
        if (preference.pixelFormatPassthrough.none { it == videoStream.pix_fmt }) {
            extraParams.addAll(listOf("-pix_fmt", preference.pixelFormat))
        }
        val codecParams: List<String> = if (isVideoCodecEqual()) {
            val passthrough = mutableListOf("-c:v", "copy")
            if (getCodec(videoStream.codec_name) == "libx265") {
                passthrough.addAll(listOf("-vbsf", "hevc_mp4toannexb"))
            }
            passthrough
        } else {
            extraParams.addAll(listOf("-crf", preference.threshold.toString()))
            listOf("-c:v", getCodec(preference.codec.lowercase()))
        }
        return VideoArgumentsDto(
            index = allStreams.videoStream.indexOf(videoStream),
            codecParameters = codecParams,
            optionalParameters = extraParams
        )
    }
}

View File

@ -34,7 +34,7 @@ interface FileWatcherEvents {
@Service @Service
class InputDirectoryWatcher(@Autowired var coordinator: EventCoordinator): FileWatcherEvents { class InputDirectoryWatcher(@Autowired var coordinator: EventCoordinatorDep): FileWatcherEvents {
private val logger = KotlinLogging.logger {} private val logger = KotlinLogging.logger {}
val watcherChannel = SharedConfig.incomingContent.asWatchChannel() val watcherChannel = SharedConfig.incomingContent.asWatchChannel()
val queue = FileWatcherQueue() val queue = FileWatcherQueue()

View File

@ -1,12 +1,12 @@
package no.iktdev.mediaprocessing.coordinator.tasks.event package no.iktdev.mediaprocessing.coordinator.tasks.event
import no.iktdev.mediaprocessing.PersistentMessageFromJsonDump import no.iktdev.mediaprocessing.PersistentMessageFromJsonDump
import no.iktdev.mediaprocessing.coordinator.tasksV2.listeners.MediaOutInformationTaskListener
import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf import no.iktdev.mediaprocessing.shared.common.lastOrSuccessOf
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.BaseInfoPerformed import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.BaseInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MetadataPerformed import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MetadataPerformed
import org.assertj.core.api.Assertions.assertThat import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Assertions.*
import org.junit.jupiter.api.Test import org.junit.jupiter.api.Test
class MetadataAndBaseInfoToFileOutTest { class MetadataAndBaseInfoToFileOutTest {
@ -34,7 +34,7 @@ class MetadataAndBaseInfoToFileOutTest {
val baseInfo = events.lastOrSuccessOf(KafkaEvents.EventMediaReadBaseInfoPerformed) { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed val baseInfo = events.lastOrSuccessOf(KafkaEvents.EventMediaReadBaseInfoPerformed) { it.data is BaseInfoPerformed }?.data as BaseInfoPerformed
val meta = events.lastOrSuccessOf(KafkaEvents.EventMediaMetadataSearchPerformed) { it.data is MetadataPerformed }?.data as MetadataPerformed? val meta = events.lastOrSuccessOf(KafkaEvents.EventMediaMetadataSearchPerformed) { it.data is MetadataPerformed }?.data as MetadataPerformed?
val pm = MetadataAndBaseInfoToFileOut.ProcessMediaInfoAndMetadata(baseInfo, meta) val pm = MediaOutInformationTaskListener.ProcessMediaInfoAndMetadata(baseInfo, meta)
val vi = pm.getVideoPayload() val vi = pm.getVideoPayload()

View File

@ -2,6 +2,7 @@ package no.iktdev.mediaprocessing.coordinator.tasks.event.ffmpeg
import com.google.gson.Gson import com.google.gson.Gson
import com.google.gson.reflect.TypeToken import com.google.gson.reflect.TypeToken
import no.iktdev.mediaprocessing.coordinator.tasksV2.mapping.streams.AudioArguments
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioPreference import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioPreference
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioStream import no.iktdev.mediaprocessing.shared.contract.ffmpeg.AudioStream
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
@ -15,7 +16,7 @@ class EncodeArgumentCreatorTaskTest {
@Test @Test
fun verifyThatEacStreamGetsCorrectArguments() { fun verifyThatEacStreamGetsCorrectArguments() {
val audio = EncodeArgumentCreatorTask.AudioArguments( val audio = AudioArguments(
audioStream = audioStreamsEAC().first(), audioStream = audioStreamsEAC().first(),
allStreams = ParsedMediaStreams(listOf(), audioStreamsEAC(), listOf()), allStreams = ParsedMediaStreams(listOf(), audioStreamsEAC(), listOf()),
preference = AudioPreference(preserveChannels = true, forceStereo = false, defaultToEAC3OnSurroundDetected = true) preference = AudioPreference(preserveChannels = true, forceStereo = false, defaultToEAC3OnSurroundDetected = true)

View File

@ -1,159 +0,0 @@
package no.iktdev.mediaprocessing.ui
import no.iktdev.mediaprocessing.shared.common.EventCoordinatorBase
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentMessage
import no.iktdev.mediaprocessing.shared.common.persistance.PersistentProcessDataMessage
import no.iktdev.mediaprocessing.shared.contract.ProcessType
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.DeserializedConsumerRecord
import no.iktdev.mediaprocessing.shared.kafka.dto.Message
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.BaseInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MediaProcessStarted
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.VideoInfoPerformed
import no.iktdev.mediaprocessing.shared.kafka.dto.isSuccess
import no.iktdev.mediaprocessing.ui.coordinator.PersistentEventBasedMessageListener
import no.iktdev.mediaprocessing.ui.dto.EventSummary
import no.iktdev.mediaprocessing.ui.dto.EventSummarySubItem
import no.iktdev.mediaprocessing.ui.dto.SummaryState
import no.iktdev.mediaprocessing.ui.socket.EventbasedTopic
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.scheduling.annotation.EnableScheduling
import org.springframework.scheduling.annotation.Scheduled
import org.springframework.stereotype.Service
/**
 * UI-side coordinator that summarizes persisted processing events for display.
 *
 * Kafka-message handling callbacks are intentionally no-ops here; this class only
 * derives [EventSummary]/[SummaryState] snapshots from the persisted event store.
 */
@Service
@EnableScheduling
class EventCoordinator(@Autowired private val eventbasedTopic: EventbasedTopic) : EventCoordinatorBase<PersistentMessage, PersistentEventBasedMessageListener>() {
    override val listeners = PersistentEventBasedMessageListener()

    override fun onCoordinatorReady() {
        super.onCoordinatorReady()
    }

    // No-op: the UI coordinator does not react to individual Kafka records.
    override fun onMessageReceived(event: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>) {
    }

    // No-op: task creation is handled by the processing coordinators, not the UI.
    override fun createTasksBasedOnEventsAndPersistence(
        referenceId: String,
        eventId: String,
        messages: List<PersistentMessage>
    ) {
    }

    // Scheduled poll every 5 seconds; body is currently empty (refresh not implemented).
    @Scheduled(fixedDelay = (5_000))
    fun refreshDatabaseData() {
    }

    /**
     * Maps each processer event to a per-event summary item keyed by event name.
     * Status precedence: consumed -> Completed, claimed -> Working, else Pending.
     */
    private fun getCurrentStateFromProcesserEvents(events: List<PersistentProcessDataMessage>): Map<String, EventSummarySubItem> {
        return events.associate {
            it.event.event to EventSummarySubItem(
                eventId = it.eventId,
                status = if (it.consumed) SummaryState.Completed else if (it.claimed) SummaryState.Working else SummaryState.Pending
            )
        }
    }

    /**
     * Derives the overall [SummaryState] for one reference by checking event
     * milestones from most-complete (stored) down to least-complete (started).
     * Order of the checks is significant: the first matching branch wins.
     */
    private fun getCurrentState(events: List<PersistentMessage>, processes: Map<String, EventSummarySubItem>): SummaryState {
        val stored = events.findLast { it.event == KafkaEvents.EventCollectAndStore }
        val started = events.findLast { it.event == KafkaEvents.EventMediaProcessStarted }
        val completedMediaEvent = events.findLast { it.event == KafkaEvents.EventMediaProcessCompleted }
        if (stored != null && stored.data.isSuccess()) {
            return SummaryState.Completed
        }
        // isSuccess() is applied to a nullable receiver; presumably it is an extension
        // that handles null by returning false — TODO confirm.
        if (completedMediaEvent?.data.isSuccess()) {
            return SummaryState.AwaitingStore
        }
        // NOTE(review): all{} on an EMPTY map returns true, so a reference with no
        // processer events at all lands in AwaitingStore here — confirm this is intended.
        if (processes.values.all { it.status == SummaryState.Completed }) {
            return SummaryState.AwaitingStore
        } else if (processes.values.any { it.status == SummaryState.Working }) {
            return SummaryState.Working
        } else if (processes.values.any { it.status == SummaryState.Pending }) {
            return SummaryState.Pending
        }
        // Work items were created but no processer has picked them up yet.
        val workPrepared = events.filter { it.event in listOf(
            KafkaEvents.EventWorkExtractCreated,
            KafkaEvents.EventWorkConvertCreated,
            KafkaEvents.EventWorkEncodeCreated
        ) }
        if (workPrepared.isNotEmpty()) {
            return SummaryState.Pending
        }
        // NOTE(review): unchecked cast — throws ClassCastException if a started event
        // carries a different payload type.
        if (started != null && (started.data as MediaProcessStarted).type == ProcessType.MANUAL) {
            return SummaryState.AwaitingConfirmation
        }
        // (sic) "perparation" — typo kept; renaming is out of scope for a doc pass.
        val perparation = events.filter { it.event in listOf(
            KafkaEvents.EventMediaParameterExtractCreated,
            KafkaEvents.EventMediaParameterEncodeCreated,
        ) }
        if (perparation.isNotEmpty()) {
            return SummaryState.Preparing
        }
        val analyzed2 = events.findLast { it.event in listOf(KafkaEvents.EventMediaReadOutNameAndType) }
        if (analyzed2 != null) {
            return SummaryState.Analyzing
        }
        val waitingForMeta = events.findLast { it.event == KafkaEvents.EventMediaMetadataSearchPerformed }
        if (waitingForMeta != null) {
            return SummaryState.Metadata
        }
        // NOTE(review): EventMediaReadOutNameAndType is re-checked here although the
        // analyzed2 branch above already returned for it — the third entry is redundant.
        val analyzed = events.findLast { it.event in listOf(KafkaEvents.EventMediaParseStreamPerformed, KafkaEvents.EventMediaReadBaseInfoPerformed, KafkaEvents.EventMediaReadOutNameAndType) }
        if (analyzed != null) {
            return SummaryState.Analyzing
        }
        val readEvent = events.findLast { it.event == KafkaEvents.EventMediaReadStreamPerformed }
        if (readEvent != null) {
            return SummaryState.Read
        }
        return SummaryState.Started
    }

    /**
     * Builds [EventSummary] objects for every reference found in the persisted store.
     *
     * NOTE(review): the computed `mapped` list is neither returned nor published
     * anywhere — as written this method has no observable effect. Confirm whether a
     * return value or a push to [eventbasedTopic] is missing.
     */
    fun buildSummaries() {
        val processerMessages = persistentReader.getProcessEvents().groupBy { it.referenceId }
        // getAllMessages() appears to return messages grouped per reference
        // (each element is iterated with firstOrNull/lastOrNull) — TODO confirm.
        val messages = persistentReader.getAllMessages()
        val mapped = messages.mapNotNull { it ->
            val referenceId = it.firstOrNull()?.referenceId
            if (referenceId != null) {
                val procM = processerMessages.getOrDefault(referenceId, emptyList())
                val processesStatuses = getCurrentStateFromProcesserEvents(procM)
                val messageStatus = getCurrentState(it, processesStatuses)
                val baseNameEvent = it.lastOrNull {ke -> ke.event == KafkaEvents.EventMediaReadBaseInfoPerformed }?.data.let { data ->
                    if (data is BaseInfoPerformed) data else null
                }
                val mediaNameEvent = it.lastOrNull { ke -> ke.event == KafkaEvents.EventMediaReadOutNameAndType }?.data.let { data ->
                    if (data is VideoInfoPerformed) data else null
                }
                // Prefer the resolved media name; fall back to the sanitized base name.
                val baseName = if (mediaNameEvent == null) baseNameEvent?.sanitizedName else mediaNameEvent.toValueObject()?.fullName
                EventSummary(
                    referenceId = referenceId,
                    baseName = baseName,
                    collection = mediaNameEvent?.toValueObject()?.title,
                    events = it.map { ke -> ke.event },
                    status = messageStatus,
                    activeEvens = processesStatuses
                )
            } else null
        }
    }
}

View File

@ -12,6 +12,7 @@ findProject(":shared")?.name = "shared"
findProject(":shared:kafka")?.name = "kafka" findProject(":shared:kafka")?.name = "kafka"
findProject(":shared:contract")?.name = "contract" findProject(":shared:contract")?.name = "contract"
findProject(":shared:common")?.name = "common" findProject(":shared:common")?.name = "common"
findProject(":shared:eventi")?.name = "eventi"
include("apps") include("apps")
include("apps:ui") include("apps:ui")
@ -23,5 +24,5 @@ include("shared")
include("shared:kafka") include("shared:kafka")
include("shared:contract") include("shared:contract")
include("shared:common") include("shared:common")
include("shared:eventi")
findProject(":shared:eventi")?.name = "eventi"

View File

@ -1,70 +0,0 @@
package no.iktdev.mediaprocessing.shared.common
import kotlinx.coroutines.*
import mu.KotlinLogging
import no.iktdev.exfl.coroutines.CoroutinesDefault
import no.iktdev.mediaprocessing.shared.common.tasks.EventBasedMessageListener
import no.iktdev.mediaprocessing.shared.common.tasks.TaskCreatorImpl
import no.iktdev.mediaprocessing.shared.kafka.core.CoordinatorProducer
import no.iktdev.mediaprocessing.shared.kafka.core.DefaultMessageListener
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEnv
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.DeserializedConsumerRecord
import no.iktdev.mediaprocessing.shared.kafka.dto.Message
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import org.springframework.context.ApplicationContext
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import javax.annotation.PostConstruct
/**
 * Base class for event coordinators: wires a Kafka [DefaultMessageListener] to the
 * subclass's message handler once all task-creator services have attached.
 *
 * @param V the persisted message type handled by this coordinator
 * @param L the listener registry type holding attached task creators
 */
abstract class EventCoordinatorBase<V, L: EventBasedMessageListener<V>> {
    val defaultCoroutine = CoroutinesDefault()
    // Becomes true only after onCoordinatorReady() has attached the Kafka listener.
    private var ready: Boolean = false
    fun isReady() = ready
    private val log = KotlinLogging.logger {}
    abstract val listeners: L
    @Autowired
    private lateinit var context: ApplicationContext
    @Autowired
    lateinit var producer: CoordinatorProducer
    @Autowired
    private lateinit var listener: DefaultMessageListener

    /** Subclasses create follow-up tasks from the persisted history of a reference. */
    abstract fun createTasksBasedOnEventsAndPersistence(referenceId: String, eventId: String, messages: List<V>)

    /** Hooks the Kafka listener to [onMessageReceived], starts consuming, and marks ready. */
    open fun onCoordinatorReady() {
        log.info { "Attaching listeners to Coordinator" }
        listener.onMessageReceived = { event -> onMessageReceived(event)}
        listener.listen(KafkaEnv.kafkaTopic)
        ready = true
    }

    abstract fun onMessageReceived(event: DeserializedConsumerRecord<KafkaEvents, Message<out MessageDataWrapper>>)

    /**
     * True when every Spring @Service bean that is a TaskCreatorImpl has attached
     * itself to [listeners]; warns about each one still missing.
     */
    fun isAllServicesRegistered(): Boolean {
        val services = context.getBeansWithAnnotation(Service::class.java).values.map { it.javaClass }.filter { TaskCreatorImpl.isInstanceOfTaskCreatorImpl(it) }
        val loadedServices = listeners.listeners.map { it.taskHandler.javaClass as Class<Any> }
        val notPresent = services.filter { it !in loadedServices }
        notPresent.forEach {
            log.warn { "Waiting for ${it.simpleName} to attach.." }
        }
        return notPresent.isEmpty()
    }

    /**
     * After bean construction, polls every second until all mandatory services are
     * registered, then fires [onCoordinatorReady]. Runs on [defaultCoroutine], so
     * Spring startup is not blocked.
     */
    @PostConstruct
    fun onInitializationCompleted() {
        defaultCoroutine.launch {
            while (!isAllServicesRegistered()) {
                log.info { "Waiting for mandatory services to start" }
                delay(1000)
            }
        }.invokeOnCompletion {
            onCoordinatorReady()
            log.info { "Coordinator is Ready!" }
        }
    }
}

View File

@ -1,8 +1,8 @@
package no.iktdev.mediaprocessing.shared.common.parsing package no.iktdev.mediaprocessing.shared.common.parsing
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.EpisodeInfo import no.iktdev.mediaprocessing.shared.contract.data.EpisodeInfo
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.MovieInfo import no.iktdev.mediaprocessing.shared.contract.data.MediaInfo
import no.iktdev.mediaprocessing.shared.kafka.dto.events_result.VideoInfo import no.iktdev.mediaprocessing.shared.contract.data.MovieInfo
class FileNameDeterminate(val title: String, val sanitizedName: String, val ctype: ContentType = ContentType.UNDEFINED, val metaTitle: String? = null) { class FileNameDeterminate(val title: String, val sanitizedName: String, val ctype: ContentType = ContentType.UNDEFINED, val metaTitle: String? = null) {
@ -13,7 +13,7 @@ class FileNameDeterminate(val title: String, val sanitizedName: String, val ctyp
UNDEFINED UNDEFINED
} }
fun getDeterminedVideoInfo(): VideoInfo? { fun getDeterminedVideoInfo(): MediaInfo? {
return when (ctype) { return when (ctype) {
ContentType.MOVIE -> determineMovieFileName() ContentType.MOVIE -> determineMovieFileName()
ContentType.SERIE -> determineSerieFileName() ContentType.SERIE -> determineSerieFileName()
@ -61,7 +61,7 @@ class FileNameDeterminate(val title: String, val sanitizedName: String, val ctyp
return EpisodeInfo(title = metaTitle ?: title, episode = episodeNumber.toInt(), season = seasonNumber.toInt(), episodeTitle = episodeTitle, fullName = cleanup(fullName)) return EpisodeInfo(title = metaTitle ?: title, episode = episodeNumber.toInt(), season = seasonNumber.toInt(), episodeTitle = episodeTitle, fullName = cleanup(fullName))
} }
private fun determineUndefinedFileName(): VideoInfo? { private fun determineUndefinedFileName(): MediaInfo? {
val serieEx = SerieEx(title, sanitizedName) val serieEx = SerieEx(title, sanitizedName)
val (season, episode) = serieEx.findSeasonAndEpisode(sanitizedName) val (season, episode) = serieEx.findSeasonAndEpisode(sanitizedName)
val episodeNumber = serieEx.findEpisodeNumber() val episodeNumber = serieEx.findEpisodeNumber()

View File

@ -123,7 +123,7 @@ class TasksManager(private val dataSource: DataSource) {
} }
} }
fun createTask(referenceId: String, eventId: String = UUID.randomUUID().toString(), derivedFromEventId: String? = null, task: TaskType, data: String): Boolean { fun createTask(referenceId: String, eventId: String = UUID.randomUUID().toString(), derivedFromEventId: String? = null, task: TaskType, data: String, inputFile: String): Boolean {
return executeWithStatus(dataSource) { return executeWithStatus(dataSource) {
tasks.insert { tasks.insert {
it[tasks.referenceId] = referenceId it[tasks.referenceId] = referenceId
@ -131,7 +131,7 @@ class TasksManager(private val dataSource: DataSource) {
it[tasks.task] = task.name it[tasks.task] = task.name
it[tasks.data] = data it[tasks.data] = data
it[tasks.derivedFromEventId] = derivedFromEventId it[tasks.derivedFromEventId] = derivedFromEventId
it[tasks.integrity] = getIntegrityOfData(data) it[tasks.inputFile] = inputFile
} }
} }
} }

View File

@ -8,6 +8,7 @@ import java.time.LocalDateTime
object tasks: IntIdTable() { object tasks: IntIdTable() {
val referenceId: Column<String> = varchar("referenceId", 50) val referenceId: Column<String> = varchar("referenceId", 50)
val inputFile: Column<String?> = varchar("inputFile", 250).nullable()
val status: Column<String?> = varchar("status", 10).nullable() val status: Column<String?> = varchar("status", 10).nullable()
val claimed: Column<Boolean> = bool("claimed").default(false) val claimed: Column<Boolean> = bool("claimed").default(false)
val claimedBy: Column<String?> = varchar("claimedBy", 100).nullable() val claimedBy: Column<String?> = varchar("claimedBy", 100).nullable()
@ -18,7 +19,6 @@ object tasks: IntIdTable() {
val data: Column<String> = text("data") val data: Column<String> = text("data")
val created: Column<LocalDateTime> = datetime("created").defaultExpression(CurrentDateTime) val created: Column<LocalDateTime> = datetime("created").defaultExpression(CurrentDateTime)
val lastCheckIn: Column<LocalDateTime?> = datetime("lastCheckIn").nullable() val lastCheckIn: Column<LocalDateTime?> = datetime("lastCheckIn").nullable()
val integrity: Column<String> = varchar("integrity", 100)
init { init {
uniqueIndex(referenceId, task, eventId) uniqueIndex(referenceId, task, eventId)

View File

@ -1,67 +0,0 @@
package no.iktdev.mediaprocessing.shared.common.tasks
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
/**
 * Registry that routes new event messages to the task creators that still need to
 * produce their output for a given reference.
 *
 * @param V the message type carried through the pipeline
 */
abstract class EventBasedMessageListener<V> {
    // All registered task creators; mutated via add(), read during forwarding.
    val listeners: MutableList<Tasks<V>> = mutableListOf()

    /** Registers a task creator that produces [produces]. */
    fun add(produces: KafkaEvents, listener: ITaskCreatorListener<V>) {
        listeners.add(Tasks(producesEvent = produces, taskHandler = listener))
    }

    /** Registers a pre-built task descriptor. */
    fun add(task: Tasks<V>) {
        listeners.add(task)
    }

    /**
     * Returns listeners whose produced event is not yet present in [events].
     *
     * Example implementation
     *
     *     fun waitingListeners(events: List<PersistentMessage>): List<Tasks> {
     *         val nonCreators = listeners
     *             .filter { !events.map { e -> e.event }
     *             .contains(it.producesEvent) }
     *         return nonCreators
     *     }
     */
    abstract fun waitingListeners(events: List<V>): List<Tasks<V>>

    /**
     * Filters [waitingListeners] down to those that listen for [event].
     *
     * Example implementation
     *
     *     fun listenerWantingEvent(event: PersistentMessage, waitingListeners: List<Tasks>)
     *         : List<Tasks>
     *     {
     *         return waitingListeners.filter { event.event in it.listensForEvents }
     *     }
     */
    abstract fun listenerWantingEvent(event: V, waitingListeners: List<Tasks<V>>): List<Tasks<V>>

    /**
     * Send to taskHandler
     */
    abstract fun onForward(event: V, history: List<V>, listeners: List<ITaskCreatorListener<V>>)

    /**
     * This will be called in sequence, thus some messages might be made a duplicate of.
     */
    fun forwardEventMessageToListeners(newEvent: V, events: List<V>) {
        val waitingListeners = waitingListeners(events)
        val availableListeners = listenerWantingEvent(event = newEvent, waitingListeners = waitingListeners)
        onForward(event = newEvent, history = events, listeners = availableListeners.map { it.taskHandler })
    }

    /**
     * This will be called with all messages at once, thus it should reflect kafka topic and database.
     *
     * NOTE(review): events.last() throws NoSuchElementException when [events] is
     * empty — callers must guarantee a non-empty batch, or this should be guarded.
     */
    fun forwardBatchEventMessagesToListeners(events: List<V>) {
        val waitingListeners = waitingListeners(events)
        onForward(event = events.last(), history = events, waitingListeners.map { it.taskHandler })
    }
}
/**
 * Descriptor binding a task creator to the event it produces and the events it
 * reacts to.
 *
 * @param producesEvent the event type this handler emits when it runs
 * @param listensForEvents the event types that may trigger this handler (empty = none)
 * @param taskHandler the handler invoked when a matching event arrives
 */
data class Tasks<V>(
    val producesEvent: KafkaEvents,
    val listensForEvents: List<KafkaEvents> = listOf(),
    val taskHandler: ITaskCreatorListener<V>
)

View File

@ -1,6 +0,0 @@
package no.iktdev.mediaprocessing.shared.common.tasks
/**
 * Callback contract for task creators: invoked when a new event arrives for a
 * reference, together with the event history for that reference.
 */
interface ITaskCreatorListener<V> {
    // event: the newly received message; events: the history for referenceId
    // (presumably including the new event — TODO confirm against callers).
    fun onEventReceived(referenceId: String, event: V, events: List<V>): Unit
}

View File

@ -1,131 +0,0 @@
package no.iktdev.mediaprocessing.shared.common.tasks
import mu.KotlinLogging
import no.iktdev.mediaprocessing.shared.common.EventCoordinatorBase
import no.iktdev.mediaprocessing.shared.kafka.core.CoordinatorProducer
import no.iktdev.mediaprocessing.shared.kafka.core.KafkaEvents
import no.iktdev.mediaprocessing.shared.kafka.dto.MessageDataWrapper
import org.springframework.beans.factory.annotation.Autowired
import javax.annotation.PostConstruct
/**
 * Base class for task creators: attaches itself to a coordinator's listener
 * registry at startup, gates incoming events on subclass-defined prerequisites,
 * and publishes the processing result under [producesEvent].
 *
 * @param C the coordinator type this creator attaches to
 * @param V the message type flowing through the pipeline
 * @param L the listener registry type used by the coordinator
 */
abstract class TaskCreatorImpl<C : EventCoordinatorBase<V, L>, V, L : EventBasedMessageListener<V>>(
    open var coordinator: C
) : ITaskCreatorListener<V> {
    private val log = KotlinLogging.logger {}
    // referenceId -> eventIds already handled; used by subclasses to avoid duplicates.
    protected open val processedEvents: MutableMap<String, Set<String>> = mutableMapOf()

    companion object {
        /** True when [clazz] is TaskCreatorImpl or a subclass of it. */
        fun <T> isInstanceOfTaskCreatorImpl(clazz: Class<T>): Boolean {
            val superClass = TaskCreatorImpl::class.java
            return superClass.isAssignableFrom(clazz)
        }
    }

    // Event that the implementer sets
    abstract val producesEvent: KafkaEvents
    // Events that must all be present before this creator may run.
    open val requiredEvents: List<KafkaEvents> = listOf()
    // Additional events this creator reacts to without requiring them.
    open val listensForEvents: List<KafkaEvents> = listOf()

    @Autowired
    lateinit var producer: CoordinatorProducer

    /** Builds the registry descriptor: reacts to the union of required + listened events. */
    fun getListener(): Tasks<V> {
        val reactableEvents = (requiredEvents + listensForEvents).distinct()
        //val eventListenerFilter = listensForEvents.ifEmpty { requiredEvents }
        return Tasks(taskHandler = this, producesEvent = producesEvent, listensForEvents = reactableEvents)
    }

    /** Self-registers with the coordinator after bean construction. */
    @PostConstruct
    open fun attachListener() {
        coordinator.listeners.add(getListener())
    }

    /**
     * Example implementation
     *
     *     open fun isPrerequisiteEventsOk(events: List<V>): Boolean {
     *         val currentEvents = events.map { it.event }
     *         return requiredEvents.all { currentEvents.contains(it) }
     *     }
     */
    abstract fun isPrerequisiteEventsOk(events: List<V>): Boolean

    /**
     * Example implementation
     *
     *     open fun isPrerequisiteDataPresent(events: List<V>): Boolean {
     *         val failed = events
     *             .filter { e -> e.event in requiredEvents }
     *             .filter { !it.data.isSuccess() }
     *         return failed.isEmpty()
     *     }
     */
    abstract fun isPrerequisiteDataPresent(events: List<V>): Boolean

    /**
     * Example implementation
     *
     *     open fun isEventOfSingle(event: V, singleOne: KafkaEvents): Boolean {
     *         return event.event == singleOne
     *     }
     */
    abstract fun isEventOfSingle(event: V, singleOne: KafkaEvents): Boolean

    /** Batch-level gate checks; all must return true for processing to proceed. */
    open fun prerequisitesRequired(events: List<V>): List<() -> Boolean> {
        return listOf {
            isPrerequisiteEventsOk(events)
        }
    }

    /** Per-event gate checks; empty by default (always passes). */
    open fun prerequisiteRequired(event: V): List<() -> Boolean> {
        return listOf()
    }

    // NOTE(review): shared mutable state keyed by fixed strings — if events for
    // different references are dispatched concurrently, referenceId/producesEvent
    // can be overwritten between onEventReceived and onResult. Confirm dispatch
    // is single-threaded.
    private val context: MutableMap<String, Any> = mutableMapOf()
    private val context_key_reference = "reference"
    private val context_key_producesEvent = "event"

    /**
     * Entry point from the listener registry: records the reference/produced-event
     * context, checks all gates, then runs [onProcessEvents] and publishes any
     * non-null result via [onResult].
     */
    final override fun onEventReceived(referenceId: String, event: V, events: List<V>) {
        context[context_key_reference] = referenceId
        getListener().producesEvent.let {
            context[context_key_producesEvent] = it
        }
        if (prerequisitesRequired(events).all { it.invoke() } && prerequisiteRequired(event).all { it.invoke() }) {
            // NOTE(review): despite the name, a FALSE result here blocks processing —
            // so containsUnprocessedEvents must return true when work remains. Confirm
            // subclass implementations follow that convention.
            if (!containsUnprocessedEvents(events)) {
                log.warn { "Event register blocked proceeding" }
                return
            }
            val result = onProcessEvents(event, events)
            if (result != null) {
                onResult(result)
            }
        } else {
            // TODO: Re-enable this
            // log.info { "Skipping: ${event.event} as it does not fulfill the requirements for ${context[context_key_producesEvent]}" }
        }
    }

    /**
     * This function is intended to cache the referenceId and its eventid's.
     * This is to prevent duplication.
     */
    abstract fun containsUnprocessedEvents(events: List<V>): Boolean

    /** Publishes [data] to Kafka using the reference/event captured in [context]. */
    protected fun onResult(data: MessageDataWrapper) {
        producer.sendMessage(
            referenceId = context[context_key_reference] as String,
            event = context[context_key_producesEvent] as KafkaEvents,
            data = data
        )
    }

    /** Produces the result payload, or null when nothing should be emitted. */
    abstract fun onProcessEvents(event: V, events: List<V>): MessageDataWrapper?
}

View File

@ -14,10 +14,9 @@ import org.junit.jupiter.api.Test
import java.util.UUID import java.util.UUID
import org.assertj.core.api.Assertions.assertThat import org.assertj.core.api.Assertions.assertThat
import org.jetbrains.exposed.sql.deleteAll import org.jetbrains.exposed.sql.deleteAll
import kotlin.math.sin
class PersistentEventMangerTest { class PersistentEventMangerTestBase {
val defaultReferenceId = UUID.randomUUID().toString() val defaultReferenceId = UUID.randomUUID().toString()
val dataSource = H2DataSource2(DatabaseConnectionConfig( val dataSource = H2DataSource2(DatabaseConnectionConfig(
address = "", address = "",
@ -364,7 +363,7 @@ class PersistentEventMangerTest {
).onEach { entry -> eventManager.setEvent(entry.event, entry.message) } ).onEach { entry -> eventManager.setEvent(entry.event, entry.message) }
val convertEvents = mutableListOf<PersistentEventMangerTest.EventToMessage>(); val convertEvents = mutableListOf<PersistentEventMangerTestBase.EventToMessage>();
val extractEvents = listOf( val extractEvents = listOf(
EventToMessage(KafkaEvents.EventWorkExtractCreated, EventToMessage(KafkaEvents.EventWorkExtractCreated,

View File

@ -11,6 +11,13 @@ repositories {
} }
dependencies { dependencies {
implementation(project(mapOf("path" to ":shared:eventi")))
implementation("com.google.code.gson:gson:2.8.9")
implementation("org.springframework.boot:spring-boot-starter:2.7.0")
testImplementation(platform("org.junit:junit-bom:5.9.1")) testImplementation(platform("org.junit:junit-bom:5.9.1"))
testImplementation("org.junit.jupiter:junit-jupiter") testImplementation("org.junit.jupiter:junit-jupiter")
} }

View File

@ -0,0 +1,66 @@
package no.iktdev.mediaprocessing.shared.contract
/**
 * Wire-level event identifiers for the media-processing pipeline.
 *
 * Each constant carries its string representation as used on the event bus.
 * Entry order is preserved for ordinal stability.
 */
enum class Events(val event: String) {
    EventMediaProcessStarted ("event:media-process:started"),
    EventMediaReadStreamPerformed ("event:media-read-stream:performed"),
    EventMediaParseStreamPerformed ("event:media-parse-stream:performed"),
    EventMediaReadBaseInfoPerformed ("event:media-read-base-info:performed"),
    EventMediaMetadataSearchPerformed ("event:media-metadata-search:performed"),
    EventMediaReadOutNameAndType ("event:media-read-out-name-and-type:performed"),
    EventMediaReadOutCover ("event:media-read-out-cover:performed"),
    EventMediaParameterEncodeCreated ("event:media-encode-parameter:created"),
    EventMediaParameterExtractCreated ("event:media-extract-parameter:created"),
    EventMediaParameterConvertCreated ("event:media-convert-parameter:created"),
    EventMediaParameterDownloadCoverCreated ("event:media-download-cover-parameter:created"),
    EventMediaWorkProceedPermitted ("event:media-work-proceed:permitted"),
    EventNotificationOfWorkItemRemoval("event:notification-work-item-removal"),
    EventWorkEncodeCreated ("event:work-encode:created"),
    EventWorkExtractCreated ("event:work-extract:created"),
    EventWorkConvertCreated ("event:work-convert:created"),
    EventWorkEncodePerformed ("event:work-encode:performed"),
    EventWorkExtractPerformed ("event:work-extract:performed"),
    EventWorkConvertPerformed ("event:work-convert:performed"),
    EventWorkDownloadCoverPerformed ("event:work-download-cover:performed"),
    EVENT_STORE_VIDEO_PERFORMED ("event:store-video:performed"),
    EVENT_STORE_SUBTITLE_PERFORMED ("event:store-subtitle:performed"),
    EVENT_STORE_COVER_PERFORMED ("event:store-cover:performed"),
    EVENT_STORE_METADATA_PERFORMED ("event:store-metadata:performed"),
    EventMediaProcessCompleted ("event:media-process:completed"),
    EventCollectAndStore ("event::save"),
    ;

    companion object {
        /** Resolves a wire string back to its enum constant, or null when unknown. */
        fun toEvent(event: String): Events? = entries.firstOrNull { it.event == event }

        /** True for events describing work-item creation or completion. */
        fun isOfWork(event: Events): Boolean = when (event) {
            EventWorkConvertCreated,
            EventWorkExtractCreated,
            EventWorkEncodeCreated,
            EventWorkEncodePerformed,
            EventWorkConvertPerformed,
            EventWorkExtractPerformed -> true
            else -> false
        }

        /** True for events belonging to the finalization phase. */
        fun isOfFinalize(event: Events): Boolean = when (event) {
            EventMediaProcessCompleted,
            EventCollectAndStore -> true
            else -> false
        }
    }
}

View File

@ -0,0 +1,11 @@
package no.iktdev.mediaprocessing.shared.contract
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.eventi.implementations.EventListenerImpl
import no.iktdev.mediaprocessing.shared.contract.data.Event
/**
 * Contract tying an event listener to the shared [Event] model: narrows the
 * generic eventi listener so implementations declare which [Events] they produce
 * and listen for, and which coordinator (if any) they belong to.
 */
abstract class EventsListenerContract<E: EventsManagerContract, C: EventCoordinator<Event, E>>: EventListenerImpl<Event, E>() {
    abstract override val produceEvent: Events
    abstract override val listensForEvents: List<Events>
    // Nullable: a listener may exist before its coordinator is wired — TODO confirm.
    abstract override val coordinator: C?
}

View File

@ -0,0 +1,8 @@
package no.iktdev.mediaprocessing.shared.contract
import no.iktdev.eventi.implementations.EventsManagerImpl
import no.iktdev.mediaprocessing.shared.common.datasource.DataSource
import no.iktdev.mediaprocessing.shared.contract.data.Event
/**
 * Marker base for events managers operating on the shared [Event] model,
 * backed by the given [DataSource]. Adds no behavior over [EventsManagerImpl].
 */
abstract class EventsManagerContract(dataSource: DataSource) : EventsManagerImpl<Event>(dataSource) {
}

View File

@ -0,0 +1,18 @@
package no.iktdev.mediaprocessing.shared.contract
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.data.isSuccessful
/**
 * Returns the most recent successful event, or — when none succeeded — the most
 * recent event of any outcome; null for an empty list.
 */
fun List<EventImpl>.lastOrSuccess(): EventImpl? {
    val successful = filter { it.isSuccessful() }
    return if (successful.isNotEmpty()) successful.last() else lastOrNull()
}
/**
 * Returns the most recent successful event of type [event]; when none of that
 * type succeeded, the most recent event of that type regardless of outcome;
 * null when no event of that type exists.
 */
fun List<EventImpl>.lastOrSuccessOf(event: Events): EventImpl? {
    var newestOfType: EventImpl? = null
    var newestSuccessful: EventImpl? = null
    for (candidate in this) {
        if (candidate.eventType != event) continue
        newestOfType = candidate
        if (candidate.isSuccessful()) newestSuccessful = candidate
    }
    return newestSuccessful ?: newestOfType
}
/**
 * Returns the most recent successful event of type [event] matching [predicate];
 * when none of the matches succeeded, the most recent match regardless of
 * outcome; null when nothing matches.
 *
 * Fix: previously this overload returned the plain last match, ignoring
 * success — inconsistent with its name and with the sibling
 * lastOrSuccessOf(event) overload, which prefers successful events.
 */
fun List<EventImpl>.lastOrSuccessOf(event: Events, predicate: (EventImpl) -> Boolean): EventImpl? {
    val validEvents = this.filter { it.eventType == event && predicate(it) }
    return validEvents.lastOrNull { it.isSuccessful() } ?: validEvents.lastOrNull()
}

View File

@ -0,0 +1,17 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event carrying the [BaseInfo] extracted for a media item; emitted as
 * [Events.EventMediaReadBaseInfoPerformed]. Data is null when extraction
 * produced no result.
 */
class BaseInfoEvent(
    override val eventType: Events = Events.EventMediaReadBaseInfoPerformed,
    override val metadata: EventMetadata,
    override val data: BaseInfo? = null
) : Event()
/**
 * Basic name information for a media item.
 *
 * @param title the resolved title
 * @param sanitizedName the cleaned-up source name — presumably the file name with
 *        noise removed; TODO confirm against the producer
 * @param searchTitles alternative titles for metadata lookups (empty by default)
 */
data class BaseInfo(
    val title: String,
    val sanitizedName: String,
    // Idiom fix: redundant explicit type argument removed (inferred from the property type).
    val searchTitles: List<String> = emptyList(),
)

View File

@ -0,0 +1,19 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event announcing that a convert work item was created
 * ([Events.EventWorkConvertCreated]).
 */
data class ConvertWorkCreatedEvent(
    override val eventType: Events = Events.EventWorkConvertCreated,
    override val metadata: EventMetadata,
    override val data: ConvertData? = null
) : Event() {
}

/**
 * Parameters for a conversion job.
 *
 * @param inputFile path of the file to convert
 * @param outputDirectory directory the converted file(s) are written to
 * @param outputFileName base name for the output
 * @param formats target formats to produce (empty by default)
 * @param allowOverwrite whether existing output files may be replaced
 */
data class ConvertData(
    val inputFile: String,
    val outputDirectory: String,
    val outputFileName: String,
    val formats: List<String> = emptyList(),
    val allowOverwrite: Boolean
)

View File

@ -0,0 +1,16 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event reporting the outcome of a convert work item
 * ([Events.EventWorkConvertPerformed]).
 *
 * @param message optional human-readable status/error text
 */
class ConvertWorkPerformed(
    override val eventType: Events = Events.EventWorkConvertPerformed,
    override val metadata: EventMetadata,
    override val data: ConvertedData? = null,
    val message: String? = null
) : Event() {
}

/** Result of a conversion: paths of the produced files. */
data class ConvertedData(
    val outputFiles: List<String>
)

View File

@ -0,0 +1,17 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event carrying the prepared encode arguments for a media item
 * ([Events.EventMediaParameterEncodeCreated]).
 */
data class EncodeArgumentCreatedEvent(
    override val eventType: Events = Events.EventMediaParameterEncodeCreated,
    override val metadata: EventMetadata,
    override val data: EncodeArgumentData? = null
) : Event() {
}

/**
 * Encoder invocation description.
 *
 * @param arguments command-line arguments for the encoder — presumably ffmpeg;
 *        TODO confirm
 * @param outputFile path the encoded file is written to
 * @param inputFile path of the source file
 */
data class EncodeArgumentData(
    val arguments: List<String>,
    val outputFile: String,
    val inputFile: String
)

View File

@ -0,0 +1,10 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event announcing that an encode work item was created
 * ([Events.EventWorkEncodeCreated]); reuses [EncodeArgumentData] as payload.
 */
data class EncodeWorkCreatedEvent(
    override val eventType: Events = Events.EventWorkEncodeCreated,
    override val metadata: EventMetadata,
    override val data: EncodeArgumentData? = null
) : Event()

View File

@ -0,0 +1,16 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event reporting the outcome of an encode work item
 * ([Events.EventWorkEncodePerformed]).
 *
 * @param message optional human-readable status/error text
 */
data class EncodeWorkPerformedEvent(
    override val eventType: Events = Events.EventWorkEncodePerformed,
    override val metadata: EventMetadata,
    override val data: EncodedData? = null,
    val message: String? = null
) : Event() {
}

/** Result of an encode: path of the produced file. */
data class EncodedData(
    val outputFile: String
)

View File

@ -0,0 +1,24 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Base type for all pipeline events; narrows the generic [EventImpl] so every
 * event declares its [Events] type.
 */
abstract class Event: EventImpl() {
    abstract override val eventType: Events
}

/**
 * Safe downcast helper: returns this event as [T], or null (logging to stderr)
 * when the runtime type does not match.
 */
inline fun <reified T: Event> Event.az(): T? {
    return if (this !is T) {
        System.err.println("${this::class.java.name} is not a type of ${T::class.java.name}")
        null
    } else this
}

/** Convenience accessor for the reference id carried in the event metadata. */
fun Event.referenceId(): String {
    return this.metadata.referenceId
}

/** Convenience accessor for the event id carried in the event metadata. */
fun Event.eventId(): String {
    return this.metadata.eventId
}

View File

@ -0,0 +1,17 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event carrying the prepared extract arguments for a media item
 * ([Events.EventMediaParameterExtractCreated]). Unlike the encode variant,
 * the payload is a list — one entry per stream to extract, presumably; TODO confirm.
 */
data class ExtractArgumentCreatedEvent(
    override val eventType: Events = Events.EventMediaParameterExtractCreated,
    override val metadata: EventMetadata,
    override val data: List<ExtractArgumentData>? = null
): Event()

/**
 * Extractor invocation description.
 *
 * @param arguments command-line arguments for the extraction tool
 * @param outputFile path the extracted stream is written to
 * @param inputFile path of the source file
 */
data class ExtractArgumentData(
    val arguments: List<String>,
    val outputFile: String,
    val inputFile: String
)

View File

@ -0,0 +1,11 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event announcing that an extract work item was created
 * ([Events.EventWorkExtractCreated]); carries a single [ExtractArgumentData].
 */
data class ExtractWorkCreatedEvent(
    override val eventType: Events = Events.EventWorkExtractCreated,
    override val metadata: EventMetadata,
    override val data: ExtractArgumentData? = null
) : Event() {
}

View File

@ -0,0 +1,16 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event reporting the outcome of an extract work item
 * ([Events.EventWorkExtractPerformed]).
 *
 * @param message optional human-readable status/error text
 */
data class ExtractWorkPerformedEvent(
    override val eventType: Events = Events.EventWorkExtractPerformed,
    override val metadata: EventMetadata,
    override val data: ExtractedData? = null,
    val message: String? = null
) : Event() {
}

/** Result of an extraction: path of the produced file. */
data class ExtractedData(
    val outputFile: String
)

View File

@ -0,0 +1,15 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event reporting that a cover image was downloaded
 * ([Events.EventWorkDownloadCoverPerformed]).
 */
data class MediaCoverDownloadedEvent(
    override val eventType: Events = Events.EventWorkDownloadCoverPerformed,
    override val metadata: EventMetadata,
    override val data: DownloadedCover? = null
) : Event() {
}

/** Location of the downloaded cover on disk. */
data class DownloadedCover(
    val absoluteFilePath: String
)

View File

@ -0,0 +1,17 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event carrying where a cover can be fetched from and where to store it
 * ([Events.EventMediaReadOutCover]).
 */
data class MediaCoverInfoReceivedEvent(
    override val eventType: Events = Events.EventMediaReadOutCover,
    override val metadata: EventMetadata,
    override val data: CoverDetails? = null
) : Event() {
}

/**
 * Download instructions for a cover image.
 *
 * @param url source URL of the cover
 * @param outDir directory to store the downloaded file in
 * @param outFileBaseName file name without extension — extension presumably
 *        derived from the download; TODO confirm
 */
data class CoverDetails(
    val url: String,
    val outDir: String,
    val outFileBaseName: String,
)

View File

@ -0,0 +1,12 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.ffmpeg.ParsedMediaStreams
/**
 * Event emitted when the raw probed streams have been parsed into
 * [ParsedMediaStreams].
 *
 * Changed from a plain `class` to a `data class` for consistency with the
 * sibling event types (all other events in this package are data classes);
 * this adds value-based equals/hashCode/toString without changing the
 * constructor or supertype.
 */
data class MediaFileStreamsParsedEvent(
    override val metadata: EventMetadata,
    override val data: ParsedMediaStreams? = null,
    override val eventType: Events = Events.EventMediaParseStreamPerformed
) : Event()

View File

@ -0,0 +1,12 @@
package no.iktdev.mediaprocessing.shared.contract.data
import com.google.gson.JsonObject
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event emitted when a media file's streams have been read (raw probe output
 * as a [JsonObject], before parsing).
 *
 * Changed from a plain `class` to a `data class` for consistency with the
 * sibling event types; adds value semantics only, the constructor and
 * supertype are unchanged.
 */
data class MediaFileStreamsReadEvent(
    override val metadata: EventMetadata,
    override val data: JsonObject? = null,
    override val eventType: Events = Events.EventMediaReadStreamPerformed
) : Event()

View File

@ -0,0 +1,25 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event emitted when a metadata search has completed, carrying the payload
 * returned by the metadata service.
 */
data class MediaMetadataReceivedEvent(
    override val eventType: Events = Events.EventMediaMetadataSearchPerformed,
    override val metadata: EventMetadata,
    override val data: pyMetadata? = null,
) : Event()
/**
 * Metadata payload for a media title.
 *
 * NOTE(review): the lower-camel names `pyMetadata`/`pySummary` break Kotlin
 * naming conventions — presumably kept to mirror the pyMetadata service's
 * wire format; renaming would break callers, so left as-is. Confirm.
 */
data class pyMetadata(
    val title: String,
    val altTitle: List<String> = emptyList(),
    val cover: String? = null,  // cover image URL, when available
    val type: String,  // e.g. media kind; exact value set not visible here
    val summary: List<pySummary> = emptyList(),
    val genres: List<String> = emptyList()
)
/**
 * A single localized summary text.
 *
 * @property summary the summary text, may be absent
 * @property language ISO 639-2 style language code; defaults to "eng"
 */
data class pySummary(
    val summary: String?,
    val language: String = "eng"
)

View File

@ -0,0 +1,59 @@
package no.iktdev.mediaprocessing.shared.contract.data
import com.google.gson.Gson
import com.google.gson.JsonObject
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
/**
 * Event emitted when the output name/type information for a media item has
 * been constructed.
 */
data class MediaOutInformationConstructedEvent(
    override val eventType: Events = Events.EventMediaReadOutNameAndType,
    override val metadata: EventMetadata,
    override val data: MediaInfoReceived? = null
) : Event()
/**
 * Raw media info as JSON plus the directory output should be written to.
 *
 * @property info JSON object expected to carry a "type" field ("movie" or "serie")
 * @property outDirectory target directory for produced files
 */
data class MediaInfoReceived(
    val info: JsonObject,
    val outDirectory: String,
) {
    /**
     * Deserializes [info] into a typed [MediaInfo] based on its "type" field.
     *
     * @return [MovieInfo] for "movie", [EpisodeInfo] for "serie", or null when
     * the "type" member is missing, non-primitive, or unrecognized.
     */
    fun toValueObject(): MediaInfo? {
        // Bug fix: the previous `info.get("type").asString` threw an NPE when
        // "type" was absent (and failed on JsonNull); read it defensively.
        val type = info.get("type")?.takeIf { it.isJsonPrimitive }?.asString
        return when (type) {
            "movie" -> Gson().fromJson(info.toString(), MovieInfo::class.java)
            "serie" -> Gson().fromJson(info.toString(), EpisodeInfo::class.java)
            else -> null
        }
    }
}
/**
 * Typed media info for a series episode (JSON "type" == "serie").
 *
 * @property episode episode number within the season
 * @property season season number
 * @property episodeTitle title of the episode itself, when known
 * @property fullName complete display name for the episode
 */
data class EpisodeInfo(
    override val type: String = "serie",
    override val title: String,
    val episode: Int,
    val season: Int,
    val episodeTitle: String?,
    override val fullName: String
): MediaInfo(type, title, fullName)
/**
 * Typed media info for a movie (JSON "type" == "movie").
 */
data class MovieInfo(
    override val type: String = "movie",
    override val title: String,
    override val fullName: String
) : MediaInfo(type, title, fullName)
/**
 * Describes a subtitle source file.
 *
 * @property inputFile path of the subtitle file
 * @property collection collection/group the subtitle belongs to
 * @property language subtitle language code
 */
data class SubtitleInfo(
    val inputFile: String,
    val collection: String,
    val language: String
)
/**
 * Base type for typed media info ([MovieInfo], [EpisodeInfo]).
 *
 * Properties are marked @Transient on this base — presumably so Gson only
 * serializes the overriding subclass fields once; confirm against serialized
 * output.
 */
open class MediaInfo(
    @Transient open val type: String,
    @Transient open val title: String,
    @Transient open val fullName: String
) {
    // Serializes the concrete instance (subclass fields included) via Gson.
    fun toJsonObject(): JsonObject {
        return Gson().toJsonTree(this).asJsonObject
    }
}

View File

@ -0,0 +1,22 @@
package no.iktdev.mediaprocessing.shared.contract.data
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.mediaprocessing.shared.contract.Events
import no.iktdev.mediaprocessing.shared.contract.ProcessType
import no.iktdev.mediaprocessing.shared.contract.dto.StartOperationEvents
/**
 * Initial event that starts processing of a media file.
 * Typically the first event in a reference-id chain.
 */
data class MediaProcessStartEvent(
    override val metadata: EventMetadata,
    override val data: StartEventData?,
    override val eventType: Events = Events.EventMediaProcessStarted
): Event()
/**
 * Parameters for starting a media process.
 *
 * @property type how the process is driven; defaults to [ProcessType.FLOW]
 * @property operations operations to perform; defaults to encode + extract + convert
 * @property file absolute path of the file to process
 */
data class StartEventData(
    val type: ProcessType = ProcessType.FLOW,
    val operations: List<StartOperationEvents> = listOf(
        StartOperationEvents.ENCODE,
        StartOperationEvents.EXTRACT,
        StartOperationEvents.CONVERT
    ),
    val file: String // AbsolutePath
)

View File

@ -0,0 +1,60 @@
/*
 * Gradle build script (Kotlin DSL): Spring Boot + Exposed + MySQL module.
 *
 * NOTE(review): several version declarations look inconsistent — confirm and align:
 *  - Spring Boot plugin 2.5.5 vs spring-boot-starter dependencies pinned to 2.7.0.
 *  - kotlin("plugin.spring") 1.5.31 while the toolchain targets JVM 17 — verify
 *    this Kotlin version supports the chosen target.
 *  - JUnit: BOM 5.9.1 alongside explicitly pinned jupiter 5.7.2 artifacts.
 *  - assertj-core 3.4.1 is very old; possibly 3.24.x was intended.
 */
plugins {
    id("java")
    kotlin("jvm")
    kotlin("plugin.spring") version "1.5.31"
    id("org.springframework.boot") version "2.5.5"
    id("io.spring.dependency-management") version "1.0.11.RELEASE"
}
group = "no.iktdev.mediaprocessing"
version = "1.0-SNAPSHOT"
repositories {
    mavenCentral()
}
// Exposed ORM version shared by the exposed-* dependencies below.
val exposedVersion = "0.44.0"
dependencies {
    /*Spring boot*/
    implementation("org.springframework.boot:spring-boot-starter:2.7.0")
    implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
    implementation("org.jetbrains.kotlin:kotlin-stdlib")
    // Exposed ORM (core/DAO/JDBC/java-time support)
    implementation("org.jetbrains.exposed:exposed-core:$exposedVersion")
    implementation("org.jetbrains.exposed:exposed-dao:$exposedVersion")
    implementation("org.jetbrains.exposed:exposed-jdbc:$exposedVersion")
    implementation("org.jetbrains.exposed:exposed-java-time:$exposedVersion")
    implementation ("mysql:mysql-connector-java:8.0.29")
    implementation("org.apache.commons:commons-lang3:3.12.0")
    testImplementation("org.springframework.boot:spring-boot-starter-test:2.7.0")
    testImplementation("org.jetbrains.kotlin:kotlin-test")
    testImplementation(platform("org.junit:junit-bom:5.9.1"))
    testImplementation("org.junit.jupiter:junit-jupiter")
    testImplementation("io.mockk:mockk:1.12.0")
    // H2 used as in-memory stand-in for MySQL in tests.
    testImplementation("com.h2database:h2:1.4.200")
    testImplementation("org.assertj:assertj-core:3.4.1")
    testImplementation("org.junit.jupiter:junit-jupiter-api:5.7.2")
    testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.7.2")
    testImplementation("io.kotlintest:kotlintest-assertions:3.3.2")
    testImplementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.3.0")
    implementation("org.jetbrains.kotlin:kotlin-reflect")
    testImplementation("org.springframework.boot:spring-boot-starter-test")
    testImplementation("org.jetbrains.kotlin:kotlin-test-junit5")
    testRuntimeOnly("org.junit.platform:junit-platform-launcher")
}
tasks.test {
    useJUnitPlatform()
}
kotlin {
    jvmToolchain(17)
}

View File

@ -0,0 +1,41 @@
package no.iktdev.eventi.data
import java.time.LocalDateTime
import java.util.*
/**
 * Base type for all events in the Eventi framework.
 *
 * Concrete events provide routing/tracing [metadata], an optional payload
 * [data], and an [eventType] discriminator (typed as Any so modules can use
 * their own enums).
 */
abstract class EventImpl {
    abstract val metadata: EventMetadata
    abstract val data: Any?
    abstract val eventType: Any
}
/**
 * Returns [EventImpl.data] cast to [T], or null when no payload is set.
 *
 * The cast is unchecked (generics are erased), so a wrong [T] is not detected
 * here — it surfaces as a ClassCastException at the call site's first use.
 */
@Suppress("UNCHECKED_CAST")
fun <T> EventImpl.dataAs(): T? {
    return this.data as T?
}
/**
 * Tracing/routing metadata attached to every event.
 *
 * @property referenceId groups all events belonging to one processing chain
 * @property eventId unique id of this event; random UUID by default
 * @property derivedFromEventId id of the event this one was produced from;
 *           null only for the initial event of a chain
 * @property status outcome of the work that produced this event
 * @property created creation timestamp; defaults to now
 */
data class EventMetadata(
    val referenceId: String,
    val eventId: String = UUID.randomUUID().toString(),
    val derivedFromEventId: String? = null, // null only for the chain's init event
    val status: EventStatus,
    val created: LocalDateTime = LocalDateTime.now()
)
/** Outcome of the work that produced an event. */
enum class EventStatus {
    Success,
    Skipped,
    Failed
}
/** True when this event completed with [EventStatus.Success]. */
fun EventImpl.isSuccessful(): Boolean = metadata.status == EventStatus.Success
/** True when this event completed with [EventStatus.Skipped]. */
fun EventImpl.isSkipped(): Boolean = metadata.status == EventStatus.Skipped
/** True when this event completed with [EventStatus.Failed]. */
fun EventImpl.isFailed(): Boolean = metadata.status == EventStatus.Failed

View File

@ -0,0 +1,44 @@
package no.iktdev.mediaprocessing.shared.common.datasource
import no.iktdev.eventi.database.DatabaseConnectionConfig
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.Table
import java.time.Instant
import java.time.LocalDateTime
import java.time.ZoneId
import java.time.ZoneOffset
/**
 * Base abstraction over a database connection, parameterized by a
 * [DatabaseConnectionConfig]. Concrete subclasses (e.g. MySQL) implement
 * connection-URL construction, database creation and table creation.
 */
abstract class DataSource(val config: DatabaseConnectionConfig) {
    // Connected database handle; set by subclasses once connected.
    open var database: Database? = null

    abstract fun connect()
    abstract fun createDatabase(): Database?
    abstract fun createTables(vararg tables: Table)
    abstract fun createDatabaseStatement(): String
    abstract fun toConnectionUrl(): String
    abstract fun toDatabaseConnectionUrl(database: String): String
    abstract fun toDatabase(): Database

    /** Host address with ":port" appended when a port is configured. */
    fun toPortedAddress(): String =
        if (config.port.isNullOrBlank()) config.address
        else "${config.address}:${config.port}"
}
/**
 * Converts an epoch-seconds timestamp to a [LocalDateTime] in the system
 * default time zone.
 */
fun timestampToLocalDateTime(timestamp: Int): LocalDateTime {
    val instant = Instant.ofEpochSecond(timestamp.toLong())
    return instant.atZone(ZoneId.systemDefault()).toLocalDateTime()
}
/**
 * Converts this [LocalDateTime] to epoch seconds using the system default
 * time zone.
 *
 * Bug fix: the previous implementation applied the zone offset in effect at
 * `LocalDateTime.now()` to *this* date-time, which produced values off by the
 * DST delta for date-times on the other side of a daylight-saving transition.
 * Using the zone rules at the receiver's own date-time is correct and is the
 * exact inverse of [timestampToLocalDateTime].
 */
fun LocalDateTime.toEpochSeconds(): Long {
    return this.atZone(ZoneId.systemDefault()).toEpochSecond()
}

View File

@ -0,0 +1,9 @@
package no.iktdev.eventi.database
/**
 * Connection parameters for a database server.
 *
 * @property address host name or IP of the server (without port)
 * @property port server port as a string; null/blank means use the driver default
 * @property username credentials user name
 * @property password credentials password
 * @property databaseName schema/database to use or create
 */
data class DatabaseConnectionConfig(
    val address: String,
    val port: String?,
    val username: String,
    val password: String,
    val databaseName: String
)

View File

@ -0,0 +1,86 @@
package no.iktdev.mediaprocessing.shared.common.datasource
import mu.KotlinLogging
import no.iktdev.eventi.database.DatabaseConnectionConfig
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.transactions.TransactionManager
import org.jetbrains.exposed.sql.transactions.transaction
/**
 * MySQL implementation of [DataSource] built on Exposed.
 *
 * Connection strategy: [toDatabaseServerConnection] connects to the server
 * without selecting a schema (used to create the database), while
 * [toDatabase] connects directly to the configured schema.
 */
open class MySqlDataSource(conf: DatabaseConnectionConfig): DataSource(conf) {
    val log = KotlinLogging.logger {}

    /** Connects to the configured database (side effect: sets [database]). */
    override fun connect() {
        this.toDatabase()
    }

    /**
     * Creates the configured database if it does not exist, then connects to it.
     * @return the connected [Database], or null when creation failed.
     */
    override fun createDatabase(): Database? {
        val ok = transaction(toDatabaseServerConnection()) {
            val tmc = TransactionManager.current().connection
            // NOTE(review): databaseName is interpolated into the SQL rather than
            // bound as a parameter, and the statement/result set are not closed
            // explicitly — acceptable only because the value comes from trusted
            // configuration; confirm.
            val query = "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${config.databaseName}';"
            val stmt = tmc.prepareStatement(query, true)
            val resultSet = stmt.executeQuery()
            val databaseExists = resultSet.next()
            if (!databaseExists) {
                try {
                    exec(createDatabaseStatement())
                    log.info { "Database ${config.databaseName} created." }
                    true
                } catch (e: Exception) {
                    e.printStackTrace()
                    false
                }
            } else {
                log.info { "Database ${config.databaseName} already exists." }
                true
            }
        }
        return if (ok) toDatabase() else {
            log.error { "No database to create or connect to" }
            null
        }
    }

    /** Creates any missing tables/columns for the given Exposed tables. */
    override fun createTables(vararg tables: Table) {
        transaction(this.database) {
            SchemaUtils.createMissingTablesAndColumns(*tables)
            log.info { "Database transaction completed" }
        }
    }

    override fun createDatabaseStatement(): String {
        return "CREATE DATABASE ${config.databaseName};"
    }

    /** Connects to the server without selecting a schema (for CREATE DATABASE). */
    protected fun toDatabaseServerConnection(): Database {
        database = Database.connect(
            toConnectionUrl(),
            user = config.username,
            password = config.password
        )
        return database!!
    }

    /** Connects to the configured schema and caches the handle in [database]. */
    override fun toDatabase(): Database {
        val database = Database.connect(
            toDatabaseConnectionUrl(config.databaseName),
            user = config.username,
            password = config.password
        )
        this.database = database
        return database
    }

    override fun toDatabaseConnectionUrl(database: String): String {
        return toConnectionUrl() + "/$database"
    }

    override fun toConnectionUrl(): String {
        return "jdbc:mysql://${toPortedAddress()}"
    }
}

View File

@ -0,0 +1,153 @@
package no.iktdev.mediaprocessing.shared.common.datasource
import org.jetbrains.exposed.exceptions.ExposedSQLException
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.transactions.transaction
import java.sql.Connection
import java.sql.SQLIntegrityConstraintViolationException
/**
 * Placeholder base for per-table operation helpers.
 * Currently empty — presumably intended to collect shared CRUD helpers for a
 * given Exposed [Table]; confirm before extending.
 */
open class TableDefaultOperations<T : Table> {
}
/**
 * Runs [block] inside a READ_UNCOMMITTED ("dirty read") transaction.
 *
 * @return the block's result, or null when the block or the transaction threw
 * (the exception's stack trace is printed, never propagated to the caller).
 */
fun <T> withDirtyRead(db: Database? = null, block: () -> T): T? {
    return try {
        transaction(db = db, transactionIsolation = Connection.TRANSACTION_READ_UNCOMMITTED) {
            try {
                block()
            } catch (inner: Exception) {
                // Surface the failure before aborting the transaction.
                inner.printStackTrace()
                throw inner
            }
        }
    } catch (outer: Exception) {
        outer.printStackTrace()
        null
    }
}
/** Convenience overload: dirty read against a [DataSource]'s database handle. */
fun <T> withDirtyRead(db: DataSource? = null, block: () -> T): T? =
    withDirtyRead(db?.database, block)
/**
 * Runs [block] inside a default-isolation transaction.
 *
 * @return the block's result, or null when the block or the transaction threw
 * (the exception's stack trace is printed, never propagated to the caller).
 */
fun <T> withTransaction(db: Database? = null, block: () -> T): T? {
    return try {
        transaction(db) {
            try {
                block()
            } catch (inner: Exception) {
                // Surface the failure before aborting the transaction.
                inner.printStackTrace()
                throw inner
            }
        }
    } catch (outer: Exception) {
        outer.printStackTrace()
        null
    }
}
/** Convenience overload: transaction against a [DataSource]'s database handle. */
fun <T> withTransaction(db: DataSource? = null, block: () -> T): T? =
    withTransaction(db?.database, block)
/**
 * Runs [block] in a transaction and commits.
 *
 * NOTE(review): appears functionally identical to `executeWithStatus(Database, ...)`
 * defined below — possibly a candidate for consolidation; confirm.
 *
 * @return true when the block committed cleanly, false when anything threw
 * (stack trace printed, exception swallowed).
 */
fun <T> insertWithSuccess(db: Database? = null, block: () -> T): Boolean {
    return try {
        transaction(db) {
            try {
                block()
                commit()
            } catch (inner: Exception) {
                inner.printStackTrace()
                throw inner
            }
        }
        true
    } catch (outer: Exception) {
        outer.printStackTrace()
        false
    }
}
/**
 * Runs [block] in a transaction and commits.
 *
 * @param rollbackOnFailure when true, rolls the transaction back if the block throws
 * @return null on success, otherwise the exception that occurred (never thrown)
 */
fun <T> executeOrException(db: Database? = null, rollbackOnFailure: Boolean = false, block: () -> T): Exception? {
    return try {
        transaction(db) {
            try {
                block()
                commit()
                null
            } catch (failure: Exception) {
                if (rollbackOnFailure) rollback()
                failure
            }
        }
    } catch (failure: Exception) {
        failure.printStackTrace()
        failure
    }
}
/**
 * Runs [block] in a transaction, committing on success and rolling back on
 * failure.
 *
 * @return (result, null) on success, (null, exception) on failure — exactly
 * one side of the pair is non-null.
 */
fun <T> executeWithResult(db: Database? = null, block: () -> T): Pair<T?, Exception?> {
    return try {
        transaction(db) {
            try {
                val outcome = block()
                commit()
                outcome to null
            } catch (failure: Exception) {
                rollback()
                null to failure
            }
        }
    } catch (failure: Exception) {
        failure.printStackTrace()
        null to failure
    }
}
/**
 * Runs [block] in a transaction and commits.
 *
 * @return true when the block committed cleanly, false when anything threw
 * (stack trace printed, exception swallowed).
 */
fun <T> executeWithStatus(db: Database? = null, block: () -> T): Boolean {
    return try {
        transaction(db) {
            try {
                block()
                commit()
            } catch (inner: Exception) {
                inner.printStackTrace()
                throw inner
            }
        }
        true
    } catch (outer: Exception) {
        outer.printStackTrace()
        false
    }
}
/** Convenience overload: status-returning execution against a [DataSource]. */
fun <T> executeWithStatus(db: DataSource? = null, block: () -> T): Boolean =
    executeWithStatus(db?.database, block)
/** True when this exception is an Exposed [ExposedSQLException]. */
fun Exception.isExposedSqlException(): Boolean = this is ExposedSQLException
/**
 * True when this exception is a MySQL duplicate-entry violation:
 * cause is an integrity-constraint violation and the vendor error code is
 * 1062 (ER_DUP_ENTRY).
 */
fun ExposedSQLException.isCausedByDuplicateError(): Boolean =
    cause is SQLIntegrityConstraintViolationException && errorCode == 1062

View File

@ -0,0 +1,107 @@
package no.iktdev.eventi.implementations
import kotlinx.coroutines.*
import mu.KotlinLogging
import no.iktdev.eventi.data.EventImpl
import org.springframework.context.ApplicationContext
import org.springframework.stereotype.Service
/**
 * Polls the [EventsManagerImpl] for events and dispatches them to all
 * Spring-registered [EventListenerImpl] beans. Polling wakes up either when a
 * new event is produced through [produceNewEvent] or after a 5 s timeout.
 */
abstract class EventCoordinator<T: EventImpl, E: EventsManagerImpl<T>> {
    abstract var applicationContext: ApplicationContext
    abstract var eventManager: E

    //private val listeners: MutableList<EventListener<T>> = mutableListOf()
    private val log = KotlinLogging.logger {}
    private var coroutine = CoroutineScope(Dispatchers.IO + Job())

    private var ready: Boolean = false
    fun isReady(): Boolean {
        return ready
    }

    init {
        // NOTE(review): polling is launched from the base-class constructor,
        // before subclass properties (applicationContext, eventManager) are
        // initialized — this relies on the first poll iteration happening
        // after construction completes; confirm.
        ready = true
        pullForEvents()
    }

    var taskMode: ActiveMode = ActiveMode.Active

    // Dispatches events grouped per referenceId; groups are processed
    // concurrently, events within a group sequentially and in order.
    private fun onEventsReceived(list: List<T>) = runBlocking {
        val listeners = getListeners()
        list.groupBy { it.metadata.referenceId }.forEach { (referenceId, events) ->
            launch {
                events.forEach { event ->
                    listeners.forEach { listener ->
                        if (listener.shouldIProcessAndHandleEvent(event, events))
                            listener.onEventsReceived(event, events)
                    }
                }
            }
        }
    }

    // Set by produceNewEvent to cut the poll wait short.
    private var newItemReceived: Boolean = false

    // Poll loop: read all available events, dispatch, then wait for either a
    // new item or the 5 s timeout before polling again.
    private fun pullForEvents() {
        coroutine.launch {
            while (taskMode == ActiveMode.Active) {
                val events = eventManager?.readAvailableEvents()
                if (events == null) {
                    log.warn { "EventManager is not loaded!" }
                } else {
                    onEventsReceived(events)
                }
                waitForConditionOrTimeout(5000) { newItemReceived }.also {
                    newItemReceived = false
                }
            }
        }
    }

    /** All Spring @Service beans that implement [EventListenerImpl]. */
    @Suppress("UNCHECKED_CAST") // beans are discovered untyped; cast is unavoidable
    fun getListeners(): List<EventListenerImpl<T, *>> {
        val serviceBeans: Map<String, Any> = applicationContext.getBeansWithAnnotation(Service::class.java)
        val beans = serviceBeans.values.stream()
            .filter { bean: Any? -> bean is EventListenerImpl<*, *> }
            .map { it -> it as EventListenerImpl<*, *> }
            .toList()
        return beans as List<EventListenerImpl<T, *>>
    }

    /**
     * Stores [event] and wakes the poll loop.
     * @return true if it was stored
     */
    fun produceNewEvent(event: T): Boolean {
        val isStored = eventManager?.storeEvent(event) ?: false
        if (isStored) {
            newItemReceived = true
        }
        return isStored
    }

    /**
     * Suspends until [condition] becomes true or [timeout] ms have elapsed.
     *
     * Bug fix: this previously used `withTimeout`, which throws
     * TimeoutCancellationException whenever the timeout elapses — every idle
     * 5 s window would kill the polling coroutine in [pullForEvents] and stop
     * event processing permanently. `withTimeoutOrNull` returns null instead.
     */
    suspend fun waitForConditionOrTimeout(timeout: Long, condition: () -> Boolean) {
        withTimeoutOrNull(timeout) {
            while (!condition()) {
                delay(100)
            }
        }
    }
}
// TODO: Not implemented yet — only Active is honored by the poll loop.
/** Whether the coordinator actively polls for events. */
enum class ActiveMode {
    Active,
    Passive
}

View File

@ -0,0 +1,57 @@
package no.iktdev.eventi.implementations
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.data.EventMetadata
import no.iktdev.eventi.data.EventStatus
import no.iktdev.eventi.data.isSuccessful
/**
 * Base class for event listeners. A listener declares which event types it
 * reacts to ([listensForEvents]) and which event type it produces
 * ([produceEvent]); the [EventCoordinator] routes events to it via
 * [shouldIProcessAndHandleEvent] / [onEventsReceived].
 */
abstract class EventListenerImpl<T: EventImpl, E: EventsManagerImpl<T>> {
    abstract val coordinator: EventCoordinator<T, E>?
    abstract val produceEvent: Any
    abstract val listensForEvents: List<Any>

    /** Hands a newly produced event to the coordinator for storage. */
    protected open fun onProduceEvent(event: T) {
        // Bug fix: the original `coordinator?.produceNewEvent(event) ?: { println(...) }`
        // built a lambda on the null branch but never invoked it, so a missing
        // coordinator was silently ignored.
        val owner = coordinator
        if (owner != null) {
            owner.produceNewEvent(event)
        } else {
            println("No Coordinator set")
        }
    }

    /** True when [event]'s type is one this listener subscribes to. */
    open fun isOfEventsIListenFor(event: T): Boolean {
        return listensForEvents.any { it == event.eventType }
    }

    // NOTE(review): the type parameter here shadows the class-level T; kept
    // as-is because subclasses override this exact signature.
    open fun <T: EventImpl> isPrerequisitesFulfilled(incomingEvent: T, events: List<T>): Boolean {
        return true
    }

    /**
     * Gate for processing: the event must be of a subscribed type, have its
     * prerequisites fulfilled, be successful, and not already have a derived
     * event in the collection (prevents re-processing).
     */
    open fun shouldIProcessAndHandleEvent(incomingEvent: T, events: List<T>): Boolean {
        if (!isOfEventsIListenFor(incomingEvent))
            return false
        if (!isPrerequisitesFulfilled(incomingEvent, events)) {
            return false
        }
        if (!incomingEvent.isSuccessful()) {
            return false
        }
        val isDerived = events.any { it.metadata.derivedFromEventId == incomingEvent.metadata.eventId } // && incomingEvent.eventType == produceEvent
        return !isDerived
    }

    /**
     * @param incomingEvent Can be a new event or iterated from sequence in order to re-produce events
     * @param events Will be all available events for the collection with the same reference id
     */
    abstract fun onEventsReceived(incomingEvent: T, events: List<T>)

    /** Builds metadata for an event derived from this one (same referenceId). */
    fun T.makeDerivedEventInfo(status: EventStatus): EventMetadata {
        return EventMetadata(
            referenceId = this.metadata.referenceId,
            derivedFromEventId = this.metadata.eventId,
            status = status
        )
    }
}

View File

@ -0,0 +1,15 @@
package no.iktdev.eventi.implementations
import no.iktdev.eventi.data.EventImpl
import no.iktdev.mediaprocessing.shared.common.datasource.DataSource
/**
 * Persistence boundary for events. Interacts with the database through the
 * given [DataSource]; used by (and only by) the [EventCoordinator].
 */
abstract class EventsManagerImpl<T: EventImpl>(val dataSource: DataSource) {
    /** All events currently available, across every reference id. */
    abstract fun readAvailableEvents(): List<T>
    /** All events belonging to the chain identified by [referenceId]. */
    abstract fun readAvailableEventsFor(referenceId: String): List<T>
    /** Persists [event]; returns true when stored. */
    abstract fun storeEvent(event: T): Boolean
}

View File

@ -0,0 +1,41 @@
/**
* This is only to run the code and verify behavior
*/
package no.iktdev.eventi
import no.iktdev.eventi.data.EventImpl
import no.iktdev.mediaprocessing.shared.common.datasource.DataSource
import no.iktdev.eventi.database.DatabaseConnectionConfig
import no.iktdev.eventi.implementations.EventListenerImpl
import no.iktdev.eventi.implementations.EventsManagerImpl
import no.iktdev.eventi.mock.MockEventManager
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.Table
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication
import org.springframework.context.ApplicationContext
import org.springframework.context.annotation.Bean
import org.springframework.stereotype.Component
/**
 * Minimal Spring Boot application used only to run and verify Eventi behavior
 * (see file header); wires a [MockEventManager] as the event manager bean.
 */
@SpringBootApplication
class EventiApplication {
    @Autowired
    lateinit var applicationContext: ApplicationContext

    @Bean
    fun eventManager(): EventsManagerImpl<EventImpl> {
        return MockEventManager()
    }
}
/** Boots the verification application. */
fun main() {
    runApplication<EventiApplication>()
}

View File

@ -0,0 +1,40 @@
package no.iktdev.eventi
import kotlinx.coroutines.delay
import kotlinx.coroutines.runBlocking
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.eventi.mock.MockEventManager
import org.assertj.core.api.Assertions
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.ApplicationContext
/**
 * Base Spring Boot test: boots [EventiApplication] and waits for the
 * autowired coordinator to report ready before each test.
 */
@SpringBootTest(classes = [EventiApplication::class])
class EventiApplicationTests {
    @Autowired
    lateinit var context: ApplicationContext

    @Autowired
    var coordinator: EventCoordinator<EventImpl, MockEventManager>? = null

    // Busy-waits (100 ms steps) until the coordinator's init has completed.
    // NOTE(review): no upper bound — a coordinator that never becomes ready
    // hangs the test until the framework timeout; confirm acceptable.
    @BeforeEach
    fun awaitCreationOfCoordinator() {
        runBlocking {
            while (coordinator?.isReady() != true) {
                delay(100)
            }
        }
    }

    @Test
    fun contextLoads() {
        Assertions.assertThat(coordinator?.getListeners()).isNotEmpty()
    }
}

View File

@ -0,0 +1,53 @@
package no.iktdev.eventi
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.implementations.EventsManagerImpl
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.junit.jupiter.SpringExtension
import java.util.concurrent.TimeUnit
/**
 * Base class for implementation tests: clears the mock event store before
 * each test and provides polling helpers for asynchronous assertions.
 */
@ExtendWith(SpringExtension::class)
open class EventiImplementationBase: EventiApplicationTests() {
    @BeforeEach
    fun clearData() {
        coordinator!!.eventManager.events.clear()
    }

    @Autowired
    var eventManager: EventsManagerImpl<EventImpl>? = null

    @Test
    fun validateCoordinatorConstruction() {
        assertThat(eventManager).isNotNull()
        assertThat(eventManager?.dataSource).isNotNull()
        assertThat(coordinator).isNotNull()
        assertThat(coordinator?.eventManager?.dataSource).isNotNull()
    }

    // 300 000 ms (5 minutes). Fix: previously written as `3_00000`, whose
    // digit grouping misleadingly read as 30 000; the value is unchanged.
    private val timeout = 300_000

    /**
     * Polls [condition] every 500 ms until it is true or [timeout] elapses.
     * @return the final value of [condition]
     */
    fun runPull(condition: () -> Boolean): Boolean {
        val startTime = System.currentTimeMillis()
        while (System.currentTimeMillis() - startTime < timeout) {
            if (condition()) {
                return true
            }
            TimeUnit.MILLISECONDS.sleep(500)
        }
        return condition()
    }

    /** All events currently held by the coordinator's event manager. */
    fun getEvents(): List<EventImpl> {
        return coordinator?.eventManager?.readAvailableEvents() ?: emptyList()
    }
}

View File

@ -0,0 +1,22 @@
package no.iktdev.eventi
import org.springframework.beans.factory.support.DefaultListableBeanFactory
import org.springframework.context.ApplicationContext
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider
import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import org.springframework.context.support.GenericApplicationContext
import org.springframework.core.type.filter.AnnotationTypeFilter
import org.springframework.stereotype.Service
import java.util.*
/**
 * Test configuration holder. Currently only exposes a fixed reference id so
 * tests can share one event chain deterministically.
 */
@Configuration
class TestConfig {
    companion object {
        // All-zero UUID: stable referenceId reused across tests.
        val persistentReferenceId: String = "00000000-0000-0000-0000-000000000000"
    }
}

View File

@ -0,0 +1,11 @@
package no.iktdev.eventi.mock
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.implementations.EventCoordinator
import no.iktdev.eventi.implementations.EventListenerImpl
/**
 * Test base listener bound to the mock coordinator/manager pair; concrete
 * test listeners supply the produced/consumed event types.
 */
abstract class MockDataEventListener() : EventListenerImpl<EventImpl, MockEventManager>() {
    abstract override val produceEvent: Any
    abstract override val listensForEvents: List<Any>
    abstract override val coordinator: MockEventCoordinator?
}

View File

@ -0,0 +1,32 @@
package no.iktdev.eventi.mock
import no.iktdev.eventi.database.DatabaseConnectionConfig
import no.iktdev.mediaprocessing.shared.common.datasource.DataSource
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.Table
// Dummy connection config for MockDataSource — never actually connected to.
// NOTE(review): name is missing an "e" (fakeDatabasConfig); renaming would
// touch its usages, so left as-is here.
val fakeDatabasConfig = DatabaseConnectionConfig(
    address = "0.0.0.0",
    port = "3033",
    username = "TST",
    password = "TST",
    databaseName = "events"
)
/**
 * No-op [DataSource] for tests: all operations are stubs; only [toDatabase]
 * is left unimplemented and will throw if ever reached.
 */
class MockDataSource(): DataSource(fakeDatabasConfig) {
    override fun connect() {}
    override fun createDatabase(): Database? { return null }
    override fun createTables(vararg tables: Table) {}
    override fun createDatabaseStatement(): String { return "" }
    override fun toConnectionUrl(): String { return "" }
    override fun toDatabaseConnectionUrl(database: String): String { return "" }
    override fun toDatabase(): Database { TODO("Not yet implemented") }
}

View File

@ -0,0 +1,18 @@
package no.iktdev.eventi.mock
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.implementations.EventCoordinator
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.ApplicationContext
import org.springframework.stereotype.Component
/**
 * Concrete [EventCoordinator] for tests, wired to the in-memory
 * [MockEventManager] via Spring.
 */
@Component
class MockEventCoordinator(
    @Autowired
    override var applicationContext: ApplicationContext,
    @Autowired
    override var eventManager: MockEventManager
) : EventCoordinator<EventImpl, MockEventManager>() {
}

View File

@ -0,0 +1,21 @@
package no.iktdev.eventi.mock
import no.iktdev.eventi.data.EventImpl
import no.iktdev.eventi.implementations.EventsManagerImpl
import org.springframework.stereotype.Component
/**
 * In-memory [EventsManagerImpl] for tests — events live in a plain mutable
 * list instead of a database.
 */
@Component
class MockEventManager(dataSource: MockDataSource = MockDataSource()) : EventsManagerImpl<EventImpl>(dataSource) {
    /** Backing store; tests clear this between runs. */
    val events: MutableList<EventImpl> = mutableListOf()

    override fun readAvailableEvents(): List<EventImpl> = events.toList()

    override fun readAvailableEventsFor(referenceId: String): List<EventImpl> =
        events.filter { it.metadata.referenceId == referenceId }

    override fun storeEvent(event: EventImpl): Boolean = events.add(event)
}

Some files were not shown because too many files have changed in this diff Show More