Wip 2
This commit is contained in:
parent
2c61650a0e
commit
2400cf0580
4
.gitignore
vendored
4
.gitignore
vendored
@ -36,10 +36,12 @@ bin/
|
||||
/.nb-gradle/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
||||
|
||||
|
||||
### Mac OS ###
|
||||
.DS_Store
|
||||
|
||||
|
||||
.idea/runConfigurations
|
||||
/apps/pyMetadata/venv/
|
||||
/apps/pyWatcher/venv/
|
||||
|
||||
@ -39,7 +39,7 @@ dependencies {
|
||||
|
||||
implementation("no.iktdev:exfl:0.0.16-SNAPSHOT")
|
||||
implementation("no.iktdev.library:subtitle:1.8.1-SNAPSHOT")
|
||||
implementation("no.iktdev:eventi:1.0-rc13")
|
||||
implementation("no.iktdev:eventi:1.0-rc15")
|
||||
|
||||
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
|
||||
@ -52,6 +52,8 @@ dependencies {
|
||||
|
||||
testImplementation("io.mockk:mockk:1.12.0")
|
||||
testImplementation("org.springframework.boot:spring-boot-starter-test")
|
||||
testImplementation(project(":shared:common", configuration = "testArtifacts"))
|
||||
|
||||
}
|
||||
|
||||
tasks.test {
|
||||
|
||||
@ -6,15 +6,15 @@ import no.iktdev.eventi.tasks.TaskTypeRegistry
|
||||
import no.iktdev.exfl.coroutines.CoroutinesDefault
|
||||
import no.iktdev.exfl.coroutines.CoroutinesIO
|
||||
import no.iktdev.exfl.observable.Observables
|
||||
import no.iktdev.mediaprocessing.shared.common.DatabaseApplication
|
||||
import no.iktdev.mediaprocessing.shared.common.MediaProcessingApp
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.EventRegistry
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.TaskRegistry
|
||||
import no.iktdev.mediaprocessing.shared.common.DatabaseApplication
|
||||
import no.iktdev.mediaprocessing.shared.common.getAppVersion
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication
|
||||
import org.springframework.boot.runApplication
|
||||
import org.springframework.context.annotation.Configuration
|
||||
|
||||
|
||||
@MediaProcessingApp
|
||||
open class ConverterApplication: DatabaseApplication() {
|
||||
}
|
||||
|
||||
|
||||
@ -4,7 +4,8 @@ import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.launch
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.tasks.AbstractTaskPoller
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskPollerImplementation
|
||||
import no.iktdev.eventi.tasks.TaskReporter
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.EventStore
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
@ -30,7 +31,7 @@ class PollerAdministrator(
|
||||
@Service
|
||||
class TaskPoller(
|
||||
private val reporter: TaskReporter,
|
||||
) : AbstractTaskPoller(
|
||||
) : TaskPollerImplementation(
|
||||
taskStore = TaskStore,
|
||||
reporterFactory = { reporter } // én reporter brukes for alle tasks
|
||||
) {
|
||||
@ -49,7 +50,7 @@ class DefaultTaskReporter() : TaskReporter {
|
||||
}
|
||||
|
||||
override fun markConsumed(taskId: UUID) {
|
||||
TaskStore.markConsumed(taskId)
|
||||
TaskStore.markConsumed(taskId, TaskStatus.Completed)
|
||||
}
|
||||
|
||||
override fun updateProgress(taskId: UUID, progress: Int) {
|
||||
|
||||
@ -1,7 +1,5 @@
|
||||
package no.iktdev.mediaprocessing.converter.convert
|
||||
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.Data
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.SubtitleFormats
|
||||
import no.iktdev.library.subtitle.Configuration
|
||||
import no.iktdev.library.subtitle.Syncro
|
||||
import no.iktdev.library.subtitle.classes.Dialog
|
||||
@ -10,11 +8,12 @@ import no.iktdev.library.subtitle.export.Export
|
||||
import no.iktdev.library.subtitle.reader.BaseReader
|
||||
import no.iktdev.library.subtitle.reader.Reader
|
||||
import no.iktdev.mediaprocessing.converter.ConverterEnv
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ConvertTask
|
||||
import no.iktdev.mediaprocessing.shared.common.model.SubtitleFormat
|
||||
import java.io.File
|
||||
import kotlin.jvm.Throws
|
||||
|
||||
class Converter2(val data: Data,
|
||||
private val listener: ConvertListener) {
|
||||
class Converter2(val data: ConvertTask.Data,
|
||||
private val listener: ConvertListener) {
|
||||
|
||||
@Throws(FileUnavailableException::class)
|
||||
private fun getReader(): BaseReader? {
|
||||
@ -55,13 +54,13 @@ class Converter2(val data: Data,
|
||||
exporter.write(syncOrNotSync)
|
||||
} else {
|
||||
val exported = mutableListOf<File>()
|
||||
if (data.formats.contains(SubtitleFormats.SRT)) {
|
||||
if (data.formats.contains(SubtitleFormat.SRT)) {
|
||||
exported.add(exporter.writeSrt(syncOrNotSync))
|
||||
}
|
||||
if (data.formats.contains(SubtitleFormats.SMI)) {
|
||||
if (data.formats.contains(SubtitleFormat.SMI)) {
|
||||
exported.add(exporter.writeSmi(syncOrNotSync))
|
||||
}
|
||||
if (data.formats.contains(SubtitleFormats.VTT)) {
|
||||
if (data.formats.contains(SubtitleFormat.VTT)) {
|
||||
exported.add(exporter.writeVtt(syncOrNotSync))
|
||||
}
|
||||
exported
|
||||
|
||||
@ -8,7 +8,6 @@ import no.iktdev.eventi.tasks.TaskType
|
||||
import no.iktdev.mediaprocessing.converter.convert.ConvertListener
|
||||
import no.iktdev.mediaprocessing.converter.convert.Converter2
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ConvertTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ConvertedData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ConvertTask
|
||||
import org.springframework.stereotype.Component
|
||||
import java.util.*
|
||||
@ -44,10 +43,10 @@ class ConvertTaskListener: TaskListener(TaskType.CPU_INTENSIVE) {
|
||||
return try {
|
||||
val result = converter.getResult()
|
||||
val newEvent = ConvertTaskResultEvent(
|
||||
data = ConvertedData(
|
||||
data = ConvertTaskResultEvent.ConvertedData(
|
||||
language = task.data.language,
|
||||
outputFiles = result,
|
||||
baseName = task.data.storeFileName
|
||||
baseName = task.data.outputFileName
|
||||
),
|
||||
status = TaskStatus.Completed
|
||||
).producedFrom(task)
|
||||
|
||||
@ -1,21 +1,24 @@
|
||||
package no.iktdev.mediaprocessing.converter
|
||||
|
||||
import io.mockk.junit5.MockKExtension
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.mediaprocessing.shared.common.TestBase
|
||||
import no.iktdev.mediaprocessing.shared.common.config.DatasourceConfiguration
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.junit.jupiter.api.Assertions.assertNotNull
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.extension.ExtendWith
|
||||
import org.springframework.beans.factory.annotation.Autowired
|
||||
import org.springframework.boot.test.context.SpringBootTest
|
||||
import org.springframework.test.context.ActiveProfiles
|
||||
import org.springframework.test.context.TestPropertySource
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension
|
||||
|
||||
@SpringBootTest(classes = [ConverterApplication::class])
|
||||
@SpringBootTest(
|
||||
classes = [ConverterApplication::class,
|
||||
DatasourceConfiguration::class],
|
||||
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT
|
||||
)
|
||||
@TestPropertySource(properties = ["spring.flyway.enabled=true"])
|
||||
@ExtendWith(SpringExtension::class)
|
||||
class ConverterApplicationTest {
|
||||
private val log = KotlinLogging.logger {}
|
||||
class ConverterApplicationTest: TestBase() {
|
||||
|
||||
data class TestTask(
|
||||
val success: Boolean
|
||||
|
||||
@ -0,0 +1,7 @@
|
||||
package no.iktdev.mediaprocessing.converter.listeners
|
||||
|
||||
class ConvertTaskListenerTest {
|
||||
|
||||
|
||||
|
||||
}
|
||||
28
apps/converter/src/test/resources/application.yml
Normal file
28
apps/converter/src/test/resources/application.yml
Normal file
@ -0,0 +1,28 @@
|
||||
spring:
|
||||
main:
|
||||
allow-bean-definition-overriding: true
|
||||
flyway:
|
||||
enabled: false
|
||||
locations: classpath:flyway
|
||||
autoconfigure:
|
||||
exclude:
|
||||
- org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration
|
||||
|
||||
output:
|
||||
ansi:
|
||||
enabled: always
|
||||
|
||||
springdoc:
|
||||
swagger-ui:
|
||||
path: /open/swagger-ui
|
||||
|
||||
logging:
|
||||
level:
|
||||
org.springframework.web.socket.config.WebSocketMessageBrokerStats: WARN
|
||||
org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping: DEBUG
|
||||
|
||||
management:
|
||||
endpoints:
|
||||
web:
|
||||
exposure:
|
||||
include: mappings
|
||||
@ -22,7 +22,7 @@ repositories {
|
||||
}
|
||||
|
||||
|
||||
val exposedVersion = "0.44.0"
|
||||
val exposedVersion = "0.61.0"
|
||||
dependencies {
|
||||
|
||||
/*Spring boot*/
|
||||
@ -39,7 +39,7 @@ dependencies {
|
||||
|
||||
implementation("no.iktdev:exfl:0.0.16-SNAPSHOT")
|
||||
implementation("no.iktdev.streamit.library:streamit-library-db:1.0.0-alpha14")
|
||||
implementation("no.iktdev:eventi:1.0-rc13")
|
||||
implementation("no.iktdev:eventi:1.0-rc16")
|
||||
|
||||
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
|
||||
@ -60,6 +60,7 @@ dependencies {
|
||||
implementation(kotlin("stdlib-jdk8"))
|
||||
testImplementation("org.assertj:assertj-core:3.21.0")
|
||||
|
||||
|
||||
testImplementation("junit:junit:4.12")
|
||||
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.14.2")
|
||||
|
||||
@ -79,12 +80,24 @@ dependencies {
|
||||
testImplementation("org.mockito:mockito-core:3.+")
|
||||
testImplementation("org.assertj:assertj-core:3.4.1")
|
||||
testImplementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.3.0")
|
||||
testImplementation("org.jetbrains.kotlinx:kotlinx-coroutines-test:1.10.2")
|
||||
testImplementation("io.mockk:mockk:1.13.9")
|
||||
testImplementation("org.mockito:mockito-inline:5.2.0")
|
||||
testImplementation("org.mockito.kotlin:mockito-kotlin:5.2.1")
|
||||
testImplementation("org.mockito:mockito-junit-jupiter:5.11.0")
|
||||
testImplementation(project(":shared:common", configuration = "testArtifacts"))
|
||||
testImplementation("org.springframework.boot:spring-boot-starter-test")
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
tasks.withType<Test> {
|
||||
useJUnitPlatform()
|
||||
}
|
||||
|
||||
|
||||
|
||||
kotlin {
|
||||
jvmToolchain(21)
|
||||
}
|
||||
|
||||
@ -7,12 +7,14 @@ import no.iktdev.exfl.coroutines.CoroutinesDefault
|
||||
import no.iktdev.exfl.coroutines.CoroutinesIO
|
||||
import no.iktdev.exfl.observable.Observables
|
||||
import no.iktdev.mediaprocessing.shared.common.DatabaseApplication
|
||||
import no.iktdev.mediaprocessing.shared.common.MediaProcessingApp
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.EventRegistry
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.TaskRegistry
|
||||
import no.iktdev.mediaprocessing.shared.common.getAppVersion
|
||||
import org.springframework.boot.runApplication
|
||||
import org.springframework.context.annotation.Configuration
|
||||
|
||||
@MediaProcessingApp
|
||||
class CoordinatorApplication: DatabaseApplication() {
|
||||
}
|
||||
|
||||
|
||||
@ -4,8 +4,14 @@ import java.io.File
|
||||
|
||||
class CoordinatorEnv {
|
||||
companion object {
|
||||
val streamitAddress = System.getenv("STREAMIT_ADDRESS") ?: "http://streamit.service"
|
||||
|
||||
val ffprobe: String = System.getenv("SUPPORTING_EXECUTABLE_FFPROBE") ?: "ffprobe"
|
||||
|
||||
val preference: File = File("/data/config/preference.json")
|
||||
|
||||
var cachedContent: File = if (!System.getenv("DIRECTORY_CONTENT_CACHE").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_CACHE")) else File("/src/cache")
|
||||
val outgoingContent: File = if (!System.getenv("DIRECTORY_CONTENT_OUTGOING").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_OUTGOING")) else File("/src/output")
|
||||
|
||||
}
|
||||
}
|
||||
@ -3,8 +3,8 @@ package no.iktdev.mediaprocessing.coordinator
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.launch
|
||||
import no.iktdev.eventi.events.AbstractEventPoller
|
||||
import no.iktdev.eventi.events.EventDispatcher
|
||||
import no.iktdev.eventi.events.EventPollerImplementation
|
||||
import no.iktdev.eventi.events.SequenceDispatchQueue
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.EventStore
|
||||
import org.springframework.boot.ApplicationArguments
|
||||
@ -26,5 +26,6 @@ class PollerAdministrator(
|
||||
val sequenceDispatcher = SequenceDispatchQueue(8)
|
||||
val dispatcher = EventDispatcher(eventStore = EventStore)
|
||||
|
||||
class EventPoller: AbstractEventPoller(eventStore = EventStore, dispatchQueue = sequenceDispatcher, dispatcher = dispatcher) {
|
||||
@Component
|
||||
class EventPoller: EventPollerImplementation(eventStore = EventStore, dispatchQueue = sequenceDispatcher, dispatcher = dispatcher) {
|
||||
}
|
||||
|
||||
@ -7,10 +7,10 @@ import no.iktdev.mediaprocessing.ffmpeg.dsl.VideoCodec
|
||||
import no.iktdev.mediaprocessing.shared.common.silentTry
|
||||
import java.io.File
|
||||
|
||||
class ProcesserPreference {
|
||||
val videoPreference: VideoPreference? = null
|
||||
data class ProcesserPreference(
|
||||
val videoPreference: VideoPreference? = null,
|
||||
val audioPreference: AudioPreference? = null
|
||||
}
|
||||
)
|
||||
|
||||
data class VideoPreference(
|
||||
val codec: VideoCodec,
|
||||
@ -18,7 +18,7 @@ data class VideoPreference(
|
||||
)
|
||||
|
||||
data class AudioPreference(
|
||||
val language: String,
|
||||
val language: String? = null,
|
||||
val codec: AudioCodec
|
||||
)
|
||||
|
||||
|
||||
@ -0,0 +1,22 @@
|
||||
package no.iktdev.mediaprocessing.coordinator
|
||||
|
||||
import org.springframework.boot.web.client.RestTemplateBuilder
|
||||
import org.springframework.context.annotation.Bean
|
||||
import org.springframework.context.annotation.Configuration
|
||||
import org.springframework.web.client.RestTemplate
|
||||
|
||||
@Configuration
|
||||
class RestTemplateConfig {
|
||||
|
||||
@Configuration
|
||||
class RestTemplateConfig {
|
||||
|
||||
@Bean
|
||||
fun streamitRestTemplate(): RestTemplate {
|
||||
return RestTemplateBuilder()
|
||||
.rootUri(CoordinatorEnv.streamitAddress)
|
||||
.build()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,36 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CollectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.projection.CollectProjection
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Component
|
||||
class CollectEventsListener: EventListener() {
|
||||
private val log = KotlinLogging.logger {}
|
||||
|
||||
val undesiredStates = listOf(CollectProjection.TaskStatus.Failed, CollectProjection.TaskStatus.Pending)
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
|
||||
val collectProjection = CollectProjection(history)
|
||||
log.info { collectProjection.prettyPrint() }
|
||||
|
||||
val taskStatus = collectProjection.getTaskStatus()
|
||||
if (taskStatus.all { it == CollectProjection.TaskStatus.NotInitiated }) {
|
||||
// No work has been done, so we are not ready
|
||||
return null
|
||||
}
|
||||
val statusAcceptable = taskStatus.none { it in undesiredStates }
|
||||
if (!statusAcceptable) {
|
||||
log.warn { "One or more tasks have failed in ${event.referenceId}" }
|
||||
return null
|
||||
}
|
||||
|
||||
return CollectedEvent(history.map { it.eventId }.toSet()).derivedOf(event)
|
||||
}
|
||||
}
|
||||
@ -1,15 +1,60 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ConvertTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ConvertTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.stereotype.Component
|
||||
import java.io.File
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
|
||||
@Component
|
||||
class MediaCreateConvertTaskListener: EventListener() {
|
||||
private val log = KotlinLogging.logger {}
|
||||
|
||||
fun allowOverwrite(): Boolean {
|
||||
return true
|
||||
}
|
||||
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
return null;
|
||||
|
||||
val startedEvent = history.filterIsInstance<StartProcessingEvent>().firstOrNull() ?: return null
|
||||
if (startedEvent.data.operation.isNotEmpty()) {
|
||||
if (!startedEvent.data.operation.contains(OperationType.Convert))
|
||||
return null
|
||||
}
|
||||
val selectedEvent = event as? ProcesserExtractResultEvent ?: return null
|
||||
if (selectedEvent.status != TaskStatus.Completed)
|
||||
return null
|
||||
|
||||
|
||||
val result = selectedEvent.data ?: return null
|
||||
if (!Files.exists(Path.of(result.cachedOutputFile)))
|
||||
return null
|
||||
val useFile = File(result.cachedOutputFile)
|
||||
|
||||
val convertTask = ConvertTask(
|
||||
data = ConvertTask.Data(
|
||||
inputFile = result.cachedOutputFile,
|
||||
language = result.language,
|
||||
allowOverwrite = allowOverwrite(),
|
||||
outputDirectory = useFile.parentFile.absolutePath,
|
||||
outputFileName = useFile.nameWithoutExtension,
|
||||
)
|
||||
).derivedOf(event)
|
||||
TaskStore.persist(convertTask)
|
||||
|
||||
return ConvertTaskCreatedEvent(convertTask.taskId)
|
||||
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,48 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoverDownloadTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MetadataSearchResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.CoverDownloadTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Component
|
||||
class MediaCreateCoverDownloadTaskListener: EventListener() {
|
||||
private val log = KotlinLogging.logger {}
|
||||
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
val useEvent = event as? MetadataSearchResultEvent ?: return null
|
||||
if (useEvent.status != TaskStatus.Completed) {
|
||||
log.warn("MetadataResult on ${event.referenceId} did not complete successfully")
|
||||
return null
|
||||
}
|
||||
|
||||
val downloadData = useEvent.results.map {
|
||||
val data = it.data
|
||||
val outputFileName = "${data.title}-${data.source}"
|
||||
CoverDownloadTask.CoverDownloadData(
|
||||
url = it.data.cover,
|
||||
source = it.data.source,
|
||||
outputFileName = outputFileName
|
||||
)
|
||||
}
|
||||
|
||||
val downloadTasks = downloadData.map {
|
||||
CoverDownloadTask(it)
|
||||
.derivedOf(useEvent)
|
||||
}
|
||||
|
||||
downloadTasks.forEach { TaskStore.persist(it) }
|
||||
|
||||
return CoverDownloadTaskCreatedEvent(
|
||||
downloadTasks.map { it.taskId }
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -10,6 +10,8 @@ import no.iktdev.mediaprocessing.ffmpeg.dsl.VideoCodec
|
||||
import no.iktdev.mediaprocessing.ffmpeg.dsl.VideoTarget
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksEncodeSelectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserEncodeTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.EncodeData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.EncodeTask
|
||||
@ -20,7 +22,6 @@ import java.io.File
|
||||
@Component
|
||||
class MediaCreateEncodeTaskListener : EventListener() {
|
||||
|
||||
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
@ -28,6 +29,10 @@ class MediaCreateEncodeTaskListener : EventListener() {
|
||||
val preference = Preference.getProcesserPreference()
|
||||
|
||||
val startedEvent = history.filterIsInstance<StartProcessingEvent>().firstOrNull() ?: return null
|
||||
if (startedEvent.data.operation.isNotEmpty()) {
|
||||
if (!startedEvent.data.operation.contains(OperationType.Encode))
|
||||
return null
|
||||
}
|
||||
val selectedEvent = event as? MediaTracksEncodeSelectedEvent ?: return null
|
||||
val streams = history.filterIsInstance<MediaStreamParsedEvent>().firstOrNull()?.data ?: return null
|
||||
|
||||
@ -53,20 +58,21 @@ class MediaCreateEncodeTaskListener : EventListener() {
|
||||
audioTracks = audioTargets
|
||||
)
|
||||
val args = plan.toFfmpegArgs(streams.videoStream, streams.audioStream)
|
||||
val filename = startedEvent.data.fileUri.let { File(it) }.nameWithoutExtension
|
||||
val extension = plan.toContainer()
|
||||
|
||||
val task = EncodeTask(
|
||||
data = EncodeData(
|
||||
arguments = args,
|
||||
outputFileName = startedEvent.data.fileUri.let { File(it).nameWithoutExtension },
|
||||
outputFileName = "$filename.$extension",
|
||||
inputFile = startedEvent.data.fileUri
|
||||
)
|
||||
).derivedOf(event)
|
||||
|
||||
|
||||
TaskStore.persist(task)
|
||||
return null // Create task instead of event
|
||||
return ProcesserEncodeTaskCreatedEvent(
|
||||
taskCreated = task.taskId
|
||||
).derivedOf(event)
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
@ -2,8 +2,19 @@ package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.dsl.SubtitleCodec
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksExtractSelectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ExtractSubtitleData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ExtractSubtitleTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.stereotype.Component
|
||||
import java.io.File
|
||||
import java.util.UUID
|
||||
|
||||
@Component
|
||||
class MediaCreateExtractTaskListener: EventListener() {
|
||||
@ -11,9 +22,59 @@ class MediaCreateExtractTaskListener: EventListener() {
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
val useEvent = event as? MediaTracksExtractSelectedEvent ?: return null
|
||||
|
||||
val startedEvent = history.filterIsInstance<StartProcessingEvent>().firstOrNull() ?: return null
|
||||
if (startedEvent.data.operation.isNotEmpty()) {
|
||||
if (!startedEvent.data.operation.contains(OperationType.Extract))
|
||||
return null
|
||||
}
|
||||
|
||||
return null
|
||||
val selectedEvent = event as? MediaTracksExtractSelectedEvent ?: return null
|
||||
val streams = history.filterIsInstance<MediaStreamParsedEvent>().firstOrNull()?.data ?: return null
|
||||
|
||||
val selectedStreams: Map<Int, SubtitleStream> = selectedEvent.selectedSubtitleTracks.associateWith {
|
||||
streams.subtitleStream[it]
|
||||
}
|
||||
|
||||
val entries = selectedStreams.mapNotNull { (idx, stream )->
|
||||
toSubtitleArgumentData(idx, startedEvent.data.fileUri.let { File(it) }, stream)
|
||||
}
|
||||
|
||||
val createdTaskIds: MutableList<UUID> = mutableListOf()
|
||||
entries.forEach { entry ->
|
||||
ExtractSubtitleTask(data = entry).derivedOf(event).also {
|
||||
TaskStore.persist(it)
|
||||
createdTaskIds.add(it.taskId)
|
||||
}
|
||||
}
|
||||
|
||||
return ProcesserExtractTaskCreatedEvent(
|
||||
tasksCreated = createdTaskIds
|
||||
).derivedOf(event)
|
||||
}
|
||||
|
||||
fun toSubtitleArgumentData(index: Int, inputFile: File, stream: SubtitleStream): ExtractSubtitleData? {
|
||||
val codec = SubtitleCodec.getCodec(stream.codec_name) ?: return null
|
||||
val extension = codec.getExtension()
|
||||
|
||||
// ffmpeg-args for å mappe og copy akkurat dette subtitle-sporet
|
||||
val args = mutableListOf<String>()
|
||||
args += listOf("-map", "0:s:$index")
|
||||
args += codec.buildFfmpegArgs(stream)
|
||||
|
||||
val language = stream.tags.language?: return null
|
||||
|
||||
// outputfilnavn basert på index og extension
|
||||
val outputFileName = "${inputFile.nameWithoutExtension}-${language}.${extension}"
|
||||
|
||||
return ExtractSubtitleData(
|
||||
inputFile = inputFile.absolutePath,
|
||||
arguments = args,
|
||||
outputFileName = outputFileName,
|
||||
language = language
|
||||
)
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@ -0,0 +1,75 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.ListenerOrder
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaParsedInfoEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MetadataSearchResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MetadataSearchTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MetadataSearchTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.stereotype.Component
|
||||
import java.util.*
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import java.util.concurrent.Executors
|
||||
import java.util.concurrent.ScheduledFuture
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
@Component
|
||||
@ListenerOrder(5)
|
||||
class MediaCreateMetadataSearchTaskListener: EventListener() {
|
||||
|
||||
private val scheduledExpiries = ConcurrentHashMap<UUID, ScheduledFuture<*>>()
|
||||
private val scheduler = Executors.newScheduledThreadPool(1)
|
||||
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
// For replay
|
||||
if (event is MetadataSearchTaskCreatedEvent) {
|
||||
val hasResult = history.filter { it is MetadataSearchResultEvent }
|
||||
.any { it.metadata.derivedFromId?.contains(event.taskId) == true }
|
||||
|
||||
if (!hasResult) {
|
||||
scheduleTaskExpiry(event.taskId, event.eventId, event.referenceId)
|
||||
}
|
||||
} else if (event is MetadataSearchResultEvent) {
|
||||
val cancelKeys = event.metadata.derivedFromId ?: emptySet()
|
||||
scheduledExpiries.filter { it -> it.key in cancelKeys }.keys.forEach { key ->
|
||||
scheduledExpiries.remove(key)?.cancel(true)
|
||||
}
|
||||
}
|
||||
|
||||
val useEvent = event as? MediaParsedInfoEvent ?: return null
|
||||
|
||||
val task = MetadataSearchTask(
|
||||
MetadataSearchTask.SearchData(
|
||||
searchTitles = useEvent.data.parsedSearchTitles,
|
||||
collection = useEvent.data.parsedCollection
|
||||
)
|
||||
).derivedOf(useEvent)
|
||||
val finalResult = MetadataSearchTaskCreatedEvent(task.taskId).derivedOf(useEvent)
|
||||
scheduleTaskExpiry(task.taskId, finalResult.eventId, task.referenceId)
|
||||
return finalResult
|
||||
}
|
||||
|
||||
private fun scheduleTaskExpiry(taskId: UUID, eventId: UUID, referenceId: UUID) {
|
||||
if (scheduledExpiries.containsKey(taskId)) return
|
||||
|
||||
val future = scheduler.schedule({
|
||||
// Hvis tasken fortsatt ikke har result/failed → marker som failed
|
||||
TaskStore.claim(taskId, "Coordinator-MetadataSearchTaskListener-TimeoutScheduler")
|
||||
TaskStore.markConsumed(taskId, TaskStatus.Failed)
|
||||
val failureEvent = MetadataSearchResultEvent(
|
||||
status = TaskStatus.Failed,
|
||||
).apply { setFailed(listOf(taskId)) }
|
||||
//publishEvent(MetadataSearchFailedEvent(taskId, "Timeout").derivedOf(referenceId))
|
||||
scheduledExpiries.remove(taskId)
|
||||
}, 10, TimeUnit.MINUTES)
|
||||
|
||||
scheduledExpiries[taskId] = future
|
||||
}
|
||||
|
||||
}
|
||||
@ -38,8 +38,8 @@ class MediaDetermineSubtitleTrackTypeListener: EventListener() {
|
||||
}
|
||||
|
||||
|
||||
fun getCommentaryFilters(): Set<String> = setOf("commentary", "kommentar", "kommentaar")
|
||||
fun getSongFilters(): Set<String> = setOf("song", "sign")
|
||||
fun getCommentaryFilters(): Set<String> = setOf("commentary", "comentary", "kommentar", "kommentaar")
|
||||
fun getSongFilters(): Set<String> = setOf("song", "sign", "lyrics")
|
||||
fun getClosedCaptionFilters(): Set<String> = setOf("closed caption", "cc", "close caption", "closed-caption", "cc.")
|
||||
fun getSHDFilters(): Set<String> = setOf("shd", "hh", "hard of hearing", "hard-of-hearing")
|
||||
|
||||
|
||||
@ -3,18 +3,19 @@ package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
import com.google.gson.Gson
|
||||
import com.google.gson.JsonObject
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.ListenerOrder
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.AudioStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.ParsedMediaStreams
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.VideoStream
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoordinatorReadStreamsResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamReadEvent
|
||||
import org.springframework.core.annotation.Order
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Order(4)
|
||||
@ListenerOrder(4)
|
||||
@Component
|
||||
class MediaParseStreamsListener: EventListener() {
|
||||
val log = KotlinLogging.logger {}
|
||||
@ -23,7 +24,13 @@ class MediaParseStreamsListener: EventListener() {
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
if (event !is MediaStreamReadEvent) return null
|
||||
if (event !is CoordinatorReadStreamsResultEvent) return null
|
||||
if (event.status != TaskStatus.Completed)
|
||||
return null
|
||||
if (event.data == null) {
|
||||
log.error { "No data to parse in CoordinatorReadStreamsResultEvent" }
|
||||
return null
|
||||
}
|
||||
|
||||
val streams = parseStreams(event.data)
|
||||
return MediaStreamParsedEvent(
|
||||
|
||||
@ -1,15 +1,15 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.ListenerOrder
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaParsedInfoEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import org.springframework.core.annotation.Order
|
||||
import org.springframework.stereotype.Component
|
||||
import java.io.File
|
||||
|
||||
@Order(2)
|
||||
@ListenerOrder(2)
|
||||
@Component
|
||||
class MediaParsedInfoListener : EventListener() {
|
||||
override fun onEvent(
|
||||
@ -24,12 +24,22 @@ class MediaParsedInfoListener : EventListener() {
|
||||
val searchTitles = file.guessSearchableTitle()
|
||||
val mediaType = file.guessMovieOrSeries()
|
||||
|
||||
val episodeInfo = if (mediaType == MediaType.Serie) {
|
||||
val serieInfo = file.guessSerieInfo()
|
||||
MediaParsedInfoEvent.ParsedData.EpisodeInfo(
|
||||
episodeNumber = serieInfo.episodeNumber,
|
||||
seasonNumber = serieInfo.seasonNumber,
|
||||
episodeTitle = serieInfo.episodeTitle,
|
||||
)
|
||||
} else null
|
||||
|
||||
return MediaParsedInfoEvent(
|
||||
MediaParsedInfoEvent.ParsedData(
|
||||
parsedFileName = filename,
|
||||
parsedCollection = collection,
|
||||
parsedSearchTitles = searchTitles,
|
||||
mediaType = mediaType
|
||||
mediaType = mediaType,
|
||||
episodeInfo = episodeInfo,
|
||||
)
|
||||
).derivedOf(event)
|
||||
}
|
||||
@ -99,6 +109,9 @@ class MediaParsedInfoListener : EventListener() {
|
||||
return when (type) {
|
||||
MediaType.Movie -> this.guessDesiredMovieTitle()
|
||||
MediaType.Serie -> this.guessDesiredSerieTitle()
|
||||
MediaType.Subtitle -> {
|
||||
this.nameWithoutExtension
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -106,6 +119,9 @@ class MediaParsedInfoListener : EventListener() {
|
||||
val collection = when (this.guessMovieOrSeries()) {
|
||||
MediaType.Movie -> this.guessDesiredMovieTitle()
|
||||
MediaType.Serie -> this.guessDesiredSerieTitle()
|
||||
MediaType.Subtitle -> {
|
||||
this.parentFile.parentFile.nameWithoutExtension
|
||||
}
|
||||
}
|
||||
return collection.noParens().noYear().split(" - ").first().trim()
|
||||
}
|
||||
@ -131,6 +147,26 @@ class MediaParsedInfoListener : EventListener() {
|
||||
* @return A fully cleaned title including season and episode with possible episode title
|
||||
*/
|
||||
fun File.guessDesiredSerieTitle(): String {
|
||||
val parsedSerieInfo = this.guessSerieInfo()
|
||||
|
||||
val tag = buildString {
|
||||
append("S${(parsedSerieInfo.seasonNumber ?: 1).toString().padStart(2, '0')}")
|
||||
append("E${(parsedSerieInfo.episodeNumber ?: 1).toString().padStart(2, '0')}")
|
||||
if (parsedSerieInfo.revision != null) append(" (v$parsedSerieInfo.revision)")
|
||||
}
|
||||
|
||||
return buildString {
|
||||
append(parsedSerieInfo.serieTitle)
|
||||
append(" - ")
|
||||
append(tag)
|
||||
if (parsedSerieInfo.episodeTitle.isNotEmpty()) {
|
||||
append(" - ")
|
||||
append(parsedSerieInfo.episodeTitle)
|
||||
}
|
||||
}.trim()
|
||||
}
|
||||
|
||||
fun File.guessSerieInfo(): ParsedSerieInfo {
|
||||
val raw = this.nameWithoutExtension
|
||||
|
||||
val seasonRegex = Regex("""(?i)(?:S|Season|Series)\s*(\d{1,2})""")
|
||||
@ -174,25 +210,18 @@ class MediaParsedInfoListener : EventListener() {
|
||||
baseTitle = this.parentFile?.name?.getCleanedTitle() ?: "Dumb ways to die"
|
||||
}
|
||||
|
||||
val tag = buildString {
|
||||
append("S${(season ?: 1).toString().padStart(2, '0')}")
|
||||
append("E${(episode ?: 1).toString().padStart(2, '0')}")
|
||||
if (revision != null) append(" (v$revision)")
|
||||
}
|
||||
|
||||
return buildString {
|
||||
append(baseTitle)
|
||||
append(" - ")
|
||||
append(tag)
|
||||
if (episodeTitle.isNotEmpty()) {
|
||||
append(" - ")
|
||||
append(episodeTitle)
|
||||
}
|
||||
}.trim()
|
||||
return ParsedSerieInfo(
|
||||
serieTitle = baseTitle,
|
||||
episodeNumber = episode ?: 1,
|
||||
seasonNumber = season ?: 1,
|
||||
revision = revision,
|
||||
episodeTitle
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
fun File.guessSearchableTitle(): List<String> {
|
||||
val cleaned = this.guessDesiredFileName()
|
||||
.noResolutionAndAfter()
|
||||
@ -230,5 +259,13 @@ class MediaParsedInfoListener : EventListener() {
|
||||
return titles.distinct()
|
||||
}
|
||||
|
||||
data class ParsedSerieInfo(
|
||||
val serieTitle: String,
|
||||
val episodeNumber: Int,
|
||||
val seasonNumber: Int,
|
||||
val revision: Int? = null,
|
||||
val episodeTitle: String,
|
||||
)
|
||||
|
||||
|
||||
}
|
||||
@ -1,16 +1,16 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.ListenerOrder
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoordinatorReadStreamsTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaParsedInfoEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamReadTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MediaReadTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.core.annotation.Order
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Order(3)
|
||||
@ListenerOrder(3)
|
||||
@Component
|
||||
class MediaReadStreamsTaskCreatedListener: EventListener() {
|
||||
override fun onEvent(
|
||||
@ -24,9 +24,9 @@ class MediaReadStreamsTaskCreatedListener: EventListener() {
|
||||
|
||||
val readTask = MediaReadTask(
|
||||
fileUri = startEvent.data.fileUri
|
||||
)
|
||||
).derivedOf(event)
|
||||
|
||||
TaskStore.persist(readTask)
|
||||
return null // Create task instead of event
|
||||
return CoordinatorReadStreamsTaskCreatedEvent(readTask.taskId) // Create task instead of event
|
||||
}
|
||||
}
|
||||
@ -33,7 +33,7 @@ class MediaSelectEncodeTracksListener: EventListener() {
|
||||
).derivedOf(event)
|
||||
}
|
||||
|
||||
private fun getAudioExtendedTrackToUse(audioStream: List<AudioStream>, selectedDefaultTrack: Int): Int? {
|
||||
protected fun getAudioExtendedTrackToUse(audioStream: List<AudioStream>, selectedDefaultTrack: Int): Int? {
|
||||
val durationFiltered = audioStream.filterOnPreferredLanguage()
|
||||
.filter { (it.duration_ts ?: 0) > 0 }
|
||||
.filter { it.channels > 2 }
|
||||
@ -47,13 +47,13 @@ class MediaSelectEncodeTracksListener: EventListener() {
|
||||
* If no default track is found, select the first audio track.
|
||||
* If audio track with preferred language (e.g., "nor") is not found, selects "eng" or first available.
|
||||
*/
|
||||
private fun getAudioDefaultTrackToUse(audioStream: List<AudioStream>): Int {
|
||||
protected fun getAudioDefaultTrackToUse(audioStream: List<AudioStream>): Int {
|
||||
val durationFiltered = audioStream.filterOnPreferredLanguage()
|
||||
.filter { (it.duration_ts ?: 0) > 0 }
|
||||
|
||||
val selected = durationFiltered
|
||||
.filter { it.channels == 2 }.ifEmpty { durationFiltered }
|
||||
.maxByOrNull { it.index } ?: audioStream.minByOrNull { it.index } ?: durationFiltered.firstOrNull()
|
||||
.minByOrNull { it.index } ?: audioStream.minByOrNull { it.index } ?: durationFiltered.firstOrNull()
|
||||
|
||||
return audioStream.indexOf(selected)
|
||||
}
|
||||
@ -62,11 +62,11 @@ class MediaSelectEncodeTracksListener: EventListener() {
|
||||
* Filters audio streams based on preferred languages.
|
||||
* If no streams match the preferred languages, returns the original list.
|
||||
*/
|
||||
private fun List<AudioStream>.filterOnPreferredLanguage(): List<AudioStream> {
|
||||
protected fun List<AudioStream>.filterOnPreferredLanguage(): List<AudioStream> {
|
||||
return this.filter { it.tags.language in getAudioLanguagePreference() }.ifEmpty { this }
|
||||
}
|
||||
|
||||
private fun getVideoTrackToUse(streams: List<VideoStream>): Int {
|
||||
protected fun getVideoTrackToUse(streams: List<VideoStream>): Int {
|
||||
val selectStream = streams.filter { (it.duration_ts ?: 0) > 0 }
|
||||
.maxByOrNull { it.duration_ts ?: 0 } ?: streams.minByOrNull { it.index } ?: throw Exception("No video streams found")
|
||||
return streams.indexOf(selectStream)
|
||||
|
||||
@ -11,7 +11,7 @@ import org.springframework.stereotype.Component
|
||||
@Component
|
||||
class MediaSelectExtractTracksListener: EventListener() {
|
||||
|
||||
fun limitToLanguages(): Set<String> {
|
||||
open fun limitToLanguages(): Set<String> {
|
||||
return emptySet()
|
||||
}
|
||||
|
||||
@ -36,7 +36,7 @@ class MediaSelectExtractTracksListener: EventListener() {
|
||||
}
|
||||
|
||||
|
||||
private fun List<SubtitleStream>.filterOnPreferredLanguage(): List<SubtitleStream> {
|
||||
protected fun List<SubtitleStream>.filterOnPreferredLanguage(): List<SubtitleStream> {
|
||||
val languages = limitToLanguages()
|
||||
if (languages.isEmpty()) return this
|
||||
return this.filter { it.tags.language != null }.filter { languages.contains(it.tags.language) }
|
||||
|
||||
@ -0,0 +1,59 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.coordinator.CoordinatorEnv
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CollectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MigrateContentToStoreTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MigrateToContentStoreTask
|
||||
import no.iktdev.mediaprocessing.shared.common.projection.CollectProjection
|
||||
import no.iktdev.mediaprocessing.shared.common.projection.MigrateContentProject
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Component
|
||||
class MigrateCreateStoreTaskListener: EventListener() {
|
||||
private val log = KotlinLogging.logger {}
|
||||
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
val useEvent = event as? CollectedEvent ?: return null
|
||||
val useHistory = history.filter { useEvent.eventIds.contains(it.eventId) }
|
||||
|
||||
val collectProjection = CollectProjection(useHistory)
|
||||
log.info { collectProjection.prettyPrint() }
|
||||
|
||||
val statusAcceptable = collectProjection.getTaskStatus().none { it == CollectProjection.TaskStatus.Failed }
|
||||
if (!statusAcceptable) {
|
||||
log.warn { "One or more tasks have failed in ${event.referenceId}" }
|
||||
}
|
||||
|
||||
val migrateContentProjection = MigrateContentProject(useHistory, CoordinatorEnv.outgoingContent)
|
||||
|
||||
val collection = migrateContentProjection.useStore?.name ?:
|
||||
throw RuntimeException("No content store configured for migration in ${event.referenceId}")
|
||||
|
||||
val videContent = migrateContentProjection.getVideoStoreFile()?.let { MigrateToContentStoreTask.Data.SingleContent(it.cachedFile.absolutePath, it.storeFile.absolutePath) }
|
||||
val subtitleContent = migrateContentProjection.getSubtitleStoreFiles()?.map {
|
||||
MigrateToContentStoreTask.Data.SingleSubtitle(it.language, it.cts.cachedFile.absolutePath, it.cts.storeFile.absolutePath, )
|
||||
}
|
||||
val coverContent = migrateContentProjection.getCoverStoreFiles()?.map {
|
||||
MigrateToContentStoreTask.Data.SingleContent(it.cachedFile.absolutePath, it.storeFile.absolutePath)
|
||||
}
|
||||
val storeTask = MigrateToContentStoreTask(
|
||||
MigrateToContentStoreTask.Data(
|
||||
collection = collection,
|
||||
videoContent = videContent,
|
||||
subtitleContent = subtitleContent,
|
||||
coverContent = coverContent
|
||||
)
|
||||
).derivedOf(event)
|
||||
|
||||
TaskStore.persist(storeTask)
|
||||
|
||||
return MigrateContentToStoreTaskCreatedEvent(storeTask.taskId)
|
||||
}
|
||||
}
|
||||
@ -1,16 +1,12 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.ListenerOrder
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.FileReadyEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcessFlow
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import org.springframework.core.annotation.Order
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.*
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Order(1)
|
||||
@ListenerOrder(1)
|
||||
@Component
|
||||
class StartedListener : EventListener() {
|
||||
override fun onEvent(
|
||||
@ -21,7 +17,7 @@ class StartedListener : EventListener() {
|
||||
|
||||
return StartProcessingEvent(
|
||||
data = StartData(
|
||||
flow = ProcessFlow.Auto,
|
||||
flow = StartFlow.Auto,
|
||||
fileUri = useEvent.data.fileUri,
|
||||
operation = setOf(
|
||||
OperationType.Extract,
|
||||
|
||||
@ -0,0 +1,54 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.events.EventListener
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CollectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MigrateContentToStoreTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StoreContentAndMetadataTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.StoreContentAndMetadataTask
|
||||
import no.iktdev.mediaprocessing.shared.common.model.ContentExport
|
||||
import no.iktdev.mediaprocessing.shared.common.projection.StoreProjection
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.springframework.stereotype.Component
|
||||
|
||||
@Component
|
||||
class StoreContentAndMetadataListener: EventListener() {
|
||||
val log = KotlinLogging.logger {}
|
||||
|
||||
override fun onEvent(
|
||||
event: Event,
|
||||
history: List<Event>
|
||||
): Event? {
|
||||
val useEvent = event as? MigrateContentToStoreTaskResultEvent ?: return null
|
||||
val collectionEvent = history.lastOrNull { it is CollectedEvent } as? CollectedEvent
|
||||
?: return null
|
||||
|
||||
val useHistory = (history.filter { collectionEvent.eventIds.contains(it.eventId) }) + listOf(useEvent)
|
||||
val projection = StoreProjection(useHistory)
|
||||
|
||||
val collection = projection.getCollection()
|
||||
if (collection.isNullOrBlank()) {
|
||||
log.error { "Collection is null @ ${useEvent.referenceId}" }
|
||||
return null
|
||||
}
|
||||
val metadata = projection.projectMetadata()
|
||||
if (metadata == null) {
|
||||
log.error { "Metadata is null @ ${useEvent.referenceId}"}
|
||||
return null
|
||||
}
|
||||
|
||||
|
||||
val exportInfo = ContentExport(
|
||||
collection = collection,
|
||||
media = projection.projectMediaFiles(),
|
||||
episodeInfo = projection.projectEpisodeInfo(),
|
||||
metadata = metadata
|
||||
)
|
||||
|
||||
val task = StoreContentAndMetadataTask(exportInfo).derivedOf(useEvent)
|
||||
TaskStore.persist(task)
|
||||
|
||||
return StoreContentAndMetadataTaskCreatedEvent(task.taskId).derivedOf(useEvent)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,55 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.tasks
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskListener
|
||||
import no.iktdev.eventi.tasks.TaskType
|
||||
import no.iktdev.mediaprocessing.coordinator.CoordinatorEnv
|
||||
import no.iktdev.mediaprocessing.shared.common.DownloadClient
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoverDownloadResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.CoverDownloadTask
|
||||
import org.springframework.stereotype.Component
|
||||
import java.util.UUID
|
||||
|
||||
@Component
|
||||
class DownloadCoverTaskListener: TaskListener(TaskType.MIXED) {
|
||||
val log = KotlinLogging.logger {}
|
||||
|
||||
override fun getWorkerId(): String {
|
||||
return "${this::class.java.simpleName}-${TaskType.CPU_INTENSIVE}-${UUID.randomUUID()}"
|
||||
}
|
||||
|
||||
override fun supports(task: Task): Boolean {
|
||||
return task is CoverDownloadTask
|
||||
}
|
||||
|
||||
override suspend fun onTask(task: Task): Event? {
|
||||
val pickedTask = task as? CoverDownloadTask ?: return null
|
||||
log.info { "Downloading cover from ${pickedTask.data.url}" }
|
||||
val taskData = pickedTask.data
|
||||
|
||||
val downloadClient = DownloadClient(taskData.url, CoordinatorEnv.cachedContent, taskData.outputFileName)
|
||||
val downloadedFile = downloadClient.download()
|
||||
|
||||
|
||||
if (downloadedFile?.exists() == true) {
|
||||
log.info { "Downloaded cover to ${downloadedFile.absolutePath}" }
|
||||
return CoverDownloadResultEvent(
|
||||
status = TaskStatus.Completed,
|
||||
data = CoverDownloadResultEvent.CoverDownloadedData(
|
||||
source = taskData.source,
|
||||
outputFile = downloadedFile.absolutePath
|
||||
)
|
||||
).producedFrom(pickedTask)
|
||||
} else {
|
||||
log.error { "Failed to download cover from ${taskData.url}" }
|
||||
return CoverDownloadResultEvent(
|
||||
status = TaskStatus.Failed,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@ -3,10 +3,11 @@ package no.iktdev.mediaprocessing.coordinator.listeners.tasks
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskType
|
||||
import no.iktdev.mediaprocessing.coordinator.CoordinatorEnv
|
||||
import no.iktdev.mediaprocessing.ffmpeg.FFprobe
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamReadEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoordinatorReadStreamsResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MediaReadTask
|
||||
import org.springframework.stereotype.Component
|
||||
import java.util.UUID
|
||||
@ -29,14 +30,20 @@ class MediaStreamReadTaskListener: FfprobeTaskListener(TaskType.CPU_INTENSIVE) {
|
||||
val probeResult = getFfprobe()
|
||||
.readJsonStreams(pickedTask.fileUri)
|
||||
|
||||
val result = probeResult.data
|
||||
assert(result != null) { "No data returned from ffprobe for ${pickedTask.fileUri}" }
|
||||
val result =
|
||||
probeResult.data ?: throw RuntimeException("No data returned from ffprobe for ${pickedTask.fileUri}")
|
||||
|
||||
return MediaStreamReadEvent(data = result!!).producedFrom(task)
|
||||
return CoordinatorReadStreamsResultEvent(
|
||||
status = TaskStatus.Completed,
|
||||
data = result
|
||||
).producedFrom(task)
|
||||
|
||||
} catch (e: Exception) {
|
||||
log.error(e) { "Error reading media streams for ${pickedTask.fileUri}" }
|
||||
return null
|
||||
return CoordinatorReadStreamsResultEvent(
|
||||
status = TaskStatus.Failed,
|
||||
data = null
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@ -44,6 +51,6 @@ class MediaStreamReadTaskListener: FfprobeTaskListener(TaskType.CPU_INTENSIVE) {
|
||||
return JsonFfinfo(CoordinatorEnv.ffprobe)
|
||||
}
|
||||
|
||||
class JsonFfinfo(override val executable: String): FFprobe() {
|
||||
class JsonFfinfo(executable: String): FFprobe(executable) {
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,114 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.tasks
|
||||
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskListener
|
||||
import no.iktdev.eventi.tasks.TaskType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MigrateContentToStoreTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MigrateToContentStoreTask
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MigrateStatus
|
||||
import org.springframework.stereotype.Component
|
||||
import java.io.File
|
||||
import java.nio.file.Files
|
||||
import java.util.UUID
|
||||
|
||||
@Component
|
||||
class MigrateContentToStoreTaskListener: TaskListener(TaskType.IO_INTENSIVE) {
|
||||
override fun getWorkerId(): String {
|
||||
return "${this::class.java.simpleName}-${taskType}-${UUID.randomUUID()}"
|
||||
}
|
||||
|
||||
override fun supports(task: Task): Boolean {
|
||||
return task is MigrateToContentStoreTask
|
||||
}
|
||||
|
||||
override suspend fun onTask(task: Task): Event? {
|
||||
val pickedTask = task as? MigrateToContentStoreTask ?: return null
|
||||
|
||||
val videoStatus = migrateVideo(pickedTask.data.videoContent)
|
||||
val subtitleStatus = migrateSubtitle(pickedTask.data.subtitleContent ?: emptyList())
|
||||
val coverStatus = migrateCover(pickedTask.data.coverContent ?: emptyList())
|
||||
|
||||
var status = TaskStatus.Completed
|
||||
if (videoStatus.status != MigrateStatus.Failed &&
|
||||
subtitleStatus.none { it.status == MigrateStatus.Failed } &&
|
||||
coverStatus.none { it.status == MigrateStatus.Failed })
|
||||
{
|
||||
pickedTask.data.videoContent?.cachedUri?.let { File(it) }?.deleteOnExit()
|
||||
pickedTask.data.subtitleContent?.forEach { File(it.cachedUri).deleteOnExit() }
|
||||
pickedTask.data.coverContent?.forEach { File(it.cachedUri).deleteOnExit() }
|
||||
} else {
|
||||
status = TaskStatus.Failed
|
||||
}
|
||||
|
||||
|
||||
val completedEvent = MigrateContentToStoreTaskResultEvent(
|
||||
status = status,
|
||||
collection = pickedTask.data.collection,
|
||||
videoMigrate = videoStatus,
|
||||
subtitleMigrate = subtitleStatus,
|
||||
coverMigrate = coverStatus
|
||||
).producedFrom(task)
|
||||
|
||||
return completedEvent
|
||||
}
|
||||
|
||||
private fun migrateVideo(videoContent: MigrateToContentStoreTask.Data.SingleContent?): MigrateContentToStoreTaskResultEvent.FileMigration {
|
||||
if (videoContent == null) return MigrateContentToStoreTaskResultEvent.FileMigration(null, MigrateStatus.NotPresent)
|
||||
val source = File(videoContent.cachedUri)
|
||||
val destination = File(videoContent.storeUri)
|
||||
return try {
|
||||
source.copyTo(destination, overwrite = true)
|
||||
val identical = Files.mismatch(source.toPath(), destination.toPath()) == -1L
|
||||
if (!identical) {
|
||||
return MigrateContentToStoreTaskResultEvent.FileMigration(null, MigrateStatus.Failed)
|
||||
}
|
||||
MigrateContentToStoreTaskResultEvent.FileMigration(destination.absolutePath, MigrateStatus.Completed)
|
||||
} catch (e: Exception) {
|
||||
MigrateContentToStoreTaskResultEvent.FileMigration(null, MigrateStatus.Failed)
|
||||
}
|
||||
}
|
||||
|
||||
private fun migrateSubtitle(subtitleContents: List<MigrateToContentStoreTask.Data.SingleSubtitle>): List<MigrateContentToStoreTaskResultEvent.SubtitleMigration> {
|
||||
if (subtitleContents.isEmpty()) return listOf(MigrateContentToStoreTaskResultEvent.SubtitleMigration(null, null, MigrateStatus.NotPresent))
|
||||
val results = mutableListOf<MigrateContentToStoreTaskResultEvent.SubtitleMigration>()
|
||||
for (subtitle in subtitleContents) {
|
||||
val source = File(subtitle.cachedUri)
|
||||
val destination = File(subtitle.storeUri)
|
||||
try {
|
||||
source.copyTo(destination, overwrite = true)
|
||||
val identical = Files.mismatch(source.toPath(), destination.toPath()) == -1L
|
||||
if (!identical) {
|
||||
results.add(MigrateContentToStoreTaskResultEvent.SubtitleMigration(subtitle.language, destination.absolutePath, MigrateStatus.Failed))
|
||||
} else {
|
||||
results.add(MigrateContentToStoreTaskResultEvent.SubtitleMigration(subtitle.language,destination.absolutePath, MigrateStatus.Completed))
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
results.add(MigrateContentToStoreTaskResultEvent.SubtitleMigration(subtitle.language,destination.absolutePath, MigrateStatus.Failed))
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
private fun migrateCover(coverContents: List<MigrateToContentStoreTask.Data.SingleContent>): List<MigrateContentToStoreTaskResultEvent.FileMigration> {
|
||||
if (coverContents.isEmpty()) return listOf(MigrateContentToStoreTaskResultEvent.FileMigration(null, MigrateStatus.NotPresent))
|
||||
val results = mutableListOf<MigrateContentToStoreTaskResultEvent.FileMigration>()
|
||||
for (cover in coverContents) {
|
||||
val source = File(cover.cachedUri)
|
||||
val destination = File(cover.storeUri)
|
||||
try {
|
||||
source.copyTo(destination, overwrite = true)
|
||||
val identical = Files.mismatch(source.toPath(), destination.toPath()) == -1L
|
||||
if (!identical) {
|
||||
results.add(MigrateContentToStoreTaskResultEvent.FileMigration(destination.absolutePath, MigrateStatus.Failed))
|
||||
} else {
|
||||
results.add(MigrateContentToStoreTaskResultEvent.FileMigration(destination.absolutePath, MigrateStatus.Completed))
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
results.add(MigrateContentToStoreTaskResultEvent.FileMigration(destination.absolutePath, MigrateStatus.Failed))
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,56 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.tasks
|
||||
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskListener
|
||||
import no.iktdev.eventi.tasks.TaskType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StoreContentAndMetadataTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MigrateToContentStoreTask
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.StoreContentAndMetadataTask
|
||||
import org.springframework.beans.factory.annotation.Autowired
|
||||
import org.springframework.http.HttpEntity
|
||||
import org.springframework.http.HttpHeaders
|
||||
import org.springframework.http.HttpMethod
|
||||
import org.springframework.http.MediaType
|
||||
import org.springframework.stereotype.Component
|
||||
import org.springframework.web.client.RestTemplate
|
||||
import java.util.UUID
|
||||
|
||||
@Component
|
||||
class StoreContentAndMetadataTaskListener: TaskListener(TaskType.MIXED) {
|
||||
@Autowired
|
||||
lateinit var streamitRestTemplate: RestTemplate
|
||||
|
||||
override fun getWorkerId(): String {
|
||||
return "${this::class.java.simpleName}-${taskType}-${UUID.randomUUID()}"
|
||||
}
|
||||
|
||||
override fun supports(task: Task): Boolean {
|
||||
return task is StoreContentAndMetadataTask
|
||||
}
|
||||
|
||||
override suspend fun onTask(task: Task): Event? {
|
||||
val pickedTask = task as? StoreContentAndMetadataTask ?: return null
|
||||
|
||||
val headers = HttpHeaders().apply { contentType = MediaType.APPLICATION_JSON }
|
||||
val entity = HttpEntity(pickedTask.data, headers)
|
||||
|
||||
val response = try {
|
||||
val res = streamitRestTemplate.exchange(
|
||||
"open/api/mediaprocesser/import",
|
||||
HttpMethod.POST,
|
||||
entity,
|
||||
Void::class.java,
|
||||
)
|
||||
res.statusCode.is2xxSuccessful
|
||||
} catch (e: Exception) {
|
||||
false
|
||||
}
|
||||
|
||||
return StoreContentAndMetadataTaskResultEvent(
|
||||
if (response) TaskStatus.Completed else TaskStatus.Failed
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,57 @@
|
||||
package no.iktdev.mediaprocessing
|
||||
|
||||
import no.iktdev.eventi.ListenerOrder
|
||||
import no.iktdev.eventi.events.EventListenerRegistry
|
||||
import no.iktdev.mediaprocessing.coordinator.CoordinatorApplication
|
||||
import no.iktdev.mediaprocessing.coordinator.listeners.events.*
|
||||
import no.iktdev.mediaprocessing.shared.common.config.DatasourceConfiguration
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.EventRegistry
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.TaskRegistry
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.TestInstance
|
||||
import org.junit.jupiter.api.extension.ExtendWith
|
||||
import org.springframework.beans.factory.annotation.Autowired
|
||||
import org.springframework.boot.test.context.SpringBootTest
|
||||
import org.springframework.context.ApplicationContext
|
||||
import org.springframework.context.annotation.ComponentScan
|
||||
import org.springframework.test.context.TestPropertySource
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension
|
||||
|
||||
|
||||
@SpringBootTest(
|
||||
classes = [CoordinatorApplication::class,
|
||||
DatasourceConfiguration::class],
|
||||
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT
|
||||
)
|
||||
@TestPropertySource(properties = ["spring.flyway.enabled=true"])
|
||||
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
|
||||
@ComponentScan("no.iktdev.mediaprocessing.coordinator.listeners.events")
|
||||
@ExtendWith(SpringExtension::class)
|
||||
class ListenerInformOrderTest(): TestBase() {
|
||||
@Autowired lateinit var ctx: ApplicationContext
|
||||
|
||||
@Test
|
||||
fun verifyTaskRegistryIsNotEmpty() {
|
||||
assertThat { TaskRegistry.getTasks().isNotEmpty() }
|
||||
}
|
||||
@Test
|
||||
fun verifyEventRegistryIsNotEmpty() {
|
||||
assertThat { EventRegistry.getEvents().isNotEmpty() }
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
fun `only ordered handlers should be in correct order`() {
|
||||
val handlers = EventListenerRegistry.getListeners()
|
||||
assertThat(handlers).isNotEmpty
|
||||
val filtered = handlers.filter { it::class.java.isAnnotationPresent(ListenerOrder::class.java) }
|
||||
assertThat (filtered.map { it::class.simpleName }).containsExactly(
|
||||
StartedListener::class.simpleName,
|
||||
MediaParsedInfoListener::class.java.simpleName,
|
||||
MediaReadStreamsTaskCreatedListener::class.java.simpleName,
|
||||
MediaParseStreamsListener::class.java.simpleName,
|
||||
MediaCreateMetadataSearchTaskListener::class.java.simpleName,
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,122 @@
|
||||
package no.iktdev.mediaprocessing
|
||||
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.TestBase.DummyTask
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.*
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import java.util.*
|
||||
|
||||
/**
 * Factory helpers producing consistent created/result event pairs for listener tests.
 * Each helper derives its events from [Event]s already present in the test history so
 * reference/derivation chains stay valid.
 */
object MockData {

    /** Builds a [MediaParsedInfoEvent] for the given collection/file/type. */
    fun mediaParsedEvent(
        collection: String,
        fileName: String,
        mediaType: MediaType
    ) = MediaParsedInfoEvent(
        data = MediaParsedInfoEvent.ParsedData(
            parsedCollection = collection,
            parsedFileName = fileName,
            parsedSearchTitles = listOf(collection, fileName),
            mediaType = mediaType
        )
    )

    /** Returns a completed metadata-search [created, result] event pair derived from [derivedFrom]. */
    fun metadataEvent(derivedFrom: Event): List<Event> {
        val dummyTask = DummyTask().derivedOf(derivedFrom)
        val create = MetadataSearchTaskCreatedEvent(dummyTask.taskId).derivedOf(derivedFrom)

        val result = MetadataSearchResultEvent(
            results = listOf(
                MetadataSearchResultEvent.SearchResult(
                    simpleScore = 10,
                    prefixScore = 10,
                    advancedScore = 10,
                    sourceWeight = 1f,
                    data = MetadataSearchResultEvent.SearchResult.MetadataResult(
                        source = "test",
                        title = "MyCollection",
                        cover = "cover.jpg",
                        type = MediaType.Movie,
                        summary = listOf(
                            MetadataSearchResultEvent.SearchResult.MetadataResult.Summary(
                                language = "en",
                                description = "desc"
                            )
                        ),
                        genres = listOf("Drama")
                    )
                )
            ),
            recommended = null,
            status = TaskStatus.Completed
        ).producedFrom(dummyTask)
        return listOf(create, result)
    }

    /** Returns an encode [created, result] pair; [status] lets failure scenarios reuse this helper. */
    fun encodeEvent(cachedFile: String, derivedFrom: Event, status: TaskStatus = TaskStatus.Completed): List<Event> {
        val dummyTask = DummyTask().derivedOf(derivedFrom)
        val create = ProcesserEncodeTaskCreatedEvent(dummyTask.taskId)
            .derivedOf(derivedFrom)

        val result = ProcesserEncodeResultEvent(
            data = ProcesserEncodeResultEvent.EncodeResult(
                cachedOutputFile = cachedFile
            ),
            status = status
        ).producedFrom(dummyTask)
        return listOf(create, result)
    }

    /** Returns an extract [created, result] pair for a single subtitle track. */
    fun extractEvent(language: String, cachedFile: String, derivedFrom: Event): List<Event> {
        val dummyTask = DummyTask().derivedOf(derivedFrom)
        // mutableListOf replaces the previous `listOf(...) as MutableList<UUID>`:
        // listOf returns an immutable singleton list, so that cast would let any downstream
        // mutation fail at runtime.
        val create = ProcesserExtractTaskCreatedEvent(mutableListOf(dummyTask.taskId))
            .derivedOf(derivedFrom)

        val result = ProcesserExtractResultEvent(
            status = TaskStatus.Completed,
            data = ProcesserExtractResultEvent.ExtractResult(
                language = language,
                cachedOutputFile = cachedFile
            )
        ).producedFrom(dummyTask)
        return listOf(create, result)
    }

    /** Returns a convert [created, result] pair for one subtitle conversion. */
    fun convertEvent(
        language: String,
        baseName: String,
        outputFiles: List<String>,
        derivedFrom: Event
    ): List<Event> {
        val dummyTask = DummyTask().derivedOf(derivedFrom)
        val createdTaskEvent = ConvertTaskCreatedEvent(
            taskId = dummyTask.taskId
        ).derivedOf(derivedFrom)

        val resultTask = ConvertTaskResultEvent(
            data = ConvertTaskResultEvent.ConvertedData(
                language = language,
                baseName = baseName,
                outputFiles = outputFiles
            ),
            status = TaskStatus.Completed
        ).producedFrom(dummyTask)
        return listOf(createdTaskEvent, resultTask)
    }

    /** Returns a cover-download [created, result] pair derived from [derivedFrom]. */
    fun coverEvent(cacheFile: String, derivedFrom: Event, source: String = "test"): List<Event> {
        val dummyTask = DummyTask().derivedOf(derivedFrom)
        val start = CoverDownloadTaskCreatedEvent(listOf(dummyTask.taskId)).derivedOf(derivedFrom)

        val result = CoverDownloadResultEvent(
            data = CoverDownloadResultEvent.CoverDownloadedData(
                source = source,
                outputFile = cacheFile
            ),
            status = TaskStatus.Completed
        ).producedFrom(dummyTask)
        return listOf(start, result)
    }

}
|
||||
@ -0,0 +1,32 @@
|
||||
package no.iktdev.mediaprocessing
|
||||
|
||||
import com.google.gson.JsonObject
|
||||
import kotlinx.coroutines.delay
|
||||
import no.iktdev.mediaprocessing.ffmpeg.FFprobe
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.FFinfoOutput
|
||||
|
||||
/**
 * Test double for [FFprobe] with configurable delay, fixed result, or simulated failure.
 * Use the companion factories for the common cases.
 */
class MockFFprobe(
    private val delayMillis: Long = 0,
    private val result: FFinfoOutput? = null,
    private val throwException: Boolean = false
) : FFprobe("") {

    /** Last input path handed to [readJsonStreams]; useful for call verification. */
    var lastInputFile: String? = null

    override suspend fun readJsonStreams(inputFile: String): FFinfoOutput {
        lastInputFile = inputFile
        if (delayMillis > 0) {
            delay(delayMillis)
        }
        if (throwException) {
            throw RuntimeException("Simulated ffprobe failure")
        }
        // Fall back to a failure payload when no canned result was configured.
        return result
            ?: FFinfoOutput(success = false, data = null, error = "No result configured")
    }

    companion object {
        /** Probe that always succeeds with the given [json] payload. */
        fun success(json: JsonObject) = MockFFprobe(
            result = FFinfoOutput(success = true, data = json, error = null)
        )

        /** Probe that completes but reports a failure with [errorMsg]. */
        fun failure(errorMsg: String) = MockFFprobe(
            result = FFinfoOutput(success = false, data = null, error = errorMsg)
        )

        /** Probe that throws instead of returning a result. */
        fun exception() = MockFFprobe(throwException = true)
    }
}
|
||||
@ -0,0 +1,51 @@
|
||||
package no.iktdev.mediaprocessing
|
||||
|
||||
import io.mockk.*
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.mediaprocessing.coordinator.AudioPreference
|
||||
import no.iktdev.mediaprocessing.coordinator.Preference
|
||||
import no.iktdev.mediaprocessing.coordinator.ProcesserPreference
|
||||
import no.iktdev.mediaprocessing.coordinator.VideoPreference
|
||||
import no.iktdev.mediaprocessing.ffmpeg.dsl.AudioCodec
|
||||
import no.iktdev.mediaprocessing.ffmpeg.dsl.VideoCodec
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.junit.jupiter.api.BeforeEach
|
||||
import java.io.File
|
||||
import java.util.*
|
||||
|
||||
/**
 * Shared fixture for listener tests: stubs [TaskStore.persist] and the processer
 * preference lookup before every test, and provides event factories.
 */
open class TestBase {
    class DummyEvent : Event()
    class DummyTask : Task()

    @BeforeEach
    fun setup() {
        // Swallow persistence so listeners can run without a database.
        mockkObject(TaskStore)
        every { TaskStore.persist(any()) } just Runs
        // Fixed codec preferences give deterministic ffmpeg argument generation.
        mockkObject(Preference)
        every { Preference.getProcesserPreference() } returns ProcesserPreference(
            videoPreference = VideoPreference(codec = VideoCodec.Hevc()),
            audioPreference = AudioPreference(codec = AudioCodec.Aac(channels = 2))
        )
    }

    /** Makes every constructed [File] report that it exists. Opt-in per test. */
    fun mockkIO() {
        mockkConstructor(File::class)
        every { anyConstructed<File>().exists() } returns true
    }

    /** Start event requesting all three operations, with a fresh reference id and unique file URI. */
    fun defaultStartEvent(): StartProcessingEvent =
        StartProcessingEvent(
            data = StartData(
                operation = setOf(OperationType.Encode, OperationType.Extract, OperationType.Convert),
                fileUri = "file:///unit/${UUID.randomUUID()}.mkv"
            )
        ).apply { newReferenceId() }
}
|
||||
@ -0,0 +1,275 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.MockData.convertEvent
|
||||
import no.iktdev.mediaprocessing.MockData.coverEvent
|
||||
import no.iktdev.mediaprocessing.MockData.encodeEvent
|
||||
import no.iktdev.mediaprocessing.MockData.extractEvent
|
||||
import no.iktdev.mediaprocessing.MockData.mediaParsedEvent
|
||||
import no.iktdev.mediaprocessing.MockData.metadataEvent
|
||||
import no.iktdev.mediaprocessing.TestBase
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CollectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
|
||||
/**
 * Verifies that CollectEventsListener only emits a CollectedEvent once the projection over
 * the history says every requested operation has completed successfully.
 */
class CollectEventsListenerTest : TestBase() {

    private val listener = CollectEventsListener()

    @Test
    @DisplayName(
        """
        Hvis historikken har alle påkrevde hendelser og alle oppgaver er i en gyldig tisltand
        Når onEvent kalles og projeksjonen tilsier gyldig status
        Så:
            Opprettes CollectEvent basert på historikken
        """
    )
    fun success1() {
        val started = defaultStartEvent()

        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        val metadata = metadataEvent(parsed)

        val encode = encodeEvent("/tmp/video.mp4", parsed)
        val extract = extractEvent("en", "/tmp/sub1.srt", encode.last())
        val convert = convertEvent(language = "en", baseName = "sub1", outputFiles = listOf("/tmp/sub1.vtt"), derivedFrom = extract.last())
        val cover = coverEvent("/tmp/cover.jpg", metadata.last())

        val history = listOf(
            started,
            parsed,
            *metadata.toTypedArray(),
            *encode.toTypedArray(),
            *extract.toTypedArray(),
            *convert.toTypedArray(),
            *cover.toTypedArray(),
        )

        val result = listener.onEvent(history.last(), history)

        assertThat(result).isNotNull()
        // Was `assertThat { result is CollectedEvent }` — a lambda AssertJ never evaluates,
        // so the type check was silently skipped. Assert the type for real.
        assertThat(result).isInstanceOf(CollectedEvent::class.java)
    }


    @Test
    @DisplayName(
        """
        Hvis vi har kun encoded hendelse, men vi har sagt at vi også skal ha extract, men ikke har opprettet extract
        Når encode result kommer inn
        Så:
            Opprettes CollectEvent basert på historikken
        """
    )
    fun success2() {
        val started = defaultStartEvent().let { ev ->
            ev.copy(data = ev.data.copy(operation = setOf(OperationType.Encode, OperationType.Extract)))
        }
        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        val encode = encodeEvent("/tmp/video.mp4", parsed)

        val history = listOf(
            started,
            parsed,
            *encode.toTypedArray(),
        )
        val result = listener.onEvent(history.last(), history)
        assertThat(result).isNotNull()
        // No-op lambda assertion replaced (see success1).
        assertThat(result).isInstanceOf(CollectedEvent::class.java)
    }


    @Test
    @DisplayName(
        """
        Hvis vi har kun convert hendelse
        Når convert har komment inn
        Så:
            Opprettes CollectEvent basert på historikken
        """
    )
    fun success3() {
        val started = defaultStartEvent().let { ev ->
            ev.copy(data = ev.data.copy(operation = setOf(OperationType.Convert)))
        }
        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        // FIX(review): this scenario is convert-only, but the original built the history with
        // encodeEvent(...) — which exercised the wrong operation entirely. Use convertEvent.
        val convert = convertEvent(language = "en", baseName = "fancy", outputFiles = listOf("/tmp/fancy.vtt"), derivedFrom = parsed)

        val history = listOf(
            started,
            parsed,
            *convert.toTypedArray(),
        )
        val result = listener.onEvent(history.last(), history)
        assertThat(result).isNotNull()
        // No-op lambda assertion replaced (see success1).
        assertThat(result).isInstanceOf(CollectedEvent::class.java)
    }


    @Test
    @DisplayName(
        """
        Hvis vi har kun encoded og extracted hendelser, men vi har sagt at vi også skal konvertere
        Når extract result kommer inn
        Så:
            Skal vi si pending på convert
            Listener skal returnerere null
        """
    )
    fun failure1() {
        val started = defaultStartEvent()

        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        val encode = encodeEvent("/tmp/video.mp4", parsed)
        val extract = extractEvent("en", "/tmp/sub1.srt", encode.last())

        val history = listOf(
            started,
            parsed,
            *encode.toTypedArray(),
            *extract.toTypedArray(),
        )
        val result = listener.onEvent(history.last(), history)
        assertThat(result).isNull()
    }

    @Test
    @DisplayName(
        """
        Hvis historikken har alle påkrevde media hendelser, men venter på metadata
        Når onEvent kalles og projeksjonen tilsier ugyldig tilstand
        Så:
            Returerer vi failure
        """
    )
    fun failure2() {
        val started = defaultStartEvent()

        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        // Only the metadata "created" event — no result yet, so collection must wait.
        val metadata = metadataEvent(parsed).first()
        val encode = encodeEvent("/tmp/video.mp4", parsed)
        val extract = extractEvent("en", "/tmp/sub1.srt", encode.last())
        val convert = convertEvent(language = "en", baseName = "sub1", outputFiles = listOf("/tmp/sub1.vtt"), derivedFrom = extract.last())

        val history = listOf(
            started,
            parsed,
            metadata,
            *encode.toTypedArray(),
            *extract.toTypedArray(),
            *convert.toTypedArray(),
        )

        val result = listener.onEvent(history.last(), history)

        assertThat(result).isNull()
    }

    @Test
    @DisplayName(
        """
        Hvis historikken har alle påkrevde hendelser og encode feilet
        Når onEvent kalles og projeksjonen tilsier ugyldig tilstand
        Så:
            Collect feiler
        """
    )
    fun failure3() {
        val started = defaultStartEvent()

        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        val metadata = metadataEvent(parsed)

        val encode = encodeEvent("/tmp/video.mp4", parsed, TaskStatus.Failed)
        val extract = extractEvent("en", "/tmp/sub1.srt", encode.last())
        val convert = convertEvent(language = "en", baseName = "sub1", outputFiles = listOf("/tmp/sub1.vtt"), derivedFrom = extract.last())
        val cover = coverEvent("/tmp/cover.jpg", metadata.last())

        val history = listOf(
            started,
            parsed,
            *metadata.toTypedArray(),
            *encode.toTypedArray(),
            *extract.toTypedArray(),
            *convert.toTypedArray(),
            *cover.toTypedArray(),
        )

        val result = listener.onEvent(history.last(), history)

        assertThat(result).isNull()
    }

    @Test
    @DisplayName(
        """
        Hvis ingen oppgaver har blitt gjort
        Når onEvent kalles
        Så:
            Skal projeksjonen gi ugyldig tilstand og returnere null
        """
    )
    fun failure4() {
        val started = defaultStartEvent().let { ev ->
            ev.copy(data = ev.data.copy(operation = setOf(OperationType.Encode)))
        }

        val parsed = mediaParsedEvent(
            collection = "MyCollection",
            fileName = "MyCollection 1",
            mediaType = MediaType.Movie
        ).derivedOf(started)

        val history = listOf(
            started,
            parsed,
        )

        val result = listener.onEvent(history.last(), history)

        assertThat(result).isNull()
    }

}
|
||||
@ -0,0 +1,247 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.TestBase
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ConvertTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
import java.io.File
|
||||
|
||||
import io.mockk.*
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ConvertTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.mockito.Mockito.mockStatic
|
||||
import org.mockito.kotlin.any
|
||||
import org.mockito.kotlin.whenever
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
|
||||
/**
 * Verifies that MediaCreateConvertTaskListener only creates a ConvertTask when the
 * history contains a start event requesting Convert, the extract result completed,
 * and its cached output file exists.
 */
class MediaCreateConvertTaskListenerTest : TestBase() {

    private val listener = MediaCreateConvertTaskListener()

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis historikken inneholder StartProcessingEvent med Convert og filen eksisterer
        Så:
            Skal det opprettes ConvertTask og returneres ConvertTaskCreatedEvent
    """)
    fun verifyConvertTaskCreatedOnValidHistory() {
        // deleteOnExit keeps repeated test runs from leaking temp files.
        val tempFile = File.createTempFile("test", ".srt").apply { deleteOnExit() }
        tempFile.writeText("dummy subtitle")

        val startEvent = StartProcessingEvent(
            data = StartData(
                fileUri = tempFile.absolutePath,
                operation = setOf(OperationType.Convert)
            )
        )
        val extractEvent = ProcesserExtractResultEvent(
            status = TaskStatus.Completed,
            data = ProcesserExtractResultEvent.ExtractResult(
                cachedOutputFile = tempFile.absolutePath,
                language = "en"
            )
        )

        val history = listOf(startEvent)
        val result = listener.onEvent(extractEvent, history)

        assertNotNull(result)
        assertTrue(result is ConvertTaskCreatedEvent)

        // Verify TaskStore.persist was called with a ConvertTask.
        verify { TaskStore.persist(match { it is ConvertTask }) }
    }

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis StartProcessingEvent mangler i historikken
        Så:
            Skal onEvent returnere null og TaskStore.persist ikke kalles
    """)
    fun verifyNullWhenNoStartEvent() {
        val tempFile = File.createTempFile("test", ".srt").apply { deleteOnExit() }
        val extractEvent = ProcesserExtractResultEvent(
            status = TaskStatus.Completed,
            data = ProcesserExtractResultEvent.ExtractResult(
                cachedOutputFile = tempFile.absolutePath,
                language = "en"
            )
        )

        val history = emptyList<Event>()
        val result = listener.onEvent(extractEvent, history)

        assertNull(result)
        verify(exactly = 0) { TaskStore.persist(any()) }
    }

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis StartProcessingEvent finnes men operation ikke inneholder Convert
        Så:
            Skal onEvent returnere null
    """)
    fun verifyNullWhenOperationNotConvert() {
        val tempFile = File.createTempFile("test", ".srt").apply { deleteOnExit() }
        val startEvent = StartProcessingEvent(
            data = StartData(
                fileUri = tempFile.absolutePath,
                operation = setOf(OperationType.Encode) // Not Convert
            )
        )
        val extractEvent = ProcesserExtractResultEvent(
            status = TaskStatus.Completed,
            data = ProcesserExtractResultEvent.ExtractResult(
                cachedOutputFile = tempFile.absolutePath,
                language = "en"
            )
        )

        val history = listOf(startEvent)
        val result = listener.onEvent(extractEvent, history)

        assertNull(result)
        verify(exactly = 0) { TaskStore.persist(any()) }
    }

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis status ikke er Completed
        Så:
            Skal onEvent returnere null
    """)
    fun verifyNullWhenStatusNotCompleted() {
        val tempFile = File.createTempFile("test", ".srt").apply { deleteOnExit() }
        val startEvent = StartProcessingEvent(
            data = StartData(
                fileUri = tempFile.absolutePath,
                operation = setOf(OperationType.Convert)
            )
        )
        val extractEvent = ProcesserExtractResultEvent(
            status = TaskStatus.Failed,
            data = ProcesserExtractResultEvent.ExtractResult(
                cachedOutputFile = tempFile.absolutePath,
                language = "en"
            )
        )

        val history = listOf(startEvent)
        val result = listener.onEvent(extractEvent, history)

        assertNull(result)
        verify(exactly = 0) { TaskStore.persist(any()) }
    }

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis data mangler (er null)
        Så:
            Skal onEvent returnere null
    """)
    fun verifyNullWhenDataIsNull() {
        val startEvent = StartProcessingEvent(
            data = StartData(
                fileUri = "video.mp4",
                operation = setOf(OperationType.Convert)
            )
        )
        val extractEvent = ProcesserExtractResultEvent(
            status = TaskStatus.Completed,
            data = null
        )

        val history = listOf(startEvent)
        val result = listener.onEvent(extractEvent, history)

        assertNull(result)
        verify(exactly = 0) { TaskStore.persist(any()) }
    }

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis cachedOutputFile ikke eksisterer
        Så:
            Skal onEvent returnere null
    """)
    fun verifyNullWhenFileDoesNotExist() {
        val startEvent = StartProcessingEvent(
            data = StartData(
                fileUri = "nonexistent.srt",
                operation = setOf(OperationType.Convert)
            )
        )
        val extractEvent = ProcesserExtractResultEvent(
            status = TaskStatus.Completed,
            data = ProcesserExtractResultEvent.ExtractResult(
                cachedOutputFile = "nonexistent.srt",
                language = "en"
            )
        )

        val history = listOf(startEvent)
        val result = listener.onEvent(extractEvent, history)

        assertNull(result)
        verify(exactly = 0) { TaskStore.persist(any()) }
    }

    @Test
    @DisplayName("""
        Når en ProcesserExtractResultEvent mottas
        Hvis historikken inneholder StartEvent med Convert og File.exists() returnerer true
        Så:
            Skal det opprettes ConvertTask og returneres ConvertTaskCreatedEvent
    """)
    fun verifyConvertTaskCreatedWithMockedFileExists() {
        // NOTE(review): this test mixes Mockito's mockStatic with MockK used elsewhere;
        // presumably the listener checks existence via java.nio.file.Files — confirm.
        mockStatic(Files::class.java).use { filesMock ->

            filesMock.`when`<Boolean> {
                Files.exists(any<Path>())
            }.thenReturn(true)

            val startEvent = StartProcessingEvent(
                data = StartData(
                    fileUri = "/tmp/video.srt",
                    operation = setOf(OperationType.Convert)
                )
            )

            val extractEvent = ProcesserExtractResultEvent(
                status = TaskStatus.Completed,
                data = ProcesserExtractResultEvent.ExtractResult(
                    cachedOutputFile = "/tmp/video.srt",
                    language = "en"
                )
            )

            val history = listOf(startEvent)
            val result = listener.onEvent(extractEvent, history)

            assertNotNull(result)
            assertTrue(result is ConvertTaskCreatedEvent)

            filesMock.verify {
                Files.exists(any<Path>())
            }
        }
    }
}
|
||||
|
||||
@ -0,0 +1,5 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
// TODO(review): placeholder — no tests implemented yet for MediaCreateCoverDownloadTaskListener.
class MediaCreateCoverDownloadTaskListenerTest {

}
|
||||
@ -0,0 +1,242 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import io.mockk.Runs
|
||||
import io.mockk.every
|
||||
import io.mockk.just
|
||||
import io.mockk.mockkObject
|
||||
import io.mockk.verify
|
||||
import no.iktdev.mediaprocessing.coordinator.AudioPreference
|
||||
import no.iktdev.mediaprocessing.coordinator.Preference
|
||||
import no.iktdev.mediaprocessing.coordinator.ProcesserPreference
|
||||
import no.iktdev.mediaprocessing.coordinator.VideoPreference
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.AudioStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.Disposition
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.ParsedMediaStreams
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.Tags
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.VideoStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.dsl.AudioCodec
|
||||
import no.iktdev.mediaprocessing.ffmpeg.dsl.VideoCodec
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksEncodeSelectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserEncodeTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.EncodeTask
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.BeforeEach
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
/**
 * Tests for MediaCreateEncodeTaskListener: given a start event, parsed streams and a
 * track-selection event, the listener should persist an EncodeTask and return a
 * ProcesserEncodeTaskCreatedEvent.
 *
 * NOTE(review): this class duplicates TestBase.setup() instead of extending TestBase
 * like the sibling tests — consider unifying.
 */
class MediaCreateEncodeTaskListenerTest {

    private val listener = MediaCreateEncodeTaskListener()

    @BeforeEach
    fun setup() {
        // Stub persistence so no database is needed.
        mockkObject(TaskStore)
        every { TaskStore.persist(any()) } just Runs
        // Fixed codec preferences give deterministic encode arguments.
        mockkObject(Preference)
        every { Preference.getProcesserPreference() } returns ProcesserPreference(
            videoPreference = VideoPreference(codec = VideoCodec.Hevc()),
            audioPreference = AudioPreference(codec = AudioCodec.Aac(channels = 2))
        )
    }

    @Test
    @DisplayName("""
        Hvis en video- og audio-track er valgt
        Når onEvent kalles
        Så:
            TaskStore.persist mottar et EncodeTask
            data-feltet har korrekt inputFile, outputFileName og arguments fra MediaPlan
    """)
    fun testOnEventWithSingleAudioTrack() {
        val startEvent = StartProcessingEvent(
            StartData(setOf(OperationType.Encode), fileUri = "/tmp/movie.mkv")
        )
        val parsedEvent = MediaStreamParsedEvent(
            data = ParsedMediaStreams(
                videoStream = listOf(mockVideoStream(index = 0, codec = "h264", disposition = mockDisposition(), tags = mockTags())),
                audioStream = listOf(mockAudioStream(index = 1, codec = "aac", disposition = mockDisposition(), tags = mockTags()))
            )
        )
        // Track indexes here are positions within the selected stream lists.
        val selectedEvent = MediaTracksEncodeSelectedEvent(
            selectedVideoTrack = 0,
            selectedAudioTrack = 0
        )

        val history = listOf(startEvent, parsedEvent)

        val result = listener.onEvent(selectedEvent, history)

        verify {
            TaskStore.persist(withArg { task ->
                assertTrue(task is EncodeTask)
                val data = (task as EncodeTask).data
                assertEquals("/tmp/movie.mkv", data.inputFile)
                assertEquals("movie.mp4", data.outputFileName)
                assertTrue(data.arguments.isNotEmpty(), "Arguments from MediaPlan should not be empty")
            })
        }

        assertTrue(result is ProcesserEncodeTaskCreatedEvent)
    }

    @Test
    @DisplayName("""
        Hvis en video- og to audio-tracks (inkludert extended) er valgt
        Når onEvent kalles
        Så:
            TaskStore.persist mottar et EncodeTask
            data-feltet inkluderer begge audio-targets i arguments
    """)
    fun testOnEventWithExtendedAudioTrack() {
        val startEvent = StartProcessingEvent(
            StartData(setOf(OperationType.Encode), fileUri = "/tmp/movie.mkv")
        )
        val parsedEvent = MediaStreamParsedEvent(
            data = ParsedMediaStreams(
                videoStream = listOf(mockVideoStream(index = 0, codec = "h264", disposition = mockDisposition(), tags = mockTags())),
                audioStream = listOf(
                    mockAudioStream(index = 1, codec = "aac", disposition = mockDisposition(), tags = mockTags()),
                    mockAudioStream(index = 2, codec = "aac", disposition = mockDisposition(), tags = mockTags())
                )
            )
        )
        val selectedEvent = MediaTracksEncodeSelectedEvent(
            selectedVideoTrack = 0,
            selectedAudioTrack = 0,
            selectedAudioExtendedTrack = 1
        )

        val history = listOf(startEvent, parsedEvent)

        val result = listener.onEvent(selectedEvent, history)

        verify {
            TaskStore.persist(withArg { task ->
                val data = (task as EncodeTask).data
                assertEquals("/tmp/movie.mkv", data.inputFile)
                assertEquals("movie.mp4", data.outputFileName)
                // Both audio tracks must be mapped into the ffmpeg arguments.
                assertTrue(data.arguments.any { it.contains("0:a:0") })
                assertTrue(data.arguments.any { it.contains("0:a:1") })
            })
        }

        assertTrue(result is ProcesserEncodeTaskCreatedEvent)
    }

    // Dummy streams for test.
    // Builds a full VideoStream with plausible H.264/1080p defaults; only the fields the
    // listener cares about (index, codec, dimensions, disposition, tags) vary per test.
    fun mockVideoStream(
        index: Int = 0,
        codec: String = "h264",
        width: Int = 1920,
        height: Int = 1080,
        disposition: Disposition,
        tags: Tags
    ) = VideoStream(
        index = index,
        codec_name = codec,
        codec_long_name = "H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10",
        codec_type = "video",
        codec_tag_string = "avc1",
        codec_tag = "0x31637661",
        r_frame_rate = "25/1",
        avg_frame_rate = "25/1",
        time_base = "1/90000",
        start_pts = 0,
        start_time = "0.000000",
        disposition = disposition,
        tags = tags,
        duration = "60.0",
        duration_ts = 54000,
        profile = "High",
        width = width,
        height = height,
        coded_width = width,
        coded_height = height,
        closed_captions = 0,
        has_b_frames = 2,
        sample_aspect_ratio = "1:1",
        display_aspect_ratio = "16:9",
        pix_fmt = "yuv420p",
        level = 40,
        color_range = "tv",
        color_space = "bt709",
        color_transfer = "bt709",
        color_primaries = "bt709",
        chroma_location = "left",
        refs = 1
    )

    // Builds a full AudioStream with stereo AAC defaults.
    fun mockAudioStream(
        index: Int = 0,
        codec: String = "aac",
        channels: Int = 2,
        profile: String = "LC",
        disposition: Disposition,
        tags: Tags
    ) = AudioStream(
        index = index,
        codec_name = codec,
        codec_long_name = "AAC (Advanced Audio Coding)",
        codec_type = "audio",
        codec_tag_string = "mp4a",
        codec_tag = "0x6134706d",
        r_frame_rate = "0/0",
        avg_frame_rate = "0/0",
        time_base = "1/48000",
        start_pts = 0,
        start_time = "0.000000",
        duration = "60.0",
        duration_ts = 2880000,
        disposition = disposition,
        tags = tags,
        profile = profile,
        sample_fmt = "fltp",
        sample_rate = "48000",
        channels = channels,
        channel_layout = "stereo",
        bits_per_sample = 0
    )

    // Disposition with everything off except the two flags tests may toggle.
    fun mockDisposition(
        default: Int = 1,
        forced: Int = 0
    ) = Disposition(
        default = default,
        dub = 0,
        original = 0,
        comment = 0,
        lyrics = 0,
        karaoke = 0,
        forced = forced,
        hearing_impaired = 0,
        captions = 0,
        visual_impaired = 0,
        clean_effects = 0,
        attached_pic = 0,
        timed_thumbnails = 0
    )

    // Minimal Tags container; statistics fields nulled out.
    fun mockTags(
        language: String? = "eng",
        title: String? = null,
        filename: String? = null
    ) = Tags(
        title = title,
        BPS = null,
        DURATION = null,
        NUMBER_OF_FRAMES = 0,
        NUMBER_OF_BYTES = null,
        _STATISTICS_WRITING_APP = null,
        _STATISTICS_WRITING_DATE_UTC = null,
        _STATISTICS_TAGS = null,
        language = language,
        filename = filename,
        mimetype = null
    )
}
|
||||
@ -0,0 +1,264 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
|
||||
import io.mockk.Runs
import io.mockk.every
import io.mockk.just
import io.mockk.mockkObject
import io.mockk.unmockkObject
import io.mockk.verify
import no.iktdev.eventi.models.Task
import no.iktdev.eventi.models.store.PersistedTask
import no.iktdev.eventi.models.store.TaskStatus
import no.iktdev.mediaprocessing.ffmpeg.data.ParsedMediaStreams
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleStream
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleTags
import no.iktdev.mediaprocessing.ffmpeg.data.Tags
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksExtractSelectedEvent
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.OperationType
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractTaskCreatedEvent
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartData
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ExtractSubtitleTask
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.Assertions.*
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.DisplayName
import org.junit.jupiter.api.Test
import java.io.File
import java.time.Duration
import java.util.UUID
|
||||
|
||||
class MediaCreateExtractTaskListenerTest {

    /**
     * In-memory implementation of the eventi TaskStore contract.
     *
     * NOTE(review): currently unused — the tests below stub the real [TaskStore]
     * singleton with mockk instead. Confirm whether this fake should replace the
     * mocks or be deleted.
     */
    object FakeTaskStore : no.iktdev.eventi.stores.TaskStore {
        val persisted = mutableListOf<Task>()

        override fun persist(task: Task) {
            persisted.add(task)
        }

        override fun findByTaskId(taskId: UUID): PersistedTask? { TODO("Not yet implemented") }
        override fun findByReferenceId(referenceId: UUID): List<PersistedTask> { TODO("Not yet implemented") }
        override fun findUnclaimed(referenceId: UUID): List<PersistedTask> { TODO("Not yet implemented") }
        override fun claim(taskId: UUID, workerId: String): Boolean { TODO("Not yet implemented") }
        override fun heartbeat(taskId: UUID) { TODO("Not yet implemented") }
        override fun markConsumed(taskId: UUID, status: TaskStatus) { TODO("Not yet implemented") }
        override fun releaseExpiredTasks(timeout: Duration) { TODO("Not yet implemented") }
        override fun getPendingTasks(): List<PersistedTask> { TODO("Not yet implemented") }
    }

    @BeforeEach
    fun setup() {
        // Stub the TaskStore singleton so listener logic runs without persistence.
        mockkObject(TaskStore)
        every { TaskStore.persist(any()) } just Runs
    }

    @AfterEach
    fun teardown() {
        // BUGFIX: mockkObject installs a process-global mock. Without unmocking
        // here, the stubbed TaskStore leaked into every test class that ran
        // after this one in the same JVM.
        unmockkObject(TaskStore)
    }

    private val listener = MediaCreateExtractTaskListener()

    /** Builds a minimal [SubtitleStream] fixture for the given index, codec and language. */
    private fun dummyStream(
        index: Int,
        codecName: String,
        language: String? = null
    ): SubtitleStream {
        return SubtitleStream(
            index = index,
            codec_name = codecName,
            codec_long_name = codecName,
            codec_type = "subtitle",
            codec_tag_string = "",
            codec_tag = "",
            r_frame_rate = "0/0",
            avg_frame_rate = "0/0",
            time_base = "1/1000",
            start_pts = 0,
            start_time = "0",
            duration = null,
            duration_ts = null,
            disposition = null,
            tags = Tags(
                title = null,
                BPS = null,
                DURATION = null,
                NUMBER_OF_FRAMES = 0,
                NUMBER_OF_BYTES = null,
                _STATISTICS_WRITING_APP = null,
                _STATISTICS_WRITING_DATE_UTC = null,
                _STATISTICS_TAGS = null,
                language = language,
                filename = null,
                mimetype = null
            ),
            subtitle_tags = SubtitleTags(
                language = language,
                filename = null,
                mimetype = null
            )
        )
    }

    @Test
    @DisplayName("""
        Hvis en SRT-subtitle med språk er valgt
        Når toSubtitleArgumentData kalles
        Så:
        Returneres et ExtractSubtitleData-objekt
        Outputfilen får .srt-extension og språk i navnet
        Argumentlisten inneholder -map og -c:s copy
    """)
    fun testSrtSubtitle() {
        val stream = dummyStream(0, "subrip", "eng")
        val inputFile = File("/tmp/movie.mkv")

        val result = listener.toSubtitleArgumentData(0, inputFile, stream)

        assertNotNull(result)
        assertEquals("movie-eng.srt", result!!.outputFileName)
        assertEquals("eng", result.language)
        assertEquals(listOf("-map", "0:s:0", "-c:s", "copy"), result.arguments)
    }

    @Test
    @DisplayName("""
        Hvis codec ikke støttes (f.eks pgssub)
        Når toSubtitleArgumentData kalles
        Så:
        Returneres null
        Ingen ExtractSubtitleData opprettes
    """)
    fun testUnsupportedCodec() {
        val stream = dummyStream(1, "pgssub", "eng")
        val inputFile = File("/tmp/movie.mkv")

        val result = listener.toSubtitleArgumentData(1, inputFile, stream)

        assertNull(result)
    }

    @Test
    @DisplayName("""
        Hvis språk mangler i subtitle-stream
        Når toSubtitleArgumentData kalles
        Så:
        Returneres null
        Ingen ExtractSubtitleData opprettes
    """)
    fun testMissingLanguage() {
        val stream = dummyStream(2, "subrip", null)
        val inputFile = File("/tmp/movie.mkv")

        val result = listener.toSubtitleArgumentData(2, inputFile, stream)

        assertNull(result)
    }

    @Test
    @DisplayName("""
        Hvis en ASS-subtitle med språk er valgt
        Når toSubtitleArgumentData kalles
        Så:
        Returneres et ExtractSubtitleData-objekt
        Outputfilen får .ass-extension og språk i navnet
        Argumentlisten inneholder -map og -c:s copy
    """)
    fun testAssSubtitle() {
        val stream = dummyStream(3, "ass", "jpn")
        val inputFile = File("/tmp/anime.mkv")

        val result = listener.toSubtitleArgumentData(3, inputFile, stream)

        assertNotNull(result)
        assertEquals("anime-jpn.ass", result!!.outputFileName)
        assertEquals("jpn", result.language)
        assertEquals(listOf("-map", "0:s:3", "-c:s", "copy"), result.arguments)
    }

    @Test
    @DisplayName("""
        Hvis en StartProcessingEvent og MediaStreamParsedEvent finnes i historikken
        Når onEvent kalles med MediaTracksExtractSelectedEvent som velger en SRT-subtitle
        Så:
        Returneres et ProcesserExtractTaskCreatedEvent
        tasksCreated-listen inneholder minst én UUID
    """)
    fun testOnEventCreatesTasks() {
        val startEvent = StartProcessingEvent(
            StartData(setOf(OperationType.Extract), fileUri = "/tmp/movie.mkv")
        )
        val parsedEvent = MediaStreamParsedEvent(
            data = ParsedMediaStreams(subtitleStream = listOf(dummyStream(0, "subrip", "eng")))
        )
        val selectedEvent = MediaTracksExtractSelectedEvent(selectedSubtitleTracks = listOf(0))

        val history = listOf(startEvent, parsedEvent)

        val result = listener.onEvent(selectedEvent, history)

        assertNotNull(result)
        assertTrue(result is ProcesserExtractTaskCreatedEvent)
        val created = result as ProcesserExtractTaskCreatedEvent
        assertTrue(created.tasksCreated.isNotEmpty())
        verify {
            TaskStore.persist(withArg { task ->
                assertTrue(task is ExtractSubtitleTask)
                val data = (task as ExtractSubtitleTask).data
                assertEquals("/tmp/movie.mkv", data.inputFile)
                assertEquals("movie-eng.srt", data.outputFileName)
                assertEquals("eng", data.language)
                assertEquals(listOf("-map", "0:s:0", "-c:s", "copy"), data.arguments)
            })
        }
    }

    @Test
    @DisplayName("""
        Hvis flere undertekster (SRT og ASS) er valgt
        Når onEvent kalles
        Så:
        TaskStore.persist skal kalles én gang per valgt spor
        Hvert ExtractSubtitleTask skal ha korrekt data (filnavn, språk, arguments)
    """)
    fun testOnEventWithMultipleSubtitles() {
        // Hvis: vi har en StartProcessingEvent og to subtitle streams
        val startEvent = StartProcessingEvent(
            StartData(setOf(OperationType.Extract), fileUri = "/tmp/movie.mkv")
        )
        val parsedEvent = MediaStreamParsedEvent(
            data = ParsedMediaStreams(
                subtitleStream = listOf(
                    dummyStream(0, "subrip", "eng"),
                    dummyStream(1, "ass", "jpn")
                )
            )
        )
        val selectedEvent = MediaTracksExtractSelectedEvent(selectedSubtitleTracks = listOf(0, 1))

        val history = listOf(startEvent, parsedEvent)

        // Når: vi kaller onEvent
        val result = listener.onEvent(selectedEvent, history)

        // Så: TaskStore.persist skal ha blitt kalt to ganger
        verify(exactly = 2) { TaskStore.persist(any()) }

        // Fang begge objektene
        val slot = mutableListOf<Task>()
        verify { TaskStore.persist(capture(slot)) }

        // Sjekk første (SRT)
        val srtTask = slot[0] as ExtractSubtitleTask
        assertEquals("movie-eng.srt", srtTask.data.outputFileName)
        assertEquals("eng", srtTask.data.language)
        assertEquals(listOf("-map", "0:s:0", "-c:s", "copy"), srtTask.data.arguments)

        // Sjekk andre (ASS)
        val assTask = slot[1] as ExtractSubtitleTask
        assertEquals("movie-jpn.ass", assTask.data.outputFileName)
        assertEquals("jpn", assTask.data.language)
        assertEquals(listOf("-map", "0:s:1", "-c:s", "copy"), assTask.data.arguments)

        // Og: resultatet er et ProcesserExtractTaskCreatedEvent med to taskIds
        assertTrue(result is ProcesserExtractTaskCreatedEvent)
        val created = result as ProcesserExtractTaskCreatedEvent
        assertEquals(2, created.tasksCreated.size)
    }
}
|
||||
@ -0,0 +1,5 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
/**
 * Test class for the metadata-search task listener.
 *
 * TODO(review): no test cases implemented yet — this is a placeholder.
 */
class MediaCreateMetadataSearchTaskListenerTest
|
||||
@ -0,0 +1,176 @@
|
||||
@file:Suppress("JUnitMalformedDeclaration")
|
||||
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.ParsedMediaStreams
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleTags
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.Tags
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksDetermineSubtitleTypeEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.model.SubtitleType
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Named
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.MethodSource
|
||||
import java.util.stream.Stream
|
||||
|
||||
class MediaDetermineSubtitleTrackTypeListenerTest {

    private val listener = MediaDetermineSubtitleTrackTypeListener()

    /** One scenario: an input stream, the type it should classify as, and whether it survives filtering. */
    data class SubtitleTestCase(
        val stream: SubtitleStream,
        val expectedType: SubtitleType,
        val expectedKept: Boolean
    )

    companion object {

        /** Builds a minimal SubtitleStream fixture with the given codec, title and language. */
        private fun makeStream(codec: String, title: String?, language: String = "eng"): SubtitleStream =
            SubtitleStream(
                index = 0,
                codec_name = codec,
                codec_long_name = codec,
                codec_type = codec, // NB: her brukes codec_type i onlySupportedCodecs
                codec_tag_string = "",
                codec_tag = "",
                r_frame_rate = "0/0",
                avg_frame_rate = "0/0",
                time_base = "1/1000",
                start_pts = 0,
                start_time = "0",
                duration = null,
                duration_ts = null,
                disposition = null,
                tags = Tags(
                    title = title,
                    BPS = null,
                    DURATION = null,
                    NUMBER_OF_FRAMES = 0,
                    NUMBER_OF_BYTES = null,
                    _STATISTICS_WRITING_APP = null,
                    _STATISTICS_WRITING_DATE_UTC = null,
                    _STATISTICS_TAGS = null,
                    language = language,
                    filename = null,
                    mimetype = null
                ),
                subtitle_tags = SubtitleTags(language = language, filename = null, mimetype = null)
            )

        /** Wraps one scenario as a named JUnit argument (display name shows up in reports). */
        private fun case(
            display: String,
            codec: String,
            title: String,
            type: SubtitleType,
            kept: Boolean
        ): Named<SubtitleTestCase> =
            Named.of(display, SubtitleTestCase(makeStream(codec, title), type, kept))

        @JvmStatic
        fun subtitleCases(): Stream<Named<SubtitleTestCase>> = Stream.of(
            case("Commentary filtered out", "ass", "Director Commentary", SubtitleType.Commentary, false),
            case("Song filtered out", "subrip", "Song Lyrics", SubtitleType.Song, false),
            case("Closed Caption filtered out", "webvtt", "Closed Caption", SubtitleType.ClosedCaption, false),
            case("SHD filtered out", "smi", "SHD", SubtitleType.SHD, false),
            case("Dialogue kept", "ass", "Normal Dialogue", SubtitleType.Dialogue, true),
            case("Unsupported codec filtered out", "pgssub", "Dialogue", SubtitleType.Dialogue, false),
            // missing 'm'
            case("Commentary with typo", "ass", "Comentary track", SubtitleType.Commentary, false),
            // 'song' misspelled
            case("Song with variant spelling", "subrip", "Sogn lyrics", SubtitleType.Song, false),
            case("Closed Caption with dash", "webvtt", "Closed-caption subs", SubtitleType.ClosedCaption, false),
            // 'hh' is in SHD filters
            case("SHD with abbreviation", "smi", "HH subs", SubtitleType.SHD, false),
            case("Dialogue with extra tags", "ass", "Dialogue [ENG] normal", SubtitleType.Dialogue, true),
            case("Unsupported codec with random title", "pgssub", "Commentary track", SubtitleType.Commentary, false)
        )
    }

    @ParameterizedTest(name = "{0}")
    @MethodSource("subtitleCases")
    @DisplayName("Hvis ulike subtitles testes → riktig type og filtrering")
    fun testSubtitleCases(testCase: SubtitleTestCase) {
        val event = MediaStreamParsedEvent(
            ParsedMediaStreams(subtitleStream = listOf(testCase.stream))
        )
        val result = listener.onEvent(event, emptyList()) as MediaTracksDetermineSubtitleTypeEvent

        if (testCase.expectedKept) {
            assertEquals(1, result.subtitleTrackItems.size)
            assertEquals(testCase.expectedType, result.subtitleTrackItems[0].type)
        } else {
            assertTrue(result.subtitleTrackItems.isEmpty())
        }
    }
}
|
||||
@ -0,0 +1,188 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
|
||||
import com.google.gson.JsonParser
|
||||
import no.iktdev.eventi.models.Event
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
class MediaParseStreamsListenerTest {

    private val listener = MediaParseStreamsListener()

    /** Event type the listener does not handle; used to verify it is ignored. */
    class DummyEvent : Event()

    // BUGFIX: this test previously shared the exact same @DisplayName and an
    // almost-identical name (testparseMappingCorrectly, lowercase 'p') with
    // testParseStreamsMapsCorrectly below, making the two indistinguishable in
    // test reports. Renamed and the display name disambiguated; behavior
    // (full ffprobe-style JSON fixture) is unchanged.
    @Test
    @DisplayName("""
        Hvis JSON inneholder fullt utfylte video, audio og subtitle streams
        Når parseStreams kalles
        Så:
        Alle tre typer havner i riktig liste
    """)
    fun testParseFullStreamsMappingCorrectly() {
        val json = """
            {
              "streams": [
                {
                  "codec_name":"h264",
                  "codec_long_name":"H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10",
                  "codec_type":"video",
                  "codec_tag_string":"avc1",
                  "codec_tag":"0x31637661",
                  "r_frame_rate":"25/1",
                  "avg_frame_rate":"25/1",
                  "time_base":"1/90000",
                  "start_pts":0,
                  "start_time":"0.000000",
                  "disposition": { "default":1,"dub":0,"original":0,"comment":0,"lyrics":0,"karaoke":0,"forced":0,"hearing_impaired":0,"captions":0,"visual_impaired":0,"clean_effects":0,"attached_pic":0,"timed_thumbnails":0 },
                  "tags": { "title":"Main Video","language":"eng" },
                  "profile":"High",
                  "width":1920,
                  "height":1080,
                  "coded_width":1920,
                  "coded_height":1080,
                  "closed_captions":0,
                  "has_b_frames":2,
                  "sample_aspect_ratio":"1:1",
                  "display_aspect_ratio":"16:9",
                  "pix_fmt":"yuv420p",
                  "level":40,
                  "color_range":"tv",
                  "color_space":"bt709",
                  "color_transfer":"bt709",
                  "color_primaries":"bt709",
                  "chroma_location":"left",
                  "refs":1
                },
                {
                  "codec_name":"aac",
                  "codec_long_name":"AAC (Advanced Audio Coding)",
                  "codec_type":"audio",
                  "codec_tag_string":"mp4a",
                  "codec_tag":"0x6134706d",
                  "r_frame_rate":"0/0",
                  "avg_frame_rate":"0/0",
                  "time_base":"1/48000",
                  "start_pts":0,
                  "start_time":"0.000000",
                  "disposition": { "default":1,"dub":0,"original":0,"comment":0,"lyrics":0,"karaoke":0,"forced":0,"hearing_impaired":0,"captions":0,"visual_impaired":0,"clean_effects":0,"attached_pic":0,"timed_thumbnails":0 },
                  "tags": { "title":"Stereo Track","language":"eng" },
                  "profile":"LC",
                  "sample_fmt":"fltp",
                  "sample_rate":"48000",
                  "channels":2,
                  "channel_layout":"stereo",
                  "bits_per_sample":0
                },
                {
                  "codec_name":"ass",
                  "codec_long_name":"ASS (Advanced SSA Subtitle)",
                  "codec_type":"subtitle",
                  "codec_tag_string":"[0][0][0][0]",
                  "codec_tag":"0x0000",
                  "r_frame_rate":"0/0",
                  "avg_frame_rate":"0/0",
                  "time_base":"1/1000",
                  "start_pts":0,
                  "start_time":"0.000000",
                  "disposition": { "default":1,"dub":0,"original":0,"comment":0,"lyrics":0,"karaoke":0,"forced":0,"hearing_impaired":0,"captions":0,"visual_impaired":0,"clean_effects":0,"attached_pic":0,"timed_thumbnails":0 },
                  "tags": { "title":"English Subs","language":"eng" },
                  "subtitle_tags": { "language":"eng","filename":"subs.ass","mimetype":"text/x-ssa" }
                }
              ]
            }
        """.trimIndent()

        val parsed = listener.parseStreams(JsonParser.parseString(json).asJsonObject)

        assertEquals(1, parsed.videoStream.size)
        assertEquals("h264", parsed.videoStream[0].codec_name)

        assertEquals(1, parsed.audioStream.size)
        assertEquals("aac", parsed.audioStream[0].codec_name)

        assertEquals(1, parsed.subtitleStream.size)
        assertEquals("ass", parsed.subtitleStream[0].codec_name)
    }

    @Test
    @DisplayName("""
        Hvis event ikke er MediaStreamReadEvent
        Når onEvent kalles
        Så:
        Returneres null
    """)
    fun testOnEventNonMediaStreamReadEvent() {
        val result = listener.onEvent(DummyEvent(), emptyList())
        assertNull(result)
    }

    @Test
    @DisplayName("""
        Hvis JSON inneholder video, audio og subtitle streams
        Når parseStreams kalles
        Så:
        Alle tre typer havner i riktig liste
    """)
    fun testParseStreamsMapsCorrectly() {
        // Minimal fixture: only the fields parseStreams needs for routing.
        val json = """
            {
              "streams": [
                {"codec_name":"h264","codec_type":"video"},
                {"codec_name":"aac","codec_type":"audio"},
                {"codec_name":"ass","codec_type":"subtitle"}
              ]
            }
        """.trimIndent()

        val parsed = listener.parseStreams(JsonParser.parseString(json).asJsonObject)

        assertEquals(1, parsed.videoStream.size)
        assertEquals("h264", parsed.videoStream[0].codec_name)

        assertEquals(1, parsed.audioStream.size)
        assertEquals("aac", parsed.audioStream[0].codec_name)

        assertEquals(1, parsed.subtitleStream.size)
        assertEquals("ass", parsed.subtitleStream[0].codec_name)
    }

    @Test
    @DisplayName("""
        Hvis JSON inneholder codec_name png og mjpeg
        Når parseStreams kalles
        Så:
        Disse ignoreres og videoStream blir tom
    """)
    fun testParseStreamsIgnoresPngAndMjpeg() {
        val json = """
            {
              "streams": [
                {"codec_name":"png","codec_type":"video"},
                {"codec_name":"mjpeg","codec_type":"video"}
              ]
            }
        """.trimIndent()

        val parsed = listener.parseStreams(JsonParser.parseString(json).asJsonObject)
        assertTrue(parsed.videoStream.isEmpty())
    }

    @Test
    @DisplayName("""
        Hvis JSON mangler streams array
        Når parseStreams kalles
        Så:
        Kastes Exception
    """)
    fun testParseStreamsThrowsOnInvalidJson() {
        val json = """{}"""
        assertThrows(Exception::class.java) {
            listener.parseStreams(JsonParser.parseString(json).asJsonObject)
        }
    }
}
|
||||
@ -1,41 +1,43 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.events
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.mediaprocessing.coordinator.listeners.events.MediaParsedInfoListener
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaParsedInfoEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.assertj.core.api.Assertions
|
||||
import org.junit.jupiter.api.Named
|
||||
import org.junit.jupiter.api.TestInstance
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.MethodSource
|
||||
import java.io.File
|
||||
|
||||
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
|
||||
class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
|
||||
|
||||
@MethodSource("parsedInfoTest")
|
||||
@ParameterizedTest(name = "{0}")
|
||||
@MethodSource("parsedInfoTestCases")
|
||||
fun parsedInfoTest(testCase: ParsedInfoTestCase) {
|
||||
val testFile = testCase.file
|
||||
val collection = testFile.getDesiredCollection()
|
||||
val fileName = testFile.guessDesiredFileName()
|
||||
val searchTitles = testFile.guessSearchableTitle()
|
||||
assertThat(collection).isEqualTo(testCase.expectedTitle)
|
||||
assertThat(fileName).isEqualTo(testCase.expectedFileName)
|
||||
assertThat(searchTitles).isEqualTo(testCase.expectedSearchTitles)
|
||||
Assertions.assertThat(collection).isEqualTo(testCase.expectedTitle)
|
||||
Assertions.assertThat(fileName).isEqualTo(testCase.expectedFileName)
|
||||
Assertions.assertThat(searchTitles).isEqualTo(testCase.expectedSearchTitles)
|
||||
}
|
||||
|
||||
@MethodSource("parseVideoType")
|
||||
@MethodSource("parseVideoTypeCases")
|
||||
@ParameterizedTest(name = "{0}")
|
||||
fun parseVideoType(testCase: ParseVideoTypeTestCase) {
|
||||
val testFile = testCase.file
|
||||
val mediaType = testFile.guessMovieOrSeries()
|
||||
assertThat(mediaType).isEqualTo(testCase.expectedType)
|
||||
Assertions.assertThat(mediaType).isEqualTo(testCase.expectedType)
|
||||
}
|
||||
|
||||
data class ParsedInfoTestCase(
|
||||
val file: File,
|
||||
val expectedTitle: String,
|
||||
val expectedFileName: String,
|
||||
val expectedSearchTitles: List<String>
|
||||
val expectedSearchTitles: List<String>,
|
||||
val expectedEpisodeInfo: MediaParsedInfoEvent.ParsedData.EpisodeInfo? = null
|
||||
)
|
||||
|
||||
data class ParseVideoTypeTestCase(
|
||||
@ -46,7 +48,7 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
companion object {
|
||||
|
||||
@JvmStatic
|
||||
fun parsedInfoTest() = listOf(
|
||||
fun parsedInfoTestCases() = listOf(
|
||||
// existing parsed cases
|
||||
Named.of(
|
||||
"Series episode parsing",
|
||||
@ -54,7 +56,12 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
file = File("Fancy.Thomas.S03E03.Enemy.1080p.AMAZING.WEB-VALUE.DDP5AN.1.H.264.mkv"),
|
||||
expectedTitle = "Fancy Thomas",
|
||||
expectedFileName = "Fancy Thomas - S03E03 - Enemy",
|
||||
expectedSearchTitles = listOf("Fancy Thomas", "Fancy Thomas - S03E03 - Enemy")
|
||||
expectedSearchTitles = listOf("Fancy Thomas", "Fancy Thomas - S03E03 - Enemy"),
|
||||
expectedEpisodeInfo = MediaParsedInfoEvent.ParsedData.EpisodeInfo(
|
||||
seasonNumber = 3,
|
||||
episodeNumber = 3,
|
||||
episodeTitle = "Enemy"
|
||||
)
|
||||
)
|
||||
),
|
||||
Named.of(
|
||||
@ -63,7 +70,8 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
file = File("Epic.Potato.Movie.2021.1080p.BluRay.x264.mkv"),
|
||||
expectedTitle = "Epic Potato Movie",
|
||||
expectedFileName = "Epic Potato Movie",
|
||||
expectedSearchTitles = listOf("Epic Potato Movie")
|
||||
expectedSearchTitles = listOf("Epic Potato Movie"),
|
||||
expectedEpisodeInfo = null
|
||||
)
|
||||
),
|
||||
Named.of(
|
||||
@ -72,7 +80,12 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
file = File("Like.a.Potato.Chef.S01E01.Departure.\\u0026.Skills.1080p.Potato.mkv"),
|
||||
expectedTitle = "Like a Potato Chef",
|
||||
expectedFileName = "Like a Potato Chef - S01E01 - Departure \\u0026 Skills",
|
||||
expectedSearchTitles = listOf("Like a Potato Chef", "Like a Potato Chef - S01E01 - Departure \\u0026 Skills")
|
||||
expectedSearchTitles = listOf("Like a Potato Chef", "Like a Potato Chef - S01E01 - Departure \\u0026 Skills"),
|
||||
expectedEpisodeInfo = MediaParsedInfoEvent.ParsedData.EpisodeInfo(
|
||||
seasonNumber = 1,
|
||||
episodeNumber = 1,
|
||||
episodeTitle = "Departure \\u0026 Skills"
|
||||
)
|
||||
)
|
||||
),
|
||||
Named.of(
|
||||
@ -121,6 +134,11 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
expectedSearchTitles = listOf(
|
||||
"Dumb ways to die",
|
||||
"Dumb ways to die - S01E03 - How to unlucky i am"
|
||||
),
|
||||
expectedEpisodeInfo = MediaParsedInfoEvent.ParsedData.EpisodeInfo(
|
||||
episodeTitle = "How to unlucky i am",
|
||||
episodeNumber = 3,
|
||||
seasonNumber = 1
|
||||
)
|
||||
)
|
||||
),
|
||||
@ -202,7 +220,11 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
file = File("Show.Name.S01.E02.720p.HDTV.x264-Group_v2.mkv"),
|
||||
expectedTitle = "Show Name",
|
||||
expectedFileName = "Show Name - S01E02",
|
||||
expectedSearchTitles = listOf("Show Name", "Show Name - S01E02")
|
||||
expectedSearchTitles = listOf("Show Name", "Show Name - S01E02"),
|
||||
expectedEpisodeInfo = MediaParsedInfoEvent.ParsedData.EpisodeInfo(
|
||||
episodeNumber = 2,
|
||||
seasonNumber = 1
|
||||
)
|
||||
)
|
||||
),
|
||||
Named.of(
|
||||
@ -235,7 +257,7 @@ class MediaParsedInfoListenerTest : MediaParsedInfoListener() {
|
||||
)
|
||||
|
||||
@JvmStatic
|
||||
fun parseVideoType() = listOf(
|
||||
fun parseVideoTypeCases() = listOf(
|
||||
Named.of(
|
||||
"Series file detection full block",
|
||||
ParseVideoTypeTestCase(
|
||||
@ -0,0 +1,75 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.mediaprocessing.TestBase
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoordinatorReadStreamsTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaParsedInfoEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StartProcessingEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
class MediaReadStreamsTaskCreatedListenerTest : TestBase() {

    private val listener = MediaReadStreamsTaskCreatedListener()

    /** Shared fixture: parsed info for a plain movie file (used by two tests). */
    private fun movieParsedInfo(): MediaParsedInfoEvent =
        MediaParsedInfoEvent(
            MediaParsedInfoEvent.ParsedData(
                parsedCollection = "collection",
                parsedFileName = "file.mkv",
                parsedSearchTitles = listOf("title"),
                mediaType = MediaType.Movie
            )
        )

    @Test
    @DisplayName("""
        Hvis event ikke er MediaParsedInfoEvent
        Når onEvent kalles
        Så:
        Returneres null
    """)
    fun testOnEventNonParsedInfoEvent() {
        assertNull(listener.onEvent(DummyEvent(), emptyList()))
    }

    @Test
    @DisplayName("""
        Hvis event er MediaParsedInfoEvent men history mangler StartProcessingEvent
        Når onEvent kalles
        Så:
        Returneres null
    """)
    fun testOnEventParsedInfoEventWithoutStartProcessing() {
        assertNull(listener.onEvent(movieParsedInfo(), emptyList()))
    }

    @Test
    @DisplayName("""
        Hvis event er MediaParsedInfoEvent og history inneholder StartProcessingEvent
        Når onEvent kalles
        Så:
        Returneres CoordinatorReadStreamsTaskCreatedEvent med riktig taskId
    """)
    fun testOnEventParsedInfoEventWithStartProcessing() {
        val startEvent = StartProcessingEvent(StartData(fileUri = "file://test.mkv", operation = emptySet()))

        val outcome = listener.onEvent(movieParsedInfo(), listOf(startEvent))

        assertNotNull(outcome)
        assertTrue(outcome is CoordinatorReadStreamsTaskCreatedEvent)

        val coordinatorEvent = outcome as CoordinatorReadStreamsTaskCreatedEvent
        assertNotNull(coordinatorEvent.taskId)
    }
}
|
||||
@ -0,0 +1,219 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.AudioStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.Disposition
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.ParsedMediaStreams
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.Tags
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.VideoStream
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaStreamParsedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksEncodeSelectedEvent
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
class MediaTracksEncodeSelectorTest: MediaSelectEncodeTracksListener() {
|
||||
|
||||
private fun dummyAudioStream(
|
||||
index: Int,
|
||||
language: String,
|
||||
channels: Int,
|
||||
durationTs: Long = 1000
|
||||
): AudioStream {
|
||||
return AudioStream(
|
||||
index = index,
|
||||
codec_name = "aac",
|
||||
codec_long_name = "AAC",
|
||||
codec_type = "audio",
|
||||
codec_tag_string = "",
|
||||
codec_tag = "",
|
||||
r_frame_rate = "0/0",
|
||||
avg_frame_rate = "0/0",
|
||||
time_base = "1/1000",
|
||||
start_pts = 0,
|
||||
start_time = "0",
|
||||
duration = null,
|
||||
duration_ts = durationTs,
|
||||
disposition = Disposition(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
||||
tags = Tags(
|
||||
title = null, BPS = null, DURATION = null, NUMBER_OF_FRAMES = 0,
|
||||
NUMBER_OF_BYTES = null, _STATISTICS_WRITING_APP = null, _STATISTICS_WRITING_DATE_UTC = null,
|
||||
_STATISTICS_TAGS = null, language = language, filename = null, mimetype = null
|
||||
),
|
||||
profile = "LC",
|
||||
sample_fmt = "fltp",
|
||||
sample_rate = "48000",
|
||||
channels = channels,
|
||||
channel_layout = "stereo",
|
||||
bits_per_sample = 0
|
||||
)
|
||||
}
|
||||
|
||||
private fun dummyVideoStream(index: Int, durationTs: Long = 1000): VideoStream {
|
||||
return VideoStream(
|
||||
index = index,
|
||||
codec_name = "h264",
|
||||
codec_long_name = "H.264",
|
||||
codec_type = "video",
|
||||
codec_tag_string = "",
|
||||
codec_tag = "",
|
||||
r_frame_rate = "25/1",
|
||||
avg_frame_rate = "25/1",
|
||||
time_base = "1/1000",
|
||||
start_pts = 0,
|
||||
start_time = "0",
|
||||
duration = null,
|
||||
duration_ts = durationTs,
|
||||
disposition = Disposition(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
||||
tags = Tags(
|
||||
title = null, BPS = null, DURATION = null, NUMBER_OF_FRAMES = 0,
|
||||
NUMBER_OF_BYTES = null, _STATISTICS_WRITING_APP = null, _STATISTICS_WRITING_DATE_UTC = null,
|
||||
_STATISTICS_TAGS = null, language = "eng", filename = null, mimetype = null
|
||||
),
|
||||
profile = "main",
|
||||
width = 1920,
|
||||
height = 1080,
|
||||
coded_width = 1920,
|
||||
coded_height = 1080,
|
||||
closed_captions = 0,
|
||||
has_b_frames = 0,
|
||||
sample_aspect_ratio = "1:1",
|
||||
display_aspect_ratio = "16:9",
|
||||
pix_fmt = "yuv420p",
|
||||
level = 30,
|
||||
color_range = "tv",
|
||||
color_space = "bt709",
|
||||
color_transfer = "bt709",
|
||||
color_primaries = "bt709",
|
||||
chroma_location = "left",
|
||||
refs = 1
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis video streams har ulik varighet
|
||||
Når getVideoTrackToUse kalles
|
||||
Så:
|
||||
Returneres index til stream med lengst varighet
|
||||
""")
|
||||
fun testVideoTrackSelection() {
|
||||
val streams = listOf(dummyVideoStream(0, 1000), dummyVideoStream(1, 5000))
|
||||
val index = getVideoTrackToUse(streams)
|
||||
assertEquals(1, index)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis audio streams inneholder foretrukket språk jpn med 2 kanaler
|
||||
Når getAudioDefaultTrackToUse kalles
|
||||
Så:
|
||||
Returneres index til jpn stereo track
|
||||
""")
|
||||
fun testAudioDefaultTrackSelectionPreferredLanguageStereo() {
|
||||
val streams = listOf(
|
||||
dummyAudioStream(0, "eng", 2),
|
||||
dummyAudioStream(1, "jpn", 2),
|
||||
dummyAudioStream(2, "jpn", 6)
|
||||
)
|
||||
val index = getAudioDefaultTrackToUse(streams)
|
||||
assertEquals(1, index)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis audio streams inneholder foretrukket språk jpn med 6 kanaler
|
||||
Når getAudioExtendedTrackToUse kalles
|
||||
Så:
|
||||
Returneres index til jpn 6-kanals track
|
||||
""")
|
||||
fun testAudioExtendedTrackSelectionPreferredLanguageSurround() {
|
||||
val streams = listOf(
|
||||
dummyAudioStream(0, "jpn", 2),
|
||||
dummyAudioStream(1, "jpn", 6)
|
||||
)
|
||||
val defaultIndex = getAudioDefaultTrackToUse(streams)
|
||||
val extendedIndex = getAudioExtendedTrackToUse(streams, defaultIndex)
|
||||
assertEquals(0, defaultIndex)
|
||||
assertEquals(1, extendedIndex)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis audio streams ikke matcher foretrukket språk
|
||||
Når filterOnPreferredLanguage kalles
|
||||
Så:
|
||||
Returneres original liste uten filtrering
|
||||
""")
|
||||
fun testFilterOnPreferredLanguageFallback() {
|
||||
val streams = listOf(
|
||||
dummyAudioStream(0, "eng", 2),
|
||||
dummyAudioStream(1, "fra", 2)
|
||||
)
|
||||
val filtered = streams.filterOnPreferredLanguage()
|
||||
assertEquals(streams.size, filtered.size)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis audio streams ikke matcher foretrukket språk
|
||||
Når getAudioDefaultTrackToUse kalles
|
||||
Så:
|
||||
Velges et spor (fallback) selv om ingen matcher
|
||||
""")
|
||||
fun testAudioDefaultTrackFallbackSelection() {
|
||||
val streams = listOf(
|
||||
dummyAudioStream(0, "eng", 2),
|
||||
dummyAudioStream(1, "fra", 2)
|
||||
)
|
||||
|
||||
// filterOnPreferredLanguage skal returnere original listen
|
||||
val filtered = streams.filterOnPreferredLanguage()
|
||||
assertEquals(streams.size, filtered.size)
|
||||
|
||||
// getAudioDefaultTrackToUse skal likevel velge et spor
|
||||
val selectedIndex = getAudioDefaultTrackToUse(streams)
|
||||
|
||||
// Sjekk at det faktisk er en gyldig index
|
||||
assertTrue(selectedIndex in streams.indices)
|
||||
|
||||
// I dette tilfellet velges siste med høyest index (1)
|
||||
assertEquals(0, selectedIndex)
|
||||
}
|
||||
|
||||
|
||||
|
||||
class DummyEvent: Event()
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis event ikke er MediaStreamParsedEvent
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres null
|
||||
""")
|
||||
fun testOnEventNonParsedEvent() {
|
||||
val result = onEvent(DummyEvent(), emptyList())
|
||||
assertNull(result)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis event er MediaStreamParsedEvent med video og audio
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres MediaTracksEncodeSelectedEvent med riktige spor
|
||||
""")
|
||||
fun testOnEventParsedEvent() {
|
||||
val videoStreams = listOf(dummyVideoStream(0, 1000))
|
||||
val audioStreams = listOf(dummyAudioStream(0, "jpn", 2), dummyAudioStream(1, "jpn", 6))
|
||||
val parsedEvent = MediaStreamParsedEvent(
|
||||
ParsedMediaStreams(videoStream = videoStreams, audioStream = audioStreams, subtitleStream = emptyList())
|
||||
)
|
||||
val result = onEvent(parsedEvent, emptyList()) as MediaTracksEncodeSelectedEvent
|
||||
assertEquals(0, result.selectedVideoTrack)
|
||||
assertEquals(0, result.selectedAudioTrack)
|
||||
assertEquals(1, result.selectedAudioExtendedTrack)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,121 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import no.iktdev.mediaprocessing.TestBase
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleStream
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.SubtitleTags
|
||||
import no.iktdev.mediaprocessing.ffmpeg.data.Tags
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksDetermineSubtitleTypeEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaTracksExtractSelectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.model.SubtitleItem
|
||||
import no.iktdev.mediaprocessing.shared.common.model.SubtitleType
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
class MediaSelectExtractTracksListenerTest: TestBase() {
|
||||
|
||||
|
||||
// Vi lager en subclass som gir oss tilgang til alt og lar oss overstyre språkpreferanser
|
||||
class TestableMediaSelectExtractTracksListener(
|
||||
private val preferredLanguages: Set<String> = emptySet()
|
||||
) : MediaSelectExtractTracksListener() {
|
||||
override fun limitToLanguages(): Set<String> = preferredLanguages
|
||||
// gjør private extension tilgjengelig via wrapper
|
||||
fun callFilterOnPreferredLanguage(streams: List<SubtitleStream>): List<SubtitleStream> {
|
||||
return streams.filterOnPreferredLanguage()
|
||||
}
|
||||
}
|
||||
|
||||
private fun dummySubtitleStream(index: Int, language: String?, type: SubtitleType): SubtitleItem {
|
||||
val stream = SubtitleStream(
|
||||
index = index,
|
||||
codec_name = "ass",
|
||||
codec_long_name = "ASS",
|
||||
codec_type = "subtitle",
|
||||
codec_tag_string = "",
|
||||
codec_tag = "",
|
||||
r_frame_rate = "0/0",
|
||||
avg_frame_rate = "0/0",
|
||||
time_base = "1/1000",
|
||||
start_pts = 0,
|
||||
start_time = "0",
|
||||
duration = null,
|
||||
duration_ts = 1000,
|
||||
disposition = null,
|
||||
tags = Tags(
|
||||
title = null, BPS = null, DURATION = null, NUMBER_OF_FRAMES = 0,
|
||||
NUMBER_OF_BYTES = null, _STATISTICS_WRITING_APP = null, _STATISTICS_WRITING_DATE_UTC = null,
|
||||
_STATISTICS_TAGS = null, language = language, filename = null, mimetype = null
|
||||
),
|
||||
subtitle_tags = SubtitleTags(language = language, filename = null, mimetype = null)
|
||||
)
|
||||
return SubtitleItem(stream = stream, type = type)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis event ikke er MediaTracksDetermineSubtitleTypeEvent
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres null
|
||||
""")
|
||||
fun testOnEventNonSubtitleEvent() {
|
||||
val listener = TestableMediaSelectExtractTracksListener()
|
||||
val result = listener.onEvent(DummyEvent(), emptyList())
|
||||
assertNull(result)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis event inneholder Dialogue subtitles
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres MediaTracksExtractSelectedEvent med index til Dialogue tracks
|
||||
""")
|
||||
fun testOnEventDialogueTracksSelected() {
|
||||
val listener = TestableMediaSelectExtractTracksListener()
|
||||
val items = listOf(
|
||||
dummySubtitleStream(0, "eng", SubtitleType.Dialogue),
|
||||
dummySubtitleStream(1, "eng", SubtitleType.Commentary)
|
||||
)
|
||||
val event = MediaTracksDetermineSubtitleTypeEvent(subtitleTrackItems = items)
|
||||
val result = listener.onEvent(event, emptyList()) as MediaTracksExtractSelectedEvent
|
||||
assertEquals(listOf(0), result.selectedSubtitleTracks)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis limitToLanguages returnerer jpn
|
||||
Når filterOnPreferredLanguage kalles
|
||||
Så:
|
||||
Returneres kun spor med språk jpn
|
||||
""")
|
||||
fun testFilterOnPreferredLanguageWithLimit() {
|
||||
val listener = TestableMediaSelectExtractTracksListener(setOf("jpn"))
|
||||
val streams = listOf(
|
||||
dummySubtitleStream(0, "eng", SubtitleType.Dialogue).stream,
|
||||
dummySubtitleStream(1, "jpn", SubtitleType.Dialogue).stream
|
||||
)
|
||||
val filtered = listener.callFilterOnPreferredLanguage(streams)
|
||||
assertEquals(1, filtered.size)
|
||||
assertEquals("jpn", filtered[0].tags.language)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Hvis limitToLanguages er tom
|
||||
Når filterOnPreferredLanguage kalles
|
||||
Så:
|
||||
Returneres original liste uten filtrering
|
||||
""")
|
||||
fun testFilterOnPreferredLanguageNoLimit() {
|
||||
val listener = TestableMediaSelectExtractTracksListener()
|
||||
val streams = listOf(
|
||||
dummySubtitleStream(0, "eng", SubtitleType.Dialogue).stream,
|
||||
dummySubtitleStream(1, "fra", SubtitleType.Dialogue).stream
|
||||
)
|
||||
val filtered = listener.callFilterOnPreferredLanguage(streams)
|
||||
assertEquals(streams.size, filtered.size)
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,302 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import io.mockk.slot
|
||||
import io.mockk.verify
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.MockData.convertEvent
|
||||
import no.iktdev.mediaprocessing.MockData.coverEvent
|
||||
import no.iktdev.mediaprocessing.MockData.encodeEvent
|
||||
import no.iktdev.mediaprocessing.MockData.extractEvent
|
||||
import no.iktdev.mediaprocessing.MockData.mediaParsedEvent
|
||||
import no.iktdev.mediaprocessing.MockData.metadataEvent
|
||||
import no.iktdev.mediaprocessing.TestBase
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CollectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MigrateContentToStoreTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MigrateToContentStoreTask
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MigrateStatus
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
import java.io.File
|
||||
|
||||
class MigrateCreateStoreTaskListenerTest : TestBase() {
|
||||
|
||||
private val listener = MigrateCreateStoreTaskListener()
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis historikken inneholder gyldig parsed info, metadata og migreringsdata
|
||||
Når onEvent kalles med CollectedEvent
|
||||
Så:
|
||||
Opprettes MigrateToContentStoreTask og sendes til TaskStore.persist
|
||||
"""
|
||||
)
|
||||
fun `creates migrate-to-store task`() {
|
||||
val started = defaultStartEvent()
|
||||
|
||||
val parsed = mediaParsedEvent(
|
||||
collection = "MyCollection",
|
||||
fileName = "MyCollection 1",
|
||||
mediaType = MediaType.Movie
|
||||
).derivedOf(started)
|
||||
|
||||
val metadata = metadataEvent(parsed)
|
||||
|
||||
val encode = encodeEvent("/tmp/video.mp4", metadata.last())
|
||||
|
||||
val extract = extractEvent("en", "/tmp/sub1.srt", encode.last())
|
||||
|
||||
val coverDownload = coverEvent("/tmp/cover.jpg", metadata.last())
|
||||
|
||||
val convert = convertEvent(
|
||||
language = "en",
|
||||
baseName = "sub1",
|
||||
outputFiles = listOf("/tmp/sub1.vtt"),
|
||||
derivedFrom = extract.last()
|
||||
)
|
||||
|
||||
val migrate = migrateResultEvent(
|
||||
collection = "MyCollection",
|
||||
videoUri = "file:///video.mp4",
|
||||
coverUri = "file:///cover.jpg",
|
||||
subtitleUris = listOf("file:///sub1.srt", "file://sub1.vtt")
|
||||
).derivedOf(convert.last())
|
||||
|
||||
val collected = CollectedEvent(
|
||||
setOf(
|
||||
started.eventId,
|
||||
parsed.eventId,
|
||||
*metadata.map { it.eventId }.toTypedArray(),
|
||||
*encode.map { it.eventId }.toTypedArray(),
|
||||
*extract.map { it.eventId }.toTypedArray(),
|
||||
*convert.map { it.eventId }.toTypedArray(),
|
||||
*coverDownload.map { it.eventId }.toTypedArray(),
|
||||
migrate.eventId
|
||||
)
|
||||
).derivedOf(migrate)
|
||||
|
||||
val history = listOf(
|
||||
started,
|
||||
parsed,
|
||||
*metadata.toTypedArray(),
|
||||
*encode.toTypedArray(),
|
||||
*extract.toTypedArray(),
|
||||
*convert.toTypedArray(),
|
||||
*coverDownload.toTypedArray(),
|
||||
migrate,
|
||||
collected
|
||||
)
|
||||
|
||||
val result = listener.onEvent(collected, history)
|
||||
|
||||
assertThat(result).isNotNull()
|
||||
|
||||
verify(exactly = 1) {
|
||||
TaskStore.persist(withArg { task ->
|
||||
val storeTask = task as MigrateToContentStoreTask
|
||||
|
||||
assertThat(storeTask.data.collection).isEqualTo("MyCollection")
|
||||
assertThat(storeTask.data.videoContent).isNotNull()
|
||||
assertThat(storeTask.data.subtitleContent).hasSize(2)
|
||||
assertThat(storeTask.data.coverContent).hasSize(1)
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis historikken inneholder gyldig parsed info, metadata og migreringsdata
|
||||
Når onEvent kalles med CollectedEvent
|
||||
Så:
|
||||
Opprettes MigrateToContentStoreTask og sendes til TaskStore.persist
|
||||
"""
|
||||
)
|
||||
fun success1() {
|
||||
val started = defaultStartEvent()
|
||||
|
||||
val parsed = mediaParsedEvent(
|
||||
collection = "Baking Bread",
|
||||
fileName = "Baking Bread - S01E01 - Flour",
|
||||
mediaType = MediaType.Serie
|
||||
).derivedOf(started)
|
||||
|
||||
val metadata = metadataEvent(parsed)
|
||||
|
||||
val encode = encodeEvent("/tmp/video.mp4", metadata.last())
|
||||
|
||||
val extract = extractEvent("en", "/tmp/sub1.srt", encode.last())
|
||||
|
||||
val coverDownload = coverEvent("/tmp/cover.jpg", metadata.last())
|
||||
val coverDownload2 = coverEvent("/tmp/cover.jpg", metadata.last(), "potet")
|
||||
|
||||
val convert = convertEvent(
|
||||
language = "en",
|
||||
baseName = "sub1",
|
||||
outputFiles = listOf("/tmp/sub1.vtt"),
|
||||
derivedFrom = extract.last()
|
||||
)
|
||||
|
||||
|
||||
val collected = CollectedEvent(
|
||||
setOf(
|
||||
started.eventId,
|
||||
parsed.eventId,
|
||||
*metadata.map { it.eventId }.toTypedArray(),
|
||||
*encode.map { it.eventId }.toTypedArray(),
|
||||
*extract.map { it.eventId }.toTypedArray(),
|
||||
*convert.map { it.eventId }.toTypedArray(),
|
||||
*coverDownload.map { it.eventId }.toTypedArray(),
|
||||
*coverDownload2.map { it.eventId }.toTypedArray(),
|
||||
)
|
||||
).derivedOf(coverDownload.last())
|
||||
|
||||
val history = listOf(
|
||||
started,
|
||||
parsed,
|
||||
*metadata.toTypedArray(),
|
||||
*encode.toTypedArray(),
|
||||
*extract.toTypedArray(),
|
||||
*convert.toTypedArray(),
|
||||
*coverDownload.toTypedArray(),
|
||||
*coverDownload2.toTypedArray(),
|
||||
collected,
|
||||
)
|
||||
|
||||
val result = listener.onEvent(collected, history)
|
||||
|
||||
assertThat(result).isNotNull()
|
||||
|
||||
val slot = slot<MigrateToContentStoreTask>()
|
||||
|
||||
verify(exactly = 1) {
|
||||
TaskStore.persist(capture(slot))
|
||||
}
|
||||
|
||||
val storeTask = slot.captured
|
||||
|
||||
assertThat(storeTask.data.collection).isEqualTo("Baking Bread")
|
||||
assertThat(storeTask.data.videoContent).isNotNull()
|
||||
assertThat(storeTask.data.videoContent?.storeUri.let { f -> File(f).name })
|
||||
.isEqualTo("Baking Bread - S01E01 - Flour.mp4")
|
||||
|
||||
assertThat(storeTask.data.subtitleContent).hasSize(2)
|
||||
assertThat(
|
||||
storeTask.data.subtitleContent!!
|
||||
.map { File(it.storeUri).nameWithoutExtension }
|
||||
).containsOnly("Baking Bread - S01E01 - Flour")
|
||||
|
||||
assertThat(storeTask.data.coverContent).hasSize(2)
|
||||
assertThat(File(storeTask.data.coverContent!!.first().storeUri).name)
|
||||
.isEqualTo("Baking Bread-test.jpg")
|
||||
assertThat(File(storeTask.data.coverContent!!.last().storeUri).name)
|
||||
.isEqualTo("Baking Bread-potet.jpg")
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis start hendelsen kun inneholder converter, og vi har gjennomført konvertering
|
||||
Når onEvent kalles med CollectedEvent
|
||||
Så:
|
||||
Opprettes det migrate task
|
||||
"""
|
||||
)
|
||||
fun createMigrateForConvert() {
|
||||
val started = defaultStartEvent()
|
||||
|
||||
val parsed = mediaParsedEvent(
|
||||
collection = "MyCollection",
|
||||
fileName = "MyCollection 1",
|
||||
mediaType = MediaType.Subtitle
|
||||
).derivedOf(started)
|
||||
|
||||
val convert = convertEvent(
|
||||
language = "en",
|
||||
baseName = "sub1",
|
||||
outputFiles = listOf("/tmp/sub1.vtt"),
|
||||
derivedFrom = started
|
||||
)
|
||||
|
||||
val migrate = migrateResultEvent(
|
||||
collection = "MyCollection",
|
||||
videoUri = "file:///video.mp4",
|
||||
coverUri = "file:///cover.jpg",
|
||||
subtitleUris = listOf("file:///sub1.srt", "file://sub1.vtt")
|
||||
).derivedOf(convert.last())
|
||||
|
||||
val collected = CollectedEvent(
|
||||
setOf(
|
||||
started.eventId,
|
||||
parsed.eventId,
|
||||
*convert.map { it.eventId }.toTypedArray(),
|
||||
migrate.eventId,
|
||||
)
|
||||
).derivedOf(migrate)
|
||||
|
||||
val history = listOf(
|
||||
started,
|
||||
parsed,
|
||||
*convert.toTypedArray(),
|
||||
migrate,
|
||||
collected
|
||||
)
|
||||
|
||||
val result = listener.onEvent(collected, history)
|
||||
|
||||
assertThat(result).isNotNull()
|
||||
|
||||
verify(exactly = 1) {
|
||||
TaskStore.persist(withArg { task ->
|
||||
val storeTask = task as MigrateToContentStoreTask
|
||||
|
||||
assertThat(storeTask.data.collection).isEqualTo("MyCollection")
|
||||
assertThat(storeTask.data.videoContent).isNull()
|
||||
assertThat(storeTask.data.subtitleContent).hasSize(1)
|
||||
assertThat(storeTask.data.coverContent).isEmpty()
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// Helpers for generating events
|
||||
// ---------------------------------------------------------
|
||||
|
||||
private fun migrateResultEvent(
|
||||
collection: String,
|
||||
videoUri: String?,
|
||||
coverUri: String?,
|
||||
subtitleUris: List<String>
|
||||
) = MigrateContentToStoreTaskResultEvent(
|
||||
status = TaskStatus.Completed,
|
||||
collection = collection,
|
||||
videoMigrate = MigrateContentToStoreTaskResultEvent.FileMigration(
|
||||
storedUri = videoUri,
|
||||
status = if (videoUri != null) MigrateStatus.Completed else MigrateStatus.Failed
|
||||
),
|
||||
subtitleMigrate = subtitleUris.map {
|
||||
MigrateContentToStoreTaskResultEvent.SubtitleMigration(
|
||||
language = "en",
|
||||
storedUri = it,
|
||||
status = MigrateStatus.Completed
|
||||
)
|
||||
},
|
||||
coverMigrate = listOfNotNull(
|
||||
coverUri?.let {
|
||||
MigrateContentToStoreTaskResultEvent.FileMigration(
|
||||
storedUri = it,
|
||||
status = MigrateStatus.Completed
|
||||
)
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
@ -0,0 +1,181 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.events
|
||||
|
||||
import io.mockk.slot
|
||||
import io.mockk.verify
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.TestBase
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CollectedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MediaParsedInfoEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.MigrateContentToStoreTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StoreContentAndMetadataTaskCreatedEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.StoreContentAndMetadataTask
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MigrateStatus
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
class StoreContentAndMetadataListenerTest : TestBase() {
|
||||
|
||||
private val listener = StoreContentAndMetadataListener()
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis event ikke er et MigrateContentToStoreTaskResultEvent
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres null
|
||||
"""
|
||||
)
|
||||
fun `ignores non migrate events`() {
|
||||
val startedEvent = defaultStartEvent()
|
||||
val event = DummyEvent().derivedOf(startedEvent)
|
||||
val history = emptyList<Event>()
|
||||
|
||||
val result = listener.onEvent(event, history)
|
||||
|
||||
assertThat(result).isNull()
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis historikken ikke inneholder CollectedEvent
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres null
|
||||
"""
|
||||
)
|
||||
fun `returns null when no collected event exists`() {
|
||||
val startedEvent = defaultStartEvent()
|
||||
val event = migrateEvent().derivedOf(startedEvent)
|
||||
val history = listOf(DummyEvent().derivedOf(startedEvent))
|
||||
|
||||
val result = listener.onEvent(event, history)
|
||||
|
||||
assertThat(result).isNull()
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis collection eller metadata ikke kan projiseres
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Returneres null
|
||||
"""
|
||||
)
|
||||
fun `returns null when projection lacks collection or metadata`() {
|
||||
val startedEvent = defaultStartEvent()
|
||||
val event = migrateEvent().derivedOf(startedEvent)
|
||||
val collected = CollectedEvent(setOf(startedEvent.eventId, event.eventId))
|
||||
|
||||
// Historikken inneholder kun collected-eventet, ingen metadata eller parsed info
|
||||
val history = listOf(startedEvent, collected)
|
||||
|
||||
val result = listener.onEvent(event, history)
|
||||
|
||||
assertThat(result).isNull()
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Hvis historikken inneholder gyldig collection og metadata
|
||||
Når onEvent kalles
|
||||
Så:
|
||||
Opprettes StoreContentAndMetadataTask og det returneres et StoreContentAndMetadataTaskCreatedEvent
|
||||
"""
|
||||
)
|
||||
fun `creates task and returns created event`() {
|
||||
val startedEvent = defaultStartEvent()
|
||||
|
||||
val parsed = MediaParsedInfoEvent(
|
||||
data = MediaParsedInfoEvent.ParsedData(
|
||||
parsedCollection = "MyCollection",
|
||||
parsedFileName = "MyCollection",
|
||||
parsedSearchTitles = listOf("MyCollection"),
|
||||
mediaType = MediaType.Serie,
|
||||
episodeInfo = null
|
||||
)
|
||||
).derivedOf(startedEvent)
|
||||
|
||||
val migrate = migrateEvent(
|
||||
status = TaskStatus.Completed,
|
||||
collection = "Baking Bread",
|
||||
videoUri = "file:///Baking Bread/Baking Bread - S01E01 - Flour.mp4",
|
||||
coverUri = "file:///Baking Bread/Baking Bread.jpg",
|
||||
subtitleUris = listOf("file:///Baking Bread/en/Baking Bread - S01E01 - Flour.srt")
|
||||
).derivedOf(parsed)
|
||||
|
||||
val collected = CollectedEvent(setOf(startedEvent.eventId, parsed.eventId))
|
||||
.derivedOf(migrate)
|
||||
|
||||
val history = listOf(
|
||||
startedEvent,
|
||||
parsed,
|
||||
collected,
|
||||
migrate
|
||||
)
|
||||
|
||||
val result = listener.onEvent(migrate, history)
|
||||
assertThat(result).isInstanceOf(StoreContentAndMetadataTaskCreatedEvent::class.java)
|
||||
|
||||
val slot = slot<StoreContentAndMetadataTask>()
|
||||
|
||||
verify(exactly = 1) {
|
||||
TaskStore.persist(capture(slot))
|
||||
}
|
||||
|
||||
val storeTask = slot.captured
|
||||
assertThat(storeTask.data.collection).isEqualTo("Baking Bread")
|
||||
assertThat(storeTask.data.metadata.mediaType).isEqualTo(MediaType.Serie)
|
||||
assertThat(storeTask.data.media?.videoFile).isEqualTo("Baking Bread - S01E01 - Flour.mp4")
|
||||
assertThat(storeTask.data.media?.subtitles?.first()?.subtitleFile).isEqualTo("Baking Bread - S01E01 - Flour.srt")
|
||||
assertThat(storeTask.data.media?.subtitles?.first()?.language).isEqualTo("en")
|
||||
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------
|
||||
|
||||
private fun migrateEvent(
|
||||
status: TaskStatus = TaskStatus.Completed,
|
||||
collection: String = "TestCollection",
|
||||
videoUri: String? = null,
|
||||
coverUri: String? = null,
|
||||
subtitleUris: List<String> = emptyList()
|
||||
): MigrateContentToStoreTaskResultEvent {
|
||||
return MigrateContentToStoreTaskResultEvent(
|
||||
status = status,
|
||||
collection = collection,
|
||||
videoMigrate = MigrateContentToStoreTaskResultEvent.FileMigration(
|
||||
storedUri = videoUri,
|
||||
status = if (videoUri != null) MigrateStatus.Completed else MigrateStatus.Failed
|
||||
),
|
||||
subtitleMigrate = subtitleUris.map {
|
||||
MigrateContentToStoreTaskResultEvent.SubtitleMigration(
|
||||
language = "en",
|
||||
storedUri = it,
|
||||
status = MigrateStatus.Completed
|
||||
)
|
||||
},
|
||||
coverMigrate = listOfNotNull(
|
||||
coverUri?.let {
|
||||
MigrateContentToStoreTaskResultEvent.FileMigration(
|
||||
storedUri = it,
|
||||
status = MigrateStatus.Completed
|
||||
)
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
class DummyEvent : Event()
|
||||
|
||||
|
||||
}
|
||||
@ -0,0 +1,157 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.tasks
|
||||
|
||||
import com.google.gson.JsonObject
|
||||
import io.mockk.mockk
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.MockFFprobe
|
||||
import no.iktdev.mediaprocessing.ffmpeg.FFprobe
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.CoordinatorReadStreamsResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.MediaReadTask
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
|
||||
class MediaStreamReadTaskListenerTest {
|
||||
|
||||
class MediaStreamReadTaskListenerTestImplementation(): MediaStreamReadTaskListener() {
|
||||
|
||||
lateinit var probe: FFprobe
|
||||
override fun getFfprobe(): FFprobe {
|
||||
return probe
|
||||
}
|
||||
}
|
||||
|
||||
private val listener = MediaStreamReadTaskListenerTestImplementation()
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"Når støtter sjekk for MediaReadTask" +
|
||||
"Hvis task er av typen MediaReadTask" +
|
||||
"Så:" +
|
||||
" returnerer true"
|
||||
)
|
||||
fun `supports returns true for MediaReadTask`() {
|
||||
val mediaTask = mockk<MediaReadTask>()
|
||||
assertTrue(listener.supports(mediaTask))
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"Når støtter sjekk for ikke-MediaReadTask" +
|
||||
"Hvis task ikke er av typen MediaReadTask" +
|
||||
"Så:" +
|
||||
" returnerer false"
|
||||
)
|
||||
fun `supports returns false for non MediaReadTask`() {
|
||||
val otherTask = mockk<Task>()
|
||||
assertFalse(listener.supports(otherTask))
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"Når onTask kalles med ikke-MediaReadTask" +
|
||||
"Hvis task ikke kan castes til MediaReadTask" +
|
||||
"Så:" +
|
||||
" returnerer null"
|
||||
)
|
||||
fun `onTask returns null for non MediaReadTask`() = runBlocking {
|
||||
val otherTask = mockk<Task>()
|
||||
val result = listener.onTask(otherTask)
|
||||
assertNull(result)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"Når genererer worker id" +
|
||||
"Hvis worker id blir forespurt" +
|
||||
"Så:" +
|
||||
" inneholder id klasse navn og task type"
|
||||
)
|
||||
fun `getWorkerId contains class name and task type`() {
|
||||
val id = listener.getWorkerId()
|
||||
assertTrue(id.contains("MediaStreamReadTaskListener"))
|
||||
assertTrue(id.contains("CPU_INTENSIVE"))
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Når en MediaReadTask med gyldig filUri prosesseres
|
||||
Hvis FFprobe returnerer et gyldig JSON-objekt
|
||||
Så:
|
||||
Skal MediaStreamReadEvent produseres med data
|
||||
""")
|
||||
fun verifyEventProducedOnValidJson() = runTest {
|
||||
val listener = MediaStreamReadTaskListenerTestImplementation()
|
||||
val json = JsonObject().apply { addProperty("codec_type", "video") }
|
||||
listener.probe = MockFFprobe.success(json)
|
||||
|
||||
val task = MediaReadTask(fileUri = "test.mp4").newReferenceId()
|
||||
val event = listener.onTask(task)
|
||||
|
||||
assertNotNull(event)
|
||||
assertTrue(event is CoordinatorReadStreamsResultEvent)
|
||||
val result = event as CoordinatorReadStreamsResultEvent
|
||||
assertEquals(json, result.data)
|
||||
assertEquals(TaskStatus.Completed, result.status)
|
||||
assertEquals("test.mp4", (listener.probe as MockFFprobe).lastInputFile)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Når en MediaReadTask med ugyldig filUri prosesseres
|
||||
Hvis FFprobe feiler med parsing
|
||||
Så:
|
||||
Skal onTask returnere null og ikke kaste unntak
|
||||
""")
|
||||
fun verifyNullOnParsingError() = runTest {
|
||||
val listener = MediaStreamReadTaskListenerTestImplementation()
|
||||
listener.probe = MockFFprobe.failure("Could not parse")
|
||||
|
||||
val task = MediaReadTask(fileUri = "corrupt.mp4").newReferenceId()
|
||||
val event = listener.onTask(task)
|
||||
val result = event as CoordinatorReadStreamsResultEvent
|
||||
|
||||
assertNull(result.data)
|
||||
assertEquals(TaskStatus.Failed, result.status)
|
||||
assertEquals("corrupt.mp4", (listener.probe as MockFFprobe).lastInputFile)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Når en MediaReadTask prosesseres
|
||||
Hvis FFprobe kaster exception
|
||||
Så:
|
||||
Skal onTask returnere null og logge feilen
|
||||
""")
|
||||
fun verifyExceptionHandling() = runTest {
|
||||
val listener = MediaStreamReadTaskListenerTestImplementation()
|
||||
listener.probe = MockFFprobe.exception()
|
||||
|
||||
val task = MediaReadTask(fileUri = "broken.mp4").newReferenceId()
|
||||
val event = listener.onTask(task)
|
||||
assertInstanceOf(CoordinatorReadStreamsResultEvent::class.java, event)
|
||||
val resultEvent = event as CoordinatorReadStreamsResultEvent
|
||||
assertNull(event.data)
|
||||
assertEquals(TaskStatus.Failed, event.status)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("""
|
||||
Når en Task som ikke er MediaReadTask prosesseres
|
||||
Hvis supports sjekkes
|
||||
Så:
|
||||
Skal supports returnere false og onTask returnere null
|
||||
""")
|
||||
fun verifySupportsOnlyMediaReadTask() = runTest {
|
||||
val listener = MediaStreamReadTaskListenerTestImplementation()
|
||||
listener.probe = MockFFprobe.failure("Not used")
|
||||
|
||||
val otherTask = object : Task() {}.newReferenceId()
|
||||
assertFalse(listener.supports(otherTask))
|
||||
val event = listener.onTask(otherTask)
|
||||
assertNull(event)
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,165 @@
|
||||
package no.iktdev.mediaprocessing.coordinator.listeners.tasks
|
||||
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.StoreContentAndMetadataTaskResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.StoreContentAndMetadataTask
|
||||
import no.iktdev.mediaprocessing.shared.common.model.ContentExport
|
||||
import no.iktdev.mediaprocessing.shared.common.model.MediaType
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.jupiter.api.Assertions.*
|
||||
import org.junit.jupiter.api.BeforeEach
|
||||
import org.junit.jupiter.api.DisplayName
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.extension.ExtendWith
|
||||
import org.mockito.Mock
|
||||
import org.mockito.junit.jupiter.MockitoExtension
|
||||
import org.mockito.kotlin.any
|
||||
import org.mockito.kotlin.eq
|
||||
import org.mockito.kotlin.whenever
|
||||
import org.springframework.http.HttpMethod
|
||||
import org.springframework.http.HttpStatus
|
||||
import org.springframework.http.ResponseEntity
|
||||
import org.springframework.web.client.RestTemplate
|
||||
|
||||
@ExtendWith(MockitoExtension::class)
|
||||
class StoreContentAndMetadataTaskListenerTest {
|
||||
|
||||
@Mock
|
||||
lateinit var restTemplate: RestTemplate
|
||||
|
||||
lateinit var listener: StoreContentAndMetadataTaskListener
|
||||
|
||||
@BeforeEach
|
||||
fun setup() {
|
||||
listener = StoreContentAndMetadataTaskListener()
|
||||
listener.streamitRestTemplate = restTemplate
|
||||
}
|
||||
|
||||
private fun sampleContentExport(): ContentExport {
|
||||
return ContentExport(
|
||||
collection = "series",
|
||||
episodeInfo = ContentExport.EpisodeInfo(episodeNumber = 1, seasonNumber = 1, episodeTitle = "Pilot"),
|
||||
media = ContentExport.MediaExport(
|
||||
videoFile = "bb.s01e01.mkv",
|
||||
subtitles = listOf(ContentExport.MediaExport.Subtitle(subtitleFile = "bb.en.srt", language = "en"))
|
||||
),
|
||||
metadata = ContentExport.MetadataExport(
|
||||
title = "Breaking Bad",
|
||||
genres = listOf("Drama"),
|
||||
cover = "bb.jpg",
|
||||
summary = emptyList(),
|
||||
mediaType = MediaType.Serie,
|
||||
source = "local"
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Gitt en StoreContentAndMetadataTask
|
||||
Når supports() kalles
|
||||
Så:
|
||||
Returnerer true
|
||||
"""
|
||||
)
|
||||
fun supports_returnsTrueForCorrectTask() {
|
||||
val task = StoreContentAndMetadataTask(data = sampleContentExport())
|
||||
|
||||
val result = listener.supports(task)
|
||||
|
||||
assertThat(result).isTrue()
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Gitt en annen type Task
|
||||
Når supports() kalles
|
||||
Så:
|
||||
Returnerer false
|
||||
"""
|
||||
)
|
||||
fun supports_returnsFalseForWrongTask() {
|
||||
val task = object : Task() {}
|
||||
|
||||
val result = listener.supports(task)
|
||||
|
||||
assertThat(result).isFalse()
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Gitt at RestTemplate returnerer 200 OK
|
||||
Når onTask() kalles
|
||||
Så:
|
||||
Returnerer Completed-event
|
||||
"""
|
||||
)
|
||||
fun onTask_returnsCompletedOnSuccess() = runTest {
|
||||
val task = StoreContentAndMetadataTask(data = sampleContentExport())
|
||||
|
||||
whenever(
|
||||
restTemplate.exchange(
|
||||
eq("open/api/mediaprocesser/import"),
|
||||
eq(HttpMethod.POST),
|
||||
any(),
|
||||
eq(Void::class.java)
|
||||
)
|
||||
).thenReturn(ResponseEntity(HttpStatus.OK))
|
||||
|
||||
val event = listener.onTask(task)
|
||||
|
||||
assertThat(event).isInstanceOf(StoreContentAndMetadataTaskResultEvent::class.java)
|
||||
val result = event as StoreContentAndMetadataTaskResultEvent
|
||||
assertThat(result.taskStatus).isEqualTo(TaskStatus.Completed)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Gitt at RestTemplate kaster exception
|
||||
Når onTask() kalles
|
||||
Så:
|
||||
Returnerer Failed-event
|
||||
"""
|
||||
)
|
||||
fun onTask_returnsFailedOnException() = runTest {
|
||||
val task = StoreContentAndMetadataTask(data = sampleContentExport())
|
||||
|
||||
whenever(
|
||||
restTemplate.exchange(
|
||||
any<String>(),
|
||||
any(),
|
||||
any(),
|
||||
eq(Void::class.java)
|
||||
)
|
||||
).thenThrow(RuntimeException("boom"))
|
||||
|
||||
val event = listener.onTask(task)
|
||||
|
||||
assertThat(event).isInstanceOf(StoreContentAndMetadataTaskResultEvent::class.java)
|
||||
val result = event as StoreContentAndMetadataTaskResultEvent
|
||||
assertThat(result.taskStatus).isEqualTo(TaskStatus.Failed)
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName(
|
||||
"""
|
||||
Gitt en gyldig task
|
||||
Når getWorkerId() kalles
|
||||
Så:
|
||||
Returnerer en streng som inneholder klassenavn, tasktype og UUID
|
||||
"""
|
||||
)
|
||||
fun workerId_hasCorrectFormat() {
|
||||
val id = listener.getWorkerId()
|
||||
|
||||
assertThat(id).contains("StoreContentAndMetadataTaskListener-MIXED-")
|
||||
assertThat(id.split("-").last().length).isGreaterThan(10) // UUID-ish
|
||||
}
|
||||
}
|
||||
28
apps/coordinator/src/test/resources/application.yml
Normal file
28
apps/coordinator/src/test/resources/application.yml
Normal file
@ -0,0 +1,28 @@
|
||||
spring:
|
||||
main:
|
||||
allow-bean-definition-overriding: true
|
||||
flyway:
|
||||
enabled: false
|
||||
locations: classpath:flyway
|
||||
autoconfigure:
|
||||
exclude:
|
||||
- org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration
|
||||
|
||||
output:
|
||||
ansi:
|
||||
enabled: always
|
||||
|
||||
springdoc:
|
||||
swagger-ui:
|
||||
path: /open/swagger-ui
|
||||
|
||||
logging:
|
||||
level:
|
||||
org.springframework.web.socket.config.WebSocketMessageBrokerStats: WARN
|
||||
org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping: DEBUG
|
||||
|
||||
management:
|
||||
endpoints:
|
||||
web:
|
||||
exposure:
|
||||
include: mappings
|
||||
@ -35,7 +35,7 @@ dependencies {
|
||||
implementation("org.json:json:20210307")
|
||||
|
||||
implementation("no.iktdev:exfl:0.0.16-SNAPSHOT")
|
||||
implementation("no.iktdev:eventi:1.0-rc13")
|
||||
implementation("no.iktdev:eventi:1.0-rc15")
|
||||
|
||||
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
|
||||
|
||||
@ -11,7 +11,6 @@ class ProcesserEnv {
|
||||
val allowOverwrite = System.getenv("ALLOW_OVERWRITE").toBoolean() ?: false
|
||||
|
||||
var cachedContent: File = if (!System.getenv("DIRECTORY_CONTENT_CACHE").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_CACHE")) else File("/src/cache")
|
||||
val outgoingContent: File = if (!System.getenv("DIRECTORY_CONTENT_OUTGOING").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_OUTGOING")) else File("/src/output")
|
||||
|
||||
|
||||
val logDirectory = if (!System.getenv("LOG_DIR").isNullOrBlank()) File(System.getenv("LOG_DIR")) else
|
||||
|
||||
@ -4,7 +4,8 @@ import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.launch
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.tasks.AbstractTaskPoller
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskPollerImplementation
|
||||
import no.iktdev.eventi.tasks.TaskReporter
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.EventStore
|
||||
import no.iktdev.mediaprocessing.shared.common.stores.TaskStore
|
||||
@ -29,7 +30,7 @@ class PollerAdministrator(
|
||||
@Service
|
||||
class TaskPoller(
|
||||
private val reporter: TaskReporter,
|
||||
) : AbstractTaskPoller(
|
||||
) : TaskPollerImplementation(
|
||||
taskStore = TaskStore,
|
||||
reporterFactory = { reporter } // én reporter brukes for alle tasks
|
||||
) {
|
||||
@ -48,7 +49,7 @@ class DefaultTaskReporter() : TaskReporter {
|
||||
}
|
||||
|
||||
override fun markConsumed(taskId: UUID) {
|
||||
TaskStore.markConsumed(taskId)
|
||||
TaskStore.markConsumed(taskId, TaskStatus.Completed)
|
||||
}
|
||||
|
||||
override fun updateProgress(taskId: UUID, progress: Int) {
|
||||
|
||||
@ -9,14 +9,13 @@ import no.iktdev.mediaprocessing.ffmpeg.FFmpeg
|
||||
import no.iktdev.mediaprocessing.ffmpeg.arguments.MpegArgument
|
||||
import no.iktdev.mediaprocessing.processer.ProcesserEnv
|
||||
import no.iktdev.mediaprocessing.processer.Util
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ExtractResult
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ExtractSubtitleTask
|
||||
import org.springframework.stereotype.Service
|
||||
import java.util.UUID
|
||||
|
||||
@Service
|
||||
class SubtitleTaskListener: TaskListener(TaskType.CPU_INTENSIVE) {
|
||||
class SubtitleTaskListener: FfmpegTaskListener(TaskType.CPU_INTENSIVE) {
|
||||
override fun getWorkerId() = "${this::class.java.simpleName}-${taskType}-${UUID.randomUUID()}"
|
||||
|
||||
override fun supports(task: Task) = task is ExtractSubtitleTask
|
||||
@ -31,10 +30,8 @@ class SubtitleTaskListener: TaskListener(TaskType.CPU_INTENSIVE) {
|
||||
}
|
||||
|
||||
if (cachedOutFile.exists() && taskData.data.arguments.firstOrNull() != "-y") {
|
||||
reporter?.publishEvent(ProcesserExtractEvent(
|
||||
data = ExtractResult(
|
||||
status = TaskStatus.Failed
|
||||
)
|
||||
reporter?.publishEvent(ProcesserExtractResultEvent(
|
||||
status = TaskStatus.Failed
|
||||
).producedFrom(task))
|
||||
throw IllegalStateException("${cachedOutFile.absolutePath} does already exist, and arguments does not permit overwrite")
|
||||
}
|
||||
@ -44,23 +41,27 @@ class SubtitleTaskListener: TaskListener(TaskType.CPU_INTENSIVE) {
|
||||
.outputFile(cachedOutFile.absolutePath)
|
||||
.args(taskData.data.arguments)
|
||||
|
||||
val result = SubtitleFFmpeg()
|
||||
val result = getFfmpeg()
|
||||
withHeartbeatRunner {
|
||||
reporter?.updateLastSeen(task.taskId)
|
||||
}
|
||||
result.run(arguments)
|
||||
if (result.result.resultCode != 0 ) {
|
||||
return ProcesserExtractEvent(data = ExtractResult(status = TaskStatus.Failed)).producedFrom(task)
|
||||
return ProcesserExtractResultEvent(status = TaskStatus.Failed).producedFrom(task)
|
||||
}
|
||||
|
||||
return ProcesserExtractEvent(
|
||||
data = ExtractResult(
|
||||
status = TaskStatus.Completed,
|
||||
return ProcesserExtractResultEvent(
|
||||
status = TaskStatus.Completed,
|
||||
data = ProcesserExtractResultEvent.ExtractResult(
|
||||
language = taskData.data.language,
|
||||
cachedOutputFile = cachedOutFile.absolutePath
|
||||
)
|
||||
).producedFrom(task)
|
||||
}
|
||||
|
||||
override fun getFfmpeg(): FFmpeg {
|
||||
return SubtitleFFmpeg()
|
||||
}
|
||||
|
||||
|
||||
class SubtitleFFmpeg(override val listener: Listener? = null): FFmpeg(executable = ProcesserEnv.ffmpeg, logDir = ProcesserEnv.subtitleExtractLogDirectory ) {
|
||||
|
||||
@ -9,8 +9,7 @@ import no.iktdev.mediaprocessing.ffmpeg.arguments.MpegArgument
|
||||
import no.iktdev.mediaprocessing.ffmpeg.decoder.FfmpegDecodedProgress
|
||||
import no.iktdev.mediaprocessing.processer.ProcesserEnv
|
||||
import no.iktdev.mediaprocessing.processer.Util
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.EncodeResult
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserEncodeEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserEncodeResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.EncodeTask
|
||||
import org.springframework.stereotype.Service
|
||||
import java.util.*
|
||||
@ -29,10 +28,8 @@ class VideoTaskListener: FfmpegTaskListener(TaskType.CPU_INTENSIVE) {
|
||||
}
|
||||
}
|
||||
if (cachedOutFile.exists() && taskData.data.arguments.firstOrNull() != "-y") {
|
||||
reporter?.publishEvent(ProcesserEncodeEvent(
|
||||
data = EncodeResult(
|
||||
status = TaskStatus.Failed
|
||||
)
|
||||
reporter?.publishEvent(ProcesserEncodeResultEvent(
|
||||
status = TaskStatus.Failed
|
||||
).producedFrom(task))
|
||||
throw IllegalStateException("${cachedOutFile.absolutePath} does already exist, and arguments does not permit overwrite")
|
||||
}
|
||||
@ -49,12 +46,12 @@ class VideoTaskListener: FfmpegTaskListener(TaskType.CPU_INTENSIVE) {
|
||||
}
|
||||
result.run(arguments)
|
||||
if (result.result.resultCode != 0 ) {
|
||||
return ProcesserEncodeEvent(data = EncodeResult(status = TaskStatus.Failed)).producedFrom(task)
|
||||
return ProcesserEncodeResultEvent(status = TaskStatus.Failed).producedFrom(task)
|
||||
}
|
||||
|
||||
return ProcesserEncodeEvent(
|
||||
data = EncodeResult(
|
||||
status = TaskStatus.Completed,
|
||||
return ProcesserEncodeResultEvent(
|
||||
status = TaskStatus.Completed,
|
||||
data = ProcesserEncodeResultEvent.EncodeResult(
|
||||
cachedOutputFile = cachedOutFile.absolutePath
|
||||
)
|
||||
).producedFrom(task)
|
||||
|
||||
@ -0,0 +1,82 @@
|
||||
package no.iktdev.mediaprocessing.processer.listeners
|
||||
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import no.iktdev.eventi.models.Event
|
||||
import no.iktdev.eventi.models.Task
|
||||
import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskReporter
|
||||
import no.iktdev.eventi.tasks.TaskTypeRegistry
|
||||
import no.iktdev.mediaprocessing.ffmpeg.FFmpeg
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserExtractResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ExtractSubtitleData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.ExtractSubtitleTask
|
||||
import org.junit.jupiter.api.Assertions.assertEquals
|
||||
import org.junit.jupiter.api.Assertions.assertTrue
|
||||
import org.junit.jupiter.api.BeforeEach
|
||||
import org.junit.jupiter.api.Test
|
||||
import java.util.*
|
||||
import kotlin.system.measureTimeMillis
|
||||
|
||||
class SubtitleTaskListenerTest {
|
||||
|
||||
class TestListener(val delay: Long): SubtitleTaskListener() {
|
||||
fun getJob() = currentJob
|
||||
|
||||
private var _result: Event? = null
|
||||
fun getResult(): Event? {
|
||||
return _result
|
||||
}
|
||||
override fun onComplete(task: Task, result: Event?) {
|
||||
this._result = result
|
||||
}
|
||||
|
||||
override fun getFfmpeg(): FFmpeg {
|
||||
return MockFFmpeg(delayMillis = delay, listener = MockFFmpeg.emptyListener())
|
||||
}
|
||||
}
|
||||
|
||||
val overrideReporter = object : TaskReporter {
|
||||
override fun markClaimed(taskId: UUID, workerId: String) {}
|
||||
override fun updateLastSeen(taskId: UUID) {}
|
||||
override fun markConsumed(taskId: UUID) {}
|
||||
override fun updateProgress(taskId: UUID, progress: Int) {}
|
||||
override fun log(taskId: UUID, message: String) {}
|
||||
override fun publishEvent(event: Event) {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
fun setup() {
|
||||
TaskTypeRegistry.register(ExtractSubtitleTask::class.java)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `onTask waits for runner to complete`() = runTest {
|
||||
val delay = 1000L
|
||||
val testTask = ExtractSubtitleTask(
|
||||
ExtractSubtitleData(
|
||||
inputFile = "input.mp4",
|
||||
outputFileName = "output.srt",
|
||||
arguments = listOf("-y"),
|
||||
language = "eng"
|
||||
)
|
||||
).newReferenceId()
|
||||
|
||||
val listener = TestListener(delay)
|
||||
|
||||
val time = measureTimeMillis {
|
||||
val accepted = listener.accept(testTask, overrideReporter)
|
||||
assertTrue(accepted, "Task listener did not accept the task.")
|
||||
listener.getJob()?.join()
|
||||
val event = listener.getResult()
|
||||
assertTrue(event is ProcesserExtractResultEvent)
|
||||
assertEquals(TaskStatus.Completed, (event as ProcesserExtractResultEvent).status)
|
||||
}
|
||||
|
||||
assertTrue(time >= delay, "Expected onTask to wait at least $delay ms, waited for $time ms")
|
||||
assertTrue(time <= (delay*2), "Expected onTask to wait less than ${(delay*2)} ms, waited for $time ms")
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@ -7,14 +7,14 @@ import no.iktdev.eventi.models.store.TaskStatus
|
||||
import no.iktdev.eventi.tasks.TaskReporter
|
||||
import no.iktdev.eventi.tasks.TaskTypeRegistry
|
||||
import no.iktdev.mediaprocessing.ffmpeg.FFmpeg
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserEncodeEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.events.ProcesserEncodeResultEvent
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.EncodeData
|
||||
import no.iktdev.mediaprocessing.shared.common.event_task_contract.tasks.EncodeTask
|
||||
import org.junit.jupiter.api.Assertions.assertEquals
|
||||
import org.junit.jupiter.api.Assertions.assertTrue
|
||||
import org.junit.jupiter.api.BeforeEach
|
||||
import org.junit.jupiter.api.Test
|
||||
import java.util.UUID
|
||||
import java.util.*
|
||||
import kotlin.system.measureTimeMillis
|
||||
|
||||
class VideoTaskListenerTest {
|
||||
@ -68,8 +68,8 @@ class VideoTaskListenerTest {
|
||||
listener.accept(testTask, overrideReporter)
|
||||
listener.getJob()?.join()
|
||||
val event = listener.getResult()
|
||||
assertTrue(event is ProcesserEncodeEvent)
|
||||
assertEquals(TaskStatus.Completed, (event as ProcesserEncodeEvent).data.status)
|
||||
assertTrue(event is ProcesserEncodeResultEvent)
|
||||
assertEquals(TaskStatus.Completed, (event as ProcesserEncodeResultEvent).status)
|
||||
}
|
||||
|
||||
assertTrue(time >= delay, "Expected onTask to wait at least $delay ms, waited for $time ms")
|
||||
|
||||
4
apps/pyMetadata/.vscode/settings.json
vendored
Normal file
4
apps/pyMetadata/.vscode/settings.json
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"python.defaultInterpreterPath": "venv/bin/python",
|
||||
"python.terminal.activateEnvironment": true
|
||||
}
|
||||
11
apps/pyMetadata/.vscode/tasks.json
vendored
Normal file
11
apps/pyMetadata/.vscode/tasks.json
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Install requirements",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/venv/bin/pip install -r requirements.txt",
|
||||
"group": "build"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -12,7 +12,7 @@ from fuzzywuzzy import fuzz
|
||||
from algo.AdvancedMatcher import AdvancedMatcher
|
||||
from algo.SimpleMatcher import SimpleMatcher
|
||||
from algo.PrefixMatcher import PrefixMatcher
|
||||
from clazz.Metadata import Metadata
|
||||
from models.metadata import Metadata
|
||||
|
||||
from clazz.shared import EventData, EventMetadata, MediaEvent
|
||||
from app import MetadataEventHandler
|
||||
|
||||
3
apps/pyMetadata/README.md
Normal file
3
apps/pyMetadata/README.md
Normal file
@ -0,0 +1,3 @@
|
||||
python3 -m venv venv
|
||||
source venv/bin/activate
|
||||
pip install -r requirements.txt
|
||||
@ -1,41 +1,29 @@
|
||||
from fuzzywuzzy import fuzz
|
||||
import re
|
||||
from typing import List
|
||||
from fuzzywuzzy import fuzz
|
||||
from models.metadata import Metadata
|
||||
from .AlgorithmBase import AlgorithmBase, MatchResult
|
||||
from clazz.Metadata import Metadata
|
||||
|
||||
class AdvancedMatcher(AlgorithmBase):
|
||||
def clean_title(self, title: str) -> str:
|
||||
# Fjerner eventuelle ekstra tekster etter kolon eller andre skilletegn
|
||||
return re.sub(r'[:\-\—].*', '', title).strip()
|
||||
def clean(self, s: str) -> str:
|
||||
# Fjern alt etter kolon eller bindestrek, normaliser til lowercase
|
||||
return re.sub(r'[:\-\—].*', '', s).strip().lower()
|
||||
|
||||
def getBestMatch(self) -> Metadata | None:
|
||||
best_match = None
|
||||
best_score = -1
|
||||
match_results = []
|
||||
def getScore(self) -> int:
|
||||
best_score = 0
|
||||
|
||||
for title in self.titles:
|
||||
cleaned_title = self.clean_title(title) # Renset tittel uten ekstra tekst
|
||||
for metadata in self.metadata:
|
||||
cleaned_metadata_title = self.clean_title(metadata.title) # Renset metadata-tittel
|
||||
cleaned_title = self.clean(self.title)
|
||||
cleaned_metadata_title = self.clean(self.metadata.title)
|
||||
|
||||
# Compute different match ratios for both the original and cleaned titles
|
||||
original_title_ratio = fuzz.token_sort_ratio(title.lower(), metadata.title.lower())
|
||||
cleaned_title_ratio = fuzz.token_sort_ratio(cleaned_title.lower(), cleaned_metadata_title.lower())
|
||||
# Sammenlign original
|
||||
best_score = max(best_score, fuzz.token_sort_ratio(self.title.lower(), self.metadata.title.lower()))
|
||||
|
||||
alt_title_ratios = [fuzz.token_sort_ratio(cleaned_title.lower(), self.clean_title(alt_title).lower()) for alt_title in metadata.altTitle]
|
||||
max_alt_title_ratio = max(alt_title_ratios) if alt_title_ratios else 0
|
||||
# Sammenlign renset
|
||||
best_score = max(best_score, fuzz.token_sort_ratio(cleaned_title, cleaned_metadata_title))
|
||||
|
||||
# Combine ratios: take the best of original vs cleaned title, and alt title match
|
||||
combined_score = max(original_title_ratio, cleaned_title_ratio, max_alt_title_ratio)
|
||||
# Sammenlign mot altTitler
|
||||
for alt in self.metadata.altTitle:
|
||||
alt_score = fuzz.token_sort_ratio(cleaned_title, self.clean(alt))
|
||||
best_score = max(best_score, alt_score)
|
||||
|
||||
match_results.append(MatchResult(title, metadata.title, combined_score, metadata.source, metadata))
|
||||
|
||||
# Update best match if this one is better
|
||||
if combined_score > best_score:
|
||||
best_score = combined_score
|
||||
best_match = metadata if combined_score >= 70 else None
|
||||
|
||||
# Print match summary
|
||||
self.print_match_summary(match_results)
|
||||
|
||||
return best_match
|
||||
return best_score
|
||||
@ -1,13 +1,9 @@
|
||||
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import List
|
||||
|
||||
from fuzzywuzzy import fuzz, process
|
||||
from fuzzywuzzy import fuzz
|
||||
from tabulate import tabulate
|
||||
|
||||
from clazz.Metadata import Metadata
|
||||
from models.metadata import Metadata
|
||||
|
||||
@dataclass
|
||||
class MatchResult:
|
||||
@ -17,17 +13,19 @@ class MatchResult:
|
||||
source: str
|
||||
data: Metadata
|
||||
|
||||
|
||||
class AlgorithmBase(ABC):
|
||||
def __init__(self, titles: List[str], metadata: List[Metadata]):
|
||||
self.titles = titles
|
||||
def __init__(self, title: str, metadata: Metadata):
|
||||
self.title = title
|
||||
self.metadata = metadata
|
||||
|
||||
@abstractmethod
|
||||
def getBestMatch(self) -> Metadata | None:
|
||||
def getScore(self) -> int:
|
||||
"""
|
||||
Returnerer alle matchresultater med scorer.
|
||||
"""
|
||||
pass
|
||||
|
||||
def print_match_summary(self, match_results: List[MatchResult]):
|
||||
def print_match_summary(self, match_results: List[MatchResult]) -> None:
|
||||
headers = ["Title", "Matched Title", "Score", "Source"]
|
||||
data = [(result.title, result.matched_title, result.score, result.source) for result in match_results]
|
||||
data = [(r.title, r.matched_title, r.score, r.source) for r in match_results]
|
||||
print(tabulate(data, headers=headers))
|
||||
@ -2,53 +2,27 @@ import re
|
||||
from typing import List, Optional
|
||||
from fuzzywuzzy import fuzz, process
|
||||
from .AlgorithmBase import AlgorithmBase, MatchResult
|
||||
from clazz.Metadata import Metadata
|
||||
from models.metadata import Metadata
|
||||
|
||||
|
||||
class PrefixMatcher(AlgorithmBase):
|
||||
def preprocess(self, s: str) -> str:
|
||||
return re.sub(r'[^a-zA-Z0-9\s]', ' ', s).strip().lower()
|
||||
|
||||
def preprocess_text(self, text: str) -> str:
|
||||
unitext = re.sub(r'[^a-zA-Z0-9\s]', ' ', text)
|
||||
return unitext.strip().lower()
|
||||
def first_word(self, s: str) -> str:
|
||||
return self.preprocess(s).split(" ")[0] if s else ""
|
||||
|
||||
def source_priority(self, source: str) -> int:
|
||||
priority_map = {'mal': 1, 'anii': 2, 'imdb': 3}
|
||||
return priority_map.get(source, 4)
|
||||
def getScore(self) -> int:
|
||||
best_score = 0
|
||||
pt = self.first_word(self.title)
|
||||
|
||||
def getBestMatch(self) -> Optional[Metadata]:
|
||||
best_match = None
|
||||
best_score = -1
|
||||
match_results: List[MatchResult] = []
|
||||
# Mot hovedtittel
|
||||
meta_main = self.first_word(self.metadata.title)
|
||||
best_score = max(best_score, fuzz.ratio(pt, meta_main))
|
||||
|
||||
for title in self.titles:
|
||||
preprocessed_title = self.preprocess_text(title)[:1]
|
||||
# Mot altTitler
|
||||
for alt in self.metadata.altTitle:
|
||||
alt_score = fuzz.ratio(pt, self.first_word(alt))
|
||||
best_score = max(best_score, alt_score)
|
||||
|
||||
for metadata in self.metadata:
|
||||
preprocessed_metadata_title = self.preprocess_text(metadata.title)[:1]
|
||||
|
||||
# Match against metadata title
|
||||
score = fuzz.token_sort_ratio(preprocessed_title, preprocessed_metadata_title)
|
||||
match_results.append(MatchResult(title, metadata.title, score, metadata.source, metadata))
|
||||
if score > best_score:
|
||||
best_score = score
|
||||
best_match = metadata if score >= 70 else None
|
||||
|
||||
# Match against metadata altTitles
|
||||
for alt_title in metadata.altTitle:
|
||||
preprocessed_alt_title = self.preprocess_text(alt_title)[:1]
|
||||
alt_score = fuzz.token_sort_ratio(preprocessed_title, preprocessed_alt_title)
|
||||
match_results.append(MatchResult(title, alt_title, alt_score, metadata.source, metadata))
|
||||
if alt_score > best_score:
|
||||
best_score = alt_score
|
||||
best_match = metadata if alt_score >= 70 else None
|
||||
|
||||
match_results.sort(key=lambda x: (-x.score, self.source_priority(x.source)))
|
||||
|
||||
# Print match summary
|
||||
self.print_match_summary(match_results)
|
||||
|
||||
if match_results:
|
||||
top_result = match_results[0].data
|
||||
return top_result
|
||||
|
||||
return best_match
|
||||
return best_score
|
||||
|
||||
@ -1,40 +1,17 @@
|
||||
|
||||
import logging
|
||||
from typing import List
|
||||
from fuzzywuzzy import fuzz, process
|
||||
from .AlgorithmBase import AlgorithmBase, MatchResult
|
||||
from clazz.Metadata import Metadata
|
||||
from models.metadata import Metadata
|
||||
|
||||
|
||||
class SimpleMatcher(AlgorithmBase):
|
||||
def getBestMatch(self) -> Metadata | None:
|
||||
best_match = None
|
||||
best_score = -1
|
||||
match_results = []
|
||||
def getScore(self) -> int:
|
||||
best_score = fuzz.token_sort_ratio(self.title.lower(), self.metadata.title.lower())
|
||||
|
||||
try:
|
||||
for title in self.titles:
|
||||
for metadata in self.metadata:
|
||||
# Match against metadata title
|
||||
score = fuzz.token_sort_ratio(title.lower(), metadata.title.lower())
|
||||
match_results.append(MatchResult(title, metadata.title, score, metadata.source, metadata))
|
||||
if score > best_score:
|
||||
best_score = score
|
||||
best_match = metadata if score >= 70 else None
|
||||
for alt in self.metadata.altTitle:
|
||||
alt_score = fuzz.token_sort_ratio(self.title.lower(), alt.lower())
|
||||
best_score = max(best_score, alt_score)
|
||||
|
||||
# Match against metadata altTitles
|
||||
for alt_title in metadata.altTitle:
|
||||
alt_score = fuzz.token_sort_ratio(title.lower(), alt_title.lower())
|
||||
match_results.append(MatchResult(title, alt_title, alt_score, metadata.source, metadata))
|
||||
if alt_score > best_score:
|
||||
best_score = alt_score
|
||||
best_match = metadata if alt_score >= 70 else None
|
||||
except Exception as e:
|
||||
logging.debug("Unntak: {e}")
|
||||
logging.debug(f"type(title): {type(title)}, value: {title}")
|
||||
logging.debug(f"type(alt_title): {type(alt_title)}, value: {alt_title}")
|
||||
logging.debug(f"Metadata objekt:")
|
||||
logging.debug(metadata.to_dict())
|
||||
# Print match summary
|
||||
self.print_match_summary(match_results)
|
||||
|
||||
return best_match
|
||||
return best_score
|
||||
|
||||
@ -1,111 +0,0 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
from fuzzywuzzy import fuzz
|
||||
from unidecode import unidecode
|
||||
import logging
|
||||
import re
|
||||
|
||||
from clazz.Metadata import Metadata
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@dataclass
|
||||
class WeightedData:
|
||||
result: Metadata
|
||||
weight: float
|
||||
|
||||
@dataclass
|
||||
class DataAndScore:
|
||||
result: Metadata
|
||||
score: float
|
||||
weight: float
|
||||
matched_title: str
|
||||
|
||||
|
||||
class UseSource:
|
||||
titles: List[str] = []
|
||||
dataWeighed: List[WeightedData] = []
|
||||
|
||||
def __init__(self, titles: List[str], mal: Optional[Metadata] = None, imdb: Optional[Metadata] = None, anii: Optional[Metadata] = None) -> None:
|
||||
self.titles = titles
|
||||
if mal is not None:
|
||||
self.dataWeighed.append(WeightedData(mal, 1.5))
|
||||
|
||||
if imdb is not None:
|
||||
self.dataWeighed.append(WeightedData(imdb, 1))
|
||||
|
||||
if anii is not None:
|
||||
self.dataWeighed.append(WeightedData(anii, 1.3))
|
||||
|
||||
|
||||
def stripped(self, input_string) -> str:
|
||||
unitext = unidecode(input_string)
|
||||
unitext = re.sub(r'[^a-zA-Z0-9\s]', ' ', unitext)
|
||||
unitext = re.sub(r'\s{2,}', ' ', unitext)
|
||||
return unitext.strip()
|
||||
|
||||
|
||||
def __calculate_score(self, title: str, weightData: List[WeightedData]) -> List[DataAndScore]:
|
||||
result: List[DataAndScore] = []
|
||||
|
||||
for title_to_check in self.titles:
|
||||
for wd in weightData:
|
||||
if wd.result is None:
|
||||
continue
|
||||
|
||||
highScore = fuzz.ratio(self.stripped(title_to_check.lower()), self.stripped(wd.result.title.lower()))
|
||||
for alt_title in wd.result.altTitle:
|
||||
try:
|
||||
altScore = fuzz.ratio(self.stripped(title_to_check.lower()), self.stripped(alt_title.lower()))
|
||||
if altScore > highScore:
|
||||
highScore = altScore
|
||||
except Exception as e:
|
||||
logging.debug("Unntak: {e}")
|
||||
logging.debug(f"type(title): {type(title)}, value: {title}")
|
||||
logging.debug(f"type(alt_title): {type(alt_title)}, value: {alt_title}")
|
||||
logging.debug(f"Metadata objekt:")
|
||||
logging.debug(weightData)
|
||||
|
||||
givenScore = highScore * wd.weight
|
||||
result.append(DataAndScore(wd.result, givenScore, wd.weight, title_to_check))
|
||||
|
||||
result.sort(key=lambda x: x.score, reverse=True)
|
||||
return result
|
||||
|
||||
def select_result_table(self) -> Optional[pd.DataFrame]:
|
||||
scoredResults = []
|
||||
for title in self.titles:
|
||||
scoredResult = self.__calculate_score(title=title, weightData=self.dataWeighed)
|
||||
scoredResults.append(scoredResult)
|
||||
|
||||
all_results = [item for sublist in scoredResults for item in sublist]
|
||||
|
||||
if not all_results:
|
||||
return None
|
||||
|
||||
# Prepare data for DataFrame
|
||||
data = {
|
||||
"Title": [],
|
||||
"Alt Title": [],
|
||||
"Score": [],
|
||||
"Weight": [],
|
||||
"Matched Title": []
|
||||
}
|
||||
|
||||
for ds in all_results:
|
||||
metadata = ds.result
|
||||
data["Title"].append(metadata.title)
|
||||
data["Alt Title"].append(", ".join(metadata.altTitle))
|
||||
data["Score"].append(ds.score)
|
||||
data["Weight"].append(ds.weight)
|
||||
data["Matched Title"].append(ds.matched_title)
|
||||
|
||||
df = pd.DataFrame(data)
|
||||
df = df.sort_values(by="Score", ascending=False).reset_index(drop=True)
|
||||
|
||||
try:
|
||||
df.to_json(f"./logs/{self.titles[0]}.json", orient="records", indent=4)
|
||||
except Exception as e:
|
||||
log.error(f"Failed to dump JSON: {e}")
|
||||
|
||||
return df
|
||||
@ -1,348 +1,34 @@
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import os
|
||||
from typing import List, Optional
|
||||
import uuid
|
||||
import threading
|
||||
import json
|
||||
import time
|
||||
from fuzzywuzzy import fuzz
|
||||
import mysql.connector
|
||||
import mysql.connector.cursor
|
||||
from datetime import datetime
|
||||
import asyncio
|
||||
from config.database_config import DatabaseConfig
|
||||
from db.database import Database
|
||||
from utils.logger import logger
|
||||
from worker.poller import run_worker
|
||||
|
||||
# global flag for shutdown
|
||||
shutdown_flag = False
|
||||
|
||||
from algo.AdvancedMatcher import AdvancedMatcher
|
||||
from algo.SimpleMatcher import SimpleMatcher
|
||||
from algo.PrefixMatcher import PrefixMatcher
|
||||
from clazz.shared import EventMetadata, MediaEvent, event_data_to_json, json_to_media_event
|
||||
from clazz.Metadata import Metadata
|
||||
def handle_shutdown(signum, frame):
|
||||
global shutdown_flag
|
||||
logger.info("🛑 Shutdown signal mottatt, avslutter worker...")
|
||||
shutdown_flag = True
|
||||
|
||||
from sources.anii import Anii
|
||||
from sources.imdb import Imdb
|
||||
from sources.mal import Mal
|
||||
|
||||
from mysql.connector.abstracts import MySQLConnectionAbstract
|
||||
from mysql.connector.pooling import PooledMySQLConnection
|
||||
from mysql.connector.types import RowType as MySqlRowType
|
||||
|
||||
|
||||
# Konfigurer Database
|
||||
events_server_address = os.environ.get("DATABASE_ADDRESS") or "192.168.2.250" # "127.0.0.1"
|
||||
events_server_port = os.environ.get("DATABASE_PORT") or "3306"
|
||||
events_server_database_name = os.environ.get("DATABASE_NAME_E") or "eventsV3" # "events"
|
||||
events_server_username = os.environ.get("DATABASE_USERNAME") or "root"
|
||||
events_server_password = os.environ.get("DATABASE_PASSWORD") or "shFZ27eL2x2NoxyEDBMfDWkvFO" #"root" // default password
|
||||
log_level = os.environ.get("LOG_LEVEL") or None
|
||||
|
||||
configured_level = logging.INFO
|
||||
|
||||
if (log_level is not None):
|
||||
_log_level = log_level.lower()
|
||||
if (_log_level.startswith("d")):
|
||||
configured_level = logging.DEBUG
|
||||
elif (_log_level.startswith("e")):
|
||||
configured_level = logging.ERROR
|
||||
elif (_log_level.startswith("w")):
|
||||
configured_level = logging.WARNING
|
||||
|
||||
|
||||
|
||||
|
||||
# Konfigurer logging
|
||||
logging.basicConfig(
|
||||
level=configured_level,
|
||||
format="%(asctime)s [%(levelname)s] %(message)s",
|
||||
handlers=[
|
||||
logging.StreamHandler(sys.stdout)
|
||||
]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if (configured_level == logging.DEBUG):
|
||||
logger.info("Logger configured with DEBUG")
|
||||
elif (configured_level == logging.ERROR):
|
||||
logger.info("Logger configured with ERROR")
|
||||
elif (configured_level == logging.WARNING):
|
||||
logger.info("Logger configured with WARNING")
|
||||
else:
|
||||
logger.info("Logger configured with INFO")
|
||||
|
||||
class EventsPullerThread(threading.Thread):
|
||||
|
||||
connection: PooledMySQLConnection | MySQLConnectionAbstract | None = None
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.shutdown = threading.Event()
|
||||
|
||||
def getEventsAvailable(self, connection: PooledMySQLConnection | MySQLConnectionAbstract) -> List[MySqlRowType]:
|
||||
logging.debug("Looking for new available events")
|
||||
cursor = connection.cursor(dictionary=True)
|
||||
cursor.execute("""
|
||||
SELECT *
|
||||
FROM events
|
||||
WHERE referenceId IN (
|
||||
SELECT referenceId
|
||||
FROM events
|
||||
GROUP BY referenceId
|
||||
HAVING
|
||||
SUM(event = 'BaseInfoRead') > 0
|
||||
AND SUM(event = 'MetadataSearchPerformed') = 0
|
||||
AND SUM(event = 'ProcessCompleted') = 0
|
||||
)
|
||||
AND event = 'BaseInfoRead'
|
||||
AND JSON_UNQUOTE(JSON_EXTRACT(data, '$.metadata.status')) = 'Success';
|
||||
""")
|
||||
row = cursor.fetchall()
|
||||
cursor.close()
|
||||
return row
|
||||
|
||||
def storeProducedEvent(self, connection: PooledMySQLConnection | MySQLConnectionAbstract, event: MediaEvent) -> bool:
|
||||
try:
|
||||
cursor = connection.cursor()
|
||||
|
||||
query = """
|
||||
INSERT INTO events (referenceId, eventId, event, data)
|
||||
VALUES (%s, %s, %s, %s)
|
||||
"""
|
||||
cursor.execute(query, (
|
||||
event.metadata.referenceId,
|
||||
event.metadata.eventId,
|
||||
"MetadataSearchPerformed",
|
||||
event_data_to_json(event)
|
||||
))
|
||||
connection.commit()
|
||||
cursor.close()
|
||||
return True
|
||||
except mysql.connector.Error as err:
|
||||
logger.error("Error inserting into database: %s", err)
|
||||
return False
|
||||
|
||||
def __connect_to_datasource(self) -> bool:
|
||||
try:
|
||||
myConnection = mysql.connector.connect(
|
||||
host=events_server_address,
|
||||
port=events_server_port,
|
||||
database=events_server_database_name,
|
||||
user=events_server_username,
|
||||
password=events_server_password
|
||||
)
|
||||
if myConnection.is_connected():
|
||||
logging.debug(f"Successfully connected to {events_server_database_name} at {events_server_address}:{events_server_port}")
|
||||
self.connection = myConnection
|
||||
return True
|
||||
else:
|
||||
self.connection = None
|
||||
except Exception as e:
|
||||
logging.error(f"Error while connecting to database: {e}")
|
||||
logging.exception(e)
|
||||
self.connection = None
|
||||
return False
|
||||
|
||||
def __has_connection_to_database(self) -> bool:
|
||||
if (self.connection == None or self.connection.is_connected() == False):
|
||||
return False
|
||||
else:
|
||||
try:
|
||||
self.connection.ping(reconnect=True, attempts=5, delay=5)
|
||||
except Exception as e:
|
||||
logging.warning("Incorrect state for connection! Ping yielded no connection!")
|
||||
logging.exception(e)
|
||||
return False
|
||||
return True
|
||||
|
||||
def run(self) -> None:
|
||||
logger.info(f"Using {events_server_address}:{events_server_port} on table: {events_server_database_name} with user: {events_server_username}")
|
||||
while not self.shutdown.is_set():
|
||||
producedMessage: bool = False
|
||||
|
||||
while not self.shutdown.is_set() and self.__has_connection_to_database() != True:
|
||||
logging.debug("Attempting to reconnect to the database...")
|
||||
if (self.__connect_to_datasource() == False):
|
||||
logger.info("Failed to connect to database, waiting 5 seconds before retrying")
|
||||
time.sleep(5) # Wait 5 seconds before retrying
|
||||
else:
|
||||
logging.debug("A successful connection has been made!")
|
||||
|
||||
try:
|
||||
rows = self.getEventsAvailable(connection=self.connection)
|
||||
if (len(rows) == 0):
|
||||
logger.debug("No events found..")
|
||||
for row in rows:
|
||||
event: MediaEvent | None = None
|
||||
if (row is not None):
|
||||
try:
|
||||
referenceId = row["referenceId"]
|
||||
incomingEventType = row["event"]
|
||||
logMessage = f"""
|
||||
============================================================================
|
||||
Found message
|
||||
{referenceId}
|
||||
{incomingEventType}
|
||||
============================================================================\n"""
|
||||
logger.info(logMessage)
|
||||
|
||||
|
||||
event = json_to_media_event(row["data"])
|
||||
producedEvent = asyncio.run(MetadataEventHandler(event).run())
|
||||
|
||||
producedMessage = f"""
|
||||
============================================================================
|
||||
Producing message
|
||||
{referenceId}
|
||||
{incomingEventType}
|
||||
|
||||
{event_data_to_json(producedEvent)}
|
||||
============================================================================\n"""
|
||||
logger.info(producedMessage)
|
||||
|
||||
producedEvent = self.storeProducedEvent(connection=self.connection, event=producedEvent)
|
||||
|
||||
except Exception as e:
|
||||
"""Produce failure here"""
|
||||
logger.exception(e)
|
||||
try:
|
||||
producedEvent = MediaEvent(
|
||||
metadata = EventMetadata(
|
||||
referenceId=event.metadata.referenceId,
|
||||
eventId=str(uuid.uuid4()),
|
||||
derivedFromEventId=event.metadata.eventId,
|
||||
status= "Failed",
|
||||
created= datetime.now().isoformat(),
|
||||
source="metadataApp"
|
||||
),
|
||||
data=None,
|
||||
eventType="MetadataSearchPerformed"
|
||||
)
|
||||
self.storeProducedEvent(connection=self.connection, event=producedEvent)
|
||||
except Exception as iex:
|
||||
logger.error("Failed to push error to database..")
|
||||
self.connection.close()
|
||||
except mysql.connector.Error as err:
|
||||
logger.error("Database error: %s", err)
|
||||
|
||||
# Introduce a small sleep to reduce CPU usage
|
||||
time.sleep(5)
|
||||
if (self.shutdown.is_set()):
|
||||
logger.info("Shutdown is set..")
|
||||
logging.debug("End of puller function..")
|
||||
|
||||
|
||||
def stop(self):
|
||||
self.shutdown.set()
|
||||
global should_stop
|
||||
should_stop = True
|
||||
|
||||
class MetadataEventHandler:
|
||||
mediaEvent: MediaEvent | None = None
|
||||
|
||||
def __init__(self, data: MediaEvent):
|
||||
super().__init__()
|
||||
self.mediaEvent = data
|
||||
logger.info(self.mediaEvent)
|
||||
|
||||
async def run(self) -> MediaEvent | None:
|
||||
logger.info("Starting search")
|
||||
if self.mediaEvent is None:
|
||||
logger.error("Event does not contain anything...")
|
||||
return None
|
||||
|
||||
event: MediaEvent = self.mediaEvent
|
||||
|
||||
unique_titles = set(event.data.searchTitles)
|
||||
unique_titles.update([
|
||||
event.data.title,
|
||||
event.data.sanitizedName
|
||||
])
|
||||
searchableTitles = list(unique_titles)
|
||||
|
||||
joinedTitles = "\n".join(searchableTitles)
|
||||
logger.info("Searching for:\n%s", joinedTitles)
|
||||
|
||||
# Kjør den asynkrone søkemetoden
|
||||
result: Metadata | None = await self.__getMetadata(searchableTitles)
|
||||
|
||||
result_message: str | None = None
|
||||
if result is None:
|
||||
result_message = f"No result for {joinedTitles}"
|
||||
logger.info(result_message)
|
||||
|
||||
producedEvent = MediaEvent(
|
||||
metadata=EventMetadata(
|
||||
referenceId=event.metadata.referenceId,
|
||||
eventId=str(uuid.uuid4()),
|
||||
derivedFromEventId=event.metadata.eventId,
|
||||
status="Failed" if result is None else "Success",
|
||||
created=datetime.now().isoformat(),
|
||||
source="metadataApp"
|
||||
),
|
||||
data=result,
|
||||
eventType="MetadataSearchPerformed"
|
||||
)
|
||||
return producedEvent
|
||||
|
||||
async def __getMetadata(self, titles: List[str]) -> Metadata | None:
|
||||
mal = Mal(titles=titles)
|
||||
anii = Anii(titles=titles)
|
||||
imdb = Imdb(titles=titles)
|
||||
|
||||
results: List[Metadata | None] = await asyncio.gather(
|
||||
mal.search(),
|
||||
anii.search(),
|
||||
imdb.search()
|
||||
)
|
||||
|
||||
filtered_results = [result for result in results if result is not None]
|
||||
logger.info("\nSimple matcher")
|
||||
simpleSelector = SimpleMatcher(titles=titles, metadata=filtered_results).getBestMatch()
|
||||
logger.info("\nAdvanced matcher")
|
||||
advancedSelector = AdvancedMatcher(titles=titles, metadata=filtered_results).getBestMatch()
|
||||
logger.info("\nPrefix matcher")
|
||||
prefixSelector = PrefixMatcher(titles=titles, metadata=filtered_results).getBestMatch()
|
||||
|
||||
if advancedSelector is not None:
|
||||
return advancedSelector
|
||||
if simpleSelector is not None:
|
||||
return simpleSelector
|
||||
if prefixSelector is not None:
|
||||
return prefixSelector
|
||||
return None
|
||||
|
||||
# Global variabel for å indikere om applikasjonen skal avsluttes
|
||||
should_stop = False
|
||||
|
||||
# Signalhåndteringsfunksjon
|
||||
def signal_handler(sig, frame):
|
||||
global should_stop
|
||||
should_stop = True
|
||||
|
||||
# Hovedprogrammet
|
||||
def main():
|
||||
# registrer signal handlers for graceful shutdown
|
||||
signal.signal(signal.SIGINT, handle_shutdown)
|
||||
signal.signal(signal.SIGTERM, handle_shutdown)
|
||||
|
||||
logger.info("🚀 Starter worker-applikasjon")
|
||||
try:
|
||||
# Angi signalhåndterer for å fange opp SIGINT (Ctrl+C)
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
|
||||
# Opprett og start consumer-tråden
|
||||
consumer_thread = EventsPullerThread()
|
||||
consumer_thread.start()
|
||||
|
||||
logger.info("App started")
|
||||
|
||||
# Vent til should_stop er satt til True for å avslutte applikasjonen
|
||||
while not should_stop:
|
||||
time.sleep(60)
|
||||
|
||||
# Stopp consumer-tråden
|
||||
consumer_thread.stop()
|
||||
consumer_thread.join()
|
||||
except:
|
||||
logger.info("App crashed")
|
||||
config: DatabaseConfig = DatabaseConfig.from_env()
|
||||
db: Database = Database(config)
|
||||
db.connect()
|
||||
run_worker(db=db, shutdown_flag_ref=lambda: shutdown_flag)
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Kritisk feil i app: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
logger.info("App stopped")
|
||||
sys.exit(0)
|
||||
if __name__ == '__main__':
|
||||
logger.info("👋 Worker avsluttet gracefully")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@ -1,33 +0,0 @@
|
||||
from dataclasses import asdict, dataclass
|
||||
from typing import List, Optional
|
||||
|
||||
@dataclass
|
||||
class Summary:
|
||||
summary: str
|
||||
language: str
|
||||
|
||||
def to_dict(self):
|
||||
return asdict(self)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Metadata:
|
||||
title: str
|
||||
altTitle: List[str]
|
||||
cover: str
|
||||
banner: Optional[str]
|
||||
type: str # Serie/Movie
|
||||
summary: List[Summary]
|
||||
genres: List[str]
|
||||
source: str
|
||||
|
||||
def to_dict(self):
|
||||
# Trimmer alle strenger før de konverteres til dict
|
||||
def trim(item):
|
||||
if isinstance(item, str):
|
||||
return item.strip()
|
||||
elif isinstance(item, list):
|
||||
return [trim(sub_item) for sub_item in item]
|
||||
return item
|
||||
|
||||
return {key: trim(value) for key, value in asdict(self).items()}
|
||||
@ -1,73 +0,0 @@
|
||||
import json
|
||||
from dataclasses import dataclass, asdict
|
||||
from typing import Any, Dict, List, Optional
|
||||
from datetime import datetime
|
||||
|
||||
# Definer dataclassene for strukturen
|
||||
@dataclass
|
||||
class EventMetadata:
|
||||
derivedFromEventId: str
|
||||
eventId: str
|
||||
referenceId: str
|
||||
status: str
|
||||
created: datetime
|
||||
source: str
|
||||
|
||||
def to_dict(self):
|
||||
return asdict(self)
|
||||
|
||||
|
||||
@dataclass
|
||||
class EventData:
|
||||
title: str
|
||||
sanitizedName: str
|
||||
searchTitles: List[str]
|
||||
|
||||
def to_dict(self):
|
||||
return asdict(self)
|
||||
|
||||
@dataclass
|
||||
class MediaEvent:
|
||||
metadata: EventMetadata
|
||||
eventType: str
|
||||
data: Any| EventData
|
||||
|
||||
def to_dict(self):
|
||||
return asdict(self)
|
||||
|
||||
# Funksjon for å parse datetime fra streng
|
||||
def parse_datetime(datetime_str: str) -> datetime:
|
||||
return datetime.fromisoformat(datetime_str)
|
||||
|
||||
def event_data_to_json(event_data: EventData) -> str:
|
||||
return json.dumps(event_data.to_dict())
|
||||
|
||||
# Funksjon for å konvertere JSON til klasser
|
||||
def json_to_media_event(json_data: str) -> MediaEvent:
|
||||
data_dict = json.loads(json_data)
|
||||
|
||||
metadata_dict: Dict[str, str] = data_dict['metadata']
|
||||
event_data_dict = data_dict['data']
|
||||
|
||||
metadata = EventMetadata(
|
||||
derivedFromEventId=metadata_dict['derivedFromEventId'],
|
||||
eventId=metadata_dict['eventId'],
|
||||
referenceId=metadata_dict['referenceId'],
|
||||
status=metadata_dict['status'],
|
||||
created=parse_datetime(metadata_dict['created']),
|
||||
source= metadata_dict.get('source', None)
|
||||
)
|
||||
|
||||
event_data = EventData(
|
||||
title=event_data_dict['title'],
|
||||
sanitizedName=event_data_dict['sanitizedName'],
|
||||
searchTitles=event_data_dict['searchTitles']
|
||||
)
|
||||
|
||||
media_event = MediaEvent(
|
||||
metadata=metadata,
|
||||
eventType=data_dict['eventType'],
|
||||
data=event_data
|
||||
)
|
||||
|
||||
return media_event
|
||||
@ -7,3 +7,4 @@ mal-api>=0.5.3
|
||||
Unidecode>=1.3.8
|
||||
tabulate>=0.9.0
|
||||
mysql-connector-python>=9.0.0
|
||||
pydantic>=2.12.5
|
||||
@ -2,7 +2,8 @@ import logging, sys
|
||||
import hashlib
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
from clazz.Metadata import Metadata, Summary
|
||||
from models.enums import MediaType
|
||||
from models.metadata import Metadata, Summary
|
||||
from .source import SourceBase
|
||||
|
||||
from AnilistPython import Anilist
|
||||
@ -90,5 +91,5 @@ class Anii(SourceBase):
|
||||
return hashlib.md5(text.encode()).hexdigest()
|
||||
return None
|
||||
|
||||
def getMediaType(self, type: str) -> str:
|
||||
return 'movie' if type.lower() == 'movie' else 'serie'
|
||||
def getMediaType(self, type: str) -> MediaType:
|
||||
return MediaType.MOVIE if type.lower() == 'movie' else MediaType.SERIE
|
||||
@ -4,7 +4,8 @@ from imdb.Movie import Movie
|
||||
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
from clazz.Metadata import Metadata, Summary
|
||||
from models.enums import MediaType
|
||||
from models.metadata import Metadata, Summary
|
||||
from .source import SourceBase
|
||||
|
||||
import asyncio
|
||||
@ -74,5 +75,5 @@ class Imdb(SourceBase):
|
||||
log.exception(e)
|
||||
return None
|
||||
|
||||
def getMediaType(self, type: str) -> str:
|
||||
return 'movie' if type.lower() == 'movie' else 'serie'
|
||||
def getMediaType(self, type: str) -> MediaType:
|
||||
return MediaType.MOVIE if type.lower() == 'movie' else MediaType.SERIE
|
||||
@ -1,7 +1,8 @@
|
||||
import logging, sys
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from clazz.Metadata import Metadata, Summary
|
||||
from models.enums import MediaType
|
||||
from models.metadata import Metadata, Summary
|
||||
from .source import SourceBase
|
||||
|
||||
from mal import Anime, AnimeSearch, AnimeSearchResult
|
||||
@ -69,5 +70,5 @@ class Mal(SourceBase):
|
||||
log.exception(e)
|
||||
return None
|
||||
|
||||
def getMediaType(self, type: str) -> str:
|
||||
return 'movie' if type.lower() == 'movie' else 'serie'
|
||||
def getMediaType(self, type: str) -> MediaType:
|
||||
return MediaType.MOVIE if type.lower() == 'movie' else MediaType.SERIE
|
||||
@ -5,7 +5,7 @@ from typing import List, Tuple
|
||||
|
||||
from fuzzywuzzy import fuzz
|
||||
|
||||
from clazz.Metadata import Metadata
|
||||
from models.metadata import Metadata
|
||||
import asyncio
|
||||
|
||||
|
||||
|
||||
@ -1,39 +0,0 @@
|
||||
import unittest
|
||||
import json
|
||||
from sources.result import Metadata, DataResult
|
||||
|
||||
class SerializationTest(unittest.TestCase):
|
||||
def test_metadata_to_json(self):
|
||||
metadata = Metadata(
|
||||
title='Sample Title',
|
||||
altTitle='Alternate Title',
|
||||
cover='path/to/cover.jpg',
|
||||
type='Movie',
|
||||
summary='Lorem ipsum dolor sit amet',
|
||||
genres=['Action', 'Drama', 'Thriller']
|
||||
)
|
||||
|
||||
metadata_json = json.dumps(metadata.to_dict())
|
||||
self.assertIsInstance(metadata_json, str)
|
||||
|
||||
def test_data_result_to_json(self):
|
||||
metadata = Metadata(
|
||||
title='Sample Title',
|
||||
altTitle='Alternate Title',
|
||||
cover='path/to/cover.jpg',
|
||||
type='Movie',
|
||||
summary='Lorem ipsum dolor sit amet',
|
||||
genres=['Action', 'Drama', 'Thriller']
|
||||
)
|
||||
|
||||
data_result = DataResult(
|
||||
statusType='SUCCESS',
|
||||
errorMessage=None,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
data_result_json = json.dumps(data_result.to_dict())
|
||||
self.assertIsInstance(data_result_json, str)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
4
apps/pyWatcher/.vscode/settings.json
vendored
Normal file
4
apps/pyWatcher/.vscode/settings.json
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"python.defaultInterpreterPath": "venv/bin/python",
|
||||
"python.terminal.activateEnvironment": true
|
||||
}
|
||||
11
apps/pyWatcher/.vscode/tasks.json
vendored
Normal file
11
apps/pyWatcher/.vscode/tasks.json
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Install requirements",
|
||||
"type": "shell",
|
||||
"command": "${workspaceFolder}/venv/bin/pip install -r requirements.txt",
|
||||
"group": "build"
|
||||
}
|
||||
]
|
||||
}
|
||||
65
apps/pyWatcher/app.py
Normal file
65
apps/pyWatcher/app.py
Normal file
@ -0,0 +1,65 @@
|
||||
import asyncio
|
||||
import signal
|
||||
import sys
|
||||
import uvicorn
|
||||
from api.health_api import create_health_app
|
||||
from config.database_config import DatabaseConfig
|
||||
from db.database import Database
|
||||
from db.repository import insert_event
|
||||
from worker.file_watcher import start_observer
|
||||
from utils.logger import logger
|
||||
|
||||
# global flag for shutdown
|
||||
shutdown_flag = False
|
||||
observers = []
|
||||
|
||||
def handle_shutdown(signum, frame):
|
||||
global shutdown_flag
|
||||
logger.info("🛑 Shutdown signal mottatt, avslutter worker...")
|
||||
shutdown_flag = True
|
||||
|
||||
async def run_worker(db: Database, paths, extensions, shutdown_flag_ref):
|
||||
global observers
|
||||
observers = observers = [start_observer(db, [p], extensions, insert_event) for p in paths]
|
||||
try:
|
||||
while not shutdown_flag_ref():
|
||||
await asyncio.sleep(5)
|
||||
finally:
|
||||
logger.info("🛑 Stopper observer...")
|
||||
for obs in observers:
|
||||
obs.stop()
|
||||
obs.join()
|
||||
logger.info("👋 Alle observers stoppet")
|
||||
|
||||
return observers
|
||||
|
||||
def main():
|
||||
# registrer signal handlers for graceful shutdown
|
||||
signal.signal(signal.SIGINT, handle_shutdown)
|
||||
signal.signal(signal.SIGTERM, handle_shutdown)
|
||||
|
||||
logger.info("🚀 Starter worker-applikasjon")
|
||||
try:
|
||||
config: DatabaseConfig = DatabaseConfig.from_env()
|
||||
db: Database = Database(config)
|
||||
db.connect()
|
||||
|
||||
# paths og extensions fra PathsConfig
|
||||
from config.paths_config import PathsConfig
|
||||
paths_config = PathsConfig.from_env()
|
||||
paths_config.validate()
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.create_task(run_worker(db, paths_config.watch_paths, paths_config.extensions, lambda: shutdown_flag))
|
||||
|
||||
# bruk health_api
|
||||
app = create_health_app(lambda: observers)
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Kritisk feil i app: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
logger.info("👋 Worker avsluttet gracefully")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
29
apps/pyWatcher/config/database_config.py
Normal file
29
apps/pyWatcher/config/database_config.py
Normal file
@ -0,0 +1,29 @@
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
|
||||
@dataclass
|
||||
class DatabaseConfig:
|
||||
address: str
|
||||
port: int
|
||||
name: str
|
||||
username: str
|
||||
password: str
|
||||
|
||||
@staticmethod
|
||||
def from_env() -> "DatabaseConfig":
|
||||
return DatabaseConfig(
|
||||
address=os.environ.get("DATABASE_ADDRESS") or "192.168.2.250",
|
||||
port=int(os.environ.get("DATABASE_PORT") or "3306"),
|
||||
name=os.environ.get("DATABASE_NAME_E") or "eventsV3",
|
||||
username=os.environ.get("DATABASE_USERNAME") or "root",
|
||||
password=os.environ.get("DATABASE_PASSWORD") or "def",
|
||||
)
|
||||
|
||||
def validate(self) -> None:
|
||||
if not self.address:
|
||||
raise ValueError("Database address mangler")
|
||||
if not self.name:
|
||||
raise ValueError("Database name mangler")
|
||||
if not self.username:
|
||||
raise ValueError("Database username mangler")
|
||||
# du kan legge til flere regler her
|
||||
31
apps/pyWatcher/config/paths_config.py
Normal file
31
apps/pyWatcher/config/paths_config.py
Normal file
@ -0,0 +1,31 @@
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import List
|
||||
|
||||
@dataclass
|
||||
class PathsConfig:
|
||||
watch_paths: List[str]
|
||||
extensions: List[str]
|
||||
|
||||
@staticmethod
|
||||
def from_env() -> "PathsConfig":
|
||||
# Paths kan legges inn som kommaseparert liste i miljøvariabel
|
||||
raw_paths = os.environ.get("WATCH_PATHS")
|
||||
paths = [p.strip() for p in raw_paths.split(",") if p.strip()]
|
||||
|
||||
# Extensions kan legges inn som kommaseparert liste
|
||||
raw_ext = os.environ.get("WATCH_EXTENSIONS") or ".mkv,.mp4,.avi"
|
||||
extensions = [e.strip() for e in raw_ext.split(",") if e.strip()]
|
||||
|
||||
return PathsConfig(watch_paths=paths, extensions=extensions)
|
||||
|
||||
def validate(self) -> None:
|
||||
if not self.watch_paths:
|
||||
raise ValueError("Ingen paths definert for overvåkning")
|
||||
for path in self.watch_paths:
|
||||
if not os.path.exists(path):
|
||||
raise ValueError(f"Path finnes ikke: {path}")
|
||||
if not os.path.isdir(path):
|
||||
raise ValueError(f"Path er ikke en katalog: {path}")
|
||||
if not self.extensions:
|
||||
raise ValueError("Ingen filendelser definert for filtrering")
|
||||
0
apps/pyWatcher/db/__init__.py
Normal file
0
apps/pyWatcher/db/__init__.py
Normal file
53
apps/pyWatcher/db/database.py
Normal file
53
apps/pyWatcher/db/database.py
Normal file
@ -0,0 +1,53 @@
|
||||
from config.database_config import DatabaseConfig
|
||||
from utils.logger import logger
|
||||
import mysql.connector
|
||||
from mysql.connector import Error
|
||||
from utils.backoff import wait_with_backoff
|
||||
|
||||
class Database:
|
||||
def __init__(self, config: DatabaseConfig):
|
||||
self.config = config
|
||||
self.conn = None
|
||||
|
||||
def connect(self):
|
||||
"""Koble til DB med backoff."""
|
||||
self.config.validate()
|
||||
while True:
|
||||
try:
|
||||
self.conn = mysql.connector.connect(
|
||||
host=self.config.address,
|
||||
user=self.config.username,
|
||||
password=self.config.password,
|
||||
database=self.config.name
|
||||
)
|
||||
if self.conn.is_connected():
|
||||
logger.info("✅ Tilkoblet til databasen")
|
||||
return
|
||||
except Error as e:
|
||||
logger.error(f"❌ DB-tilkobling feilet: {e}")
|
||||
for _ in wait_with_backoff():
|
||||
try:
|
||||
self.conn = mysql.connector.connect(
|
||||
host=self.config.address,
|
||||
user=self.config.username,
|
||||
password=self.config.password,
|
||||
database=self.config.name
|
||||
)
|
||||
if self.conn.is_connected():
|
||||
logger.info("✅ Tilkoblet til databasen")
|
||||
return
|
||||
except Error:
|
||||
continue
|
||||
|
||||
def validate(self):
|
||||
"""Sjekk at tilkoblingen er aktiv."""
|
||||
if not self.conn or not self.conn.is_connected():
|
||||
logger.warning("⚠️ Tilkobling mistet, prøver igjen...")
|
||||
self.connect()
|
||||
|
||||
def query(self, sql: str, params=None):
|
||||
"""Kjør en spørring med validering."""
|
||||
self.validate()
|
||||
cursor = self.conn.cursor(dictionary=True)
|
||||
cursor.execute(sql, params or ())
|
||||
return cursor.fetchall()
|
||||
55
apps/pyWatcher/db/repository.py
Normal file
55
apps/pyWatcher/db/repository.py
Normal file
@ -0,0 +1,55 @@
|
||||
from datetime import datetime
|
||||
import json
|
||||
from typing import List, Optional
|
||||
from db.database import Database
|
||||
from models.event import FileAddedEvent
|
||||
from utils.logger import logger
|
||||
from models.event import Event, FileAddedEvent
|
||||
|
||||
def insert_event(db: Database, event: Event) -> None:
|
||||
"""Persistér et Event til Events-tabellen."""
|
||||
db.validate()
|
||||
sql = """
|
||||
INSERT INTO Events(reference_id, event_id, event, data, persisted_at)
|
||||
VALUES (%s, %s, %s, %s, NOW())
|
||||
"""
|
||||
with db.conn.cursor() as cursor:
|
||||
cursor.execute(
|
||||
sql,
|
||||
(event.referenceId, event.eventId, event.__class__.__name__, event.model_dump_json())
|
||||
)
|
||||
db.conn.commit()
|
||||
logger.info(f"📦 Event persisted: {event.__class__.__name__} ({event.referenceId})")
|
||||
|
||||
def get_open_added_events(db: Database) -> List[FileAddedEvent]:
|
||||
"""
|
||||
Hent alle FileAddedEvent som fortsatt er 'åpne',
|
||||
dvs. ikke har en FileReadyEvent eller FileRemovedEvent.
|
||||
Returnerer en liste med FileAddedEvent-objekter.
|
||||
"""
|
||||
db.validate()
|
||||
sql = """
|
||||
SELECT e.reference_id, e.event_id, e.event, e.data
|
||||
FROM Events e
|
||||
WHERE e.event = 'FileAddedEvent'
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM Events r
|
||||
WHERE r.reference_id = e.reference_id
|
||||
AND r.event IN ('FileReadyEvent', 'FileRemovedEvent')
|
||||
)
|
||||
ORDER BY e.persisted_at ASC
|
||||
"""
|
||||
events: List[FileAddedEvent] = []
|
||||
with db.conn.cursor(dictionary=True) as cursor:
|
||||
cursor.execute(sql)
|
||||
rows = cursor.fetchall()
|
||||
for row in rows:
|
||||
# Bruk Pydantic v2 sin model_validate_json
|
||||
event = FileAddedEvent.model_validate_json(row["data"])
|
||||
# Overstyr referenceId og eventId fra kolonnene (sannhetskilde)
|
||||
event.referenceId = row["reference_id"]
|
||||
event.eventId = row["event_id"]
|
||||
events.append(event)
|
||||
|
||||
logger.info(f"🔎 Fant {len(events)} åpne FileAddedEvent uten Ready/Removed")
|
||||
return events
|
||||
39
apps/pyWatcher/models/event.py
Normal file
39
apps/pyWatcher/models/event.py
Normal file
@ -0,0 +1,39 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, Set
|
||||
from pydantic import BaseModel
|
||||
|
||||
# --- Metadata ---
|
||||
class Metadata(BaseModel):
|
||||
created: str
|
||||
derivedFromId: Optional[Set[str]] = None
|
||||
|
||||
# --- FileInfo ---
class FileInfo(BaseModel):
    # Base name of the file (e.g. "test.csv").
    fileName: str
    # Full path to the file as passed to create_event (despite the "Uri" name,
    # callers in this diff supply plain filesystem paths).
    fileUri: str
|
||||
|
||||
# --- Base Event ---
class Event(BaseModel):
    # Correlates all events concerning the same watched file; Added/Ready/
    # Removed events for one path share this id (see FileHandler.get_ref_id).
    referenceId: str
    # Unique id of this individual event occurrence.
    eventId: str
    # Creation time and provenance for the event.
    metadata: Metadata
|
||||
|
||||
# --- Specific events ---
class FileAddedEvent(Event):
    # File that appeared on disk (emitted by FileHandler.handle_created).
    data: FileInfo
|
||||
|
||||
class FileReadyEvent(Event):
    # File whose size stabilized across polls (emitted by utils.readiness.check_ready).
    data: FileInfo
|
||||
|
||||
class FileRemovedEvent(Event):
    # File that was deleted (emitted by FileHandler.handle_deleted).
    data: FileInfo
|
||||
|
||||
# --- Helper functions ---
def create_event(event_cls, file_name: str, file_uri: str, reference_id: Optional[str] = None) -> Event:
    """Construct an event of the given class with fresh ids and metadata.

    A new random referenceId is generated unless one is supplied (callers pass
    an existing id to correlate events for the same file). The eventId is
    always freshly generated.
    """
    ref = reference_id if reference_id is not None else str(uuid.uuid4())
    meta = Metadata(created=datetime.now().isoformat())
    info = FileInfo(fileName=file_name, fileUri=file_uri)
    return event_cls(
        referenceId=ref,
        eventId=str(uuid.uuid4()),
        metadata=meta,
        data=info,
    )
|
||||
13
apps/pyWatcher/requirements.txt
Normal file
13
apps/pyWatcher/requirements.txt
Normal file
@ -0,0 +1,13 @@
|
||||
cinemagoer>=2023.5.1
|
||||
AnilistPython>=0.1.3
|
||||
fuzzywuzzy>=0.18.0
|
||||
requests>=2.31.0
|
||||
python-Levenshtein>=0.21.1
|
||||
mal-api>=0.5.3
|
||||
Unidecode>=1.3.8
|
||||
tabulate>=0.9.0
|
||||
mysql-connector-python>=9.0.0
|
||||
pydantic>=2.12.5
|
||||
watchdog>=6.0.0
|
||||
fastapi==0.124.4
|
||||
uvicorn==0.38.0
|
||||
2
apps/pyWatcher/requirments-test.txt
Normal file
2
apps/pyWatcher/requirments-test.txt
Normal file
@ -0,0 +1,2 @@
|
||||
pytest==9.0.2
|
||||
pytest-asyncio==1.3.0
|
||||
23
apps/pyWatcher/tests/test_file_handler.py
Normal file
23
apps/pyWatcher/tests/test_file_handler.py
Normal file
@ -0,0 +1,23 @@
|
||||
import os
|
||||
import pytest
|
||||
from utils.file_handler import FileHandler
|
||||
from models.event import FileAddedEvent, FileRemovedEvent
|
||||
|
||||
def test_handle_created_returns_event_for_valid_extension(tmp_path):
    """A created file with a watched extension yields a FileAddedEvent."""
    target = tmp_path / "test.csv"
    target.write_text("dummy")
    watcher = FileHandler(extensions={".csv"})

    event = watcher.handle_created(str(target))

    assert isinstance(event, FileAddedEvent)
    assert event.data.fileName == "test.csv"
    assert event.data.fileUri == str(target)
|
||||
|
||||
def test_handle_deleted_returns_event_for_valid_extension(tmp_path):
    """A deleted file with a watched extension yields a FileRemovedEvent."""
    target = tmp_path / "test.csv"
    target.write_text("dummy")
    watcher = FileHandler(extensions={".csv"})

    event = watcher.handle_deleted(str(target))

    assert isinstance(event, FileRemovedEvent)
    assert event.data.fileName == "test.csv"
|
||||
23
apps/pyWatcher/tests/test_readiness.py
Normal file
23
apps/pyWatcher/tests/test_readiness.py
Normal file
@ -0,0 +1,23 @@
|
||||
import asyncio
|
||||
import pytest
|
||||
from utils.readiness import file_is_ready, check_ready
|
||||
from models.event import FileReadyEvent
|
||||
|
||||
@pytest.mark.asyncio
async def test_check_ready_creates_event(tmp_path):
    """check_ready should persist and return a FileReadyEvent for a stable file."""
    target = tmp_path / "test.csv"
    target.write_text("dummy")

    captured = []

    def fake_insert(db, ev):
        # Record what check_ready tries to persist instead of hitting a DB.
        captured.append(ev)

    result = await check_ready(
        db=None,
        ref_id="ref123",
        file_name="test.csv",
        file_uri=str(target),
        insert_event=fake_insert,
    )

    assert isinstance(result, FileReadyEvent)
    assert result.referenceId == "ref123"
    assert captured[0] == result
|
||||
32
apps/pyWatcher/tests/test_repository.py
Normal file
32
apps/pyWatcher/tests/test_repository.py
Normal file
@ -0,0 +1,32 @@
|
||||
import json
|
||||
from models.event import create_event, FileAddedEvent
|
||||
from db.repository import get_open_added_events
|
||||
|
||||
class FakeCursor:
    """DB-API cursor stand-in: ignores queries and returns canned rows."""

    def __init__(self, rows):
        self.rows = rows

    def execute(self, sql, params=None):
        # Statements are accepted and discarded.
        pass

    def fetchall(self):
        return self.rows

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        pass
|
||||
|
||||
class FakeConn:
    """Connection stand-in exposing a single reusable FakeCursor."""

    def __init__(self, rows):
        self.cursor_obj = FakeCursor(rows)

    def cursor(self, dictionary=True):
        # The dictionary flag is accepted for API compatibility and ignored.
        return self.cursor_obj
|
||||
|
||||
class FakeDB:
    """Database stand-in: validate() is a no-op, conn serves canned rows."""

    def __init__(self, rows):
        self.conn = FakeConn(rows)

    def validate(self):
        pass
|
||||
|
||||
def test_get_open_added_events_returns_typed_objects():
    """Rows returned by the fake DB should be parsed into FileAddedEvent objects."""
    added = create_event(FileAddedEvent, "test.csv", "/tmp/test.csv", reference_id="ref123")
    fake_row = {
        "reference_id": added.referenceId,
        "event_id": added.eventId,
        "event": "FileAddedEvent",
        "data": added.model_dump_json(),
    }

    result = get_open_added_events(FakeDB([fake_row]))

    assert isinstance(result[0], FileAddedEvent)
    assert result[0].data.fileName == "test.csv"
|
||||
20
apps/pyWatcher/tests/test_shutdown.py
Normal file
20
apps/pyWatcher/tests/test_shutdown.py
Normal file
@ -0,0 +1,20 @@
|
||||
import asyncio
|
||||
import pytest
|
||||
from app import run_worker
|
||||
|
||||
class FakeObserver:
    """Minimal watchdog-observer stand-in that records whether stop() ran."""

    def __init__(self):
        self.stopped = False

    def stop(self):
        self.stopped = True

    def join(self, timeout=None):
        pass
|
||||
|
||||
@pytest.mark.asyncio
async def test_run_worker_stops_on_shutdown(monkeypatch):
    """run_worker must stop its observer once the shutdown flag reports True."""
    observer = FakeObserver()

    # Patch start_observer as imported in app.py so no real watcher starts.
    monkeypatch.setattr("app.start_observer", lambda *args, **kwargs: observer)

    await run_worker(
        db=object(),
        paths=["/tmp"],
        extensions={".csv"},
        shutdown_flag_ref=lambda: True,
    )

    # The worker loop must have stopped the observer before returning.
    assert observer.stopped
|
||||
11
apps/pyWatcher/utils/backoff.py
Normal file
11
apps/pyWatcher/utils/backoff.py
Normal file
@ -0,0 +1,11 @@
|
||||
from utils.logger import logger
|
||||
import time
|
||||
|
||||
def retry_delays():
    """Escalating wait times, in seconds, used between retry attempts."""
    return [5, 15, 30, 60]
|
||||
|
||||
def wait_with_backoff(delays=None):
    """Generator that sleeps through an escalating series of delays.

    Args:
        delays: optional iterable of delay lengths in seconds. Defaults to
            retry_delays() so existing callers keep the 5/15/30/60 schedule.

    Yields:
        None after each completed sleep, letting the caller retry between
        waits; the generator is exhausted once all delays have been consumed.
    """
    if delays is None:
        delays = retry_delays()
    for delay in delays:
        logger.info(f"⏳ Venter {delay} sekunder...")
        time.sleep(delay)
        yield
|
||||
26
apps/pyWatcher/utils/file_handler.py
Normal file
26
apps/pyWatcher/utils/file_handler.py
Normal file
@ -0,0 +1,26 @@
|
||||
import os
from typing import Optional

from models.event import Event, create_event, FileAddedEvent, FileRemovedEvent
|
||||
|
||||
class FileHandler:
    """Turns filesystem created/deleted notifications into watcher events.

    A reference id is cached per path so that a FileRemovedEvent shares the
    referenceId of the FileAddedEvent previously emitted for the same path.
    """

    def __init__(self, extensions):
        # Watched file extensions, each including the leading dot (e.g. ".csv").
        self.extensions = extensions
        # path -> reference id, so repeated events for one path reuse the same id.
        self.file_refs = {}

    def get_ref_id(self, path: str) -> str:
        """Return the stable reference id for path, creating one on first use.

        The id is the file's base name plus a random hex suffix for uniqueness.
        """
        if path in self.file_refs:
            return self.file_refs[path]
        ref_id = os.path.basename(path) + "-" + os.urandom(4).hex()
        self.file_refs[path] = ref_id
        return ref_id

    def _accepted(self, path: str) -> bool:
        """True when the path's extension is one we watch."""
        return os.path.splitext(path)[1] in self.extensions

    def handle_created(self, path: str) -> Optional[FileAddedEvent]:
        """Build a FileAddedEvent for path, or None for unwatched extensions."""
        if not self._accepted(path):
            return None
        ref_id = self.get_ref_id(path)
        return create_event(FileAddedEvent, os.path.basename(path), path, reference_id=ref_id)

    def handle_deleted(self, path: str) -> Optional[FileRemovedEvent]:
        """Build a FileRemovedEvent for path, or None for unwatched extensions."""
        if not self._accepted(path):
            return None
        ref_id = self.get_ref_id(path)
        return create_event(FileRemovedEvent, os.path.basename(path), path, reference_id=ref_id)
|
||||
32
apps/pyWatcher/utils/logger.py
Normal file
32
apps/pyWatcher/utils/logger.py
Normal file
@ -0,0 +1,32 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
# ANSI escape codes used to color the [LEVEL] prefix per log level.
COLORS = {
    "INFO": "\033[94m",  # blue
    "DEBUG": "\033[92m",  # green
    "WARNING": "\033[93m",  # yellow
    "ERROR": "\033[91m",  # red
    "RESET": "\033[0m"  # clears any active color
}
|
||||
|
||||
class ColoredFormatter(logging.Formatter):
    """Formatter that prepends an ANSI-colored [LEVEL] tag to each record."""

    def format(self, record):
        # Unknown levels fall back to the (invisible) RESET code.
        tag_color = COLORS.get(record.levelname, COLORS["RESET"])
        body = super().format(record)
        return f"{tag_color}[{record.levelname}]{COLORS['RESET']} {body}"
|
||||
|
||||
def setup_logger(level=logging.INFO):
    """Configure the root logger with a single colored stdout handler.

    Returns the root logger for convenience.
    """
    stream = logging.StreamHandler(sys.stdout)
    stream.setFormatter(ColoredFormatter("%(asctime)s - %(name)s - %(message)s"))

    root = logging.getLogger()
    root.setLevel(level)
    # Replace (not append to) any existing handlers so repeated calls
    # never duplicate output.
    root.handlers = [stream]
    return root
|
||||
|
||||
# Create the module-wide logger shared by the other pyWatcher modules
# (the configured root logger with colored stdout output).
logger: logging.Logger = setup_logger()
|
||||
21
apps/pyWatcher/utils/readiness.py
Normal file
21
apps/pyWatcher/utils/readiness.py
Normal file
@ -0,0 +1,21 @@
|
||||
import os
|
||||
import asyncio
|
||||
from models.event import create_event, FileReadyEvent
|
||||
|
||||
async def file_is_ready(path: str, wait: float = 1.0) -> bool:
    """Heuristic stability check: True if the file size is unchanged after `wait` seconds."""
    try:
        before = os.path.getsize(path)
        await asyncio.sleep(wait)
        return before == os.path.getsize(path)
    except Exception:
        # Best-effort: a missing/unreadable file (e.g. removed mid-check)
        # simply counts as not ready.
        return False
|
||||
|
||||
async def check_ready(db, ref_id: str, file_name: str, file_uri: str, insert_event,
                      attempts: int = 5, delay: float = 2.0):
    """Poll until the file looks stable, then persist and return a FileReadyEvent.

    Sleeps `delay` seconds before each of up to `attempts` stability checks
    (defaults keep the original 5 x 2 s schedule). On the first check where the
    file size is stable, a FileReadyEvent carrying `ref_id` is created, handed
    to `insert_event(db, event)`, and returned.

    Returns:
        The persisted FileReadyEvent, or None if the file never became ready
        within the allotted attempts.
    """
    for _ in range(attempts):
        await asyncio.sleep(delay)
        if await file_is_ready(file_uri):
            ev = create_event(FileReadyEvent, file_name, file_uri, reference_id=ref_id)
            insert_event(db, ev)
            return ev
    return None
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user