v3 28
Commit 40918ad008 (parent 4a47823b09)

@@ -31,6 +31,9 @@ class ConvertWorkTaskListener: WorkTaskListener() {
         Events.EventWorkExtractPerformed
     )

+    override fun canProduceMultipleEvents(): Boolean {
+        return true
+    }
     override fun shouldIProcessAndHandleEvent(incomingEvent: Event, events: List<Event>): Boolean {
         if (!isOfEventsIListenFor(incomingEvent))
             return false

@@ -4,6 +4,7 @@ import mu.KotlinLogging
 import no.iktdev.eventi.core.ConsumableEvent
 import no.iktdev.eventi.core.WGson
 import no.iktdev.eventi.data.EventStatus
+import no.iktdev.eventi.data.isSuccessful
 import no.iktdev.mediaprocessing.coordinator.Coordinator
 import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
 import no.iktdev.mediaprocessing.shared.common.parsing.NameHelper
@@ -23,13 +24,11 @@ class CoverFromMetadataTaskListener: CoordinatorEventListener() {

     override val produceEvent: Events = Events.EventMediaReadOutCover
     override val listensForEvents: List<Events> = listOf(
-        Events.EventMediaMetadataSearchPerformed,
-        Events.EventMediaReadOutNameAndType
+        Events.EventMediaMetadataSearchPerformed
     )

     override fun isPrerequisitesFulfilled(incomingEvent: Event, events: List<Event>): Boolean {
-        return events.any { it.eventType == Events.EventMediaMetadataSearchPerformed } &&
-                events.any { it.eventType == Events.EventMediaReadOutNameAndType }
+        return (events.any { it.eventType == Events.EventMediaReadOutNameAndType && it.isSuccessful() })
     }

     override fun shouldIProcessAndHandleEvent(incomingEvent: Event, events: List<Event>): Boolean {
@@ -37,6 +36,8 @@ class CoverFromMetadataTaskListener: CoordinatorEventListener() {
         if (!state) {
             return false
         }
+        if (!incomingEvent.isSuccessful())
+            return false
         return incomingEvent.eventType in listensForEvents
     }

@@ -54,11 +55,9 @@ class CoverFromMetadataTaskListener: CoordinatorEventListener() {
             return
         }

-        val metadata = events.findLast { it.eventType == Events.EventMediaMetadataSearchPerformed }?.az<MediaMetadataReceivedEvent>()?.data
-        if (metadata == null) {
-            //log.info { "No metadata.." }
-            return
-        }
+        val metadataEvent = if (event.eventType == Events.EventMediaMetadataSearchPerformed) event else events.findLast { it.eventType == Events.EventMediaMetadataSearchPerformed }
+        val metadata = metadataEvent?.az<MediaMetadataReceivedEvent>()?.data
+            ?: return
         val mediaOutInfo = events.find { it.eventType == Events.EventMediaReadOutNameAndType }?.az<MediaOutInformationConstructedEvent>()?.data
         if (mediaOutInfo == null) {
             log.info { "No Media out info" }

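The cover listener now resolves its metadata in two steps: it prefers the incoming event when that event already is the EventMediaMetadataSearchPerformed result, otherwise it falls back to the most recent matching event in the stored history, and it bails out with `?: return` when neither yields data. A minimal Kotlin sketch of that resolution order follows; StubEvent and resolveMetadata are simplified stand-ins for illustration, not types from this repository.

// Sketch only: StubEvent stands in for the repository's Event/az<T>() types.
data class StubEvent(val eventType: String, val payload: String?)

fun resolveMetadata(incoming: StubEvent, history: List<StubEvent>): String? {
    val metadataEvent =
        if (incoming.eventType == "EventMediaMetadataSearchPerformed") incoming
        else history.findLast { it.eventType == "EventMediaMetadataSearchPerformed" }
    // The listener's `?: return` maps to returning null here.
    return metadataEvent?.payload
}

fun main() {
    val history = listOf(
        StubEvent("EventMediaReadOutNameAndType", "out-info"),
        StubEvent("EventMediaMetadataSearchPerformed", "older metadata")
    )
    println(resolveMetadata(StubEvent("EventMediaMetadataSearchPerformed", "fresh metadata"), history)) // fresh metadata
    println(resolveMetadata(StubEvent("EventMediaReadOutNameAndType", null), history)) // older metadata
}
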
@@ -30,6 +30,9 @@ class EncodeWorkTaskListener : WorkTaskListener() {
         Events.EventMediaWorkProceedPermitted
     )

+    override fun canProduceMultipleEvents(): Boolean {
+        return true
+    }
     override fun onEventsReceived(incomingEvent: ConsumableEvent<Event>, events: List<Event>) {
         val event = incomingEvent.consume()
         if (event == null) {

@@ -30,6 +30,10 @@ class ExtractWorkTaskListener: WorkTaskListener() {
         Events.EventMediaWorkProceedPermitted
     )

+    override fun canProduceMultipleEvents(): Boolean {
+        return true
+    }
+
     override fun shouldIProcessAndHandleEvent(incomingEvent: Event, events: List<Event>): Boolean {
         val state = super.shouldIProcessAndHandleEvent(incomingEvent, events)
         return state

@@ -5,6 +5,7 @@ import no.iktdev.eventi.core.ConsumableEvent
 import no.iktdev.eventi.core.WGson
 import no.iktdev.eventi.data.EventMetadata
 import no.iktdev.eventi.data.EventStatus
+import no.iktdev.eventi.data.isSuccessful
 import no.iktdev.mediaprocessing.coordinator.CoordinatorEventListener
 import no.iktdev.mediaprocessing.coordinator.Coordinator
 import no.iktdev.eventi.database.toEpochSeconds
@@ -44,22 +45,31 @@ class MetadataWaitOrDefaultTaskListener() : CoordinatorEventListener() {
     val metadataTimeout = metadataTimeoutMinutes * 60
     val waitingProcessesForMeta: MutableMap<String, MetadataTriggerData> = mutableMapOf()


     /**
      * This one gets special treatment, since it will only produce a timeout it does not need to use the incoming event
      */
     override fun onEventsReceived(incomingEvent: ConsumableEvent<Event>, events: List<Event>) {
-        if (events.any { it.eventType == Events.EventMediaReadBaseInfoPerformed } &&
-            events.none { it.eventType == Events.EventMediaMetadataSearchPerformed } &&
-            !waitingProcessesForMeta.containsKey(incomingEvent.metadata().referenceId)) {
+        val hasReadBaseInfo = events.any { it.eventType == Events.EventMediaReadBaseInfoPerformed && it.isSuccessful() }
+        val hasMetadataSearched = events.any { it.eventType == Events.EventMediaMetadataSearchPerformed }
+        val hasPollerForMetadataEvent = waitingProcessesForMeta.containsKey(incomingEvent.metadata().referenceId)
+
+        if (!hasReadBaseInfo) {
+            return
+        }
+
+        if (hasPollerForMetadataEvent && hasMetadataSearched) {
+            waitingProcessesForMeta.remove(incomingEvent.metadata().referenceId)
+            return
+        }
+
+        if (!hasMetadataSearched && !hasPollerForMetadataEvent) {
             val consumedIncoming = incomingEvent.consume()
             if (consumedIncoming == null) {
                 log.error { "Event is null and should not be available nor provided! ${WGson.gson.toJson(incomingEvent.metadata())}" }
                 return
             }


             val baseInfo = events.find { it.eventType == Events.EventMediaReadBaseInfoPerformed}?.az<BaseInfoEvent>()?.data
             if (baseInfo == null) {
                 log.error { "BaseInfoEvent is null for referenceId: ${consumedIncoming.metadata.referenceId} on eventId: ${consumedIncoming.metadata.eventId}" }
@@ -70,21 +80,16 @@ class MetadataWaitOrDefaultTaskListener() : CoordinatorEventListener() {
             val dateTime = LocalDateTime.ofEpochSecond(estimatedTimeout, 0, ZoneOffset.UTC)

             val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm", Locale.ENGLISH)
-            if (!waitingProcessesForMeta.containsKey(consumedIncoming.metadata.referenceId)) {
             waitingProcessesForMeta[consumedIncoming.metadata.referenceId] =
                 MetadataTriggerData(consumedIncoming.metadata.eventId, LocalDateTime.now())

             log.info { "Sending ${baseInfo.title} to waiting queue. Expiry ${dateTime.format(formatter)}" }
         }
     }

-        if (events.any { it.eventType == Events.EventMediaMetadataSearchPerformed }
-            && waitingProcessesForMeta.containsKey(incomingEvent.metadata().referenceId)) {
-            waitingProcessesForMeta.remove(incomingEvent.metadata().referenceId)
-        }
-    }

-    @Scheduled(fixedDelay = (1_000))
+    @Scheduled(fixedDelay = (5_000))
     fun sendErrorMessageForMetadata() {
         val expired = waitingProcessesForMeta.filter {
             LocalDateTime.now().toEpochSeconds() > (it.value.executed.toEpochSeconds() + metadataTimeout)

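The wait-or-default listener registers processes in waitingProcessesForMeta, and the scheduled poller (now every 5 seconds instead of every second) sends the timeout for entries whose wait has exceeded metadataTimeout. Below is a rough sketch of that expiry filter, assuming the project's toEpochSeconds() helper behaves like LocalDateTime.toEpochSecond(ZoneOffset.UTC); MetadataWait is a simplified stand-in for MetadataTriggerData.

import java.time.LocalDateTime
import java.time.ZoneOffset

// MetadataWait stands in for MetadataTriggerData (eventId + time the wait started).
data class MetadataWait(val eventId: String, val executed: LocalDateTime)

fun expiredEntries(
    waiting: Map<String, MetadataWait>,
    timeoutSeconds: Long,
    now: LocalDateTime = LocalDateTime.now()
): Map<String, MetadataWait> {
    // Keep only entries whose start time plus the timeout lies in the past.
    return waiting.filter {
        now.toEpochSecond(ZoneOffset.UTC) > it.value.executed.toEpochSecond(ZoneOffset.UTC) + timeoutSeconds
    }
}

fun main() {
    val waiting = mapOf(
        "ref-1" to MetadataWait("evt-1", LocalDateTime.now().minusMinutes(45)),
        "ref-2" to MetadataWait("evt-2", LocalDateTime.now().minusMinutes(5))
    )
    // With a hypothetical 30-minute timeout only ref-1 has expired.
    println(expiredEntries(waiting, timeoutSeconds = 30 * 60).keys) // [ref-1]
}
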
@@ -163,7 +163,7 @@ class EncodeServiceV2(

         taskManager.markTaskAsCompleted(task.referenceId, task.eventId, Status.ERROR)

-        log.info { "Encode failed for ${task.referenceId}\n$message" }
+        log.error { "Encode failed for ${task.referenceId}\n$message" }
         tasks.onProduceEvent(EncodeWorkPerformedEvent(
             metadata = EventMetadata(
                 referenceId = task.referenceId,

@@ -135,7 +135,7 @@ class ExtractServiceV2(

         taskManager.markTaskAsCompleted(task.referenceId, task.eventId, Status.ERROR)

-        log.info { "Encode failed for ${task.referenceId}\n$message" }
+        log.error { "Extract failed for ${task.referenceId}\n$message" }
         tasks.onProduceEvent(
             ExtractWorkPerformedEvent(
                 metadata = EventMetadata(

@@ -11,6 +11,8 @@ from fuzzywuzzy import fuzz
 import mysql.connector
 from datetime import datetime

+import mysql.connector.cursor
+
 from algo.AdvancedMatcher import AdvancedMatcher
 from algo.SimpleMatcher import SimpleMatcher
 from algo.PrefixMatcher import PrefixMatcher
@@ -21,15 +23,17 @@ from sources.anii import Anii
 from sources.imdb import Imdb
 from sources.mal import Mal

+from mysql.connector.abstracts import MySQLConnectionAbstract
+from mysql.connector.pooling import PooledMySQLConnection
+from mysql.connector.types import RowType as MySqlRowType
+

 # Konfigurer Database
-events_server_address = os.environ.get("DATABASE_ADDRESS") or "127.0.0.1"
+events_server_address = os.environ.get("DATABASE_ADDRESS") or "192.168.2.250" # "127.0.0.1"
 events_server_port = os.environ.get("DATABASE_PORT") or "3306"
-events_server_database_name = os.environ.get("DATABASE_NAME_E") or "events"
+events_server_database_name = os.environ.get("DATABASE_NAME_E") or "eventsV3" # "events"
 events_server_username = os.environ.get("DATABASE_USERNAME") or "root"
-events_server_password = os.environ.get("DATABASE_PASSWORD") or "root"
+events_server_password = os.environ.get("DATABASE_PASSWORD") or "shFZ27eL2x2NoxyEDBMfDWkvFO" #"root"



@@ -44,26 +48,12 @@ logging.basicConfig(
 )
 logger = logging.getLogger(__name__)


 class EventsPullerThread(threading.Thread):
-    connector = None
     def __init__(self):
         super().__init__()
         self.shutdown = threading.Event()

-    def run(self) -> None:
-        logger.info(f"Using {events_server_address}:{events_server_port} on table: {events_server_database_name}")
-        while not self.shutdown.is_set():
-            connection = None
-            cursor = None
-            try:
-                connection = mysql.connector.connect(
-                    host=events_server_address,
-                    port=events_server_port,
-                    database=events_server_database_name,
-                    user=events_server_username,
-                    password=events_server_password
-                )
+    def getEventsAvailable(self, connection: PooledMySQLConnection | MySQLConnectionAbstract) -> List[MySqlRowType]:
         cursor = connection.cursor(dictionary=True)
         cursor.execute("""
             SELECT *
@@ -79,53 +69,122 @@ class EventsPullerThread(threading.Thread):
             )
             AND event = 'event:media-read-base-info:performed';
         """)
-                # not event:media-metadata-search:performed
-                for row in cursor.fetchall():
-                    if self.shutdown.is_set():
-                        break
-                    logger.info("Event found!")
-                    handler_thread = MessageHandlerThread(row)
-                    handler_thread.start()
+        row = cursor.fetchall()
+        cursor.close()
+        return row

+    def storeProducedEvent(self, connection: PooledMySQLConnection | MySQLConnectionAbstract, event: MediaEvent) -> bool:
+        return
+
+        try:
+            cursor = connection.cursor()
+
+            query = """
+                INSERT INTO events (referenceId, eventId, event, data)
+                VALUES (%s, %s, %s, %s)
+            """
+            cursor.execute(query, (
+                event.metadata.referenceId,
+                event.metadata.eventId,
+                "event:media-metadata-search:performed",
+                event_data_to_json(event)
+            ))
+            connection.commit()
+            cursor.close()
+            return True
+        except mysql.connector.Error as err:
+            logger.error("Error inserting into database: %s", err)
+            return False
+
+    def run(self) -> None:
+        logger.info(f"Using {events_server_address}:{events_server_port} on table: {events_server_database_name}")
+        while not self.shutdown.is_set():
+            producedMessage: bool = False
+
+            connection = mysql.connector.connect(
+                host=events_server_address,
+                port=events_server_port,
+                database=events_server_database_name,
+                user=events_server_username,
+                password=events_server_password
+            )
+            try:
+                rows = self.getEventsAvailable(connection=connection)
+                for row in rows:
+                    if (row is not None):
+                        try:
+                            referenceId = row["referenceId"]
+                            event = row["event"]
+                            logMessage = f"""
+                            ============================================================================
+                            Found message for: {referenceId} @ {event}
+                            ============================================================================"""
+                            logger.info(logMessage)
+
+                            event: MediaEvent = json_to_media_event(row["data"])
+                            producedEvent = MetadataEventHandler(row).run()
+
+                            producedMessage = f"""
+                            ============================================================================
+                            Producing message for: {referenceId} @ {event}
+                            {event_data_to_json(producedEvent)}
+                            ============================================================================"""
+                            logger.info(producedMessage)
+
+                            producedEvent = self.storeProducedEvent(connection=connection, event=producedEvent)
+
+                        except Exception as e:
+                            """Produce failure here"""
+                            logger.exception(e)
+                            producedEvent = MediaEvent(
+                                metadata = EventMetadata(
+                                    referenceId=event.metadata.referenceId,
+                                    eventId=str(uuid.uuid4()),
+                                    derivedFromEventId=event.metadata.eventId,
+                                    status= "Failed",
+                                    created= datetime.now().isoformat()
+                                ),
+                                data=None,
+                                eventType="EventMediaMetadataSearchPerformed"
+                            )
+                            self.storeProducedEvent(connection=connection, event=producedEvent)
+
             except mysql.connector.Error as err:
                 logger.error("Database error: %s", err)
             finally:
-                if cursor:
-                    cursor.close()
                 if connection:
                     connection.close()
+                    connection = None
             # Introduce a small sleep to reduce CPU usage
             time.sleep(5)
             if (self.shutdown.is_set()):
                 logger.info("Shutdown is set..")


     def stop(self):
         self.shutdown.set()
         global should_stop
         should_stop = True

-# Kafka message handler-klasse
-class MessageHandlerThread(threading.Thread):
+class MetadataEventHandler():
     mediaEvent: MediaEvent | None = None
-    def __init__(self, row):
+    def __init__(self, data: MediaEvent):
         super().__init__()
-        jsonData = row['data']
-        logger.info(jsonData)
-        self.mediaEvent = json_to_media_event(jsonData)
+        self.mediaEvent = None
+        self.mediaEvent = data
         logger.info(self.mediaEvent)

-    def run(self):
-        logger.info("Starting processing")
+    def run(self) -> MediaEvent:
+        logger.info("Starting search")
         if (self.mediaEvent is None):
             logger.error("Event does not contain anything...")
             return

         event: MediaEvent = self.mediaEvent

-        logger.info("Processing event: event=%s, value=%s", event.eventType, event)
-
         searchableTitles: List[str] = event.data.searchTitles
         searchableTitles.extend([
             event.data.title,
@@ -154,11 +213,7 @@ class MessageHandlerThread(threading.Thread):
             data=result,
             eventType="EventMediaMetadataSearchPerformed"
         )
+        return producedEvent

-        logger.info("<== Outgoing message: %s \n%s", event.eventType, event_data_to_json(producedEvent))
-        self.insert_into_database(producedEvent, "event:media-metadata-search:performed")
-
     def __getMetadata(self, titles: List[str]) -> Metadata | None:
@@ -186,34 +241,6 @@ class MessageHandlerThread(threading.Thread):
         return prefixSelector
         return None

-    def insert_into_database(self, event: MediaEvent, eventKey: str):
-        try:
-            connection = mysql.connector.connect(
-                host=events_server_address,
-                port=events_server_port,
-                database=events_server_database_name,
-                user=events_server_username,
-                password=events_server_password
-            )
-            cursor = connection.cursor()
-
-            query = """
-                INSERT INTO events (referenceId, eventId, event, data)
-                VALUES (%s, %s, %s, %s)
-            """
-            cursor.execute(query, (
-                event.metadata.referenceId,
-                event.metadata.eventId,
-                eventKey,
-                event_data_to_json(event)
-            ))
-            connection.commit()
-            cursor.close()
-            connection.close()
-            logger.info("Storing event")
-        except mysql.connector.Error as err:
-            logger.error("Error inserting into database: %s", err)
-
 # Global variabel for å indikere om applikasjonen skal avsluttes
 should_stop = False

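The Python puller now funnels every produced event through storeProducedEvent, which issues a single parameterized INSERT on an already-open connection instead of each handler opening its own. For comparison only, a rough Kotlin/JDBC sketch of the same statement is given below; the JDBC URL, the credentials, and the helper name are assumptions for illustration, not taken from this commit.

import java.sql.DriverManager

// Hypothetical helper mirroring the parameterized INSERT used by the Python service.
fun storeProducedEvent(referenceId: String, eventId: String, eventKey: String, json: String): Boolean {
    val url = "jdbc:mysql://127.0.0.1:3306/events" // placeholder connection string
    return try {
        DriverManager.getConnection(url, "root", "root").use { connection ->
            connection.prepareStatement(
                "INSERT INTO events (referenceId, eventId, event, data) VALUES (?, ?, ?, ?)"
            ).use { statement ->
                statement.setString(1, referenceId)
                statement.setString(2, eventId)
                statement.setString(3, eventKey)
                statement.setString(4, json)
                statement.executeUpdate() == 1
            }
        }
    } catch (e: java.sql.SQLException) {
        false
    }
}
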
@@ -31,6 +31,7 @@ class SourceBase(ABC):
         partialMatch = fuzz.ratio(title, clean_foundTitle) if clean_foundTitle is not None else 0

         if directMatch >= 60:
+            log.info(f"{source} -> Direct Match for '{title}' of '{foundTitle}' on part '{clean_foundTitle}' with direct score: {directMatch} and partial {partialMatch}")
             return True
         elif partialMatch >= 80:
             log.info(f"{source} -> Partial Match for '{title}' of '{foundTitle}' on part '{clean_foundTitle}' with direct score: {directMatch} and partial {partialMatch}")
@@ -40,13 +41,36 @@ class SourceBase(ABC):
         return False

+    def getMatchingOnTitleWords(self, idToTitle: dict[str, str], titles: List[str]) -> dict[str, str]:
+        matched_idToTitle = {}
+
+        for title in titles:
+            title_words = set(title.split())
+            for id, stored_title in idToTitle.items():
+                stored_title_words = set(stored_title.split())
+                if title_words & stored_title_words:  # sjekker om det er et felles ord
+                    score = fuzz.token_set_ratio(title, stored_title)
+                    if score >= 75:
+                        matched_idToTitle[id] = (stored_title, score)
+
+        # Returnerer den originale dict med score 0 hvis ingen titler matcher
+        if not matched_idToTitle:
+            for id, stored_title in idToTitle.items():
+                matched_idToTitle[id] = (stored_title, 0)
+
+        # Returnerer den originale dict hvis ingen titler matcher
+        return matched_idToTitle if matched_idToTitle else idToTitle
+
     def findBestMatchAcrossTitles(self, idToTitle: dict[str, str], titles: List[str]) -> Tuple[str, str]:
+        # Få den filtrerte eller originale idToTitle basert på ordmatching
+        filtered_idToTitle = self.getMatchingOnTitleWords(idToTitle, titles)
+
         best_match_id = ""
         best_match_title = ""
         best_ratio = 0

         for title in titles:
-            for id, stored_title in idToTitle.items():
+            for id, stored_title in filtered_idToTitle.items():
                 ratio = fuzz.ratio(title, stored_title)
                 if ratio > best_ratio:
                     best_ratio = ratio

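getMatchingOnTitleWords adds a cheap word-overlap prefilter in front of the fuzzy scoring: only candidates that share at least one word with a query title get scored, and the search falls back to the unfiltered set when nothing clears the threshold. The Kotlin sketch below mirrors that idea with a naive shared-word ratio standing in for fuzz.token_set_ratio; the names and the scorer are illustrative, not the service's actual code.

// Naive stand-in for fuzz.token_set_ratio: share of the shorter title's words
// that also appear in the other title, scaled to 0..100.
fun tokenOverlapScore(a: String, b: String): Int {
    val wa = a.lowercase().split(" ").filter { it.isNotBlank() }.toSet()
    val wb = b.lowercase().split(" ").filter { it.isNotBlank() }.toSet()
    if (wa.isEmpty() || wb.isEmpty()) return 0
    return 100 * (wa intersect wb).size / minOf(wa.size, wb.size)
}

fun prefilterByTitleWords(
    idToTitle: Map<String, String>,
    titles: List<String>,
    threshold: Int = 75
): Map<String, Pair<String, Int>> {
    val matched = mutableMapOf<String, Pair<String, Int>>()
    for (title in titles) {
        val words = title.lowercase().split(" ").toSet()
        for ((id, stored) in idToTitle) {
            // Only score candidates that share at least one word with the query title.
            if (words.intersect(stored.lowercase().split(" ").toSet()).isNotEmpty()) {
                val score = tokenOverlapScore(title, stored)
                if (score >= threshold) matched[id] = stored to score
            }
        }
    }
    // Mirror the Python fallback: keep every candidate (score 0) when nothing matched.
    return if (matched.isEmpty()) idToTitle.mapValues { it.value to 0 } else matched
}

fun main() {
    val candidates = mapOf("42" to "Fullmetal Alchemist Brotherhood", "7" to "Cowboy Bebop")
    println(prefilterByTitleWords(candidates, listOf("Fullmetal Alchemist")))
    // {42=(Fullmetal Alchemist Brotherhood, 100)}
}
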
@@ -38,6 +38,11 @@ abstract class EventListenerImpl<T: EventImpl, E: EventsManagerImpl<T>> {
         return false
     }

+    open fun canProduceMultipleEvents(): Boolean {
+        return false
+    }
+
+
     open fun haveProducedExpectedMessageBasedOnEvent(incomingEvent: T, events: List<T>): Boolean {
         val eventsProducedByListener = events.filter { it.eventType == produceEvent }
         val triggeredBy = events.filter { it.eventType in listensForEvents }
@@ -62,6 +67,10 @@ abstract class EventListenerImpl<T: EventImpl, E: EventsManagerImpl<T>> {
         if (haveProducedExpectedMessageBasedOnEvent(incomingEvent, events))
             return false

+        if (events.any { it.eventType == produceEvent } && !canProduceMultipleEvents()) {
+            return false
+        }
+
         //val isDerived = events.any { it.metadata.derivedFromEventId == incomingEvent.metadata.eventId } // && incomingEvent.eventType == produceEvent
         return true
     }

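The base listener gains canProduceMultipleEvents(), defaulting to false, and shouldIProcessAndHandleEvent now refuses to run again once an event of the listener's produceEvent type already exists, unless the listener opts in the way the Convert, Encode and Extract work-task listeners above do. A minimal, self-contained Kotlin sketch of that guard follows; MiniEvent and MiniListener are simplified stand-ins, not the repository's EventListenerImpl.

// Sketch only: simplified event and listener types to show the guard's effect.
enum class MiniEventType { ExtractPerformed, EncodeArgumentCreated }

data class MiniEvent(val eventType: MiniEventType)

abstract class MiniListener {
    abstract val produceEvent: MiniEventType

    // Mirrors the new hook: listeners that may emit several events override this.
    open fun canProduceMultipleEvents(): Boolean = false

    fun shouldProcess(events: List<MiniEvent>): Boolean {
        // Mirrors the added guard: stop once our event type exists,
        // unless multiple productions are explicitly allowed.
        if (events.any { it.eventType == produceEvent } && !canProduceMultipleEvents()) {
            return false
        }
        return true
    }
}

class SingleShotListener : MiniListener() {
    override val produceEvent = MiniEventType.EncodeArgumentCreated
}

class MultiShotListener : MiniListener() {
    override val produceEvent = MiniEventType.ExtractPerformed
    override fun canProduceMultipleEvents(): Boolean = true
}

fun main() {
    val produced = listOf(
        MiniEvent(MiniEventType.EncodeArgumentCreated),
        MiniEvent(MiniEventType.ExtractPerformed)
    )
    println(SingleShotListener().shouldProcess(produced)) // false: already produced once
    println(MultiShotListener().shouldProcess(produced))  // true: allowed to produce again
}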