Compare commits

..

660 Commits
master ... v5

Author SHA1 Message Date
866f7f228c Track selection 2026-02-02 21:43:11 +01:00
5babd17d18 New Eventi version 2026-02-02 19:36:30 +01:00
b3d1b866c2 Missing alt titles fix 2026-02-02 18:16:29 +01:00
7c2ab47c98 Missing alt titles 2026-02-02 18:15:34 +01:00
fc0ae7bedd Utropstegn 2026-02-02 13:05:54 +01:00
b8562f48e7 Collect fix 2 2026-02-02 04:36:38 +01:00
1116f80066 Collect fix 2026-02-02 03:17:46 +01:00
2979924068 Throw 2026-02-02 01:29:08 +01:00
14633fe72c Missing call 2026-02-02 01:10:35 +01:00
35111ad7fa Unfuck tests 2026-02-02 00:47:51 +01:00
174ac51736 Unfuck 2026-02-02 00:31:53 +01:00
d76388c28b Fixes 2 2026-02-01 23:40:57 +01:00
dcad56a4b4 Fixes 2026-02-01 23:29:33 +01:00
3339f9a2ce Start validation 2026-02-01 20:16:15 +01:00
2c7af614d6 Lib 2026-02-01 20:06:45 +01:00
e69f8b7ff8 UI Wip 2026-02-01 18:06:30 +01:00
701c939e8d Transliterating 2026-02-01 17:59:16 +01:00
21f6f8c83b Store path for subtitle 2026-02-01 14:32:55 +01:00
46b5454a9d Store path for subtitle 2026-02-01 14:26:03 +01:00
42f8d9a83b Path in test 2026-02-01 05:45:21 +01:00
54960414fe Path 2026-02-01 05:36:28 +01:00
a13b949c9b Adjusted Collection 2026-02-01 05:26:29 +01:00
22627c387a Moved to props + fixing abandoned issue 2026-02-01 03:48:33 +01:00
7fc45625bd Correcting abandoned 2026-02-01 02:15:34 +01:00
f00c45d38b Hiding deleted 2026-02-01 01:50:33 +01:00
a49661736b Signal event 2026-02-01 00:56:52 +01:00
6b47327f87 Fixed in migration 2026-02-01 00:43:49 +01:00
7cc7679746 Check 2026-01-31 23:46:46 +01:00
ec20ff0b02 Corrected inheritance 2026-01-31 21:44:37 +01:00
438044faaf Fixed projection 2026-01-31 21:31:07 +01:00
ed6bddb95f Fixed state reporting and producing 2026-01-31 20:53:10 +01:00
6dab106f2a Correct usage 2026-01-31 13:35:16 +01:00
15f8c4c076 Minor changes 2026-01-31 13:26:41 +01:00
c39366960f Allowing continue 2026-01-31 12:52:35 +01:00
8a042a3fd3 Exposing on hold 2026-01-31 12:22:13 +01:00
44cd5c2182 Fixed path 2026-01-31 11:35:56 +01:00
7cb2eed79d More Health 2026-01-31 11:25:36 +01:00
c8aeb3759f Health stuff 2026-01-31 10:45:52 +01:00
4e7fd5a9f7 Fix 2 2026-01-31 09:51:48 +01:00
767372be54 Fix 2026-01-31 09:20:53 +01:00
10ffc77e7c Fetching global last 2026-01-31 09:04:01 +01:00
768a162ac0 Added health status 2026-01-31 08:30:27 +01:00
30b5d6907c Added more tasks 2026-01-30 22:42:42 +01:00
84fdb98885 Missing return event 2026-01-30 15:14:20 +01:00
6735ab54a5 Projection correction 2026-01-30 15:09:58 +01:00
6a423365cd Projection + aggregation change 2026-01-30 14:39:37 +01:00
8f06f8682d Blocking collect on collect 2026-01-30 14:27:09 +01:00
c5e379a65b Sort 2026-01-30 04:42:09 +01:00
ace6a66455 Fixing missing referenceId usage 2026-01-30 04:18:37 +01:00
811004096b Effective 2026-01-30 04:00:05 +01:00
c54944b4ad Version 2026-01-30 03:17:20 +01:00
0693505d73 Missing derived 2026-01-30 02:19:26 +01:00
4d3a4c9e43 Version + fixes 2026-01-30 02:17:35 +01:00
894288ec0e autocommit everything 2026-01-30 01:41:52 +01:00
1badf860dc Version 2026-01-30 01:39:35 +01:00
ad8e3a669b Resetting exited on enter 2026-01-30 01:20:18 +01:00
1dc1d15652 Cancelled handling 2026-01-30 01:17:49 +01:00
365c131a1e pyton 2026-01-30 00:17:19 +01:00
7d0027f5e6 Metadata - health auto refresh 2026-01-29 23:49:26 +01:00
7f8e7c44bc Metadata 2026-01-29 23:44:38 +01:00
921f724157 Metadata change 2026-01-29 23:32:00 +01:00
9e3a938ce4 Fixed wrong query 2026-01-29 23:15:08 +01:00
ea75487682 Wrong mapper 2026-01-29 19:13:31 +01:00
dc87aa394c Task Reset 2026-01-29 19:05:16 +01:00
66f5e12a51 pyMetadata changes 2026-01-29 18:03:44 +01:00
8e2489c31e Version adjustment + correct status updates 2026-01-29 17:48:12 +01:00
71359254f2 Disabling re-consumption 2026-01-29 16:57:45 +01:00
1d43b9b0a0 Fixed typo 2026-01-29 07:37:09 +01:00
76fa917104 Changed SQL and expanded to multi key search/filter 2026-01-29 07:28:14 +01:00
1ca711a3e2 Fixed sql 2026-01-29 07:04:14 +01:00
a782ad4f8a Query changes 2026-01-29 06:28:29 +01:00
49a3002259 Query changes 2026-01-29 04:42:49 +01:00
7343d51b15 Added last checking for consumed 2026-01-29 03:12:50 +01:00
cb2d6ac593 Changed name 2026-01-29 01:58:35 +01:00
965a05d2b7 Workspace 2026-01-28 22:53:48 +01:00
b94864b00a Exposing mode 2026-01-28 22:49:46 +01:00
534400c738 Minor tweaks 2026-01-25 17:21:02 +01:00
29923771fd Fixed Projection 2026-01-23 03:03:22 +01:00
c0de46b41a Fixed python time again.. and again.. and again.. 2026-01-23 02:14:19 +01:00
dd3de9b380 Fixed python time again.. and again.. 2026-01-23 02:06:16 +01:00
9780923e6a Fixed python time again.. 2026-01-23 01:56:05 +01:00
950d643325 Fixed python time 2026-01-23 01:43:23 +01:00
a86347aaa5 Migrated to instant 2026-01-23 01:13:11 +01:00
ab21fc76d9 Changed query 2026-01-23 00:17:50 +01:00
b58f980c04 Version 2026-01-23 00:09:10 +01:00
c58c3c175f Changed engine 2026-01-22 21:33:04 +01:00
adc715ed8e Removed Zulu 2026-01-22 19:21:05 +01:00
4eb19a51c4 UTC + new version 2026-01-22 19:04:14 +01:00
c5ab426e23 UTC 2026-01-22 14:24:14 +01:00
4044c04118 UTC 2026-01-22 14:22:12 +01:00
78237ed004 Fixes + version 2 2026-01-22 04:08:01 +01:00
5dfa73af08 Fixes + version 2026-01-22 03:03:27 +01:00
75acee51fd Version 2026-01-22 02:49:27 +01:00
99443bc288 Debugging 2026-01-22 02:02:31 +01:00
ca04e34525 Another rouge one 2026-01-22 01:10:52 +01:00
4708252aea Introduced logging in processer 2026-01-22 00:58:10 +01:00
8aeae926fb Adjusted field name 2026-01-22 00:53:41 +01:00
7206224cfa Fixed test 2026-01-22 00:47:17 +01:00
c3a6de00e3 Fixed param 2026-01-22 00:38:16 +01:00
e9750292ba Fixing sz 2026-01-22 00:31:48 +01:00
49bf4ad36d Added missing commit 2026-01-22 00:07:04 +01:00
807b39f24d Rouge event + flow block 2026-01-21 23:46:19 +01:00
089406962b Bugfix - wrong index used 2026-01-21 23:34:02 +01:00
ec3be9eae5 Updated version 2026-01-21 23:16:52 +01:00
642f0e6dd5 Added missing eac3 2026-01-21 22:52:03 +01:00
9d7c9de4ab Bugfixes 3 2026-01-21 22:32:38 +01:00
3c9dfd4276 Bugfixes 2 2026-01-21 22:20:09 +01:00
483c57af69 Bugfixes 2026-01-21 21:44:45 +01:00
7d5bcc40b0 Using correct dockerfile 2026-01-21 20:29:40 +01:00
742ae64d30 Added missing derive 2026-01-21 20:21:49 +01:00
30a067bb90 Added missing task poller in coordinator 2026-01-21 20:11:52 +01:00
d27d37c4db Adjusted flyway logging 2026-01-21 20:05:01 +01:00
d70238d3e0 Metadata app should not find other events 2026-01-21 20:04:53 +01:00
037525d8e3 Fixed event store + build-v5.yml 2026-01-21 19:34:18 +01:00
894d42f941 Updated lib 2026-01-21 19:13:51 +01:00
885cfd9312 Database inserts +++ 2026-01-21 18:46:34 +01:00
36eb28ad6b Database inserts ++ 2026-01-21 18:32:42 +01:00
af3f7bbc1a ZDS Json 2026-01-21 17:29:42 +01:00
13b6e496c8 Logging 2026-01-21 04:25:56 +01:00
bd89f400ce Changed Enum value 2026-01-21 04:10:27 +01:00
d246403f74 Pagination + store event 2026-01-20 03:20:33 +01:00
181b1630be Error level + longer delay due to db backoff 2026-01-18 20:01:57 +01:00
0b7dff7970 Adjustments 2026-01-18 19:52:11 +01:00
4a06a108a9 Adjustments 2026-01-18 19:29:36 +01:00
93de9b4497 Adjustments 2026-01-18 18:02:54 +01:00
fa8be8f08a Adjustments 2026-01-18 16:55:41 +01:00
d76e54e22d Fixes 2026-01-18 16:49:56 +01:00
7a3053741c Fixes 2026-01-18 16:15:41 +01:00
60adf51a4c Health endpoints 2026-01-18 15:14:02 +01:00
922bb08954 Workflow 2026-01-16 20:33:18 +01:00
2bcdc38f09 Migrated database stuff to seperate module 2026-01-16 20:18:29 +01:00
3925fb758b Props + controller 2026-01-11 04:10:54 +01:00
b0ca3efc58 Fixed running 2026-01-05 04:52:13 +01:00
4092cef839 Annotation 2026-01-05 04:03:05 +01:00
1e5ac12f3d Annotation 2026-01-05 04:02:56 +01:00
1415163618 Gradle 2026-01-05 03:55:26 +01:00
2c66739552 New version of eventi 2026-01-05 03:39:20 +01:00
a8a57e714e Fixed database env 2026-01-05 03:16:13 +01:00
d8cd06b5ba Updated java docker image 2026-01-05 03:15:24 +01:00
1c9c8809fa Files Query 2026-01-04 21:54:47 +01:00
45b1327345 Health 2026-01-04 21:51:35 +01:00
6d4fb2d35f Runner 12 2026-01-04 16:45:55 +01:00
312a957294 Runner 11 2026-01-04 16:38:58 +01:00
13bb901552 Docker signin 2026-01-04 15:18:10 +01:00
8a5415300b Rename 2026-01-04 15:12:59 +01:00
b0d1ddaad5 Test data 2026-01-04 15:00:12 +01:00
bcaac44b37 Runner10 2026-01-04 14:52:10 +01:00
f33a0f5064 Runner9 2026-01-04 14:48:33 +01:00
2ced0f6c43 Runner8 2026-01-04 14:48:15 +01:00
d2c9910b5a Runner7 2026-01-04 14:45:02 +01:00
0a40834ab9 Runner6 - Fixed naming 2026-01-04 14:42:49 +01:00
ad3f838d09 Runner5 2026-01-04 14:38:35 +01:00
2cf3717cf7 Runner4 2026-01-04 02:16:24 +01:00
f5113e2cf8 Runner3 2026-01-04 02:14:30 +01:00
b327eb7eca Runner2 2026-01-04 02:12:18 +01:00
5c85587446 Runner 2026-01-04 02:08:20 +01:00
c2e0f18700 Changed pipeline 2026-01-04 02:03:15 +01:00
cc6656ed5d Controllers added + pipeline 2026-01-04 01:58:22 +01:00
82b48b8c13 Dependency versioning 2026-01-03 09:08:02 +01:00
295349e78b Version + more tests 2026-01-03 08:55:45 +01:00
6bc2ade681 Test coverage + some rework 2026-01-03 03:04:02 +01:00
6d615abb0d Metadata search timeout fix + test 2026-01-02 17:59:16 +01:00
8096a979ea Wip 4 - Pushing Python 2026-01-02 01:49:21 +01:00
f85fbde89d Wip 3 - Added one missing TaskStore + Testing Expiry 2026-01-02 01:48:57 +01:00
2400cf0580 Wip 2 2026-01-02 01:09:26 +01:00
2c61650a0e Wip 2025-12-11 02:54:48 +01:00
b32ff8ce4f Regex parsing og media file name 2025-12-08 18:52:14 +01:00
e84a6494aa Parser #1 2025-12-08 02:29:09 +01:00
59ce796db8 Testcase #1 2025-12-08 00:08:02 +01:00
1838b6e4ab Database Setup and usage 2025-11-09 15:35:56 +01:00
69ae0ba5ab v5 init 2025-11-08 12:07:27 +01:00
ef9aeb1dd2 Downgraded stremait lib to alpha14 - inner code 2025-10-19 04:14:00 +02:00
b5ae862812 Downgraded stremait lib to alpha14 2025-10-19 04:05:21 +02:00
9b0b0766aa Aligning with exposed 0.61 2025-10-19 03:49:31 +02:00
931e60c301 Updated streamit db version 2025-10-19 03:32:42 +02:00
099be5e6f7 Changed to text 2025-10-19 02:36:15 +02:00
35d4299e74 Updated version + Naming parsing 2025-10-04 23:34:09 +02:00
b1061b3c9e Publishing reason for failure 2025-08-20 00:02:09 +02:00
d7555b825f Info controller 2025-08-17 02:11:04 +02:00
e8811248db Updated ignore 2025-08-16 17:30:38 +02:00
b9c7c5cdb5 Updated ignore 2025-08-16 17:30:07 +02:00
b7e679a255 Fixing alt is array for imdb 2025-07-10 00:06:28 +02:00
b48c59b377 Catch and logging 2025-07-06 00:15:09 +02:00
7f79eb1e26 logging 2025-04-27 11:35:20 +02:00
421afb8096 Unlisting 2025-04-27 02:39:43 +02:00
9c6aa304a8 enabled mode 2025-04-27 01:58:08 +02:00
7f014a494c Reduced logging 2025-04-24 17:44:51 +02:00
0dc550e435 Fixed missing return 2025-04-21 19:37:13 +02:00
77ca12963c Changes to timeout 2025-04-21 00:15:41 +02:00
6ce7094119 Added changes to encode or remux 2025-04-20 23:42:49 +02:00
b6fa3977b1 Trimming 2025-04-20 13:16:43 +02:00
91588392a3 Error logging in regards to cause of failure 2025-04-20 01:56:14 +02:00
98437a0891 Correction to filter+++ 2025-04-19 02:30:00 +02:00
c86ea07073 Correction to filter++ 2025-04-19 02:20:32 +02:00
f452678990 Correction to filter 2025-04-19 02:02:41 +02:00
dca730afc2 Added filter for commentary + failing broken process 2025-04-19 01:38:37 +02:00
816e268b2d Changed behaviour and verification for timeout listener 2025-04-12 01:03:53 +02:00
d1e96f5d2a Mini change 2025-04-11 23:39:02 +02:00
66589ffaf6 Changed order 2025-04-10 02:15:46 +02:00
073d0a4d8d Fixes 2025-04-10 02:13:54 +02:00
a36db258af Fixes 2025-04-09 00:15:18 +02:00
4dc54379d8 Trimming data in meta 2025-04-09 00:14:25 +02:00
e1df66f994 Changes to behaviour 2025-04-08 23:42:44 +02:00
58e3766ebd Fixed created 2025-04-06 23:04:51 +02:00
33736ceca7 Update 2025-04-06 22:43:05 +02:00
cd821b7e08 Adjusting check 2025-04-06 19:14:34 +02:00
374753595c Updated subtitle package 2025-04-01 22:51:38 +02:00
22c23a6f27 A bit of logging fix 2025-04-01 22:33:00 +02:00
77ba64664f Added missing files 2025-04-01 22:17:50 +02:00
8bd9c9e121 Table fix 2025-04-01 19:49:57 +02:00
48b7a0d2ad Changed timeout + task complete 2025-04-01 19:49:27 +02:00
6284db2bea Added check 2025-03-31 00:27:33 +02:00
a3ffec6fc1 Upgraded Subtitle package 2025-03-30 03:48:58 +02:00
e02754f390 Progress 2025-03-26 23:17:35 +01:00
749194f9e1 minor adjustments 2025-03-22 15:38:34 +01:00
e663c743ab Fixed reconnect issue 2025-03-20 22:05:52 +01:00
c464b110da Fixed duration to set progress 2025-03-20 00:19:56 +01:00
513815a9c5 Added nostats 2025-03-19 23:48:03 +01:00
671b367957 log append 2025-03-19 23:20:54 +01:00
fac82f06eb Wip 2025-03-19 22:57:44 +01:00
278763be31 Fixes 2025-03-19 19:21:43 +01:00
d8f0f5db85 Root 2025-03-19 18:53:54 +01:00
a27e50f909 Updates 2025-03-19 18:48:38 +01:00
01a5d21e69 Minifix 2025-03-18 02:01:11 +01:00
39d2bbe0b3 Updated UI + Indexing 2025-03-17 18:05:25 +01:00
b36f0c5e1d Removed SockJS 2025-03-16 19:19:29 +01:00
14a366f035 Processer now allows for adhoc origins 2025-03-16 18:13:53 +01:00
1543ef6aba Fix for stuck process 2025-03-16 14:10:24 +01:00
7a5c872ab0 mini3 2025-03-15 03:24:57 +01:00
9b4a177529 mini2 2025-03-15 03:03:07 +01:00
364ce23892 mini 2025-03-15 02:30:39 +01:00
66742304a9 Updated metadata 2025-03-13 23:35:39 +01:00
3c7532ba17 Renamed events to simplify, updated ui 2025-03-10 22:58:11 +01:00
ac4f79ff7a Adjustments 2025-03-09 23:06:41 +01:00
422971a914 Fixed log 2025-03-09 22:59:53 +01:00
78d4014d77 Changing subtitle selection for extraction 2025-03-09 20:36:19 +01:00
d9e9aa6a2f Fixes noise 2025-03-09 20:35:55 +01:00
6243b1b46a Printing 2025-03-09 20:29:49 +01:00
078ee42f51 Url fix 2025-02-27 18:49:02 +01:00
c3859b4bea Requiring persist to be present for completion 2025-02-27 01:07:58 +01:00
a763f3ec5f Added missing ui stuff 2025-02-27 01:02:27 +01:00
d2d8b0f12d Minor adjustments 2025-02-27 00:53:09 +01:00
82cb245639 Adjusted process check. Now checking if there is a produced derived event and if it can create more. 2025-02-27 00:46:46 +01:00
e4e972f36b Multiple checks 2025-02-24 02:12:54 +01:00
db333920a1 Logging + adjustments 2025-02-24 02:00:10 +01:00
3216212c43 Logging 2025-02-24 01:38:47 +01:00
b8737fb687 Logging 2025-02-24 01:17:04 +01:00
a1b91eefc0 Gradle 2025-02-24 00:34:10 +01:00
741b2e6ca2 Adjusted Requester 2025-02-24 00:23:40 +01:00
7ab8e73cb5 Logging on ui 2025-02-24 00:16:46 +01:00
a6b2874b56 Added super call 2025-02-23 23:33:59 +01:00
ad02f40085 Simplified canStart 2025-02-23 23:30:07 +01:00
62e0453ce9 Disabled creation of event if task cannot be created 2025-02-23 23:05:37 +01:00
53dea1c480 Increased length 2025-02-23 22:40:01 +01:00
0740242232 Fixes for readback 2025-02-23 22:30:51 +01:00
5683d23db9 Logging 2025-02-23 22:00:42 +01:00
78971ce69e Changed events parsing++ 2025-02-23 21:55:05 +01:00
7375a5a847 Logging++ 2025-02-23 21:09:40 +01:00
6e25d1e67f Logging 2025-02-23 21:09:30 +01:00
5a50c986f4 Minor changes 2025-02-23 20:39:28 +01:00
8b8a073216 Minor changes 2025-02-23 20:08:12 +01:00
f9600c0745 Worker listener should not consume if it can't start 2025-02-23 19:46:53 +01:00
cec4e2aca2 Now using mode 2025-02-23 18:53:11 +01:00
9df446b2b3 Fixed annotation 2025-02-23 18:48:20 +01:00
9244013949 RestTemplate 2025-02-23 17:05:20 +01:00
caab3c02c6 Request all 2025-02-23 16:48:57 +01:00
bf6f0faf4c Adjusted WS url 2025-02-23 04:24:15 +01:00
e380f12cd2 Adjusted WS url 2025-02-23 04:11:08 +01:00
3f403e3009 Attempt to allow sub paths (routing) 3 2025-02-23 03:59:50 +01:00
da0470e88f Attempt to allow sub paths (routing) 2 2025-02-23 03:47:37 +01:00
136ac4e0a7 Attempt to allow sub paths (routing) 2025-02-23 03:40:10 +01:00
db7a82b3b7 Attempting to fix black magic 2.5 2025-02-23 03:29:48 +01:00
4955e49f54 Attempting to fix black magic 2 2025-02-23 03:24:36 +01:00
b02b0ce05b Attempting to fix black magic 2025-02-23 03:12:00 +01:00
58d5fbc1e3 Adjusted ui 2025-02-23 03:04:05 +01:00
18d72c4159 Adjusted ui 2025-02-23 02:55:06 +01:00
28a09dd2f1 Added tasks 2025-02-23 02:34:35 +01:00
2543b1daee Adjusted ui 2025-02-23 02:29:01 +01:00
8f7db18aea Adjusted ui 2025-02-23 02:22:45 +01:00
027bd4241b Added UI workflow 2025-02-23 02:16:46 +01:00
a57c60934c workflow now runs against current branch 2025-02-22 18:04:46 +01:00
d9d20a76c2 Removed status check as db query performs it 2025-02-22 18:01:17 +01:00
eb6be405ef Updated workflow 2025-02-22 17:57:46 +01:00
c576845e56 Fix 2025-02-22 17:44:55 +01:00
663ac056b7 Fixed workflow 2025-02-22 16:51:19 +01:00
4e5b4a293f Adjusted query 2025-02-22 15:59:12 +01:00
31438544ac Fixed logging 2025-02-22 15:55:32 +01:00
5c04c368d8 Move event 2025-02-22 15:45:33 +01:00
2ea3a69bdd Logging + warn + now casts exception if not initialized 2025-02-22 02:46:49 +01:00
6f0ff2da7c Updated UI 2025-02-22 01:53:25 +01:00
ac227f075f ren 2025-02-21 22:54:12 +01:00
f8a69ee620 New completion 2025-02-21 22:48:02 +01:00
a4c1b6978d Changed order 2025-02-20 21:32:28 +01:00
9424f25dcc Added logging 2025-02-20 21:15:50 +01:00
b74d5215e2 Increased fileName path 2025-02-20 21:08:57 +01:00
8700340313 Added missing table creation 2025-02-20 20:41:55 +01:00
38abf4f522 Indexing 2025-02-20 20:36:17 +01:00
57cb1bc8fb Files table 2025-02-20 17:55:12 +01:00
6269f625e3 Watching multiple directories 4 2025-02-15 02:50:57 +01:00
01a3949df4 Watching multiple directories 3 2025-02-15 02:33:03 +01:00
3a919d2638 Watching multiple directories 2 2025-02-15 02:14:30 +01:00
5be1468891 Watching multiple directories 2025-02-14 19:17:36 +01:00
afa0900755 Storing processed files 2025-02-02 16:38:35 +01:00
4638560c30 Storing processed files 2025-02-02 14:38:58 +01:00
d2c1e8ccb7 Adding logging 2025-01-06 03:38:30 +01:00
efd4725074 Attempt to mute exposed 3 2025-01-06 03:00:51 +01:00
1528741713 Attempt to mute exposed 2 2025-01-06 02:38:31 +01:00
b2f9ab1b64 Attempt to mute exposed 2025-01-06 02:20:51 +01:00
c4da613344 Move issue 2025-01-06 02:06:06 +01:00
4f1b4d78ad Update 2025-01-06 01:42:41 +01:00
1d8928a745 Small tweaks 2025-01-06 01:17:49 +01:00
abe863f015 Reducing noisy logging 2025-01-06 01:04:20 +01:00
be7c742b30 Handling erroring insert 2025-01-06 00:19:31 +01:00
64c5df4772 Fixed suffix + cover 2025-01-05 16:52:08 +01:00
c2cd6f451e Updated metadata 2024-12-30 04:14:28 +01:00
ff21cb9595 Updated subtitle stream picker 2024-12-30 03:20:08 +01:00
5cf7dff3f2 Logging 2024-12-29 23:30:42 +01:00
2bc6e91f5c Adjusted utils 2024-12-24 01:22:39 +01:00
60fde76249 Ignore copy on cover 2024-12-24 01:16:08 +01:00
938a5c7ee8 V4 2024-12-23 22:01:06 +01:00
49a88bcbaf Closing connection after every query 2024-12-18 21:39:43 +01:00
01df7ef239 Dot stripping 2024-11-19 22:09:30 +01:00
9ecdcd88fe Fixed placement 2024-11-07 00:49:59 +01:00
717350e23c Error handling 2024-11-07 00:43:23 +01:00
3712ebb115 Logging and ping 2024-11-07 00:29:08 +01:00
48d1eec2a4 Disabling metadata timeout 2024-11-06 15:15:08 +01:00
b6ee215e2f Logging 2024-10-24 00:39:14 +02:00
c3eb9744e9 Logging 2024-10-24 00:29:48 +02:00
612467495a Logging 2024-10-24 00:21:36 +02:00
2d2929da8f Logging 2024-10-17 17:48:43 +02:00
e25d0d4e9e Removing database closer 2024-10-17 03:24:34 +02:00
639b205376 Fix for missing assignment 2024-10-17 03:20:48 +02:00
1c4aca908e Fix for locked loop 2024-10-17 03:15:58 +02:00
5494e1d47e Default 2024-10-17 03:08:06 +02:00
ae2887fdea Updated py 2024-10-17 03:02:31 +02:00
15e719fe8e Database connect if not connected 2024-10-08 20:05:03 +02:00
4f9de1220d Log suppressing 2024-08-22 01:41:36 +02:00
a14e6a1686 Log suppressing 2024-08-22 01:35:12 +02:00
20ce2880d2 Log suppressing 2024-08-22 00:57:52 +02:00
e169fc39e5 Small tweaks 2024-08-21 01:43:54 +02:00
5e24246912 Fix 2024-08-19 00:05:46 +02:00
accce27eb6 Fix 2024-08-19 00:01:52 +02:00
d03da3f37a Reduced logging and replaced with controller 2024-08-18 23:07:44 +02:00
27f45ded91 Reduced logging and replaced with controller 2024-08-18 20:18:15 +02:00
880285e238 Exception on missing title.. 2024-08-13 23:29:34 +02:00
4d21d06781 Update ui 2024-08-06 01:15:01 +02:00
9c7e42ae29 Update 2024-07-28 15:26:44 +02:00
3efc04744c Using all events 2024-07-25 01:50:26 +02:00
778a2045a0 Added missing code 2024-07-24 01:34:49 +02:00
4e54445170 Meta 2024-07-23 16:53:57 +02:00
d08bac2c41 Adjustment 2024-07-23 01:48:54 +02:00
bcc65e7da6 Adjustment 2024-07-22 17:48:49 +02:00
b9b25089b3 Prefix 2024-07-21 22:37:26 +02:00
1d521aca91 Prefix 2024-07-21 20:21:12 +02:00
015052b97c v3 41 - Adjusting collection and title extract 2024-07-21 02:57:42 +02:00
d67e40180d v3 41 - Adjusting directory/collection matching + error checking for metadata 2024-07-20 16:27:23 +02:00
3a4a5b271a v3 40 2024-07-20 16:06:05 +02:00
afe635f1c5 v3 39 2024-07-20 14:49:19 +02:00
9486606351 v3 38 2024-07-20 13:05:53 +02:00
8bf709d899 v3 37 2024-07-20 12:34:29 +02:00
072cb2b192 v3 36 2024-07-20 12:03:00 +02:00
199cee8594 v3 34 2024-07-20 03:45:05 +02:00
7c896688c8 v3 33 - less noise for processer 2024-07-20 02:07:20 +02:00
4c14d525ce v3 32 2024-07-20 01:25:30 +02:00
861dcd00d8 v3 31 2024-07-20 01:23:26 +02:00
0aee64c7f3 v3 31 - Restart watcher 2024-07-19 23:36:31 +02:00
f5a3603a5a v3 30 2024-07-19 19:35:06 +02:00
c8f3fa5102 v3 29 2024-07-19 17:50:52 +02:00
40918ad008 v3 28 2024-07-19 15:07:10 +02:00
4a47823b09 v3 27 2024-07-19 00:16:01 +02:00
a74e39ed1e v3 26 2024-07-18 23:35:53 +02:00
a5fc251da7 v3 25 2024-07-18 21:12:19 +02:00
fc4ec73894 v3 24 2024-07-18 18:53:37 +02:00
0190816614 v3 23 2024-07-18 18:38:58 +02:00
278d7a4e6e v3 21 2024-07-18 01:08:59 +02:00
91ae79e388 v3 20 2024-07-14 01:57:38 +02:00
8d64e88664 v3 19 2024-07-14 01:44:07 +02:00
038037a15e v3 18 2024-07-14 01:41:32 +02:00
3a32f30838 v3 17 2024-07-14 01:13:57 +02:00
d30d175616 v3 16 2024-07-14 01:04:53 +02:00
2af5f81574 v3 15 2024-07-14 00:59:29 +02:00
c531c220fc v3 14 2024-07-14 00:55:53 +02:00
cd8cda3c23 v3 13 2024-07-14 00:52:27 +02:00
63622d6a1f v3 12 2024-07-14 00:48:09 +02:00
0d38a75797 v3 11 2024-07-14 00:16:06 +02:00
94e7aa02b0 v3 10 2024-07-14 00:08:44 +02:00
0c210cf48f v3 9 2024-07-13 23:45:46 +02:00
a316583d6d v3 8 2024-07-13 23:43:06 +02:00
d71f74d7c7 v3 7 2024-07-13 23:32:27 +02:00
1f0be50126 v3 6 2024-07-13 21:18:25 +02:00
b1e250c7a7 v3 5 2024-07-13 21:11:36 +02:00
ed75777a47 v3 4 2024-07-13 20:07:33 +02:00
7389362cae v3 3 2024-07-13 20:01:51 +02:00
8ae9cbc302 v3 2 2024-07-13 19:56:33 +02:00
12f3f6e3ac v3 2024-07-13 19:49:13 +02:00
72b2d30fab v3 - db polling for pyMetadata 2024-07-13 17:44:22 +02:00
c58b00a236 V3 - Moved to database polling 2024-07-11 23:26:09 +02:00
fe3c238adb Updated 2024-07-05 01:02:06 +02:00
31a47405df Naming 2024-07-04 16:52:25 +02:00
7380e8a23e Logging 2024-07-04 16:35:09 +02:00
e48a529a77 Updated ffmpeg stuff 2024-07-04 01:46:54 +02:00
7d8255cb37 Updated ffmpeg stuff 2024-07-03 15:57:05 +02:00
78518514b8 Updated ffmpeg stuff 2024-07-02 23:11:54 +02:00
c39354da09 Updated ffmpeg stuff 2024-07-02 21:44:45 +02:00
1abbd6ecd8 Updated ffmpeg stuff 2024-07-02 18:22:24 +02:00
504b11ba5d Updated ffmpeg stuff 2024-07-02 18:14:48 +02:00
423c6707e9 Updated check 2024-07-02 01:44:49 +02:00
2020022b13 Updated check 2024-07-01 22:49:50 +02:00
7cd4591bf1 Updated check 2024-07-01 22:30:00 +02:00
2b3c3f7f52 Updated check 2024-07-01 21:47:27 +02:00
8024583d36 Updated check 2024-07-01 21:13:28 +02:00
44e34cead6 Updated check 2024-07-01 19:41:20 +02:00
46b053eaf0 Updated check 2024-07-01 19:27:43 +02:00
8ccefc1843 Removed old checking 2024-07-01 01:22:02 +02:00
9ebcaacc38 Removed old checking 2024-07-01 00:55:30 +02:00
fdd631bce0 Fix 2024-07-01 00:43:04 +02:00
13681fb696 Fix 2024-06-30 23:55:19 +02:00
8153d8e9b3 Fix 2024-06-30 12:40:08 +02:00
a9d05511fb Fix 2024-06-30 11:32:55 +02:00
51e1c5a9ee Fix 2024-06-30 11:12:34 +02:00
50aebda69e Table 2024-06-30 02:44:47 +02:00
0a271801dd Version 2024-06-30 02:12:31 +02:00
2b58c4f070 Version 2024-06-30 02:08:57 +02:00
bd85ce5de7 Version 2024-06-30 02:03:41 +02:00
e669b43ba7 Version 2024-06-30 01:57:38 +02:00
c3176333fe Version 2024-06-30 01:52:43 +02:00
41ff93969e Version 2024-06-30 01:47:23 +02:00
307a4e90c5 Version 2024-06-30 01:43:48 +02:00
1330adf2de Version 2024-06-30 01:13:16 +02:00
b238d4ee9a Path 2024-06-30 01:08:13 +02:00
a97432c69b Path 2024-06-30 01:01:23 +02:00
07f21d4f1d Path 2024-06-30 00:55:00 +02:00
8477fd7bb2 Path 2024-06-30 00:50:27 +02:00
32c0eadea2 Path 2024-06-30 00:46:46 +02:00
bf5c710fd3 Path 2024-06-30 00:44:27 +02:00
3358a37341 Path 2024-06-30 00:41:40 +02:00
0607c629da Path 2024-06-30 00:37:54 +02:00
4173c6c9ed Path 2024-06-30 00:28:39 +02:00
35503c7703 Path 2024-06-30 00:24:39 +02:00
c553a554f0 Path 2024-06-30 00:21:49 +02:00
961fd5cc1f Path 2024-06-30 00:16:14 +02:00
edc8d6dfcf Path 2024-06-30 00:12:07 +02:00
f1de123f9b Cleanup 2024-06-30 00:06:47 +02:00
ddfd2a0e5d Cleanup 2024-06-28 00:45:10 +02:00
bbae7d932d Task polling 2024-06-27 02:12:51 +02:00
f0251fae55 Small changes 2024-06-19 20:01:07 +02:00
6886909ec0 Small changes 2024-06-19 20:00:42 +02:00
77f1787403 Small changes 2024-06-19 19:56:51 +02:00
9244a65235 Small changes 2024-06-19 19:56:31 +02:00
8c71df05b9 Handling none in anii 2024-06-18 19:41:21 +02:00
e3da00ee0d Fixing malformed json 2024-06-18 19:38:01 +02:00
4d4e80a2b9 Changed path 2024-06-18 19:23:57 +02:00
f8c5c03438 Added missing files 2024-06-18 19:21:25 +02:00
4f0886b3fd Update 2024-06-18 19:12:38 +02:00
8746ed0d6f Update 2024-06-18 19:04:49 +02:00
1fd7ed5dae Update 2024-06-18 18:59:34 +02:00
858f147553 Updated list 2024-06-18 18:45:40 +02:00
e46029bedd Refactor python 2024-06-18 18:40:37 +02:00
583c0613a5 Removed Integrity 2024-06-15 22:59:33 +02:00
99a864319f Removed Integrity 2024-06-15 22:58:31 +02:00
b0f6e354ea Changed to listens for 2024-05-06 00:53:26 +02:00
92794574d7 Fixed operations 2024-05-06 00:35:07 +02:00
e776e00375 Convert 2024-05-05 17:57:05 +02:00
4d7af0dedb Changed input 2024-05-04 22:53:58 +02:00
ca46332007 Migrated coroutine 2024-04-26 00:36:55 +02:00
a455146441 New removal from queue 2024-04-26 00:08:58 +02:00
13651b6e90 New removal from queue 2024-04-25 23:44:09 +02:00
67ba916360 Changed index 2024-04-24 02:04:22 +02:00
8985476e61 updated logging 2024-04-24 01:44:47 +02:00
066781edb9 Test 2024-04-23 19:09:45 +02:00
ac2baa5787 Test 2024-04-23 00:59:58 +02:00
e090e98fa5 Test 2024-04-23 00:44:05 +02:00
5c1a8bd105 Debug id 2024-04-23 00:17:37 +02:00
1e394c1938 Debug id 2024-04-22 23:59:56 +02:00
e5bb1c1733 Adjustment 2024-04-22 02:07:57 +02:00
d10bca3d1a Adjustment 2024-04-22 01:34:05 +02:00
ea992e6ea7 Creating a cache of "consumed" eventIds 2024-04-22 01:01:00 +02:00
d6a8ea6297 Creating a cache of "consumed" eventIds 2024-04-22 00:17:14 +02:00
29fc5fa118 Logging 2024-04-21 22:39:46 +02:00
2f3fff1e64 Changed behaviour 2024-04-21 22:17:26 +02:00
88c9d3c306 Changed behaviour 2024-04-21 22:12:55 +02:00
f3c5beaacd Changed behaviour 2024-04-21 21:34:32 +02:00
b245cf6f94 Changed behaviour 2024-04-21 20:43:03 +02:00
7663f4d7e5 Changed behaviour 2024-04-21 20:16:09 +02:00
fecd87c136 Bump 2024-04-21 19:04:09 +02:00
f28dda5fd6 Bump 2024-04-21 19:00:58 +02:00
9d4c5d8316 Integrity update 2024-04-21 15:34:57 +02:00
b3366a2b5f Integrity update 2024-04-21 15:28:54 +02:00
9dd1825064 Corrections 2024-04-21 03:43:56 +02:00
c5f785f2ba Corrections 2024-04-21 02:18:55 +02:00
7becd3d30c Corrections 2024-04-21 02:18:10 +02:00
a3a94a5f8c Update 2024-04-20 19:28:00 +02:00
2fea47b841 Update 2024-04-20 18:41:35 +02:00
174e20e11f Update 2024-04-20 14:39:51 +02:00
6b5c115a2c Update 2024-04-20 04:42:53 +02:00
7c25437a10 Update 2024-04-20 04:33:33 +02:00
645c7b7a8e Update 2024-04-20 03:24:00 +02:00
a275514696 Update 2024-04-19 21:05:44 +02:00
bc7f557a3e Update 2024-04-19 20:41:51 +02:00
5235e0838c Update 2024-04-19 02:46:40 +02:00
7638f573dd Update 2024-04-19 02:43:06 +02:00
84bc18ab41 Update 2024-04-19 02:22:25 +02:00
3211cb2608 Minor update 2024-04-18 02:47:57 +02:00
5fd910b78d Minor update 2024-04-18 02:06:54 +02:00
6313c2b990 Minor update 2024-04-18 01:53:23 +02:00
e6550bcfb0 Minor update 2024-04-18 00:51:47 +02:00
528d07f645 Minor update 2024-04-17 22:44:16 +02:00
607142cc75 Fixes + ignore duplicate for multi events 2024-04-17 22:30:41 +02:00
3b8ce8f86a Fixes 2024-04-17 03:13:24 +02:00
80a7d3e706 Fixes 2024-04-17 02:51:33 +02:00
85caa81781 Correcting tags 2024-04-17 02:04:28 +02:00
e889dc3c61 Waiting for listeners annotated with @Service to be loaded into coordinator 2024-04-17 01:51:44 +02:00
f847a0669c Minor fix 2024-04-16 17:50:37 +02:00
d82488405f Minor fix 2024-04-16 01:10:51 +02:00
d25b736aa0 Replacement 2024-04-16 01:01:45 +02:00
385b18e097 Replacement 2024-04-16 00:13:13 +02:00
e519dc9540 Replacement 2024-04-16 00:08:05 +02:00
c215e7aaae Replacement 2024-04-16 00:00:10 +02:00
f37ce39266 Correction4 2024-04-15 18:34:28 +02:00
768f743462 Correction3 2024-04-15 18:10:56 +02:00
fd61dcbeeb Correction2 2024-04-15 18:03:27 +02:00
5e52385c68 Correction1 2024-04-15 17:25:12 +02:00
dcf247fe41 Update to metadata 2024-04-15 17:14:50 +02:00
9f5946b2c7 Correction6Ws 2024-04-15 01:55:07 +02:00
0a632acfcd Correction6W 2024-04-15 01:52:49 +02:00
4a01dac180 Correction5W 2024-04-15 01:49:48 +02:00
88fc344530 Correction4W 2024-04-15 01:43:47 +02:00
47ee7b9ff8 Correction3 2024-04-15 01:23:53 +02:00
fba5ae697a Correction2 2024-04-15 01:18:25 +02:00
eae7d8dd10 Correction 2024-04-15 01:14:27 +02:00
b407159f22 Updated 2024-04-15 01:03:16 +02:00
353217bd3f Updated 2024-04-15 00:51:20 +02:00
1d7280290c Updated 2024-04-15 00:35:31 +02:00
b80ee038b2 Updated 2024-04-15 00:24:06 +02:00
fe47168718 Updated 2024-04-14 23:03:06 +02:00
ea8892ef3f Updated 2024-04-14 22:05:01 +02:00
4a061057a6 Updated 2024-04-14 21:51:05 +02:00
b55f1d65eb Updated 2024-04-14 21:08:04 +02:00
fbb710b8f8 Updated 2024-04-14 20:17:14 +02:00
cf35aa6ea7 Updated 2024-04-14 17:39:25 +02:00
d1fb815d4d Updated 2024-04-14 04:33:47 +02:00
9b85848522 Updated 2024-04-14 04:28:01 +02:00
ca1e603cc0 Updated 2024-04-14 04:00:56 +02:00
e0ad7770e2 Updated 2024-04-14 03:55:56 +02:00
892aef3942 Updated 2024-04-14 03:39:07 +02:00
22c0c0335c Adjustments 2024-04-14 02:48:57 +02:00
ce5cd71daa Adjustments 2024-04-14 02:41:26 +02:00
af8a1d9b9b Adjustments 2024-04-14 02:02:27 +02:00
66f35401bc Adjustments 2024-04-13 20:41:46 +02:00
50b171df65 Adjustments 2024-04-13 19:45:10 +02:00
f416698ad8 Adjustments 2024-04-13 18:51:24 +02:00
a3b83bc8a1 Adjustments 2024-04-13 17:54:53 +02:00
b5a7aa0f36 Adjustments 2024-04-13 17:21:44 +02:00
3d64a99213 Adjustments 2024-04-13 14:02:26 +02:00
5b964970a0 Made exception to sanitazedName for movie 2024-04-13 01:12:53 +02:00
0c1fe99c7a Update 2024-04-12 22:44:34 +02:00
dcdcc57c89 Finding nearest or defaulting to Metadata title 2024-04-12 01:35:35 +02:00
7088f0221b Updated 2024-04-11 22:08:46 +02:00
72e73c2c20 Updated 2024-04-11 18:25:45 +02:00
5eb94df884 Updated 2024-04-11 01:33:42 +02:00
f0a8e14aaa Updates 2024-04-07 19:16:14 +02:00
6090c2e8c0 Changed events and deleting replaced ones 2024-04-03 01:43:22 +02:00
98527ec02f Updates 2024-04-02 22:27:10 +02:00
Brage
b9a10e7585 UI 2024-03-30 14:25:59 +01:00
Brage
98ca3e239f UI + Adjustments 2024-03-30 14:25:35 +01:00
Brage
68f3d05d56 Minor adjustments 2024-03-29 02:00:31 +01:00
Brage
63ead22e49 Minor adjustments 2024-03-28 14:55:45 +01:00
Brage
6e2968e0eb Minor adjustments 2024-03-28 14:31:54 +01:00
Brage
764f95014a Fixed base 2024-03-28 04:42:05 +01:00
Brage
ce4019a1e6 multi database 2024-03-28 04:05:02 +01:00
Brage
0c2a6f3c1c Fixed 2024-03-26 03:06:42 +01:00
Brage
6fd490689e wrapped 2024-03-26 01:16:06 +01:00
Brage
4b4aac9496 Adjustments 2024-03-26 01:03:55 +01:00
Brage
fca4408f8e Adjustments 2024-03-26 00:55:11 +01:00
Brage
173859cffc Changed behaviour 2024-03-26 00:41:40 +01:00
Brage
07111f7b98 Changed work creation 2024-03-25 23:25:23 +01:00
Brage
876d900e9b Updated filter 2024-03-25 18:39:13 +01:00
Brage
4f3be9a642 Updated 2024-03-25 18:21:17 +01:00
Brage
fc5bb6a71c Removed class from log 2024-03-25 16:38:24 +01:00
Brage
3e8924af8f Renaming events 2024-03-25 15:21:06 +01:00
Brage
d6958ff0ce Adding fields 2024-03-25 14:04:37 +01:00
Brage
d4d0f6f0fb Setting failed to consumed to prevent spamming 2024-03-25 00:22:16 +01:00
Brage
c7bad81bd9 Performing filter on waits 2024-03-24 01:01:55 +01:00
Brage
3d872a7488 Added support for no subtitles for extraction 2024-03-23 16:52:40 +01:00
Brage
56d440aa5e Filter 2024-03-23 16:38:42 +01:00
Brage
4bbc43368e Filter 2024-03-23 16:23:25 +01:00
Brage
204af0279b Suppresses known events 2024-03-23 16:01:20 +01:00
Brage
e8eb486721 Minor update 2024-03-23 01:24:54 +01:00
Brage
a92cc60b2f Increased weight if name is equal 2024-03-23 00:48:27 +01:00
Brage
4acc058038 Fixed filename out 2024-03-20 20:32:56 +01:00
Brage
6c9d1da854 Catches 2024-03-15 01:34:54 +01:00
Brage
e159560bca Filter 2024-03-14 23:29:19 +01:00
Brage
d88c75a8cc Fix 2024-03-14 21:55:13 +01:00
Brage
2d263d009e Fix 2024-03-14 01:49:19 +01:00
Brage
24db5444f2 Fix 2024-03-14 01:29:29 +01:00
Brage
5c4e8f7de8 Migrated to shared codebase for coordinator 2024-03-13 21:26:22 +01:00
Brage
d38003f7f9 Filtering 2024-03-13 19:28:11 +01:00
Brage
fc6e8558f6 Fixed usage 2024-03-07 21:42:49 +01:00
Brage
0f5ffe3694 Logging 2024-03-07 00:44:21 +01:00
Brage
f48e7419e9 Minor adjustments 2024-03-02 00:48:49 +01:00
Brage
849c4d1df4 Changed check 2024-03-01 01:45:38 +01:00
Brage
1d7f78bfea Nullable 2024-03-01 00:57:12 +01:00
Brage
08f0a66b20 Small adjustments 2024-02-29 23:34:51 +01:00
Brage
fc29b57cbe Push 2024-02-07 17:13:54 +01:00
Brage
fd468520d5 Log fiks 2024-01-18 12:48:49 +01:00
Brage
f3d42c06d2 bop 2024-01-18 09:14:39 +01:00
Brage
3d798a13fb bop 2024-01-17 23:17:04 +01:00
Brage
e231caae28 creating default 2024-01-17 22:42:56 +01:00
Brage
963af81353 creating default 2024-01-17 19:57:38 +01:00
Brage
0cc161e612 Corrected onCopy Hevc stream 2024-01-16 00:15:19 +01:00
Brage
0283739e19 Wip 2024-01-15 01:42:55 +01:00
Brage
6e1cc17235 Migrated coordinator to shared listener 2024-01-15 00:28:48 +01:00
Brage
08fc781d86 Korrigert eventId 2024-01-14 20:40:31 +01:00
Brage
809e60083b metadata correction 2024-01-14 19:45:30 +01:00
Brage
b8b50e7350 Changed python 2024-01-14 19:23:17 +01:00
Brage
9365e19237 Changed python 2024-01-14 19:21:16 +01:00
Brage
af5e1613be Changing replay 2024-01-14 19:12:29 +01:00
Brage
a365d16962 typo 2024-01-14 18:51:21 +01:00
Brage Skjønborg
e64e4c1142
Update v2.yml 2024-01-14 18:45:45 +01:00
Brage Skjønborg
5f3982f4dd
Update v2.yml 2024-01-14 18:39:53 +01:00
Brage Skjønborg
660a2f436f
Update v2.yml 2024-01-14 18:34:40 +01:00
Brage
38449408f4 Moved images 2024-01-14 18:14:35 +01:00
Brage
9e84b07536 Corrected image 2024-01-14 17:41:09 +01:00
Brage
4a39f67649 Minor update 2024-01-14 16:05:47 +01:00
Brage
b4036d10c4 Adjustment 2024-01-14 15:55:52 +01:00
Brage
b663bcaf05 Adjustment 2024-01-14 05:57:31 +01:00
Brage
a6681dd3e8 Updated v2 updated 2024-01-14 05:43:38 +01:00
Brage
1a15eb07ef Updated v2 2024-01-14 05:41:41 +01:00
Brage
3e7bca531c Dockerfile 2024-01-14 05:33:42 +01:00
Brage
8edb6cc7eb Dockerfile 2024-01-13 17:05:51 +01:00
Brage
8366b6a197 Merge remote-tracking branch 'origin/v2' into v2 2024-01-13 17:05:47 +01:00
Brage
11c440e29b Dockerfile 2024-01-13 17:05:32 +01:00
Brage Skjønborg
71eeaeb256
Update v2.yml 2024-01-13 16:45:58 +01:00
Brage Skjønborg
15fcea06e5
Update v2.yml 2024-01-13 16:40:59 +01:00
Brage
c38d6890fd Dockerfile 2024-01-13 16:31:55 +01:00
Brage
163e5f19af Removed shared as a project 2024-01-13 15:14:25 +01:00
Brage
5a93ff91ff Removed shared as a project 2024-01-13 14:57:19 +01:00
Brage
c3c41cc2c4 Removed 17 2024-01-13 14:53:31 +01:00
Brage
a663271e16 Bop 2024-01-13 14:18:10 +01:00
Brage
e0a2a3c0b1 Bop 2024-01-13 14:10:33 +01:00
Brage
94ec763e29 Updated 2024-01-13 14:00:56 +01:00
Brage
0a3b5782a8 Updated 2024-01-13 13:58:32 +01:00
Brage
ea0528ed2c Updated 2024-01-13 13:50:26 +01:00
Brage
f6f5ce4687 Updated 2024-01-13 13:48:06 +01:00
Brage
fb1d090120 Updated 2024-01-13 13:45:44 +01:00
Brage
71a4edfa4f Path 2024-01-13 13:29:21 +01:00
Brage
2661828ec3 Path 2024-01-13 00:27:42 +01:00
Brage
852f45d85e v2 2024-01-12 23:57:13 +01:00
Brage
858b66a883 v2 2024-01-12 20:46:45 +01:00
Brage
90e9d873f0 Converter now works 2024-01-06 23:29:37 +01:00
Brage
4e9cdb10a4 WIP - Handover to processer works 2024-01-03 17:37:56 +01:00
Brage
3d119813dd WIP - Looping issue 2023-12-13 18:26:16 +01:00
Brage
1ba4c0ee6d Cleanup 2023-12-10 21:30:54 +01:00
Brage
d775f91c3e Autowired producer 2023-12-10 21:30:39 +01:00
Brage
fd2483629a Fikset name parser 2023-12-10 21:28:37 +01:00
Brage
036138bb19 Fikset kjøring 2023-12-08 00:22:58 +01:00
Brage
dd214c8ff9 Cleanup 2023-12-07 23:47:55 +01:00
Brage
57800a1fba V2 update 2 2023-12-07 23:35:05 +01:00
Brage
729bb03b70 V2 update 2023-12-07 23:34:48 +01:00
Brage
1b83bec7c0 v2 init 2023-12-04 00:03:57 +01:00
657 changed files with 32882 additions and 42688 deletions

100
.github/workflows/build-java-app.yml vendored Normal file
View File

@ -0,0 +1,100 @@
name: Build Java App
on:
workflow_call:
inputs:
app:
required: true
type: string
dockerTag:
required: true
type: string
enabled:
required: true
type: boolean
shouldBuild:
required: true
type: boolean
jobs:
build-java:
if: ${{ inputs.enabled && inputs.shouldBuild }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Detect frontend
id: detect_web
run: |
if [ -d "apps/${{ inputs.app }}/web" ]; then
echo "has_web=true" >> $GITHUB_OUTPUT
else
echo "has_web=false" >> $GITHUB_OUTPUT
fi
- name: Build React frontend
if: ${{ steps.detect_web.outputs.has_web == 'true' }}
run: |
cd apps/${{ inputs.app }}/web
npm install
export CI=false
npm run build
- name: Copy React build into Spring Boot resources
if: ${{ steps.detect_web.outputs.has_web == 'true' }}
run: |
rm -rf apps/${{ inputs.app }}/src/main/resources/static
mkdir -p apps/${{ inputs.app }}/src/main/resources/static
cp -r apps/${{ inputs.app }}/web/build/* apps/${{ inputs.app }}/src/main/resources/static
- name: Extract version
run: |
VERSION=$(grep '^version' apps/${{ inputs.app }}/build.gradle.kts | sed 's/.*"\(.*\)".*/\1/')
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Run unit tests
run: |
chmod +x ./gradlew
./gradlew :apps:${{ inputs.app }}:test --info --stacktrace
- name: Build Java module
run: |
chmod +x ./gradlew
./gradlew :apps:${{ inputs.app }}:bootJar --info --stacktrace
- name: Build Docker image locally
run: |
docker build \
-f ./dockerfiles/DebianJava \
-t local-${{ inputs.app }}:${{ inputs.dockerTag }} \
--build-arg MODULE_NAME=${{ inputs.app }} \
--build-arg PASS_APP_VERSION=${{ env.VERSION }} \
.
- name: Test Docker container
run: |
docker run --rm local-${{ inputs.app }}:${{ inputs.dockerTag }} /bin/sh -c "echo 'Smoke test OK'"
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=${{ inputs.app }}
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-${{ inputs.app }}:v5
bskjon/mediaprocessing-${{ inputs.app }}:v5-${{ inputs.dockerTag }}
bskjon/mediaprocessing-${{ inputs.app }}:v5-${{ github.sha }}

127
.github/workflows/build-python-app.yml vendored Normal file
View File

@ -0,0 +1,127 @@
name: Build Python App
on:
workflow_call:
inputs:
app:
required: true
type: string
dockerTag:
required: true
type: string
enabled:
required: true
type: boolean
shouldBuild:
required: true
type: boolean
jobs:
build-python:
if: ${{ inputs.enabled && inputs.shouldBuild }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# -----------------------------
# Cache pip per app
# -----------------------------
- name: Cache pip
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ inputs.app }}-${{ hashFiles('apps/${{ inputs.app }}/requirements*.txt') }}
restore-keys: |
${{ runner.os }}-pip-${{ inputs.app }}-
# -----------------------------
# Install Python deps
# -----------------------------
- name: Install dependencies
working-directory: apps/${{ inputs.app }}
run: |
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f requirements-test.txt ]; then pip install -r requirements-test.txt; fi
# -----------------------------
# Run tests
# -----------------------------
- name: Run Python tests
working-directory: apps/${{ inputs.app }}
run: python -m pytest -q
# -----------------------------
# Setup Buildx
# -----------------------------
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# -----------------------------
# Cache Docker layers per app
# -----------------------------
- name: Cache Docker layers
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache-${{ inputs.app }}
key: ${{ runner.os }}-buildx-${{ inputs.app }}-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-${{ inputs.app }}-
# -----------------------------
# Build image (load locally for smoke test)
# -----------------------------
- name: Build Docker image (local load)
uses: docker/build-push-action@v5
with:
context: .
file: ./dockerfiles/Python
build-args: |
MODULE_NAME=${{ inputs.app }}
load: true
tags: local-${{ inputs.app }}:${{ inputs.dockerTag }}
cache-from: type=local,src=/tmp/.buildx-cache-${{ inputs.app }}
cache-to: type=local,dest=/tmp/.buildx-cache-${{ inputs.app }}-new
# -----------------------------
# Smoke test
# -----------------------------
- name: Smoke test container
run: |
docker run --rm local-${{ inputs.app }}:${{ inputs.dockerTag }} \
/bin/sh -c "echo 'Smoke test OK'"
# -----------------------------
# Docker login
# -----------------------------
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
# -----------------------------
# Push final image (no rebuild)
# -----------------------------
- name: Push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./dockerfiles/Python
build-args: |
MODULE_NAME=${{ inputs.app }}
push: true
tags: |
bskjon/mediaprocessing-${{ inputs.app }}:v5
bskjon/mediaprocessing-${{ inputs.app }}:v5-${{ inputs.dockerTag }}
bskjon/mediaprocessing-${{ inputs.app }}:v5-${{ github.sha }}
cache-from: type=local,src=/tmp/.buildx-cache-${{ inputs.app }}
cache-to: type=local,dest=/tmp/.buildx-cache-${{ inputs.app }}-new
# -----------------------------
# Move Docker cache
# -----------------------------
- name: Move Docker cache
run: |
rm -rf /tmp/.buildx-cache-${{ inputs.app }}
mv /tmp/.buildx-cache-${{ inputs.app }}-new /tmp/.buildx-cache-${{ inputs.app }}

26
.github/workflows/build-shared.yml vendored Normal file
View File

@ -0,0 +1,26 @@
name: Build Shared
on:
workflow_call:
inputs:
dockerTag:
required: true
type: string
jobs:
build-shared:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Cache Gradle
uses: actions/cache@v4
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Build Shared module
run: |
chmod +x ./gradlew
./gradlew :shared:build --info --stacktrace

115
.github/workflows/build-v5.yml vendored Normal file
View File

@ -0,0 +1,115 @@
name: Build v5
on:
push:
branches: [ v5 ]
pull_request:
branches: [ v5 ]
workflow_dispatch:
jobs:
pre-check:
runs-on: ubuntu-latest
outputs:
sharedDefinitions: ${{ steps.filter.outputs.sharedDefinitions }}
shared: ${{ steps.filter.outputs.shared }}
processer: ${{ steps.filter.outputs.processer }}
converter: ${{ steps.filter.outputs.converter }}
coordinator: ${{ steps.filter.outputs.coordinator }}
ui: ${{ steps.filter.outputs.ui }}
py-metadata: ${{ steps.filter.outputs.metadata }}
py-watcher: ${{ steps.filter.outputs.watcher }}
dockerTag: ${{ steps.tag.outputs.tag }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.sha }}
base: ${{ github.event.before }}
- name: Detect changes
id: filter
uses: dorny/paths-filter@v3
with:
filters: |
shared:
- 'shared/**'
sharedDefinitions:
- 'gradle/**'
processer:
- 'apps/processer/**'
converter:
- 'apps/converter/**'
coordinator:
- 'apps/coordinator/**'
ui:
- 'apps/ui/**'
metadata:
- 'apps/py-metadata/**'
watcher:
- 'apps/py-watcher/**'
- name: Generate docker tag
id: tag
run: echo "tag=$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)" >> $GITHUB_OUTPUT
build-shared:
needs: pre-check
if: ${{
needs.pre-check.outputs.shared == 'true'
|| needs.pre-check.outputs.sharedDefinitions == 'true'
|| needs.pre-check.outputs.processer == 'true'
|| needs.pre-check.outputs.converter == 'true'
|| needs.pre-check.outputs.coordinator == 'true'
|| needs.pre-check.outputs.ui == 'true'
|| github.event_name == 'workflow_dispatch'
}}
uses: ./.github/workflows/build-shared.yml
with:
dockerTag: ${{ needs.pre-check.outputs.dockerTag }}
build-java:
needs: [pre-check, build-shared]
strategy:
matrix:
include:
- app: processer
enabled: true
- app: converter
enabled: true
- app: coordinator
enabled: true
- app: ui
enabled: false
uses: ./.github/workflows/build-java-app.yml
with:
app: ${{ matrix.app }}
dockerTag: ${{ needs.pre-check.outputs.dockerTag }}
enabled: ${{ matrix.enabled }}
shouldBuild: ${{ needs.pre-check.outputs[matrix.app] == 'true'
|| needs.pre-check.outputs.shared == 'true'
|| needs.pre-check.outputs.sharedDefinitions == 'true'
|| github.event_name == 'workflow_dispatch' }}
secrets: inherit
build-python:
needs: [pre-check]
strategy:
matrix:
include:
- app: py-metadata
enabled: true
- app: py-watcher
enabled: true
uses: ./.github/workflows/build-python-app.yml
with:
app: ${{ matrix.app }}
dockerTag: ${{ needs.pre-check.outputs.dockerTag }}
enabled: ${{ matrix.enabled }}
shouldBuild: ${{ needs.pre-check.outputs[matrix.app] == 'true'
|| github.event_name == 'workflow_dispatch' }}
secrets: inherit

View File

@ -1,250 +0,0 @@
name: Build Modules
on:
push:
branches:
- master
pull_request:
branches:
- master
workflow_dispatch:
jobs:
pre-check:
runs-on: ubuntu-latest
outputs:
pyMetadata: ${{ steps.filter.outputs.pyMetadata }}
commonCode: ${{ steps.filter.outputs.commonCode }}
reader: ${{ steps.filter.outputs.reader }}
encode: ${{ steps.filter.outputs.encode }}
convert: ${{ steps.filter.outputs.convert }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
pyMetadata:
- 'pyMetadata/**'
reader:
- 'Reader/**'
encode:
- 'Encode/**'
convert:
- 'Convert/**'
commonCode:
- 'CommonCode/**'
# Step to print the outputs from "pre-check" job
- name: Print Outputs from pre-check job
run: |
echo "pyMetadata: ${{ needs.pre-check.outputs.pyMetadata }}"
echo "commonCode: ${{ needs.pre-check.outputs.commonCode }}"
echo "reader: ${{ needs.pre-check.outputs.reader }}"
echo "encode: ${{ needs.pre-check.outputs.encode }}"
echo "convert: ${{ needs.pre-check.outputs.convert }}"
build-commoncode:
runs-on: ubuntu-latest
needs: pre-check
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache CommonCode Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
- name: Build CommonCode
if: steps.cache-gradle.outputs.cache-hit != 'true' || needs.pre-check.outputs.commonCode == 'true' || github.event_name == 'workflow_dispatch'
run: |
cd CommonCode
chmod +x ./gradlew
./gradlew build
build-encode:
needs: build-commoncode
if: ${{ needs.pre-check.outputs.encode == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.commonCode == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache CommonCode Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
- name: Build Encode module
id: build-encode
run: |
cd Encode
chmod +x ./gradlew
./gradlew build
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: ./Encode
push: true
tags: |
bskjon/mediaprocessing-encoder:latest
bskjon/mediaprocessing-encoder:${{ github.sha }}
bskjon/mediaprocessing-encoder:${{ steps.docker-tag.outputs.tag }}
build-reader:
needs: build-commoncode
runs-on: ubuntu-latest
if: ${{ needs.pre-check.outputs.reader == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.commonCode == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache CommonCode Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
- name: Build Reader module
id: build-reader
run: |
cd Reader
chmod +x ./gradlew
./gradlew build
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: ./Reader
push: true
tags: |
bskjon/mediaprocessing-reader:latest
bskjon/mediaprocessing-reader:${{ github.sha }}
bskjon/mediaprocessing-reader:${{ steps.docker-tag.outputs.tag }}
build-pymetadata:
needs: pre-check
if: ${{ needs.pre-check.outputs.pyMetadata == 'true' || github.event_name == 'workflow_dispatch' }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Build pyMetadata module
id: build-pymetadata
run: |
if [[ "${{ steps.check-pymetadata.outputs.changed }}" == "true" || "${{ github.event_name }}" == "push" || "${{ github.event_name }}" == "workflow_dispatch" ]]; then
cd pyMetadata
# Add the necessary build steps for your Python module here
echo "Build completed"
else
echo "pyMetadata has not changed. Skipping pyMetadata module build."
echo "::set-output name=job_skipped::true"
fi
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: ./pyMetadata
push: true
tags: |
bskjon/mediaprocessing-pymetadata:latest
bskjon/mediaprocessing-pymetadata:${{ github.sha }}
bskjon/mediaprocessing-pymetadata:${{ steps.docker-tag.outputs.tag }}
build-convert:
needs: build-commoncode
if: ${{ needs.pre-check.outputs.convert == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.commonCode == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache CommonCode Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
- name: Build Convert module
id: build-convert
run: |
cd Convert
chmod +x ./gradlew
./gradlew build
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: ./Convert
push: true
tags: |
bskjon/mediaprocessing-converter:latest
bskjon/mediaprocessing-converter:${{ github.sha }}
bskjon/mediaprocessing-converter:${{ steps.docker-tag.outputs.tag }}

295
.github/workflows/v2.yml vendored Normal file
View File

@ -0,0 +1,295 @@
name: Build V2
on:
push:
branches:
- v2
pull_request:
branches:
- v2
workflow_dispatch:
jobs:
pre-check:
runs-on: ubuntu-latest
outputs:
pyMetadata: ${{ steps.filter.outputs.pyMetadata }}
coordinator: ${{ steps.filter.outputs.coordinator }}
processer: ${{ steps.filter.outputs.processer }}
converter: ${{ steps.filter.outputs.converter }}
shared: ${{ steps.filter.outputs.shared }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
pyMetadata:
- 'apps/pyMetadata/**'
apps/coordinator:
- 'apps/coordinator/**'
apps/processer:
- 'apps/processer/**'
apps/converter:
- 'apps/converter/**'
shared:
- 'shared/**'
# Step to print the outputs from "pre-check" job
- name: Print Outputs from pre-check job
run: |
echo "Apps\n"
echo "app:pyMetadata: ${{ needs.pre-check.outputs.pyMetadata }}"
echo "app:coordinator: ${{ needs.pre-check.outputs.coordinator }}"
echo "app:processer: ${{ needs.pre-check.outputs.processer }}"
echo "app:converter: ${{ needs.pre-check.outputs.converter }}"
echo "Shared"
echo "shared: ${{ needs.pre-check.outputs.shared }}"
echo "\n"
echo "${{ needs.pre-check.outputs }}"
echo "${{ needs.pre-check }}"
build-shared:
runs-on: ubuntu-latest
needs: pre-check
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared code Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Build Shared code
if: steps.cache-gradle.outputs.cache-hit != 'true' || needs.pre-check.outputs.shared == 'true' || github.event_name == 'workflow_dispatch'
run: |
chmod +x ./gradlew
./gradlew :shared:build --stacktrace --info
build-processer:
needs: build-shared
if: ${{ needs.pre-check.outputs.processer == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/processer/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Processer module
id: build-processer
run: |
chmod +x ./gradlew
./gradlew :apps:processer:bootJar --info
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=processer
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-processer:v2
bskjon/mediaprocessing-processer:v2-${{ github.sha }}
bskjon/mediaprocessing-processer:v2-${{ steps.docker-tag.outputs.tag }}
build-converter:
needs: build-shared
if: ${{ needs.pre-check.outputs.converter == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/converter/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Converter module
id: build-converter
run: |
chmod +x ./gradlew
./gradlew :apps:converter:bootJar --info
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJava
build-args: |
MODULE_NAME=converter
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-converter:v2
bskjon/mediaprocessing-converter:v2-${{ github.sha }}
bskjon/mediaprocessing-converter:v2-${{ steps.docker-tag.outputs.tag }}
build-coordinator:
needs: build-shared
if: ${{ needs.pre-check.outputs.coordinator == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/coordinator/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Coordinator module
id: build-coordinator
run: |
chmod +x ./gradlew
./gradlew :apps:coordinator:bootJar
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Debug Check extracted version
run: |
echo "Extracted version: ${{ env.VERSION }}"
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=coordinator
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-coordinator:v2
bskjon/mediaprocessing-coordinator:v2-${{ github.sha }}
bskjon/mediaprocessing-coordinator:v2-${{ steps.docker-tag.outputs.tag }}
build-pymetadata:
needs: pre-check
if: ${{ needs.pre-check.outputs.pyMetadata == 'true' || github.event_name == 'workflow_dispatch' }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Build pyMetadata module
id: build-pymetadata
run: |
if [[ "${{ steps.check-pymetadata.outputs.changed }}" == "true" || "${{ github.event_name }}" == "push" || "${{ github.event_name }}" == "workflow_dispatch" ]]; then
cd apps/pyMetadata
# Add the necessary build steps for your Python module here
echo "Build completed"
else
echo "pyMetadata has not changed. Skipping pyMetadata module build."
echo "::set-output name=job_skipped::true"
fi
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./dockerfiles/Python
build-args:
MODULE_NAME=pyMetadata
push: true
tags: |
bskjon/mediaprocessing-pymetadata:v2
bskjon/mediaprocessing-pymetadata:v2-${{ github.sha }}
bskjon/mediaprocessing-pymetadata:v2-${{ steps.docker-tag.outputs.tag }}

295
.github/workflows/v3.yml vendored Normal file
View File

@ -0,0 +1,295 @@
name: Build V3
on:
push:
branches:
- v3
pull_request:
branches:
- v3
workflow_dispatch:
jobs:
pre-check:
runs-on: ubuntu-latest
outputs:
pyMetadata: ${{ steps.filter.outputs.pyMetadata }}
coordinator: ${{ steps.filter.outputs.coordinator }}
processer: ${{ steps.filter.outputs.processer }}
converter: ${{ steps.filter.outputs.converter }}
shared: ${{ steps.filter.outputs.shared }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
pyMetadata:
- 'apps/pyMetadata/**'
apps/coordinator:
- 'apps/coordinator/**'
apps/processer:
- 'apps/processer/**'
apps/converter:
- 'apps/converter/**'
shared:
- 'shared/**'
# Step to print the outputs from "pre-check" job
- name: Print Outputs from pre-check job
run: |
echo "Apps\n"
echo "app:pyMetadata: ${{ needs.pre-check.outputs.pyMetadata }}"
echo "app:coordinator: ${{ needs.pre-check.outputs.coordinator }}"
echo "app:processer: ${{ needs.pre-check.outputs.processer }}"
echo "app:converter: ${{ needs.pre-check.outputs.converter }}"
echo "Shared"
echo "shared: ${{ needs.pre-check.outputs.shared }}"
echo "\n"
echo "${{ needs.pre-check.outputs }}"
echo "${{ needs.pre-check }}"
build-shared:
runs-on: ubuntu-latest
needs: pre-check
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared code Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Build Shared code
if: steps.cache-gradle.outputs.cache-hit != 'true' || needs.pre-check.outputs.shared == 'true' || github.event_name == 'workflow_dispatch'
run: |
chmod +x ./gradlew
./gradlew :shared:build --stacktrace --info
build-processer:
needs: build-shared
if: ${{ needs.pre-check.outputs.processer == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/processer/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Processer module
id: build-processer
run: |
chmod +x ./gradlew
./gradlew :apps:processer:bootJar --info --stacktrace
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=processer
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-processer:v3
bskjon/mediaprocessing-processer:v3-${{ github.sha }}
bskjon/mediaprocessing-processer:v3-${{ steps.docker-tag.outputs.tag }}
build-converter:
needs: build-shared
if: ${{ needs.pre-check.outputs.converter == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v3
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/converter/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Converter module
id: build-converter
run: |
chmod +x ./gradlew
./gradlew :apps:converter:bootJar --info --debug
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJava
build-args: |
MODULE_NAME=converter
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-converter:v3
bskjon/mediaprocessing-converter:v3-${{ github.sha }}
bskjon/mediaprocessing-converter:v3-${{ steps.docker-tag.outputs.tag }}
build-coordinator:
needs: build-shared
if: ${{ needs.pre-check.outputs.coordinator == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.shared == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v2
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/coordinator/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Coordinator module
id: build-coordinator
run: |
chmod +x ./gradlew
./gradlew :apps:coordinator:bootJar
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Debug Check extracted version
run: |
echo "Extracted version: ${{ env.VERSION }}"
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=coordinator
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-coordinator:v3
bskjon/mediaprocessing-coordinator:v3-${{ github.sha }}
bskjon/mediaprocessing-coordinator:v3-${{ steps.docker-tag.outputs.tag }}
build-pymetadata:
needs: pre-check
if: ${{ needs.pre-check.outputs.pyMetadata == 'true' || github.event_name == 'workflow_dispatch' }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Build pyMetadata module
id: build-pymetadata
run: |
if [[ "${{ steps.check-pymetadata.outputs.changed }}" == "true" || "${{ github.event_name }}" == "push" || "${{ github.event_name }}" == "workflow_dispatch" ]]; then
cd apps/pyMetadata
# Add the necessary build steps for your Python module here
echo "Build completed"
else
echo "pyMetadata has not changed. Skipping pyMetadata module build."
echo "::set-output name=job_skipped::true"
fi
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./dockerfiles/Python
build-args:
MODULE_NAME=pyMetadata
push: true
tags: |
bskjon/mediaprocessing-pymetadata:v3
bskjon/mediaprocessing-pymetadata:v3-${{ github.sha }}
bskjon/mediaprocessing-pymetadata:v3-${{ steps.docker-tag.outputs.tag }}

386
.github/workflows/v4.yml vendored Normal file
View File

@ -0,0 +1,386 @@
name: Build v4
on:
push:
branches:
- v4
pull_request:
branches:
- v4
workflow_dispatch:
jobs:
pre-check:
runs-on: ubuntu-latest
outputs:
pyMetadata: ${{ steps.checkAppChanges.outputs.metadata }}
sharedLibrary: ${{ steps.checkAppChanges.outputs.shared }}
coordinator: ${{ steps.checkAppChanges.outputs.coordinator }}
processer: ${{ steps.checkAppChanges.outputs.processer }}
converter: ${{ steps.checkAppChanges.outputs.converter }}
ui: ${{ steps.checkAppChanges.outputs.ui }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: dorny/paths-filter@v3
name: "Detect app changes"
id: checkAppChanges
with:
base: ''
filters: |
metadata:
- 'apps/pyMetadata/**'
coordinator:
- 'apps/coordinator/**'
processer:
- 'apps/processer/**'
converter:
- 'apps/converter/**'
ui:
- 'apps/ui/**'
shared:
- 'shared/**'
# Step to print the outputs from "pre-check" job
- name: Print Outputs from pre-check job
run: |
echo "Apps\n"
echo "app:pyMetadata: ${{ steps.checkAppChanges.outputs.metadata }}"
echo "app:coordinator: ${{ steps.checkAppChanges.outputs.coordinator }}"
echo "app:processer: ${{ steps.checkAppChanges.outputs.processer }}"
echo "app:converter: ${{ steps.checkAppChanges.outputs.converter }}"
echo "app:ui: ${{ steps.checkAppChanges.outputs.ui }}"
echo "Shared"
echo "shared: ${{ steps.checkAppChanges.outputs.shared }}"
echo "${{ steps.checkAppChanges.outputs }}"
build-shared:
runs-on: ubuntu-latest
needs: pre-check
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared code Gradle dependencies
id: cache-gradle
uses: actions/cache@v4
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Build Shared code
if: steps.cache-gradle.outputs.cache-hit != 'true' || needs.pre-check.outputs.sharedLibrary == 'true' || github.event_name == 'workflow_dispatch'
run: |
chmod +x ./gradlew
./gradlew :shared:build --stacktrace --info
build-processer:
needs:
- build-shared
- pre-check
if: ${{ needs.pre-check.outputs.processer == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.sharedLibrary == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v4
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/processer/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Processer module
id: build-processer
run: |
chmod +x ./gradlew
./gradlew :apps:processer:bootJar --info --stacktrace
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=processer
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-processer:v4
bskjon/mediaprocessing-processer:v4-${{ github.sha }}
bskjon/mediaprocessing-processer:v4-${{ steps.docker-tag.outputs.tag }}
build-converter:
needs:
- build-shared
- pre-check
if: ${{ needs.pre-check.outputs.converter == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.sharedLibrary == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v3
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/converter/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Converter module
id: build-converter
run: |
chmod +x ./gradlew
./gradlew :apps:converter:bootJar --info --debug
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJava
build-args: |
MODULE_NAME=converter
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-converter:v4
bskjon/mediaprocessing-converter:v4-${{ github.sha }}
bskjon/mediaprocessing-converter:v4-${{ steps.docker-tag.outputs.tag }}
build-coordinator:
needs:
- build-shared
- pre-check
if: ${{ needs.pre-check.outputs.coordinator == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.sharedLibrary == 'true' }}
runs-on: ubuntu-latest
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v4
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/coordinator/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build Coordinator module
id: build-coordinator
run: |
chmod +x ./gradlew
./gradlew :apps:coordinator:bootJar
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Debug Check extracted version
run: |
echo "Extracted version: ${{ env.VERSION }}"
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJavaFfmpeg
build-args: |
MODULE_NAME=coordinator
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-coordinator:v4
bskjon/mediaprocessing-coordinator:v4-${{ github.sha }}
bskjon/mediaprocessing-coordinator:v4-${{ steps.docker-tag.outputs.tag }}
build-pymetadata:
needs:
- pre-check
if: ${{ needs.pre-check.outputs.pyMetadata == 'true' || github.event_name == 'workflow_dispatch' }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Build pyMetadata module
id: build-pymetadata
run: |
if [[ "${{ needs.pre-check.outputs.pyMetadata }}" == "true" || "${{ github.event_name }}" == "push" || "${{ github.event_name }}" == "workflow_dispatch" ]]; then
cd apps/pyMetadata
# Add the necessary build steps for your Python module here
echo "Build completed"
else
echo "pyMetadata has not changed. Skipping pyMetadata module build."
echo "::set-output name=job_skipped::true"
fi
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v5.1.0
with:
context: .
file: ./dockerfiles/Python
build-args:
MODULE_NAME=pyMetadata
push: true
tags: |
bskjon/mediaprocessing-pymetadata:v4
bskjon/mediaprocessing-pymetadata:v4-${{ github.sha }}
bskjon/mediaprocessing-pymetadata:v4-${{ steps.docker-tag.outputs.tag }}
build-ui:
needs:
- build-shared
- pre-check
if: ${{ needs.pre-check.outputs.ui == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.sharedLibrary == 'true' }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Cache Shared Gradle dependencies
id: cache-gradle
uses: actions/cache@v4
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('shared/build.gradle.kts') }}
- name: Setup Node.js
uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
run: npm install
working-directory: ./apps/ui/web
- name: Build React app
run: |
export CI=false
npm run build
working-directory: ./apps/ui/web
- name : Copy build files
run: |
mkdir -p ./apps/ui/src/main/resources/static
cp -r ./apps/ui/web/build/* ./apps/ui/src/main/resources/static
- name: Extract version from build.gradle.kts
id: extract_version
run: |
VERSION=$(cat ./apps/ui/build.gradle.kts | grep '^version\s*=\s*\".*\"' | sed 's/^version\s*=\s*\"\(.*\)\"/\1/')
echo "VERSION=$VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Build UI module
id: build-ui
run: |
chmod +x ./gradlew
./gradlew :apps:ui:bootJar
echo "Build completed"
- name: Generate Docker image tag
id: docker-tag
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
- name: Docker login
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Debug Check extracted version
run: |
echo "Extracted version: ${{ env.VERSION }}"
- name: Build and push Docker image
uses: docker/build-push-action@v2
with:
context: .
file: ./dockerfiles/DebianJava
build-args: |
MODULE_NAME=ui
PASS_APP_VERSION=${{ env.VERSION }}
push: true
tags: |
bskjon/mediaprocessing-ui:v4
bskjon/mediaprocessing-ui:v4-${{ github.sha }}
bskjon/mediaprocessing-ui:v4-${{ steps.docker-tag.outputs.tag }}

11
.gitignore vendored
View File

@ -5,8 +5,6 @@ build/
!**/src/test/**/build/ !**/src/test/**/build/
### IntelliJ IDEA ### ### IntelliJ IDEA ###
.idea
**/.idea/*
.idea/modules.xml .idea/modules.xml
.idea/jarRepositories.xml .idea/jarRepositories.xml
.idea/compiler.xml .idea/compiler.xml
@ -38,7 +36,12 @@ bin/
/.nb-gradle/ /.nb-gradle/
### VS Code ### ### VS Code ###
.vscode/
### Mac OS ### ### Mac OS ###
.DS_Store .DS_Store
.idea/runConfigurations
/apps/py-metadata/venv/
/apps/py-watcher/venv/

1
.idea/.name generated Normal file
View File

@ -0,0 +1 @@
MediaProcessing

6
.idea/copilot.data.migration.agent.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AgentMigrationStateService">
<option name="migrationStatus" value="COMPLETED" />
</component>
</project>

6
.idea/copilot.data.migration.ask.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AskMigrationStateService">
<option name="migrationStatus" value="COMPLETED" />
</component>
</project>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Ask2AgentMigrationStateService">
<option name="migrationStatus" value="COMPLETED" />
</component>
</project>

6
.idea/copilot.data.migration.edit.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="EditMigrationStateService">
<option name="migrationStatus" value="COMPLETED" />
</component>
</project>

26
.idea/gradle.xml generated Normal file
View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleJvm" value="17" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
<option value="$PROJECT_DIR$/apps" />
<option value="$PROJECT_DIR$/apps/converter" />
<option value="$PROJECT_DIR$/apps/coordinator" />
<option value="$PROJECT_DIR$/apps/processer" />
<option value="$PROJECT_DIR$/apps/ui" />
<option value="$PROJECT_DIR$/shared" />
<option value="$PROJECT_DIR$/shared/common" />
<option value="$PROJECT_DIR$/shared/event-task-contract" />
<option value="$PROJECT_DIR$/shared/ffmpeg" />
</set>
</option>
</GradleProjectSettings>
</option>
</component>
</project>

6
.idea/kotlinc.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="KotlinJpsPluginSettings">
<option name="version" value="2.1.0" />
</component>
</project>

5
.idea/misc.xml generated Normal file
View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" project-jdk-name="azul-17" project-jdk-type="JavaSDK" />
</project>

View File

@ -0,0 +1,26 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="UIApplicationKt" type="JetRunConfigurationType" nameIsGenerated="true">
<envs>
<env name="DATABASE_ADDRESS" value="192.168.2.250" />
<env name="DATABASE_NAME_E" value="eventsV4" />
<env name="DATABASE_NAME_S" value="streamitv3" />
<env name="DATABASE_PASSWORD" value="shFZ27eL2x2NoxyEDBMfDWkvFO" />
<env name="DATABASE_PORT" value="3306" />
<env name="DATABASE_USERNAME" value="root" />
<env name="DIRECTORY_CONTENT_INCOMING" value="G:\MediaProcessingPlayground\input" />
<env name="DIRECTORY_CONTENT_OUTGOING" value="G:\MediaProcessingPlayground\output" />
<env name="DISABLE_COMPLETE" value="true" />
<env name="DISABLE_PRODUCE" value="true" />
<env name="EncoderWs" value="ws://192.168.2.250:6081/ws" />
<env name="METADATA_TIMEOUT" value="0" />
<env name="SUPPORTING_EXECUTABLE_FFMPEG" value="G:\MediaProcessingPlayground\ffmpeg.exe" />
<env name="SUPPORTING_EXECUTABLE_FFPROBE" value="G:\MediaProcessingPlayground\ffprobe.exe" />
</envs>
<option name="MAIN_CLASS_NAME" value="no.iktdev.mediaprocessing.ui.UIApplicationKt" />
<module name="MediaProcessing.apps.ui.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

6
.idea/vcs.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

1281
.idea/workspace.xml generated Normal file

File diff suppressed because it is too large Load Diff

42
CommonCode/.gitignore vendored
View File

@ -1,42 +0,0 @@
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@ -1,41 +0,0 @@
plugins {
kotlin("jvm") version "1.8.21"
}
group = "no.iktdev.streamit.content"
version = "1.0-SNAPSHOT"
repositories {
mavenCentral()
maven("https://jitpack.io")
maven {
url = uri("https://reposilite.iktdev.no/releases")
}
maven {
url = uri("https://reposilite.iktdev.no/snapshots")
}
}
dependencies {
implementation("com.github.pgreze:kotlin-process:1.3.1")
implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
implementation("no.iktdev.streamit.library:streamit-library-kafka:0.0.2-alpha84")
implementation("no.iktdev:exfl:0.0.13-SNAPSHOT")
implementation("com.google.code.gson:gson:2.8.9")
implementation("org.json:json:20230227")
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
testImplementation("junit:junit:4.13.2")
testImplementation("org.junit.jupiter:junit-jupiter")
testImplementation("org.junit.jupiter:junit-jupiter-api:5.8.1")
testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.1")
testImplementation("org.assertj:assertj-core:3.4.1")
}
tasks.test {
useJUnitPlatform()
}

View File

@ -1,6 +0,0 @@
#Sat Jul 15 17:55:49 CEST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@ -1,2 +0,0 @@
rootProject.name = "CommonCode"

View File

@ -1,9 +0,0 @@
package no.iktdev.streamit.content.common
import java.io.File
object CommonConfig {
var kafkaTopic: String = System.getenv("KAFKA_TOPIC") ?: "contentEvents"
var incomingContent: File = if (!System.getenv("DIRECTORY_CONTENT_INCOMING").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_INCOMING")) else File("/src/input")
val outgoingContent: File = if (!System.getenv("DIRECTORY_CONTENT_OUTGOING").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_OUTGOING")) else File("/src/output")
}

View File

@ -1,53 +0,0 @@
package no.iktdev.streamit.content.common
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
import java.util.*
abstract class DefaultKafkaReader(val subId: String = UUID.randomUUID().toString()) {
val messageProducer = DefaultProducer(CommonConfig.kafkaTopic)
val defaultConsumer = DefaultConsumer(subId = subId)
open fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
return emptyMap()
}
fun produceErrorMessage(event: KafkaEvents, baseMessage: Message, reason: String) {
val message = Message(
referenceId = baseMessage.referenceId,
Status(statusType = StatusType.ERROR, message = reason)
)
messageProducer.sendMessage(event.event, message)
}
fun produceErrorMessage(event: KafkaEvents, referenceId: String, reason: String) {
val message = Message(
referenceId = referenceId,
Status(statusType = StatusType.ERROR, message = reason)
)
messageProducer.sendMessage(event.event, message)
}
fun produceMessage(event: KafkaEvents, baseMessage: Message, data: Any?) {
val message = Message(
referenceId = baseMessage.referenceId,
baseMessage.status,
data = data
)
messageProducer.sendMessage(event.event, message)
}
fun produceSuccessMessage(event: KafkaEvents, referenceId: String, data: Any? = null) {
val message = Message(
referenceId = referenceId,
status = Status(StatusType.SUCCESS),
data = data
)
messageProducer.sendMessage(event.event, message)
}
}

View File

@ -1,96 +0,0 @@
package no.iktdev.streamit.content.common
import no.iktdev.exfl.using
import java.io.File
import java.io.FileOutputStream
import java.net.HttpURLConnection
import java.net.URL
import kotlin.math.sign
open class Downloader(val url: String, val outDir: File, val baseName: String) {
protected val http: HttpURLConnection = openConnection()
private val BUFFER_SIZE = 4096
private fun openConnection(): HttpURLConnection {
try {
return URL(url).openConnection() as HttpURLConnection
} catch (e: Exception) {
e.printStackTrace()
throw BadAddressException("Provided url is either not provided (null) or is not a valid http url")
}
}
protected fun getLength(): Int {
return http.contentLength
}
protected fun getProgress(read: Int, total: Int = getLength()): Int {
return ((read * 100) / total)
}
suspend fun download(): File? {
val extension = getExtension()
?: throw UnsupportedFormatException("Provided url does not contain a supported file extension")
val outFile = outDir.using("$baseName.$extension")
val inputStream = http.inputStream
val fos = FileOutputStream(outFile, false)
var totalBytesRead = 0
val buffer = ByteArray(BUFFER_SIZE)
inputStream.apply {
fos.use { fout ->
run {
var bytesRead = read(buffer)
while (bytesRead >= 0) {
fout.write(buffer, 0, bytesRead)
totalBytesRead += bytesRead
bytesRead = read(buffer)
// System.out.println(getProgress(totalBytesRead))
}
}
}
}
inputStream.close()
fos.close()
return outFile
}
open fun getExtension(): String? {
val possiblyExtension = url.lastIndexOf(".") + 1
return if (possiblyExtension > 1) {
return url.toString().substring(possiblyExtension)
} else {
val mimeType = http.contentType ?: null
contentTypeToExtension()[mimeType]
}
}
open fun contentTypeToExtension(): Map<String, String> {
return mapOf(
"image/png" to "png",
"image/jpeg" to "jpg",
"image/webp" to "webp",
"image/bmp" to "bmp",
"image/tiff" to "tiff"
)
}
class BadAddressException : java.lang.Exception {
constructor() : super() {}
constructor(message: String?) : super(message) {}
constructor(message: String?, cause: Throwable?) : super(message, cause) {}
}
class UnsupportedFormatException : Exception {
constructor() : super() {}
constructor(message: String?) : super(message) {}
constructor(message: String?, cause: Throwable?) : super(message, cause) {}
}
class InvalidFileException : Exception {
constructor() : super() {}
constructor(message: String?) : super(message) {}
constructor(message: String?, cause: Throwable?) : super(message, cause) {}
}
}

View File

@ -1,25 +0,0 @@
package no.iktdev.streamit.content.common
import mu.KotlinLogging
import java.io.File
import java.io.RandomAccessFile
private val logger = KotlinLogging.logger {}
class FileAccess {
companion object {
fun isFileAvailable(file: File): Boolean {
if (!file.exists()) return false
var stream: RandomAccessFile? = null
try {
stream = RandomAccessFile(file, "rw")
stream.close()
logger.info { "File ${file.name} is read and writable" }
return true
} catch (e: Exception) {
stream?.close()
}
return false
}
}
}

View File

@ -1,95 +0,0 @@
package no.iktdev.streamit.content.common
class Naming(val fileName: String) {
var cleanedFileName: String
private set
init {
cleanedFileName = fileName
.let { removeBracketedText(it) }
.let { removeParenthesizedText(it) }
.let { removeResolutionAndTags(it) }
.let { removeInBetweenCharacters(it) }
.let { removeExtraWhiteSpace(it) }
}
fun guessDesiredFileName(): String {
val parts = cleanedFileName.split(" - ")
return when {
parts.size == 2 && parts[1].matches(Regex("\\d{4}")) -> {
val title = parts[0]
val year = parts[1]
"$title ($year)"
}
parts.size >= 3 && parts[1].matches(Regex("S\\d+")) && parts[2].matches(Regex("\\d+[vV]\\d+")) -> {
val title = parts[0]
val episodeWithRevision = parts[2]
val episodeParts = episodeWithRevision.split("v", "V")
val episodeNumber = episodeParts[0].toInt()
val revisionNumber = episodeParts[1].toInt()
val seasonEpisode =
"S${episodeNumber.toString().padStart(2, '0')}E${revisionNumber.toString().padStart(2, '0')}"
val episodeTitle = if (parts.size > 3) parts[3] else ""
"$title - $seasonEpisode - $episodeTitle"
}
else -> cleanedFileName
}.trim()
}
fun guessDesiredTitle(): String {
val desiredFileName = guessDesiredFileName()
val seasonRegex = Regex("\\sS[0-9]+(\\s- [0-9]+|\\s[0-9]+)", RegexOption.IGNORE_CASE)
if (seasonRegex.containsMatchIn(desiredFileName)) {
return seasonRegex.replace(desiredFileName, "").trim()
} else {
val result = if (desiredFileName.contains(" - ")) {
return desiredFileName.split(" - ").firstOrNull() ?: desiredFileName
} else desiredFileName
return result.trim()
}
}
/**
* Modifies the input value and removes "[Text]"
* @param text "[TEST] Dummy - 01 [AZ 1080p] "
*/
fun removeBracketedText(text: String): String {
return Regex("\\[.*?]").replace(text, " ")
}
/**
*
*/
fun removeParenthesizedText(text: String): String {
return Regex("\\(.*?\\)").replace(text, " ")
}
/**
*
*/
fun removeResolutionAndTags(text: String): String {
return Regex("(.*?)(?=\\d+[pk]\\b)").replace(text, " ")
}
fun removeInBetweenCharacters(text: String): String {
return Regex("[.]").replace(text, " ")
}
/**
* @param text "example text with extra spaces"
* @return example text with extra spaces
*/
fun removeExtraWhiteSpace(text: String): String {
return Regex("\\s{2,}").replace(text, " ")
}
private fun getMatch(regex: String): String? {
return Regex(regex).find(fileName)?.value ?: return null
}
}

View File

@ -1,20 +0,0 @@
package no.iktdev.streamit.content.common
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
import no.iktdev.streamit.library.kafka.listener.sequential.ISequentialMessageEvent
import no.iktdev.streamit.library.kafka.listener.sequential.SequentialMessageListener
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
abstract class SequentialKafkaReader(subId: String): DefaultKafkaReader(subId), ISequentialMessageEvent {
abstract val accept: KafkaEvents
abstract val subAccepts: List<KafkaEvents>
}

View File

@ -1,42 +0,0 @@
package no.iktdev.streamit.content.common.deamon
import com.github.pgreze.process.ProcessResult
import com.github.pgreze.process.Redirect
import com.github.pgreze.process.process
import com.google.gson.Gson
import kotlinx.coroutines.*
import mu.KotlinLogging
import no.iktdev.exfl.coroutines.Coroutines
private val logger = KotlinLogging.logger {}
open class Daemon(open val executable: String, val daemonInterface: IDaemon) {
val scope = Coroutines.io()
var job: Job? = null
var executor: ProcessResult? = null
open suspend fun run(parameters: List<String>): Int {
daemonInterface.onStarted()
logger.info { "\nDaemon arguments: $executable \nParamters:\n${parameters.joinToString(" ")}" }
job = scope.launch {
executor = process(executable, *parameters.toTypedArray(),
stdout = Redirect.CAPTURE,
stderr = Redirect.CAPTURE,
consumer = {
daemonInterface.onOutputChanged(it)
})
}
job?.join()
val resultCode = executor?.resultCode ?: -1
if (resultCode == 0) {
daemonInterface.onEnded()
} else daemonInterface.onError(resultCode)
logger.info { "$executable result: $resultCode" }
return resultCode
}
suspend fun cancel() {
job?.cancelAndJoin()
scope.cancel("Cancel operation triggered!")
}
}

View File

@ -1,13 +0,0 @@
package no.iktdev.streamit.content.common.deamon
interface IDaemon {
fun onStarted() {}
fun onOutputChanged(line: String) {}
fun onEnded() {}
fun onError(code: Int)
}

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.ContentOutName
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [ContentOutName]; null when absent or invalid. */
class ContentOutNameDeserializer: IMessageDataDeserialization<ContentOutName> {
    override fun deserialize(incomingMessage: Message): ContentOutName? =
        incomingMessage.dataAs(ContentOutName::class.java)
}

View File

@ -1,13 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [ConvertWork]; null when absent or invalid. */
class ConvertWorkDeserializer: IMessageDataDeserialization<ConvertWork> {
    override fun deserialize(incomingMessage: Message): ConvertWork? =
        incomingMessage.dataAs(ConvertWork::class.java)
}

View File

@ -1,52 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/**
 * Central mapping from [KafkaEvents] to the deserializer able to decode that
 * event's payload. The registry is mutable via [addDeserializer]; readers get
 * defensive copies.
 */
class DeserializerRegistry {
    companion object {
        private val _registry = mutableMapOf<KafkaEvents, IMessageDataDeserialization<*>>(
            KafkaEvents.EVENT_READER_RECEIVED_FILE to FileResultDeserializer(),
            KafkaEvents.EVENT_READER_RECEIVED_STREAMS to MediaStreamsDeserializer(),
            KafkaEvents.EVENT_METADATA_OBTAINED to MetadataResultDeserializer(),
            KafkaEvents.EVENT_READER_DETERMINED_SERIE to EpisodeInfoDeserializer(),
            KafkaEvents.EVENT_READER_DETERMINED_MOVIE to MovieInfoDeserializer(),
            KafkaEvents.EVENT_READER_DETERMINED_FILENAME to ContentOutNameDeserializer(),
            KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO to EncodeWorkDeserializer(),
            KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED to EncodeWorkDeserializer(),
            KafkaEvents.EVENT_ENCODER_VIDEO_FILE_STARTED to EncodeWorkDeserializer(),
            KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED to EncodeWorkDeserializer(),
            KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE to ExtractWorkDeserializer(),
            KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED to ExtractWorkDeserializer(),
            KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED to ConvertWorkDeserializer()
        )

        /** Immutable snapshot of the current registry. */
        fun getRegistry(): Map<KafkaEvents, IMessageDataDeserialization<*>> = _registry.toMap()

        /**
         * Maps each requested event to its deserializer, keyed by the event's
         * topic string.
         *
         * @throws MissingDeserializerException when any requested event has no
         *         registered deserializer
         */
        fun getEventToDeserializer(vararg keys: KafkaEvents): Map<String, IMessageDataDeserialization<*>> {
            val missingFields = keys.filter { !getRegistry().keys.contains(it) }
            if (missingFields.isNotEmpty()) {
                throw MissingDeserializerException("Missing deserializers for: ${missingFields.joinToString(", ")}")
            }
            return getRegistry().filter { keys.contains(it.key) }.map { it.key.event to it.value }.toMap()
        }

        private fun toEvent(event: String): KafkaEvents? {
            return KafkaEvents.values().find { it.event == event }
        }

        /**
         * Looks up the deserializer for a raw event string.
         *
         * Bug fix: this previously delegated to [getEventToDeserializer], which
         * throws [MissingDeserializerException] for a known-but-unregistered
         * event — contradicting this function's nullable return contract.
         * A direct registry lookup returns null in that case instead.
         */
        fun getDeserializerForEvent(event: String): IMessageDataDeserialization<*>? {
            val deszEvent = toEvent(event) ?: return null
            return _registry[deszEvent]
        }

        /** Registers (or replaces) the deserializer for [key]. */
        fun addDeserializer(key: KafkaEvents, deserializer: IMessageDataDeserialization<*>) {
            _registry[key] = deserializer
        }
    }
}
/** Thrown when a requested event has no registered deserializer. */
class MissingDeserializerException(override val message: String): RuntimeException()

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [EncodeWork]; null when absent or invalid. */
class EncodeWorkDeserializer: IMessageDataDeserialization<EncodeWork> {
    override fun deserialize(incomingMessage: Message): EncodeWork? =
        incomingMessage.dataAs(EncodeWork::class.java)
}

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.reader.EpisodeInfo
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [EpisodeInfo]; null when absent or invalid. */
class EpisodeInfoDeserializer: IMessageDataDeserialization<EpisodeInfo> {
    override fun deserialize(incomingMessage: Message): EpisodeInfo? =
        incomingMessage.dataAs(EpisodeInfo::class.java)
}

View File

@ -1,12 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [ExtractWork]; null when absent or invalid. */
class ExtractWorkDeserializer: IMessageDataDeserialization<ExtractWork> {
    override fun deserialize(incomingMessage: Message): ExtractWork? =
        incomingMessage.dataAs(ExtractWork::class.java)
}

View File

@ -1,13 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.reader.FileResult
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [FileResult]; null when absent or invalid. */
class FileResultDeserializer: IMessageDataDeserialization<FileResult> {
    override fun deserialize(incomingMessage: Message): FileResult? =
        incomingMessage.dataAs(FileResult::class.java)
}

View File

@ -1,47 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import com.google.gson.Gson
import com.google.gson.JsonObject
import no.iktdev.streamit.content.common.streams.AudioStream
import no.iktdev.streamit.content.common.streams.MediaStreams
import no.iktdev.streamit.content.common.streams.SubtitleStream
import no.iktdev.streamit.content.common.streams.VideoStream
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/**
 * Deserializes an ffprobe-style JSON payload into [MediaStreams].
 *
 * Accepts the payload either as a raw JSON string or as already-structured
 * data (via [Message.dataAsJson]); returns null on any parse failure.
 */
class MediaStreamsDeserializer: IMessageDataDeserialization<MediaStreams> {
    override fun deserialize(incomingMessage: Message): MediaStreams? {
        return try {
            val gson = Gson()
            // The payload may arrive as a JSON string or as a structured object.
            val jsonObject = if (incomingMessage.data is String) {
                gson.fromJson(incomingMessage.data as String, JsonObject::class.java)
            } else {
                gson.fromJson(incomingMessage.dataAsJson(), JsonObject::class.java)
            }
            val streamsJsonArray = jsonObject.getAsJsonArray("streams")
            val rstreams = streamsJsonArray.mapNotNull { streamJson ->
                val streamObject = streamJson.asJsonObject
                val codecType = streamObject.get("codec_type").asString
                // mjpeg streams are skipped — presumably embedded cover art
                // rather than real video. TODO confirm.
                if (streamObject.has("codec_name") && streamObject.get("codec_name").asString == "mjpeg") {
                    null
                } else {
                    // Unknown codec types are silently dropped rather than failing the whole message.
                    when (codecType) {
                        "video" -> gson.fromJson(streamObject, VideoStream::class.java)
                        "audio" -> gson.fromJson(streamObject, AudioStream::class.java)
                        "subtitle" -> gson.fromJson(streamObject, SubtitleStream::class.java)
                        else -> null //throw IllegalArgumentException("Unknown stream type: $codecType")
                    }
                }
            }
            return MediaStreams(rstreams)
        } catch (e: Exception) {
            e.printStackTrace()
            null
        }
    }
}

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.Metadata
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [Metadata]; null when absent or invalid. */
class MetadataResultDeserializer: IMessageDataDeserialization<Metadata> {
    override fun deserialize(incomingMessage: Message): Metadata? =
        incomingMessage.dataAs(Metadata::class.java)
}

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.deserializers
import no.iktdev.streamit.content.common.dto.reader.MovieInfo
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
/** Decodes a [Message] payload into [MovieInfo]; null when absent or invalid. */
class MovieInfoDeserializer: IMessageDataDeserialization<MovieInfo> {
    override fun deserialize(incomingMessage: Message): MovieInfo? =
        incomingMessage.dataAs(MovieInfo::class.java)
}

View File

@ -1,5 +0,0 @@
package no.iktdev.streamit.content.common.dto
/** The determined output base name (no extension) for a piece of content. */
data class ContentOutName(
    val baseName: String
)

View File

@ -1,10 +0,0 @@
package no.iktdev.streamit.content.common.dto
/** Metadata obtained for a piece of content. */
data class Metadata(
    val title: String,
    // Alternative titles (e.g. transliterated or localized names).
    val altTitle: List<String> = emptyList(),
    // Cover image reference, when available.
    val cover: String? = null,
    // Content-type discriminator — exact values not visible here; TODO confirm (likely "movie"/"serie").
    val type: String,
    val summary: String? = null,
    val genres: List<String> = emptyList()
)

View File

@ -1,19 +0,0 @@
package no.iktdev.streamit.content.common.dto
/** Status snapshot of a single work item, as exposed to consumers/UI. */
data class WorkOrderItem(
    val id: String,
    val inputFile: String,
    val outputFile: String,
    val collection: String,
    val state: State,
    // Progress; defaults to 0 until the first update. Presumably a percentage — TODO confirm.
    val progress: Int = 0,
    // Estimated remaining time, null when unknown. Unit not visible here — TODO confirm.
    val remainingTime: Long? = null
)

/** Lifecycle states a work item can report. */
enum class State {
    QUEUED,
    STARTED,
    UPDATED,
    FAILURE,
    ENDED
}

View File

@ -1,9 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader
/** Parsed serie-episode information derived from a file name. */
data class EpisodeInfo(
    val title: String,
    val episode: Int,
    val season: Int,
    // Episode-specific title, when present in the source name.
    val episodeTitle: String?,
    override val fullName: String
): VideoInfo(fullName)

View File

@ -1,7 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader
/** Result of receiving an input file, with optional cleaned-up naming. */
data class FileResult(
    val file: String,
    val title: String = "",
    val sanitizedName: String = ""
)

View File

@ -1,6 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader
/** Parsed movie information derived from a file name. */
data class MovieInfo(
    val title: String,
    override val fullName: String
) : VideoInfo(fullName)

View File

@ -1,9 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader
import java.io.File
/** Identifies a subtitle file to be converted, with its collection and language. */
data class SubtitleInfo(
    val inputFile: String,
    val collection: String,
    val language: String
)

View File

@ -1,5 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader
/**
 * Base type for parsed video information.
 * fullName is @Transient here so only the subclass's own copy is serialized
 * (Gson is used elsewhere in this module for (de)serialization).
 */
abstract class VideoInfo(
    @Transient open val fullName: String
)

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader.work
import java.util.*
/** A completed subtitle conversion: one input file, one or more output files. */
data class ConvertWork(
    // Unique identifier for this unit of work.
    val workId: String = UUID.randomUUID().toString(),
    val collection: String,
    val language: String,
    val inFile: String,
    val outFiles: List<String>
)

View File

@ -1,11 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader.work
import java.util.*
/** A video-encode unit of work, including the encoder's command-line arguments. */
data class EncodeWork(
    override val workId: String = UUID.randomUUID().toString(),
    override val collection: String,
    override val inFile: String,
    override val outFile: String,
    val arguments: List<String>
) : WorkBase(collection = collection, inFile = inFile, outFile = outFile)

View File

@ -1,13 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader.work
import java.util.*
/** A subtitle-extraction unit of work. */
data class ExtractWork(
    override val workId: String = UUID.randomUUID().toString(),
    override val collection: String,
    val language: String,
    override val inFile: String,
    // Extractor command-line arguments.
    val arguments: List<String>,
    override val outFile: String,
    // When true, downstream consumers should produce a convert event for the output.
    var produceConvertEvent: Boolean = true
) : WorkBase(collection = collection, inFile = inFile, outFile = outFile)

View File

@ -1,10 +0,0 @@
package no.iktdev.streamit.content.common.dto.reader.work
import java.util.UUID
/**
 * Base type for units of work. All fields are @Transient so only the
 * subclass's own copies are serialized.
 */
abstract class WorkBase(
    @Transient open val workId: String = UUID.randomUUID().toString(),
    @Transient open val collection: String,
    @Transient open val inFile: String,
    @Transient open val outFile: String
)

View File

@ -1,47 +0,0 @@
package no.iktdev.streamit.content.common.streams
/**
 * Selects which subtitle streams should be converted: text-based formats
 * only, at most one stream per language.
 */
class SubtitleStreamSelector(val streams: List<SubtitleStream>) {
    /** One convertible (text-format) stream per language, drawn from the desired streams. */
    fun getCandidateForConversion(): List<SubtitleStream> {
        val textFormats = listOf("subrip", "srt", "webvtt", "vtt", "ass")
        return getDesiredStreams()
            .groupBy { it.tags.language ?: "eng" }
            .mapNotNull { (_, candidates) -> candidates.firstOrNull { it.codec_name in textFormats } }
    }

    /**
     * Streams whose codec is convertible and whose guessed type is a dialogue
     * variant (DEFAULT, CC or SHD); collapsed to one stream per language,
     * preferring DEFAULT over CC over SHD.
     */
    fun getDesiredStreams(): List<SubtitleStream> {
        val preferredOrder = listOf(SubtitleType.DEFAULT, SubtitleType.CC, SubtitleType.SHD)
        val guesser = SubtitleTypeGuesser()
        return streams
            .filter { getFormatToCodec(it.codec_name) != null }
            .map { guesser.guessType(it) to it }
            .filter { (type, _) -> type in preferredOrder }
            .groupBy { (_, stream) -> stream.tags.language ?: "eng" }
            .mapValues { (_, perLanguage) ->
                perLanguage.minByOrNull { (type, _) -> preferredOrder.indexOf(type) }?.second
            }
            .mapNotNull { it.value }
    }

    /** Maps a subtitle codec name to its output format, or null when not convertible. */
    fun getFormatToCodec(codecName: String): String? = when (codecName) {
        "ass" -> "ass"
        "subrip" -> "srt"
        "webvtt", "vtt" -> "vtt"
        "smi" -> "smi"
        // Image-based formats (e.g. hdmv_pgs_subtitle) cannot be text-converted.
        else -> null
    }
}

View File

@ -1,56 +0,0 @@
package no.iktdev.streamit.content.common.streams
/**
 * Classification of a subtitle stream.
 *
 * @property SHD is Hard-of-Hearing
 * @property CC is Closed-Captions
 * @property NON_DIALOGUE is for Signs or Songs (as in lyrics)
 * @property DEFAULT is the regular dialogue subtitle
 */
enum class SubtitleType {
    SHD,
    CC,
    NON_DIALOGUE,
    DEFAULT
}
/**
 * Heuristically classifies a subtitle stream as dialogue, closed captions,
 * hard-of-hearing, or non-dialogue (signs/songs) from its title tag.
 */
class SubtitleTypeGuesser {
    /** Guesses the [SubtitleType] for [subtitle]; falls back to [SubtitleType.DEFAULT]. */
    fun guessType(subtitle: SubtitleStream): SubtitleType {
        val title = subtitle.tags?.title
        if (!title.isNullOrBlank()) {
            val lower = title.lowercase()
            // Signs & songs tracks are not dialogue. ("song" also matches
            // "songs", "sign" also matches "signs".)
            if (listOf("song", "sign").any { lower.contains(it) }) {
                return SubtitleType.NON_DIALOGUE
            }
            if (getSubtitleType(title, listOf("cc", "closed caption"), SubtitleType.CC) == SubtitleType.CC) {
                return SubtitleType.CC
            }
            if (getSubtitleType(title, listOf("shd", "hh", "Hard-of-Hearing", "Hard of Hearing"), SubtitleType.SHD) == SubtitleType.SHD) {
                return SubtitleType.SHD
            }
        }
        return SubtitleType.DEFAULT
    }

    /**
     * Returns [expected] when the text inside "(...)" or "[...]" in [title]
     * overlaps one of [keys]; otherwise [SubtitleType.DEFAULT].
     *
     * Bug fix: the original built both patterns with Regex.fromLiteral, which
     * escapes every metacharacter — the patterns could never match, so this
     * method always returned DEFAULT. It also used '||' on the null checks,
     * demanding BOTH a parenthesized and a bracketed group, and both patterns
     * were identical even though the second ("braked") clearly targeted
     * square brackets.
     */
    private fun getSubtitleType(title: String, keys: List<String>, expected: SubtitleType): SubtitleType {
        val parenthesized = Regex("""\((.*?)\)""").find(title)?.groupValues?.get(1)
        val bracketed = Regex("""\[(.*?)]""").find(title)?.groupValues?.get(1)
        // Prefer the parenthesized tag; fall back to the bracketed one.
        val tagged = parenthesized?.ifBlank { bracketed } ?: bracketed ?: return SubtitleType.DEFAULT
        // Strip punctuation before comparing against the key list.
        val text = tagged.replace(Regex("""[\[\]().,_+-]"""), "").trim()
        if (text.isBlank()) return SubtitleType.DEFAULT
        val matched = keys.any { key ->
            key.lowercase().contains(text.lowercase()) || text.lowercase().contains(key.lowercase())
        }
        return if (matched) expected else SubtitleType.DEFAULT
    }
}

View File

@ -1,62 +0,0 @@
package no.iktdev.streamit.content.common
import no.iktdev.streamit.content.common.dto.reader.FileResult
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Named
import org.junit.jupiter.api.Test
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
/** Unit tests for the Naming title-guessing helper (defined elsewhere). */
class NamingTest {
    @Test
    fun checkThatBracketsGetsRemoved() {
        // Release-group/quality tags in brackets must be stripped from the title.
        val input = "[AAA] Sir fancy - 13 [1080p HEVC][000000]"
        val name = Naming(input)
        assertThat(name.guessDesiredTitle()).doesNotContain("[")
    }

    @Test
    fun checkThatSeasonIsStripped() {
        // Season/episode markers ("S2 - 01") must not survive in the guessed title.
        val input = "[AAA] Kafka Topic S2 - 01"
        val naming = Naming(input)
        val result = naming.guessDesiredTitle()
        assertThat(result).isEqualTo("Kafka Topic")
    }

    // NOTE(review): the parameterized tests below are disabled; they reference
    // a Naming.Name API that is not visible here — confirm before re-enabling.
    /*
    @ParameterizedTest
    @MethodSource("serieOnlyTest")
    fun ensureOnlySerieAndDecodedCorrectly(testData: TestData) {
        val naming = Naming(testData.input).getName() ?: throw NullPointerException("Named is null")
        assertThat(naming.type).isEqualTo("serie")
        assertThat(naming.season).isEqualTo(testData.expected.season)
        assertThat(naming.episode).isEqualTo(testData.expected.episode)
    }
    @Test
    fun testTest() {
        val tmp = TestData(Naming.Name(title = "Demo", season = 1, episode = 1, type = "serie"), "[Kametsu] Ghost in the Shell Arise - 05 - Pyrophoric Cult (BD 1080p Hi10 FLAC) [13FF85A7]")
        val naming = Naming(tmp.input).getName()
        assertThat(naming).isNotNull()
    }
    fun serieOnlyTest(): List<Named<TestData>> {
        return listOf(
            Named.of("Is defined", TestData(Naming.Name(title = "Demo", season = 1, episode = 1, type = "serie"), "Demo - S01E01")),
            Named.of("Is decoded", TestData(Naming.Name("Demo!", "serie", season = 1, episode = 1), "[TMP] Demo! - 03")),
            Named.of("Is only Episode", TestData(Naming.Name("Demo", "serie", 1, 1), "Demo E1"))
        )
    }*/
    /*
    data class TestData(
        val expected: Naming.Name,
        val input: String
    )*/
}

42
Convert/.gitignore vendored
View File

@ -1,42 +0,0 @@
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@ -1,4 +0,0 @@
# Azul JDK 17 base image.
FROM bskjon/azuljava:17
# Spring Boot default HTTP port.
EXPOSE 8080
# Jar produced by the Gradle bootJar task (archiveFileName "converter.jar").
COPY ./build/libs/converter.jar /usr/share/app/app.jar
# NOTE(review): no ENTRYPOINT/CMD visible here — presumably supplied by the base image; confirm.

View File

@ -1,62 +0,0 @@
import org.jetbrains.kotlin.gradle.plugin.mpp.pm20.util.archivesName
plugins {
    kotlin("jvm") version "1.8.21"
    id("org.springframework.boot") version "2.5.5"
    id("io.spring.dependency-management") version "1.0.11.RELEASE"
    kotlin("plugin.spring") version "1.5.31"
}

group = "no.iktdev.streamit.content"
version = "1.0-SNAPSHOT"

repositories {
    mavenCentral()
    maven("https://jitpack.io")
    // Private repositories hosting the in-house libraries below.
    maven {
        url = uri("https://reposilite.iktdev.no/releases")
    }
    maven {
        url = uri("https://reposilite.iktdev.no/snapshots")
    }
}

dependencies {
    implementation(project(":CommonCode"))
    implementation("no.iktdev.library:subtitle:1.7.8-SNAPSHOT")
    implementation("no.iktdev.streamit.library:streamit-library-kafka:0.0.2-alpha84")
    implementation("no.iktdev:exfl:0.0.13-SNAPSHOT")
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
    implementation("com.github.pgreze:kotlin-process:1.3.1")
    implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
    implementation("com.google.code.gson:gson:2.8.9")
    // NOTE(review): Spring artifacts mix plugin 2.5.5 with starter 2.7.0,
    // kafka 2.8.5 and websocket 2.6.3 — confirm these versions are intentional.
    implementation("org.springframework.boot:spring-boot-starter-web")
    implementation("org.springframework.boot:spring-boot-starter:2.7.0")
    implementation("org.springframework.kafka:spring-kafka:2.8.5")
    implementation("org.springframework.boot:spring-boot-starter-websocket:2.6.3")
    testImplementation(platform("org.junit:junit-bom:5.9.1"))
    testImplementation("org.junit.jupiter:junit-jupiter")
}

tasks.test {
    useJUnitPlatform()
}

tasks.bootJar {
    archiveFileName.set("converter.jar")
    launchScript()
}

tasks.jar {
    archivesName.set("converter.jar")
    archiveBaseName.set("converter")
}
// NOTE(review): archivesName is set again at project level here, duplicating
// the tasks.jar configuration above — confirm which one is intended.
archivesName.set("converter.jar")

Binary file not shown.

View File

@ -1,6 +0,0 @@
#Sun Jul 23 01:48:17 CEST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

234
Convert/gradlew vendored
View File

@ -1,234 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

89
Convert/gradlew.bat vendored
View File

@ -1,89 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -1,4 +0,0 @@
rootProject.name = "Convert"
// CommonCode is a sibling directory shared between the services of this repo.
include(":CommonCode")
project(":CommonCode").projectDir = File("../CommonCode")

View File

@ -1,19 +0,0 @@
package no.iktdev.streamit.content.convert
import mu.KotlinLogging
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication
import org.springframework.context.ApplicationContext
@SpringBootApplication
class ConvertApplication

// Spring application context, captured at startup for module-wide access.
private var context: ApplicationContext? = null

/** Returns the running application context, or null before startup. */
@Suppress("unused")
fun getContext(): ApplicationContext? {
    return context
}

/** Boots the Spring application and stores its context. */
fun main(args: Array<String>) {
    context = runApplication<ConvertApplication>(*args)
}

private val logger = KotlinLogging.logger {}

View File

@ -1,7 +0,0 @@
package no.iktdev.streamit.content.convert
/** Environment-derived configuration for the convert service. */
class ConvertEnv {
    companion object {
        // Kotlin's String?.toBoolean() extension already returns false for a
        // null receiver, so the original "?: false" was dead code; removed.
        val allowOverwrite = System.getenv("ALLOW_OVERWRITE").toBoolean()
    }
}

View File

@ -1,88 +0,0 @@
package no.iktdev.streamit.content.convert
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
import kotlinx.coroutines.withContext
import mu.KotlinLogging
import no.iktdev.library.subtitle.Syncro
import no.iktdev.library.subtitle.classes.DialogType
import no.iktdev.library.subtitle.export.Export
import no.iktdev.library.subtitle.reader.BaseReader
import no.iktdev.library.subtitle.reader.Reader
import no.iktdev.streamit.content.common.dto.reader.SubtitleInfo
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.content.common.streams.SubtitleType
import java.io.File
private val logger = KotlinLogging.logger {}
class ConvertRunner(val referenceId: String, val listener: IConvertListener) {
private fun getReade(inputFile: File): BaseReader? {
return Reader(inputFile).getSubtitleReader()
}
private val maxDelay = 1000 * 5
private var currentDelayed = 0
suspend fun readAndConvert (subtitleInfo: SubtitleInfo) {
val inFile = File(subtitleInfo.inputFile)
while (!inFile.canRead()) {
if (currentDelayed > maxDelay) {
logger.error { "Could not out wait lock on file!" }
withContext(Dispatchers.Default) {
listener.onError(referenceId, subtitleInfo, "Cant read file!")
}
return
}
logger.error { "$referenceId ${subtitleInfo.inputFile}: Cant read file!" }
delay(500)
currentDelayed += 500
}
val reader = getReade(inFile)
val dialogs = reader?.read()
if (dialogs.isNullOrEmpty()) {
logger.error { "$referenceId ${subtitleInfo.inputFile}: Dialogs read from file is null or empty!" }
withContext(Dispatchers.Default) {
listener.onError(referenceId, subtitleInfo, "Dialogs read from file is null or empty!")
}
return
}
withContext(Dispatchers.Default) {
listener.onStarted(referenceId)
}
val filtered = dialogs.filter { !it.ignore && it.type !in listOf(DialogType.SIGN_SONG, DialogType.CAPTION) }
val syncedDialogs = Syncro().sync(filtered)
try {
val converted = Export(inFile,inFile.parentFile, inFile.nameWithoutExtension).write(syncedDialogs)
val item = ConvertWork(
inFile = inFile.absolutePath,
collection = subtitleInfo.collection,
language = subtitleInfo.language,
outFiles = converted.map { it.absolutePath }
)
withContext(Dispatchers.Default) {
listener.onEnded(referenceId, subtitleInfo, work = item)
}
} catch (e: Exception) {
e.printStackTrace()
withContext(Dispatchers.Default) {
listener.onError(referenceId, subtitleInfo, "See log")
}
}
}
}
/** Callbacks emitted by [ConvertRunner] during subtitle conversion. */
interface IConvertListener {
    /** Conversion has begun for the given reference. */
    fun onStarted(referenceId: String)
    /** Conversion failed; [message] is a short human-readable reason. */
    fun onError(referenceId: String, info: SubtitleInfo, message: String)
    /** Conversion completed; [work] describes the produced output files. */
    fun onEnded(referenceId: String, info: SubtitleInfo, work: ConvertWork)
}

View File

@ -1,69 +0,0 @@
package no.iktdev.streamit.content.convert.kafka
import kotlinx.coroutines.launch
import mu.KotlinLogging
import no.iktdev.exfl.coroutines.Coroutines
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.DefaultKafkaReader
import no.iktdev.streamit.content.common.dto.reader.SubtitleInfo
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.content.convert.ConvertRunner
import no.iktdev.streamit.content.convert.IConvertListener
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.springframework.stereotype.Service
import java.io.File
private val logger = KotlinLogging.logger {}
/**
 * Consumes subtitle-extraction ENDED events from Kafka, starts a
 * [ConvertRunner] for convert candidates, and publishes converter
 * lifecycle events back to the topic via the [IConvertListener] callbacks.
 */
@Service
class SubtitleConsumer: DefaultKafkaReader("convertHandlerSubtitle"), IConvertListener {
    // Anonymous listener: reacts only to EVENT_ENCODER_SUBTITLE_FILE_ENDED records.
    private final val listener = object : SimpleMessageListener(
        topic = CommonConfig.kafkaTopic,
        consumer = defaultConsumer,
        accepts = listOf(KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event)
    ) {
        override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
            val referenceId = data.value().referenceId
            val workResult = data.value().dataAs(ExtractWork::class.java)
            // Only extracted files flagged for conversion are processed further.
            if (workResult?.produceConvertEvent == true) {
                logger.info { "Using ${data.value().referenceId} ${workResult.outFile} as it is a convert candidate" }
                val convertWork = SubtitleInfo(
                    inputFile = workResult.outFile,
                    collection = workResult.collection,
                    language = workResult.language,
                )
                // Announce PENDING before kicking off the (long-running) conversion.
                produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_STARTED, Message(referenceId = referenceId, Status(statusType = StatusType.PENDING)), convertWork)
                Coroutines.io().launch {
                    ConvertRunner(referenceId, this@SubtitleConsumer).readAndConvert(convertWork)
                }
            } else {
                logger.info { "Skipping ${data.value().referenceId} ${workResult?.outFile} as it is not a convert candidate" }
            }
        }
    }

    init {
        listener.listen()
    }

    // IConvertListener callbacks below map conversion lifecycle to Kafka events.

    /** Conversion started: emit STARTED with SUCCESS status. */
    override fun onStarted(referenceId: String) {
        produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_STARTED, Message(referenceId = referenceId, Status(statusType = StatusType.SUCCESS)), null)
    }

    /** Conversion failed: emit ENDED with ERROR status and reason. */
    override fun onError(referenceId: String, info: SubtitleInfo, message: String) {
        produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED, Message(referenceId = referenceId, Status(statusType = StatusType.ERROR, message = message)), null)
    }

    /** Conversion succeeded: emit ENDED with SUCCESS and the produced work. */
    override fun onEnded(referenceId: String, info: SubtitleInfo, work: ConvertWork) {
        produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED, Message(referenceId = referenceId, Status(statusType = StatusType.SUCCESS)), work)
    }
}

View File

@ -1,3 +0,0 @@
# Spring Boot runtime configuration for this service.
spring.output.ansi.enabled=always
# Kafka client is chatty at DEBUG; keep it at INFO.
logging.level.org.apache.kafka=INFO
#logging.level.root=DEBUG

42
Encode/.gitignore vendored
View File

@ -1,42 +0,0 @@
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@ -1,4 +0,0 @@
# Runtime image for the Encode service: Debian + Azul Java 17 + ffmpeg
# (ffmpeg is required by the encode/extract daemons).
FROM bskjon/debian-azuljava17-ffmpeg:latest
EXPOSE 8080
# encoder.jar is produced by the Gradle bootJar task (archiveFileName = "encoder.jar").
COPY ./build/libs/encoder.jar /usr/share/app/app.jar
# NOTE(review): no ENTRYPOINT/CMD declared here — presumably the base image
# supplies one that launches /usr/share/app/app.jar; confirm.

View File

@ -1,66 +0,0 @@
// Gradle build for the Encode service (Kotlin/JVM + Spring Boot fat jar).
import org.jetbrains.kotlin.gradle.plugin.mpp.pm20.util.archivesName

plugins {
    kotlin("jvm") version "1.8.21"
    id("org.springframework.boot") version "2.5.5"
    id("io.spring.dependency-management") version "1.0.11.RELEASE"
    // NOTE(review): plugin.spring 1.5.31 does not match kotlin("jvm") 1.8.21 —
    // confirm this version skew is intentional.
    kotlin("plugin.spring") version "1.5.31"
}

group = "no.iktdev.streamit.content"
version = "1.0-SNAPSHOT"

repositories {
    mavenCentral()
    maven("https://jitpack.io")
    // Internal artifact repositories for streamit libraries.
    maven {
        url = uri("https://reposilite.iktdev.no/releases")
    }
    maven {
        url = uri("https://reposilite.iktdev.no/snapshots")
    }
}

dependencies {
    implementation(project(":CommonCode"))
    implementation("no.iktdev.streamit.library:streamit-library-kafka:0.0.2-alpha84")
    implementation("no.iktdev:exfl:0.0.13-SNAPSHOT")
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
    implementation("com.github.pgreze:kotlin-process:1.3.1")
    implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
    implementation("com.google.code.gson:gson:2.8.9")
    // NOTE(review): mixed Spring versions below (plugin 2.5.5, starter 2.7.0,
    // websocket 2.6.3) — verify they resolve consistently.
    implementation("org.springframework.boot:spring-boot-starter-web")
    implementation("org.springframework.boot:spring-boot-starter:2.7.0")
    implementation("org.springframework.kafka:spring-kafka:2.8.5")
    implementation("org.springframework.boot:spring-boot-starter-websocket:2.6.3")
    testImplementation("junit:junit:4.13.2")
    testImplementation("org.junit.jupiter:junit-jupiter")
    testImplementation("org.junit.jupiter:junit-jupiter-api:5.8.1")
    testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.1")
    testImplementation("org.assertj:assertj-core:3.4.1")
    testImplementation("org.mockito:mockito-core:3.+")
}

tasks.test {
    useJUnitPlatform()
}

// bootJar produces the runnable fat jar copied into the Docker image.
tasks.bootJar {
    archiveFileName.set("encoder.jar")
    launchScript()
}

tasks.jar {
    archivesName.set("encoder.jar")
    archiveBaseName.set("encoder")
}

// NOTE(review): top-level archivesName duplicates the naming already set in
// tasks.jar/bootJar above — likely redundant; confirm before removing.
archivesName.set("encoder.jar")

Binary file not shown.

View File

@ -1,6 +0,0 @@
#Tue Jul 11 02:14:45 CEST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

234
Encode/gradlew vendored
View File

@ -1,234 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

89
Encode/gradlew.bat vendored
View File

@ -1,89 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -1,4 +0,0 @@
// Gradle settings for the Encode service.
rootProject.name = "Encode"
// CommonCode is shared between services and lives as a sibling directory.
include(":CommonCode")
project(":CommonCode").projectDir = File("../CommonCode")

View File

@ -1,35 +0,0 @@
package no.iktdev.streamit.content.encode
import org.springframework.beans.factory.annotation.Value
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory
import org.springframework.boot.web.server.WebServerFactoryCustomizer
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.messaging.simp.config.MessageBrokerRegistry
import org.springframework.web.bind.annotation.RestController
import org.springframework.web.method.HandlerTypePredicate
import org.springframework.web.servlet.config.annotation.CorsRegistry
import org.springframework.web.servlet.config.annotation.PathMatchConfigurer
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer
import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker
import org.springframework.web.socket.config.annotation.StompEndpointRegistry
import org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer
/**
 * STOMP-over-WebSocket configuration: clients connect on "/ws" (SockJS
 * fallback enabled), subscribe under "/topic" and send to "/app".
 */
@Configuration
@EnableWebSocketMessageBroker
class WebSocketConfig : WebSocketMessageBrokerConfigurer {
    override fun registerStompEndpoints(registry: StompEndpointRegistry) {
        registry.addEndpoint("/ws")
            //    .setAllowedOrigins("*")
            .withSockJS()
        // Plain (non-SockJS) endpoint at the root path.
        registry.addEndpoint("/")
    }

    override fun configureMessageBroker(registry: MessageBrokerRegistry) {
        // In-memory broker for outbound messages to subscribers.
        registry.enableSimpleBroker("/topic")
        // Prefix for messages routed to @MessageMapping handlers.
        registry.setApplicationDestinationPrefixes("/app")
    }
}

View File

@ -1,9 +0,0 @@
package no.iktdev.streamit.content.encode
/**
 * Environment-driven configuration for the encode service. All values are
 * read once at class initialization.
 */
class EncodeEnv {
    companion object {
        /** Path to the ffmpeg executable; defaults to "ffmpeg" on PATH. */
        val ffmpeg: String = System.getenv("SUPPORTING_EXECUTABLE_FFMPEG") ?: "ffmpeg"

        /** Whether ffmpeg may overwrite existing output files (-y). Defaults to false. */
        // Null-safe parse: System.getenv returns null when the variable is unset.
        val allowOverwrite: Boolean = System.getenv("ALLOW_OVERWRITE")?.toBoolean() ?: false

        /** Number of simultaneous encode runners; 1 when unset or not a valid integer. */
        // Replaces the previous NPE-driven try/catch with explicit null handling.
        val maxRunners: Int = System.getenv("SIMULTANEOUS_ENCODE_RUNNERS")?.toIntOrNull() ?: 1
    }
}

View File

@ -1,60 +0,0 @@
package no.iktdev.streamit.content.encode
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.DefaultKafkaReader
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
import no.iktdev.streamit.content.common.deserializers.EncodeWorkDeserializer
import no.iktdev.streamit.content.encode.runner.RunnerCoordinator
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.springframework.stereotype.Service
private val logger = KotlinLogging.logger {}
/**
 * Kafka consumer for generated video-encode instructions: deserializes each
 * [EncodeWork] message and hands it to the [RunnerCoordinator] queue.
 */
@Service
class EncodeWorkConsumer(private val runnerCoordinator: RunnerCoordinator) : DefaultKafkaReader("encodeWork") {
    // Assigned in init; lateinit because it needs defaultConsumer from the superclass.
    lateinit var encodeInstructionsListener: EncodeInformationListener

    init {
        encodeInstructionsListener = EncodeInformationListener(
            topic = CommonConfig.kafkaTopic,
            defaultConsumer,
            accepts = listOf(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO.event),
            runnerCoordinator
        )
        encodeInstructionsListener.listen()
    }

    /** Maps the accepted event to its payload deserializer. */
    override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
        return DeserializerRegistry.getEventToDeserializer(
            KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO
        )
    }

    /** Listener that forwards deserialized encode work to the coordinator queue. */
    class EncodeInformationListener(
        topic: String,
        consumer: DefaultConsumer,
        accepts: List<String>,
        val runnerCoordinator: RunnerCoordinator
    ) : SimpleMessageListener(
        topic, consumer,
        accepts
    ) {
        override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
            logger.info { "\nreferenceId: ${data.value().referenceId} \nEvent: ${data.key()} \nData:\n${Gson().toJson(data.value())}" }
            // Re-deserialize the payload into EncodeWork before queueing.
            val message = data.value().apply {
                this.data = EncodeWorkDeserializer().deserializeIfSuccessful(data.value())
            }
            runnerCoordinator.addEncodeMessageToQueue(message)
        }
    }
}

View File

@ -1,35 +0,0 @@
package no.iktdev.streamit.content.encode
import no.iktdev.exfl.observable.ObservableMap
import no.iktdev.exfl.observable.observableMapOf
import no.iktdev.streamit.content.common.dto.WorkOrderItem
import no.iktdev.streamit.content.encode.progress.Progress
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication
import org.springframework.context.ApplicationContext
/** Spring Boot entry point for the encoder service. */
@SpringBootApplication
class EncoderApplication

// Application context captured at startup; null until main() has run.
private var context: ApplicationContext? = null

// Live encode progress keyed by work id; served by ProgressController.
val progressMap = observableMapOf<String, Progress>()

@Suppress("unused")
fun getContext(): ApplicationContext? {
    return context
}

fun main(args: Array<String>) {
    context = runApplication<EncoderApplication>(*args)
}

// Work-order state keyed by id, for encode and extract jobs respectively.
val encoderItems = ObservableMap<String, WorkOrderItem>()
val extractItems = ObservableMap<String, WorkOrderItem>()

/*val progress = ObservableMap<String, EncodeInformation>().also {
    it.addListener(object: ObservableMap.Listener<String, EncodeInformation> {
        override fun onPut(key: String, value: EncodeInformation) {
            super.onPut(key, value)
            logger.info { "$key with progress: $value." }
        }
    })
}*/

View File

@ -1,59 +0,0 @@
package no.iktdev.streamit.content.encode
import com.google.gson.Gson
import mu.KotlinLogging
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.DefaultKafkaReader
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
import no.iktdev.streamit.content.common.deserializers.ExtractWorkDeserializer
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.content.encode.runner.RunnerCoordinator
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.springframework.stereotype.Service
private val logger = KotlinLogging.logger {}
/**
 * Kafka consumer for generated subtitle-extraction instructions: deserializes
 * each [ExtractWork] message and hands it to the [RunnerCoordinator] queue.
 * Mirrors EncodeWorkConsumer for the subtitle event stream.
 */
@Service
class ExtractWorkConsumer(private val runnerCoordinator: RunnerCoordinator) : DefaultKafkaReader("extractWork") {
    // Assigned in init; lateinit because it needs defaultConsumer from the superclass.
    lateinit var encodeInstructionsListener: ExtractWorkListener

    init {
        encodeInstructionsListener = ExtractWorkListener(
            topic = CommonConfig.kafkaTopic,
            defaultConsumer,
            accepts = listOf(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE.event),
            runnerCoordinator
        )
        encodeInstructionsListener.listen()
    }

    /** Maps the accepted event to its payload deserializer. */
    override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
        return DeserializerRegistry.getEventToDeserializer(
            KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE
        )
    }

    /** Listener that forwards deserialized extract work to the coordinator queue. */
    class ExtractWorkListener(
        topic: String,
        consumer: DefaultConsumer,
        accepts: List<String>,
        val runnerCoordinator: RunnerCoordinator
    ) : SimpleMessageListener(
        topic, consumer,
        accepts
    ) {
        override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
            logger.info { "\nreferenceId: ${data.value().referenceId} \nEvent: ${data.key()} \nData:\n${Gson().toJson(data.value())}" }
            // Re-deserialize the payload into ExtractWork before queueing.
            val message = data.value().apply {
                this.data = ExtractWorkDeserializer().deserializeIfSuccessful(data.value())
            }
            runnerCoordinator.addExtractMessageToQueue(message)
        }
    }
}

View File

@ -1,16 +0,0 @@
package no.iktdev.streamit.content.encode.controllers
import com.google.gson.Gson
import no.iktdev.streamit.content.encode.progressMap
import org.springframework.web.bind.annotation.GetMapping
import org.springframework.web.bind.annotation.RestController
import javax.servlet.http.HttpServletResponse
/** REST endpoint exposing current encode progress as JSON. */
@RestController
class ProgressController {
    /**
     * Returns a JSON array of all in-flight [no.iktdev.streamit.content.encode.progress.Progress]
     * entries. The "Refresh: 5" header makes plain-browser clients re-poll every 5 s.
     */
    @GetMapping("/progress")
    fun getValue(response: HttpServletResponse): String {
        response.setHeader("Refresh", "5")
        return Gson().toJson(progressMap.values)
    }
}

View File

@ -1,18 +0,0 @@
package no.iktdev.streamit.content.encode.progress
/**
 * One parsed ffmpeg `-progress` report. Field names mirror the key=value
 * pairs ffmpeg emits (e.g. out_time_ms, dup_frames); all fields are nullable
 * because any key may be absent from a given report.
 */
data class DecodedProgressData(
    val frame: Int?,
    val fps: Double?,
    val stream_0_0_q: Double?,
    val bitrate: String?,
    val total_size: Int?,
    val out_time_us: Long?,
    val out_time_ms: Long?,
    val out_time: String?,
    val dup_frames: Int?,
    val drop_frames: Int?,
    val speed: Double?,
    val progress: String?
)

/** Estimated completion time broken into days/hours/minutes/seconds. */
data class ECT(val day: Int = 0, val hour: Int = 0, val minute: Int = 0, val second: Int = 0)

View File

@ -1,12 +0,0 @@
package no.iktdev.streamit.content.encode.progress
/**
 * Progress snapshot for a single encode job, derived from ffmpeg output.
 * Sentinel values: progress = -1 and estimatedCompletionSeconds = -1 mean
 * "not yet known".
 */
data class Progress(
    val workId: String,
    val outFileName: String,
    val progress: Int = -1,
    val time: String,
    val duration: String,
    val speed: String,
    val estimatedCompletionSeconds: Long = -1,
    val estimatedCompletion: String = "Unknown",
)

View File

@ -1,108 +0,0 @@
package no.iktdev.streamit.content.encode.runner
import mu.KotlinLogging
import no.iktdev.streamit.content.encode.EncodeEnv
import no.iktdev.exfl.observable.ObservableList
import no.iktdev.exfl.observable.observableListOf
import no.iktdev.exfl.using
import no.iktdev.streamit.content.common.deamon.Daemon
import no.iktdev.streamit.content.common.deamon.IDaemon
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.content.encode.progress.DecodedProgressData
import no.iktdev.streamit.content.encode.progress.Progress
import no.iktdev.streamit.content.encode.progress.ProgressDecoder
import java.io.BufferedWriter
import java.io.File
import java.io.FileWriter
private val logger = KotlinLogging.logger {}
/**
 * Runs a single ffmpeg encode job as an external process and relays lifecycle
 * and progress callbacks to [daemonInterface].
 *
 * @param referenceId correlation id for the overall work order
 * @param work encode instructions (input/output paths, ffmpeg arguments)
 * @param daemonInterface receiver of started/progress/ended/error callbacks
 * @param outFile log file that receives every raw ffmpeg output line
 */
class EncodeDaemon(val referenceId: String, val work: EncodeWork, val daemonInterface: IEncodeListener, val outFile: File = File("src").using("logs", "${work.workId}-${work.collection}.log")): IDaemon {
    // Raw ffmpeg progress lines buffered until a full report can be parsed.
    var outputCache = observableListOf<String>()
    private val decoder = ProgressDecoder(work)

    /**
     * Tries to parse [items] into a [Progress] report; returns null while the
     * buffered lines do not yet form a complete report.
     */
    fun produceProgress(items: List<String>): Progress? {
        try {
            val decodedProgress = decoder.parseVideoProgress(items)
            if (decodedProgress != null) {
                val progress = decoder.getProgress(decodedProgress)
                // Report parsed: drop consumed lines so the cache holds only new output.
                outputCache.clear()
                return progress
            }
        } catch (e: IndexOutOfBoundsException) {
            // Expected while a progress block is still incomplete; keep buffering.
        } catch (e: Exception) {
            // Attach the throwable instead of printStackTrace() so it reaches the log.
            logger.error(e) { "$referenceId @ ${work.workId}: failed to decode progress" }
        }
        return null
    }

    init {
        // Each appended line may complete a progress report — try to parse eagerly.
        outputCache.addListener(object : ObservableList.Listener<String> {
            override fun onAdded(item: String) {
                val progress = produceProgress(outputCache)
                progress?.let {
                    daemonInterface.onProgress(referenceId, work, progress)
                }
            }
        })
        outFile.parentFile.mkdirs()
    }

    /**
     * Builds the ffmpeg argument list from [work] and runs it to completion.
     * @return the ffmpeg process exit code (0 on success)
     */
    suspend fun runUsingWorkItem(): Int {
        val outFile = File(work.outFile)
        if (!outFile.parentFile.exists()) {
            outFile.parentFile.mkdirs()
        }
        // -y allows overwrite; otherwise -nostdin stops ffmpeg from prompting.
        // NOTE(review): "-progress pipe:1" is placed after the output file; ffmpeg
        // normally expects global options before outputs — confirm this ordering.
        val adjustedArgs = (if (EncodeEnv.allowOverwrite) listOf("-y") else listOf("-nostdin")) + listOf(
            "-hide_banner", "-i", File(work.inFile).absolutePath, *work.arguments.toTypedArray(), outFile.absolutePath,
            "-progress", "pipe:1"
        )
        logger.info { "$referenceId @ ${work.workId} ${adjustedArgs.joinToString(" ")}" }
        return Daemon(EncodeEnv.ffmpeg, this).run(adjustedArgs)
    }

    override fun onStarted() {
        super.onStarted()
        daemonInterface.onStarted(referenceId, work)
    }

    override fun onEnded() {
        super.onEnded()
        daemonInterface.onEnded(referenceId, work)
    }

    override fun onError(code: Int) {
        daemonInterface.onError(referenceId, work, code)
    }

    override fun onOutputChanged(line: String) {
        super.onOutputChanged(line)
        if (decoder.isDuration(line))
            decoder.setDuration(line)
        // Buffer only lines that belong to a progress report.
        if (decoder.expectedKeys.any { line.startsWith(it) }) {
            outputCache.add(line)
        }
        writeToLog(line)
    }

    /** Appends [line] plus a newline to [outFile]. */
    fun writeToLog(line: String) {
        // use{} guarantees the writers are closed even if write() throws
        // (the original leaked both writers on failure).
        BufferedWriter(FileWriter(outFile, true)).use { writer ->
            writer.write(line)
            writer.newLine()
        }
    }
}
/** Lifecycle and progress callbacks emitted by [EncodeDaemon]. */
interface IEncodeListener {
    /** The ffmpeg process has started. */
    fun onStarted(referenceId: String, work: EncodeWork)
    /** The process exited with a non-zero [code]. */
    fun onError(referenceId: String, work: EncodeWork, code: Int)
    /** A new progress report was parsed from ffmpeg output. */
    fun onProgress(referenceId: String, work: EncodeWork, progress: Progress)
    /** The process completed. */
    fun onEnded(referenceId: String, work: EncodeWork)
}

View File

@ -1,54 +0,0 @@
package no.iktdev.streamit.content.encode.runner
import mu.KotlinLogging
import no.iktdev.streamit.content.encode.EncodeEnv
import no.iktdev.exfl.observable.observableListOf
import no.iktdev.streamit.content.common.deamon.Daemon
import no.iktdev.streamit.content.common.deamon.IDaemon
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.content.encode.progress.DecodedProgressData
import java.io.File
private val logger = KotlinLogging.logger {}
/**
 * Runs a single ffmpeg extraction job (e.g. pulling a subtitle stream) as an
 * external process and forwards lifecycle events to [daemonInterface].
 */
class ExtractDaemon(val referenceId: String, val work: ExtractWork, val daemonInterface: IExtractListener): IDaemon {
    // Raw process output lines, collected as they arrive.
    var outputCache = observableListOf<String>()

    /**
     * Assembles the ffmpeg argument list from [work] and runs the process.
     * @return the process exit code (0 on success)
     */
    suspend fun runUsingWorkItem(): Int {
        val destination = File(work.outFile)
        if (!destination.parentFile.exists()) {
            destination.parentFile.mkdirs()
        }
        // Prepend -y only when overwriting existing output is permitted.
        val overwriteFlags = if (EncodeEnv.allowOverwrite) listOf("-y") else emptyList()
        val commandArgs = overwriteFlags + listOf(
            "-i", File(work.inFile).absolutePath, *work.arguments.toTypedArray(), destination.absolutePath
        )
        logger.info { "$referenceId @ ${work.workId} ${commandArgs.joinToString(" ")}" }
        return Daemon(EncodeEnv.ffmpeg, this).run(commandArgs)
    }

    override fun onStarted() {
        super.onStarted()
        daemonInterface.onStarted(referenceId, work)
    }

    override fun onEnded() {
        super.onEnded()
        daemonInterface.onEnded(referenceId, work)
    }

    override fun onError(code: Int) {
        daemonInterface.onError(referenceId, work, code)
    }

    override fun onOutputChanged(line: String) {
        super.onOutputChanged(line)
        outputCache.add(line)
    }
}
/** Lifecycle callbacks emitted by [ExtractDaemon]; onProgress is optional. */
interface IExtractListener {
    /** The ffmpeg process has started. */
    fun onStarted(referenceId: String, work: ExtractWork)
    /** The process exited with a non-zero [code]. */
    fun onError(referenceId: String, work: ExtractWork, code: Int)
    /** Optional progress hook; default implementation ignores progress. */
    fun onProgress(referenceId: String, work: ExtractWork, progress: DecodedProgressData) {}
    /** The process completed. */
    fun onEnded(referenceId: String, work: ExtractWork)
}

View File

@ -1,324 +0,0 @@
package no.iktdev.streamit.content.encode.runner
import com.google.gson.Gson
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.Channel
import no.iktdev.streamit.content.encode.EncodeEnv
import mu.KotlinLogging
import no.iktdev.exfl.coroutines.Coroutines
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.dto.State
import no.iktdev.streamit.content.common.dto.WorkOrderItem
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.content.encode.encoderItems
import no.iktdev.streamit.content.encode.extractItems
import no.iktdev.streamit.content.encode.progress.Progress
import no.iktdev.streamit.content.encode.progressMap
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
import org.springframework.stereotype.Service
import java.util.concurrent.atomic.AtomicInteger
private val logger = KotlinLogging.logger {}
data class ExecutionBlock(
val workId: String,
val type: String,
val work: suspend () -> Int
)
@Service
class RunnerCoordinator(
    private var maxConcurrentJobs: Int = 1,
) {
    private val logger = KotlinLogging.logger {}

    /** Producer used for every Kafka status event emitted by this coordinator. */
    val producer = DefaultProducer(CommonConfig.kafkaTopic)
    final val defaultScope = Coroutines.default()

    /** Number of work items currently executing across all worker coroutines. Logging/visibility only. */
    private val jobsInProgress = AtomicInteger(0)

    /** Unbounded queue of pending work blocks, drained by the worker coroutines. */
    val queue = Channel<ExecutionBlock>(Channel.UNLIMITED)

    init {
        // One worker coroutine per allowed runner; concurrency is bounded by worker count.
        maxConcurrentJobs = EncodeEnv.maxRunners
        repeat(EncodeEnv.maxRunners) {
            launchWorker()
        }
    }

    /**
     * Launches a worker coroutine that processes queued work items one at a time.
     *
     * Bugfix: the previous implementation consumed an item with queue.receive() and
     * then silently DROPPED it whenever jobsInProgress >= maxConcurrentJobs; it also
     * ran the work synchronously while tracking an already-completed placeholder Job,
     * so invokeOnCompletion and the in-progress list never reflected reality.
     * Because init launches exactly maxRunners workers, each worker now simply
     * processes every item it receives; the counter remains for logging.
     */
    fun launchWorker() = defaultScope.launch {
        while (true) {
            logger.info("Worker is waiting for a work item...")
            val workItem = queue.receive() // Coroutine suspends here until a work item is available
            logger.info("Worker received a work item.")
            val active = jobsInProgress.incrementAndGet()
            logger.info { "WorkId: ${workItem.workId}-${workItem.type}\n\tCurrent active workers: $active" }
            try {
                processWorkItem(workItem)
            } catch (e: Exception) {
                // A throwing work block must not terminate the worker loop.
                logger.error(e) { "Work ${workItem.workId}-${workItem.type} failed with an exception" }
            } finally {
                val workers = jobsInProgress.decrementAndGet()
                logger.info { "Worker Released: $workers" }
                logger.info { "Available workers: ${jobsInProgress.get()}/$maxConcurrentJobs" }
            }
        }
    }

    /** Runs a single queued work block and returns the exit code it reports. */
    private suspend fun processWorkItem(workItem: ExecutionBlock): Int {
        logger.info { "Processing work: ${workItem.type}" }
        return workItem.work()
    }

    /**
     * Queues a video-encode request. Publishes PENDING immediately, registers the
     * work in [encoderItems], enqueues the encode block, then publishes the queue
     * outcome (SUCCESS / IGNORED / ERROR) on the same event topic.
     */
    fun addEncodeMessageToQueue(message: Message) {
        producer.sendMessage(
            KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
            message.withNewStatus(Status(StatusType.PENDING))
        )
        try {
            if (message.data != null && message.data is EncodeWork) {
                val work = message.data as EncodeWork
                encoderItems.put(
                    message.referenceId, WorkOrderItem(
                        id = message.referenceId,
                        inputFile = work.inFile,
                        outputFile = work.outFile,
                        collection = work.collection,
                        state = State.QUEUED
                    )
                )
                val workBlock = suspend {
                    val data: EncodeWork = work
                    val encodeDaemon = EncodeDaemon(message.referenceId, data, encodeListener)
                    logger.info { "\nreferenceId: ${message.referenceId} \nStarting encoding. \nWorkId: ${data.workId}" }
                    encodeDaemon.runUsingWorkItem()
                }
                val result = queue.trySend(ExecutionBlock(work.workId, "encode", workBlock))
                val statusType = when (result.isClosed) {
                    true -> StatusType.IGNORED // Queue is closed, the job was ignored
                    false -> {
                        if (result.isSuccess) {
                            StatusType.SUCCESS // The job was submitted to the queue
                        } else {
                            StatusType.ERROR // Submitting the job to the queue failed
                        }
                    }
                }
                producer.sendMessage(
                    KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
                    message.withNewStatus(Status(statusType))
                )
            } else {
                producer.sendMessage(
                    KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
                    message.withNewStatus(Status(StatusType.ERROR, "Data is not an instance of EncodeWork or null"))
                )
            }
        } catch (e: Exception) {
            e.printStackTrace()
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
                message.withNewStatus(Status(StatusType.ERROR, e.message))
            )
        }
    }

    /**
     * Queues a subtitle-extract request. Mirrors [addEncodeMessageToQueue] but
     * targets the subtitle event topic and the [extractItems] work-order map.
     */
    fun addExtractMessageToQueue(message: Message) {
        producer.sendMessage(
            KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
            message.withNewStatus(Status(StatusType.PENDING))
        )
        try {
            if (message.data != null && message.data is ExtractWork) {
                val work = message.data as ExtractWork
                extractItems.put(
                    message.referenceId, WorkOrderItem(
                        id = message.referenceId,
                        inputFile = work.inFile,
                        outputFile = work.outFile,
                        collection = work.collection,
                        state = State.QUEUED
                    )
                )
                val workBlock = suspend {
                    val data: ExtractWork = work
                    val extractDaemon = ExtractDaemon(message.referenceId, data, extractListener)
                    logger.info { "\nreferenceId: ${message.referenceId} \nStarting extracting. \nWorkId: ${data.workId}" }
                    extractDaemon.runUsingWorkItem()
                }
                val result = queue.trySend(ExecutionBlock(work.workId, "extract", workBlock))
                val statusType = when (result.isClosed) {
                    true -> StatusType.IGNORED // Queue is closed, the job was ignored
                    false -> {
                        if (result.isSuccess) {
                            StatusType.SUCCESS // The job was submitted to the queue
                        } else {
                            StatusType.ERROR // Submitting the job to the queue failed
                        }
                    }
                }
                producer.sendMessage(
                    KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
                    message.withNewStatus(Status(statusType))
                )
            } else {
                producer.sendMessage(
                    KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
                    message.withNewStatus(Status(StatusType.ERROR, "Data is not an instance of ExtractWork"))
                )
            }
        } catch (e: Exception) {
            e.printStackTrace()
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
                message.withNewStatus(Status(StatusType.ERROR, e.message))
            )
        }
    }

    /** Forwards encode lifecycle events to Kafka and mirrors state into [encoderItems]. */
    val encodeListener = object : IEncodeListener {
        override fun onStarted(referenceId: String, work: EncodeWork) {
            logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nEncode: Started\n${work.outFile}" }
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_VIDEO_FILE_STARTED.event,
                Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
            )
            encoderItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.STARTED
                )
            )
        }

        override fun onError(referenceId: String, work: EncodeWork, code: Int) {
            logger.error { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nEncode: Failed\n${work.outFile} \nError: $code" }
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED.event,
                Message(referenceId, Status(StatusType.ERROR, message = code.toString()), work)
            )
            encoderItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.FAILURE
                )
            )
        }

        override fun onProgress(referenceId: String, work: EncodeWork, progress: Progress) {
            logger.debug {
                "Work progress for $referenceId with WorkId ${work.workId} @ ${work.outFile}: Progress: ${
                    Gson().toJson(
                        progress
                    )
                }"
            }
            // Progress is cached per workId for the websocket/status endpoints.
            progressMap.put(work.workId, progress)
            encoderItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.UPDATED,
                    progress = progress.progress,
                    remainingTime = progress.estimatedCompletionSeconds
                )
            )
        }

        override fun onEnded(referenceId: String, work: EncodeWork) {
            logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nEncode: Ended\n${work.outFile}" }
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED.event,
                Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
            )
            encoderItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.ENDED,
                    progress = 100,
                    remainingTime = null
                )
            )
        }
    }

    /** Forwards extract lifecycle events to Kafka and mirrors state into [extractItems]. */
    val extractListener = object : IExtractListener {
        override fun onStarted(referenceId: String, work: ExtractWork) {
            logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nExtract: Started\n${work.outFile}" }
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_STARTED.event,
                Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
            )
            extractItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.STARTED
                )
            )
        }

        override fun onError(referenceId: String, work: ExtractWork, code: Int) {
            logger.error { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nExtract: Failed\n${work.outFile} \nError: $code" }
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event,
                Message(referenceId, Status(StatusType.ERROR, code.toString()), work)
            )
            extractItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.FAILURE
                )
            )
        }

        override fun onEnded(referenceId: String, work: ExtractWork) {
            logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nExtract: Ended\n${work.outFile}" }
            producer.sendMessage(
                KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event,
                Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
            )
            extractItems.put(
                referenceId, WorkOrderItem(
                    id = referenceId,
                    inputFile = work.inFile,
                    outputFile = work.outFile,
                    collection = work.collection,
                    state = State.ENDED
                )
            )
        }
    }
}

View File

@ -1,65 +0,0 @@
package no.iktdev.streamit.content.encode.topics
import no.iktdev.exfl.observable.ObservableMap
import no.iktdev.streamit.content.common.dto.WorkOrderItem
import no.iktdev.streamit.content.encode.encoderItems
import no.iktdev.streamit.content.encode.extractItems
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.messaging.handler.annotation.MessageMapping
import org.springframework.messaging.simp.SimpMessagingTemplate
import org.springframework.stereotype.Controller
/**
 * Bridges the encoder/extractor work-order maps onto STOMP topics.
 * Whenever either observable map changes, the corresponding queue snapshot and
 * individual work-order item are pushed to subscribed websocket clients.
 */
@Controller
class EncoderTopic(
    @Autowired val template: SimpMessagingTemplate?,
) {
    init {
        encoderItems.addListener(workOrderListener(::pushEncoderQueue, ::pushEncoderWorkOrder))
        extractItems.addListener(workOrderListener(::pushExtractorQueue, ::pushExtractorWorkOrder))
    }

    // Builds a map listener that forwards whole-map updates and single puts
    // to the supplied callbacks. Shared by both the encoder and extractor maps.
    private fun workOrderListener(
        queueChanged: () -> Unit,
        itemChanged: (WorkOrderItem) -> Unit,
    ): ObservableMap.Listener<String, WorkOrderItem> =
        object : ObservableMap.Listener<String, WorkOrderItem> {
            override fun onMapUpdated(map: Map<String, WorkOrderItem>) {
                super.onMapUpdated(map)
                queueChanged()
            }

            override fun onPut(key: String, value: WorkOrderItem) {
                super.onPut(key, value)
                itemChanged(value)
            }
        }

    /** Pushes a single encoder work-order item to its topic. */
    fun pushEncoderWorkOrder(item: WorkOrderItem) {
        template?.convertAndSend("/topic/encoder/workorder", item)
    }

    /** Pushes a single extractor work-order item to its topic. */
    fun pushExtractorWorkOrder(item: WorkOrderItem) {
        template?.convertAndSend("/topic/extractor/workorder", item)
    }

    /** Pushes the full encoder queue; also invokable by clients via MessageMapping. */
    @MessageMapping("/encoder/queue")
    fun pushEncoderQueue() {
        template?.convertAndSend("/topic/encoder/queue", encoderItems.values)
    }

    /** Pushes the full extractor queue; also invokable by clients via MessageMapping. */
    @MessageMapping("/extractor/queue")
    fun pushExtractorQueue() {
        template?.convertAndSend("/topic/extractor/queue", extractItems.values)
    }
}

View File

@ -1,3 +0,0 @@
spring.output.ansi.enabled=always
logging.level.org.apache.kafka=WARN
#logging.level.root=DEBUG

View File

@ -1,29 +0,0 @@
package no.iktdev.streamit.content.encode
import org.apache.kafka.clients.consumer.ConsumerRecord
/**
 * Test helpers for loading classpath resources.
 */
open class Resources {
    /** Reads a classpath resource as text, or null when the resource is missing. */
    fun getText(path: String): String? {
        return this.javaClass.classLoader.getResource(path)?.readText()
    }

    /** Access to the bundled stream-sample JSON fixtures. */
    open class Streams() : Resources() {
        /**
         * All bundled stream samples in order.
         *
         * Generalized: the previous implementation hard-coded seven getSample(n)
         * calls; [count] defaults to 7 so existing callers are unaffected.
         */
        fun all(count: Int = 7): List<String> {
            return (0 until count).map { getSample(it) }
        }

        /** Loads streams/sample<number>.json; throws NPE if the fixture is absent. */
        fun getSample(number: Int): String {
            return getText("streams/sample$number.json")!!
        }
    }
}

View File

@ -1,176 +0,0 @@
package no.iktdev.streamit.content.encode.progress
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.content.encode.Resources
import no.iktdev.streamit.content.encode.runner.EncodeDaemon
import no.iktdev.streamit.content.encode.runner.IEncodeListener
import org.assertj.core.api.Assertions.assertThat
import org.junit.BeforeClass
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertDoesNotThrow
import org.mockito.ArgumentMatchers.anyBoolean
import org.mockito.ArgumentMatchers.anyString
import org.mockito.Mockito.*
import java.io.BufferedWriter
import java.io.File
import java.io.FileWriter
import java.util.UUID
/**
 * Tests for ffmpeg progress-output parsing (ProgressDecoder / DecodedProgressData)
 * and for the EncodeDaemon's line-by-line progress reporting.
 */
class DecodedProgressDataDecoderTest {
    // Feeds the raw sample output line by line and asserts parsing never throws,
    // even on partial/garbled input (see the 'text' fixture below).
    @Test
    fun test() {
        val progress = ProgressDecoder(EncodeWork(
            workId = UUID.randomUUID().toString(),
            collection = "Demo",
            inFile = "Demo.mkv",
            outFile = "FancyDemo.mp4",
            arguments = emptyList()
        ))
        val lines = text.split("\n")
        val cache: MutableList<String> = mutableListOf()
        lines.forEach {
            cache.add(it)
            assertDoesNotThrow {
                // Parses the accumulated cache; may legitimately yield null mid-record.
                val progressItem = progress.parseVideoProgress(cache)
                progressItem?.progress
            }
        }
        assertThat(lines).isNotEmpty()
    }

    // End-to-end: reads a captured ffmpeg log from test resources, lets the
    // decoder pick up the Duration line, then replays every line through an
    // EncodeDaemon and expects at least one Progress callback.
    @Test
    fun testCanRead() {
        val res = Resources()
        val data = res.getText("Output1.txt") ?: ""
        assertThat(data).isNotEmpty()
        val lines = data.split("\n").map { it.trim() }
        assertThat(lines).isNotEmpty()
        val encodeWork = EncodeWork(
            workId = UUID.randomUUID().toString(),
            collection = "Demo",
            inFile = "Demo.mkv",
            outFile = "FancyDemo.mp4",
            arguments = emptyList()
        )
        val decoder = ProgressDecoder(encodeWork)
        // setDuration scans each line; only the "Duration: ..." line should stick.
        lines.forEach { decoder.setDuration(it) }
        assertThat(decoder.duration).isNotNull()
        val produced = mutableListOf<Progress>()
        val tempFile = File.createTempFile("test", ".log")
        val encoder = EncodeDaemon(UUID.randomUUID().toString(), encodeWork, object : IEncodeListener {
            override fun onStarted(referenceId: String, work: EncodeWork) {
            }
            override fun onError(referenceId: String, work: EncodeWork, code: Int) {
            }
            override fun onProgress(referenceId: String, work: EncodeWork, progress: Progress) {
                // Collect every emitted progress update for the final assertion.
                produced.add(progress)
            }
            override fun onEnded(referenceId: String, work: EncodeWork) {
            }
        }, tempFile)
        lines.forEach {
            encoder.onOutputChanged(it)
        }
        assertThat(produced).isNotEmpty()
    }

    // When out_time equals the clip duration, computed progress should be ~100%.
    @Test
    fun testThatProgressIsCalculated() {
        val encodeWork = EncodeWork(
            workId = UUID.randomUUID().toString(),
            collection = "Demo",
            inFile = "Demo.mkv",
            outFile = "FancyDemo.mp4",
            arguments = emptyList()
        )
        val decoder = ProgressDecoder(encodeWork)
        decoder.setDuration("Duration: 01:48:54.82,")
        assertThat(decoder.duration).isNotNull()
        val decodedProgressData = DecodedProgressData(
            frame = null,
            fps = null,
            stream_0_0_q = null,
            bitrate = null,
            total_size = null,
            out_time_ms = null,
            out_time_us = null,
            out_time = "01:48:54.82",
            dup_frames = null,
            drop_frames = null,
            speed = 1.0,
            progress = "Continue"
        )
        val progress = decoder.getProgress(decodedProgressData)
        assertThat(progress.progress).isGreaterThanOrEqualTo(99)
    }

    // A mid-encode out_time must never round down to a 0% ("none") progress.
    @Test
    fun testThatProgressIsNotNone() {
        val encodeWork = EncodeWork(
            workId = UUID.randomUUID().toString(),
            collection = "Demo",
            inFile = "Demo.mkv",
            outFile = "FancyDemo.mp4",
            arguments = emptyList()
        )
        val decoder = ProgressDecoder(encodeWork)
        decoder.setDuration("Duration: 01:48:54.82,")
        assertThat(decoder.duration).isNotNull()
        val decodedProgressData = DecodedProgressData(
            frame = null,
            fps = null,
            stream_0_0_q = null,
            bitrate = null,
            total_size = null,
            out_time_ms = null,
            out_time_us = null,
            out_time = "01:00:50.174667",
            dup_frames = null,
            drop_frames = null,
            speed = 1.0,
            progress = "Continue"
        )
        val progress = decoder.getProgress(decodedProgressData)
        assertThat(progress.progress).isGreaterThanOrEqualTo(1)
    }

    // Raw ffmpeg -progress output fixture spanning two records (continue + end).
    // NOTE(review): the first status line contains "time=00:x01:10.79" — looks like
    // a deliberately corrupted timestamp to exercise the no-throw path above; confirm.
    val text = """
frame=16811 fps= 88 q=40.0 size= 9984kB time=00:x01:10.79 bitrate=1155.3kbits/s speed=3.71x
fps=88.03
stream_0_0_q=40.0
bitrate=1155.3kbits/s
total_size=10223752
out_time_us=70798005
out_time_ms=70798005
out_time=00:01:10.798005
dup_frames=0
drop_frames=0
speed=3.71x
progress=continue
frame= 1710 fps= 84 q=-1.0 Lsize= 12124kB time=00:01:11.91 bitrate=1381.2kbits/s speed=3.53x
frame=1710
fps=84.01
stream_0_0_q=-1.0
bitrate=1381.2kbits/s
total_size=12415473
out_time_us=71910998
out_time_ms=71910998
out_time=00:01:11.910998
dup_frames=0
drop_frames=0
speed=3.53x
progress=end
""".trimIndent()
}

View File

@ -1,389 +0,0 @@
Guessed Channel Layout for Input Stream #0.1 : 5.1
Input #0, matroska,webm, from '/src/input/DemoFile.mkv':
Metadata:
CREATION_TIME : 2019-06-15T08:06:07Z
ENCODER : Lavf57.7.2
Duration: 01:48:54.82, start: 0.000000, bitrate: 2709 kb/s
Chapter #0:0: start 0.000000, end 328.537000
Metadata:
title : 00:00:00.000
Chapter #0:1: start 328.537000, end 419.044000
Metadata:
title : 00:05:28.537
Chapter #0:2: start 419.044000, end 916.874000
Metadata:
title : 00:06:59.044
Chapter #0:3: start 916.874000, end 1309.433000
Metadata:
title : 00:15:16.749
Chapter #0:4: start 1309.433000, end 1399.023000
Metadata:
title : 00:21:49.391
Chapter #0:5: start 1399.023000, end 1508.924000
Metadata:
title : 00:23:19.023
Chapter #0:6: start 1508.924000, end 1767.099000
Metadata:
title : 00:25:08.924
Chapter #0:7: start 1767.099000, end 1975.474000
Metadata:
title : 00:29:27.099
Chapter #0:8: start 1975.474000, end 2301.466000
Metadata:
title : 00:32:55.473
Chapter #0:9: start 2301.466000, end 2498.246000
Metadata:
title : 00:38:21.466
Chapter #0:10: start 2498.246000, end 2622.036000
Metadata:
title : 00:41:38.246
Chapter #0:11: start 2622.036000, end 2925.172000
Metadata:
title : 00:43:42.036
Chapter #0:12: start 2925.172000, end 3183.472000
Metadata:
title : 00:48:45.172
Chapter #0:13: start 3183.472000, end 3467.172000
Metadata:
title : 00:53:03.472
Chapter #0:14: start 3467.172000, end 3684.472000
Metadata:
title : 00:57:47.172
Chapter #0:15: start 3684.472000, end 3885.840000
Metadata:
title : 01:01:24.472
Chapter #0:16: start 3885.840000, end 4063.059000
Metadata:
title : 01:04:45.840
Chapter #0:17: start 4063.059000, end 4275.605000
Metadata:
title : 01:07:43.059
Chapter #0:18: start 4275.605000, end 4434.263000
Metadata:
title : 01:11:15.605
Chapter #0:19: start 4434.263000, end 4709.205000
Metadata:
title : 01:13:54.263
Chapter #0:20: start 4709.205000, end 4900.020000
Metadata:
title : 01:18:29.204
Chapter #0:21: start 4900.020000, end 5081.201000
Metadata:
title : 01:21:40.020
Chapter #0:22: start 5081.201000, end 5211.123000
Metadata:
title : 01:24:41.201
Chapter #0:23: start 5211.123000, end 5359.938000
Metadata:
title : 01:26:51.123
Chapter #0:24: start 5359.938000, end 5833.786000
Metadata:
title : 01:29:19.938
Chapter #0:25: start 5833.786000, end 5953.865000
Metadata:
title : 01:37:13.786
Chapter #0:26: start 5953.865000, end 6229.432000
Metadata:
title : 01:39:13.865
Chapter #0:27: start 6229.432000, end 6534.779000
Metadata:
title : 01:43:49.181
Stream #0:0: Video: h264 (High), yuv420p(tv, bt709, progressive), 1920x1080 [SAR 1:1 DAR 16:9], 23.98 fps, 23.98 tbr, 1k tbn, 47.95 tbc (default)
Stream #0:1(eng): Audio: ac3, 48000 Hz, 5.1, fltp (default)
Metadata:
title : Surround
Stream #0:2(jpn): Audio: ac3, 48000 Hz, 5.1(side), fltp, 640 kb/s
Metadata:
title : Surround
Stream #0:3(eng): Subtitle: ass (default) (forced)
Stream #0:4(eng): Subtitle: ass
Stream mapping:
Stream #0:0 -> #0:0 (h264 (native) -> hevc (libx265))
Stream #0:2 -> #0:1 (ac3 (native) -> eac3 (native))
x265 [info]: HEVC encoder version 3.4
x265 [info]: build info [Linux][GCC 9.3.0][64 bit] 8bit+10bit+12bit
x265 [info]: using cpu capabilities: MMX2 SSE2Fast LZCNT SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
x265 [info]: Main profile, Level-4 (Main tier)
x265 [info]: Thread pool created using 12 threads
x265 [info]: Slices : 1
x265 [info]: frame threads / pool features : 3 / wpp(17 rows)
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
set_mempolicy: Operation not permitted
x265 [info]: Coding QT: max CU size, min CU size : 64 / 8
x265 [info]: Residual QT: max TU size, max depth : 32 / 1 inter / 1 intra
x265 [info]: ME / range / subpel / merge : hex / 57 / 2 / 3
x265 [info]: Keyframe min / max / scenecut / bias : 23 / 250 / 40 / 5.00
x265 [info]: Lookahead / bframes / badapt : 20 / 4 / 2
x265 [info]: b-pyramid / weightp / weightb : 1 / 1 / 0
x265 [info]: References / ref-limit cu / depth : 3 / off / on
x265 [info]: AQ: mode / str / qg-size / cu-tree : 2 / 1.0 / 32 / 1
x265 [info]: Rate Control / qCompress : CRF-16.0 / 0.60
x265 [info]: tools: rd=3 psy-rd=2.00 early-skip rskip mode=1 signhide tmvp
x265 [info]: tools: b-intra strong-intra-smoothing lslices=6 deblock sao
Output #0, mp4, to '/src/output/Demo/Demo.mp4':
Metadata:
encoder : Lavf58.45.100
Chapter #0:0: start 0.000000, end 328.537000
Metadata:
title : 00:00:00.000
Chapter #0:1: start 328.537000, end 419.044000
Metadata:
title : 00:05:28.537
Chapter #0:2: start 419.044000, end 916.874000
Metadata:
title : 00:06:59.044
Chapter #0:3: start 916.874000, end 1309.433000
Metadata:
title : 00:15:16.749
Chapter #0:4: start 1309.433000, end 1399.023000
Metadata:
title : 00:21:49.391
Chapter #0:5: start 1399.023000, end 1508.924000
Metadata:
title : 00:23:19.023
Chapter #0:6: start 1508.924000, end 1767.099000
Metadata:
title : 00:25:08.924
Chapter #0:7: start 1767.099000, end 1975.474000
Metadata:
title : 00:29:27.099
Chapter #0:8: start 1975.474000, end 2301.466000
Metadata:
title : 00:32:55.473
Chapter #0:9: start 2301.466000, end 2498.246000
Metadata:
title : 00:38:21.466
Chapter #0:10: start 2498.246000, end 2622.036000
Metadata:
title : 00:41:38.246
Chapter #0:11: start 2622.036000, end 2925.172000
Metadata:
title : 00:43:42.036
Chapter #0:12: start 2925.172000, end 3183.472000
Metadata:
title : 00:48:45.172
Chapter #0:13: start 3183.472000, end 3467.172000
Metadata:
title : 00:53:03.472
Chapter #0:14: start 3467.172000, end 3684.472000
Metadata:
title : 00:57:47.172
Chapter #0:15: start 3684.472000, end 3885.840000
Metadata:
title : 01:01:24.472
Chapter #0:16: start 3885.840000, end 4063.059000
Metadata:
title : 01:04:45.840
Chapter #0:17: start 4063.059000, end 4275.605000
Metadata:
title : 01:07:43.059
Chapter #0:18: start 4275.605000, end 4434.263000
Metadata:
title : 01:11:15.605
Chapter #0:19: start 4434.263000, end 4709.205000
Metadata:
title : 01:13:54.263
Chapter #0:20: start 4709.205000, end 4900.020000
Metadata:
title : 01:18:29.204
Chapter #0:21: start 4900.020000, end 5081.201000
Metadata:
title : 01:21:40.020
Chapter #0:22: start 5081.201000, end 5211.123000
Metadata:
title : 01:24:41.201
Chapter #0:23: start 5211.123000, end 5359.938000
Metadata:
title : 01:26:51.123
Chapter #0:24: start 5359.938000, end 5833.786000
Metadata:
title : 01:29:19.938
Chapter #0:25: start 5833.786000, end 5953.865000
Metadata:
title : 01:37:13.786
Chapter #0:26: start 5953.865000, end 6229.432000
Metadata:
title : 01:39:13.865
Chapter #0:27: start 6229.432000, end 6534.779000
Metadata:
title : 01:43:49.181
Stream #0:0: Video: hevc (libx265) (hev1 / 0x31766568), yuv420p(progressive), 1920x1080 [SAR 1:1 DAR 16:9], q=-1--1, 23.98 fps, 24k tbn, 23.98 tbc (default)
Metadata:
encoder : Lavc58.91.100 libx265
Side data:
cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: N/A
Stream #0:1(jpn): Audio: eac3 (ec-3 / 0x332D6365), 48000 Hz, 5.1(side), fltp, 448 kb/s
Metadata:
title : Surround
encoder : Lavc58.91.100 eac3
frame= 49 fps=0.0 q=24.0 size= 1kB time=00:00:02.52 bitrate= 2.4kbits/s speed=4.85x
frame=49
fps=0.00
stream_0_0_q=24.0
bitrate= 2.4kbits/s
total_size=772
out_time_us=2526667
out_time_ms=2526667
out_time=00:00:02.526667
dup_frames=0
drop_frames=0
speed=4.85x
progress=continue
frame= 87 fps= 84 q=16.7 size= 1kB time=00:00:04.09 bitrate= 1.5kbits/s speed=3.96x
frame=87
fps=84.21
stream_0_0_q=16.7
bitrate= 1.5kbits/s
total_size=772
out_time_us=4094667
out_time_ms=4094667
out_time=00:00:04.094667
dup_frames=0
drop_frames=0
speed=3.96x
progress=continue
frame= 115 fps= 75 q=22.4 size= 257kB time=00:00:05.27 bitrate= 398.5kbits/s speed=3.44x
frame=115
fps=74.95
stream_0_0_q=22.4
bitrate= 398.5kbits/s
total_size=262916
out_time_us=5278667
out_time_ms=5278667
out_time=00:00:05.278667
dup_frames=0
drop_frames=0
speed=3.44x
progress=continue
frame= 146 fps= 72 q=22.6 size= 257kB time=00:00:06.55 bitrate= 320.7kbits/s speed=3.22x
frame=146
fps=71.64
stream_0_0_q=22.6
bitrate= 320.7kbits/s
total_size=262916
out_time_us=6558667
out_time_ms=6558667
out_time=00:00:06.558667
dup_frames=0
drop_frames=0
speed=3.22x
progress=continue
frame= 175 fps= 69 q=20.5 size= 513kB time=00:00:07.77 bitrate= 540.3kbits/s speed=3.06x
frame=175
fps=68.82
stream_0_0_q=20.5
bitrate= 540.3kbits/s
total_size=525060
out_time_us=7774667
out_time_ms=7774667
out_time=00:00:07.774667
dup_frames=0
drop_frames=0
speed=3.06x
progress=continue
frame= 204 fps= 67 q=21.1 size= 769kB time=00:00:08.99 bitrate= 700.5kbits/s speed=2.94x
frame=204
fps=66.66
stream_0_0_q=21.1
bitrate= 700.5kbits/s
total_size=787204
out_time_us=8990667
out_time_ms=8990667
out_time=00:00:08.990667
dup_frames=0
drop_frames=0
speed=2.94x
progress=continue
frame= 231 fps= 65 q=20.5 size= 1025kB time=00:00:10.11 bitrate= 830.3kbits/s speed=2.83x
frame=231
fps=64.66
stream_0_0_q=20.5
bitrate= 830.3kbits/s
total_size=1049348
out_time_us=10110667
out_time_ms=10110667
out_time=00:00:10.110667
dup_frames=0
drop_frames=0
speed=2.83x
progress=continue
frame= 268 fps= 65 q=20.7 size= 1025kB time=00:00:11.64 bitrate= 720.8kbits/s speed=2.84x
frame=268
fps=65.29
stream_0_0_q=20.7
bitrate= 720.8kbits/s
total_size=1049348
out_time_us=11646667
out_time_ms=11646667
out_time=00:00:11.646667
dup_frames=0
drop_frames=0
speed=2.84x
progress=continue
frame= 312 fps= 68 q=21.0 size= 1281kB time=00:00:13.47 bitrate= 778.9kbits/s speed=2.92x
frame=312
fps=67.67
stream_0_0_q=21.0
bitrate= 778.9kbits/s
total_size=1311492
out_time_us=13470667
out_time_ms=13470667
out_time=00:00:13.470667
dup_frames=0
drop_frames=0
speed=2.92x
progress=continue
frame= 353 fps= 69 q=19.9 size= 1281kB time=00:00:15.19 bitrate= 690.3kbits/s speed=2.97x
frame=353
fps=68.97
stream_0_0_q=19.9
bitrate= 690.3kbits/s
total_size=1311492
out_time_us=15198667
out_time_ms=15198667
out_time=00:00:15.198667
dup_frames=0
drop_frames=0
speed=2.97x
progress=continue
frame= 372 fps= 66 q=17.9 size= 1537kB time=00:00:15.99 bitrate= 786.9kbits/s speed=2.84x
frame=372
fps=66.01
stream_0_0_q=17.9
bitrate= 786.9kbits/s
total_size=1573636
out_time_us=15998667
out_time_ms=15998667
out_time=00:00:15.998667
dup_frames=0
drop_frames=0
speed=2.84x
progress=continue

42
Reader/.gitignore vendored
View File

@ -1,42 +0,0 @@
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@ -1,4 +0,0 @@
# Runtime image for the Reader service: Azul Java 17 plus ffmpeg tooling.
FROM bskjon/debian-azuljava17-ffmpeg:latest
EXPOSE 8080
COPY ./build/libs/reader.jar /usr/share/app/app.jar
# Bugfix: the image copied the jar but never declared how to run it, so
# `docker run` had nothing to execute.
# NOTE(review): assumes the base image does not already define an ENTRYPOINT — confirm.
ENTRYPOINT ["java", "-jar", "/usr/share/app/app.jar"]

View File

@ -1,79 +0,0 @@
import org.jetbrains.kotlin.gradle.plugin.mpp.pm20.util.archivesName
// Gradle build for the Reader module (Spring Boot + Kafka + Exposed/MySQL).
plugins {
    // NOTE(review): kotlin("jvm") 1.8.21 vs kotlin("plugin.spring") 1.5.31 is a
    // version mismatch — these should normally be aligned to the same Kotlin release.
    kotlin("jvm") version "1.8.21"
    // NOTE(review): boot plugin 2.5.5 vs spring-boot-starter 2.7.0 and
    // starter-websocket 2.6.3 below — three different Boot lines in one build; align.
    id("org.springframework.boot") version "2.5.5"
    id("io.spring.dependency-management") version "1.0.11.RELEASE"
    kotlin("plugin.spring") version "1.5.31"
}
archivesName.set("reader.jar")
group = "no.iktdev.streamit.content"
version = "1.0-SNAPSHOT"
repositories {
    mavenCentral()
    maven("https://jitpack.io")
    // Private Reposilite instances for the streamit libraries below.
    maven {
        url = uri("https://reposilite.iktdev.no/releases")
    }
    maven {
        url = uri("https://reposilite.iktdev.no/snapshots")
    }
}
val exposedVersion = "0.38.2"
dependencies {
    implementation("no.iktdev.streamit.library:streamit-library-kafka:0.0.2-alpha84")
    implementation("no.iktdev:exfl:0.0.13-SNAPSHOT")
    implementation("no.iktdev.streamit.library:streamit-library-db:0.0.6-alpha14")
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
    implementation("org.jetbrains.exposed:exposed-core:$exposedVersion")
    implementation("org.jetbrains.exposed:exposed-dao:$exposedVersion")
    implementation("org.jetbrains.exposed:exposed-jdbc:$exposedVersion")
    implementation("org.jetbrains.exposed:exposed-java-time:$exposedVersion")
    implementation ("mysql:mysql-connector-java:8.0.29")
    implementation("com.github.pgreze:kotlin-process:1.3.1")
    implementation("com.github.vishna:watchservice-ktx:master-SNAPSHOT")
    implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
    implementation("com.google.code.gson:gson:2.8.9")
    implementation("org.json:json:20210307")
    implementation("org.springframework.boot:spring-boot-starter-web")
    implementation("org.springframework.boot:spring-boot-starter:2.7.0")
    implementation("org.springframework.kafka:spring-kafka:2.8.5")
    implementation("org.springframework.boot:spring-boot-starter-websocket:2.6.3")
    implementation(project(":CommonCode"))
    testImplementation("junit:junit:4.13.2")
    testImplementation("org.junit.jupiter:junit-jupiter")
    testImplementation("org.junit.jupiter:junit-jupiter-api:5.8.1")
    testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.1")
    // NOTE(review): assertj-core 3.4.1 dates from 2016 — presumably 3.24.x was
    // intended; verify before bumping.
    testImplementation("org.assertj:assertj-core:3.4.1")
    testImplementation("org.mockito:mockito-core:3.+")
}
tasks.test {
    useJUnitPlatform()
}
tasks.bootJar {
    // Boot jar keeps a stable name so the Dockerfile COPY path never changes.
    archiveFileName.set("reader.jar")
    launchScript()
}
tasks.jar {
    archivesName.set("reader.jar")
    archiveBaseName.set("reader")
}

Binary file not shown.

View File

@ -1,6 +0,0 @@
#Tue Jul 11 02:16:45 CEST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

234
Reader/gradlew vendored
View File

@@ -1,234 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
# NOTE: the `while` condition is a compound command — the APP_HOME assignment
# runs on every pass, and the trailing `[ -h ... ]` test decides whether the
# path is still a symlink that needs another round of resolution.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
# Parse the link target out of `ls -ld` output — portable to systems
# without readlink(1).
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;; # relative target: resolve against link's dir
esac
done
# Canonicalize: `pwd -P` resolves any remaining symlinks in the directory path.
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
# Name this script was invoked as (used for -Dorg.gradle.appname below).
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
# Print a warning message on stderr, leaving stdout untouched.
warn () {
    echo "$*" >&2
}
# Print a message on stderr, framed by blank lines, then abort with status 1.
die () {
    printf '\n%s\n\n' "$*" >&2
    exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
# Classify the host OS once; these flags steer the path-conversion and
# file-descriptor-limit logic below.
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
# Only the wrapper jar is needed on the classpath; it bootstraps the rest.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
# Honors JAVA_HOME when set; otherwise falls back to `java` on the PATH.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD=$JAVA_HOME/jre/sh/java
    else
        JAVACMD=$JAVA_HOME/bin/java
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD=java
    # `command -v` replaces the non-portable external `which`: it is a POSIX
    # shell builtin, always present, and reports exactly what this shell can run.
    command -v java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
# Skipped on Cygwin/Darwin/NonStop, where ulimit handling differs or is fixed.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# MAX_FD=maximum (the default above): query the hard limit.
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# Raise the soft limit to the requested/queried value; warn, never fail.
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# Each positional parameter is examined, possibly converted, and pushed back
# onto the end of "$@", so the list ends up in its original order.
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
# The trailing '"$@"' is single-quoted so it survives until eval re-parses the
# string, where it expands to the arguments assembled by the `set --` above.
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
# Hand off to the JVM; exec replaces this shell, so the exit status is java's.
exec "$JAVACMD" "$@"

Some files were not shown because too many files have changed in this diff Show More