v2 init
This commit is contained in:
parent
843acfaa89
commit
1b83bec7c0
250
.github/workflows/main.yml
vendored
250
.github/workflows/main.yml
vendored
@ -1,250 +0,0 @@
|
||||
name: Build Modules
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
pre-check:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
pyMetadata: ${{ steps.filter.outputs.pyMetadata }}
|
||||
commonCode: ${{ steps.filter.outputs.commonCode }}
|
||||
reader: ${{ steps.filter.outputs.reader }}
|
||||
encode: ${{ steps.filter.outputs.encode }}
|
||||
convert: ${{ steps.filter.outputs.convert }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
pyMetadata:
|
||||
- 'pyMetadata/**'
|
||||
reader:
|
||||
- 'Reader/**'
|
||||
encode:
|
||||
- 'Encode/**'
|
||||
convert:
|
||||
- 'Convert/**'
|
||||
commonCode:
|
||||
- 'CommonCode/**'
|
||||
# Step to print the outputs from "pre-check" job
|
||||
- name: Print Outputs from pre-check job
|
||||
run: |
|
||||
echo "pyMetadata: ${{ needs.pre-check.outputs.pyMetadata }}"
|
||||
echo "commonCode: ${{ needs.pre-check.outputs.commonCode }}"
|
||||
echo "reader: ${{ needs.pre-check.outputs.reader }}"
|
||||
echo "encode: ${{ needs.pre-check.outputs.encode }}"
|
||||
echo "convert: ${{ needs.pre-check.outputs.convert }}"
|
||||
|
||||
build-commoncode:
|
||||
runs-on: ubuntu-latest
|
||||
needs: pre-check
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Cache CommonCode Gradle dependencies
|
||||
id: cache-gradle
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.gradle/caches
|
||||
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
|
||||
|
||||
- name: Build CommonCode
|
||||
if: steps.cache-gradle.outputs.cache-hit != 'true' || needs.pre-check.outputs.commonCode == 'true' || github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
cd CommonCode
|
||||
chmod +x ./gradlew
|
||||
./gradlew build
|
||||
|
||||
build-encode:
|
||||
needs: build-commoncode
|
||||
if: ${{ needs.pre-check.outputs.encode == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.commonCode == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Cache CommonCode Gradle dependencies
|
||||
id: cache-gradle
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.gradle/caches
|
||||
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
|
||||
|
||||
|
||||
- name: Build Encode module
|
||||
id: build-encode
|
||||
run: |
|
||||
cd Encode
|
||||
chmod +x ./gradlew
|
||||
./gradlew build
|
||||
echo "Build completed"
|
||||
|
||||
|
||||
- name: Generate Docker image tag
|
||||
id: docker-tag
|
||||
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
|
||||
|
||||
- name: Docker login
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_NAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: ./Encode
|
||||
push: true
|
||||
tags: |
|
||||
bskjon/mediaprocessing-encoder:latest
|
||||
bskjon/mediaprocessing-encoder:${{ github.sha }}
|
||||
bskjon/mediaprocessing-encoder:${{ steps.docker-tag.outputs.tag }}
|
||||
|
||||
build-reader:
|
||||
needs: build-commoncode
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ needs.pre-check.outputs.reader == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.commonCode == 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Cache CommonCode Gradle dependencies
|
||||
id: cache-gradle
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.gradle/caches
|
||||
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
|
||||
|
||||
- name: Build Reader module
|
||||
id: build-reader
|
||||
run: |
|
||||
cd Reader
|
||||
chmod +x ./gradlew
|
||||
./gradlew build
|
||||
echo "Build completed"
|
||||
|
||||
- name: Generate Docker image tag
|
||||
id: docker-tag
|
||||
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
|
||||
|
||||
- name: Docker login
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_NAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: ./Reader
|
||||
push: true
|
||||
tags: |
|
||||
bskjon/mediaprocessing-reader:latest
|
||||
bskjon/mediaprocessing-reader:${{ github.sha }}
|
||||
bskjon/mediaprocessing-reader:${{ steps.docker-tag.outputs.tag }}
|
||||
|
||||
|
||||
|
||||
build-pymetadata:
|
||||
needs: pre-check
|
||||
if: ${{ needs.pre-check.outputs.pyMetadata == 'true' || github.event_name == 'workflow_dispatch' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Build pyMetadata module
|
||||
id: build-pymetadata
|
||||
run: |
|
||||
if [[ "${{ steps.check-pymetadata.outputs.changed }}" == "true" || "${{ github.event_name }}" == "push" || "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
cd pyMetadata
|
||||
# Add the necessary build steps for your Python module here
|
||||
echo "Build completed"
|
||||
else
|
||||
echo "pyMetadata has not changed. Skipping pyMetadata module build."
|
||||
echo "::set-output name=job_skipped::true"
|
||||
fi
|
||||
|
||||
- name: Generate Docker image tag
|
||||
id: docker-tag
|
||||
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
|
||||
|
||||
- name: Docker login
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_NAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: ./pyMetadata
|
||||
push: true
|
||||
tags: |
|
||||
bskjon/mediaprocessing-pymetadata:latest
|
||||
bskjon/mediaprocessing-pymetadata:${{ github.sha }}
|
||||
bskjon/mediaprocessing-pymetadata:${{ steps.docker-tag.outputs.tag }}
|
||||
|
||||
|
||||
build-convert:
|
||||
needs: build-commoncode
|
||||
if: ${{ needs.pre-check.outputs.convert == 'true' || github.event_name == 'workflow_dispatch' || needs.pre-check.outputs.commonCode == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
#if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Cache CommonCode Gradle dependencies
|
||||
id: cache-gradle
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.gradle/caches
|
||||
key: ${{ runner.os }}-gradle-${{ hashFiles('CommonCode/gradle/wrapper/gradle-wrapper.properties') }}
|
||||
|
||||
|
||||
- name: Build Convert module
|
||||
id: build-convert
|
||||
run: |
|
||||
cd Convert
|
||||
chmod +x ./gradlew
|
||||
./gradlew build
|
||||
echo "Build completed"
|
||||
|
||||
|
||||
- name: Generate Docker image tag
|
||||
id: docker-tag
|
||||
run: echo "::set-output name=tag::$(date -u +'%Y.%m.%d')-$(uuidgen | cut -c 1-8)"
|
||||
|
||||
- name: Docker login
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_NAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: ./Convert
|
||||
push: true
|
||||
tags: |
|
||||
bskjon/mediaprocessing-converter:latest
|
||||
bskjon/mediaprocessing-converter:${{ github.sha }}
|
||||
bskjon/mediaprocessing-converter:${{ steps.docker-tag.outputs.tag }}
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@ -5,8 +5,6 @@ build/
|
||||
!**/src/test/**/build/
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea
|
||||
**/.idea/*
|
||||
.idea/modules.xml
|
||||
.idea/jarRepositories.xml
|
||||
.idea/compiler.xml
|
||||
|
||||
3
.idea/.gitignore
generated
vendored
Normal file
3
.idea/.gitignore
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
27
.idea/gradle.xml
generated
Normal file
27
.idea/gradle.xml
generated
Normal file
@ -0,0 +1,27 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="GradleMigrationSettings" migrationVersion="1" />
|
||||
<component name="GradleSettings">
|
||||
<option name="linkedExternalProjectsSettings">
|
||||
<GradleProjectSettings>
|
||||
<option name="externalProjectPath" value="$PROJECT_DIR$" />
|
||||
<option name="gradleHome" value="" />
|
||||
<option name="modules">
|
||||
<set>
|
||||
<option value="$PROJECT_DIR$" />
|
||||
<option value="$PROJECT_DIR$/apps" />
|
||||
<option value="$PROJECT_DIR$/apps/converter" />
|
||||
<option value="$PROJECT_DIR$/apps/coordinator" />
|
||||
<option value="$PROJECT_DIR$/apps/encoder" />
|
||||
<option value="$PROJECT_DIR$/apps/processer" />
|
||||
<option value="$PROJECT_DIR$/apps/ui" />
|
||||
<option value="$PROJECT_DIR$/shared" />
|
||||
<option value="$PROJECT_DIR$/shared/common" />
|
||||
<option value="$PROJECT_DIR$/shared/contract" />
|
||||
<option value="$PROJECT_DIR$/shared/kafka" />
|
||||
</set>
|
||||
</option>
|
||||
</GradleProjectSettings>
|
||||
</option>
|
||||
</component>
|
||||
</project>
|
||||
6
.idea/kotlinc.xml
generated
Normal file
6
.idea/kotlinc.xml
generated
Normal file
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="KotlinJpsPluginSettings">
|
||||
<option name="version" value="1.9.20" />
|
||||
</component>
|
||||
</project>
|
||||
12
.idea/misc.xml
generated
Normal file
12
.idea/misc.xml
generated
Normal file
@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="EntryPointsManager">
|
||||
<list size="1">
|
||||
<item index="0" class="java.lang.String" itemvalue="org.springframework.scheduling.annotation.Scheduled" />
|
||||
</list>
|
||||
</component>
|
||||
<component name="ExternalStorageConfigurationManager" enabled="true" />
|
||||
<component name="ProjectRootManager" version="2" languageLevel="JDK_17" default="true" project-jdk-name="17" project-jdk-type="JavaSDK">
|
||||
<output url="file://$PROJECT_DIR$/out" />
|
||||
</component>
|
||||
</project>
|
||||
124
.idea/uiDesigner.xml
generated
Normal file
124
.idea/uiDesigner.xml
generated
Normal file
@ -0,0 +1,124 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Palette2">
|
||||
<group name="Swing">
|
||||
<item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
|
||||
</item>
|
||||
<item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
|
||||
</item>
|
||||
<item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
|
||||
</item>
|
||||
<item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.svg" removable="false" auto-create-binding="false" can-attach-label="true">
|
||||
<default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
|
||||
</item>
|
||||
<item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
|
||||
<initial-values>
|
||||
<property name="text" value="Button" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
|
||||
<initial-values>
|
||||
<property name="text" value="RadioButton" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
|
||||
<initial-values>
|
||||
<property name="text" value="CheckBox" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
|
||||
<initial-values>
|
||||
<property name="text" value="Label" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
|
||||
<preferred-size width="150" height="-1" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
|
||||
<preferred-size width="150" height="-1" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
|
||||
<preferred-size width="150" height="-1" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
|
||||
<preferred-size width="200" height="200" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
|
||||
<preferred-size width="200" height="200" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.svg" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
|
||||
</item>
|
||||
<item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
|
||||
<preferred-size width="-1" height="20" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.svg" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.svg" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
|
||||
</item>
|
||||
</group>
|
||||
</component>
|
||||
</project>
|
||||
6
.idea/vcs.xml
generated
Normal file
6
.idea/vcs.xml
generated
Normal file
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="" vcs="Git" />
|
||||
</component>
|
||||
</project>
|
||||
42
CommonCode/.gitignore
vendored
42
CommonCode/.gitignore
vendored
@ -1,42 +0,0 @@
|
||||
.gradle
|
||||
build/
|
||||
!gradle/wrapper/gradle-wrapper.jar
|
||||
!**/src/main/**/build/
|
||||
!**/src/test/**/build/
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea/modules.xml
|
||||
.idea/jarRepositories.xml
|
||||
.idea/compiler.xml
|
||||
.idea/libraries/
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
out/
|
||||
!**/src/main/**/out/
|
||||
!**/src/test/**/out/
|
||||
|
||||
### Eclipse ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
.sts4-cache
|
||||
bin/
|
||||
!**/src/main/**/bin/
|
||||
!**/src/test/**/bin/
|
||||
|
||||
### NetBeans ###
|
||||
/nbproject/private/
|
||||
/nbbuild/
|
||||
/dist/
|
||||
/nbdist/
|
||||
/.nb-gradle/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
||||
|
||||
### Mac OS ###
|
||||
.DS_Store
|
||||
@ -1,6 +0,0 @@
|
||||
#Sat Jul 15 17:55:49 CEST 2023
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
@ -1,2 +0,0 @@
|
||||
rootProject.name = "CommonCode"
|
||||
|
||||
@ -1,9 +0,0 @@
|
||||
package no.iktdev.streamit.content.common
|
||||
|
||||
import java.io.File
|
||||
|
||||
object CommonConfig {
|
||||
var kafkaTopic: String = System.getenv("KAFKA_TOPIC") ?: "contentEvents"
|
||||
var incomingContent: File = if (!System.getenv("DIRECTORY_CONTENT_INCOMING").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_INCOMING")) else File("/src/input")
|
||||
val outgoingContent: File = if (!System.getenv("DIRECTORY_CONTENT_OUTGOING").isNullOrBlank()) File(System.getenv("DIRECTORY_CONTENT_OUTGOING")) else File("/src/output")
|
||||
}
|
||||
@ -1,53 +0,0 @@
|
||||
package no.iktdev.streamit.content.common
|
||||
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
|
||||
import java.util.*
|
||||
|
||||
abstract class DefaultKafkaReader(val subId: String = UUID.randomUUID().toString()) {
|
||||
val messageProducer = DefaultProducer(CommonConfig.kafkaTopic)
|
||||
val defaultConsumer = DefaultConsumer(subId = subId)
|
||||
|
||||
open fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
|
||||
return emptyMap()
|
||||
}
|
||||
|
||||
fun produceErrorMessage(event: KafkaEvents, baseMessage: Message, reason: String) {
|
||||
val message = Message(
|
||||
referenceId = baseMessage.referenceId,
|
||||
Status(statusType = StatusType.ERROR, message = reason)
|
||||
)
|
||||
messageProducer.sendMessage(event.event, message)
|
||||
}
|
||||
|
||||
fun produceErrorMessage(event: KafkaEvents, referenceId: String, reason: String) {
|
||||
val message = Message(
|
||||
referenceId = referenceId,
|
||||
Status(statusType = StatusType.ERROR, message = reason)
|
||||
)
|
||||
messageProducer.sendMessage(event.event, message)
|
||||
}
|
||||
|
||||
fun produceMessage(event: KafkaEvents, baseMessage: Message, data: Any?) {
|
||||
val message = Message(
|
||||
referenceId = baseMessage.referenceId,
|
||||
baseMessage.status,
|
||||
data = data
|
||||
)
|
||||
messageProducer.sendMessage(event.event, message)
|
||||
}
|
||||
fun produceSuccessMessage(event: KafkaEvents, referenceId: String, data: Any? = null) {
|
||||
val message = Message(
|
||||
referenceId = referenceId,
|
||||
status = Status(StatusType.SUCCESS),
|
||||
data = data
|
||||
)
|
||||
messageProducer.sendMessage(event.event, message)
|
||||
}
|
||||
}
|
||||
@ -1,25 +0,0 @@
|
||||
package no.iktdev.streamit.content.common
|
||||
|
||||
import mu.KotlinLogging
|
||||
import java.io.File
|
||||
import java.io.RandomAccessFile
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
class FileAccess {
|
||||
companion object {
|
||||
fun isFileAvailable(file: File): Boolean {
|
||||
if (!file.exists()) return false
|
||||
var stream: RandomAccessFile? = null
|
||||
try {
|
||||
stream = RandomAccessFile(file, "rw")
|
||||
stream.close()
|
||||
logger.info { "File ${file.name} is read and writable" }
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
stream?.close()
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,20 +0,0 @@
|
||||
package no.iktdev.streamit.content.common
|
||||
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
import no.iktdev.streamit.library.kafka.listener.sequential.ISequentialMessageEvent
|
||||
import no.iktdev.streamit.library.kafka.listener.sequential.SequentialMessageListener
|
||||
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
|
||||
|
||||
abstract class SequentialKafkaReader(subId: String): DefaultKafkaReader(subId), ISequentialMessageEvent {
|
||||
|
||||
abstract val accept: KafkaEvents
|
||||
abstract val subAccepts: List<KafkaEvents>
|
||||
|
||||
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.ContentOutName
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class ContentOutNameDeserializer: IMessageDataDeserialization<ContentOutName> {
|
||||
override fun deserialize(incomingMessage: Message): ContentOutName? {
|
||||
return incomingMessage.dataAs(ContentOutName::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,13 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class ConvertWorkDeserializer: IMessageDataDeserialization<ConvertWork> {
|
||||
override fun deserialize(incomingMessage: Message): ConvertWork? {
|
||||
return incomingMessage.dataAs(ConvertWork::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,52 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class DeserializerRegistry {
|
||||
companion object {
|
||||
private val _registry = mutableMapOf<KafkaEvents, IMessageDataDeserialization<*>>(
|
||||
KafkaEvents.EVENT_READER_RECEIVED_FILE to FileResultDeserializer(),
|
||||
KafkaEvents.EVENT_READER_RECEIVED_STREAMS to MediaStreamsDeserializer(),
|
||||
KafkaEvents.EVENT_METADATA_OBTAINED to MetadataResultDeserializer(),
|
||||
KafkaEvents.EVENT_READER_DETERMINED_SERIE to EpisodeInfoDeserializer(),
|
||||
KafkaEvents.EVENT_READER_DETERMINED_MOVIE to MovieInfoDeserializer(),
|
||||
KafkaEvents.EVENT_READER_DETERMINED_FILENAME to ContentOutNameDeserializer(),
|
||||
|
||||
KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO to EncodeWorkDeserializer(),
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED to EncodeWorkDeserializer(),
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_STARTED to EncodeWorkDeserializer(),
|
||||
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED to EncodeWorkDeserializer(),
|
||||
KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE to ExtractWorkDeserializer(),
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED to ExtractWorkDeserializer(),
|
||||
KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED to ConvertWorkDeserializer()
|
||||
|
||||
)
|
||||
fun getRegistry(): Map<KafkaEvents, IMessageDataDeserialization<*>> = _registry.toMap()
|
||||
fun getEventToDeserializer(vararg keys: KafkaEvents): Map<String, IMessageDataDeserialization<*>> {
|
||||
val missingFields = keys.filter { !getRegistry().keys.contains(it) }
|
||||
|
||||
if (missingFields.isNotEmpty()) {
|
||||
throw MissingDeserializerException("Missing deserializers for: ${missingFields.joinToString(", ")}")
|
||||
}
|
||||
return getRegistry().filter { keys.contains(it.key) }.map { it.key.event to it.value }.toMap()
|
||||
}
|
||||
|
||||
private fun toEvent(event: String): KafkaEvents? {
|
||||
return KafkaEvents.values().find { it.event == event }
|
||||
}
|
||||
|
||||
fun getDeserializerForEvent(event: String): IMessageDataDeserialization<*>? {
|
||||
val deszEvent = toEvent(event) ?: return null
|
||||
return getEventToDeserializer(deszEvent).values.first()
|
||||
}
|
||||
|
||||
fun addDeserializer(key: KafkaEvents, deserializer: IMessageDataDeserialization<*>) {
|
||||
_registry[key] = deserializer
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
class MissingDeserializerException(override val message: String): RuntimeException()
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class EncodeWorkDeserializer: IMessageDataDeserialization<EncodeWork> {
|
||||
override fun deserialize(incomingMessage: Message): EncodeWork? {
|
||||
return incomingMessage.dataAs(EncodeWork::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.EpisodeInfo
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class EpisodeInfoDeserializer: IMessageDataDeserialization<EpisodeInfo> {
|
||||
override fun deserialize(incomingMessage: Message): EpisodeInfo? {
|
||||
return incomingMessage.dataAs(EpisodeInfo::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,12 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class ExtractWorkDeserializer: IMessageDataDeserialization<ExtractWork> {
|
||||
override fun deserialize(incomingMessage: Message): ExtractWork? {
|
||||
return incomingMessage.dataAs(ExtractWork::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,13 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.FileResult
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class FileResultDeserializer: IMessageDataDeserialization<FileResult> {
|
||||
override fun deserialize(incomingMessage: Message): FileResult? {
|
||||
return incomingMessage.dataAs(FileResult::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,47 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import com.google.gson.Gson
|
||||
import com.google.gson.JsonObject
|
||||
import no.iktdev.streamit.content.common.streams.AudioStream
|
||||
import no.iktdev.streamit.content.common.streams.MediaStreams
|
||||
import no.iktdev.streamit.content.common.streams.SubtitleStream
|
||||
import no.iktdev.streamit.content.common.streams.VideoStream
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class MediaStreamsDeserializer: IMessageDataDeserialization<MediaStreams> {
|
||||
override fun deserialize(incomingMessage: Message): MediaStreams? {
|
||||
return try {
|
||||
val gson = Gson()
|
||||
val jsonObject = if (incomingMessage.data is String) {
|
||||
gson.fromJson(incomingMessage.data as String, JsonObject::class.java)
|
||||
} else {
|
||||
gson.fromJson(incomingMessage.dataAsJson(), JsonObject::class.java)
|
||||
}
|
||||
|
||||
val streamsJsonArray = jsonObject.getAsJsonArray("streams")
|
||||
|
||||
val rstreams = streamsJsonArray.mapNotNull { streamJson ->
|
||||
val streamObject = streamJson.asJsonObject
|
||||
|
||||
val codecType = streamObject.get("codec_type").asString
|
||||
if (streamObject.has("codec_name") && streamObject.get("codec_name").asString == "mjpeg") {
|
||||
null
|
||||
} else {
|
||||
when (codecType) {
|
||||
"video" -> gson.fromJson(streamObject, VideoStream::class.java)
|
||||
"audio" -> gson.fromJson(streamObject, AudioStream::class.java)
|
||||
"subtitle" -> gson.fromJson(streamObject, SubtitleStream::class.java)
|
||||
else -> null //throw IllegalArgumentException("Unknown stream type: $codecType")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return MediaStreams(rstreams)
|
||||
} catch (e: Exception) {
|
||||
e.printStackTrace()
|
||||
null
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.Metadata
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class MetadataResultDeserializer: IMessageDataDeserialization<Metadata> {
|
||||
override fun deserialize(incomingMessage: Message): Metadata? {
|
||||
return incomingMessage.dataAs(Metadata::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.deserializers
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.MovieInfo
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
|
||||
class MovieInfoDeserializer: IMessageDataDeserialization<MovieInfo> {
|
||||
override fun deserialize(incomingMessage: Message): MovieInfo? {
|
||||
return incomingMessage.dataAs(MovieInfo::class.java)
|
||||
}
|
||||
}
|
||||
@ -1,5 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto
|
||||
|
||||
data class ContentOutName(
|
||||
val baseName: String
|
||||
)
|
||||
@ -1,10 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto
|
||||
|
||||
data class Metadata(
|
||||
val title: String,
|
||||
val altTitle: List<String> = emptyList(),
|
||||
val cover: String? = null,
|
||||
val type: String,
|
||||
val summary: String? = null,
|
||||
val genres: List<String> = emptyList()
|
||||
)
|
||||
@ -1,19 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto
|
||||
|
||||
data class WorkOrderItem(
|
||||
val id: String,
|
||||
val inputFile: String,
|
||||
val outputFile: String,
|
||||
val collection: String,
|
||||
val state: State,
|
||||
val progress: Int = 0,
|
||||
val remainingTime: Long? = null
|
||||
)
|
||||
|
||||
enum class State {
|
||||
QUEUED,
|
||||
STARTED,
|
||||
UPDATED,
|
||||
FAILURE,
|
||||
ENDED
|
||||
}
|
||||
@ -1,9 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader
|
||||
|
||||
data class EpisodeInfo(
|
||||
val title: String,
|
||||
val episode: Int,
|
||||
val season: Int,
|
||||
val episodeTitle: String?,
|
||||
override val fullName: String
|
||||
): VideoInfo(fullName)
|
||||
@ -1,7 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader
|
||||
|
||||
data class FileResult(
|
||||
val file: String,
|
||||
val title: String = "",
|
||||
val sanitizedName: String = ""
|
||||
)
|
||||
@ -1,6 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader
|
||||
|
||||
data class MovieInfo(
|
||||
val title: String,
|
||||
override val fullName: String
|
||||
) : VideoInfo(fullName)
|
||||
@ -1,9 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader
|
||||
|
||||
import java.io.File
|
||||
|
||||
data class SubtitleInfo(
|
||||
val inputFile: String,
|
||||
val collection: String,
|
||||
val language: String
|
||||
)
|
||||
@ -1,5 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader
|
||||
|
||||
abstract class VideoInfo(
|
||||
@Transient open val fullName: String
|
||||
)
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader.work
|
||||
|
||||
import java.util.*
|
||||
|
||||
data class ConvertWork(
|
||||
val workId: String = UUID.randomUUID().toString(),
|
||||
val collection: String,
|
||||
val language: String,
|
||||
val inFile: String,
|
||||
val outFiles: List<String>
|
||||
)
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader.work
|
||||
|
||||
import java.util.*
|
||||
|
||||
data class EncodeWork(
|
||||
override val workId: String = UUID.randomUUID().toString(),
|
||||
override val collection: String,
|
||||
override val inFile: String,
|
||||
override val outFile: String,
|
||||
val arguments: List<String>
|
||||
) : WorkBase(collection = collection, inFile = inFile, outFile = outFile)
|
||||
@ -1,13 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader.work
|
||||
|
||||
import java.util.*
|
||||
|
||||
data class ExtractWork(
|
||||
override val workId: String = UUID.randomUUID().toString(),
|
||||
override val collection: String,
|
||||
val language: String,
|
||||
override val inFile: String,
|
||||
val arguments: List<String>,
|
||||
override val outFile: String,
|
||||
var produceConvertEvent: Boolean = true
|
||||
) : WorkBase(collection = collection, inFile = inFile, outFile = outFile)
|
||||
@ -1,10 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.dto.reader.work
|
||||
|
||||
import java.util.UUID
|
||||
|
||||
abstract class WorkBase(
|
||||
@Transient open val workId: String = UUID.randomUUID().toString(),
|
||||
@Transient open val collection: String,
|
||||
@Transient open val inFile: String,
|
||||
@Transient open val outFile: String
|
||||
)
|
||||
@ -1,47 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.streams
|
||||
|
||||
class SubtitleStreamSelector(val streams: List<SubtitleStream>) {
|
||||
|
||||
fun getCandidateForConversion(): List<SubtitleStream> {
|
||||
val languageGrouped = getDesiredStreams().groupBy { it.tags.language ?: "eng" }
|
||||
val priority = listOf("subrip", "srt", "webvtt", "vtt", "ass")
|
||||
|
||||
val result = mutableListOf<SubtitleStream>()
|
||||
for ((language, streams) in languageGrouped) {
|
||||
val selectedStream = streams.firstOrNull { it.codec_name in priority }
|
||||
if (selectedStream != null) {
|
||||
result.add(selectedStream)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
fun getDesiredStreams(): List<SubtitleStream> {
|
||||
val desiredTypes = listOf(SubtitleType.DEFAULT, SubtitleType.CC, SubtitleType.SHD)
|
||||
val typeGuesser = SubtitleTypeGuesser()
|
||||
val codecFiltered = streams.filter { getFormatToCodec(it.codec_name) != null }
|
||||
|
||||
val mappedToType = codecFiltered.map { typeGuesser.guessType(it) to it }.filter { it.first in desiredTypes }
|
||||
.groupBy { it.second.tags.language ?: "eng" }
|
||||
.mapValues { entry ->
|
||||
val languageStreams = entry.value
|
||||
val sortedStreams = languageStreams.sortedBy { desiredTypes.indexOf(it.first) }
|
||||
sortedStreams.firstOrNull()?.second
|
||||
}.mapNotNull { it.value }
|
||||
|
||||
|
||||
return mappedToType
|
||||
}
|
||||
|
||||
|
||||
fun getFormatToCodec(codecName: String): String? {
|
||||
return when(codecName) {
|
||||
"ass" -> "ass"
|
||||
"subrip" -> "srt"
|
||||
"webvtt", "vtt" -> "vtt"
|
||||
"smi" -> "smi"
|
||||
"hdmv_pgs_subtitle" -> null
|
||||
else -> null
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,56 +0,0 @@
|
||||
package no.iktdev.streamit.content.common.streams
|
||||
|
||||
/**
|
||||
* @property SHD is Hard of hearing
|
||||
* @property CC is Closed-Captions
|
||||
* @property NON_DIALOGUE is for Signs or Song (as in lyrics)
|
||||
* @property DEFAULT is default subtitle as dialog
|
||||
*/
|
||||
enum class SubtitleType {
|
||||
SHD,
|
||||
CC,
|
||||
NON_DIALOGUE,
|
||||
DEFAULT
|
||||
}
|
||||
|
||||
class SubtitleTypeGuesser {
|
||||
fun guessType(subtitle: SubtitleStream): SubtitleType {
|
||||
if (subtitle.tags != null && subtitle.tags.title?.isBlank() == false) {
|
||||
val title = subtitle.tags.title!!
|
||||
if (title.lowercase().contains("song")
|
||||
|| title.lowercase().contains("songs")
|
||||
|| title.lowercase().contains("sign")
|
||||
|| title.lowercase().contains("signs")
|
||||
) {
|
||||
return SubtitleType.NON_DIALOGUE
|
||||
}
|
||||
if (getSubtitleType(title, listOf("cc", "closed caption"),
|
||||
SubtitleType.CC
|
||||
) == SubtitleType.CC
|
||||
) return SubtitleType.CC
|
||||
if (getSubtitleType(title, listOf("shd", "hh", "Hard-of-Hearing", "Hard of Hearing"),
|
||||
SubtitleType.SHD
|
||||
) == SubtitleType.SHD
|
||||
) return SubtitleType.SHD
|
||||
}
|
||||
|
||||
return SubtitleType.DEFAULT
|
||||
}
|
||||
|
||||
private fun getSubtitleType(title: String, keys: List<String>, expected: SubtitleType): SubtitleType {
|
||||
val bracedText = Regex.fromLiteral("[(](?<=\\().*?(?=\\))[)]").find(title)
|
||||
val brakedText = Regex.fromLiteral("[(](?<=\\().*?(?=\\))[)]").find(title)
|
||||
|
||||
if (bracedText == null || brakedText == null)
|
||||
return SubtitleType.DEFAULT
|
||||
|
||||
var text = bracedText.value.ifBlank { brakedText.value }
|
||||
text = Regex.fromLiteral("[\\[\\]()-.,_+]").replace(text, "")
|
||||
|
||||
return if (keys.find { item ->
|
||||
item.lowercase().contains(text.lowercase()) || text.lowercase().contains(item.lowercase())
|
||||
}.isNullOrEmpty()) SubtitleType.DEFAULT else expected
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,62 +0,0 @@
|
||||
package no.iktdev.streamit.content.common
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.FileResult
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.jupiter.api.Named
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.MethodSource
|
||||
|
||||
class NamingTest {
|
||||
|
||||
@Test
|
||||
fun checkThatBracketsGetsRemoved() {
|
||||
val input = "[AAA] Sir fancy - 13 [1080p HEVC][000000]"
|
||||
val name = Naming(input)
|
||||
assertThat(name.guessDesiredTitle()).doesNotContain("[")
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
fun checkThatSeasonIsStripped() {
|
||||
val input = "[AAA] Kafka Topic S2 - 01"
|
||||
val naming = Naming(input)
|
||||
val result = naming.guessDesiredTitle()
|
||||
assertThat(result).isEqualTo("Kafka Topic")
|
||||
}
|
||||
|
||||
/*
|
||||
@ParameterizedTest
|
||||
@MethodSource("serieOnlyTest")
|
||||
fun ensureOnlySerieAndDecodedCorrectly(testData: TestData) {
|
||||
val naming = Naming(testData.input).getName() ?: throw NullPointerException("Named is null")
|
||||
assertThat(naming.type).isEqualTo("serie")
|
||||
assertThat(naming.season).isEqualTo(testData.expected.season)
|
||||
assertThat(naming.episode).isEqualTo(testData.expected.episode)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testTest() {
|
||||
val tmp = TestData(Naming.Name(title = "Demo", season = 1, episode = 1, type = "serie"), "[Kametsu] Ghost in the Shell Arise - 05 - Pyrophoric Cult (BD 1080p Hi10 FLAC) [13FF85A7]")
|
||||
val naming = Naming(tmp.input).getName()
|
||||
assertThat(naming).isNotNull()
|
||||
}
|
||||
|
||||
|
||||
fun serieOnlyTest(): List<Named<TestData>> {
|
||||
return listOf(
|
||||
Named.of("Is defined", TestData(Naming.Name(title = "Demo", season = 1, episode = 1, type = "serie"), "Demo - S01E01")),
|
||||
Named.of("Is decoded", TestData(Naming.Name("Demo!", "serie", season = 1, episode = 1), "[TMP] Demo! - 03")),
|
||||
Named.of("Is only Episode", TestData(Naming.Name("Demo", "serie", 1, 1), "Demo E1"))
|
||||
)
|
||||
}*/
|
||||
|
||||
/*
|
||||
data class TestData(
|
||||
val expected: Naming.Name,
|
||||
val input: String
|
||||
)*/
|
||||
}
|
||||
42
Convert/.gitignore
vendored
42
Convert/.gitignore
vendored
@ -1,42 +0,0 @@
|
||||
.gradle
|
||||
build/
|
||||
!gradle/wrapper/gradle-wrapper.jar
|
||||
!**/src/main/**/build/
|
||||
!**/src/test/**/build/
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea/modules.xml
|
||||
.idea/jarRepositories.xml
|
||||
.idea/compiler.xml
|
||||
.idea/libraries/
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
out/
|
||||
!**/src/main/**/out/
|
||||
!**/src/test/**/out/
|
||||
|
||||
### Eclipse ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
.sts4-cache
|
||||
bin/
|
||||
!**/src/main/**/bin/
|
||||
!**/src/test/**/bin/
|
||||
|
||||
### NetBeans ###
|
||||
/nbproject/private/
|
||||
/nbbuild/
|
||||
/dist/
|
||||
/nbdist/
|
||||
/.nb-gradle/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
||||
|
||||
### Mac OS ###
|
||||
.DS_Store
|
||||
@ -1,4 +0,0 @@
|
||||
FROM bskjon/azuljava:17
|
||||
EXPOSE 8080
|
||||
|
||||
COPY ./build/libs/converter.jar /usr/share/app/app.jar
|
||||
BIN
Convert/gradle/wrapper/gradle-wrapper.jar
vendored
BIN
Convert/gradle/wrapper/gradle-wrapper.jar
vendored
Binary file not shown.
@ -1,6 +0,0 @@
|
||||
#Sun Jul 23 01:48:17 CEST 2023
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
234
Convert/gradlew
vendored
234
Convert/gradlew
vendored
@ -1,234 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Gradle start up script for POSIX generated by Gradle.
|
||||
#
|
||||
# Important for running:
|
||||
#
|
||||
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||
# noncompliant, but you have some other compliant shell such as ksh or
|
||||
# bash, then to run this script, type that shell name before the whole
|
||||
# command line, like:
|
||||
#
|
||||
# ksh Gradle
|
||||
#
|
||||
# Busybox and similar reduced shells will NOT work, because this script
|
||||
# requires all of these POSIX shell features:
|
||||
# * functions;
|
||||
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||
# * compound commands having a testable exit status, especially «case»;
|
||||
# * various built-in commands including «command», «set», and «ulimit».
|
||||
#
|
||||
# Important for patching:
|
||||
#
|
||||
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||
#
|
||||
# The "traditional" practice of packing multiple parameters into a
|
||||
# space-separated string is a well documented source of bugs and security
|
||||
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||
# options in "$@", and eventually passing that to Java.
|
||||
#
|
||||
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||
# see the in-line comments for details.
|
||||
#
|
||||
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
|
||||
# Resolve links: $0 may be a link
|
||||
app_path=$0
|
||||
|
||||
# Need this for daisy-chained symlinks.
|
||||
while
|
||||
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||
[ -h "$app_path" ]
|
||||
do
|
||||
ls=$( ls -ld "$app_path" )
|
||||
link=${ls#*' -> '}
|
||||
case $link in #(
|
||||
/*) app_path=$link ;; #(
|
||||
*) app_path=$APP_HOME$link ;;
|
||||
esac
|
||||
done
|
||||
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=${0##*/}
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
} >&2
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
} >&2
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "$( uname )" in #(
|
||||
CYGWIN* ) cygwin=true ;; #(
|
||||
Darwin* ) darwin=true ;; #(
|
||||
MSYS* | MINGW* ) msys=true ;; #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||
else
|
||||
JAVACMD=$JAVA_HOME/bin/java
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD=java
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command, stacking in reverse order:
|
||||
# * args from the command line
|
||||
# * the main class name
|
||||
# * -classpath
|
||||
# * -D...appname settings
|
||||
# * --module-path (only if needed)
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
for arg do
|
||||
if
|
||||
case $arg in #(
|
||||
-*) false ;; # don't mess with options #(
|
||||
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||
[ -e "$t" ] ;; #(
|
||||
*) false ;;
|
||||
esac
|
||||
then
|
||||
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||
fi
|
||||
# Roll the args list around exactly as many times as the number of
|
||||
# args, so each arg winds up back in the position where it started, but
|
||||
# possibly modified.
|
||||
#
|
||||
# NB: a `for` loop captures its iteration list before it begins, so
|
||||
# changing the positional parameters here affects neither the number of
|
||||
# iterations, nor the values presented in `arg`.
|
||||
shift # remove old arg
|
||||
set -- "$@" "$arg" # push replacement arg
|
||||
done
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command;
|
||||
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
|
||||
# shell script including quotes and variable substitutions, so put them in
|
||||
# double quotes to make sure that they get re-expanded; and
|
||||
# * put everything else in single quotes, so that it's not re-expanded.
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
#
|
||||
# In Bash we could simply go:
|
||||
#
|
||||
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||
# set -- "${ARGS[@]}" "$@"
|
||||
#
|
||||
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||
# character that might be a shell metacharacter, then use eval to reverse
|
||||
# that process (while maintaining the separation between arguments), and wrap
|
||||
# the whole thing up as a single "set" statement.
|
||||
#
|
||||
# This will of course break if any of these variables contains a newline or
|
||||
# an unmatched quote.
|
||||
#
|
||||
|
||||
eval "set -- $(
|
||||
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||
xargs -n1 |
|
||||
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||
tr '\n' ' '
|
||||
)" '"$@"'
|
||||
|
||||
exec "$JAVACMD" "$@"
|
||||
89
Convert/gradlew.bat
vendored
89
Convert/gradlew.bat
vendored
@ -1,89 +0,0 @@
|
||||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
||||
@ -1,4 +0,0 @@
|
||||
rootProject.name = "Convert"
|
||||
|
||||
include(":CommonCode")
|
||||
project(":CommonCode").projectDir = File("../CommonCode")
|
||||
@ -1,19 +0,0 @@
|
||||
package no.iktdev.streamit.content.convert
|
||||
|
||||
import mu.KotlinLogging
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication
|
||||
import org.springframework.boot.runApplication
|
||||
import org.springframework.context.ApplicationContext
|
||||
|
||||
@SpringBootApplication
|
||||
class ConvertApplication
|
||||
|
||||
private var context: ApplicationContext? = null
|
||||
@Suppress("unused")
|
||||
fun getContext(): ApplicationContext? {
|
||||
return context
|
||||
}
|
||||
fun main(args: Array<String>) {
|
||||
context = runApplication<ConvertApplication>(*args)
|
||||
}
|
||||
private val logger = KotlinLogging.logger {}
|
||||
@ -1,7 +0,0 @@
|
||||
package no.iktdev.streamit.content.convert
|
||||
|
||||
class ConvertEnv {
|
||||
companion object {
|
||||
val allowOverwrite = System.getenv("ALLOW_OVERWRITE").toBoolean() ?: false
|
||||
}
|
||||
}
|
||||
@ -1,88 +0,0 @@
|
||||
package no.iktdev.streamit.content.convert
|
||||
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.withContext
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.library.subtitle.Syncro
|
||||
import no.iktdev.library.subtitle.classes.DialogType
|
||||
import no.iktdev.library.subtitle.export.Export
|
||||
import no.iktdev.library.subtitle.reader.BaseReader
|
||||
import no.iktdev.library.subtitle.reader.Reader
|
||||
import no.iktdev.streamit.content.common.dto.reader.SubtitleInfo
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.content.common.streams.SubtitleType
|
||||
import java.io.File
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
|
||||
class ConvertRunner(val referenceId: String, val listener: IConvertListener) {
|
||||
|
||||
private fun getReade(inputFile: File): BaseReader? {
|
||||
return Reader(inputFile).getSubtitleReader()
|
||||
}
|
||||
private val maxDelay = 1000 * 5
|
||||
private var currentDelayed = 0
|
||||
suspend fun readAndConvert (subtitleInfo: SubtitleInfo) {
|
||||
val inFile = File(subtitleInfo.inputFile)
|
||||
while (!inFile.canRead()) {
|
||||
if (currentDelayed > maxDelay) {
|
||||
logger.error { "Could not out wait lock on file!" }
|
||||
withContext(Dispatchers.Default) {
|
||||
listener.onError(referenceId, subtitleInfo, "Cant read file!")
|
||||
}
|
||||
return
|
||||
}
|
||||
logger.error { "$referenceId ${subtitleInfo.inputFile}: Cant read file!" }
|
||||
delay(500)
|
||||
currentDelayed += 500
|
||||
}
|
||||
val reader = getReade(inFile)
|
||||
val dialogs = reader?.read()
|
||||
if (dialogs.isNullOrEmpty()) {
|
||||
logger.error { "$referenceId ${subtitleInfo.inputFile}: Dialogs read from file is null or empty!" }
|
||||
withContext(Dispatchers.Default) {
|
||||
listener.onError(referenceId, subtitleInfo, "Dialogs read from file is null or empty!")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
withContext(Dispatchers.Default) {
|
||||
listener.onStarted(referenceId)
|
||||
}
|
||||
|
||||
val filtered = dialogs.filter { !it.ignore && it.type !in listOf(DialogType.SIGN_SONG, DialogType.CAPTION) }
|
||||
|
||||
val syncedDialogs = Syncro().sync(filtered)
|
||||
|
||||
try {
|
||||
val converted = Export(inFile, syncedDialogs, ConvertEnv.allowOverwrite).write()
|
||||
val item = ConvertWork(
|
||||
inFile = inFile.absolutePath,
|
||||
collection = subtitleInfo.collection,
|
||||
language = subtitleInfo.language,
|
||||
outFiles = converted.map { it.absolutePath }
|
||||
)
|
||||
|
||||
withContext(Dispatchers.Default) {
|
||||
listener.onEnded(referenceId, subtitleInfo, work = item)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
e.printStackTrace()
|
||||
withContext(Dispatchers.Default) {
|
||||
listener.onError(referenceId, subtitleInfo, "See log")
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
interface IConvertListener {
|
||||
fun onStarted(referenceId: String)
|
||||
fun onError(referenceId: String, info: SubtitleInfo, message: String)
|
||||
fun onEnded(referenceId: String, info: SubtitleInfo, work: ConvertWork)
|
||||
}
|
||||
@ -1,69 +0,0 @@
|
||||
package no.iktdev.streamit.content.convert.kafka
|
||||
|
||||
import kotlinx.coroutines.launch
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.exfl.coroutines.Coroutines
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.DefaultKafkaReader
|
||||
import no.iktdev.streamit.content.common.dto.reader.SubtitleInfo
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.content.convert.ConvertRunner
|
||||
import no.iktdev.streamit.content.convert.IConvertListener
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord
|
||||
import org.springframework.stereotype.Service
|
||||
import java.io.File
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
@Service
|
||||
class SubtitleConsumer: DefaultKafkaReader("convertHandlerSubtitle"), IConvertListener {
|
||||
|
||||
private final val listener = object : SimpleMessageListener(
|
||||
topic = CommonConfig.kafkaTopic,
|
||||
consumer = defaultConsumer,
|
||||
accepts = listOf(KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event)
|
||||
) {
|
||||
override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
|
||||
val referenceId = data.value().referenceId
|
||||
val workResult = data.value().dataAs(ExtractWork::class.java)
|
||||
|
||||
if (workResult?.produceConvertEvent == true) {
|
||||
logger.info { "Using ${data.value().referenceId} ${workResult.outFile} as it is a convert candidate" }
|
||||
val convertWork = SubtitleInfo(
|
||||
inputFile = workResult.outFile,
|
||||
collection = workResult.collection,
|
||||
language = workResult.language,
|
||||
)
|
||||
produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_STARTED, Message(referenceId = referenceId, Status(statusType = StatusType.PENDING)), convertWork)
|
||||
Coroutines.io().launch {
|
||||
ConvertRunner(referenceId, this@SubtitleConsumer).readAndConvert(convertWork)
|
||||
}
|
||||
} else {
|
||||
logger.info { "Skipping ${data.value().referenceId} ${workResult?.outFile} as it is not a convert candidate" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
init {
|
||||
listener.listen()
|
||||
}
|
||||
|
||||
override fun onStarted(referenceId: String) {
|
||||
produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_STARTED, Message(referenceId = referenceId, Status(statusType = StatusType.SUCCESS)), null)
|
||||
}
|
||||
|
||||
override fun onError(referenceId: String, info: SubtitleInfo, message: String) {
|
||||
produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED, Message(referenceId = referenceId, Status(statusType = StatusType.ERROR, message = message)), null)
|
||||
}
|
||||
|
||||
override fun onEnded(referenceId: String, info: SubtitleInfo, work: ConvertWork) {
|
||||
produceMessage(KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED, Message(referenceId = referenceId, Status(statusType = StatusType.SUCCESS)), work)
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
spring.output.ansi.enabled=always
|
||||
logging.level.org.apache.kafka=INFO
|
||||
#logging.level.root=DEBUG
|
||||
42
Encode/.gitignore
vendored
42
Encode/.gitignore
vendored
@ -1,42 +0,0 @@
|
||||
.gradle
|
||||
build/
|
||||
!gradle/wrapper/gradle-wrapper.jar
|
||||
!**/src/main/**/build/
|
||||
!**/src/test/**/build/
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea/modules.xml
|
||||
.idea/jarRepositories.xml
|
||||
.idea/compiler.xml
|
||||
.idea/libraries/
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
out/
|
||||
!**/src/main/**/out/
|
||||
!**/src/test/**/out/
|
||||
|
||||
### Eclipse ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
.sts4-cache
|
||||
bin/
|
||||
!**/src/main/**/bin/
|
||||
!**/src/test/**/bin/
|
||||
|
||||
### NetBeans ###
|
||||
/nbproject/private/
|
||||
/nbbuild/
|
||||
/dist/
|
||||
/nbdist/
|
||||
/.nb-gradle/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
||||
|
||||
### Mac OS ###
|
||||
.DS_Store
|
||||
@ -1,4 +0,0 @@
|
||||
FROM bskjon/debian-azuljava17-ffmpeg:latest
|
||||
EXPOSE 8080
|
||||
|
||||
COPY ./build/libs/encoder.jar /usr/share/app/app.jar
|
||||
@ -1,66 +0,0 @@
|
||||
import org.jetbrains.kotlin.gradle.plugin.mpp.pm20.util.archivesName
|
||||
|
||||
plugins {
|
||||
kotlin("jvm") version "1.8.21"
|
||||
id("org.springframework.boot") version "2.5.5"
|
||||
id("io.spring.dependency-management") version "1.0.11.RELEASE"
|
||||
kotlin("plugin.spring") version "1.5.31"
|
||||
}
|
||||
|
||||
group = "no.iktdev.streamit.content"
|
||||
version = "1.0-SNAPSHOT"
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
maven("https://jitpack.io")
|
||||
maven {
|
||||
url = uri("https://reposilite.iktdev.no/releases")
|
||||
}
|
||||
maven {
|
||||
url = uri("https://reposilite.iktdev.no/snapshots")
|
||||
}
|
||||
}
|
||||
dependencies {
|
||||
implementation(project(":CommonCode"))
|
||||
|
||||
implementation("no.iktdev.streamit.library:streamit-library-kafka:0.0.2-alpha84")
|
||||
implementation("no.iktdev:exfl:0.0.13-SNAPSHOT")
|
||||
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
|
||||
|
||||
|
||||
implementation("com.github.pgreze:kotlin-process:1.3.1")
|
||||
implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
|
||||
|
||||
implementation("com.google.code.gson:gson:2.8.9")
|
||||
|
||||
implementation("org.springframework.boot:spring-boot-starter-web")
|
||||
implementation("org.springframework.boot:spring-boot-starter:2.7.0")
|
||||
implementation("org.springframework.kafka:spring-kafka:2.8.5")
|
||||
implementation("org.springframework.boot:spring-boot-starter-websocket:2.6.3")
|
||||
|
||||
|
||||
|
||||
testImplementation("junit:junit:4.13.2")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter-api:5.8.1")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.1")
|
||||
testImplementation("org.assertj:assertj-core:3.4.1")
|
||||
testImplementation("org.mockito:mockito-core:3.+")
|
||||
|
||||
}
|
||||
|
||||
tasks.test {
|
||||
useJUnitPlatform()
|
||||
}
|
||||
|
||||
tasks.bootJar {
|
||||
archiveFileName.set("encoder.jar")
|
||||
launchScript()
|
||||
}
|
||||
|
||||
tasks.jar {
|
||||
archivesName.set("encoder.jar")
|
||||
archiveBaseName.set("encoder")
|
||||
}
|
||||
archivesName.set("encoder.jar")
|
||||
BIN
Encode/gradle/wrapper/gradle-wrapper.jar
vendored
BIN
Encode/gradle/wrapper/gradle-wrapper.jar
vendored
Binary file not shown.
@ -1,6 +0,0 @@
|
||||
#Tue Jul 11 02:14:45 CEST 2023
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
234
Encode/gradlew
vendored
234
Encode/gradlew
vendored
@ -1,234 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Gradle start up script for POSIX generated by Gradle.
|
||||
#
|
||||
# Important for running:
|
||||
#
|
||||
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||
# noncompliant, but you have some other compliant shell such as ksh or
|
||||
# bash, then to run this script, type that shell name before the whole
|
||||
# command line, like:
|
||||
#
|
||||
# ksh Gradle
|
||||
#
|
||||
# Busybox and similar reduced shells will NOT work, because this script
|
||||
# requires all of these POSIX shell features:
|
||||
# * functions;
|
||||
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||
# * compound commands having a testable exit status, especially «case»;
|
||||
# * various built-in commands including «command», «set», and «ulimit».
|
||||
#
|
||||
# Important for patching:
|
||||
#
|
||||
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||
#
|
||||
# The "traditional" practice of packing multiple parameters into a
|
||||
# space-separated string is a well documented source of bugs and security
|
||||
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||
# options in "$@", and eventually passing that to Java.
|
||||
#
|
||||
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||
# see the in-line comments for details.
|
||||
#
|
||||
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
|
||||
# Resolve links: $0 may be a link
|
||||
app_path=$0
|
||||
|
||||
# Need this for daisy-chained symlinks.
|
||||
while
|
||||
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||
[ -h "$app_path" ]
|
||||
do
|
||||
ls=$( ls -ld "$app_path" )
|
||||
link=${ls#*' -> '}
|
||||
case $link in #(
|
||||
/*) app_path=$link ;; #(
|
||||
*) app_path=$APP_HOME$link ;;
|
||||
esac
|
||||
done
|
||||
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=${0##*/}
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
} >&2
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
} >&2
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "$( uname )" in #(
|
||||
CYGWIN* ) cygwin=true ;; #(
|
||||
Darwin* ) darwin=true ;; #(
|
||||
MSYS* | MINGW* ) msys=true ;; #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||
else
|
||||
JAVACMD=$JAVA_HOME/bin/java
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD=java
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command, stacking in reverse order:
|
||||
# * args from the command line
|
||||
# * the main class name
|
||||
# * -classpath
|
||||
# * -D...appname settings
|
||||
# * --module-path (only if needed)
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
for arg do
|
||||
if
|
||||
case $arg in #(
|
||||
-*) false ;; # don't mess with options #(
|
||||
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||
[ -e "$t" ] ;; #(
|
||||
*) false ;;
|
||||
esac
|
||||
then
|
||||
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||
fi
|
||||
# Roll the args list around exactly as many times as the number of
|
||||
# args, so each arg winds up back in the position where it started, but
|
||||
# possibly modified.
|
||||
#
|
||||
# NB: a `for` loop captures its iteration list before it begins, so
|
||||
# changing the positional parameters here affects neither the number of
|
||||
# iterations, nor the values presented in `arg`.
|
||||
shift # remove old arg
|
||||
set -- "$@" "$arg" # push replacement arg
|
||||
done
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command;
|
||||
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
|
||||
# shell script including quotes and variable substitutions, so put them in
|
||||
# double quotes to make sure that they get re-expanded; and
|
||||
# * put everything else in single quotes, so that it's not re-expanded.
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
#
|
||||
# In Bash we could simply go:
|
||||
#
|
||||
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||
# set -- "${ARGS[@]}" "$@"
|
||||
#
|
||||
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||
# character that might be a shell metacharacter, then use eval to reverse
|
||||
# that process (while maintaining the separation between arguments), and wrap
|
||||
# the whole thing up as a single "set" statement.
|
||||
#
|
||||
# This will of course break if any of these variables contains a newline or
|
||||
# an unmatched quote.
|
||||
#
|
||||
|
||||
eval "set -- $(
|
||||
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||
xargs -n1 |
|
||||
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||
tr '\n' ' '
|
||||
)" '"$@"'
|
||||
|
||||
exec "$JAVACMD" "$@"
|
||||
89
Encode/gradlew.bat
vendored
89
Encode/gradlew.bat
vendored
@ -1,89 +0,0 @@
|
||||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
||||
@ -1,4 +0,0 @@
|
||||
rootProject.name = "Encode"
|
||||
|
||||
include(":CommonCode")
|
||||
project(":CommonCode").projectDir = File("../CommonCode")
|
||||
@ -1,35 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value
|
||||
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory
|
||||
import org.springframework.boot.web.server.WebServerFactoryCustomizer
|
||||
import org.springframework.context.annotation.Bean
|
||||
import org.springframework.context.annotation.Configuration
|
||||
import org.springframework.messaging.simp.config.MessageBrokerRegistry
|
||||
import org.springframework.web.bind.annotation.RestController
|
||||
import org.springframework.web.method.HandlerTypePredicate
|
||||
import org.springframework.web.servlet.config.annotation.CorsRegistry
|
||||
import org.springframework.web.servlet.config.annotation.PathMatchConfigurer
|
||||
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry
|
||||
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer
|
||||
import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker
|
||||
import org.springframework.web.socket.config.annotation.StompEndpointRegistry
|
||||
import org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer
|
||||
|
||||
@Configuration
|
||||
@EnableWebSocketMessageBroker
|
||||
class WebSocketConfig : WebSocketMessageBrokerConfigurer {
|
||||
|
||||
override fun registerStompEndpoints(registry: StompEndpointRegistry) {
|
||||
registry.addEndpoint("/ws")
|
||||
// .setAllowedOrigins("*")
|
||||
.withSockJS()
|
||||
|
||||
registry.addEndpoint("/")
|
||||
}
|
||||
|
||||
override fun configureMessageBroker(registry: MessageBrokerRegistry) {
|
||||
registry.enableSimpleBroker("/topic")
|
||||
registry.setApplicationDestinationPrefixes("/app")
|
||||
}
|
||||
}
|
||||
@ -1,9 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode
|
||||
|
||||
class EncodeEnv {
|
||||
companion object {
|
||||
val ffmpeg: String = System.getenv("SUPPORTING_EXECUTABLE_FFMPEG") ?: "ffmpeg"
|
||||
val allowOverwrite = System.getenv("ALLOW_OVERWRITE").toBoolean() ?: false
|
||||
val maxRunners: Int = try {System.getenv("SIMULTANEOUS_ENCODE_RUNNERS").toIntOrNull() ?: 1 } catch (e: Exception) {1}
|
||||
}
|
||||
}
|
||||
@ -1,60 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode
|
||||
|
||||
import com.google.gson.Gson
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.DefaultKafkaReader
|
||||
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
|
||||
import no.iktdev.streamit.content.common.deserializers.EncodeWorkDeserializer
|
||||
import no.iktdev.streamit.content.encode.runner.RunnerCoordinator
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord
|
||||
import org.springframework.stereotype.Service
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
@Service
|
||||
class EncodeWorkConsumer(private val runnerCoordinator: RunnerCoordinator) : DefaultKafkaReader("encodeWork") {
|
||||
|
||||
lateinit var encodeInstructionsListener: EncodeInformationListener
|
||||
|
||||
init {
|
||||
encodeInstructionsListener = EncodeInformationListener(
|
||||
topic = CommonConfig.kafkaTopic,
|
||||
defaultConsumer,
|
||||
accepts = listOf(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO.event),
|
||||
runnerCoordinator
|
||||
)
|
||||
encodeInstructionsListener.listen()
|
||||
}
|
||||
|
||||
override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
|
||||
return DeserializerRegistry.getEventToDeserializer(
|
||||
KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
class EncodeInformationListener(
|
||||
topic: String,
|
||||
consumer: DefaultConsumer,
|
||||
accepts: List<String>,
|
||||
val runnerCoordinator: RunnerCoordinator
|
||||
) : SimpleMessageListener(
|
||||
topic, consumer,
|
||||
accepts
|
||||
) {
|
||||
override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
|
||||
logger.info { "\nreferenceId: ${data.value().referenceId} \nEvent: ${data.key()} \nData:\n${Gson().toJson(data.value())}" }
|
||||
val message = data.value().apply {
|
||||
this.data = EncodeWorkDeserializer().deserializeIfSuccessful(data.value())
|
||||
}
|
||||
runnerCoordinator.addEncodeMessageToQueue(message)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,35 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode
|
||||
|
||||
import no.iktdev.exfl.observable.ObservableMap
|
||||
import no.iktdev.exfl.observable.observableMapOf
|
||||
import no.iktdev.streamit.content.common.dto.WorkOrderItem
|
||||
import no.iktdev.streamit.content.encode.progress.Progress
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication
|
||||
import org.springframework.boot.runApplication
|
||||
import org.springframework.context.ApplicationContext
|
||||
|
||||
@SpringBootApplication
|
||||
class EncoderApplication
|
||||
|
||||
private var context: ApplicationContext? = null
|
||||
val progressMap = observableMapOf<String, Progress>()
|
||||
|
||||
@Suppress("unused")
|
||||
fun getContext(): ApplicationContext? {
|
||||
return context
|
||||
}
|
||||
fun main(args: Array<String>) {
|
||||
context = runApplication<EncoderApplication>(*args)
|
||||
}
|
||||
|
||||
val encoderItems = ObservableMap<String, WorkOrderItem>()
|
||||
val extractItems = ObservableMap<String, WorkOrderItem>()
|
||||
|
||||
/*val progress = ObservableMap<String, EncodeInformation>().also {
|
||||
it.addListener(object: ObservableMap.Listener<String, EncodeInformation> {
|
||||
override fun onPut(key: String, value: EncodeInformation) {
|
||||
super.onPut(key, value)
|
||||
logger.info { "$key with progress: $value." }
|
||||
}
|
||||
})
|
||||
}*/
|
||||
@ -1,59 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode
|
||||
|
||||
import com.google.gson.Gson
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.DefaultKafkaReader
|
||||
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
|
||||
import no.iktdev.streamit.content.common.deserializers.ExtractWorkDeserializer
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.content.encode.runner.RunnerCoordinator
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord
|
||||
import org.springframework.stereotype.Service
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
@Service
|
||||
class ExtractWorkConsumer(private val runnerCoordinator: RunnerCoordinator) : DefaultKafkaReader("extractWork") {
|
||||
lateinit var encodeInstructionsListener: ExtractWorkListener
|
||||
|
||||
init {
|
||||
encodeInstructionsListener = ExtractWorkListener(
|
||||
topic = CommonConfig.kafkaTopic,
|
||||
defaultConsumer,
|
||||
accepts = listOf(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE.event),
|
||||
runnerCoordinator
|
||||
)
|
||||
encodeInstructionsListener.listen()
|
||||
}
|
||||
|
||||
override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
|
||||
return DeserializerRegistry.getEventToDeserializer(
|
||||
KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
class ExtractWorkListener(
|
||||
topic: String,
|
||||
consumer: DefaultConsumer,
|
||||
accepts: List<String>,
|
||||
val runnerCoordinator: RunnerCoordinator
|
||||
) : SimpleMessageListener(
|
||||
topic, consumer,
|
||||
accepts
|
||||
) {
|
||||
override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
|
||||
logger.info { "\nreferenceId: ${data.value().referenceId} \nEvent: ${data.key()} \nData:\n${Gson().toJson(data.value())}" }
|
||||
val message = data.value().apply {
|
||||
this.data = ExtractWorkDeserializer().deserializeIfSuccessful(data.value())
|
||||
}
|
||||
runnerCoordinator.addExtractMessageToQueue(message)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,16 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.controllers
|
||||
|
||||
import com.google.gson.Gson
|
||||
import no.iktdev.streamit.content.encode.progressMap
|
||||
import org.springframework.web.bind.annotation.GetMapping
|
||||
import org.springframework.web.bind.annotation.RestController
|
||||
import javax.servlet.http.HttpServletResponse
|
||||
|
||||
@RestController
|
||||
class ProgressController {
|
||||
@GetMapping("/progress")
|
||||
fun getValue(response: HttpServletResponse): String {
|
||||
response.setHeader("Refresh", "5")
|
||||
return Gson().toJson(progressMap.values)
|
||||
}
|
||||
}
|
||||
@ -1,18 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.progress
|
||||
|
||||
data class DecodedProgressData(
|
||||
val frame: Int?,
|
||||
val fps: Double?,
|
||||
val stream_0_0_q: Double?,
|
||||
val bitrate: String?,
|
||||
val total_size: Int?,
|
||||
val out_time_us: Long?,
|
||||
val out_time_ms: Long?,
|
||||
val out_time: String?,
|
||||
val dup_frames: Int?,
|
||||
val drop_frames: Int?,
|
||||
val speed: Double?,
|
||||
val progress: String?
|
||||
)
|
||||
|
||||
data class ECT(val day: Int = 0, val hour: Int = 0, val minute: Int = 0, val second: Int = 0)
|
||||
@ -1,12 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.progress
|
||||
|
||||
data class Progress(
|
||||
val workId: String,
|
||||
val outFileName: String,
|
||||
val progress: Int = -1,
|
||||
val time: String,
|
||||
val duration: String,
|
||||
val speed: String,
|
||||
val estimatedCompletionSeconds: Long = -1,
|
||||
val estimatedCompletion: String = "Unknown",
|
||||
)
|
||||
@ -1,141 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.progress
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.WorkBase
|
||||
import java.io.File
|
||||
import java.lang.StringBuilder
|
||||
import java.time.LocalTime
|
||||
import java.time.format.DateTimeFormatter
|
||||
import java.util.concurrent.TimeUnit
|
||||
import kotlin.math.floor
|
||||
|
||||
class ProgressDecoder(val workBase: WorkBase) {
|
||||
val expectedKeys = listOf<String>(
|
||||
"frame=",
|
||||
"fps=",
|
||||
"stream_0_0_q=",
|
||||
"bitrate=",
|
||||
"total_size=",
|
||||
"out_time_us=",
|
||||
"out_time_ms=",
|
||||
"out_time=",
|
||||
"dup_frames=",
|
||||
"drop_frames=",
|
||||
"speed=",
|
||||
"progress="
|
||||
)
|
||||
var duration: Int? = null
|
||||
set(value) {
|
||||
if (field == null || field == 0)
|
||||
field = value
|
||||
}
|
||||
var durationTime: String = "NA"
|
||||
fun parseVideoProgress(lines: List<String>): DecodedProgressData? {
|
||||
var frame: Int? = null
|
||||
var progress: String? = null
|
||||
val metadataMap = mutableMapOf<String, String>()
|
||||
|
||||
for (line in lines) {
|
||||
val keyValuePairs = Regex("=\\s*").replace(line, "=").split(" ").filter { it.isNotBlank() }
|
||||
for (keyValuePair in keyValuePairs) {
|
||||
val (key, value) = keyValuePair.split("=")
|
||||
metadataMap[key] = value
|
||||
}
|
||||
|
||||
if (frame == null) {
|
||||
frame = metadataMap["frame"]?.toIntOrNull()
|
||||
}
|
||||
|
||||
progress = metadataMap["progress"]
|
||||
}
|
||||
|
||||
return if (progress != null) {
|
||||
// When "progress" is found, build and return the VideoMetadata object
|
||||
DecodedProgressData(
|
||||
frame, metadataMap["fps"]?.toDoubleOrNull(), metadataMap["stream_0_0_q"]?.toDoubleOrNull(),
|
||||
metadataMap["bitrate"], metadataMap["total_size"]?.toIntOrNull(), metadataMap["out_time_us"]?.toLongOrNull(),
|
||||
metadataMap["out_time_ms"]?.toLongOrNull(), metadataMap["out_time"], metadataMap["dup_frames"]?.toIntOrNull(),
|
||||
metadataMap["drop_frames"]?.toIntOrNull(), metadataMap["speed"]?.replace("x", "", ignoreCase = true)?.toDoubleOrNull(), progress
|
||||
)
|
||||
} else {
|
||||
null // If "progress" is not found, return null
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fun isDuration(value: String): Boolean {
|
||||
return value.contains("Duration", ignoreCase = true)
|
||||
}
|
||||
fun setDuration(value: String) {
|
||||
val results = Regex("Duration:\\s*([^,]+),").find(value)?.groupValues?.firstOrNull()
|
||||
durationTime = Regex("[0-9]+:[0-9]+:[0-9]+.[0-9]+").find(results.toString())?.value ?: "NA"
|
||||
duration = timeSpanToSeconds(results)
|
||||
}
|
||||
|
||||
private fun timeSpanToSeconds(time: String?): Int?
|
||||
{
|
||||
time ?: return null
|
||||
val timeString = Regex("[0-9]+:[0-9]+:[0-9]+.[0-9]+").find(time) ?: return null
|
||||
val strippedMS = Regex("[0-9]+:[0-9]+:[0-9]+").find(timeString.value) ?: return null
|
||||
val outTime = LocalTime.parse(strippedMS.value, DateTimeFormatter.ofPattern("HH:mm:ss"))
|
||||
return outTime.toSecondOfDay()
|
||||
}
|
||||
|
||||
|
||||
fun getProgress(decoded: DecodedProgressData): Progress {
|
||||
if (duration == null)
|
||||
return Progress(workId = workBase.workId, outFileName = File(workBase.outFile).name, duration = durationTime, time = "NA", speed = "NA")
|
||||
val progressTime = timeSpanToSeconds(decoded.out_time) ?: 0
|
||||
val progress = floor((progressTime.toDouble() / duration!!.toDouble()) *100).toInt()
|
||||
|
||||
val ect = getEstimatedTimeRemaining(decoded)
|
||||
|
||||
return Progress(
|
||||
workId = workBase.workId, outFileName = File(workBase.outFile).name,
|
||||
progress = progress,
|
||||
estimatedCompletionSeconds = ect,
|
||||
estimatedCompletion = getETA(ect),
|
||||
duration = durationTime,
|
||||
time = decoded.out_time ?: "NA",
|
||||
speed = decoded.speed?.toString() ?: "NA"
|
||||
)
|
||||
}
|
||||
|
||||
fun getEstimatedTimeRemaining(decoded: DecodedProgressData): Long {
|
||||
val position = timeSpanToSeconds(decoded.out_time) ?: 0
|
||||
return if(duration == null || decoded.speed == null) -1 else
|
||||
Math.round(Math.round(duration!!.toDouble() - position.toDouble()) / decoded.speed)
|
||||
}
|
||||
|
||||
fun getECT(time: Long): ECT {
|
||||
var seconds = time
|
||||
val day = TimeUnit.SECONDS.toDays(seconds)
|
||||
seconds -= java.util.concurrent.TimeUnit.DAYS.toSeconds(day)
|
||||
|
||||
val hour = TimeUnit.SECONDS.toHours(seconds)
|
||||
seconds -= java.util.concurrent.TimeUnit.HOURS.toSeconds(hour)
|
||||
|
||||
val minute = TimeUnit.SECONDS.toMinutes(seconds)
|
||||
seconds -= java.util.concurrent.TimeUnit.MINUTES.toSeconds(minute)
|
||||
|
||||
return ECT(day.toInt(), hour.toInt(), minute.toInt(), seconds.toInt())
|
||||
}
|
||||
private fun getETA(time: Long): String {
|
||||
val etc = getECT(time) ?: return "Unknown"
|
||||
val str = StringBuilder()
|
||||
if (etc.day > 0) {
|
||||
str.append("${etc.day}d").append(" ")
|
||||
}
|
||||
if (etc.hour > 0) {
|
||||
str.append("${etc.hour}h").append(" ")
|
||||
}
|
||||
if (etc.day == 0 && etc.minute > 0) {
|
||||
str.append("${etc.minute}m").append(" ")
|
||||
}
|
||||
if (etc.hour == 0 && etc.second > 0) {
|
||||
str.append("${etc.second}s").append(" ")
|
||||
}
|
||||
return str.toString().trim()
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@ -1,108 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.runner
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.streamit.content.encode.EncodeEnv
|
||||
import no.iktdev.exfl.observable.ObservableList
|
||||
import no.iktdev.exfl.observable.observableListOf
|
||||
import no.iktdev.exfl.using
|
||||
import no.iktdev.streamit.content.common.deamon.Daemon
|
||||
import no.iktdev.streamit.content.common.deamon.IDaemon
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
|
||||
import no.iktdev.streamit.content.encode.progress.DecodedProgressData
|
||||
import no.iktdev.streamit.content.encode.progress.Progress
|
||||
import no.iktdev.streamit.content.encode.progress.ProgressDecoder
|
||||
import java.io.BufferedWriter
|
||||
import java.io.File
|
||||
import java.io.FileWriter
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
class EncodeDaemon(val referenceId: String, val work: EncodeWork, val daemonInterface: IEncodeListener, val outFile: File = File("src").using("logs", "${work.workId}-${work.collection}.log")): IDaemon {
|
||||
var outputCache = observableListOf<String>()
|
||||
private val decoder = ProgressDecoder(work)
|
||||
fun produceProgress(items: List<String>): Progress? {
|
||||
try {
|
||||
val decodedProgress = decoder.parseVideoProgress(items)
|
||||
if (decodedProgress != null) {
|
||||
val progress = decoder.getProgress(decodedProgress)
|
||||
outputCache.clear()
|
||||
return progress
|
||||
}
|
||||
} catch (e: IndexOutOfBoundsException) {
|
||||
// Do nothing
|
||||
} catch (e: Exception) {
|
||||
//logger.error { e.message }
|
||||
e.printStackTrace()
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
init {
|
||||
outputCache.addListener(object : ObservableList.Listener<String> {
|
||||
override fun onAdded(item: String) {
|
||||
val progress = produceProgress(outputCache)
|
||||
progress?.let {
|
||||
daemonInterface.onProgress(referenceId, work, progress)
|
||||
}
|
||||
}
|
||||
})
|
||||
outFile.parentFile.mkdirs()
|
||||
}
|
||||
|
||||
suspend fun runUsingWorkItem(): Int {
|
||||
val outFile = File(work.outFile)
|
||||
if (!outFile.parentFile.exists()) {
|
||||
outFile.parentFile.mkdirs()
|
||||
}
|
||||
val adjustedArgs = (if (EncodeEnv.allowOverwrite) listOf("-y") else listOf("-nostdin")) + listOf(
|
||||
"-hide_banner", "-i", File(work.inFile).absolutePath, *work.arguments.toTypedArray(), outFile.absolutePath,
|
||||
"-progress", "pipe:1"
|
||||
)
|
||||
logger.info { "$referenceId @ ${work.workId} ${adjustedArgs.joinToString(" ")}" }
|
||||
return Daemon(EncodeEnv.ffmpeg, this).run(adjustedArgs)
|
||||
}
|
||||
|
||||
override fun onStarted() {
|
||||
super.onStarted()
|
||||
daemonInterface.onStarted(referenceId, work)
|
||||
}
|
||||
|
||||
override fun onEnded() {
|
||||
super.onEnded()
|
||||
daemonInterface.onEnded(referenceId, work)
|
||||
}
|
||||
|
||||
override fun onError(code: Int) {
|
||||
daemonInterface.onError(referenceId, work, code)
|
||||
}
|
||||
|
||||
override fun onOutputChanged(line: String) {
|
||||
super.onOutputChanged(line)
|
||||
if (decoder.isDuration(line))
|
||||
decoder.setDuration(line)
|
||||
if (decoder.expectedKeys.any { line.startsWith(it) }) {
|
||||
outputCache.add(line)
|
||||
}
|
||||
writeToLog(line)
|
||||
}
|
||||
fun writeToLog(line: String) {
|
||||
val fileWriter = FileWriter(outFile, true) // true indikerer at vi ønsker å appende til filen
|
||||
val bufferedWriter = BufferedWriter(fileWriter)
|
||||
|
||||
// Skriv logglinjen til filen
|
||||
bufferedWriter.write(line)
|
||||
bufferedWriter.newLine() // Legg til en ny linje etter logglinjen
|
||||
|
||||
// Lukk BufferedWriter og FileWriter for å frigjøre ressurser
|
||||
bufferedWriter.close()
|
||||
fileWriter.close()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface IEncodeListener {
|
||||
fun onStarted(referenceId: String, work: EncodeWork)
|
||||
fun onError(referenceId: String, work: EncodeWork, code: Int)
|
||||
fun onProgress(referenceId: String, work: EncodeWork, progress: Progress)
|
||||
fun onEnded(referenceId: String, work: EncodeWork)
|
||||
}
|
||||
@ -1,54 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.runner
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.streamit.content.encode.EncodeEnv
|
||||
import no.iktdev.exfl.observable.observableListOf
|
||||
import no.iktdev.streamit.content.common.deamon.Daemon
|
||||
import no.iktdev.streamit.content.common.deamon.IDaemon
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.content.encode.progress.DecodedProgressData
|
||||
import java.io.File
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
class ExtractDaemon(val referenceId: String, val work: ExtractWork, val daemonInterface: IExtractListener): IDaemon {
|
||||
var outputCache = observableListOf<String>()
|
||||
|
||||
|
||||
suspend fun runUsingWorkItem(): Int {
|
||||
val outFile = File(work.outFile)
|
||||
if (!outFile.parentFile.exists()) {
|
||||
outFile.parentFile.mkdirs()
|
||||
}
|
||||
val adjustedArgs = (if (EncodeEnv.allowOverwrite) listOf("-y") else emptyList()) + listOf(
|
||||
"-i", File(work.inFile).absolutePath, *work.arguments.toTypedArray(), outFile.absolutePath
|
||||
)
|
||||
logger.info { "$referenceId @ ${work.workId} ${adjustedArgs.joinToString(" ")}" }
|
||||
return Daemon(EncodeEnv.ffmpeg, this).run(adjustedArgs)
|
||||
}
|
||||
|
||||
override fun onStarted() {
|
||||
super.onStarted()
|
||||
daemonInterface.onStarted(referenceId, work)
|
||||
}
|
||||
|
||||
override fun onEnded() {
|
||||
super.onEnded()
|
||||
daemonInterface.onEnded(referenceId, work)
|
||||
}
|
||||
|
||||
override fun onError(code: Int) {
|
||||
daemonInterface.onError(referenceId, work, code)
|
||||
}
|
||||
override fun onOutputChanged(line: String) {
|
||||
super.onOutputChanged(line)
|
||||
outputCache.add(line)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface IExtractListener {
|
||||
fun onStarted(referenceId: String, work: ExtractWork)
|
||||
fun onError(referenceId: String, work: ExtractWork, code: Int)
|
||||
fun onProgress(referenceId: String, work: ExtractWork, progress: DecodedProgressData) {}
|
||||
fun onEnded(referenceId: String, work: ExtractWork)
|
||||
}
|
||||
@ -1,324 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.runner
|
||||
|
||||
import com.google.gson.Gson
|
||||
import kotlinx.coroutines.*
|
||||
import kotlinx.coroutines.channels.Channel
|
||||
import no.iktdev.streamit.content.encode.EncodeEnv
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.exfl.coroutines.Coroutines
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.dto.State
|
||||
import no.iktdev.streamit.content.common.dto.WorkOrderItem
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.content.encode.encoderItems
|
||||
import no.iktdev.streamit.content.encode.extractItems
|
||||
import no.iktdev.streamit.content.encode.progress.Progress
|
||||
import no.iktdev.streamit.content.encode.progressMap
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
|
||||
import org.springframework.stereotype.Service
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
data class ExecutionBlock(
|
||||
val workId: String,
|
||||
val type: String,
|
||||
val work: suspend () -> Int
|
||||
)
|
||||
|
||||
@Service
|
||||
class RunnerCoordinator(
|
||||
private var maxConcurrentJobs: Int = 1,
|
||||
) {
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
val producer = DefaultProducer(CommonConfig.kafkaTopic)
|
||||
final val defaultScope = Coroutines.default()
|
||||
|
||||
private val jobsInProgress = AtomicInteger(0)
|
||||
private var inProgressJobs = mutableListOf<Job>()
|
||||
val queue = Channel<ExecutionBlock>(Channel.UNLIMITED)
|
||||
|
||||
|
||||
init {
|
||||
maxConcurrentJobs = EncodeEnv.maxRunners
|
||||
repeat(EncodeEnv.maxRunners) {
|
||||
launchWorker()
|
||||
}
|
||||
}
|
||||
|
||||
fun launchWorker() = defaultScope.launch {
|
||||
while (true) {
|
||||
logger.info("Worker is waiting for a work item...")
|
||||
val workItem = queue.receive() // Coroutine will wait here until a work item is available
|
||||
logger.info("Worker received a work item.")
|
||||
if (jobsInProgress.get() < maxConcurrentJobs) {
|
||||
jobsInProgress.incrementAndGet()
|
||||
val job = processWorkItem(workItem)
|
||||
inProgressJobs.add(job)
|
||||
job.invokeOnCompletion {
|
||||
logger.info { "OnCompletion invoked!\n\nWorkId: ${workItem.workId}-${workItem.type} \n\tCurrent active worksers: ${jobsInProgress.get()}" }
|
||||
val workers = jobsInProgress.decrementAndGet()
|
||||
logger.info { "Worker Released: $workers" }
|
||||
logger.info { "Available: ${workers}/${maxConcurrentJobs}" }
|
||||
inProgressJobs.remove(job)
|
||||
}
|
||||
}
|
||||
logger.info { "Available workers: ${jobsInProgress.get()}/$maxConcurrentJobs" }
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
private suspend fun processWorkItem(workItem: ExecutionBlock): Job {
|
||||
logger.info { "Processing work: ${workItem.type}" }
|
||||
workItem.work()
|
||||
return Job().apply { complete() }
|
||||
}
|
||||
|
||||
|
||||
fun addEncodeMessageToQueue(message: Message) {
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(StatusType.PENDING))
|
||||
)
|
||||
try {
|
||||
if (message.data != null && message.data is EncodeWork) {
|
||||
val work = message.data as EncodeWork
|
||||
encoderItems.put(
|
||||
message.referenceId, WorkOrderItem(
|
||||
id = message.referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.QUEUED
|
||||
)
|
||||
)
|
||||
|
||||
val workBlock = suspend {
|
||||
val data: EncodeWork = work
|
||||
val encodeDaemon = EncodeDaemon(message.referenceId, data, encodeListener)
|
||||
logger.info { "\nreferenceId: ${message.referenceId} \nStarting encoding. \nWorkId: ${data.workId}" }
|
||||
encodeDaemon.runUsingWorkItem()
|
||||
}
|
||||
val result = queue.trySend(ExecutionBlock(work.workId, "encode", workBlock))
|
||||
val statusType = when (result.isClosed) {
|
||||
true -> StatusType.IGNORED // Køen er lukket, jobben ble ignorert
|
||||
false -> {
|
||||
if (result.isSuccess) {
|
||||
StatusType.SUCCESS // Jobben ble sendt til køen
|
||||
} else {
|
||||
StatusType.ERROR // Feil ved sending av jobben
|
||||
}
|
||||
}
|
||||
}
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(statusType))
|
||||
)
|
||||
} else {
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(StatusType.ERROR, "Data is not an instance of EncodeWork or null"))
|
||||
)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
e.printStackTrace()
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(StatusType.ERROR, e.message))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fun addExtractMessageToQueue(message: Message) {
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(StatusType.PENDING))
|
||||
)
|
||||
try {
|
||||
if (message.data != null && message.data is ExtractWork) {
|
||||
val work = message.data as ExtractWork
|
||||
extractItems.put(
|
||||
message.referenceId, WorkOrderItem(
|
||||
id = message.referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.QUEUED
|
||||
)
|
||||
)
|
||||
val workBlock = suspend {
|
||||
val data: ExtractWork = work
|
||||
val extractDaemon = ExtractDaemon(message.referenceId, data, extractListener)
|
||||
logger.info { "\nreferenceId: ${message.referenceId} \nStarting extracting. \nWorkId: ${data.workId}" }
|
||||
extractDaemon.runUsingWorkItem()
|
||||
}
|
||||
val result = queue.trySend(ExecutionBlock(work.workId, "extract", workBlock))
|
||||
val statusType = when (result.isClosed) {
|
||||
true -> StatusType.IGNORED // Køen er lukket, jobben ble ignorert
|
||||
false -> {
|
||||
if (result.isSuccess) {
|
||||
StatusType.SUCCESS // Jobben ble sendt til køen
|
||||
} else {
|
||||
StatusType.ERROR // Feil ved sending av jobben
|
||||
}
|
||||
}
|
||||
}
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(statusType))
|
||||
)
|
||||
} else {
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(StatusType.ERROR, "Data is not an instance of ExtractWork"))
|
||||
)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
e.printStackTrace()
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_QUEUED.event,
|
||||
message.withNewStatus(Status(StatusType.ERROR, e.message))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
val encodeListener = object : IEncodeListener {
|
||||
override fun onStarted(referenceId: String, work: EncodeWork) {
|
||||
logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nEncode: Started\n${work.outFile}" }
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_STARTED.event,
|
||||
Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
|
||||
)
|
||||
encoderItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.STARTED
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override fun onError(referenceId: String, work: EncodeWork, code: Int) {
|
||||
logger.error { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nEncode: Failed\n${work.outFile} \nError: $code" }
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED.event,
|
||||
Message(referenceId, Status(StatusType.ERROR, message = code.toString()), work)
|
||||
)
|
||||
encoderItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.FAILURE
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override fun onProgress(referenceId: String, work: EncodeWork, progress: Progress) {
|
||||
logger.debug {
|
||||
"Work progress for $referenceId with WorkId ${work.workId} @ ${work.outFile}: Progress: ${
|
||||
Gson().toJson(
|
||||
progress
|
||||
)
|
||||
}"
|
||||
}
|
||||
progressMap.put(work.workId, progress)
|
||||
encoderItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.UPDATED,
|
||||
progress = progress.progress,
|
||||
remainingTime = progress.estimatedCompletionSeconds
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override fun onEnded(referenceId: String, work: EncodeWork) {
|
||||
logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nEncode: Ended\n${work.outFile}" }
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED.event,
|
||||
Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
|
||||
)
|
||||
encoderItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.ENDED,
|
||||
progress = 100,
|
||||
remainingTime = null
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
val extractListener = object : IExtractListener {
|
||||
override fun onStarted(referenceId: String, work: ExtractWork) {
|
||||
logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nExtract: Started\n${work.outFile}" }
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_STARTED.event,
|
||||
Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
|
||||
)
|
||||
extractItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.STARTED
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override fun onError(referenceId: String, work: ExtractWork, code: Int) {
|
||||
logger.error { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nExtract: Failed\n${work.outFile} \nError: $code" }
|
||||
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event,
|
||||
Message(referenceId, Status(StatusType.ERROR, code.toString()), work)
|
||||
)
|
||||
extractItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.FAILURE
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override fun onEnded(referenceId: String, work: ExtractWork) {
|
||||
logger.info { "\nreferenceId: $referenceId \nWorkId ${work.workId} \nExtract: Ended\n${work.outFile}" }
|
||||
producer.sendMessage(
|
||||
KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event,
|
||||
Message(referenceId, Status(statusType = StatusType.SUCCESS), work)
|
||||
)
|
||||
extractItems.put(
|
||||
referenceId, WorkOrderItem(
|
||||
id = referenceId,
|
||||
inputFile = work.inFile,
|
||||
outputFile = work.outFile,
|
||||
collection = work.collection,
|
||||
state = State.ENDED
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,65 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.topics
|
||||
|
||||
import no.iktdev.exfl.observable.ObservableMap
|
||||
import no.iktdev.streamit.content.common.dto.WorkOrderItem
|
||||
import no.iktdev.streamit.content.encode.encoderItems
|
||||
import no.iktdev.streamit.content.encode.extractItems
|
||||
import org.springframework.beans.factory.annotation.Autowired
|
||||
import org.springframework.messaging.handler.annotation.MessageMapping
|
||||
import org.springframework.messaging.simp.SimpMessagingTemplate
|
||||
import org.springframework.stereotype.Controller
|
||||
|
||||
@Controller
|
||||
class EncoderTopic(
|
||||
@Autowired val template: SimpMessagingTemplate?,
|
||||
) {
|
||||
|
||||
init {
|
||||
encoderItems.addListener(object : ObservableMap.Listener<String, WorkOrderItem> {
|
||||
override fun onMapUpdated(map: Map<String, WorkOrderItem>) {
|
||||
super.onMapUpdated(map)
|
||||
pushEncoderQueue()
|
||||
}
|
||||
|
||||
override fun onPut(key: String, value: WorkOrderItem) {
|
||||
super.onPut(key, value)
|
||||
pushEncoderWorkOrder(value)
|
||||
}
|
||||
})
|
||||
extractItems.addListener(object : ObservableMap.Listener<String, WorkOrderItem> {
|
||||
override fun onMapUpdated(map: Map<String, WorkOrderItem>) {
|
||||
super.onMapUpdated(map)
|
||||
pushExtractorQueue()
|
||||
}
|
||||
|
||||
override fun onPut(key: String, value: WorkOrderItem) {
|
||||
super.onPut(key, value)
|
||||
pushExtractorWorkOrder(value)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fun pushEncoderWorkOrder(item: WorkOrderItem) {
|
||||
template?.convertAndSend("/topic/encoder/workorder", item)
|
||||
}
|
||||
|
||||
fun pushExtractorWorkOrder(item: WorkOrderItem) {
|
||||
template?.convertAndSend("/topic/extractor/workorder", item)
|
||||
}
|
||||
|
||||
@MessageMapping("/encoder/queue")
|
||||
fun pushEncoderQueue() {
|
||||
template?.convertAndSend("/topic/encoder/queue", encoderItems.values)
|
||||
}
|
||||
|
||||
@MessageMapping("/extractor/queue")
|
||||
fun pushExtractorQueue() {
|
||||
template?.convertAndSend("/topic/extractor/queue", extractItems.values)
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
spring.output.ansi.enabled=always
|
||||
logging.level.org.apache.kafka=WARN
|
||||
#logging.level.root=DEBUG
|
||||
@ -1,29 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode
|
||||
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord
|
||||
|
||||
open class Resources {
|
||||
|
||||
fun getText(path: String): String? {
|
||||
return this.javaClass.classLoader.getResource(path)?.readText()
|
||||
}
|
||||
|
||||
open class Streams(): Resources() {
|
||||
fun all(): List<String> {
|
||||
return listOf<String>(
|
||||
getSample(0),
|
||||
getSample(1),
|
||||
getSample(2),
|
||||
getSample(3),
|
||||
getSample(4),
|
||||
getSample(5),
|
||||
getSample(6),
|
||||
)
|
||||
}
|
||||
|
||||
fun getSample(number: Int): String {
|
||||
return getText("streams/sample$number.json")!!
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,176 +0,0 @@
|
||||
package no.iktdev.streamit.content.encode.progress
|
||||
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
|
||||
import no.iktdev.streamit.content.encode.Resources
|
||||
import no.iktdev.streamit.content.encode.runner.EncodeDaemon
|
||||
import no.iktdev.streamit.content.encode.runner.IEncodeListener
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.junit.BeforeClass
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.assertDoesNotThrow
|
||||
import org.mockito.ArgumentMatchers.anyBoolean
|
||||
import org.mockito.ArgumentMatchers.anyString
|
||||
import org.mockito.Mockito.*
|
||||
import java.io.BufferedWriter
|
||||
import java.io.File
|
||||
import java.io.FileWriter
|
||||
import java.util.UUID
|
||||
|
||||
class DecodedProgressDataDecoderTest {
|
||||
|
||||
@Test
|
||||
fun test() {
|
||||
val progress = ProgressDecoder(EncodeWork(
|
||||
workId = UUID.randomUUID().toString(),
|
||||
collection = "Demo",
|
||||
inFile = "Demo.mkv",
|
||||
outFile = "FancyDemo.mp4",
|
||||
arguments = emptyList()
|
||||
))
|
||||
val lines = text.split("\n")
|
||||
val cache: MutableList<String> = mutableListOf()
|
||||
lines.forEach {
|
||||
cache.add(it)
|
||||
assertDoesNotThrow {
|
||||
val progressItem = progress.parseVideoProgress(cache)
|
||||
progressItem?.progress
|
||||
}
|
||||
}
|
||||
assertThat(lines).isNotEmpty()
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
fun testCanRead() {
|
||||
val res = Resources()
|
||||
val data = res.getText("Output1.txt") ?: ""
|
||||
assertThat(data).isNotEmpty()
|
||||
val lines = data.split("\n").map { it.trim() }
|
||||
assertThat(lines).isNotEmpty()
|
||||
|
||||
val encodeWork = EncodeWork(
|
||||
workId = UUID.randomUUID().toString(),
|
||||
collection = "Demo",
|
||||
inFile = "Demo.mkv",
|
||||
outFile = "FancyDemo.mp4",
|
||||
arguments = emptyList()
|
||||
)
|
||||
val decoder = ProgressDecoder(encodeWork)
|
||||
lines.forEach { decoder.setDuration(it) }
|
||||
assertThat(decoder.duration).isNotNull()
|
||||
val produced = mutableListOf<Progress>()
|
||||
|
||||
val tempFile = File.createTempFile("test", ".log")
|
||||
|
||||
val encoder = EncodeDaemon(UUID.randomUUID().toString(), encodeWork, object : IEncodeListener {
|
||||
override fun onStarted(referenceId: String, work: EncodeWork) {
|
||||
}
|
||||
override fun onError(referenceId: String, work: EncodeWork, code: Int) {
|
||||
}
|
||||
override fun onProgress(referenceId: String, work: EncodeWork, progress: Progress) {
|
||||
produced.add(progress)
|
||||
}
|
||||
override fun onEnded(referenceId: String, work: EncodeWork) {
|
||||
}
|
||||
|
||||
}, tempFile)
|
||||
|
||||
|
||||
lines.forEach {
|
||||
encoder.onOutputChanged(it)
|
||||
}
|
||||
assertThat(produced).isNotEmpty()
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
fun testThatProgressIsCalculated() {
|
||||
val encodeWork = EncodeWork(
|
||||
workId = UUID.randomUUID().toString(),
|
||||
collection = "Demo",
|
||||
inFile = "Demo.mkv",
|
||||
outFile = "FancyDemo.mp4",
|
||||
arguments = emptyList()
|
||||
)
|
||||
val decoder = ProgressDecoder(encodeWork)
|
||||
decoder.setDuration("Duration: 01:48:54.82,")
|
||||
assertThat(decoder.duration).isNotNull()
|
||||
val decodedProgressData = DecodedProgressData(
|
||||
frame = null,
|
||||
fps = null,
|
||||
stream_0_0_q = null,
|
||||
bitrate = null,
|
||||
total_size = null,
|
||||
out_time_ms = null,
|
||||
out_time_us = null,
|
||||
out_time = "01:48:54.82",
|
||||
dup_frames = null,
|
||||
drop_frames = null,
|
||||
speed = 1.0,
|
||||
progress = "Continue"
|
||||
)
|
||||
val progress = decoder.getProgress(decodedProgressData)
|
||||
assertThat(progress.progress).isGreaterThanOrEqualTo(99)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testThatProgressIsNotNone() {
|
||||
val encodeWork = EncodeWork(
|
||||
workId = UUID.randomUUID().toString(),
|
||||
collection = "Demo",
|
||||
inFile = "Demo.mkv",
|
||||
outFile = "FancyDemo.mp4",
|
||||
arguments = emptyList()
|
||||
)
|
||||
val decoder = ProgressDecoder(encodeWork)
|
||||
decoder.setDuration("Duration: 01:48:54.82,")
|
||||
assertThat(decoder.duration).isNotNull()
|
||||
val decodedProgressData = DecodedProgressData(
|
||||
frame = null,
|
||||
fps = null,
|
||||
stream_0_0_q = null,
|
||||
bitrate = null,
|
||||
total_size = null,
|
||||
out_time_ms = null,
|
||||
out_time_us = null,
|
||||
out_time = "01:00:50.174667",
|
||||
dup_frames = null,
|
||||
drop_frames = null,
|
||||
speed = 1.0,
|
||||
progress = "Continue"
|
||||
)
|
||||
val progress = decoder.getProgress(decodedProgressData)
|
||||
assertThat(progress.progress).isGreaterThanOrEqualTo(1)
|
||||
}
|
||||
|
||||
val text = """
|
||||
frame=16811 fps= 88 q=40.0 size= 9984kB time=00:x01:10.79 bitrate=1155.3kbits/s speed=3.71x
|
||||
fps=88.03
|
||||
stream_0_0_q=40.0
|
||||
bitrate=1155.3kbits/s
|
||||
total_size=10223752
|
||||
out_time_us=70798005
|
||||
out_time_ms=70798005
|
||||
out_time=00:01:10.798005
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=3.71x
|
||||
progress=continue
|
||||
frame= 1710 fps= 84 q=-1.0 Lsize= 12124kB time=00:01:11.91 bitrate=1381.2kbits/s speed=3.53x
|
||||
frame=1710
|
||||
fps=84.01
|
||||
stream_0_0_q=-1.0
|
||||
bitrate=1381.2kbits/s
|
||||
total_size=12415473
|
||||
out_time_us=71910998
|
||||
out_time_ms=71910998
|
||||
out_time=00:01:11.910998
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=3.53x
|
||||
progress=end
|
||||
""".trimIndent()
|
||||
}
|
||||
@ -1,389 +0,0 @@
|
||||
Guessed Channel Layout for Input Stream #0.1 : 5.1
|
||||
Input #0, matroska,webm, from '/src/input/DemoFile.mkv':
|
||||
Metadata:
|
||||
CREATION_TIME : 2019-06-15T08:06:07Z
|
||||
ENCODER : Lavf57.7.2
|
||||
Duration: 01:48:54.82, start: 0.000000, bitrate: 2709 kb/s
|
||||
Chapter #0:0: start 0.000000, end 328.537000
|
||||
Metadata:
|
||||
title : 00:00:00.000
|
||||
Chapter #0:1: start 328.537000, end 419.044000
|
||||
Metadata:
|
||||
title : 00:05:28.537
|
||||
Chapter #0:2: start 419.044000, end 916.874000
|
||||
Metadata:
|
||||
title : 00:06:59.044
|
||||
Chapter #0:3: start 916.874000, end 1309.433000
|
||||
Metadata:
|
||||
title : 00:15:16.749
|
||||
Chapter #0:4: start 1309.433000, end 1399.023000
|
||||
Metadata:
|
||||
title : 00:21:49.391
|
||||
Chapter #0:5: start 1399.023000, end 1508.924000
|
||||
Metadata:
|
||||
title : 00:23:19.023
|
||||
Chapter #0:6: start 1508.924000, end 1767.099000
|
||||
Metadata:
|
||||
title : 00:25:08.924
|
||||
Chapter #0:7: start 1767.099000, end 1975.474000
|
||||
Metadata:
|
||||
title : 00:29:27.099
|
||||
Chapter #0:8: start 1975.474000, end 2301.466000
|
||||
Metadata:
|
||||
title : 00:32:55.473
|
||||
Chapter #0:9: start 2301.466000, end 2498.246000
|
||||
Metadata:
|
||||
title : 00:38:21.466
|
||||
Chapter #0:10: start 2498.246000, end 2622.036000
|
||||
Metadata:
|
||||
title : 00:41:38.246
|
||||
Chapter #0:11: start 2622.036000, end 2925.172000
|
||||
Metadata:
|
||||
title : 00:43:42.036
|
||||
Chapter #0:12: start 2925.172000, end 3183.472000
|
||||
Metadata:
|
||||
title : 00:48:45.172
|
||||
Chapter #0:13: start 3183.472000, end 3467.172000
|
||||
Metadata:
|
||||
title : 00:53:03.472
|
||||
Chapter #0:14: start 3467.172000, end 3684.472000
|
||||
Metadata:
|
||||
title : 00:57:47.172
|
||||
Chapter #0:15: start 3684.472000, end 3885.840000
|
||||
Metadata:
|
||||
title : 01:01:24.472
|
||||
Chapter #0:16: start 3885.840000, end 4063.059000
|
||||
Metadata:
|
||||
title : 01:04:45.840
|
||||
Chapter #0:17: start 4063.059000, end 4275.605000
|
||||
Metadata:
|
||||
title : 01:07:43.059
|
||||
Chapter #0:18: start 4275.605000, end 4434.263000
|
||||
Metadata:
|
||||
title : 01:11:15.605
|
||||
Chapter #0:19: start 4434.263000, end 4709.205000
|
||||
Metadata:
|
||||
title : 01:13:54.263
|
||||
Chapter #0:20: start 4709.205000, end 4900.020000
|
||||
Metadata:
|
||||
title : 01:18:29.204
|
||||
Chapter #0:21: start 4900.020000, end 5081.201000
|
||||
Metadata:
|
||||
title : 01:21:40.020
|
||||
Chapter #0:22: start 5081.201000, end 5211.123000
|
||||
Metadata:
|
||||
title : 01:24:41.201
|
||||
Chapter #0:23: start 5211.123000, end 5359.938000
|
||||
Metadata:
|
||||
title : 01:26:51.123
|
||||
Chapter #0:24: start 5359.938000, end 5833.786000
|
||||
Metadata:
|
||||
title : 01:29:19.938
|
||||
Chapter #0:25: start 5833.786000, end 5953.865000
|
||||
Metadata:
|
||||
title : 01:37:13.786
|
||||
Chapter #0:26: start 5953.865000, end 6229.432000
|
||||
Metadata:
|
||||
title : 01:39:13.865
|
||||
Chapter #0:27: start 6229.432000, end 6534.779000
|
||||
Metadata:
|
||||
title : 01:43:49.181
|
||||
Stream #0:0: Video: h264 (High), yuv420p(tv, bt709, progressive), 1920x1080 [SAR 1:1 DAR 16:9], 23.98 fps, 23.98 tbr, 1k tbn, 47.95 tbc (default)
|
||||
Stream #0:1(eng): Audio: ac3, 48000 Hz, 5.1, fltp (default)
|
||||
Metadata:
|
||||
title : Surround
|
||||
Stream #0:2(jpn): Audio: ac3, 48000 Hz, 5.1(side), fltp, 640 kb/s
|
||||
Metadata:
|
||||
title : Surround
|
||||
Stream #0:3(eng): Subtitle: ass (default) (forced)
|
||||
Stream #0:4(eng): Subtitle: ass
|
||||
Stream mapping:
|
||||
Stream #0:0 -> #0:0 (h264 (native) -> hevc (libx265))
|
||||
Stream #0:2 -> #0:1 (ac3 (native) -> eac3 (native))
|
||||
x265 [info]: HEVC encoder version 3.4
|
||||
x265 [info]: build info [Linux][GCC 9.3.0][64 bit] 8bit+10bit+12bit
|
||||
x265 [info]: using cpu capabilities: MMX2 SSE2Fast LZCNT SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
|
||||
x265 [info]: Main profile, Level-4 (Main tier)
|
||||
x265 [info]: Thread pool created using 12 threads
|
||||
x265 [info]: Slices : 1
|
||||
x265 [info]: frame threads / pool features : 3 / wpp(17 rows)
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
set_mempolicy: Operation not permitted
|
||||
x265 [info]: Coding QT: max CU size, min CU size : 64 / 8
|
||||
x265 [info]: Residual QT: max TU size, max depth : 32 / 1 inter / 1 intra
|
||||
x265 [info]: ME / range / subpel / merge : hex / 57 / 2 / 3
|
||||
x265 [info]: Keyframe min / max / scenecut / bias : 23 / 250 / 40 / 5.00
|
||||
x265 [info]: Lookahead / bframes / badapt : 20 / 4 / 2
|
||||
x265 [info]: b-pyramid / weightp / weightb : 1 / 1 / 0
|
||||
x265 [info]: References / ref-limit cu / depth : 3 / off / on
|
||||
x265 [info]: AQ: mode / str / qg-size / cu-tree : 2 / 1.0 / 32 / 1
|
||||
x265 [info]: Rate Control / qCompress : CRF-16.0 / 0.60
|
||||
x265 [info]: tools: rd=3 psy-rd=2.00 early-skip rskip mode=1 signhide tmvp
|
||||
x265 [info]: tools: b-intra strong-intra-smoothing lslices=6 deblock sao
|
||||
Output #0, mp4, to '/src/output/Demo/Demo.mp4':
|
||||
Metadata:
|
||||
encoder : Lavf58.45.100
|
||||
Chapter #0:0: start 0.000000, end 328.537000
|
||||
Metadata:
|
||||
title : 00:00:00.000
|
||||
Chapter #0:1: start 328.537000, end 419.044000
|
||||
Metadata:
|
||||
title : 00:05:28.537
|
||||
Chapter #0:2: start 419.044000, end 916.874000
|
||||
Metadata:
|
||||
title : 00:06:59.044
|
||||
Chapter #0:3: start 916.874000, end 1309.433000
|
||||
Metadata:
|
||||
title : 00:15:16.749
|
||||
Chapter #0:4: start 1309.433000, end 1399.023000
|
||||
Metadata:
|
||||
title : 00:21:49.391
|
||||
Chapter #0:5: start 1399.023000, end 1508.924000
|
||||
Metadata:
|
||||
title : 00:23:19.023
|
||||
Chapter #0:6: start 1508.924000, end 1767.099000
|
||||
Metadata:
|
||||
title : 00:25:08.924
|
||||
Chapter #0:7: start 1767.099000, end 1975.474000
|
||||
Metadata:
|
||||
title : 00:29:27.099
|
||||
Chapter #0:8: start 1975.474000, end 2301.466000
|
||||
Metadata:
|
||||
title : 00:32:55.473
|
||||
Chapter #0:9: start 2301.466000, end 2498.246000
|
||||
Metadata:
|
||||
title : 00:38:21.466
|
||||
Chapter #0:10: start 2498.246000, end 2622.036000
|
||||
Metadata:
|
||||
title : 00:41:38.246
|
||||
Chapter #0:11: start 2622.036000, end 2925.172000
|
||||
Metadata:
|
||||
title : 00:43:42.036
|
||||
Chapter #0:12: start 2925.172000, end 3183.472000
|
||||
Metadata:
|
||||
title : 00:48:45.172
|
||||
Chapter #0:13: start 3183.472000, end 3467.172000
|
||||
Metadata:
|
||||
title : 00:53:03.472
|
||||
Chapter #0:14: start 3467.172000, end 3684.472000
|
||||
Metadata:
|
||||
title : 00:57:47.172
|
||||
Chapter #0:15: start 3684.472000, end 3885.840000
|
||||
Metadata:
|
||||
title : 01:01:24.472
|
||||
Chapter #0:16: start 3885.840000, end 4063.059000
|
||||
Metadata:
|
||||
title : 01:04:45.840
|
||||
Chapter #0:17: start 4063.059000, end 4275.605000
|
||||
Metadata:
|
||||
title : 01:07:43.059
|
||||
Chapter #0:18: start 4275.605000, end 4434.263000
|
||||
Metadata:
|
||||
title : 01:11:15.605
|
||||
Chapter #0:19: start 4434.263000, end 4709.205000
|
||||
Metadata:
|
||||
title : 01:13:54.263
|
||||
Chapter #0:20: start 4709.205000, end 4900.020000
|
||||
Metadata:
|
||||
title : 01:18:29.204
|
||||
Chapter #0:21: start 4900.020000, end 5081.201000
|
||||
Metadata:
|
||||
title : 01:21:40.020
|
||||
Chapter #0:22: start 5081.201000, end 5211.123000
|
||||
Metadata:
|
||||
title : 01:24:41.201
|
||||
Chapter #0:23: start 5211.123000, end 5359.938000
|
||||
Metadata:
|
||||
title : 01:26:51.123
|
||||
Chapter #0:24: start 5359.938000, end 5833.786000
|
||||
Metadata:
|
||||
title : 01:29:19.938
|
||||
Chapter #0:25: start 5833.786000, end 5953.865000
|
||||
Metadata:
|
||||
title : 01:37:13.786
|
||||
Chapter #0:26: start 5953.865000, end 6229.432000
|
||||
Metadata:
|
||||
title : 01:39:13.865
|
||||
Chapter #0:27: start 6229.432000, end 6534.779000
|
||||
Metadata:
|
||||
title : 01:43:49.181
|
||||
Stream #0:0: Video: hevc (libx265) (hev1 / 0x31766568), yuv420p(progressive), 1920x1080 [SAR 1:1 DAR 16:9], q=-1--1, 23.98 fps, 24k tbn, 23.98 tbc (default)
|
||||
Metadata:
|
||||
encoder : Lavc58.91.100 libx265
|
||||
Side data:
|
||||
cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: N/A
|
||||
Stream #0:1(jpn): Audio: eac3 (ec-3 / 0x332D6365), 48000 Hz, 5.1(side), fltp, 448 kb/s
|
||||
Metadata:
|
||||
title : Surround
|
||||
encoder : Lavc58.91.100 eac3
|
||||
frame= 49 fps=0.0 q=24.0 size= 1kB time=00:00:02.52 bitrate= 2.4kbits/s speed=4.85x
|
||||
frame=49
|
||||
fps=0.00
|
||||
stream_0_0_q=24.0
|
||||
bitrate= 2.4kbits/s
|
||||
total_size=772
|
||||
out_time_us=2526667
|
||||
out_time_ms=2526667
|
||||
out_time=00:00:02.526667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=4.85x
|
||||
progress=continue
|
||||
frame= 87 fps= 84 q=16.7 size= 1kB time=00:00:04.09 bitrate= 1.5kbits/s speed=3.96x
|
||||
frame=87
|
||||
fps=84.21
|
||||
stream_0_0_q=16.7
|
||||
bitrate= 1.5kbits/s
|
||||
total_size=772
|
||||
out_time_us=4094667
|
||||
out_time_ms=4094667
|
||||
out_time=00:00:04.094667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=3.96x
|
||||
progress=continue
|
||||
frame= 115 fps= 75 q=22.4 size= 257kB time=00:00:05.27 bitrate= 398.5kbits/s speed=3.44x
|
||||
frame=115
|
||||
fps=74.95
|
||||
stream_0_0_q=22.4
|
||||
bitrate= 398.5kbits/s
|
||||
total_size=262916
|
||||
out_time_us=5278667
|
||||
out_time_ms=5278667
|
||||
out_time=00:00:05.278667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=3.44x
|
||||
progress=continue
|
||||
frame= 146 fps= 72 q=22.6 size= 257kB time=00:00:06.55 bitrate= 320.7kbits/s speed=3.22x
|
||||
frame=146
|
||||
fps=71.64
|
||||
stream_0_0_q=22.6
|
||||
bitrate= 320.7kbits/s
|
||||
total_size=262916
|
||||
out_time_us=6558667
|
||||
out_time_ms=6558667
|
||||
out_time=00:00:06.558667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=3.22x
|
||||
progress=continue
|
||||
frame= 175 fps= 69 q=20.5 size= 513kB time=00:00:07.77 bitrate= 540.3kbits/s speed=3.06x
|
||||
frame=175
|
||||
fps=68.82
|
||||
stream_0_0_q=20.5
|
||||
bitrate= 540.3kbits/s
|
||||
total_size=525060
|
||||
out_time_us=7774667
|
||||
out_time_ms=7774667
|
||||
out_time=00:00:07.774667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=3.06x
|
||||
progress=continue
|
||||
frame= 204 fps= 67 q=21.1 size= 769kB time=00:00:08.99 bitrate= 700.5kbits/s speed=2.94x
|
||||
frame=204
|
||||
fps=66.66
|
||||
stream_0_0_q=21.1
|
||||
bitrate= 700.5kbits/s
|
||||
total_size=787204
|
||||
out_time_us=8990667
|
||||
out_time_ms=8990667
|
||||
out_time=00:00:08.990667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=2.94x
|
||||
progress=continue
|
||||
frame= 231 fps= 65 q=20.5 size= 1025kB time=00:00:10.11 bitrate= 830.3kbits/s speed=2.83x
|
||||
frame=231
|
||||
fps=64.66
|
||||
stream_0_0_q=20.5
|
||||
bitrate= 830.3kbits/s
|
||||
total_size=1049348
|
||||
out_time_us=10110667
|
||||
out_time_ms=10110667
|
||||
out_time=00:00:10.110667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=2.83x
|
||||
progress=continue
|
||||
frame= 268 fps= 65 q=20.7 size= 1025kB time=00:00:11.64 bitrate= 720.8kbits/s speed=2.84x
|
||||
frame=268
|
||||
fps=65.29
|
||||
stream_0_0_q=20.7
|
||||
bitrate= 720.8kbits/s
|
||||
total_size=1049348
|
||||
out_time_us=11646667
|
||||
out_time_ms=11646667
|
||||
out_time=00:00:11.646667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=2.84x
|
||||
progress=continue
|
||||
frame= 312 fps= 68 q=21.0 size= 1281kB time=00:00:13.47 bitrate= 778.9kbits/s speed=2.92x
|
||||
frame=312
|
||||
fps=67.67
|
||||
stream_0_0_q=21.0
|
||||
bitrate= 778.9kbits/s
|
||||
total_size=1311492
|
||||
out_time_us=13470667
|
||||
out_time_ms=13470667
|
||||
out_time=00:00:13.470667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=2.92x
|
||||
progress=continue
|
||||
frame= 353 fps= 69 q=19.9 size= 1281kB time=00:00:15.19 bitrate= 690.3kbits/s speed=2.97x
|
||||
frame=353
|
||||
fps=68.97
|
||||
stream_0_0_q=19.9
|
||||
bitrate= 690.3kbits/s
|
||||
total_size=1311492
|
||||
out_time_us=15198667
|
||||
out_time_ms=15198667
|
||||
out_time=00:00:15.198667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=2.97x
|
||||
progress=continue
|
||||
frame= 372 fps= 66 q=17.9 size= 1537kB time=00:00:15.99 bitrate= 786.9kbits/s speed=2.84x
|
||||
frame=372
|
||||
fps=66.01
|
||||
stream_0_0_q=17.9
|
||||
bitrate= 786.9kbits/s
|
||||
total_size=1573636
|
||||
out_time_us=15998667
|
||||
out_time_ms=15998667
|
||||
out_time=00:00:15.998667
|
||||
dup_frames=0
|
||||
drop_frames=0
|
||||
speed=2.84x
|
||||
progress=continue
|
||||
42
Reader/.gitignore
vendored
42
Reader/.gitignore
vendored
@ -1,42 +0,0 @@
|
||||
.gradle
|
||||
build/
|
||||
!gradle/wrapper/gradle-wrapper.jar
|
||||
!**/src/main/**/build/
|
||||
!**/src/test/**/build/
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea/modules.xml
|
||||
.idea/jarRepositories.xml
|
||||
.idea/compiler.xml
|
||||
.idea/libraries/
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
out/
|
||||
!**/src/main/**/out/
|
||||
!**/src/test/**/out/
|
||||
|
||||
### Eclipse ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
.sts4-cache
|
||||
bin/
|
||||
!**/src/main/**/bin/
|
||||
!**/src/test/**/bin/
|
||||
|
||||
### NetBeans ###
|
||||
/nbproject/private/
|
||||
/nbbuild/
|
||||
/dist/
|
||||
/nbdist/
|
||||
/.nb-gradle/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
||||
|
||||
### Mac OS ###
|
||||
.DS_Store
|
||||
@ -1,4 +0,0 @@
|
||||
FROM bskjon/debian-azuljava17-ffmpeg:latest
|
||||
EXPOSE 8080
|
||||
|
||||
COPY ./build/libs/reader.jar /usr/share/app/app.jar
|
||||
@ -1,79 +0,0 @@
|
||||
import org.jetbrains.kotlin.gradle.plugin.mpp.pm20.util.archivesName
|
||||
|
||||
plugins {
|
||||
kotlin("jvm") version "1.8.21"
|
||||
id("org.springframework.boot") version "2.5.5"
|
||||
id("io.spring.dependency-management") version "1.0.11.RELEASE"
|
||||
kotlin("plugin.spring") version "1.5.31"
|
||||
}
|
||||
|
||||
archivesName.set("reader.jar")
|
||||
group = "no.iktdev.streamit.content"
|
||||
version = "1.0-SNAPSHOT"
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
maven("https://jitpack.io")
|
||||
maven {
|
||||
url = uri("https://reposilite.iktdev.no/releases")
|
||||
}
|
||||
maven {
|
||||
url = uri("https://reposilite.iktdev.no/snapshots")
|
||||
}
|
||||
}
|
||||
|
||||
val exposedVersion = "0.38.2"
|
||||
dependencies {
|
||||
implementation("no.iktdev.streamit.library:streamit-library-kafka:0.0.2-alpha84")
|
||||
implementation("no.iktdev:exfl:0.0.13-SNAPSHOT")
|
||||
|
||||
implementation("no.iktdev.streamit.library:streamit-library-db:0.0.6-alpha14")
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.1")
|
||||
|
||||
|
||||
implementation("org.jetbrains.exposed:exposed-core:$exposedVersion")
|
||||
implementation("org.jetbrains.exposed:exposed-dao:$exposedVersion")
|
||||
implementation("org.jetbrains.exposed:exposed-jdbc:$exposedVersion")
|
||||
implementation("org.jetbrains.exposed:exposed-java-time:$exposedVersion")
|
||||
implementation ("mysql:mysql-connector-java:8.0.29")
|
||||
|
||||
implementation("com.github.pgreze:kotlin-process:1.3.1")
|
||||
implementation("com.github.vishna:watchservice-ktx:master-SNAPSHOT")
|
||||
implementation("io.github.microutils:kotlin-logging-jvm:2.0.11")
|
||||
|
||||
implementation("com.google.code.gson:gson:2.8.9")
|
||||
implementation("org.json:json:20210307")
|
||||
|
||||
implementation("org.springframework.boot:spring-boot-starter-web")
|
||||
implementation("org.springframework.boot:spring-boot-starter:2.7.0")
|
||||
implementation("org.springframework.kafka:spring-kafka:2.8.5")
|
||||
implementation("org.springframework.boot:spring-boot-starter-websocket:2.6.3")
|
||||
|
||||
|
||||
implementation(project(":CommonCode"))
|
||||
|
||||
testImplementation("junit:junit:4.13.2")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter-api:5.8.1")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.1")
|
||||
testImplementation("org.assertj:assertj-core:3.4.1")
|
||||
testImplementation("org.mockito:mockito-core:3.+")
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
tasks.test {
|
||||
useJUnitPlatform()
|
||||
}
|
||||
|
||||
tasks.bootJar {
|
||||
archiveFileName.set("reader.jar")
|
||||
launchScript()
|
||||
}
|
||||
|
||||
tasks.jar {
|
||||
archivesName.set("reader.jar")
|
||||
archiveBaseName.set("reader")
|
||||
}
|
||||
BIN
Reader/gradle/wrapper/gradle-wrapper.jar
vendored
BIN
Reader/gradle/wrapper/gradle-wrapper.jar
vendored
Binary file not shown.
@ -1,6 +0,0 @@
|
||||
#Tue Jul 11 02:16:45 CEST 2023
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
234
Reader/gradlew
vendored
234
Reader/gradlew
vendored
@ -1,234 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Gradle start up script for POSIX generated by Gradle.
|
||||
#
|
||||
# Important for running:
|
||||
#
|
||||
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||
# noncompliant, but you have some other compliant shell such as ksh or
|
||||
# bash, then to run this script, type that shell name before the whole
|
||||
# command line, like:
|
||||
#
|
||||
# ksh Gradle
|
||||
#
|
||||
# Busybox and similar reduced shells will NOT work, because this script
|
||||
# requires all of these POSIX shell features:
|
||||
# * functions;
|
||||
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||
# * compound commands having a testable exit status, especially «case»;
|
||||
# * various built-in commands including «command», «set», and «ulimit».
|
||||
#
|
||||
# Important for patching:
|
||||
#
|
||||
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||
#
|
||||
# The "traditional" practice of packing multiple parameters into a
|
||||
# space-separated string is a well documented source of bugs and security
|
||||
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||
# options in "$@", and eventually passing that to Java.
|
||||
#
|
||||
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||
# see the in-line comments for details.
|
||||
#
|
||||
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
|
||||
# Resolve links: $0 may be a link
|
||||
app_path=$0
|
||||
|
||||
# Need this for daisy-chained symlinks.
|
||||
while
|
||||
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||
[ -h "$app_path" ]
|
||||
do
|
||||
ls=$( ls -ld "$app_path" )
|
||||
link=${ls#*' -> '}
|
||||
case $link in #(
|
||||
/*) app_path=$link ;; #(
|
||||
*) app_path=$APP_HOME$link ;;
|
||||
esac
|
||||
done
|
||||
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=${0##*/}
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
} >&2
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
} >&2
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "$( uname )" in #(
|
||||
CYGWIN* ) cygwin=true ;; #(
|
||||
Darwin* ) darwin=true ;; #(
|
||||
MSYS* | MINGW* ) msys=true ;; #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||
else
|
||||
JAVACMD=$JAVA_HOME/bin/java
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD=java
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command, stacking in reverse order:
|
||||
# * args from the command line
|
||||
# * the main class name
|
||||
# * -classpath
|
||||
# * -D...appname settings
|
||||
# * --module-path (only if needed)
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
for arg do
|
||||
if
|
||||
case $arg in #(
|
||||
-*) false ;; # don't mess with options #(
|
||||
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||
[ -e "$t" ] ;; #(
|
||||
*) false ;;
|
||||
esac
|
||||
then
|
||||
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||
fi
|
||||
# Roll the args list around exactly as many times as the number of
|
||||
# args, so each arg winds up back in the position where it started, but
|
||||
# possibly modified.
|
||||
#
|
||||
# NB: a `for` loop captures its iteration list before it begins, so
|
||||
# changing the positional parameters here affects neither the number of
|
||||
# iterations, nor the values presented in `arg`.
|
||||
shift # remove old arg
|
||||
set -- "$@" "$arg" # push replacement arg
|
||||
done
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command;
|
||||
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
|
||||
# shell script including quotes and variable substitutions, so put them in
|
||||
# double quotes to make sure that they get re-expanded; and
|
||||
# * put everything else in single quotes, so that it's not re-expanded.
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
#
|
||||
# In Bash we could simply go:
|
||||
#
|
||||
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||
# set -- "${ARGS[@]}" "$@"
|
||||
#
|
||||
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||
# character that might be a shell metacharacter, then use eval to reverse
|
||||
# that process (while maintaining the separation between arguments), and wrap
|
||||
# the whole thing up as a single "set" statement.
|
||||
#
|
||||
# This will of course break if any of these variables contains a newline or
|
||||
# an unmatched quote.
|
||||
#
|
||||
|
||||
eval "set -- $(
|
||||
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||
xargs -n1 |
|
||||
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||
tr '\n' ' '
|
||||
)" '"$@"'
|
||||
|
||||
exec "$JAVACMD" "$@"
|
||||
89
Reader/gradlew.bat
vendored
89
Reader/gradlew.bat
vendored
@ -1,89 +0,0 @@
|
||||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
||||
@ -1,4 +0,0 @@
|
||||
rootProject.name = "Reader"
|
||||
|
||||
include(":CommonCode")
|
||||
project(":CommonCode").projectDir = File("../CommonCode")
|
||||
@ -1,66 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader

import kotlinx.coroutines.launch
import mu.KotlinLogging
import no.iktdev.exfl.coroutines.Coroutines
import no.iktdev.exfl.observable.Observables
import no.iktdev.streamit.content.reader.analyzer.encoding.helpers.PreferenceReader
import no.iktdev.streamit.library.db.datasource.MySqlDataSource
import no.iktdev.streamit.library.db.tables.*
import no.iktdev.streamit.library.db.tables.helper.cast_errors
import no.iktdev.streamit.library.db.tables.helper.data_audio
import no.iktdev.streamit.library.db.tables.helper.data_video
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.transactions.transaction
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication
import org.springframework.context.ApplicationContext

private val logger = KotlinLogging.logger {}

/** Spring Boot entry point for the Reader service. */
@SpringBootApplication
class ReaderApplication

/** Encoding preference, loaded once at startup from the preference file (or defaults). */
val preference = PreferenceReader().getPreference()

// Spring application context, set once the application has started.
private var context: ApplicationContext? = null

/** @return the Spring context, or null if the application has not started yet. */
@Suppress("unused")
fun getContext(): ApplicationContext? = context

fun main(args: Array<String>) {
    // Surface uncaught coroutine errors instead of losing them silently.
    Coroutines.addListener(object : Observables.ObservableValue.ValueListener<Throwable> {
        override fun onUpdated(value: Throwable) {
            logger.error { "Received error: ${value.message}" }
            value.cause?.printStackTrace()
        }
    })

    val ds = MySqlDataSource.fromDatabaseEnv().createDatabase()
    // Idiomatic Kotlin println instead of System.out.println; same stdout output.
    println(ds)

    // Create any missing tables/columns in the background while Spring boots.
    Coroutines.default().launch {
        val tables = arrayOf(
            catalog,
            genre,
            movie,
            serie,
            subtitle,
            summary,
            users,
            progress,
            data_audio,
            data_video,
            cast_errors,
        )
        transaction {
            SchemaUtils.createMissingTablesAndColumns(*tables)
            logger.info { "Database transaction completed" }
        }
    }

    context = runApplication<ReaderApplication>(*args)
}
|
||||
|
||||
@ -1,11 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader

import java.io.File

/** Environment-backed configuration values for the Reader service. */
class ReaderEnv {
    companion object {
        // Milliseconds to wait for metadata before giving up; defaults to 5 minutes.
        val metadataTimeOut: Long = System.getenv("TIMEOUT_READER_WAIT_FOR_METADATA")?.toLongOrNull() ?: 300_000
        // ffprobe executable to invoke; falls back to whatever "ffprobe" resolves to on PATH.
        val ffprobe: String = System.getenv("SUPPORTING_EXECUTABLE_FFPROBE") ?: "ffprobe"
        // Location of the user-supplied encoding preference file.
        val encodePreference: File = File("/data/config/preference.json")
    }
}
|
||||
@ -1,122 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.contentDeterminator

import mu.KotlinLogging
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.DefaultKafkaReader
import no.iktdev.streamit.content.common.deserializers.FileResultDeserializer
import no.iktdev.streamit.content.common.deserializers.MetadataResultDeserializer
import no.iktdev.streamit.content.common.dto.ContentOutName
import no.iktdev.streamit.content.common.dto.Metadata
import no.iktdev.streamit.content.common.dto.reader.EpisodeInfo
import no.iktdev.streamit.content.common.dto.reader.FileResult
import no.iktdev.streamit.content.common.dto.reader.MovieInfo
import no.iktdev.streamit.content.reader.ReaderEnv
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
import no.iktdev.streamit.library.kafka.listener.sequential.ISequentialMessageEvent
import no.iktdev.streamit.library.kafka.listener.sequential.SequentialMessageListener
import org.springframework.stereotype.Service

private val logger = KotlinLogging.logger {}

/**
 * Waits for the received-file event plus its metadata, then determines the
 * final file name and whether the content is a movie or an episode.
 */
@Service
class ContentDeterminate : DefaultKafkaReader("contentDeterminate"), ISequentialMessageEvent {

    /** Sequential listener: fires once the file event and metadata have both arrived (or timed out). */
    final val mainListener = object : SequentialMessageListener(
        topic = CommonConfig.kafkaTopic,
        consumer = defaultConsumer,
        accept = KafkaEvents.EVENT_READER_RECEIVED_FILE.event,
        subAccepts = listOf(KafkaEvents.EVENT_METADATA_OBTAINED.event),
        deserializers = loadDeserializers(),
        listener = this,
        validity = ReaderEnv.metadataTimeOut
    ) {}

    init {
        mainListener.listen()
    }

    override fun getRequiredMessages(): List<String> =
        mainListener.subAccepts + listOf(mainListener.accept)

    override fun onAllMessagesProcessed(referenceId: String, result: Map<String, Message?>) {
        logger.info { "All messages are received" }

        val initMessage = result[KafkaEvents.EVENT_READER_RECEIVED_FILE.event]
        if (initMessage == null || initMessage.status.statusType != StatusType.SUCCESS) {
            produceErrorMessage(
                KafkaEvents.EVENT_READER_DETERMINED_FILENAME,
                Message(referenceId = referenceId, status = Status(statusType = StatusType.ERROR)),
                "Initiator message not found!"
            )
            return
        }

        val fileResult = initMessage.data as FileResult?
        if (fileResult == null) {
            produceErrorMessage(
                KafkaEvents.EVENT_READER_DETERMINED_FILENAME,
                initMessage,
                "FileResult is either null or not deserializable!"
            )
            return
        }

        val metadataMessage = result[KafkaEvents.EVENT_METADATA_OBTAINED.event]
        val metadata =
            if (metadataMessage?.status?.statusType == StatusType.SUCCESS) metadataMessage.data as Metadata? else null

        // Sources might claim "serie" when the input is not one: try the hinted
        // content type first, then fall back to an unconstrained determination
        // (which defaults to movie).
        val hintedType = when (metadata?.type) {
            "serie" -> FileNameDeterminate.ContentType.SERIE
            "movie" -> FileNameDeterminate.ContentType.MOVIE
            else -> null
        }
        val videoInfo = hintedType
            ?.let { FileNameDeterminate(fileResult.title, fileResult.sanitizedName, it).getDeterminedVideoInfo() }
            ?: FileNameDeterminate(fileResult.title, fileResult.sanitizedName).getDeterminedVideoInfo()

        if (videoInfo == null) {
            produceErrorMessage(KafkaEvents.EVENT_READER_DETERMINED_FILENAME, initMessage, "VideoInfo is null.")
            return
        }

        when (videoInfo) {
            is EpisodeInfo -> produceSuccessMessage(KafkaEvents.EVENT_READER_DETERMINED_SERIE, referenceId, videoInfo)
            is MovieInfo -> produceSuccessMessage(KafkaEvents.EVENT_READER_DETERMINED_MOVIE, referenceId, videoInfo)
            else -> Unit
        }

        val out = ContentOutName(videoInfo.fullName)
        produceSuccessMessage(KafkaEvents.EVENT_READER_DETERMINED_FILENAME, referenceId, out)
    }

    final override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> = mutableMapOf(
        KafkaEvents.EVENT_READER_RECEIVED_FILE.event to FileResultDeserializer(),
        KafkaEvents.EVENT_METADATA_OBTAINED.event to MetadataResultDeserializer()
    )
}
|
||||
@ -1,150 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding

import mu.KotlinLogging
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.DefaultKafkaReader
import no.iktdev.streamit.content.common.deserializers.ContentOutNameDeserializer
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
import no.iktdev.streamit.content.common.deserializers.FileResultDeserializer
import no.iktdev.streamit.content.common.deserializers.MediaStreamsDeserializer
import no.iktdev.streamit.content.common.dto.ContentOutName
import no.iktdev.streamit.content.common.dto.reader.FileResult
import no.iktdev.streamit.content.common.streams.MediaStreams
import no.iktdev.streamit.content.reader.analyzer.encoding.helpers.EncodeArgumentSelector
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.dto.Status
import no.iktdev.streamit.library.kafka.dto.StatusType
import no.iktdev.streamit.library.kafka.listener.collector.CollectorMessageListener
import no.iktdev.streamit.library.kafka.listener.collector.ICollectedMessagesEvent
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
import no.iktdev.streamit.library.kafka.listener.sequential.ISequentialMessageEvent
import no.iktdev.streamit.library.kafka.listener.sequential.SequentialMessageListener
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.springframework.stereotype.Service
import java.io.File

private val logger = KotlinLogging.logger {}

/**
 * Collects the file, file-name and stream events for one piece of content,
 * then emits encode (video+audio) and extract (subtitle) work orders.
 */
@Service
class EncodedStreams : DefaultKafkaReader("streamSelector"), ISequentialMessageEvent {

    /** Sequential listener over the file event and its filename/streams sub-events. */
    val listener = object : SequentialMessageListener(
        topic = CommonConfig.kafkaTopic,
        consumer = defaultConsumer,
        accept = KafkaEvents.EVENT_READER_RECEIVED_FILE.event,
        subAccepts = listOf(
            KafkaEvents.EVENT_READER_DETERMINED_FILENAME.event,
            KafkaEvents.EVENT_READER_RECEIVED_STREAMS.event,
        ),
        listener = this,
        deserializers = this.loadDeserializers()
    ) {}

    init {
        listener.listen()
    }

    /**
     * Validates inputs and publishes a video+audio encode work order,
     * or an error event naming the missing piece.
     */
    fun createEncodeWork(referenceId: String, collection: String?, inFile: String?, streams: MediaStreams?, outFileName: String?) {
        if (inFile.isNullOrBlank()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO, referenceId, "No input file received")
            return
        }
        if (streams == null) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO, referenceId, "No input streams received")
            return
        }
        if (outFileName.isNullOrBlank()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO, referenceId, "No output file name received!")
            return
        }
        if (collection.isNullOrBlank()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO, referenceId, "No collection provided for file!")
            return
        }

        val encodeInformation =
            EncodeArgumentSelector(collection = collection, inputFile = inFile, streams = streams, outFileName = outFileName)

        val videoInstructions = encodeInformation.getVideoAndAudioArguments()
        if (videoInstructions == null) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO, referenceId, "Failed to generate Video Arguments Bundle")
            return
        }
        produceSuccessMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_VIDEO, referenceId, videoInstructions)
    }

    /**
     * Validates inputs and publishes one subtitle-extract work order per
     * desired subtitle stream, or an error event naming the missing piece.
     */
    fun createExtractWork(referenceId: String, collection: String?, inFile: String?, streams: MediaStreams?, outFileName: String?) {
        if (inFile.isNullOrBlank()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE, referenceId, "No input file received")
            return
        }
        if (streams == null) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE, referenceId, "No input streams received")
            return
        }
        if (outFileName.isNullOrBlank()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE, referenceId, "No output file name received!")
            return
        }
        if (collection.isNullOrBlank()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE, referenceId, "No collection provided for file!")
            return
        }

        val argsSelector = EncodeArgumentSelector(collection = collection, inputFile = inFile, streams = streams, outFileName = outFileName)
        // Fix: argsSelector is a non-null constructor result, so the old
        // `argsSelector == null` guard was dead; only an empty result is a failure.
        val items = argsSelector.getSubtitleArguments()
        if (items.isEmpty()) {
            produceErrorMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE, referenceId, "Failed to generate Subtitle Arguments Bundle")
            return
        }

        // Reuse the already-computed list instead of regenerating it.
        items.forEach {
            produceMessage(KafkaEvents.EVENT_READER_ENCODE_GENERATED_SUBTITLE, Message(referenceId, Status(StatusType.SUCCESS)), it)
        }
    }

    final override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
        return DeserializerRegistry.getEventToDeserializer(
            KafkaEvents.EVENT_READER_RECEIVED_FILE,
            KafkaEvents.EVENT_READER_RECEIVED_STREAMS,
            KafkaEvents.EVENT_READER_DETERMINED_FILENAME
        )
    }

    override fun getRequiredMessages(): List<String> {
        return listener.subAccepts + listOf(listener.accept)
    }

    override fun onAllMessagesProcessed(referenceId: String, result: Map<String, Message?>) {
        logger.info { "Collection received" }
        if (result.keys.isEmpty()) {
            logger.error { "\nConsumer $subId collected: is null or empty!" }
        } else {
            logger.info { "\nConsumer $subId collected:\n ${result.keys.joinToString("\n\t")}" }
        }

        // Fix: evaluate getFileName(result) once instead of twice.
        val determinedName = getFileName(result)
        val outFileNameWithoutExtension: String? = if (determinedName != null) {
            determinedName.baseName
        } else {
            logger.info { "Getting filename from ${KafkaEvents.EVENT_READER_DETERMINED_FILENAME.event} resulted in null. Falling back to sanitized name" }
            getFileResult(result)?.sanitizedName
        }

        val fileResult = getFileResult(result)
        // Deserialize the streams once and share between both work orders.
        val streams = getStreams(result)
        createEncodeWork(referenceId, fileResult?.title, fileResult?.file, streams, outFileNameWithoutExtension)
        createExtractWork(referenceId, fileResult?.title, fileResult?.file, streams, outFileNameWithoutExtension)
    }

    /** @return the deserialized FileResult from the received-file event, or null. */
    fun getFileResult(result: Map<String, Message?>): FileResult? {
        val record = result[KafkaEvents.EVENT_READER_RECEIVED_FILE.event] ?: return null
        return FileResultDeserializer().deserializeIfSuccessful(record)
    }

    /** @return the deserialized ContentOutName from the determined-filename event, or null. */
    fun getFileName(result: Map<String, Message?>): ContentOutName? {
        val record = result[KafkaEvents.EVENT_READER_DETERMINED_FILENAME.event] ?: return null
        return ContentOutNameDeserializer().deserializeIfSuccessful(record)
    }

    /** @return the deserialized MediaStreams from the received-streams event, or null. */
    fun getStreams(result: Map<String, Message?>): MediaStreams? {
        val record = result[KafkaEvents.EVENT_READER_RECEIVED_STREAMS.event] ?: return null
        return MediaStreamsDeserializer().deserializeIfSuccessful(record)
    }
}
|
||||
@ -1,21 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding

import no.iktdev.streamit.content.common.deserializers.ContentOutNameDeserializer
import no.iktdev.streamit.content.common.deserializers.FileResultDeserializer
import no.iktdev.streamit.content.common.deserializers.MediaStreamsDeserializer
import no.iktdev.streamit.content.common.dto.ContentOutName
import no.iktdev.streamit.content.common.dto.reader.FileResult
import no.iktdev.streamit.content.common.streams.MediaStreams
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.collector.DefaultEventCollection
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
import org.apache.kafka.clients.consumer.ConsumerRecord

/** Event collection with keyed record lookup for the stream-selector flow. */
class ResultCollection : DefaultEventCollection() {

    /** @return the first collected record whose key matches [events], or null when none was seen. */
    fun getFirstOrNull(events: KafkaEvents): ConsumerRecord<String, Message>? =
        getRecords().firstOrNull { record -> record.key() == events.event }
}
|
||||
@ -1,24 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding.dto

import no.iktdev.streamit.content.common.streams.AudioStream
import no.iktdev.streamit.content.reader.preference

/** Builds the ffmpeg audio arguments for [audio] at audio-stream position [index]. */
class AudioEncodeArguments(val audio: AudioStream, val index: Int) {

    /** True when the source codec already matches the preferred audio codec. */
    fun isAudioCodecEqual() = audio.codec_name.lowercase() == preference.audio.codec.lowercase()

    /** True when surround (>2ch) non-EAC3 audio should be re-encoded to EAC3. */
    fun shouldUseEAC3(): Boolean =
        preference.audio.defaultToEAC3OnSurroundDetected &&
            audio.channels > 2 &&
            audio.codec_name.lowercase() != "eac3"

    /** @return the codec + stream-map arguments for this audio stream. */
    fun getAudioArguments(): MutableList<String> {
        val arguments = mutableListOf<String>()
        when {
            shouldUseEAC3() -> arguments += listOf("-c:a", "eac3")
            !isAudioCodecEqual() -> arguments += listOf("-c:a", preference.audio.codec)
            else -> arguments += listOf("-acodec", "copy")
        }
        arguments += listOf("-map", "0:a:${index}")
        return arguments
    }
}
|
||||
@ -1,14 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding.dto

import no.iktdev.streamit.content.common.streams.SubtitleStream

/** Builds the ffmpeg arguments to copy the subtitle stream at position [index]. */
class SubtitleEncodeArguments(val subtitle: SubtitleStream, val index: Int) {

    /** @return codec-copy + stream-map arguments for this subtitle stream. */
    fun getSubtitleArguments(): List<String> =
        listOf("-c:s", "copy", "-map", "0:s:$index")
}
|
||||
@ -1,36 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding.dto

import no.iktdev.streamit.content.common.streams.VideoStream
import no.iktdev.streamit.content.reader.preference

/** Builds the ffmpeg video arguments for [video] at video-stream position [index]. */
class VideoEncodeArguments(val video: VideoStream, val index: Int) {

    /**
     * True when the source codec normalizes to the same encoder as the preferred codec.
     * Fix: the source codec_name is now lowercased like the preference side, so an
     * upper/mixed-case probe value (e.g. "HEVC") still matches.
     */
    fun isVideoCodecEqual() = getCodec(video.codec_name.lowercase()) == getCodec(preference.video.codec.lowercase())

    /** @return codec (copy or re-encode + crf), pixel-format and stream-map arguments. */
    fun getVideoArguments(): List<String> {
        val result = mutableListOf<String>()
        if (isVideoCodecEqual()) result.addAll(listOf(
            "-vcodec", "copy"
        )) else {
            result.addAll(listOf("-c:v", getCodec(preference.video.codec.lowercase())))
            result.addAll(listOf("-crf", preference.video.threshold.toString()))
        }
        // Convert the pixel format unless the source format is in the passthrough list.
        if (preference.video.pixelFormatPassthrough.none { it == video.pix_fmt }) {
            result.addAll(listOf("-pix_fmt", preference.video.pixelFormat))
        }
        result.addAll(listOf("-map", "0:v:${index}"))
        return result
    }

    // Normalizes codec aliases (including the common "hevec" typo) to ffmpeg encoder names.
    // Was `protected` in a final class, which is equivalent to private; made explicit.
    private fun getCodec(name: String): String {
        return when (name) {
            "hevc", "hevec", "h265", "h.265", "libx265"
            -> "libx265"
            "h.264", "h264", "libx264"
            -> "libx264"
            else -> name
        }
    }
}
|
||||
@ -1,88 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding.helpers

import no.iktdev.exfl.using
import no.iktdev.streamit.content.common.CommonConfig
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
import no.iktdev.streamit.content.common.streams.*
import no.iktdev.streamit.content.reader.analyzer.encoding.dto.AudioEncodeArguments
import no.iktdev.streamit.content.reader.analyzer.encoding.dto.SubtitleEncodeArguments
import no.iktdev.streamit.content.reader.analyzer.encoding.dto.VideoEncodeArguments
import no.iktdev.streamit.content.reader.preference

/**
 * Selects which video/audio/subtitle streams to process from [streams] and
 * produces the corresponding encode and extract work orders.
 *
 * @param collection content collection (grouping) name
 * @param inputFile absolute path of the source media file
 * @param outFileName output base name without extension
 */
class EncodeArgumentSelector(val collection: String, val inputFile: String, val streams: MediaStreams, val outFileName: String) {
    // Fallback selections, computed once at construction.
    var defaultSelectedVideo: VideoStream? = defaultSelectedVideo()
    var defaultSelectedAudio: AudioStream? = defaultSelectedAudio()

    private fun obtainAudioStreams() = streams.streams.filterIsInstance<AudioStream>()
    private fun obtainVideoStreams() = streams.streams.filterIsInstance<VideoStream>()

    // Longest stream (by duration_ts) wins; otherwise the lowest stream index.
    private fun defaultSelectedVideo(): VideoStream? {
        return obtainVideoStreams().filter { (it.duration_ts ?: 0) > 0 }.maxByOrNull { it.duration_ts!! }
            ?: obtainVideoStreams().minByOrNull { it.index }
    }

    // Same heuristic as defaultSelectedVideo, applied to audio streams.
    private fun defaultSelectedAudio(): AudioStream? {
        return obtainAudioStreams().filter { (it.duration_ts ?: 0) > 0 }.maxByOrNull { it.duration_ts!! }
            ?: obtainAudioStreams().minByOrNull { it.index }
    }

    /**
     * @return VideoStream based on preference or defaultSelectedVideo
     */
    /*private fun getSelectedVideoBasedOnPreference(): VideoStream {
        val
    }*/

    /**
     * @return AudioStream matching the preferred language/channels/codec,
     * else the first preferred-language stream, else [defaultSelectedAudio].
     */
    private fun getSelectedAudioBasedOnPreference(): AudioStream? {
        val languageFiltered = obtainAudioStreams().filter { it.tags.language == preference.audio.language }
        val channeledAndCodec = languageFiltered.find {
            it.channels >= (preference.audio.channels ?: 2) && it.codec_name == preference.audio.codec.lowercase()
        }
        // Fix: removed the redundant nested `return` inside the elvis chain.
        return channeledAndCodec ?: languageFiltered.minByOrNull { it.index } ?: defaultSelectedAudio
    }

    /**
     * @return the video+audio encode work order targeting an .mp4 container,
     * or null when no usable video or audio stream exists.
     */
    fun getVideoAndAudioArguments(): EncodeWork? {
        // Fix: replaced the confusing `return if (...) return null` construct
        // with plain guard returns; behavior is unchanged.
        val selectedVideo = defaultSelectedVideo ?: return null
        val selectedAudio = getSelectedAudioBasedOnPreference() ?: defaultSelectedAudio ?: return null

        // Renamed the local so it no longer shadows the class property `outFileName`.
        val containerFileName = "$outFileName.mp4"
        val outFile = CommonConfig.outgoingContent.using(collection, containerFileName)
        val audioIndex = obtainAudioStreams().indexOf(selectedAudio)
        val videoIndex = obtainVideoStreams().indexOf(selectedVideo)
        return EncodeWork(
            collection = collection,
            inFile = inputFile,
            arguments = VideoEncodeArguments(selectedVideo, videoIndex).getVideoArguments() +
                    AudioEncodeArguments(selectedAudio, audioIndex).getAudioArguments(),
            outFile = outFile.absolutePath
        )
    }

    /**
     * @return one subtitle-extract work order per desired subtitle stream,
     * flagging streams that additionally need a conversion pass.
     */
    fun getSubtitleArguments(): List<ExtractWork> {
        val availableSubtitleStreams = streams.streams.filterIsInstance<SubtitleStream>()
        val subtitleStreams = SubtitleStreamSelector(availableSubtitleStreams)

        val conversionCandidates = subtitleStreams.getCandidateForConversion()

        return subtitleStreams.getDesiredStreams().map {
            val args = SubtitleEncodeArguments(it, availableSubtitleStreams.indexOf(it))
            val language = it.tags.language ?: "eng"
            // Renamed to avoid shadowing the class property `outFileName`.
            val subtitleFileName = "$outFileName.${subtitleStreams.getFormatToCodec(it.codec_name)}"
            val outFile = CommonConfig.outgoingContent.using(collection, "sub", language, subtitleFileName)

            ExtractWork(
                collection = collection,
                language = language,
                inFile = inputFile,
                outFile = outFile.absolutePath,
                arguments = args.getSubtitleArguments(),
                produceConvertEvent = conversionCandidates.contains(it)
            )
        }
    }
}
|
||||
@ -1,85 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.analyzer.encoding.helpers

import com.google.gson.Gson
import no.iktdev.streamit.content.reader.ReaderEnv
import org.slf4j.LoggerFactory

/** Encoding preferences for the Reader service: video and audio settings combined. */
data class EncodingPreference(
    val video: VideoPreference,
    val audio: AudioPreference
)

/** Preferred video encoding settings; defaults describe a plain h264/yuv420p profile. */
data class VideoPreference(
    val codec: String = "h264",
    val pixelFormat: String = "yuv420p",
    val pixelFormatPassthrough: List<String> = listOf<String>("yuv420p", "yuv420p10le"),
    val threshold: Int = 16
)

/** Preferred audio encoding settings. */
data class AudioPreference(
    val codec: String = "aac",
    val sample_rate: Int? = null,
    val channels: Int? = null,
    val language: String = "eng", //ISO3 format
    val preserveChannels: Boolean = true,
    val defaultToEAC3OnSurroundDetected: Boolean = true,
    val forceStereo: Boolean = false
)

/**
 * Loads the user's [EncodingPreference] from [ReaderEnv.encodePreference],
 * falling back to built-in defaults when the file is missing, unreadable,
 * or not valid JSON.
 */
class PreferenceReader {
    // Single logger instance (same name as before: the class's simple name);
    // previously a new logger was looked up on every log call.
    private val logger = LoggerFactory.getLogger(javaClass.simpleName)

    /** @return the configured preference, or the defaults when none can be loaded. */
    fun getPreference(): EncodingPreference {
        val defaultPreference = EncodingPreference(video = VideoPreference(), audio = AudioPreference())
        val preferenceText = readPreference() ?: return defaultPreference
        val configured = deserialize(preferenceText)

        printConfiguration("Audio", "Codec", configured?.audio?.codec, defaultPreference.audio.codec)
        printConfiguration("Audio", "Language", configured?.audio?.language, defaultPreference.audio.language)
        printConfiguration("Audio", "Channels", configured?.audio?.channels.toString(), defaultPreference.audio.channels.toString())
        printConfiguration("Audio", "Sample rate", configured?.audio?.sample_rate.toString(), defaultPreference.audio.sample_rate.toString())
        printConfiguration("Audio", "Override to EAC3 for surround", configured?.audio?.defaultToEAC3OnSurroundDetected.toString(), defaultPreference.audio.defaultToEAC3OnSurroundDetected.toString())

        printConfiguration("Video", "Codec", configured?.video?.codec, defaultPreference.video.codec)
        printConfiguration("Video", "Pixel format", configured?.video?.pixelFormat, defaultPreference.video.pixelFormat)
        printConfiguration("Video", "Threshold", configured?.video?.threshold.toString(), defaultPreference.video.threshold.toString())

        return configured ?: defaultPreference
    }

    /** Logs which value — configured or default — will be used for one setting. */
    fun printConfiguration(sourceType: String, key: String, value: String?, default: String?) {
        val usedValue = if (!value.isNullOrEmpty()) value else if (!default.isNullOrEmpty()) "$default (default)" else "no changes will be made"
        logger.info("$sourceType: $key => $usedValue")
    }

    /** @return the raw preference-file text, or null when the file is absent or unreadable. */
    fun readPreference(): String? {
        val prefFile = ReaderEnv.encodePreference
        if (!prefFile.exists()) {
            logger.info("Preference file: ${prefFile.absolutePath} does not exists...")
            logger.info("Using default configuration")
            return null
        }
        else {
            logger.info("Preference file: ${prefFile.absolutePath} found")
        }

        try {
            val instr = prefFile.inputStream()
            // use {} guarantees the reader is closed even if readText throws.
            return instr.bufferedReader().use { it.readText() }
        }
        catch (e: Exception) {
            logger.error("Failed to read preference file: ${prefFile.absolutePath}.. Will use default configuration")
        }
        return null
    }

    /** @return the parsed preference, or null when [value] is null or not valid JSON. */
    fun deserialize(value: String?): EncodingPreference? {
        value ?: return null
        return try {
            // Fix: dropped the redundant `?: null`; fromJson already yields null for "null" input.
            Gson().fromJson(value, EncodingPreference::class.java)
        } catch (e: Exception) {
            // Fix: malformed JSON previously threw (JsonSyntaxException) and crashed
            // startup; now it falls back to the default configuration.
            logger.error("Failed to parse preference file content; using default configuration")
            null
        }
    }
}
|
||||
@ -1,72 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.collector

import no.iktdev.streamit.content.common.deserializers.*
import no.iktdev.streamit.content.common.dto.ContentOutName
import no.iktdev.streamit.content.common.dto.Metadata
import no.iktdev.streamit.content.common.dto.reader.EpisodeInfo
import no.iktdev.streamit.content.common.dto.reader.FileResult
import no.iktdev.streamit.content.common.dto.reader.MovieInfo
import no.iktdev.streamit.content.common.dto.reader.work.EncodeWork
import no.iktdev.streamit.library.kafka.KafkaEvents
import no.iktdev.streamit.library.kafka.dto.Message
import no.iktdev.streamit.library.kafka.listener.collector.DefaultEventCollection
import no.iktdev.streamit.library.kafka.listener.deserializer.deserializeIfSuccessful
import org.apache.kafka.clients.consumer.ConsumerRecord

/** Typed accessors over the records collected for one content sequence. */
class ResultCollection: DefaultEventCollection() {

    /** @return the first collected record keyed by [events], or null when none was seen. */
    fun getFirstOrNull(events: KafkaEvents): ConsumerRecord<String, Message>? {
        return getRecords().firstOrNull { it.key() == events.event }
    }

    /** @return the referenceId shared by the collected sequence, read from the first record. */
    fun getReferenceId(): String? {
        return getRecords().firstOrNull()?.value()?.referenceId
    }

    /**
     * @see KafkaEvents.EVENT_READER_RECEIVED_FILE
     * @see FileResult for data structure
     */
    fun getFileResult(): FileResult? {
        // Consistency: route every lookup through getFirstOrNull instead of
        // filtering getRecords() inline here only.
        val record = getFirstOrNull(KafkaEvents.EVENT_READER_RECEIVED_FILE) ?: return null
        return FileResultDeserializer().deserializeIfSuccessful(record.value())
    }

    /**
     * @see KafkaEvents.EVENT_READER_DETERMINED_FILENAME
     * @see ContentOutName for data structure
     */
    fun getFileName(): ContentOutName? {
        val record = getFirstOrNull(KafkaEvents.EVENT_READER_DETERMINED_FILENAME) ?: return null
        return ContentOutNameDeserializer().deserializeIfSuccessful(record.value())
    }

    /**
     * @see KafkaEvents.EVENT_METADATA_OBTAINED and
     * @see Metadata for datastructure
     */
    fun getMetadata(): Metadata? {
        return getFirstOrNull(KafkaEvents.EVENT_METADATA_OBTAINED)?.let {
            MetadataResultDeserializer().deserializeIfSuccessful(it.value())
        }
    }

    /** @see KafkaEvents.EVENT_READER_DETERMINED_MOVIE */
    fun getMovieInfo(): MovieInfo? {
        return getFirstOrNull(KafkaEvents.EVENT_READER_DETERMINED_MOVIE)?.let {
            MovieInfoDeserializer().deserializeIfSuccessful(it.value())
        }
    }

    /** @see KafkaEvents.EVENT_READER_DETERMINED_SERIE */
    fun getSerieInfo(): EpisodeInfo? {
        return getFirstOrNull(KafkaEvents.EVENT_READER_DETERMINED_SERIE)?.let {
            EpisodeInfoDeserializer().deserializeIfSuccessful(it.value())
        }
    }

    /** @see KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED */
    fun getEncodeWork(): EncodeWork? {
        return getFirstOrNull(KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED)?.let {
            EncodeWorkDeserializer().deserializeIfSuccessful(it.value())
        }
    }

}
|
||||
@ -1,120 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.collector
|
||||
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.DefaultKafkaReader
|
||||
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ConvertWork
|
||||
import no.iktdev.streamit.content.common.dto.reader.work.ExtractWork
|
||||
import no.iktdev.streamit.library.db.query.SubtitleQuery
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import org.springframework.stereotype.Service
|
||||
import java.io.File
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
@Service
|
||||
class SubtitleConsumer : DefaultKafkaReader("collectorConsumerExtractedSubtitle") {

    /**
     * Listens for finished subtitle work — either extracted (encoder) or converted
     * (converter) — and persists the resulting subtitle file(s) to the database.
     */
    private val listener = object : SimpleMessageListener(
        topic = CommonConfig.kafkaTopic,
        consumer = defaultConsumer,
        accepts = listOf(
            KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event,
            KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED.event
        )
    ) {
        override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
            val referenceId = data.value().referenceId
            when (data.key()) {
                KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED.event -> {
                    val work = data.value().dataAs(ExtractWork::class.java)
                    if (work == null) {
                        logger.info { "Event: ${data.key()} value is null" }
                    } else {
                        storeExtractWork(referenceId, work)
                    }
                }
                KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED.event -> {
                    val work = data.value().dataAs(ConvertWork::class.java)
                    if (work == null) {
                        logger.info { "Event: ${data.key()} value is null" }
                    } else {
                        storeConvertWork(referenceId, work)
                    }
                }
                else -> {
                    // Accepted event that no branch handles: warn only when it was a
                    // successful message; otherwise it was filtered for status reasons.
                    if (data.value().isSuccessful()) {
                        logger.warn { "Event: ${data.key()} is not captured" }
                    } else {
                        logger.info { "Event: ${data.key()} is not ${StatusType.SUCCESS.name}" }
                    }
                }
            }
        }
    }

    init {
        listener.listen()
    }

    /**
     * Produces the collector result event for a single stored subtitle.
     *
     * @param referenceId correlation id of the originating work
     * @param outFile path of the subtitle file that was (or failed to be) stored
     * @param statusType SUCCESS produces a success event; anything else an error event
     * @param result payload attached to the error message (ignored on success)
     */
    fun produceMessage(referenceId: String, outFile: String, statusType: StatusType, result: Any?) {
        if (statusType == StatusType.SUCCESS) {
            produceSuccessMessage(KafkaEvents.EVENT_COLLECTOR_STORED_SUBTITLE, referenceId)
            logger.info { "Stored ${File(outFile).absolutePath} subtitle" }
        } else {
            produceErrorMessage(KafkaEvents.EVENT_COLLECTOR_STORED_SUBTITLE, Message(referenceId, Status(statusType), result), "See log")
            logger.error { "Failed to store ${File(outFile).absolutePath} subtitle" }
        }
    }

    /**
     * Persists a single extracted subtitle file and reports the outcome.
     * The subtitle is associated with the video via the file's base name.
     */
    fun storeExtractWork(referenceId: String, work: ExtractWork) {
        val of = File(work.outFile)
        val status = transaction {
            SubtitleQuery(
                associatedWithVideo = of.nameWithoutExtension,
                language = work.language,
                collection = work.collection,
                format = of.extension.uppercase(),
                file = of.name // same file as work.outFile; reuse `of` for consistency
            ).insertAndGetStatus()
        }
        produceMessage(referenceId, work.outFile, if (status) StatusType.SUCCESS else StatusType.ERROR, "Store Extracted: $status")
    }

    /**
     * Persists all converted subtitle files from a conversion job and reports the
     * outcome per partition (succeeded / failed file paths).
     *
     * Fixes over the previous version:
     * - removed a redundant nested `transaction { }` inside the outer transaction
     *   (Exposed joins a nested call to the outer transaction, so this is a no-op
     *   structurally but clearer);
     * - success and error events were previously produced unconditionally on every
     *   call — an ERROR event was emitted even when nothing failed. Each event is
     *   now produced only when its partition is non-empty.
     */
    fun storeConvertWork(referenceId: String, work: ConvertWork) {
        // Insert every converted file; pair each insert status with its file path.
        val results = transaction {
            work.outFiles.map { path ->
                val of = File(path)
                SubtitleQuery(
                    associatedWithVideo = of.nameWithoutExtension,
                    language = work.language,
                    collection = work.collection,
                    format = of.extension.uppercase(),
                    file = of.name
                ).insertAndGetStatus() to path
            }
        }
        val failed = results.filter { !it.first }.map { it.second }
        val success = results.filter { it.first }.map { it.second }

        if (success.isNotEmpty()) {
            produceSuccessMessage(KafkaEvents.EVENT_COLLECTOR_STORED_SUBTITLE, referenceId, success)
        }
        if (failed.isNotEmpty()) {
            produceErrorMessage(KafkaEvents.EVENT_COLLECTOR_STORED_SUBTITLE, Message(referenceId, Status(StatusType.ERROR), failed), "See log")
        }
    }

    /** Maps the two accepted terminal subtitle events to their payload deserializers. */
    override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
        return DeserializerRegistry.getEventToDeserializer(
            KafkaEvents.EVENT_ENCODER_SUBTITLE_FILE_ENDED,
            KafkaEvents.EVENT_CONVERTER_SUBTITLE_FILE_ENDED
        )
    }
}
|
||||
@ -1,183 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.collector
|
||||
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.DefaultKafkaReader
|
||||
import no.iktdev.streamit.content.common.Downloader
|
||||
import no.iktdev.streamit.content.common.deserializers.DeserializerRegistry
|
||||
import no.iktdev.streamit.content.common.dto.Metadata
|
||||
import no.iktdev.streamit.content.common.dto.reader.EpisodeInfo
|
||||
import no.iktdev.streamit.library.db.query.*
|
||||
import no.iktdev.streamit.library.db.tables.catalog
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.listener.collector.CollectorMessageListener
|
||||
import no.iktdev.streamit.library.kafka.listener.collector.ICollectedMessagesEvent
|
||||
import no.iktdev.streamit.library.kafka.listener.deserializer.IMessageDataDeserialization
|
||||
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
|
||||
import org.jetbrains.exposed.sql.andWhere
|
||||
import org.jetbrains.exposed.sql.insert
|
||||
import org.jetbrains.exposed.sql.select
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import org.jetbrains.exposed.sql.update
|
||||
import org.springframework.stereotype.Service
|
||||
import java.io.File
|
||||
import kotlin.math.log
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
|
||||
@Service
|
||||
class VideoConsumer: DefaultKafkaReader("collectorConsumerEncodedVideo"), ICollectedMessagesEvent<ResultCollection> {

    // Collects all events belonging to one file-processing run: it starts collecting
    // at EVENT_READER_RECEIVED_FILE, gathers the listed intermediate events, and
    // fires onCollectionCompleted once EVENT_ENCODER_VIDEO_FILE_ENDED arrives.
    // NOTE(review): `listener = this` leaks `this` during construction — presumably
    // safe because listen() is only called from init below; confirm.
    val listener = CollectorMessageListener<ResultCollection>(
        topic = CommonConfig.kafkaTopic,
        consumer = defaultConsumer,
        initiatorEvent = KafkaEvents.EVENT_READER_RECEIVED_FILE,
        completionEvent = KafkaEvents.EVENT_ENCODER_VIDEO_FILE_ENDED,
        acceptsFilter = listOf(
            KafkaEvents.EVENT_METADATA_OBTAINED,
            KafkaEvents.EVENT_READER_DETERMINED_SERIE,
            KafkaEvents.EVENT_READER_DETERMINED_MOVIE,
        ),
        listener = this,
        eventCollectionClass = ResultCollection::class.java
    )

    init {
        listener.listen()
    }

    // Deserializers for every event the collector may receive: the accepted
    // intermediate events plus the initiator and completion events.
    override fun loadDeserializers(): Map<String, IMessageDataDeserialization<*>> {
        return DeserializerRegistry.getEventToDeserializer(*listener.acceptsFilter.toTypedArray(), listener.initiatorEvent, listener.completionEvent)
    }

    /**
     * Called when a full event collection for one processed video is available.
     * Persists the serie/movie rows, resolves or downloads a cover image, writes
     * the catalog entry (plus optional summary), and produces the stored-video event.
     */
    override fun onCollectionCompleted(collection: ResultCollection?) {
        val metadata = collection?.getMetadata()
        val fileData = collection?.getFileResult()
        val encodeWork = collection?.getEncodeWork()
        val serieData = collection?.getSerieInfo()
        // NOTE(review): movieData is never read below — dead local, or missing logic?
        val movieData = collection?.getMovieInfo()
        logger.info { "Obtained collection: \n\t${collection?.getRecords()?.map { it.key() }?.joinToString("\n\t")}" }

        // File result, encode work and a reference id are mandatory; bail out otherwise.
        if (fileData == null || encodeWork == null || collection.getReferenceId() == null) {
            logger.error { "Required data is null, as it has either status as non successful or simply missing" }
            return
        }
        val videoFileNameWithExtension = File(encodeWork.outFile).name
        val outDir = File(encodeWork.outFile).parentFile

        // Insert the serie episode and/or movie row. The returned iid is the movie
        // row id (used by the catalog below) or null for serie-only content.
        val iid = transaction {
            if (serieData != null) {
                val serieInsertStatus = getSerieQueryInstance(serieData, videoFileNameWithExtension)?.insertAndGetStatus()
                if (serieInsertStatus == false) {
                    logger.warn { "Failed to insert episode $videoFileNameWithExtension" }
                }
            }
            if (serieData == null || metadata?.type == "movie") {
                val iid = MovieQuery(videoFileNameWithExtension).insertAndGetId()
                if (iid == null) {
                    logger.warn { "Failed to insert movie and get id for it $videoFileNameWithExtension" }
                }
                iid
            } else null
        }

        // Prefer a cover already present next to the output file; otherwise try to
        // download one from the metadata cover URL (blocking this callback thread).
        val coverUrl = metadata?.cover
        val currentCover = getExistingCover(outDir)
        val coverFile = if (currentCover == null || !currentCover.exists()) {
            if (coverUrl != null) {
                logger.info { "Downloading Cover: $coverUrl" }
                runBlocking {
                    try {
                        val _file = Downloader(coverUrl, outDir, fileData.title).download()
                        if (_file == null || !_file.exists()) {
                            logger.info { "Failed to download the file" }
                        }
                        _file
                    } catch (e: Exception) {
                        // No cover
                        e.printStackTrace()
                        null
                    }
                }
            } else {
                logger.info { "No cover url received" }
                null
            }
        } else currentCover

        // Serie must always be completed before catalog. This is in case the catalog
        // already exists and it throws, so that the transaction gets rolled back!

        val status = try {
            transaction {
                val genres = metadata?.let { insertAndGetGenres(it) }

                val cq = CatalogQuery(
                    title = fileData.title,
                    cover = coverFile?.name,
                    type = if (serieData == null) "movie" else "serie",
                    collection = fileData.title,
                    iid = iid,
                    genres = genres
                )
                // NOTE(review): duplicates the `type` expression passed to CatalogQuery above.
                val catalogType = if (serieData == null) "movie" else "serie"
                cq.insertAndGetStatus()

                // If the catalog row pre-existed without a cover, backfill it now.
                // NOTE(review): `?: null` after firstOrNull() is redundant.
                if (coverFile != null) {
                    val qres = catalog.select { catalog.title eq fileData.title }.andWhere { catalog.type eq catalogType}.firstOrNull() ?: null
                    if (qres != null && qres[catalog.cover].isNullOrBlank()) {
                        catalog.update({ catalog.id eq qres[catalog.id] }) {
                            it[catalog.cover] = coverFile.name
                        }
                    }
                }

                val cqId = cq.getId() ?: throw RuntimeException("No Catalog id found!")
                metadata?.let {
                    val summary = it.summary
                    if (summary != null) {
                        // NOTE(review): insert status is captured but never checked.
                        val success = SummaryQuery(cid = cqId, language = "eng", description = summary).insertAndGetStatus()
                    }
                }
            }
        } catch (e: Exception) {
            e.printStackTrace()
        }

        // NOTE(review): the success event is produced even when the transaction above
        // threw (status is then Unit from the catch) — looks unintended; confirm.
        produceSuccessMessage(KafkaEvents.EVENT_COLLECTOR_STORED_VIDEO, collection.getReferenceId() ?: "M.I.A", status)
        logger.info { "Stored ${encodeWork.outFile} video" }
    }

    /**
     * Inserts the metadata's genres and returns their ids as a comma-joined string.
     * Needs to be wrapped in transaction
     */
    fun insertAndGetGenres(meta: Metadata): String? {
        val gq = GenreQuery(*meta.genres.toTypedArray())
        gq.insertAndGetIds()
        return gq.getIds().joinToString(",")
    }

    // Builds the serie insert query for an episode, or null when either the episode
    // data or the target file base name is missing.
    fun getSerieQueryInstance(data: EpisodeInfo?, baseName: String?): SerieQuery? {
        if (data == null || baseName == null) return null
        return SerieQuery(data.episodeTitle, data.episode, data.season, data.title, baseName)
    }

    // Image extensions accepted as an existing cover file.
    val validCoverFormat = listOf(
        "png",
        "jpg",
        "jpeg",
        "webp",
        "bmp",
        "tiff"
    )
    // Returns the first image file found anywhere under contentDir, if any.
    fun getExistingCover(contentDir: File): File? {
        val possibleCovers = contentDir.walkTopDown().filter { it.isFile && validCoverFormat.contains(it.extension)}
        return possibleCovers.firstOrNull()
    }

}
|
||||
@ -1,14 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.dto
|
||||
|
||||
/**
 * Value holder describing one completed item and the operations performed on it.
 *
 * @param name short name of the item
 * @param fullName full name of the item
 * @param time completion time as a string (format determined by the producer — confirm)
 * @param operations the processing steps that were applied
 */
data class CompletedItem(
    val name: String,
    val fullName: String,
    val time: String,
    val operations: List<CompletedTypes>,
)
|
||||
|
||||
/** The kinds of processing operations a completed item may have gone through. */
enum class CompletedTypes { ENCODE, EXTRACT, CONVERT }
|
||||
@ -1,127 +0,0 @@
|
||||
package no.iktdev.streamit.content.reader.fileWatcher
|
||||
|
||||
import com.google.gson.Gson
|
||||
import dev.vishna.watchservice.KWatchEvent
|
||||
import dev.vishna.watchservice.asWatchChannel
|
||||
import kotlinx.coroutines.channels.consumeEach
|
||||
import kotlinx.coroutines.launch
|
||||
import mu.KotlinLogging
|
||||
import no.iktdev.exfl.coroutines.Coroutines
|
||||
import no.iktdev.streamit.content.common.CommonConfig
|
||||
import no.iktdev.streamit.content.common.Naming
|
||||
import no.iktdev.streamit.content.common.dto.reader.FileResult
|
||||
|
||||
import no.iktdev.streamit.library.kafka.KafkaEvents
|
||||
import no.iktdev.streamit.library.kafka.dto.Message
|
||||
import no.iktdev.streamit.library.kafka.dto.Status
|
||||
import no.iktdev.streamit.library.kafka.dto.StatusType
|
||||
import no.iktdev.streamit.library.kafka.consumers.DefaultConsumer
|
||||
import no.iktdev.streamit.library.kafka.listener.SimpleMessageListener
|
||||
import no.iktdev.streamit.library.kafka.producer.DefaultProducer
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord
|
||||
import org.springframework.stereotype.Service
|
||||
import java.io.File
|
||||
|
||||
private val logger = KotlinLogging.logger {}
|
||||
@Service
|
||||
class FileWatcher: FileWatcherEvents {
    val messageProducer = DefaultProducer(CommonConfig.kafkaTopic)
    val defaultConsumer = DefaultConsumer(subId = "fileWatcher")

    // Debounce/availability queue: files enter as pending and are promoted to
    // available via the callbacks wired below.
    val queue = FileWatcherQueue()

    // Filesystem watch channel over the incoming-content directory.
    val watcherChannel = CommonConfig.incomingContent.asWatchChannel()
    init {
        // Consume filesystem events on an IO coroutine and route them to the queue.
        Coroutines.io().launch {
            watcherChannel.consumeEach {
                when (it.kind) {
                    KWatchEvent.Kind.Deleted -> {
                        queue.removeFromQueue(it.file, this@FileWatcher::onFileRemoved)
                    }

                    KWatchEvent.Kind.Created, KWatchEvent.Kind.Initialized -> {
                        if (validVideoFiles().contains(it.file.extension)) {
                            queue.addToQueue(it.file, this@FileWatcher::onFilePending, this@FileWatcher::onFileAvailable)
                        } else if (it.file.isFile) {
                            logger.warn { "${it.file.name} is not a valid file type" }
                        } else if (it.file.isDirectory) {
                            // NOTE(review): `valid` is computed but never used — the
                            // directory's video files are found and then discarded.
                            val valid = it.file.walkTopDown().filter { f -> f.isFile && f.extension in validVideoFiles() }
                            logger.warn { "Ignoring directory: ${it.file.name}" }
                        }
                    }

                    else -> {
                        logger.info { "Ignoring event kind: ${it.kind.name} for file ${it.file.name}" }
                    }
                }
            }
        }

        // On-demand re-read: a REQUEST_FILE_READ message injects a synthetic
        // Initialized event for the named file into the watch channel.
        // NOTE(review): this object expression is created but `listen()` is never
        // called on it — verify the base class starts consuming in its constructor,
        // otherwise this listener is inert.
        object : SimpleMessageListener(CommonConfig.kafkaTopic, defaultConsumer, listOf(KafkaEvents.REQUEST_FILE_READ.event)) {
            override fun onMessageReceived(data: ConsumerRecord<String, Message>) {
                if (data.value().status.statusType == StatusType.SUCCESS) {
                    if (data.value().data is String) {
                        val file = File(CommonConfig.incomingContent, data.value().data as String)
                        Coroutines.io().launch {
                            // NOTE(review): `?.` is redundant — watcherChannel is non-null.
                            watcherChannel?.send(KWatchEvent(
                                file = file,
                                kind = KWatchEvent.Kind.Initialized,
                                tag = null
                            ))
                        }
                    }
                }
            }
        }
    }

    // File extensions treated as watchable video content.
    fun validVideoFiles(): List<String> = listOf(
        "mkv",
        "avi",
        "mp4",
        "wmv",
        "webm",
        "mov"
    )

    // A file became stable/readable: announce it as a SUCCESS read event with a
    // guessed title and sanitized file name.
    override fun onFileAvailable(file: PendingFile) {
        logger.debug { "onFileAvailable har mottatt pendingFile ${file.file.name}" }
        val naming = Naming(file.file.nameWithoutExtension)
        val message = Message(
            referenceId = file.id,
            status = Status(StatusType.SUCCESS),
            data = FileResult(file = file.file.absolutePath, title = naming.guessDesiredTitle(), sanitizedName = naming.guessDesiredFileName())
        )
        logger.debug { "Producing message: ${Gson().toJson(message)}" }
        messageProducer.sendMessage(KafkaEvents.EVENT_READER_RECEIVED_FILE.event, message)
    }

    // A file was detected but is not yet stable: announce it as PENDING.
    // NOTE(review): unlike onFileAvailable, no referenceId is set here — confirm
    // downstream tolerates the default.
    override fun onFilePending(file: PendingFile) {
        val message = Message(
            status = Status(StatusType.PENDING),
            data = FileResult(file = file.file.absolutePath)
        )
        messageProducer.sendMessage(KafkaEvents.EVENT_READER_RECEIVED_FILE.event , message)
    }

    // A pending file failed its availability checks: announce it as ERROR.
    override fun onFileFailed(file: PendingFile) {
        val message = Message(
            status = Status(StatusType.ERROR),
            data = file.file.absolutePath
        )
        messageProducer.sendMessage(KafkaEvents.EVENT_READER_RECEIVED_FILE.event , message)
    }

    // A watched file disappeared before processing: announce it as IGNORED.
    override fun onFileRemoved(file: PendingFile) {
        val message = Message(
            status = Status(StatusType.IGNORED),
            data = file.file.absolutePath
        )
        messageProducer.sendMessage(KafkaEvents.EVENT_READER_RECEIVED_FILE.event , message)
    }

}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user