Commit fc6cf383 authored by Jonas Waeber

refactor implementation to csv parser

parent c2c1bff7
Pipeline #8281 passed with stages
in 5 minutes and 52 seconds
@@ -6,4 +6,4 @@ RUN cd /build/distributions && tar xf app.tar
FROM openjdk:8-jre-alpine
COPY --from=0 /build/distributions/app /app
CMD /app/bin/record-parser
CMD /app/bin/table-data-transform
## Record Parser
## Table Data Import Service
[Confluence Doku](https://memobase.atlassian.net/wiki/spaces/TBAS/pages/29196525/Service%2BsFTP%2BReader%2BValidator)
[Confluence Doku](https://memobase.atlassian.net/wiki/spaces/TBAS/pages/48693312/Service+Table-Data+Formatter)
@@ -52,6 +52,11 @@ dependencies {
implementation 'org.snakeyaml:snakeyaml-engine:2.1'
// CSV Reader
implementation("com.github.doyaaaaaken:kotlin-csv-jvm:0.7.3")
// XLSX / XLS Reader
implementation 'org.apache.poi:poi:4.1.2'
// ODS Reader
implementation 'org.odftoolkit:odftoolkit:1.0.0-BETA1'
// JSON Parser
implementation 'com.beust:klaxon:5.2'
// Compression
@@ -62,6 +67,7 @@ dependencies {
implementation "org.jetbrains.kotlin:kotlin-reflect:1.3.71"
testCompile("org.junit.jupiter:junit-jupiter:5.4.2")
testImplementation 'org.assertj:assertj-core:3.15.0'
// https://mvnrepository.com/artifact/org.apache.kafka/kafka-streams-test-utils
testCompile group: 'org.apache.kafka', name: 'kafka-streams-test-utils', version: kafkaV
@@ -76,6 +82,7 @@ dependencies {
testCompile group: 'org.apache.sshd', name: 'sshd-core', version: '2.4.0'
// https://mvnrepository.com/artifact/org.apache.sshd/sshd-sftp
testCompile group: 'org.apache.sshd', name: 'sshd-sftp', version: '2.4.0'
}
compileKotlin {
......
rootProject.name = 'record-parser'
rootProject.name = 'table-data-transform'
/*
* sftp-reader
* Table Data Import Service
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
@@ -19,13 +19,12 @@
package org.memobase
import org.apache.kafka.streams.KafkaStreams
import java.io.File
import kotlin.system.exitProcess
import org.apache.logging.log4j.LogManager
import kotlin.system.exitProcess
class App {
companion object {
private val log = LogManager.getLogger("RecordParser")
private val log = LogManager.getLogger("TableDataTransform")
@JvmStatic fun main(args: Array<String>) {
try {
val settings = SettingsLoader("app.yml")
......
/*
* Table Data Import Service
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.memobase
class InvalidInputException(message: String) : Exception(message)
\ No newline at end of file
/*
* record-parser
* Table Data Import Service
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
@@ -22,25 +22,23 @@ import com.beust.klaxon.JsonObject
import com.beust.klaxon.Klaxon
import com.beust.klaxon.json
import com.github.doyaaaaaken.kotlincsv.dsl.csvReader
import com.github.doyaaaaaken.kotlincsv.dsl.csvWriter
import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.Topology
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.Predicate
import org.apache.logging.log4j.LogManager
import java.io.ByteArrayOutputStream
import java.io.File
import java.io.StringReader
import java.nio.charset.Charset
class KafkaTopology(private val settings: SettingsLoader) {
private val log = LogManager.getLogger("KafkaTopology")
private val sftpClient: SftpClient = SftpClient(settings.sftpSettings)
private val csvHeaderCount = settings.appSettings.getProperty("app.csv.header.count").toInt()
private val csvUseHeaderProperties = settings.appSettings.getProperty("app.csv.header.line")!!.toBoolean()
private val csvUsedHeaderIndex = settings.appSettings.getProperty("app.csv.header.index").toInt()
private val csvIdentifierIndex = settings.appSettings.getProperty("app.csv.identifier").toInt()
private val sheetIndex = settings.appSettings.getProperty("sheet.index").toInt()
private val headerCount = settings.appSettings.getProperty("header.count").toInt()
private val propertyNamesIndex = settings.appSettings.getProperty("header.line").toInt()
private val identifierIndex = settings.appSettings.getProperty("identifier").toInt()
fun build(): Topology {
val builder = StreamsBuilder()
@@ -48,85 +46,142 @@ class KafkaTopology(private val settings: SettingsLoader) {
val branchedSource = builder
.stream<String, String>(settings.inputTopic)
.branch(
Predicate { _, value -> filer(value) },
Predicate { _, value -> filter(value) },
Predicate { _, _ -> true }
)
val processedValue = branchedSource[0]
.mapValues { _, value -> mapValues(value) }
.flatMapValues { _, value -> parser(value) }
// report filtered error message from previous job.
// TODO: what to send to main topic?
branchedSource[1]
.mapValues { key, _ -> reportToJson(Report(key, "Failure", "Ignored message as file validator reported error.", 0)) }
.map { key, _ ->
reportToJson(
Report(
key,
"Failure",
"Ignored message as file validator reported error.",
0
)
)
}
.to("${settings.outputTopic}-reporting")
// data branch
processedValue
.map { _, value -> value.first }
.to(settings.outputTopic)
// work branch
val formatBranches = branchedSource[0]
.mapValues { _, value -> parseJsonObject(value) }
.branch(
Predicate { _, value -> listOf("CSV", "TSV").contains(value["format"]) },
Predicate { _, value -> listOf("XSL", "XSLX").contains(value["format"]) },
Predicate { _, value -> listOf("ODS").contains(value["format"]) }
)
// CSV Branch
buildHelper(formatBranches[0]
.flatMapValues { _, value -> csvMapper(value) })
// Excel Branch
buildHelper(formatBranches[1]
.flatMapValues { value -> excelMapper(value) })
// OpenDocument Spreadsheet Branch
buildHelper(formatBranches[2]
.flatMapValues { value -> odsMapper(value) })
// reporting branch
processedValue
.mapValues { _, value -> reportToJson(value.second) }
.to("${settings.outputTopic}-reporting")
return builder.build()
}
private fun filer(value: String): Boolean {
private fun buildHelper(stream: KStream<String, Pair<Pair<String, JsonObject>, Report>>) {
stream
.map { _, value -> KeyValue(value.first.first, value.first.second.toJsonString()) }
.to(settings.outputTopic)
stream
.map { _, value -> reportToJson(value.second) }
.to("${settings.outputTopic}-reporting")
}
private fun filter(value: String): Boolean {
return !value.contains("ERROR")
}
private fun mapValues(value: String): JsonObject {
private fun parseJsonObject(value: String): JsonObject {
// TODO: try catch errors
return Klaxon().parseJsonObject(StringReader(value))
}
private fun parser(value: JsonObject): List<Pair<KeyValue<String, String>, Report>> {
val resultMessages = mutableListOf<Pair<KeyValue<String, String>, Report>>()
sftpClient.open(File(value["path"] as String)).use {
val reader = csvReader().readAll(it.RemoteFileInputStream())
private fun csvMapper(value: JsonObject): List<Pair<Pair<String, JsonObject>, Report>> {
val resultMessages = mutableListOf<Pair<Pair<String, JsonObject>, Report>>()
val mutableSet = mutableSetOf<String>()
sftpClient.open(File(value["path"] as String)).use { remoteFile ->
// What about TSV? is that automatic or not? probably not ...
val reader =
csvReader {
this.quoteChar = '"'
this.delimiter = if (value["format"] == "CSV") ',' else '\t'
this.charset = Charsets.UTF_8.displayName()
// this.skipEmptyLine = true
}.readAll(remoteFile.RemoteFileInputStream())
var headerProperties = emptyList<String>()
var count = 0
for (line in reader) {
count += 1
if (count <= csvHeaderCount) {
if (csvUseHeaderProperties) {
if (count == csvUsedHeaderIndex) {
// TODO: check if there is a property name for each column!
headerProperties = line
if (count <= headerCount) {
if (count == propertyNamesIndex) {
// TODO: check if there is a property name for each column!
headerProperties = line
headerProperties.forEachIndexed { index, property ->
if (property.isEmpty()) {
throw InvalidInputException("Missing a property name on row $count in column $index!")
}
// TODO: Any validations on the field names themselves?
// Like no special characters? might be a good idea for processing
}
}
continue
}
val output = ByteArrayOutputStream()
csvWriter {
this.charset = Charsets.UTF_8.name()
this.delimiter = ','
this.lineTerminator = "\n"
this.nullCode = ""
}.open(output) {
if (headerProperties.isNotEmpty()) {
writeRow(headerProperties)
val identifier = line[identifierIndex]
when {
identifier.isEmpty() -> {
throw InvalidInputException("The unique identifier in column $identifierIndex in row $count is empty!")
}
mutableSet.contains(identifier) -> {
throw InvalidInputException("The unique identifier in column $identifierIndex in row $count is a duplicate of another row!")
}
else -> {
mutableSet.add(identifier)
}
writeRow(line)
}
// TODO: check if there is an identifier for each row and that each value is unique!
val identifier = line[csvIdentifierIndex]
val keyValueMap = json {
obj(
headerProperties.zip(line)
)
}
val data = output.toString(Charset.defaultCharset().displayName()).trim()
val message = json { obj(Pair("format", "CSV"), Pair("data", data)) }
val result = KeyValue(identifier, message.toJsonString())
output.reset()
val report = Report(identifier, "SUCCESS","Successfully created record with identifier $identifier with format CSV!", 1)
val result = Pair(identifier, keyValueMap)
val report = Report(
identifier,
"SUCCESS",
"Successfully created record with identifier $identifier with format CSV!",
1
)
resultMessages.add(Pair(result, report))
} }
}
}
return resultMessages
}
private fun excelMapper(value: JsonObject): List<Pair<Pair<String, JsonObject>, Report>> {
return emptyList()
}
private fun odsMapper(value: JsonObject): List<Pair<Pair<String, JsonObject>, Report>> {
return emptyList()
}
private fun reportToJson(value: Report): String {
return Klaxon().toJsonString(value)
private fun reportToJson(value: Report): KeyValue<String, String> {
return KeyValue(value.id, Klaxon().toJsonString(value))
}
}
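In this commit `excelMapper` and `odsMapper` are stubs that return empty lists, even though the POI and ODF Toolkit dependencies are already added in the build. A minimal sketch of what a POI-backed `excelMapper` could look like, assuming the same `sftpClient`, `sheetIndex`, `headerCount`, `propertyNamesIndex` and `identifierIndex` fields that `csvMapper` uses (this is not the commit's code); note that `org.apache.poi:poi` alone covers .xls, while .xlsx would additionally need the `poi-ooxml` artifact:
import org.apache.poi.ss.usermodel.DataFormatter
import org.apache.poi.ss.usermodel.WorkbookFactory
// Sketch only: read one sheet and emit the same Pair(Pair(identifier, record), Report)
// shape that csvMapper produces. Names ending in "Sketch" are ours, not the project's.
private fun excelMapperSketch(value: JsonObject): List<Pair<Pair<String, JsonObject>, Report>> {
    val results = mutableListOf<Pair<Pair<String, JsonObject>, Report>>()
    val formatter = DataFormatter() // renders each cell as its displayed string
    sftpClient.open(File(value["path"] as String)).use { remoteFile ->
        WorkbookFactory.create(remoteFile.RemoteFileInputStream()).use { workbook ->
            val sheet = workbook.getSheetAt(sheetIndex)
            var headerProperties = emptyList<String>()
            for (row in sheet) {
                val cells = (0 until row.lastCellNum.toInt()).map { formatter.formatCellValue(row.getCell(it)) }
                val lineNumber = row.rowNum + 1 // rowNum is zero-based
                when {
                    lineNumber == propertyNamesIndex -> headerProperties = cells
                    lineNumber <= headerCount -> { /* skip remaining header rows */ }
                    else -> {
                        val identifier = cells[identifierIndex]
                        val record = json { obj(headerProperties.zip(cells)) }
                        results.add(Pair(Pair(identifier, record),
                            Report(identifier, "SUCCESS", "Successfully created record with identifier $identifier with format XLS!", 1)))
                    }
                }
            }
        }
    }
    return results
}
An `odsMapper` would presumably follow the same shape on top of the ODF Toolkit API added in the build.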
@@ -77,15 +77,17 @@ class SettingsLoader(private val fileName: String) {
suppliedKafkaProducerSettings = kafkaOptions["producer"] as Map<String, String>
suppliedKafkaStreamsSettings = kafkaOptions["stream"] as Map<String, String>
appSettings.setProperty("app.csv.header.count", addSetting("app.csv.header.count"))
appSettings.setProperty("app.csv.header.line", addSetting("app.csv.header.line"))
appSettings.setProperty("app.csv.header.index", addSetting("app.csv.header.index"))
appSettings.setProperty("app.csv.identifier", addSetting("app.csv.identifier"))
val appMap = mappedYaml["app"] as Map<String, Any>
appSettings.setProperty("header.count",addSetting(appMap,"header.count"))
appSettings.setProperty("header.line", addSetting(appMap,"header.line"))
appSettings.setProperty("sheet.index", addSetting(appMap,"sheet.index"))
appSettings.setProperty("identifier", addSetting(appMap,"identifier"))
sftpSettings.setProperty("host", addSetting("sftp.host"))
sftpSettings.setProperty("port", addSetting("sftp.port"))
sftpSettings.setProperty("user", addSetting("sftp.user"))
sftpSettings.setProperty("password", addSetting("sftp.password"))
val sftpMap = mappedYaml["sftp"] as Map<String, Any>
sftpSettings.setProperty("host", addSetting(sftpMap,"host"))
sftpSettings.setProperty("port", addSetting(sftpMap,"port"))
sftpSettings.setProperty("user", addSetting(sftpMap,"user"))
sftpSettings.setProperty("password", addSetting(sftpMap,"password"))
} catch (ex: ClassCastException) {
ex.printStackTrace()
log.error("The properties file has an invalid structure: $ex")
@@ -98,9 +100,9 @@ class SettingsLoader(private val fileName: String) {
kafkaStreamsSettings = initKafkaStreamsSettings()
}
private fun addSetting(base: String): String {
private fun addSetting(map: Map<String, Any>, base: String): String {
val levels = base.split('.')
return recursiveFunction(levels.first(), levels.drop(1), mappedYaml, base)
return recursiveFunction(levels.first(), levels.drop(1), map, base)
}
private fun recursiveFunction(current: String, rest: List<String>, map: Map<String, Any>, base: String): String {
......
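The body of `recursiveFunction` is collapsed above; the visible effect of the refactor is that `addSetting` now resolves dotted keys against a sub-map (`appMap`, `sftpMap`) instead of the document root. A standalone illustration of that kind of lookup (the name `resolve` is ours, not the project's):
// Illustrative only: walk nested maps parsed from YAML one segment at a time,
// e.g. resolve(appMap, "header.count") returns the value at app > header > count.
fun resolve(map: Map<String, Any>, path: String): String {
    var node: Any = map
    for (segment in path.split('.')) {
        node = (node as? Map<*, *>)?.get(segment)
            ?: error("Missing setting: $path")
    }
    return node.toString()
}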
/*
* sftp-reader
* Table Data Import Service
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
......
@@ -4,12 +4,11 @@ sftp:
user: ${SFTP_USER:?system}
password: ${SFTP_PASSWORD:?system}
app:
csv:
header:
count: ${HEADER_COUNT:?user}
line: ${USE_HEADER_LINE:?user}
index: ${USED_HEADER_LINE_INDEX:?user}
identifier: ${IDENTIFIER_INDEX:?user}
sheet: ${SHEET_INDEX:?user}
header:
count: ${HEADER_COUNT:?user}
line: ${USED_HEADER_LINE_INDEX:?user}
identifier: ${IDENTIFIER_INDEX:?user}
kafka:
producer:
bootstrap.servers: localhost:9092
......
@@ -24,14 +24,17 @@ import org.apache.kafka.common.serialization.StringSerializer
import org.apache.kafka.streams.TopologyTestDriver
import org.apache.kafka.streams.test.ConsumerRecordFactory
import org.apache.logging.log4j.LogManager
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import org.junit.jupiter.api.assertAll
import org.junit.jupiter.api.extension.ExtendWith
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
import java.io.File
import java.nio.charset.Charset
import kotlin.test.assertNotNull
import java.util.stream.Stream
import kotlin.test.assertNull
@ExtendWith(EmbeddedSftpServerExtension::class)
@@ -77,53 +80,48 @@ class Tests {
)
}
@Test
fun `test csv file import with header line`() {
val settingsLoader = SettingsLoader("test1.yml")
@ParameterizedTest
@MethodSource("csvFileTestParams")
fun `test valid inputs`(params: TestParams) {
val settingsLoader = SettingsLoader(params.settingsFileName)
val testDriver = TopologyTestDriver(KafkaTopology(settingsLoader).build(), settingsLoader.kafkaStreamsSettings)
val factory = ConsumerRecordFactory(
StringSerializer(), StringSerializer()
)
testDriver.pipeInput(
factory.create(
settingsLoader.inputTopic, "brandt_metadaten.csv", readFile("csv_file_import_1.json")
)
)
val record: ProducerRecord<String, String> =
testDriver.readOutput(
settingsLoader.outputTopic,
StringDeserializer(),
StringDeserializer()
settingsLoader.inputTopic, params.inputKey, readFile(params.inputFileName)
)
assertNotNull(record)
assertAll("test csv file import",
{ assertEquals("AVGR13716", record.key()) },
{ assertEquals(readFile("csv_file_import_with_header_line_output.csv"), record.value()) }
)
}
@Test
fun `test csv file import without header line`() {
val settingsLoader = SettingsLoader("test2.yml")
val testDriver = TopologyTestDriver(KafkaTopology(settingsLoader).build(), settingsLoader.kafkaStreamsSettings)
val factory = ConsumerRecordFactory(
StringSerializer(), StringSerializer()
)
testDriver.pipeInput(
factory.create(
settingsLoader.inputTopic, "brandt_metadaten.csv", readFile("csv_file_import_1.json")
)
)
val record: ProducerRecord<String, String> =
testDriver.readOutput(
settingsLoader.outputTopic,
StringDeserializer(),
StringDeserializer()
)
assertNotNull(record)
assertAll("test csv file import",
{ assertEquals("AVGR13716", record.key()) },
{ assertEquals(readFile("csv_file_import_without_header_line_output.csv"), record.value()) }
)
assertThat(record)
.isNotNull
.hasFieldOrPropertyWithValue("key", params.expectedOutputKey)
.hasFieldOrPropertyWithValue("value", readFile(params.expectedOutputFileName))
}
private fun csvFileTestParams() = Stream.of(
TestParams(
"test1.yml",
"csv_file_import_1.json",
"brandt_metadaten.csv",
"AVGR13716",
"csv_file_import_with_header_line_output.json")
)
}
data class TestParams(
val settingsFileName: String,
val inputFileName: String,
val inputKey: String,
val expectedOutputKey: String,
val expectedOutputFileName: String
)
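Note that the removed `test csv file import without header line` case (test2.yml) has no counterpart in `csvFileTestParams`, which currently carries a single entry. Restoring that coverage would presumably just mean a second entry; the expected output file name below is a guess, not a resource present in this commit:
// Hypothetical second case mirroring the removed test2.yml test; the
// expected output file name is illustrative only.
TestParams(
    "test2.yml",
    "csv_file_import_1.json",
    "brandt_metadaten.csv",
    "AVGR13716",
    "csv_file_import_without_header_line_output.json"
)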
{"format":"CSV","data":"Exemplar-AVGRNr,Permalink,Titel-Title,Titel-ZusatzTitel,Titel-Beschreibung,Titel-ProduktionsjahrdesOriginals,Titel-FilmPersonen,Titel-Funktionen,Titel-Genre,\"Titel-Genres, Titel-Beschreibung (Intel.)\",Titel-Drehort,Titel-Weiteres,Titel-Stream-Url,Titel-Benutzerzugang,Medium-Materialbezeichnung,Medium-MedienFormat,Medium-Ton,Medium-Farbe,Medium-Dauer,Medium-Bandlaenge\nAVGR13716,https://www.gr.ch/Exemplare/13716,Wintersport in Arosa,,\"Pferderennen am Obersee bei strahlendem Sonnenschein, viel Publikum, Gedränge vor Wettbüro, Reiter in Armeeuniform, Fotografen, Skijöring \u2013 Eisfest mit kostümierten Teilnehmer/innen vor Hotel Altein bei Nacht \u2013 Pferderennen am Obersee \u2013 Eiskunstlauf \u2013 Pferderennen, diesmal winterlicher \u2013 Schanzenspringen im Skigelände und viel Volk um die Alpgebäude Carmenna \u2013 Skifahrer im Aufstieg, Winterwanderer und nochmals Sprünge auf der Schneeschanze, Gruppe Skifahrer in wilder Schussfahrt, Wartende um die Hütten \u2013 Eishockey-Match \u2013 Impressionen von einem Abfahrtsrennen und Rundsicht über Arosa und Umgebung\",\"1920, 1920-1929, genaues Datum nicht eruierbar\",\"Brandt, Carl\",Autor/in,Dokumentarfilm; Amateurfilm,Pferdesport; Ski alpin; Skispringen; Eishochey; Tourismus,Arosa,Eisfest: teilweise identische Aufnahmen in AVGR12097 \u201eAnkunft David Zogg\u201c ; Schanzenspringen auf Carmenna: teilweise identische Aufnahmen in AVGR12115 \u201eTouristen auf dem Tschuggen\u201c,https://s3-eu-west-1.amazonaws.com/streaming.av-portal.gr.ch/13716/AVGR13716.mov,Intranet,Film,\"35-mm-Film, Negativ und Positiv, Nitrat\",stumm,s/w getönt,0:17:02,\"Vorhandene Elemente: AVGR9942: Negativ, Nitrat (CS, Z 986-172.8); AVGR9943: Positiv Nitrat (CS, Z 986-172.7); AVGR12098: Interpositiv / Marron 2366, Kopie 2016 (KBG); AVGR13715: Internegativ 2234, Kopie 2016 (KBG); AVGR13716: Positivkopie Farbe 2383, Kopie 2016, eingefärbte Sequenzen (KBG)\""}
\ No newline at end of file
{"Exemplar-AVGRNr":"AVGR13716","Permalink":"https://www.gr.ch/Exemplare/13716","Titel-Title":"Wintersport in Arosa","Titel-ZusatzTitel":"","Titel-Beschreibung":"Pferderennen am Obersee bei strahlendem Sonnenschein, viel Publikum, Gedränge vor Wettbüro, Reiter in Armeeuniform, Fotografen, Skijöring \u2013 Eisfest mit kostümierten Teilnehmer/innen vor Hotel Altein bei Nacht \u2013 Pferderennen am Obersee \u2013 Eiskunstlauf \u2013 Pferderennen, diesmal winterlicher \u2013 Schanzenspringen im Skigelände und viel Volk um die Alpgebäude Carmenna \u2013 Skifahrer im Aufstieg, Winterwanderer und nochmals Sprünge auf der Schneeschanze, Gruppe Skifahrer in wilder Schussfahrt, Wartende um die Hütten \u2013 Eishockey-Match \u2013 Impressionen von einem Abfahrtsrennen und Rundsicht über Arosa und Umgebung","Titel-ProduktionsjahrdesOriginals":"1920, 1920-1929, genaues Datum nicht eruierbar","Titel-FilmPersonen":"Brandt, Carl","Titel-Funktionen":"Autor/in","Titel-Genre":"Dokumentarfilm; Amateurfilm","Titel-Genres, Titel-Beschreibung (Intel.)":"Pferdesport; Ski alpin; Skispringen; Eishochey; Tourismus","Titel-Drehort":"Arosa","Titel-Weiteres":"Eisfest: teilweise identische Aufnahmen in AVGR12097 \u201eAnkunft David Zogg\u201c ; Schanzenspringen auf Carmenna: teilweise identische Aufnahmen in AVGR12115 \u201eTouristen auf dem Tschuggen\u201c","Titel-Stream-Url":"https://s3-eu-west-1.amazonaws.com/streaming.av-portal.gr.ch/13716/AVGR13716.mov","Titel-Benutzerzugang":"Intranet","Medium-Materialbezeichnung":"Film","Medium-MedienFormat":"35-mm-Film, Negativ und Positiv, Nitrat","Medium-Ton":"stumm","Medium-Farbe":"s/w getönt","Medium-Dauer":"0:17:02","Medium-Bandlaenge":"Vorhandene Elemente: AVGR9942: Negativ, Nitrat (CS, Z 986-172.8); AVGR9943: Positiv Nitrat (CS, Z 986-172.7); AVGR12098: Interpositiv / Marron 2366, Kopie 2016 (KBG); AVGR13715: Internegativ 2234, Kopie 2016 (KBG); AVGR13716: Positivkopie Farbe 2383, Kopie 2016, eingefärbte Sequenzen (KBG)"}
\ No newline at end of file
{"format":"CSV","data":"AVGR13716,https://www.gr.ch/Exemplare/13716,Wintersport in Arosa,,\"Pferderennen am Obersee bei strahlendem Sonnenschein, viel Publikum, Gedränge vor Wettbüro, Reiter in Armeeuniform, Fotografen, Skijöring \u2013 Eisfest mit kostümierten Teilnehmer/innen vor Hotel Altein bei Nacht \u2013 Pferderennen am Obersee \u2013 Eiskunstlauf \u2013 Pferderennen, diesmal winterlicher \u2013 Schanzenspringen im Skigelände und viel Volk um die Alpgebäude Carmenna \u2013 Skifahrer im Aufstieg, Winterwanderer und nochmals Sprünge auf der Schneeschanze, Gruppe Skifahrer in wilder Schussfahrt, Wartende um die Hütten \u2013 Eishockey-Match \u2013 Impressionen von einem Abfahrtsrennen und Rundsicht über Arosa und Umgebung\",\"1920, 1920-1929, genaues Datum nicht eruierbar\",\"Brandt, Carl\",Autor/in,Dokumentarfilm; Amateurfilm,Pferdesport; Ski alpin; Skispringen; Eishochey; Tourismus,Arosa,Eisfest: teilweise identische Aufnahmen in AVGR12097 \u201eAnkunft David Zogg\u201c ; Schanzenspringen auf Carmenna: teilweise identische Aufnahmen in AVGR12115 \u201eTouristen auf dem Tschuggen\u201c,https://s3-eu-west-1.amazonaws.com/streaming.av-portal.gr.ch/13716/AVGR13716.mov,Intranet,Film,\"35-mm-Film, Negativ und Positiv, Nitrat\",stumm,s/w getönt,0:17:02,\"Vorhandene Elemente: AVGR9942: Negativ, Nitrat (CS, Z 986-172.8); AVGR9943: Positiv Nitrat (CS, Z 986-172.7); AVGR12098: Interpositiv / Marron 2366, Kopie 2016 (KBG); AVGR13715: Internegativ 2234, Kopie 2016 (KBG); AVGR13716: Positivkopie Farbe 2383, Kopie 2016, eingefärbte Sequenzen (KBG)\""}
\ No newline at end of file
@@ -4,12 +4,11 @@ sftp:
user: user
password: password
app:
csv:
header:
count: 3
line: true
index: 3
identifier: 0
sheet: 0
header:
count: 3
line: 3
identifier: 0
kafka:
producer:
bootstrap.servers: localhost:12345
......