Commit 56b12b32 authored by Jonas Waeber

refactor table data parser

Loads an external library for most of the shared functionality and fixes some errors.
parent 51a1db93
Pipeline #8538 passed with stages in 5 minutes and 13 seconds
@@ -2,4 +2,3 @@
[Confluence Doku](https://memobase.atlassian.net/wiki/spaces/TBAS/pages/48693312/Service+Table-Data+Formatter)
@@ -21,8 +21,9 @@ targetCompatibility = 1.8
repositories {
jcenter()
maven { url 'https://gitlab.com/api/v4/projects/11507450/packages/maven' }
maven { url 'https://jitpack.io' }
maven {
url "https://dl.bintray.com/jonas-waeber/memobase"
}
}
ext {
@@ -43,13 +44,10 @@ dependencies {
implementation group: 'org.apache.kafka', name: 'kafka-clients', version: kafkaV
implementation "org.apache.kafka:kafka-streams:${kafkaV}"
// https://mvnrepository.com/artifact/org.apache.kafka/kafka-streams-test-utils
//testCompile group: 'org.apache.kafka', name: 'kafka-streams-test-utils', version: kafkaV
+ implementation 'org.memobase:memobase-service-utilities:1.2.0'
- // SFTP Client
- implementation 'com.hierynomus:sshj:0.27.0'
- // YAML Parser
- implementation 'org.snakeyaml:snakeyaml-engine:2.1'
// CSV Reader
implementation("com.github.doyaaaaaken:kotlin-csv-jvm:0.7.3")
// XSLX / XSL Reader
@@ -71,18 +69,6 @@ dependencies {
- // https://mvnrepository.com/artifact/org.apache.kafka/kafka-streams-test-utils
- testCompile group: 'org.apache.kafka', name: 'kafka-streams-test-utils', version: kafkaV
- testImplementation "org.apache.kafka:kafka-clients:$kafkaV:test"
- testImplementation "org.apache.kafka:kafka_2.11:$kafkaV"
- testImplementation "org.apache.kafka:kafka_2.11:$kafkaV:test"
- // https://mvnrepository.com/artifact/com.github.marschall/memoryfilesystem
- testCompile group: 'com.github.marschall', name: 'memoryfilesystem', version: '2.1.0'
- // https://mvnrepository.com/artifact/org.apache.sshd/sshd-core
- testCompile group: 'org.apache.sshd', name: 'sshd-core', version: '2.4.0'
- // https://mvnrepository.com/artifact/org.apache.sshd/sshd-sftp
- testCompile group: 'org.apache.sshd', name: 'sshd-sftp', version: '2.4.0'
}
compileKotlin {
...
@@ -20,6 +20,7 @@ package org.memobase
import org.apache.kafka.streams.KafkaStreams
import org.apache.logging.log4j.LogManager
import org.memobase.settings.SettingsLoader
import kotlin.system.exitProcess
class App {
@@ -27,7 +28,15 @@ class App {
private val log = LogManager.getLogger("TableDataTransform")
@JvmStatic fun main(args: Array<String>) {
try {
val settings = SettingsLoader("app.yml")
val settings = SettingsLoader(
listOf(
"sheet",
"header.count",
"header.line",
"identifier"
),
readSftpSettings = true
)
val topology = KafkaTopology(settings).build()
val stream = KafkaStreams(topology, settings.kafkaStreamsSettings)
...
/*
* record-parser
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.memobase
import org.apache.logging.log4j.Logger
import java.util.*
class KafkaSettings(private val map: Map<String, String>, private val log: Logger) {
val settings = Properties()
fun setKafkaProperty(propertyName: String, defaultValue: Any? = null, abortIfMissing: Boolean = false) {
val envProperty = propertyName.replace("\\.".toRegex(), "_").toUpperCase()
when {
System.getenv(envProperty) != null -> {
log.debug("Found value for property $propertyName in environment variable $envProperty.")
settings.setProperty(propertyName, System.getenv(envProperty))
}
map.containsKey(propertyName) -> {
log.debug("Found value for property $propertyName in app.yml file.")
settings.setProperty(propertyName, map[propertyName])
}
defaultValue != null -> {
log.debug("Using default value $defaultValue for $propertyName.")
settings[propertyName] = defaultValue
}
abortIfMissing -> {
log.error("Required producer property $propertyName was not set! Aborting...")
throw MissingSettingException("missing", "kafka.producer.$propertyName")
}
else -> log.trace("No value for property $propertyName found")
}
}
}
\ No newline at end of file
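Editor's note: the lookup order implemented above is environment variable first, then the supplied map (parsed from app.yml), then the default value; abortIfMissing only triggers when all three are absent. A minimal usage sketch of the class as shown, with hypothetical values:

    import org.apache.logging.log4j.LogManager

    fun main() {
        val log = LogManager.getLogger("KafkaSettingsExample")
        // The map stands in for the parsed kafka section of app.yml.
        val merger = KafkaSettings(mapOf("bootstrap.servers" to "localhost:9092"), log)
        merger.setKafkaProperty("bootstrap.servers", abortIfMissing = true) // resolved from the map
        merger.setKafkaProperty("batch.size", 16384)                        // falls back to the default
        merger.setKafkaProperty("acks")                                     // no source anywhere: trace log, skipped
        println(merger.settings) // contains bootstrap.servers and batch.size
    }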
@@ -28,6 +28,8 @@ import org.apache.kafka.streams.Topology
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.Predicate
import org.apache.logging.log4j.LogManager
+ import org.memobase.settings.SettingsLoader
+ import org.memobase.sftp.SftpClient
import java.io.File
import java.io.StringReader
@@ -35,11 +37,13 @@ class KafkaTopology(private val settings: SettingsLoader) {
private val log = LogManager.getLogger("KafkaTopology")
private val sftpClient: SftpClient = SftpClient(settings.sftpSettings)
private val sheetIndex = settings.appSettings.getProperty("sheet.index").toInt()
private val sheetIndex = settings.appSettings.getProperty("sheet").toInt()
private val headerCount = settings.appSettings.getProperty("header.count").toInt()
private val propertyNamesIndex = settings.appSettings.getProperty("header.line").toInt()
private val identifierIndex = settings.appSettings.getProperty("identifier").toInt()
+ private val reportingTopic = settings.outputTopic + "-reporting"
fun build(): Topology {
val builder = StreamsBuilder()
@@ -51,20 +55,23 @@
)
// report filtered error message from previous job.
- // TODO: what to send to main topic?
branchedSource[1]
- .map { key, _ ->
- reportToJson(
+ .mapValues { key, _ ->
+ Klaxon().toJsonString(
Report(
key,
"Failure",
"Ignored message as file validator reported error.",
0
"FAILURE",
"Ignored message due to previous error."
)
)
}
.to("${settings.outputTopic}-reporting")
+ // filtered result simply sends ERROR along!
+ branchedSource[1]
+ .mapValues { _ -> "ERROR" }
+ .to(settings.outputTopic)
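Editor's note: branchedSource is presumably built with KStream.branch, which in this Kafka Streams version splits a stream into an array of sub-streams by predicate order. The actual branching code is elided above; a hypothetical sketch using the filter function and inputTopic from this class:

    val branchedSource = builder.stream<String, String>(settings.inputTopic)
        .branch(
            Predicate<String, String> { _, value -> filter(value) },   // [0]: messages the validator passed
            Predicate<String, String> { _, value -> !filter(value) }   // [1]: messages flagged as errors
        )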
// work branch
val formatBranches = branchedSource[0]
.mapValues { _, value -> parseJsonObject(value) }
@@ -76,28 +83,28 @@
// CSV Branch
buildHelper(formatBranches[0]
.flatMapValues { _, value -> csvMapper(value) })
// Excel Branch
buildHelper(formatBranches[1]
.flatMapValues { value -> excelMapper(value) })
// OpenDocument Spreadsheet Branch
buildHelper(formatBranches[2]
.flatMapValues { value -> odsMapper(value) })
.mapValues { key, value -> errorWrapper(key, value) })
return builder.build()
}
- private fun buildHelper(stream: KStream<String, Pair<Pair<String, JsonObject>, Report>>) {
- stream
- .map { _, value -> KeyValue(value.first.first, value.first.second.toJsonString()) }
+ private fun buildHelper(stream: KStream<String, Pair<List<Pair<Pair<String, JsonObject>, Report>>, Report>>) {
+ val records = stream
+ .flatMapValues { _, value -> value.first }
+ records
+ .map { _, value -> KeyValue(value.first.first, value.first.second) }
+ .mapValues { value -> value.toJsonString() }
.to(settings.outputTopic)
+ records
+ .map { _, value -> KeyValue(value.second.id, value.second) }
+ .mapValues { value -> Klaxon().toJsonString(value) }
+ .to(reportingTopic)
stream
.map { _, value -> reportToJson(value.second) }
.to("${settings.outputTopic}-reporting")
.to(settings.processReportTopic)
}
private fun filter(value: String): Boolean {
@@ -105,10 +112,18 @@
}
private fun parseJsonObject(value: String): JsonObject {
// TODO: try catch errors
return Klaxon().parseJsonObject(StringReader(value))
}
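Editor's note: one way the TODO above could be addressed, sketched under the assumption that Klaxon signals malformed input with com.beust.klaxon.KlaxonException; this helper is not part of the commit:

    private fun parseJsonObjectSafe(value: String): JsonObject? =
        try {
            Klaxon().parseJsonObject(StringReader(value))
        } catch (ex: KlaxonException) {
            log.error("Could not parse incoming message as JSON: ${ex.localizedMessage}")
            null // a caller could route a failure Report instead of a record
        }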
+ private fun errorWrapper(key: String, value: JsonObject): Pair<List<Pair<Pair<String, JsonObject>, Report>>, Report> {
+ return try {
+ val result = csvMapper(value)
+ Pair(result, Report(key, "SUCCESS", "Transformed table data into ${result.count()} records."))
+ } catch (ex: InvalidInputException) {
+ Pair(emptyList(), Report(key, "FAILED", ex.localizedMessage))
+ }
+ }
private fun csvMapper(value: JsonObject): List<Pair<Pair<String, JsonObject>, Report>> {
val resultMessages = mutableListOf<Pair<Pair<String, JsonObject>, Report>>()
val mutableSet = mutableSetOf<String>()
@@ -131,17 +146,18 @@
headerProperties = line
headerProperties.forEachIndexed { index, property ->
if (property.isEmpty()) {
throw InvalidInputException("Missing a property name on row $count in column $index!")
throw InvalidInputException("Missing a property name on row $count in column ${index + 1}!")
}
+ if (property.contains(Regex("[+,.]"))) {
+ throw InvalidInputException("Invalid property name $property on row $count in column ${index + 1}! You may not use any of the following characters: + , . ")
+ }
- // TODO: Any validations on the field names themselves?
- // Like no special characters? might be a good idea for processing
}
}
continue
}
- // the +1 ensures, that users can start columns beginning at 1!
- val identifier = line[identifierIndex + 1]
+ // the -1 ensures that users can start columns beginning at 1!
+ val identifier = line[identifierIndex - 1]
when {
identifier.isEmpty() -> {
throw InvalidInputException("The unique identifier in column $identifierIndex in row $count is empty!")
@@ -153,20 +169,16 @@
mutableSet.add(identifier)
}
}
val keyValueMap = json {
obj(
headerProperties.zip(line)
)
}
val result = Pair(identifier, keyValueMap)
val report = Report(
identifier,
"SUCCESS",
"Successfully created record with identifier $identifier with format CSV!",
1
"Successfully created record with identifier $identifier from row $count!"
)
resultMessages.add(Pair(result, report))
}
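Editor's note: the -1 above maps the user-facing 1-based column number onto Kotlin's 0-based list index. A standalone illustration with made-up values:

    fun main() {
        val line = listOf("AFZ-001", "Titel", "1988")  // one parsed CSV row
        val identifierIndex = 1                        // user counts columns from 1
        println(line[identifierIndex - 1])             // prints AFZ-001
    }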
...
/*
* sftp-reader
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.memobase
class MissingSettingException(source: String, setting: String) :
Exception(
when (source) {
"env" -> "Internal Configuration Error: $setting!"
"missing" -> "Internal Configuration Error: Missing setting $setting in configuration file."
else -> "User Configuration Error: A value for setting $setting is required."
}
)
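Editor's note: for illustration, the three source tags render like this (the setting names here are hypothetical):

    fun main() {
        println(MissingSettingException("missing", "kafka.topic.out").message)
        // Internal Configuration Error: Missing setting kafka.topic.out in configuration file.
        println(MissingSettingException("env", "Missing environment variable SFTP_PASSWORD").message)
        // Internal Configuration Error: Missing environment variable SFTP_PASSWORD!
        println(MissingSettingException("user", "sftp.host").message)
        // User Configuration Error: A value for setting sftp.host is required.
    }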
@@ -18,14 +18,10 @@
package org.memobase
- import java.time.Instant
data class Report(
val id: String,
val status: String,
- val message: String,
- val recordCount: Int,
- val timeStamp: String = Instant.now().toString()
+ val message: String
) {
override fun equals(other: Any?): Boolean {
@@ -40,7 +36,6 @@ data class Report(
var result = id.hashCode()
result = 31 * result + status.hashCode()
result = 31 * result + message.hashCode()
- result = 31 * result + recordCount.hashCode()
return result
}
}
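Editor's note: a sketch of how a Report now serializes, matching the Klaxon().toJsonString(...) calls in the topology; field order in Klaxon's output is not guaranteed:

    import com.beust.klaxon.Klaxon

    fun main() {
        val report = Report("AFZ-001", "SUCCESS", "Successfully created record with identifier AFZ-001 from row 2!")
        println(Klaxon().toJsonString(report))
        // e.g. {"id" : "AFZ-001", "message" : "Successfully created record ...", "status" : "SUCCESS"}
    }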
/*
* sftp-reader
* Copyright (C) 2020 Memobase Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.memobase
import java.io.File
import java.io.FileInputStream
import java.util.Optional
import java.util.Properties
import kotlin.system.exitProcess
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.kafka.streams.StreamsConfig
import org.apache.logging.log4j.LogManager
import org.snakeyaml.engine.v2.api.Load
import org.snakeyaml.engine.v2.api.LoadSettings
import org.snakeyaml.engine.v2.exceptions.MissingEnvironmentVariableException
class SettingsLoader(private val fileName: String) {
private val log = LogManager.getLogger("SettingsLoader")
private fun loadYaml(): Any {
val settings = LoadSettings.builder().setEnvConfig(Optional.of(CustomEnvConfig())).build()
val load = Load(settings)
try {
val file = File("/configs/$fileName")
return if (file.isFile) {
load.loadFromInputStream(FileInputStream(file))
} else {
log.warn("Loading default properties in app.yml from classpath!")
load.loadFromInputStream(ClassLoader.getSystemResourceAsStream(fileName))
}
} catch (ex: MissingEnvironmentVariableException) {
throw MissingSettingException("env", "${ex.localizedMessage}")
}
}
private val suppliedKafkaProducerSettings: Map<String, String>
private val suppliedKafkaStreamsSettings: Map<String, String>
private val mappedYaml: Map<String, Any>
val inputTopic: String
val outputTopic: String
val appSettings = Properties()
val sftpSettings = Properties()
val kafkaProducerSettings: Properties
val kafkaStreamsSettings: Properties
init {
try {
val rawYaml = loadYaml()
mappedYaml = rawYaml as Map<String, Any>
val kafkaOptions = mappedYaml["kafka"] as Map<String, Any>
val topics = kafkaOptions["topic"] as Map<String, String>
inputTopic = topics["in"].orEmpty()
if (inputTopic.isEmpty()) {
throw MissingSettingException("missing", "kafka.topic.in")
}
outputTopic = topics["out"].orEmpty()
if (outputTopic.isEmpty()) {
throw MissingSettingException("missing", "kafka.topic.out")
}
suppliedKafkaProducerSettings = kafkaOptions["producer"] as Map<String, String>
suppliedKafkaStreamsSettings = kafkaOptions["stream"] as Map<String, String>
val appMap = mappedYaml["app"] as Map<String, Any>
appSettings.setProperty("header.count",addSetting(appMap,"header.count"))
appSettings.setProperty("header.line", addSetting(appMap,"header.line"))
appSettings.setProperty("sheet.index", addSetting(appMap,"sheet.index"))
appSettings.setProperty("identifier", addSetting(appMap,"identifier"))
val sftpMap = mappedYaml["sftp"] as Map<String, Any>
sftpSettings.setProperty("host", addSetting(sftpMap,"host"))
sftpSettings.setProperty("port", addSetting(sftpMap,"port"))
sftpSettings.setProperty("user", addSetting(sftpMap,"user"))
sftpSettings.setProperty("password", addSetting(sftpMap,"password"))
} catch (ex: ClassCastException) {
ex.printStackTrace()
log.error("The properties file has an invalid structure: $ex")
exitProcess(1)
} catch (ex: MissingSettingException) {
log.error(ex.message)
exitProcess(1)
}
kafkaProducerSettings = initKafkaProducerSettings()
kafkaStreamsSettings = initKafkaStreamsSettings()
}
private fun addSetting(map: Map<String, Any>, base: String): String {
val levels = base.split('.')
return recursiveFunction(levels.first(), levels.drop(1), map, base)
}
private fun recursiveFunction(current: String, rest: List<String>, map: Map<String, Any>, base: String): String {
if (map.containsKey(current)) {
return when (val value = map[current]) {
is String -> if (value.isNotEmpty()) value else throw MissingSettingException("missing", base)
is Int -> value.toString()
is Boolean -> value.toString()
null -> throw MissingSettingException("missing", base)
is Map<*, *> -> recursiveFunction(rest.first(), rest.drop(1), value as Map<String, Any>, base)
else -> throw MissingSettingException("missing", base)
}
} else {
throw MissingSettingException("missing", base)
}
}
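Editor's note: the recursion above descends one nested map per dot-separated segment and stringifies the leaf. The same walk done by hand, against a hypothetical parsed app section:

    fun main() {
        // Stands in for mappedYaml["app"] after SnakeYAML parsing.
        val appMap: Map<String, Any> = mapOf(
            "header" to mapOf("count" to 1, "line" to 1),
            "identifier" to "1"
        )
        val levels = "header.count".split('.')           // ["header", "count"]
        @Suppress("UNCHECKED_CAST")
        val nested = appMap[levels[0]] as Map<String, Any> // descend into the "header" map
        println(nested[levels[1]].toString())            // prints 1, as addSetting would return "1"
    }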
private fun initKafkaProducerSettings(): Properties {
val merger = KafkaSettings(suppliedKafkaProducerSettings, log)
merger.setKafkaProperty(ProducerConfig.CLIENT_ID_CONFIG, abortIfMissing = true)
merger.setKafkaProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, abortIfMissing = true)
merger.setKafkaProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer::class.java)
merger.setKafkaProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer::class.java)
merger.setKafkaProperty(ProducerConfig.BATCH_SIZE_CONFIG, 16384)
merger.setKafkaProperty(ProducerConfig.BUFFER_MEMORY_CONFIG, 33445532)
merger.setKafkaProperty(ProducerConfig.LINGER_MS_CONFIG, 1)
merger.setKafkaProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true)
merger.setKafkaProperty(ProducerConfig.ACKS_CONFIG)
merger.setKafkaProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, "zstd")
merger.setKafkaProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG)
merger.setKafkaProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG)
merger.setKafkaProperty(ProducerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.SEND_BUFFER_CONFIG)
merger.setKafkaProperty(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG)
merger.setKafkaProperty(ProducerConfig.RETRY_BACKOFF_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.RETRIES_CONFIG)
merger.setKafkaProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG)
merger.setKafkaProperty(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG)
merger.setKafkaProperty(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG)
merger.setKafkaProperty(ProducerConfig.METRICS_RECORDING_LEVEL_CONFIG)
merger.setKafkaProperty(ProducerConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.MAX_BLOCK_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION)
merger.setKafkaProperty(ProducerConfig.MAX_REQUEST_SIZE_CONFIG)
merger.setKafkaProperty(ProducerConfig.METADATA_MAX_AGE_CONFIG)
merger.setKafkaProperty(ProducerConfig.RECEIVE_BUFFER_CONFIG)
merger.setKafkaProperty(ProducerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG)
merger.setKafkaProperty(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG)
return merger.settings
}
private fun initKafkaStreamsSettings(): Properties {
val merger = KafkaSettings(suppliedKafkaStreamsSettings, log)
merger.setKafkaProperty(StreamsConfig.APPLICATION_ID_CONFIG, abortIfMissing = true)
merger.setKafkaProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, abortIfMissing = true)
merger.setKafkaProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().javaClass)
merger.setKafkaProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().javaClass)
merger.setKafkaProperty(StreamsConfig.APPLICATION_SERVER_CONFIG)
merger.setKafkaProperty(StreamsConfig.ADMIN_CLIENT_PREFIX)
merger.setKafkaProperty(StreamsConfig.AT_LEAST_ONCE)
merger.setKafkaProperty(StreamsConfig.BUFFERED_RECORDS_PER_PARTITION_CONFIG)
merger.setKafkaProperty(StreamsConfig.CLIENT_ID_CONFIG)
merger.setKafkaProperty(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG)
merger.setKafkaProperty(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.CONSUMER_PREFIX)
merger.setKafkaProperty(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG)
merger.setKafkaProperty(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG)
merger.setKafkaProperty(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG)
merger.setKafkaProperty(StreamsConfig.DEFAULT_WINDOWED_KEY_SERDE_INNER_CLASS)
merger.setKafkaProperty(StreamsConfig.DEFAULT_WINDOWED_VALUE_SERDE_INNER_CLASS)
merger.setKafkaProperty(StreamsConfig.EXACTLY_ONCE)
merger.setKafkaProperty(StreamsConfig.GLOBAL_CONSUMER_PREFIX)
merger.setKafkaProperty(StreamsConfig.METRIC_REPORTER_CLASSES_CONFIG)
merger.setKafkaProperty(StreamsConfig.METRICS_NUM_SAMPLES_CONFIG)
merger.setKafkaProperty(StreamsConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.MAIN_CONSUMER_PREFIX)
merger.setKafkaProperty(StreamsConfig.MAX_TASK_IDLE_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.METADATA_MAX_AGE_CONFIG)
merger.setKafkaProperty(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG)
merger.setKafkaProperty(StreamsConfig.NO_OPTIMIZATION)
merger.setKafkaProperty(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG)
merger.setKafkaProperty(StreamsConfig.NUM_STREAM_THREADS_CONFIG)
merger.setKafkaProperty(StreamsConfig.OPTIMIZE)
merger.setKafkaProperty(StreamsConfig.PARTITION_GROUPER_CLASS_CONFIG)
merger.setKafkaProperty(StreamsConfig.POLL_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.PROCESSING_GUARANTEE_CONFIG)
merger.setKafkaProperty(StreamsConfig.PRODUCER_PREFIX)
merger.setKafkaProperty(StreamsConfig.RECEIVE_BUFFER_CONFIG)
merger.setKafkaProperty(StreamsConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.RECONNECT_BACKOFF_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.REPLICATION_FACTOR_CONFIG)
merger.setKafkaProperty(StreamsConfig.REQUEST_TIMEOUT_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.RESTORE_CONSUMER_PREFIX)
merger.setKafkaProperty(StreamsConfig.RETRIES_CONFIG)
merger.setKafkaProperty(StreamsConfig.RETRY_BACKOFF_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG)
merger.setKafkaProperty(StreamsConfig.SECURITY_PROTOCOL_CONFIG)
merger.setKafkaProperty(StreamsConfig.SEND_BUFFER_CONFIG)
merger.setKafkaProperty(StreamsConfig.STATE_CLEANUP_DELAY_MS_CONFIG)
merger.setKafkaProperty(StreamsConfig.STATE_DIR_CONFIG)
merger.setKafkaProperty(StreamsConfig.TOPIC_PREFIX)
merger.setKafkaProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION)
merger.setKafkaProperty(StreamsConfig.WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG)
return merger.settings
}
}
/*
* Table Data Import Service
* Copyright (C) 2020 Memoriav
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.memobase
import net.schmizz.sshj.SSHClient
import net.schmizz.sshj.sftp.SFTPClient
import net.schmizz.sshj.sftp.FileAttributes
import net.schmizz.sshj.sftp.FileMode
import net.schmizz.sshj.sftp.RemoteFile
import net.schmizz.sshj.sftp.OpenMode
import net.schmizz.sshj.transport.verification.PromiscuousVerifier
import net.schmizz.sshj.userauth.UserAuthException
import org.apache.logging.log4j.LogManager
import java.io.Closeable