Commit 1d3656fe authored by Jonas Waeber's avatar Jonas Waeber
Browse files

Update tests

parent d9c8e2d1
......@@ -25,4 +25,4 @@ data class TestParams(
val inputKey: String,
val expectedOutputKey: String,
val expectedOutputDirectoryName: String
)
\ No newline at end of file
)
......@@ -18,6 +18,10 @@
package org.memobase
import com.beust.klaxon.Klaxon
import java.io.File
import java.io.FileInputStream
import java.nio.charset.Charset
import java.util.stream.Stream
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.kafka.streams.TopologyTestDriver
......@@ -29,10 +33,6 @@ import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
import org.memobase.extensions.EmbeddedSftpServer
import org.memobase.settings.SettingsLoader
import java.io.File
import java.io.FileInputStream
import java.nio.charset.Charset
import java.util.stream.Stream
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class Tests {
......@@ -86,8 +86,7 @@ class Tests {
assertThat(record).isNotNull
var count = 0
while (record != null)
{
while (record != null) {
count += 1
assertThat(record)
.isNotNull
......@@ -144,7 +143,49 @@ class Tests {
"baugazh_output"
)*/
)
/*
@Test
fun `test create records`() {
val settingsLoader = SettingsLoader(
listOf(
"sheet",
"header.count",
"header.line",
"identifier"
),
"test2.yml",
useStreamsConfig = true,
readSftpSettings = true
)
val testDriver = TopologyTestDriver(KafkaTopology(settingsLoader).build(), settingsLoader.kafkaStreamsSettings)
val factory = ConsumerRecordFactory(
StringSerializer(), StringSerializer()
)
testDriver.pipeInput(
factory.create(
settingsLoader.inputTopic, "mapping_baugazh.csv", readFile("baugazh_csv_import.json")
)
)
}
var record = testDriver.readOutput(
settingsLoader.outputTopic,
StringDeserializer(),
StringDeserializer()
)
var count = 1
while (record != null) {
FileOutputStream(File("/home/jonas/Documents/work/membase/utilities/tripel-parser/src/test/resources/data/record$count.json")).bufferedWriter(
Charset.defaultCharset()).use {
it.write(record.value())
}
record = testDriver.readOutput(
settingsLoader.outputTopic,
StringDeserializer(),
StringDeserializer()
)
count += 1
}
}*/
}
......@@ -13,7 +13,7 @@ app:
kafka:
streams:
bootstrap.servers: localhost:12345
application.id: test-clinet-1234
application.id: client-id
topic:
in: test-topic-in
out: test-topic-out
......
Markdown is supported
0% Try again or attach a new file.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment