Multi-language documentation for Injecting Kafka Producer Beans example #826

Merged 4 commits on Aug 21, 2023
src/main/docs/guide/kafkaClient/kafkaClientScope.adoc (15 changes: 4 additions & 11 deletions)
@@ -3,12 +3,8 @@ If you need maximum flexibility and don't want to use the ann:configuration.kafk
Consider the following example:

.Using a KafkaProducer directly
[source,java]
----
include::{testskafka}/producer/inject/BookSender.java[tags=imports, indent=0]

include::{testskafka}/producer/inject/BookSender.java[tags=clazz, indent=0]
----
snippet::io.micronaut.kafka.docs.producer.inject.BookSender[tags="imports,clazz"]

<1> The `Producer` is dependency injected into the constructor. If not specified in configuration, the key and value serializer are inferred from the generic type arguments.
<2> The `Producer` is used to send records
@@ -17,14 +13,11 @@ Note that there is no need to call the `close()` method to shut down the `KafkaP

The previous example can be tested in JUnit with the following test:


.Using a KafkaProducer directly
[source,java]
----
include::{testskafka}/producer/inject/BookSenderTest.java[tags=test, indent=0]
----

snippet::io.micronaut.kafka.docs.producer.inject.BookSenderTest[tags=test, indent=0]

<1> A Kafka docker container is used
<2> The `BookSender` is retrieved from the api:context.ApplicationContext[] and a `ProducerRecord` sent

By using the link:{kafkaapi}/org/apache/kafka/clients/producer/KafkaProducer.html[KafkaProducer] API directly you open up even more options if you require transactions (exactly-once delivery) or want control over when records are flushed etc.
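
To make those "more options" concrete, here is a minimal sketch (not part of this PR) of driving the injected `Producer` transactionally and flushing it explicitly. The client id `book-producer-tx` is hypothetical and its configuration is assumed to set a `transactional.id`; the calls used are the standard Kafka `Producer` API (`initTransactions()`, `beginTransaction()`, `flush()`, `commitTransaction()`, `abortTransaction()`).

[source,java]
----
package io.micronaut.kafka.docs.producer.inject;

import io.micronaut.configuration.kafka.annotation.KafkaClient;
import jakarta.inject.Singleton;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.List;

@Singleton
public class TransactionalBookSender {

    private final Producer<String, Book> kafkaProducer;

    // 'book-producer-tx' is a hypothetical client id; its producer configuration
    // must set a transactional.id for the transactional calls below to work.
    TransactionalBookSender(@KafkaClient("book-producer-tx") Producer<String, Book> kafkaProducer) {
        this.kafkaProducer = kafkaProducer;
        this.kafkaProducer.initTransactions();
    }

    public void sendAll(String author, List<Book> books) {
        kafkaProducer.beginTransaction();
        try {
            for (Book book : books) {
                kafkaProducer.send(new ProducerRecord<>("books", author, book));
            }
            kafkaProducer.flush();             // explicit control over when records are flushed
            kafkaProducer.commitTransaction(); // the whole batch becomes visible atomically
        } catch (RuntimeException e) {
            kafkaProducer.abortTransaction();  // simplified error handling for the sketch
            throw e;
        }
    }
}
----

For exactly-once delivery the consumers of the `books` topic also need to read with `isolation.level=read_committed`.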
@@ -0,0 +1,10 @@
package io.micronaut.kafka.docs.producer.inject

import groovy.transform.Canonical
import io.micronaut.serde.annotation.Serdeable

@Serdeable
@Canonical
class Book {
String title
}
@@ -0,0 +1,29 @@
package io.micronaut.kafka.docs.producer.inject;

// tag::imports[]
import io.micronaut.configuration.kafka.annotation.KafkaClient
import io.micronaut.context.annotation.Requires
import jakarta.inject.Singleton
import org.apache.kafka.clients.producer.Producer
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.clients.producer.RecordMetadata

import java.util.concurrent.Future
// end::imports[]

@Requires(property = 'spec.name', value = 'BookSenderTest')
// tag::clazz[]
@Singleton
class BookSender {

private final Producer<String, Book> kafkaProducer

BookSender(@KafkaClient('book-producer') Producer<String, Book> kafkaProducer) { // <1>
this.kafkaProducer = kafkaProducer
}

Future<RecordMetadata> send(String author, Book book) {
kafkaProducer.send(new ProducerRecord<>('books', author, book)) // <2>
}
}
// end::clazz[]
@@ -0,0 +1,22 @@
package io.micronaut.kafka.docs.producer.inject

import io.micronaut.context.ApplicationContext
import spock.lang.Specification

class BookSenderTest extends Specification {

// tag::test[]
void "test Book Sender"() {
expect:
ApplicationContext ctx = ApplicationContext.run( // <1>
"kafka.enabled": true, "spec.name": "BookSenderTest"
)
BookSender bookSender = ctx.getBean(BookSender) // <2>
Book book = new Book('The Stand')
bookSender.send('Stephen King', book)

cleanup:
ctx.close()
}
// end::test[]
}
@@ -0,0 +1,6 @@
package io.micronaut.kafka.docs.producer.inject

import io.micronaut.serde.annotation.Serdeable

@Serdeable
data class Book(val title: String)
@@ -0,0 +1,23 @@
package io.micronaut.kafka.docs.producer.inject

// tag::imports[]
import io.micronaut.configuration.kafka.annotation.KafkaClient
import io.micronaut.context.annotation.Requires
import jakarta.inject.Singleton
import org.apache.kafka.clients.producer.Producer
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.clients.producer.RecordMetadata
import java.util.concurrent.Future
// end::imports[]

@Requires(property = "spec.name", value = "BookSenderTest")
// tag::clazz[]
@Singleton
class BookSender(
@param:KafkaClient("book-producer") private val kafkaProducer: Producer<String, Book>) { // <1>

fun send(author: String, book: Book): Future<RecordMetadata> {
return kafkaProducer.send(ProducerRecord("books", author, book)) // <2>
}
}
// end::clazz[]
@@ -0,0 +1,19 @@
package io.micronaut.kafka.docs.producer.inject

import io.micronaut.context.ApplicationContext
import org.junit.jupiter.api.Test
import java.util.Map

internal class BookSenderTest {

// tag::test[]
@Test
fun testBookSender() {
ApplicationContext.run(Map.of<String, Any>( // <1>
"kafka.enabled", "true", "spec.name", "BookSenderTest")).use { ctx ->
val bookSender = ctx.getBean(BookSender::class.java) // <2>
val book = Book("The Stand")
bookSender.send("Stephen King", book)
}
}
// end::test[]
}
@@ -0,0 +1,6 @@
package io.micronaut.kafka.docs.producer.inject;

import io.micronaut.serde.annotation.Serdeable;

@Serdeable
public record Book (String title) {}
@@ -1,8 +1,8 @@
package io.micronaut.configuration.kafka.docs.producer.inject;
package io.micronaut.kafka.docs.producer.inject;

// tag::imports[]
import io.micronaut.configuration.kafka.annotation.KafkaClient;
import io.micronaut.configuration.kafka.docs.consumer.batch.Book;
import io.micronaut.context.annotation.Requires;
import jakarta.inject.Singleton;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
@@ -11,20 +11,19 @@
import java.util.concurrent.Future;
// end::imports[]

@Requires(property = "spec.name", value = "BookSenderTest")
// tag::clazz[]
@Singleton
public class BookSender {

private final Producer<String, Book> kafkaProducer;

public BookSender(
@KafkaClient("book-producer") Producer<String, Book> kafkaProducer) { // <1>
public BookSender(@KafkaClient("book-producer") Producer<String, Book> kafkaProducer) { // <1>
this.kafkaProducer = kafkaProducer;
}

public Future<RecordMetadata> send(String author, Book book) {
return kafkaProducer.send(new ProducerRecord<>("books", author, book)); // <2>
}

}
// end::clazz[]
@@ -0,0 +1,22 @@
package io.micronaut.kafka.docs.producer.inject;

import io.micronaut.context.ApplicationContext;
import org.junit.jupiter.api.Test;

import java.util.Map;

class BookSenderTest {

// tag::test[]
@Test
void testBookSender() {

ApplicationContext ctx = ApplicationContext.run( // <1>
Map.of("kafka.enabled", "true","spec.name", "BookSenderTest")
);
BookSender bookSender = ctx.getBean(BookSender.class); // <2>
Book book = new Book("The Stand");
bookSender.send("Stephen King", book);
ctx.close();
}
// end::test[]
}
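
Callout <1> in the documentation says a Kafka docker container is used, but none of the test classes in this diff start one themselves; the docs build supplies the broker. As a hedged illustration only, one way to provide it explicitly is Testcontainers: the test class name, the image tag, and the `kafka.bootstrap.servers` wiring below are assumptions, not part of this PR.

[source,java]
----
package io.micronaut.kafka.docs.producer.inject;

import io.micronaut.context.ApplicationContext;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.utility.DockerImageName;

import java.util.Map;

class BookSenderContainerTest {

    @Test
    void testBookSenderAgainstContainer() {
        // Start a throwaway Kafka broker in Docker (illustrative image tag)
        try (KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.4.0"))) {
            kafka.start();
            try (ApplicationContext ctx = ApplicationContext.run(Map.of(
                    "kafka.enabled", "true",
                    "kafka.bootstrap.servers", kafka.getBootstrapServers(), // point the producer at the container
                    "spec.name", "BookSenderTest"))) {
                BookSender bookSender = ctx.getBean(BookSender.class);
                bookSender.send("Stephen King", new Book("The Stand"));
            }
        }
    }
}
----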