Merge pull request #9862 from kcacademic/webflux-threads

Adding source code for article tracked under BAEL-4109.
Commit 5a4bfbc0fd by bfontana, 2020-08-14 14:07:45 -03:00 (committed via GitHub)
9 changed files with 313 additions and 0 deletions

pom.xml

@@ -556,6 +556,7 @@
            <module>atomikos</module>
            <module>reactive-systems</module>
            <module>slack</module>
            <module>spring-webflux-threads</module>
        </modules>
    </profile>

@@ -1067,6 +1068,7 @@
            <module>atomikos</module>
            <module>reactive-systems</module>
            <module>slack</module>
            <module>spring-webflux-threads</module>
        </modules>
    </profile>

spring-webflux-threads/.gitignore
@@ -0,0 +1,25 @@
/target/
!.mvn/wrapper/maven-wrapper.jar
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/build/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/

spring-webflux-threads/README.md
@@ -0,0 +1,9 @@
## Spring WebFlux Concurrency

This module contains articles about the concurrency model in Spring WebFlux.

Please note that some of the code assumes MongoDB and Kafka are running on the local machine with their default configurations.

If you want to experiment with Tomcat or Jetty instead of Netty, just uncomment the corresponding lines in pom.xml and rebuild.

### Relevant Articles:
- [Concurrency in Spring WebFlux]()
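
For quick manual testing, here is a minimal client sketch. It is not part of the committed sources: the class name ThreadsClientSketch is hypothetical, and it assumes the application listens on localhost:8080 (the Spring Boot default, which this module does not override). It simply calls the /threads/webflux endpoint defined in Controller.java below and prints each line of the thread report.

package com.baeldung.webflux;

import org.springframework.web.reactive.function.client.WebClient;

// Hypothetical client, for illustration only: queries the thread-report endpoint
// exposed by Controller and prints one line per live JVM thread.
public class ThreadsClientSketch {

    public static void main(String[] args) {
        WebClient.create("http://localhost:8080")   // assumes the default Spring Boot port
            .get()
            .uri("/threads/webflux")
            .retrieve()
            .bodyToFlux(String.class)               // the endpoint returns a Flux<String>
            .doOnNext(System.out::println)
            .blockLast();                           // block only because this is a plain main() demo
    }
}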

spring-webflux-threads/pom.xml
@@ -0,0 +1,90 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.baeldung.spring</groupId>
    <artifactId>spring-webflux-threads</artifactId>
    <version>1.0.0-SNAPSHOT</version>
    <name>spring-webflux-threads</name>
    <packaging>jar</packaging>
    <description>Spring WebFlux Threads Sample</description>

    <parent>
        <groupId>com.baeldung</groupId>
        <artifactId>parent-boot-2</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <relativePath>../parent-boot-2</relativePath>
    </parent>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-webflux</artifactId>
            <!-- Uncomment the following to switch from Netty to Tomcat/Jetty -->
            <!--
            <exclusions>
                <exclusion>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-reactor-netty</artifactId>
                </exclusion>
            </exclusions>
            -->
        </dependency>
        <!-- Uncomment the following to switch from Netty to Tomcat -->
        <!--
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-tomcat</artifactId>
        </dependency>
        -->
        <!-- Uncomment the following to switch from Netty to Jetty -->
        <!--
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jetty</artifactId>
        </dependency>
        <dependency>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>jetty-reactive-httpclient</artifactId>
        </dependency>
        -->
        <dependency>
            <groupId>io.reactivex.rxjava2</groupId>
            <artifactId>rxjava</artifactId>
            <version>2.2.19</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-mongodb-reactive</artifactId>
        </dependency>
        <dependency>
            <groupId>io.projectreactor.kafka</groupId>
            <artifactId>reactor-kafka</artifactId>
            <version>1.2.2.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>io.projectreactor</groupId>
            <artifactId>reactor-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

spring-webflux-threads/src/main/java/com/baeldung/webflux/Application.java
@@ -0,0 +1,13 @@
package com.baeldung.webflux;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}

spring-webflux-threads/src/main/java/com/baeldung/webflux/Controller.java
@@ -0,0 +1,128 @@
package com.baeldung.webflux;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.reactive.function.client.WebClient;

import io.reactivex.Observable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.receiver.ReceiverRecord;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;

@RestController
@RequestMapping("/")
public class Controller {

    @Autowired
    private PersonRepository personRepository;

    // Custom bounded elastic scheduler used to move WebClient processing off the Netty event loop
    private Scheduler scheduler = Schedulers.newBoundedElastic(5, 10, "MyThreadGroup");

    private Logger logger = LoggerFactory.getLogger(Controller.class);

    // Baseline endpoint: reports the threads present when only WebFlux/Netty is involved
    @GetMapping("/threads/webflux")
    public Flux<String> getThreadsWebflux() {
        return Flux.fromIterable(getThreads());
    }

    // Fires a reactive WebClient call on the custom scheduler, then reports the live threads
    @GetMapping("/threads/webclient")
    public Flux<String> getThreadsWebClient() {
        WebClient.create("http://localhost:8080/index")
            .get()
            .retrieve()
            .bodyToMono(String.class)
            .subscribeOn(scheduler)
            .publishOn(scheduler)
            .doOnNext(s -> logger.info("Response: {}", s))
            .subscribe();
        return Flux.fromIterable(getThreads());
    }

    // Runs a small RxJava pipeline on the trampoline scheduler, then reports the live threads
    @GetMapping("/threads/rxjava")
    public Observable<String> getIndexRxJava() {
        Observable.fromIterable(Arrays.asList("Hello", "World"))
            .map(s -> s.toUpperCase())
            .observeOn(io.reactivex.schedulers.Schedulers.trampoline())
            .doOnNext(s -> logger.info("String: {}", s))
            .subscribe();
        return Observable.fromIterable(getThreads());
    }

    // Triggers a reactive MongoDB query, then reports the live threads
    @GetMapping("/threads/mongodb")
    public Flux<String> getIndexMongo() {
        personRepository.findAll()
            .doOnNext(p -> logger.info("Person: {}", p))
            .subscribe();
        return Flux.fromIterable(getThreads());
    }

    // Produces and consumes messages with Reactor Kafka, then reports the live threads
    @GetMapping("/threads/reactor-kafka")
    public Flux<String> getIndexKafka() {
        Map<String, Object> producerProps = new HashMap<>();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        SenderOptions<Integer, String> senderOptions = SenderOptions.create(producerProps);
        KafkaSender<Integer, String> sender = KafkaSender.create(senderOptions);
        Flux<SenderRecord<Integer, String, Integer>> outboundFlux = Flux.range(1, 10)
            .map(i -> SenderRecord.create(new ProducerRecord<>("reactive-test", i, "Message_" + i), i));
        sender.send(outboundFlux)
            .subscribe();

        Map<String, Object> consumerProps = new HashMap<>();
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "my-consumer");
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        ReceiverOptions<Integer, String> receiverOptions = ReceiverOptions.create(consumerProps);
        receiverOptions.subscription(Collections.singleton("reactive-test"));
        KafkaReceiver<Integer, String> receiver = KafkaReceiver.create(receiverOptions);
        Flux<ReceiverRecord<Integer, String>> inboundFlux = receiver.receive();
        inboundFlux.subscribe(r -> {
            logger.info("Received message: {}", r.value());
            r.receiverOffset()
                .acknowledge();
        });
        return Flux.fromIterable(getThreads());
    }

    @GetMapping("/index")
    public Mono<String> getIndex() {
        return Mono.just("Hello world!");
    }

    // Snapshot of all live JVM threads: name, state, priority and daemon flag
    private List<String> getThreads() {
        return Thread.getAllStackTraces()
            .keySet()
            .stream()
            .map(t -> String.format("%-20s \t %s \t %d \t %s\n", t.getName(), t.getState(), t.getPriority(), t.isDaemon() ? "Daemon" : "Normal"))
            .collect(Collectors.toList());
    }
}

spring-webflux-threads/src/main/java/com/baeldung/webflux/Person.java
@@ -0,0 +1,27 @@
package com.baeldung.webflux;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
public class Person {

    @Id
    String id;

    public Person(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Override
    public String toString() {
        return "Person{" + "id='" + id + '\'' + '}';
    }
}

spring-webflux-threads/src/main/java/com/baeldung/webflux/PersonRepository.java
@@ -0,0 +1,6 @@
package com.baeldung.webflux;

import org.springframework.data.mongodb.repository.ReactiveMongoRepository;

public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
}

spring-webflux-threads/src/main/resources/logback.xml
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
            </pattern>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="STDOUT" />
    </root>
</configuration>