This commit is contained in:
Raksha Rao 2018-01-25 13:39:11 +05:30
commit b21a3705b6
17 changed files with 281 additions and 76 deletions

View File

@ -2,46 +2,48 @@ package com.baeldung.spliteratorAPI;
import java.util.List; import java.util.List;
import java.util.Spliterator; import java.util.Spliterator;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer; import java.util.function.Consumer;
public class RelatedAuthorSpliterator implements Spliterator<Author> { public class RelatedAuthorSpliterator implements Spliterator<Author> {
private final List<Author> list; private final List<Author> list;
private int current = 0; AtomicInteger current = new AtomicInteger();
public RelatedAuthorSpliterator(List<Author> list) { public RelatedAuthorSpliterator(List<Author> list) {
this.list = list; this.list = list;
} }
@Override @Override
public boolean tryAdvance(Consumer<? super Author> action) { public boolean tryAdvance(Consumer<? super Author> action) {
action.accept(list.get(current++));
return current < list.size();
}
@Override action.accept(list.get(current.getAndIncrement()));
public Spliterator<Author> trySplit() { return current.get() < list.size();
int currentSize = list.size() - current; }
if (currentSize < 10) {
return null;
}
for (int splitPos = currentSize / 2 + current; splitPos < list.size(); splitPos++) {
if (list.get(splitPos)
.getRelatedArticleId() == 0) {
Spliterator<Author> spliterator = new RelatedAuthorSpliterator(list.subList(current, splitPos));
current = splitPos;
return spliterator;
}
}
return null;
}
@Override @Override
public long estimateSize() { public Spliterator<Author> trySplit() {
return list.size() - current; int currentSize = list.size() - current.get();
} if (currentSize < 10) {
return null;
}
for (int splitPos = currentSize / 2 + current.intValue(); splitPos < list.size(); splitPos++) {
if (list.get(splitPos).getRelatedArticleId() == 0) {
Spliterator<Author> spliterator = new RelatedAuthorSpliterator(list.subList(current.get(), splitPos));
current.set(splitPos);
return spliterator;
}
}
return null;
}
@Override
public long estimateSize() {
return list.size() - current.get();
}
@Override
public int characteristics() {
return CONCURRENT;
}
@Override
public int characteristics() {
return SIZED + CONCURRENT;
}
} }

View File

@ -2,22 +2,25 @@ package com.baeldung.reactive.websocket;
import java.net.URI; import java.net.URI;
import java.time.Duration; import java.time.Duration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.reactive.socket.WebSocketMessage; import org.springframework.web.reactive.socket.WebSocketMessage;
import org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient; import org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient;
import org.springframework.web.reactive.socket.client.WebSocketClient; import org.springframework.web.reactive.socket.client.WebSocketClient;
import reactor.core.publisher.Mono; import reactor.core.publisher.Mono;
@SpringBootApplication
public class ReactiveJavaClientWebSocket { public class ReactiveJavaClientWebSocket {
public static void main(String[] args) throws InterruptedException { public static void main(String[] args) throws InterruptedException {
WebSocketClient client = new ReactorNettyWebSocketClient(); WebSocketClient client = new ReactorNettyWebSocketClient();
client.execute(URI.create("ws://localhost:8080/event-emitter"), session -> session.send(Mono.just(session.textMessage("event-me-from-spring-reactive-client"))) client.execute(
URI.create("ws://localhost:8080/event-emitter"),
session -> session.send(
Mono.just(session.textMessage("event-spring-reactive-client-websocket")))
.thenMany(session.receive() .thenMany(session.receive()
.map(WebSocketMessage::getPayloadAsText) .map(WebSocketMessage::getPayloadAsText)
.log()) .log())
.then()) .then())
.block(Duration.ofSeconds(10L)); .block(Duration.ofSeconds(10L));
} }
} }

View File

@ -12,7 +12,6 @@ import org.springframework.web.reactive.socket.WebSocketMessage;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono; import reactor.core.publisher.Mono;
import javax.annotation.PostConstruct;
import java.time.Duration; import java.time.Duration;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.UUID; import java.util.UUID;
@ -20,37 +19,18 @@ import java.util.UUID;
@Component @Component
public class ReactiveWebSocketHandler implements WebSocketHandler { public class ReactiveWebSocketHandler implements WebSocketHandler {
private Flux<Event> eventFlux; private Flux<Event> eventFlux = Flux.generate(e -> {
private Flux<Event> intervalFlux; Event event = new Event(UUID.randomUUID().toString(), LocalDateTime.now().toString());
e.next(event);
});
/** private Flux<Event> intervalFlux = Flux.interval(Duration.ofMillis(1000L)).zipWith(eventFlux, (time, event) -> event);
* Here we prepare a Flux that will emit a message every second
*/
@PostConstruct
private void init() throws InterruptedException {
eventFlux = Flux.generate(e -> { private ObjectMapper json = new ObjectMapper();
Event event = new Event(UUID.randomUUID()
.toString(),
LocalDateTime.now()
.toString());
e.next(event);
});
intervalFlux = Flux.interval(Duration.ofMillis(1000L))
.zipWith(eventFlux, (time, event) -> event);
}
/**
* On each new client session, send the message flux to the client.
* Spring subscribes to the flux and send every new flux event to the WebSocketSession object
* @param session
* @return Mono<Void>
*/
@Override @Override
public Mono<Void> handle(WebSocketSession webSocketSession) { public Mono<Void> handle(WebSocketSession webSocketSession) {
ObjectMapper json = new ObjectMapper();
return webSocketSession.send(intervalFlux.map(event -> { return webSocketSession.send(intervalFlux.map(event -> {
try { try {
String jsonEvent = json.writeValueAsString(event); String jsonEvent = json.writeValueAsString(event);
@ -60,12 +40,9 @@ public class ReactiveWebSocketHandler implements WebSocketHandler {
e.printStackTrace(); e.printStackTrace();
return ""; return "";
} }
}) }).map(webSocketSession::textMessage))
.map(webSocketSession::textMessage))
.and(webSocketSession.receive() .and(webSocketSession.receive().map(WebSocketMessage::getPayloadAsText).log());
.map(WebSocketMessage::getPayloadAsText)
.log());
} }
} }

View File

@ -12,13 +12,11 @@
Client-side service discovery allows services to find and communicate with each other without hardcoding hostname and port. The only fixed point in such an architecture consists of a service registry with which each service has to register. Client-side service discovery allows services to find and communicate with each other without hardcoding hostname and port. The only fixed point in such an architecture consists of a service registry with which each service has to register.
### Relevant Articles:
- [Intro to Spring Cloud Netflix - Hystrix](http://www.baeldung.com/spring-cloud-netflix-hystrix) - [Intro to Spring Cloud Netflix - Hystrix](http://www.baeldung.com/spring-cloud-netflix-hystrix)
- [Dockerizing a Spring Boot Application](http://www.baeldung.com/dockerizing-spring-boot-application) - [Dockerizing a Spring Boot Application](http://www.baeldung.com/dockerizing-spring-boot-application)
- [Introduction to Spring Cloud Rest Client with Netflix Ribbon](http://www.baeldung.com/spring-cloud-rest-client-with-netflix-ribbon) - [Introduction to Spring Cloud Rest Client with Netflix Ribbon](http://www.baeldung.com/spring-cloud-rest-client-with-netflix-ribbon)
- [A Quick Guide to Spring Cloud Consul](http://www.baeldung.com/spring-cloud-consul) - [A Quick Guide to Spring Cloud Consul](http://www.baeldung.com/spring-cloud-consul)
### Relevant Articles:
- [Introduction to Spring Cloud Rest Client with Netflix Ribbon](http://www.baeldung.com/spring-cloud-rest-client-with-netflix-ribbon)
- [An Introduction to Spring Cloud Zookeeper](http://www.baeldung.com/spring-cloud-zookeeper) - [An Introduction to Spring Cloud Zookeeper](http://www.baeldung.com/spring-cloud-zookeeper)
- [Using a Spring Cloud App Starter](http://www.baeldung.com/using-a-spring-cloud-app-starter)
- [Spring Cloud Connectors and Heroku](http://www.baeldung.com/spring-cloud-heroku) - [Spring Cloud Connectors and Heroku](http://www.baeldung.com/spring-cloud-heroku)

View File

@ -0,0 +1,46 @@
#!/usr/bin/env bash
# For Ubuntu 14.04
# Inspired from: https://github.com/curran/setupHadoop/blob/master/setupHadoop.sh
# Use from the user directory

# Install Java
sudo apt-get update
sudo add-apt-repository -y ppa:webupd8team/java
sudo apt-get install -y oracle-java8-installer

# Install Hadoop
curl -O http://mirror.cogentco.com/pub/apache/hadoop/common/hadoop-2.8.2/hadoop-2.8.2.tar.gz
tar xfz hadoop-2.8.2.tar.gz
sudo mv hadoop-2.8.2 /usr/local/hadoop
rm hadoop-2.8.2.tar.gz

# Environmental Variables
echo export JAVA_HOME=/usr/lib/jvm/java-8-oracle >> ~/.bashrc
echo export HADOOP_PREFIX=/usr/local/hadoop >> ~/.bashrc
echo export PATH=\$PATH:/usr/local/hadoop/bin >> ~/.bashrc
echo export PATH=\$PATH:/usr/local/hadoop/sbin >> ~/.bashrc
source ~/.bashrc

# Copy configuration files
cp master/* /usr/local/hadoop/etc/hadoop/

# Format HDFS
hdfs namenode -format

# SSH keys for Hadoop to use.
# -P '' gives an EMPTY passphrase so the Hadoop daemons can ssh non-interactively;
# -f names the PRIVATE key file (ssh-keygen writes the public half to "<file>.pub").
# The previous version set a passphrase and pointed -f at id_rsa.pub, which wrote
# the private key into id_rsa.pub and then installed the PRIVATE key as
# authorized_keys — passwordless login never worked.
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
# Authorize the PUBLIC key; append so any pre-existing authorized keys are kept.
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys

# SSH
ssh localhost
# authenticate with osboxes.org

# Start NameNode daemon and DataNode daemon
start-dfs.sh
# stop-dfs.sh

# Install Maven
sudo apt-get install maven

# Access Hadoop - http://localhost:50070

View File

@ -0,0 +1,3 @@
.idea
*/target/*
*.iml

View File

@ -0,0 +1,55 @@
<?xml version="1.0"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>com.baeldung.twitterhdfs</groupId>
<artifactId>twitterhdfs</artifactId>
<packaging>jar</packaging>
<version>1.0.0</version>
<name>twitterhdfs</name>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>1.5.8.RELEASE</version>
</parent>
<dependencies>
<!-- Spring Stream Starter Apps -->
<dependency>
<groupId>org.springframework.cloud.stream.app</groupId>
<artifactId>spring-cloud-starter-stream-source-twitterstream</artifactId>
<version>1.3.1.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud.stream.app</groupId>
<artifactId>spring-cloud-starter-stream-sink-hdfs</artifactId>
<version>1.3.1.RELEASE</version>
</dependency>
<!-- JSTL/JSP -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>jstl</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
<finalName>twitterhdfs</finalName>
</build>
</project>

View File

@ -0,0 +1,18 @@
package com.baeldung.twitterhdfs.aggregate;
import com.baeldung.twitterhdfs.processor.ProcessorApp;
import com.baeldung.twitterhdfs.source.SourceApp;
import com.baeldung.twitterhdfs.sink.SinkApp;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.aggregate.AggregateApplicationBuilder;
@SpringBootApplication
public class AggregateApp {

    /**
     * Entry point: composes the twitterstream source, a pass-through
     * processor and the HDFS sink into one in-JVM aggregate application
     * using Spring Cloud Stream's AggregateApplicationBuilder.
     *
     * @param args forwarded to the aggregate application on startup
     */
    public static void main(String[] args) {
        AggregateApplicationBuilder builder = new AggregateApplicationBuilder();
        builder
            .from(SourceApp.class)
            .args("--fixedDelay=5000")
            .via(ProcessorApp.class)
            .to(SinkApp.class)
            .args("--debug=true")
            .run(args);
    }
}

View File

@ -0,0 +1,20 @@
package com.baeldung.twitterhdfs.processor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Processor;
import org.springframework.integration.annotation.Transformer;
@SpringBootApplication
@EnableBinding(Processor.class)
public class ProcessorApp {

    // SLF4J convention: one private, static, final logger per class.
    private static final Logger log = LoggerFactory.getLogger(ProcessorApp.class);

    /**
     * Pass-through transformer: logs that a message arrived and forwards the
     * payload unchanged from the processor's input channel to its output channel.
     *
     * @param payload the incoming message payload
     * @return the same payload, unmodified
     */
    @Transformer(inputChannel = Processor.INPUT, outputChannel = Processor.OUTPUT)
    public String processMessage(String payload) {
        log.info("Payload received!");
        return payload;
    }
}

View File

@ -0,0 +1,22 @@
package com.baeldung.twitterhdfs.sink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.app.hdfs.sink.HdfsSinkConfiguration;
import org.springframework.cloud.stream.messaging.Sink;
import org.springframework.context.annotation.Import;
import org.springframework.integration.annotation.ServiceActivator;
@SpringBootApplication
@EnableBinding(Sink.class)
@Import(HdfsSinkConfiguration.class)
public class SinkApp {

    // SLF4J convention: one private, static, final logger per class.
    private static final Logger log = LoggerFactory.getLogger(SinkApp.class);

    /**
     * Logs every payload arriving on the sink's input channel. The actual HDFS
     * write is handled by the imported {@link HdfsSinkConfiguration}.
     *
     * @param payload the incoming message payload
     */
    @ServiceActivator(inputChannel = Sink.INPUT)
    public void loggerSink(Object payload) {
        log.info("Received: " + payload);
    }
}

View File

@ -0,0 +1,26 @@
package com.baeldung.twitterhdfs.source;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.app.twitterstream.source.TwitterstreamSourceConfiguration;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.context.annotation.Import;
import org.springframework.integration.annotation.InboundChannelAdapter;
import java.text.SimpleDateFormat;
import java.util.Date;
@SpringBootApplication
@EnableBinding(Source.class)
@Import(TwitterstreamSourceConfiguration.class)
public class SourceApp {

    // SLF4J convention: one private, static, final logger per class.
    private static final Logger log = LoggerFactory.getLogger(SourceApp.class);

    /**
     * Emits the current date/time (default-locale format) on the source's
     * output channel each time the inbound channel adapter polls.
     *
     * A new SimpleDateFormat is created per invocation on purpose:
     * SimpleDateFormat is not thread-safe, so it must not be cached in a
     * shared static field.
     *
     * @return the formatted current timestamp
     */
    @InboundChannelAdapter(value = Source.OUTPUT)
    public String timerMessageSource() {
        return new SimpleDateFormat().format(new Date());
    }
}

View File

@ -0,0 +1,6 @@
hdfs.fs-uri=hdfs://127.0.0.1:50010/
twitter.credentials.access-token=
twitter.credentials.access-token-secret=
twitter.credentials.consumer-key=
twitter.credentials.consumer-secret=

View File

@ -0,0 +1 @@
hdfs.fs-uri=hdfs://127.0.0.1:50010/

View File

@ -0,0 +1,11 @@
# Git spring-cloud-stream-app-starters: clone the HDFS starter sources
# https://github.com/spring-cloud-stream-app-starters/hdfs/blob/master/spring-cloud-starter-stream-sink-hdfs/README.adoc
git clone https://github.com/spring-cloud-stream-app-starters/hdfs.git
# Build it (the generateApps profile produces runnable sink apps under apps/)
./mvnw clean install -PgenerateApps
# Run it
cd apps
# Optionally inject application.properties prior to build
# --fsUri must point at the HDFS namenode this sink writes to
java -jar hdfs-sink.jar --fsUri=hdfs://127.0.0.1:50010/

View File

@ -0,0 +1,4 @@
twitter.credentials.access-token=
twitter.credentials.access-token-secret=
twitter.credentials.consumer-key=
twitter.credentials.consumer-secret=

View File

@ -0,0 +1,12 @@
# Git spring-cloud-stream-app-starters: clone the twitterstream starter sources
# NOTE(review): the original link pointed at the hdfs starter's README even
# though this script clones the twitter repo; the twitter starter's docs are
# in that repository — verify the exact README path there.
# https://github.com/spring-cloud-stream-app-starters/twitter
git clone https://github.com/spring-cloud-stream-app-starters/twitter.git
# Build it (the generateApps profile produces runnable source apps under apps/)
./mvnw clean install -PgenerateApps
# Run it
cd apps
# Optionally inject application.properties prior to build
# Supply the Twitter API credentials for the stream source
java -jar twitter_stream_source.jar --consumerKey=<CONSUMER_KEY> --consumerSecret=<CONSUMER_SECRET> \
    --accessToken=<ACCESS_TOKEN> --accessTokenSecret=<ACCESS_TOKEN_SECRET>

View File

@ -2,3 +2,4 @@
- [HandlerAdapters in Spring MVC](http://www.baeldung.com/spring-mvc-handler-adapters) - [HandlerAdapters in Spring MVC](http://www.baeldung.com/spring-mvc-handler-adapters)
- [Template Engines for Spring](http://www.baeldung.com/spring-template-engines) - [Template Engines for Spring](http://www.baeldung.com/spring-template-engines)
- [Spring 5 and Servlet 4 The PushBuilder](http://www.baeldung.com/spring-5-push)