diff --git a/.gitignore b/.gitignore index a05cee53aa..8674dfd427 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,4 @@ build/ # Spring Bootstrap artifacts dependency-reduced-pom.xml +README.html diff --git a/README.ftl.md b/README.adoc similarity index 51% rename from README.ftl.md rename to README.adoc index 09b64f7e1d..fccd892ebd 100644 --- a/README.ftl.md +++ b/README.adoc @@ -1,86 +1,116 @@ -<#assign project_id="gs-batch-processing"> +:spring_boot_version: 0.5.0.M6 +:Component: http://docs.spring.io/spring/docs/current/javadoc-api/org/springframework/stereotype/Component.html +:EnableAutoConfiguration: http://docs.spring.io/spring-boot/docs/{spring_boot_version}/api/org/springframework/boot/autoconfigure/EnableAutoConfiguration.html +:SpringApplication: http://docs.spring.io/spring-boot/docs/{spring_boot_version}/api/org/springframework/boot/SpringApplication.html +:toc: +:icons: font +:source-highlighter: prettify +:project_id: gs-batch-processing This guide walks you through the process of creating a basic batch-driven solution. -What you'll build ------------------ +== What you'll build You'll build a service that imports data from a CSV spreadsheet, transforms it with custom code, and stores the final results in a database. 
-What you'll need ----------------- +== What you'll need - - About 15 minutes - - <@prereq_editor_jdk_buildtools/> +include::https://raw.github.com/spring-guides/getting-started-macros/master/prereq_editor_jdk_buildtools.adoc[] -## <@how_to_complete_this_guide jump_ahead='Create a business class'/> +:jump_ahead: Create a business class +include::https://raw.github.com/spring-guides/getting-started-macros/master/how_to_complete_this_guide.adoc[] - -Set up the project ------------------- -<@build_system_intro/> +[[scratch]] +== Set up the project +include::https://raw.github.com/spring-guides/getting-started-macros/master/build_system_intro.adoc[] -<@create_directory_structure_hello/> +include::https://raw.github.com/spring-guides/getting-started-macros/master/create_directory_structure_hello.adoc[] -<@create_both_builds/> +include::https://raw.github.com/spring-guides/getting-started-macros/master/create_both_builds.adoc[] -<@bootstrap_starter_pom_disclaimer/> +`build.gradle` +// AsciiDoc source formatting doesn't support groovy, so using java instead +[source,java] +---- +include::initial/build.gradle[] +---- + +include::https://raw.github.com/spring-guides/getting-started-macros/master/bootstrap_starter_pom_disclaimer.adoc[] -### Create business data Typically your customer or a business analyst supplies a spreadsheet. In this case, you make it up. - <@snippet path="src/main/resources/sample-data.csv" prefix="initial"/> +`src/main/resources/sample-data.csv` +[source,csv] +---- +include::initial/src/main/resources/sample-data.csv[] +---- This spreadsheet contains a first name and a last name on each row, separated by a comma. This is a fairly common pattern that Spring handles out-of-the-box, as you will see. -### Define the destination for your data Next, you write a SQL script to create a table to store the data. 
- <@snippet path="src/main/resources/schema-all.sql" prefix="initial"/> +`src/main/resources/schema-all.sql` +[source,sql] +---- +include::initial/src/main/resources/schema-all.sql[] +---- -> **Note:** Spring Boot runs `schema-@@platform@@.sql` automatically during startup. `-all` is the default for all platforms. +NOTE: Spring Boot runs `schema-@@platform@@.sql` automatically during startup. `-all` is the default for all platforms. - -Create a business class ------------------------ +[[initial]] +== Create a business class Now that you see the format of data inputs and outputs, you write code to represent a row of data. - <@snippet path="src/main/java/hello/Person.java" prefix="complete"/> +`src/main/java/hello/Person.java` +[source,java] +---- +include::complete/src/main/java/hello/Person.java[] +---- You can instantiate the `Person` class either with first and last name through a constructor, or by setting the properties. -Create an intermediate processor --------------------------------- +== Create an intermediate processor A common paradigm in batch processing is to ingest data, transform it, and then pipe it out somewhere else. Here you write a simple transformer that converts the names to uppercase. - <@snippet path="src/main/java/hello/PersonItemProcessor.java" prefix="complete"/> +`src/main/java/hello/PersonItemProcessor.java` +[source,java] +---- +include::complete/src/main/java/hello/PersonItemProcessor.java[] +---- `PersonItemProcessor` implements Spring Batch's `ItemProcessor` interface. This makes it easy to wire the code into a batch job that you define further down in this guide. According to the interface, you receive an incoming `Person` object, after which you transform it to an upper-cased `Person`. -> **Note:** There is no requirement that the input and output types be the same. In fact, after one source of data is read, sometimes the application's data flow needs a different data type. 
+NOTE: There is no requirement that the input and output types be the same. In fact, after one source of data is read, sometimes the application's data flow needs a different data type. -Put together a batch job ------------------------- +== Put together a batch job Now you put together the actual batch job. Spring Batch provides many utility classes that reduce the need to write custom code. Instead, you can focus on the business logic. - <@snippet path="src/main/java/hello/BatchConfiguration.java" prefix="complete"/> +`src/main/java/hello/BatchConfiguration.java` +[source,java] +---- +include::complete/src/main/java/hello/BatchConfiguration.java[] +---- For starters, the `@EnableBatchProcessing` annotation adds many critical beans that support jobs and saves you a lot of leg work. This example uses a memory-based database (provided by `@EnableBatchProcessing`), meaning that when it's done, the data is gone. Break it down: - <@snippet "src/main/java/hello/BatchConfiguration.java" "readerwriterprocessor" "/complete"/> - +`src/main/java/hello/BatchConfiguration.java` +[source,java] +---- +include::complete/src/main/java/hello/BatchConfiguration.java[tag=readerwriterprocessor] +---- +. The first chunk of code defines the input, processor, and output. - `reader()` creates an `ItemReader`. It looks for a file called `sample-data.csv` and parses each line item with enough information to turn it into a `Person`. - `processor()` creates an instance of our `PersonItemProcessor` you defined earlier, meant to uppercase the data. @@ -88,43 +118,50 @@ The first chunk of code defines the input, processor, and output. The next chunk focuses on the actual job configuration. - <@snippet "src/main/java/hello/BatchConfiguration.java" "jobstep" "/complete"/> - +`src/main/java/hello/BatchConfiguration.java` +[source,java] +---- +include::complete/src/main/java/hello/BatchConfiguration.java[tag=jobstep] +---- +. 
The first method defines the job and the second one defines a single step. Jobs are built from steps, where each step can involve a reader, a processor, and a writer. In this job definition, you need an incrementer because jobs use a database to maintain execution state. You then list each step, of which this job has only one step. The job ends, and the Java API produces a perfectly configured job. In the step definition, you define how much data to write at a time. In this case, it writes up to ten records at a time. Next, you configure the reader, processor, and writer using the injected bits from earlier. -> **Note:** chunk() is prefixed `` because it's a generic method. This represents the input and output types of each "chunk" of processing, and lines up with `ItemReader` and `ItemWriter`. +NOTE: chunk() is prefixed `` because it's a generic method. This represents the input and output types of each "chunk" of processing, and lines up with `ItemReader` and `ItemWriter`. -Make the application executable -------------------------------- +== Make the application executable Although batch processing can be embedded in web apps and WAR files, the simpler approach demonstrated below creates a standalone application. You package everything in a single, executable JAR file, driven by a good old Java `main()` method. -### Create an Application class - <@snippet path="src/main/java/hello/Application.java" prefix="complete"/> +`src/main/java/hello/Application.java` +[source,java] +---- +include::complete/src/main/java/hello/Application.java[] +---- -The `main()` method defers to the [`SpringApplication`][] helper class, providing `Application.class` as an argument to its `run()` method. This tells Spring to read the annotation metadata from `Application` and to manage it as a component in the [Spring application context][u-application-context]. 
+The `main()` method defers to the {SpringApplication}[`SpringApplication`] helper class, providing `Application.class` as an argument to its `run()` method. This tells Spring to read the annotation metadata from `Application` and to manage it as a component in the link:/understanding/application-context[Spring application context]. -The `@ComponentScan` annotation tells Spring to search recursively through the `hello` package and its children for classes marked directly or indirectly with Spring's [`@Component`][] annotation. This directive ensures that Spring finds and registers `BatchConfiguration`, because it is marked with `@Configuration`, which in turn is a kind of `@Component` annotation. +The `@ComponentScan` annotation tells Spring to search recursively through the `hello` package and its children for classes marked directly or indirectly with Spring's {Component}[`@Component`] annotation. This directive ensures that Spring finds and registers `BatchConfiguration`, because it is marked with `@Configuration`, which in turn is a kind of `@Component` annotation. -The [`@EnableAutoConfiguration`][] annotation switches on reasonable default behaviors based on the content of your classpath. For example, it looks for any class that implements the `CommandLineRunner` interface and invokes its `run()` method. In this case, it runs the demo code for this guide. +The {EnableAutoConfiguration}[`@EnableAutoConfiguration`] annotation switches on reasonable default behaviors based on the content of your classpath. For example, it looks for any class that implements the `CommandLineRunner` interface and invokes its `run()` method. In this case, it runs the demo code for this guide. For demonstration purposes, there is code to create a `JdbcTemplate`, query the database, and print out the names of people the batch job inserts. 
-<@build_an_executable_jar_subhead/> +include::https://raw.github.com/spring-guides/getting-started-macros/master/build_an_executable_jar_subhead.adoc[] -<@build_an_executable_jar_with_both/> +include::https://raw.github.com/spring-guides/getting-started-macros/master/build_an_executable_jar_with_both.adoc[] -<@run_the_application_with_both module="batch job"/> +:module: batch job +include::https://raw.github.com/spring-guides/getting-started-macros/master/run_the_application_with_both.adoc[] The job prints out a line for each person that gets transformed. After the job runs, you can also see the output from querying the database. -```sh +.... Converting (firstName: Jill, lastName: Doe) into (firstName: JILL, lastName: DOE) Converting (firstName: Joe, lastName: Doe) into (firstName: JOE, lastName: DOE) Converting (firstName: Justin, lastName: Doe) into (firstName: JUSTIN, lastName: DOE) @@ -135,16 +172,12 @@ Found in the database. Found in the database. Found in the database. Found in the database. -``` +.... -Summary -------- +== Summary Congratulations! You built a batch job that ingested data from a spreadsheet, processed it, and wrote it to a database. -[`SpringApplication`]: http://docs.spring.io/spring-boot/docs/0.5.0.M3/api/org/springframework/boot/SpringApplication.html -[`@EnableAutoConfiguration`]: http://docs.spring.io/spring-boot/docs/0.5.0.M3/api/org/springframework/boot/autoconfigure/EnableAutoConfiguration.html -[`@Component`]: http://docs.spring.io/spring/docs/current/javadoc-api/org/springframework/stereotype/Component.html diff --git a/README.md b/README.md deleted file mode 100644 index c0d9014b4a..0000000000 --- a/README.md +++ /dev/null @@ -1,521 +0,0 @@ -This guide walks you through the process of creating a basic batch-driven solution. - -What you'll build ------------------ - -You'll build a service that imports data from a CSV spreadsheet, transforms it with custom code, and stores the final results in a database. 
- - -What you'll need ----------------- - - - About 15 minutes - - A favorite text editor or IDE - - [JDK 6][jdk] or later - - [Gradle 1.8+][gradle] or [Maven 3.0+][mvn] - - You can also import the code from this guide as well as view the web page directly into [Spring Tool Suite (STS)][gs-sts] and work your way through it from there. - -[jdk]: http://www.oracle.com/technetwork/java/javase/downloads/index.html -[gradle]: http://www.gradle.org/ -[mvn]: http://maven.apache.org/download.cgi -[gs-sts]: /guides/gs/sts - -How to complete this guide --------------------------- - -Like all Spring's [Getting Started guides](/guides/gs), you can start from scratch and complete each step, or you can bypass basic setup steps that are already familiar to you. Either way, you end up with working code. - -To **start from scratch**, move on to [Set up the project](#scratch). - -To **skip the basics**, do the following: - - - [Download][zip] and unzip the source repository for this guide, or clone it using [Git][u-git]: -`git clone https://github.com/spring-guides/gs-batch-processing.git` - - cd into `gs-batch-processing/initial`. - - Jump ahead to [Create a business class](#initial). - -**When you're finished**, you can check your results against the code in `gs-batch-processing/complete`. -[zip]: https://github.com/spring-guides/gs-batch-processing/archive/master.zip -[u-git]: /understanding/Git - - - -Set up the project ------------------- -First you set up a basic build script. You can use any build system you like when building apps with Spring, but the code you need to work with [Gradle](http://gradle.org) and [Maven](https://maven.apache.org) is included here. If you're not familiar with either, refer to [Building Java Projects with Gradle](/guides/gs/gradle/) or [Building Java Projects with Maven](/guides/gs/maven). 
- -### Create the directory structure - -In a project directory of your choosing, create the following subdirectory structure; for example, with `mkdir -p src/main/java/hello` on *nix systems: - - └── src - └── main - └── java - └── hello - - -### Create a Gradle build file -Below is the [initial Gradle build file](https://github.com/spring-guides/gs-batch-processing/blob/master/initial/build.gradle). But you can also use Maven. The pom.xml file is included [right here](https://github.com/spring-guides/gs-batch-processing/blob/master/initial/pom.xml). If you are using [Spring Tool Suite (STS)][gs-sts], you can import the guide directly. - -`build.gradle` -```gradle -buildscript { - repositories { - maven { url "http://repo.spring.io/libs-snapshot" } - mavenLocal() - } -} - -apply plugin: 'java' -apply plugin: 'eclipse' -apply plugin: 'idea' - -jar { - baseName = 'gs-batch-processing' - version = '0.1.0' -} - -repositories { - mavenCentral() - maven { url "http://repo.spring.io/libs-snapshot" } -} - -dependencies { - compile("org.springframework.boot:spring-boot-starter-batch:0.5.0.M6") - compile("org.hsqldb:hsqldb") - testCompile("junit:junit:4.11") -} - -task wrapper(type: Wrapper) { - gradleVersion = '1.8' -} -``` - -[gs-sts]: /guides/gs/sts - -> **Note:** This guide is using [Spring Boot](/guides/gs/spring-boot/). - -### Create business data - -Typically your customer or a business analyst supplies a spreadsheet. In this case, you make it up. - -`src/main/resources/sample-data.csv` -```csv -Jill,Doe -Joe,Doe -Justin,Doe -Jane,Doe -John,Doe -``` - -This spreadsheet contains a first name and a last name on each row, separated by a comma. This is a fairly common pattern that Spring handles out-of-the-box, as you will see. - -### Define the destination for your data - -Next, you write a SQL script to create a table to store the data. 
- -`src/main/resources/schema-all.sql` -```sql -DROP TABLE people IF EXISTS; - -CREATE TABLE people ( - person_id BIGINT IDENTITY NOT NULL PRIMARY KEY, - first_name VARCHAR(20), - last_name VARCHAR(20) -); -``` - -> **Note:** Spring Boot runs `schema-@@platform@@.sql` automatically during startup. `-all` is the default for all platforms. - - - -Create a business class ------------------------ - -Now that you see the format of data inputs and outputs, you write code to represent a row of data. - -`src/main/java/hello/Person.java` -```java -package hello; - -public class Person { - private String lastName; - private String firstName; - - public Person() { - - } - - public Person(String firstName, String lastName) { - this.firstName = firstName; - this.lastName = lastName; - } - - public void setFirstName(String firstName) { - this.firstName = firstName; - } - - public String getFirstName() { - return firstName; - } - - public String getLastName() { - return lastName; - } - - public void setLastName(String lastName) { - this.lastName = lastName; - } - - @Override - public String toString() { - return "firstName: " + firstName + ", lastName: " + lastName; - } - -} -``` - -You can instantiate the `Person` class either with first and last name through a constructor, or by setting the properties. - - -Create an intermediate processor --------------------------------- - -A common paradigm in batch processing is to ingest data, transform it, and then pipe it out somewhere else. Here you write a simple transformer that converts the names to uppercase. 
- -`src/main/java/hello/PersonItemProcessor.java` -```java -package hello; - -import org.springframework.batch.item.ItemProcessor; - -public class PersonItemProcessor implements ItemProcessor { - - @Override - public Person process(final Person person) throws Exception { - final String firstName = person.getFirstName().toUpperCase(); - final String lastName = person.getLastName().toUpperCase(); - - final Person transformedPerson = new Person(firstName, lastName); - - System.out.println("Converting (" + person + ") into (" + transformedPerson + ")"); - - return transformedPerson; - } - -} -``` - -`PersonItemProcessor` implements Spring Batch's `ItemProcessor` interface. This makes it easy to wire the code into a batch job that you define further down in this guide. According to the interface, you receive an incoming `Person` object, after which you transform it to an upper-cased `Person`. - -> **Note:** There is no requirement that the input and output types be the same. In fact, after one source of data is read, sometimes the application's data flow needs a different data type. - - -Put together a batch job ------------------------- - -Now you put together the actual batch job. Spring Batch provides many utility classes that reduce the need to write custom code. Instead, you can focus on the business logic. 
- -`src/main/java/hello/BatchConfiguration.java` -```java -package hello; - -import javax.sql.DataSource; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; -import org.springframework.batch.core.launch.support.RunIdIncrementer; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider; -import org.springframework.batch.item.database.JdbcBatchItemWriter; -import org.springframework.batch.item.file.FlatFileItemReader; -import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper; -import org.springframework.batch.item.file.mapping.DefaultLineMapper; -import org.springframework.batch.item.file.transform.DelimitedLineTokenizer; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.ClassPathResource; -import org.springframework.jdbc.core.JdbcTemplate; - -@Configuration -@EnableBatchProcessing -public class BatchConfiguration { - - @Bean - public ItemReader reader() { - FlatFileItemReader reader = new FlatFileItemReader(); - reader.setResource(new ClassPathResource("sample-data.csv")); - reader.setLineMapper(new DefaultLineMapper() {{ - setLineTokenizer(new DelimitedLineTokenizer() {{ - setNames(new String[] { "firstName", "lastName" }); - }}); - setFieldSetMapper(new BeanWrapperFieldSetMapper() {{ - setTargetType(Person.class); - }}); - }}); - return reader; - } - - @Bean - public ItemProcessor processor() { - return new PersonItemProcessor(); - } - - @Bean - public ItemWriter 
writer(DataSource dataSource) { - JdbcBatchItemWriter writer = new JdbcBatchItemWriter(); - writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider()); - writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)"); - writer.setDataSource(dataSource); - return writer; - } - - @Bean - public Job importUserJob(JobBuilderFactory jobs, Step s1) { - return jobs.get("importUserJob") - .incrementer(new RunIdIncrementer()) - .flow(s1) - .end() - .build(); - } - - @Bean - public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader reader, - ItemWriter writer, ItemProcessor processor) { - return stepBuilderFactory.get("step1") - . chunk(10) - .reader(reader) - .processor(processor) - .writer(writer) - .build(); - } - - @Bean - public JdbcTemplate jdbcTemplate(DataSource dataSource) { - return new JdbcTemplate(dataSource); - } - -} -``` - -For starters, the `@EnableBatchProcessing` annotation adds many critical beans that support jobs and saves you a lot of leg work. This example uses a memory-based database (provided by `@EnableBatchProcessing`), meaning that when it's done, the data is gone. 
- -Break it down: - -`src/main/java/hello/BatchConfiguration.java` -```java - @Bean - public ItemReader reader() { - FlatFileItemReader reader = new FlatFileItemReader(); - reader.setResource(new ClassPathResource("sample-data.csv")); - reader.setLineMapper(new DefaultLineMapper() {{ - setLineTokenizer(new DelimitedLineTokenizer() {{ - setNames(new String[] { "firstName", "lastName" }); - }}); - setFieldSetMapper(new BeanWrapperFieldSetMapper() {{ - setTargetType(Person.class); - }}); - }}); - return reader; - } - - @Bean - public ItemProcessor processor() { - return new PersonItemProcessor(); - } - - @Bean - public ItemWriter writer(DataSource dataSource) { - JdbcBatchItemWriter writer = new JdbcBatchItemWriter(); - writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider()); - writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)"); - writer.setDataSource(dataSource); - return writer; - } -``` - -The first chunk of code defines the input, processor, and output. -- `reader()` creates an `ItemReader`. It looks for a file called `sample-data.csv` and parses each line item with enough information to turn it into a `Person`. -- `processor()` creates an instance of our `PersonItemProcessor` you defined earlier, meant to uppercase the data. -- `write(DataSource)` creates an `ItemWriter`. This one is aimed at a JDBC destination and automatically gets a copy of the dataSource created by `@EnableBatchProcessing`. It includes the SQL statement needed to insert a single `Person` driven by Java bean properties. - -The next chunk focuses on the actual job configuration. 
- -`src/main/java/hello/BatchConfiguration.java` -```java - @Bean - public Job importUserJob(JobBuilderFactory jobs, Step s1) { - return jobs.get("importUserJob") - .incrementer(new RunIdIncrementer()) - .flow(s1) - .end() - .build(); - } - - @Bean - public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader reader, - ItemWriter writer, ItemProcessor processor) { - return stepBuilderFactory.get("step1") - . chunk(10) - .reader(reader) - .processor(processor) - .writer(writer) - .build(); - } -``` - -The first method defines the job and the second one defines a single step. Jobs are built from steps, where each step can involve a reader, a processor, and a writer. - -In this job definition, you need an incrementer because jobs use a database to maintain execution state. You then list each step, of which this job has only one step. The job ends, and the Java API produces a perfectly configured job. - -In the step definition, you define how much data to write at a time. In this case, it writes up to ten records at a time. Next, you configure the reader, processor, and writer using the injected bits from earlier. - -> **Note:** chunk() is prefixed `` because it's a generic method. This represents the input and output types of each "chunk" of processing, and lines up with `ItemReader` and `ItemWriter`. - - -Make the application executable -------------------------------- - -Although batch processing can be embedded in web apps and WAR files, the simpler approach demonstrated below creates a standalone application. You package everything in a single, executable JAR file, driven by a good old Java `main()` method. 
- -### Create an Application class - -`src/main/java/hello/Application.java` -```java -package hello; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.List; - -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; -import org.springframework.context.ApplicationContext; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; - -@ComponentScan -@EnableAutoConfiguration -public class Application { - - public static void main(String[] args) { - ApplicationContext ctx = SpringApplication.run(Application.class, args); - - List results = ctx.getBean(JdbcTemplate.class).query("SELECT first_name, last_name FROM people", new RowMapper() { - @Override - public Person mapRow(ResultSet rs, int row) throws SQLException { - return new Person(rs.getString(1), rs.getString(2)); - } - }); - - for (Person person : results) { - System.out.println("Found <" + person + "> in the database."); - } - } - -} -``` - -The `main()` method defers to the [`SpringApplication`][] helper class, providing `Application.class` as an argument to its `run()` method. This tells Spring to read the annotation metadata from `Application` and to manage it as a component in the [Spring application context][u-application-context]. - -The `@ComponentScan` annotation tells Spring to search recursively through the `hello` package and its children for classes marked directly or indirectly with Spring's [`@Component`][] annotation. This directive ensures that Spring finds and registers `BatchConfiguration`, because it is marked with `@Configuration`, which in turn is a kind of `@Component` annotation. - -The [`@EnableAutoConfiguration`][] annotation switches on reasonable default behaviors based on the content of your classpath. 
For example, it looks for any class that implements the `CommandLineRunner` interface and invokes its `run()` method. In this case, it runs the demo code for this guide. - -For demonstration purposes, there is code to create a `JdbcTemplate`, query the database, and print out the names of people the batch job inserts. - -### Build an executable JAR - -Now that your `Application` class is ready, you simply instruct the build system to create a single, executable jar containing everything. This makes it easy to ship, version, and deploy the service as an application throughout the development lifecycle, across different environments, and so forth. - -Below are the Gradle steps, but if you are using Maven, you can find the updated pom.xml [right here](https://github.com/spring-guides/gs-batch-processing/blob/master/complete/pom.xml) and build it by typing `mvn clean package`. - -Update your Gradle `build.gradle` file's `buildscript` section, so that it looks like this: - -```groovy -buildscript { - repositories { - maven { url "http://repo.spring.io/libs-snapshot" } - mavenLocal() - } - dependencies { - classpath("org.springframework.boot:spring-boot-gradle-plugin:0.5.0.M6") - } -} -``` - -Further down inside `build.gradle`, add the following to the list of applied plugins: - -```groovy -apply plugin: 'spring-boot' -``` -You can see the final version of `build.gradle` [right here]((https://github.com/spring-guides/gs-batch-processing/blob/master/complete/build.gradle). - -The [Spring Boot gradle plugin][spring-boot-gradle-plugin] collects all the jars on the classpath and builds a single "über-jar", which makes it more convenient to execute and transport your service. -It also searches for the `public static void main()` method to flag as a runnable class. 
- -Now run the following command to produce a single executable JAR file containing all necessary dependency classes and resources: - -```sh -$ ./gradlew build -``` - -If you are using Gradle, you can run the JAR by typing: - -```sh -$ java -jar build/libs/gs-batch-processing-0.1.0.jar -``` - -If you are using Maven, you can run the JAR by typing: - -```sh -$ java -jar target/gs-batch-processing-0.1.0.jar -``` - -[spring-boot-gradle-plugin]: https://github.com/spring-projects/spring-boot/tree/master/spring-boot-tools/spring-boot-gradle-plugin - -> **Note:** The procedure above will create a runnable JAR. You can also opt to [build a classic WAR file](/guides/gs/convert-jar-to-war/) instead. - -Run the batch job -------------------- -If you are using Gradle, you can run your batch job at the command line this way: - -```sh -$ ./gradlew clean build && java -jar build/libs/gs-batch-processing-0.1.0.jar -``` - -> **Note:** If you are using Maven, you can run your batch job by typing `mvn clean package && java -jar target/gs-batch-processing-0.1.0.jar`. - - -The job prints out a line for each person that gets transformed. After the job runs, you can also see the output from querying the database. - -```sh -Converting (firstName: Jill, lastName: Doe) into (firstName: JILL, lastName: DOE) -Converting (firstName: Joe, lastName: Doe) into (firstName: JOE, lastName: DOE) -Converting (firstName: Justin, lastName: Doe) into (firstName: JUSTIN, lastName: DOE) -Converting (firstName: Jane, lastName: Doe) into (firstName: JANE, lastName: DOE) -Converting (firstName: John, lastName: Doe) into (firstName: JOHN, lastName: DOE) -Found in the database. -Found in the database. -Found in the database. -Found in the database. -Found in the database. -``` - - -Summary -------- - -Congratulations! You built a batch job that ingested data from a spreadsheet, processed it, and wrote it to a database. 
- - -[`SpringApplication`]: http://docs.spring.io/spring-boot/docs/0.5.0.M3/api/org/springframework/boot/SpringApplication.html -[`@EnableAutoConfiguration`]: http://docs.spring.io/spring-boot/docs/0.5.0.M3/api/org/springframework/boot/autoconfigure/EnableAutoConfiguration.html -[`@Component`]: http://docs.spring.io/spring/docs/current/javadoc-api/org/springframework/stereotype/Component.html - diff --git a/complete/src/main/java/hello/BatchConfiguration.java b/complete/src/main/java/hello/BatchConfiguration.java index a1ab92b5a1..1cf9a7f227 100644 --- a/complete/src/main/java/hello/BatchConfiguration.java +++ b/complete/src/main/java/hello/BatchConfiguration.java @@ -26,7 +26,7 @@ import org.springframework.jdbc.core.JdbcTemplate; @EnableBatchProcessing public class BatchConfiguration { - // {!begin readerwriterprocessor} + // tag::readerwriterprocessor[] @Bean public ItemReader reader() { FlatFileItemReader reader = new FlatFileItemReader(); @@ -55,9 +55,9 @@ public class BatchConfiguration { writer.setDataSource(dataSource); return writer; } - // {!end readerwriterprocessor} + // end::readerwriterprocessor[] - // {!begin jobstep} + // tag::jobstep[] @Bean public Job importUserJob(JobBuilderFactory jobs, Step s1) { return jobs.get("importUserJob") @@ -77,7 +77,7 @@ public class BatchConfiguration { .writer(writer) .build(); } - // {!end jobstep} + // end::jobstep[] @Bean public JdbcTemplate jdbcTemplate(DataSource dataSource) {