Merge pull request #12199 from hkhan/JAVA-11122-log-clean-up
[JAVA-11122] Logging clean up
Commit: 0bbd9bdc64
@@ -12,21 +12,29 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 
 public class ActionsUnitTest {
 
+    public static final Logger LOG = LoggerFactory.getLogger(ActionsUnitTest.class);
+
     private static JavaRDD<String> tourists;
     private static JavaSparkContext sc;
     public static final String COMMA_DELIMITER = ",(?=([^\"]*\"[^\"]*\")*[^\"]*$)";
 
     @BeforeClass
     public static void init() {
-        SparkConf conf = new SparkConf().setAppName("reduce")
-            .setMaster("local[*]");
+        SparkConf conf = new SparkConf()
+            .setAppName("reduce")
+            .setMaster("local[*]")
+            .set("spark.driver.allowMultipleContexts", "true");
 
         sc = new JavaSparkContext(conf);
         tourists = sc.textFile("data/Tourist.csv").filter(line -> !line.startsWith("Region"));
     }
 
     @AfterClass
     public static void cleanup() {
         sc.close();
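Note: the rewritten setup adds spark.driver.allowMultipleContexts, which stops Spark from throwing when several test classes each create their own context in the same JVM. A minimal sketch of the lifecycle these suites rely on (class and app names here are illustrative, not from the commit):

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public class SparkTestLifecycleSketch {

        private static JavaSparkContext sc;

        public static void setUp() {
            // One local context per test class; allowMultipleContexts keeps
            // Spark from rejecting it if another suite's context is still open.
            SparkConf conf = new SparkConf()
                .setAppName("lifecycle-sketch")
                .setMaster("local[*]")
                .set("spark.driver.allowMultipleContexts", "true");
            sc = new JavaSparkContext(conf);
        }

        public static void tearDown() {
            // Close the context so later suites start cleanly.
            sc.close();
        }
    }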
@@ -40,11 +48,11 @@ public class ActionsUnitTest {
             })
             .distinct();
         Long numberOfCountries = countries.count();
-        System.out.println("Count: " + numberOfCountries);
+
+        LOG.debug("Count: {}", numberOfCountries);
 
         assertEquals(Long.valueOf(220), numberOfCountries);
     }
 
     @Test
     public void whenReduceByKeySum_thenTotalValuePerKey() {
         JavaRDD<String> touristsExpenditure = tourists.filter(line -> line.split(COMMA_DELIMITER)[3].contains("expenditure"));
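Note: replacing System.out.println with a parameterized LOG.debug call routes output through the configured appender and defers message construction: with the {} placeholder, the string is only assembled when DEBUG is enabled. A small sketch of the pattern (class name is illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingSketch {

        private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

        void report(long numberOfCountries) {
            // No concatenation happens unless DEBUG is on.
            LOG.debug("Count: {}", numberOfCountries);

            // Explicit guard, worthwhile when the argument is expensive to compute.
            if (LOG.isDebugEnabled()) {
                LOG.debug("Count: {}", numberOfCountries);
            }
        }
    }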
@@ -53,10 +61,12 @@ public class ActionsUnitTest {
             String[] columns = line.split(COMMA_DELIMITER);
             return new Tuple2<>(columns[1], Double.valueOf(columns[6]));
         });
-        List<Tuple2<String, Double>> totalByCountry = expenditurePairRdd.reduceByKey((x, y) -> x + y)
-            .collect();
-        System.out.println("Total per Country: " + totalByCountry);
+        List<Tuple2<String, Double>> totalByCountry = expenditurePairRdd
+            .reduceByKey(Double::sum)
+            .collect();
+
+        LOG.debug("Total per Country: {}", totalByCountry);
 
         for(Tuple2<String, Double> tuple : totalByCountry) {
             if (tuple._1.equals("Mexico")) {
                 assertEquals(Double.valueOf(99164), tuple._2);
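Note: the reducer also changes from the lambda (x, y) -> x + y to the method reference Double::sum; for boxed Double arguments the two behave identically. A quick standalone illustration:

    import java.util.function.BinaryOperator;

    public class SumEquivalenceSketch {
        public static void main(String[] args) {
            BinaryOperator<Double> lambda = (x, y) -> x + y;
            BinaryOperator<Double> methodRef = Double::sum;

            // Both unbox the arguments, add, and box the result.
            System.out.println(lambda.apply(1.5, 2.5));    // 4.0
            System.out.println(methodRef.apply(1.5, 2.5)); // 4.0
        }
    }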
@@ -39,8 +39,10 @@ public class DataFrameUnitTest {
     @Test
     public void whenSelectSpecificColumns_thenColumnsFiltered() {
         Dataset<Row> selectedData = data.select(col("country"), col("year"), col("value"));
-        selectedData.show();
+
+        // uncomment to see table
+        // selectedData.show();
 
         List<String> resultList = Arrays.asList(selectedData.columns());
         assertTrue(resultList.contains("country"));
         assertTrue(resultList.contains("year"));
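Note: the commit quiets the tests by commenting the show() calls out. An alternative, not used here, is to gate the table dump behind the logger's debug level so it can be switched back on from logging configuration alone. A sketch under that assumption (the helper is hypothetical):

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class DebugShowSketch {

        private static final Logger LOG = LoggerFactory.getLogger(DebugShowSketch.class);

        // Hypothetical helper; the commit itself just comments show() out.
        static void showIfDebug(Dataset<Row> data) {
            if (LOG.isDebugEnabled()) {
                data.show(); // prints the table only when DEBUG is enabled
            }
        }
    }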
@@ -52,22 +54,26 @@ public class DataFrameUnitTest {
     @Test
     public void whenFilteringByCountry_thenCountryRecordsSelected() {
         Dataset<Row> filteredData = data.filter(col("country").equalTo("Mexico"));
-        filteredData.show();
+
+        // uncomment to see table
+        // filteredData.show();
 
         filteredData.foreach(record -> {
             assertEquals("Mexico", record.get(1));
         });
-
     }
 
     @Test
     public void whenGroupCountByCountry_thenContryTotalRecords() {
         Dataset<Row> recordsPerCountry = data.groupBy(col("country"))
             .count();
-        recordsPerCountry.show();
+
+        // uncomment to see table
+        // recordsPerCountry.show();
 
         Dataset<Row> filteredData = recordsPerCountry.filter(col("country").equalTo("Sweden"));
-        assertEquals(new Long(12), filteredData.first()
+        assertEquals(12L, filteredData.first()
             .get(1));
     }
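Note the assertion change from new Long(12) to 12L: the Long(long) constructor has been deprecated since Java 9, and the long literal autoboxes to an equal Long when it meets the Object returned by Row.get(1). A minimal sketch (class and test names are illustrative):

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class BoxingAssertSketch {

        @Test
        public void whenComparingBoxedCount_thenLongLiteralSuffices() {
            Object value = 12L; // stands in for the boxed Long from Row.get(1)

            // 12L autoboxes to Long.valueOf(12); no deprecated constructor needed.
            assertEquals(12L, value);
        }
    }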
@@ -3,6 +3,7 @@ package com.baeldung.differences.rdd;
 import static org.apache.spark.sql.functions.col;
 import static org.apache.spark.sql.functions.sum;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.sql.DataFrameReader;
@@ -29,8 +30,8 @@ public class DatasetUnitTest {
         DataFrameReader dataFrameReader = session.read();
         Dataset<Row> data = dataFrameReader.option("header", "true")
             .csv("data/Tourist.csv");
-        Dataset<Row> responseWithSelectedColumns = data.select(col("region"),
-            col("country"), col("year"), col("series"), col("value").cast("double"),
+        Dataset<Row> responseWithSelectedColumns = data.select(col("region"),
+            col("country"), col("year"), col("series"), col("value").cast("double"),
             col("footnotes"), col("source"));
         typedDataset = responseWithSelectedColumns.as(Encoders.bean(TouristData.class));
     }
@@ -45,7 +46,9 @@ public class DatasetUnitTest {
         Dataset<TouristData> selectedData = typedDataset
             .filter((FilterFunction<TouristData>) record -> record.getCountry()
                 .equals("Norway"));
-        selectedData.show();
+
+        // uncomment to see output
+        // selectedData.show();
 
         selectedData.foreach(record -> {
             assertEquals("Norway", record.getCountry());
@@ -56,28 +59,41 @@ public class DatasetUnitTest {
     public void whenGroupCountByCountry_thenContryTotalRecords() {
         Dataset<Row> countriesCount = typedDataset.groupBy(typedDataset.col("country"))
             .count();
-        countriesCount.show();
-
-        assertEquals(Long.valueOf(220), Long.valueOf(countriesCount.count()));
+
+        // uncomment to see output
+        // countriesCount.show();
+
+        assertEquals(220, countriesCount.count());
     }
 
     @Test
     public void whenFilteredByPropertyRange_thenRetreiveValidRecords() {
         // Filter records with existing data for years between 2010 and 2017
-        typedDataset.filter((FilterFunction<TouristData>) record -> record.getYear() != null
-            && (Long.valueOf(record.getYear()) > 2010 && Long.valueOf(record.getYear()) < 2017))
-            .show();
+        Dataset<TouristData> filteredData = typedDataset.filter(
+            (FilterFunction<TouristData>) record -> record.getYear() != null
+                && (Long.parseLong(record.getYear()) > 2010 && Long.parseLong(record.getYear()) < 2017));
+
+        // uncomment to see output
+        // filteredData.show();
+
+        assertEquals(394, filteredData.count());
+        filteredData.foreach(record -> {
+            assertTrue(Integer.parseInt(record.getYear()) > 2010 && Integer.parseInt(record.getYear()) < 2017);
+        });
     }
 
     @Test
     public void whenSumValue_thenRetreiveTotalValue() {
         // Total tourist expenditure by country
-        typedDataset.filter((FilterFunction<TouristData>) record -> record.getValue() != null
-            && record.getSeries()
-                .contains("expenditure"))
-            .groupBy("country")
-            .agg(sum("value"))
-            .show();
+        Dataset<Row> filteredData = typedDataset.filter((FilterFunction<TouristData>) record -> record.getValue() != null
+            && record.getSeries().contains("expenditure"))
+            .groupBy("country")
+            .agg(sum("value"));
+
+        // uncomment to see output
+        // filteredData.show();
+
+        assertEquals(212, filteredData.count());
     }
 
 }
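Note: the year filter swaps Long.valueOf for Long.parseLong. Both parse the string the same way, but parseLong returns a primitive long, so the range comparisons run without creating a boxed Long per record. A standalone illustration:

    public class ParseSketch {
        public static void main(String[] args) {
            String year = "2015";

            long primitive = Long.parseLong(year); // primitive, no object created
            Long boxed = Long.valueOf(year);       // boxed Long object

            // The primitive compares directly; the boxed value must unbox first.
            System.out.println(primitive > 2010 && primitive < 2017); // true
            System.out.println(boxed > 2010 && boxed < 2017);         // true
        }
    }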
@@ -23,8 +23,11 @@ public class TransformationsUnitTest {
 
     @BeforeClass
     public static void init() {
-        SparkConf conf = new SparkConf().setAppName("uppercaseCountries")
-            .setMaster("local[*]");
+        SparkConf conf = new SparkConf()
+            .setAppName("uppercaseCountries")
+            .setMaster("local[*]")
+            .set("spark.driver.allowMultipleContexts", "true");
 
         sc = new JavaSparkContext(conf);
         tourists = sc.textFile("data/Tourist.csv")
             .filter(line -> !line.startsWith("Region")); //filter header row
@@ -3,12 +3,13 @@ package com.baeldung.graphql;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.mockserver.client.MockServerClient;
+import org.mockserver.configuration.Configuration;
 import org.mockserver.integration.ClientAndServer;
 import org.mockserver.model.HttpStatusCode;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.net.ServerSocket;
-import java.net.URISyntaxException;
 
 import static org.mockserver.integration.ClientAndServer.startClientAndServer;
 import static org.mockserver.matchers.Times.exactly;
@@ -17,20 +18,22 @@ import static org.mockserver.model.HttpResponse.response;
 
 public class GraphQLMockServer {
 
+    public static ClientAndServer mockServer;
+    private static final String SERVER_ADDRESS = "127.0.0.1";
+    private static final String PATH = "/graphql";
+
     public static String serviceUrl;
 
-    private static ClientAndServer mockServer;
     private static int serverPort;
 
-    public static final String SERVER_ADDRESS = "127.0.0.1";
-    public static final String HTTP_GET_POST = "GET";
-    public static final String PATH = "/graphql";
-
     @BeforeAll
-    static void startServer() throws IOException, URISyntaxException {
+    static void startServer() throws IOException {
         serverPort = getFreePort();
         serviceUrl = "http://" + SERVER_ADDRESS + ":" + serverPort + PATH;
-        mockServer = startClientAndServer(serverPort);
+
+        Configuration config = Configuration.configuration().logLevel(Level.WARN);
+        mockServer = startClientAndServer(config, serverPort);
+
         mockAllBooksTitleRequest();
         mockAllBooksTitleAuthorRequest();
     }
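Note: the new startServer builds a MockServer Configuration with logLevel(Level.WARN), which suppresses MockServer's verbose INFO logging, and passes it to startClientAndServer along with the port. The body of getFreePort() is not part of this diff; given the java.net.ServerSocket import above, it presumably binds port 0 and reads back the OS-assigned port. A sketch of that assumed helper:

    import java.io.IOException;
    import java.net.ServerSocket;

    public class FreePortSketch {

        // Assumed implementation of the getFreePort() the diff calls:
        // binding port 0 asks the OS for any free ephemeral port.
        static int getFreePort() throws IOException {
            try (ServerSocket socket = new ServerSocket(0)) {
                return socket.getLocalPort();
            }
        }
    }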