SDAP-28 Ingest SWOT Sample Data Tiled by time (#2)
author fgreg <fgreguska+github@gmail.com>
Thu, 29 Mar 2018 18:29:04 +0000 (11:29 -0700)
committer GitHub <noreply@github.com>
Thu, 29 Mar 2018 18:29:04 +0000 (11:29 -0700)
Added new tiling scheme that simply lets you set the step size for each dimension. Can be found at `src/main/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSize.java`

18 files changed:
.idea/compiler.xml
.idea/modules/ningester_testJobs.iml
build.gradle
src/main/java/org/apache/sdap/ningester/configuration/AppConfig.java
src/main/java/org/apache/sdap/ningester/configuration/BatchConfig.java
src/main/java/org/apache/sdap/ningester/configuration/properties/ApplicationProperties.java
src/main/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSize.java [new file with mode: 0644]
src/main/java/org/apache/sdap/ningester/datatiler/SliceFileByTilesDesired.java
src/main/java/org/apache/sdap/ningester/datatiler/properties/SliceFileByStepSize.java [new file with mode: 0644]
src/test/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSizeTest.java [new file with mode: 0644]
src/test/java/org/apache/sdap/ningester/datatiler/SliceFileByTilesDesiredTest.java
src/test/resources/granules/20050101120000-NCEI-L4_GHRSST-SSTblend-AVHRR_OI-GLOB-v02.0-fv02.0.nc
src/test/resources/granules/CCMP_Wind_Analysis_20050101_V02.0_L3.0_RSS.nc
src/test/resources/granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc [new file with mode: 0644]
src/test/resources/granules/SMAP_L2B_SSS_04892_20160101T005507_R13080.h5
src/test/resources/granules/ascat_20121029_010301_metopb_00588_eps_o_coa_2101_ovw.l2.nc
src/testJobs/java/org/apache/sdap/ningester/testjobs/SwotJobTest.java [new file with mode: 0644]
src/testJobs/resources/testjobs/SwotJobTest.yml [new file with mode: 0644]

index 1239e68..1f7ddff 100644 (file)
@@ -2,9 +2,7 @@
 <project version="4">
   <component name="CompilerConfiguration">
     <bytecodeTargetLevel>
-      <module name="ningester_integrationTest" target="1.8" />
       <module name="ningester_main" target="1.8" />
-      <module name="ningester_python" target="1.8" />
       <module name="ningester_test" target="1.8" />
       <module name="ningester_testJobs" target="1.8" />
     </bytecodeTargetLevel>
index 67c8b84..9b7ee1f 100644 (file)
@@ -17,6 +17,7 @@
     <orderEntry type="library" name="Gradle: org.springframework.boot:spring-boot-configuration-processor:1.5.9.RELEASE" level="project" />
     <orderEntry type="module" module-name="ningester_main" />
     <orderEntry type="library" scope="PROVIDED" name="Gradle: com.vaadin.external.google:android-json:0.0.20131108.vaadin1" level="project" />
+    <orderEntry type="module" module-name="ningester_test" production-on-test="" />
     <orderEntry type="library" name="Gradle: org.springframework.boot:spring-boot-starter-batch:1.5.9.RELEASE" level="project" />
     <orderEntry type="library" name="Gradle: org.springframework:spring-web:4.3.13.RELEASE" level="project" />
     <orderEntry type="library" name="Gradle: org.springframework.data:spring-data-cassandra:1.5.9.RELEASE" level="project" />
index 445f776..76b4960 100644 (file)
@@ -71,6 +71,8 @@ dependencies {
 
        testCompile('org.springframework.boot:spring-boot-starter-test')
        testCompile('org.springframework.batch:spring-batch-test')
+
+       testJobsCompile sourceSets.test.output
 }
 
 compileJava.dependsOn(processResources)
\ No newline at end of file
index b7a7e7f..b4017e9 100644 (file)
 
 package org.apache.sdap.ningester.configuration;
 
-import org.apache.sdap.nexusproto.NexusTile;
 import org.apache.sdap.ningester.configuration.properties.ApplicationProperties;
 import org.apache.sdap.ningester.datatiler.FileSlicer;
 import org.apache.sdap.ningester.datatiler.SliceFileByDimension;
+import org.apache.sdap.ningester.datatiler.SliceFileByStepSize;
 import org.apache.sdap.ningester.datatiler.SliceFileByTilesDesired;
 import org.apache.sdap.ningester.http.NexusTileConverter;
 import org.apache.sdap.ningester.processors.*;
@@ -34,6 +34,7 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.core.annotation.Order;
 import org.springframework.http.MediaType;
 import org.springframework.http.converter.HttpMessageConverter;
 import org.springframework.web.client.RestTemplate;
@@ -43,6 +44,7 @@ import java.util.Collections;
 import java.util.List;
 
 @Configuration
+@Order(1)
 @EnableConfigurationProperties({ApplicationProperties.class})
 public class AppConfig {
 
@@ -76,6 +78,13 @@ public class AppConfig {
     }
 
     @Bean
+    @ConditionalOnProperty(prefix = "ningester", name = "tile_slicer", havingValue = "sliceFileByStepSize")
+    @Qualifier("fileSlicer")
+    protected FileSlicer sliceFileByStepSize() {
+        return new SliceFileByStepSize(applicationProperties.getSliceFileByStepSize().getDimensionToStepSize());
+    }
+
+    @Bean
     protected HttpMessageConverter nexusTileConverter() {
         NexusTileConverter converter = new NexusTileConverter();
         converter.setSupportedMediaTypes(Collections.singletonList(MediaType.APPLICATION_OCTET_STREAM));
index 34ffb9c..7b5249e 100644 (file)
@@ -39,10 +39,12 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
+import org.springframework.core.annotation.Order;
 import org.springframework.core.io.Resource;
 import org.springframework.core.io.ResourceLoader;
 
 @Configuration
+@Order(2)
 @EnableBatchProcessing
 @Import(AppConfig.class)
 public class BatchConfig {
index a970f80..84f30bd 100644 (file)
@@ -18,6 +18,7 @@
 
 package org.apache.sdap.ningester.configuration.properties;
 
+import org.apache.sdap.ningester.datatiler.properties.SliceFileByStepSize;
 import org.apache.sdap.ningester.datatiler.properties.SliceFileByDimension;
 import org.apache.sdap.ningester.datatiler.properties.SliceFileByTilesDesired;
 import org.apache.sdap.ningester.processors.properties.*;
@@ -37,6 +38,8 @@ public class ApplicationProperties {
     @NestedConfigurationProperty
     private final SliceFileByTilesDesired sliceFileByTilesDesired = new SliceFileByTilesDesired();
     @NestedConfigurationProperty
+    private final SliceFileByStepSize sliceFileByStepSize = new SliceFileByStepSize();
+    @NestedConfigurationProperty
     private final AddDayOfYearAttribute addDayOfYearAttribute = new AddDayOfYearAttribute();
     @NestedConfigurationProperty
     private final AddTimeFromGranuleName addTimeFromGranuleName = new AddTimeFromGranuleName();
@@ -88,4 +91,8 @@ public class ApplicationProperties {
     public SliceFileByDimension getSliceFileByDimension() {
         return sliceFileByDimension;
     }
+
+    public SliceFileByStepSize getSliceFileByStepSize() {
+        return sliceFileByStepSize;
+    }
 }
diff --git a/src/main/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSize.java b/src/main/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSize.java
new file mode 100644 (file)
index 0000000..cc5ecb7
--- /dev/null
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sdap.ningester.datatiler;
+
+import com.google.common.collect.Sets;
+import ucar.nc2.Dimension;
+import ucar.nc2.dataset.NetcdfDataset;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class SliceFileByStepSize implements FileSlicer {
+
+    private LinkedHashMap<String, Integer> dimensionToStepSize = new LinkedHashMap<>();
+    private List<String> orderedDimensions = new ArrayList<>();
+
+    public SliceFileByStepSize(LinkedHashMap<String, Integer> dimensionToStepSize) {
+        this.dimensionToStepSize.putAll(dimensionToStepSize);
+
+        this.dimensionToStepSize.forEach((dimension, stepSize) -> orderedDimensions.add(dimension));
+    }
+
+
+    @Override
+    public List<String> generateSlices(File inputfile) throws IOException {
+
+        Map<String, Integer> dimensionNameToLength;
+        try (NetcdfDataset ds = NetcdfDataset.openDataset(inputfile.getAbsolutePath())) {
+            List<String> dimensionNames = ds.getDimensions().stream().map(Dimension::getShortName).collect(Collectors.toList());
+            assert dimensionNames.containsAll(dimensionToStepSize.keySet()) : String
+                    .format("Slice by dimensions must be present in dataset. Dimensions in dataset are %s. Dimensions provided %s",
+                            dimensionNames, dimensionToStepSize.keySet());
+
+            dimensionNameToLength = ds.getDimensions().stream()
+                    .filter(dimension -> this.dimensionToStepSize.keySet().contains(dimension.getShortName()))
+                    .sorted(Comparator.comparing(Dimension::getShortName, Comparator.comparingInt(dim -> this.orderedDimensions.indexOf(dim))))
+                    .collect(Collectors.toMap(Dimension::getShortName, Dimension::getLength,
+                            (v1, v2) -> {
+                                throw new RuntimeException(String.format("Duplicate key for values %s and %s", v1, v2));
+                            },
+                            LinkedHashMap::new));
+
+        }
+
+
+        return generateChunkBoundrySlices(dimensionNameToLength);
+    }
+
+    List<String> generateChunkBoundrySlices(Map<String, Integer> dimensionNameToLength) {
+
+        List<Set<String>> dimensionBounds = dimensionNameToLength.entrySet().stream()
+                .map(stringIntegerEntry -> {
+                    String dimensionName = stringIntegerEntry.getKey();
+                    Integer lengthOfDimension = stringIntegerEntry.getValue();
+                    Integer stepSize = this.dimensionToStepSize.get(dimensionName);
+                    Set<String> bounds = new LinkedHashSet<>();
+                    for (int i = 0; i < lengthOfDimension; i += stepSize) {
+                        bounds.add(
+                                dimensionName + ":" +
+                                        i + ":" +
+                                        (i + stepSize >= lengthOfDimension ? lengthOfDimension : i + stepSize));
+                    }
+                    return bounds;
+                }).collect(Collectors.toList());
+
+        return Sets.cartesianProduct(dimensionBounds)
+                .stream()
+                .map(tileSpecAsList -> tileSpecAsList.stream().collect(Collectors.joining(",")))
+                .collect(Collectors.toList());
+
+    }
+}
index 62e43bf..46e56e0 100644 (file)
@@ -51,11 +51,12 @@ public class SliceFileByTilesDesired implements FileSlicer {
 
             dimensionNameToLength = ds.getDimensions().stream()
                     .filter(dimension -> this.dimensions.contains(dimension.getShortName()))
+                    .sorted(Comparator.comparing(Dimension::getShortName, Comparator.comparingInt(dim -> this.dimensions.indexOf(dim))))
                     .collect(Collectors.toMap(Dimension::getShortName, Dimension::getLength,
                             (v1, v2) -> {
                                 throw new RuntimeException(String.format("Duplicate key for values %s and %s", v1, v2));
                             },
-                            TreeMap::new));
+                            LinkedHashMap::new));
 
             if (this.timeDimension != null) {
                 timeLen = ds.getDimensions().stream()
diff --git a/src/main/java/org/apache/sdap/ningester/datatiler/properties/SliceFileByStepSize.java b/src/main/java/org/apache/sdap/ningester/datatiler/properties/SliceFileByStepSize.java
new file mode 100644 (file)
index 0000000..5d4514f
--- /dev/null
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sdap.ningester.datatiler.properties;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import java.util.LinkedHashMap;
+
+@ConfigurationProperties
+@Component("sliceFileByStepSizeProperties")
+public class SliceFileByStepSize {
+
+    private LinkedHashMap<String, Integer> dimensionToStepSize = new LinkedHashMap<>();
+
+    public LinkedHashMap<String, Integer> getDimensionToStepSize() {
+        return dimensionToStepSize;
+    }
+
+    public void setDimensionToStepSize(LinkedHashMap<String, Integer> dimensionToStepSize) {
+        this.dimensionToStepSize = dimensionToStepSize;
+    }
+
+}
diff --git a/src/test/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSizeTest.java b/src/test/java/org/apache/sdap/ningester/datatiler/SliceFileByStepSizeTest.java
new file mode 100644 (file)
index 0000000..6a1267b
--- /dev/null
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sdap.ningester.datatiler;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.Resource;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.junit.Assert.assertEquals;
+
+public class SliceFileByStepSizeTest {
+
+    @Rule
+    public ExpectedException exceptionGrabber = ExpectedException.none();
+
+    @Test
+    public void testGenerateChunkBoundrySlicesWithDivisibileTiles() {
+
+        LinkedHashMap<String, Integer> dimensionToStepSize = new LinkedHashMap<>();
+        dimensionToStepSize.put("lat", 4);
+        dimensionToStepSize.put("lon", 4);
+
+        SliceFileByStepSize slicer = new SliceFileByStepSize(dimensionToStepSize);
+
+        Map<String, Integer> dimensionNameToLength = new LinkedHashMap<>();
+        dimensionNameToLength.put("lat", 8);
+        dimensionNameToLength.put("lon", 8);
+
+        List<String> result = slicer.generateChunkBoundrySlices(dimensionNameToLength);
+
+        assertEquals(4, result.size());
+
+        String[] expected = new String[]{
+                "lat:0:4,lon:0:4",
+                "lat:0:4,lon:4:8",
+                "lat:4:8,lon:0:4",
+                "lat:4:8,lon:4:8"};
+        assertThat(result, containsInAnyOrder(expected));
+        assertThat(result, contains(expected));
+
+    }
+
+    @Test
+    public void testGenerateChunkBoundrySlicesWithNonDivisibileTiles() {
+
+        LinkedHashMap<String, Integer> dimensionToStepSize = new LinkedHashMap<>();
+        dimensionToStepSize.put("lat", 3);
+        dimensionToStepSize.put("lon", 3);
+
+        SliceFileByStepSize slicer = new SliceFileByStepSize(dimensionToStepSize);
+
+        Map<String, Integer> dimensionNameToLength = new LinkedHashMap<>();
+        dimensionNameToLength.put("lat", 8);
+        dimensionNameToLength.put("lon", 8);
+
+        List<String> result = slicer.generateChunkBoundrySlices(dimensionNameToLength);
+
+        assertEquals(9, result.size());
+
+        String[] expected = new String[]{
+                "lat:0:3,lon:0:3",
+                "lat:0:3,lon:3:6",
+                "lat:0:3,lon:6:8",
+                "lat:3:6,lon:0:3",
+                "lat:3:6,lon:3:6",
+                "lat:3:6,lon:6:8",
+                "lat:6:8,lon:0:3",
+                "lat:6:8,lon:3:6",
+                "lat:6:8,lon:6:8"};
+        assertThat(result, containsInAnyOrder(expected));
+        assertThat(result, contains(expected));
+
+    }
+
+    @Test
+    public void testSliceFileByStepSize() throws IOException {
+
+        LinkedHashMap<String, Integer> dimensionToStepSize = new LinkedHashMap<>();
+        dimensionToStepSize.put("time", 5832);
+        dimensionToStepSize.put("rivid", 1);
+
+        Integer expectedTiles = 43; // 1 river and all times per tile. 43 total rivers
+
+        SliceFileByStepSize slicer = new SliceFileByStepSize(dimensionToStepSize);
+
+        Resource testResource = new ClassPathResource("granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc");
+
+        List<String> results = slicer.generateSlices(testResource.getFile());
+
+        assertThat(results.size(), is(expectedTiles));
+
+        assertThat(results.get(0), is("time:0:5832,rivid:0:1"));
+        assertThat(results.get(results.size() - 1), is("time:0:5832,rivid:42:43"));
+
+    }
+
+    @Test
+    public void testSliceFileByStepSizeThrowsExceptionWithUnkownDimension() throws IOException {
+        LinkedHashMap<String, Integer> dimensionToStepSize = new LinkedHashMap<>();
+        dimensionToStepSize.put("badDimension", 5832);
+
+        SliceFileByStepSize slicer = new SliceFileByStepSize(dimensionToStepSize);
+
+        Resource testResource = new ClassPathResource("granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc");
+
+        exceptionGrabber.expect(AssertionError.class);
+        slicer.generateSlices(testResource.getFile());
+
+    }
+}
index 60cc18b..b92f280 100644 (file)
@@ -188,5 +188,29 @@ public class SliceFileByTilesDesiredTest {
 
         assertThat(results.size(), is(expectedTiles));
 
+        assertThat(results.get(0), is("time:0:1,latitude:0:38,longitude:0:87"));
+        assertThat(results.get(results.size() - 1), is("time:3:4,latitude:608:628,longitude:1392:1440"));
+
+    }
+
+    @Test
+    public void testDimensionsOrderedByInput() throws IOException {
+        Integer tilesDesired = 270;
+        Integer expectedTiles = 289 * 4; // 4 time slices and 289 tiles per time slice
+
+        SliceFileByTilesDesired slicer = new SliceFileByTilesDesired();
+        slicer.setTilesDesired(tilesDesired);
+        slicer.setDimensions(Arrays.asList("longitude", "latitude"));
+        slicer.setTimeDimension("time");
+
+        Resource testResource = new ClassPathResource("granules/CCMP_Wind_Analysis_20050101_V02.0_L3.0_RSS.nc");
+
+        List<String> results = slicer.generateSlices(testResource.getFile());
+
+        assertThat(results.size(), is(expectedTiles));
+
+        assertThat(results.get(0), is("time:0:1,longitude:0:87,latitude:0:38"));
+        assertThat(results.get(results.size() - 1), is("time:3:4,longitude:1392:1440,latitude:608:628"));
+
     }
 }
index 6996466..4935c81 100644 (file)
Binary files a/src/test/resources/granules/20050101120000-NCEI-L4_GHRSST-SSTblend-AVHRR_OI-GLOB-v02.0-fv02.0.nc and b/src/test/resources/granules/20050101120000-NCEI-L4_GHRSST-SSTblend-AVHRR_OI-GLOB-v02.0-fv02.0.nc differ
index 2a26eda..8556f3c 100644 (file)
Binary files a/src/test/resources/granules/CCMP_Wind_Analysis_20050101_V02.0_L3.0_RSS.nc and b/src/test/resources/granules/CCMP_Wind_Analysis_20050101_V02.0_L3.0_RSS.nc differ
diff --git a/src/test/resources/granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc b/src/test/resources/granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc
new file mode 100644 (file)
index 0000000..ce0ebcc
Binary files /dev/null and b/src/test/resources/granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc differ
index 8a5d950..11815dd 100644 (file)
Binary files a/src/test/resources/granules/SMAP_L2B_SSS_04892_20160101T005507_R13080.h5 and b/src/test/resources/granules/SMAP_L2B_SSS_04892_20160101T005507_R13080.h5 differ
index 029e9de..99af9d9 100644 (file)
Binary files a/src/test/resources/granules/ascat_20121029_010301_metopb_00588_eps_o_coa_2101_ovw.l2.nc and b/src/test/resources/granules/ascat_20121029_010301_metopb_00588_eps_o_coa_2101_ovw.l2.nc differ
diff --git a/src/testJobs/java/org/apache/sdap/ningester/testjobs/SwotJobTest.java b/src/testJobs/java/org/apache/sdap/ningester/testjobs/SwotJobTest.java
new file mode 100644 (file)
index 0000000..d85206f
--- /dev/null
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.sdap.ningester.testjobs;
+
+import org.apache.sdap.ningester.configuration.properties.ApplicationProperties;
+import org.apache.sdap.ningester.configuration.properties.DatasourceProperties;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersBuilder;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.test.JobLauncherTestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.data.cassandra.core.CassandraTemplate;
+import org.springframework.data.solr.core.SolrTemplate;
+import org.springframework.data.solr.core.query.SimpleQuery;
+import org.springframework.test.context.ActiveProfiles;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import static org.apache.sdap.ningester.testjobs.TestUtils.assertEqualsEventually;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+@RunWith(SpringRunner.class)
+@SpringBootTest
+@TestPropertySource(properties = {"spring.config.location = classpath:testjobs/SwotJobTest.yml"})
+@ActiveProfiles({"test", "cassandra", "solr"})
+public class SwotJobTest {
+
+    @Autowired
+    JobLauncherTestUtils jobLauncherTestUtils;
+    @Autowired
+    CassandraTemplate cassandraTemplate;
+    @Autowired
+    SolrTemplate solrTemplate;
+    @Autowired
+    DatasourceProperties datasourceProperties;
+    @Autowired
+    ApplicationProperties applicationProperties;
+
+    @Before
+    public void emptyDatabase() {
+        solrTemplate.delete(datasourceProperties.getSolrStore().getCollection(), new SimpleQuery("*:*"));
+        cassandraTemplate.truncate(datasourceProperties.getCassandraStore().getTableName());
+    }
+
+    @Test
+    public void testJobCompletes() throws Exception {
+
+        JobParameters jobParameters = new JobParametersBuilder()
+                .addString("granule", "classpath:granules/Qout_WSWM_729days_p0_dtR900s_n1_preonly_20160416.split.nc")
+                .toJobParameters();
+
+        JobExecution jobExecution = jobLauncherTestUtils.launchJob(jobParameters);
+
+        assertThat(jobExecution.getExitStatus().getExitCode(), is("COMPLETED"));
+        StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next();
+        assertThat(stepExecution.getReadCount(), is(43));
+        assertThat(stepExecution.getWriteCount(), is(43));
+        assertThat(stepExecution.getFilterCount(), is(0));
+
+        assertEqualsEventually(43L,
+                () -> solrTemplate.count(datasourceProperties.getSolrStore().getCollection(),
+                        new SimpleQuery("dataset_s: " + applicationProperties.getAddDatasetName().getDatasetName())),
+                3);
+
+        long cassandraCount = cassandraTemplate.count(datasourceProperties.getCassandraStore().getTableName());
+
+        assertThat(cassandraCount, is(43L));
+    }
+
+    @TestConfiguration
+    static class NingesterApplicationTestsConfig {
+
+        @Bean
+        JobLauncherTestUtils jobLauncherTestUtils() {
+            return new JobLauncherTestUtils();
+        }
+
+    }
+
+
+}
diff --git a/src/testJobs/resources/testjobs/SwotJobTest.yml b/src/testJobs/resources/testjobs/SwotJobTest.yml
new file mode 100644 (file)
index 0000000..b36d945
--- /dev/null
@@ -0,0 +1,65 @@
+# Tile Slicer Config
+ningester:
+  tile_slicer: sliceFileByStepSize
+  sliceFileByStepSize:
+    dimensionToStepSize:
+      time: 5832
+      rivid: 1
+
+---
+# Tile processors configuration
+ningester:
+  tile_processors:
+    - pythonChainProcessor
+    - generateTileId
+    - addDatasetName
+  pythonChainProcessor:
+    enabled:
+    processor_list:
+      -
+        name: TimeSeriesReadingProcessor
+        config:
+          latitude: lat
+          longitude: lon
+          time: time
+          variable_to_read: Qout
+          xarray: True
+      -
+        name: EmptyTileFilter
+      -
+        name: PromoteVariableToGlobalAttribute
+        config:
+          attribute_name: rivid_i
+          variable_name: rivid
+          dimensioned_by: [rivid]
+      -
+        name: TileSummarizingProcessor
+        config:
+          stored_var_name: Qout
+  generateTileId:
+    enabled:
+  addDatasetName:
+    enabled:
+    datasetName: RAPID_WSWM_SWOT
+---
+# Tile writer configuration
+ningester:
+  tile_writer:
+    data_store: cassandraStore
+    metadata_store: solrStore
+---
+# Connection settings for the test profile
+spring:
+  profiles:
+    - test
+    - local
+  data:
+    cassandra:
+      keyspaceName: nexustiles
+      contactPoints: 127.0.0.1
+    solr:
+      host: http://127.0.0.1:8983/solr/
+
+datasource:
+  solrStore:
+    collection: nexustiles
\ No newline at end of file