diff --git a/grails-app/controllers/au/org/ala/ecodata/SpatialController.groovy b/grails-app/controllers/au/org/ala/ecodata/SpatialController.groovy
new file mode 100644
index 000000000..972b8ca18
--- /dev/null
+++ b/grails-app/controllers/au/org/ala/ecodata/SpatialController.groovy
@@ -0,0 +1,120 @@
+package au.org.ala.ecodata
+
+import au.org.ala.ecodata.spatial.SpatialConversionUtils
+import au.org.ala.ecodata.spatial.SpatialUtils
+import org.apache.commons.fileupload.servlet.ServletFileUpload
+import org.apache.commons.io.IOUtils
+import org.apache.commons.lang3.tuple.Pair
+import org.locationtech.jts.geom.Geometry
+import org.springframework.web.multipart.MultipartFile
+
+import javax.servlet.http.HttpServletResponse
+
+@au.ala.org.ws.security.RequireApiKey(scopesFromProperty=["app.readScope"])
+class SpatialController {
+
+    static responseFormats = ['json', 'xml']
+    static allowedMethods = [uploadShapeFile: "POST", getShapeFileFeatureGeoJson: "GET"]
+
+    @au.ala.org.ws.security.RequireApiKey(scopesFromProperty=["app.writeScope"])
+    def uploadShapeFile() {
+        // Use linked hash map to maintain key ordering
+        Map retMap = new LinkedHashMap()
+
+        File tmpZipFile = File.createTempFile("shpUpload", ".zip")
+
+        if (ServletFileUpload.isMultipartContent(request)) {
+            // Parse the request
+            Map<String, MultipartFile> items = request.getFileMap()
+
+            if (items.size() == 1) {
+                MultipartFile fileItem = items.values()[0]
+                IOUtils.copy(fileItem.getInputStream(), new FileOutputStream(tmpZipFile))
+                retMap.putAll(handleZippedShapeFile(tmpZipFile))
+                response.setStatus(HttpServletResponse.SC_OK)
+            } else {
+                response.setStatus(HttpServletResponse.SC_BAD_REQUEST)
+                retMap.put("error", "Multiple files sent in request. A single zipped shape file should be supplied.")
+            }
+        }
+
+        respond retMap
+    }
+
+    @au.ala.org.ws.security.RequireApiKey(scopesFromProperty=["app.writeScope"])
+    def getShapeFileFeatureGeoJson() {
+        Map retMap
+        String shapeId = params.shapeFileId
+        String featureIndex = params.featureId
+        if (featureIndex != null && shapeId != null) {
+
+            retMap = processShapeFileFeatureRequest(shapeId, featureIndex)
+            if (retMap.geoJson == null) {
+                response.setStatus(HttpServletResponse.SC_BAD_REQUEST)
+            }
+            else {
+                response.setStatus(HttpServletResponse.SC_OK)
+            }
+        }
+        else {
+            response.setStatus(HttpServletResponse.SC_BAD_REQUEST)
+            retMap = ["error": "featureId and shapeFileId must be supplied"]
+        }
+
+        respond retMap
+    }
+
+    private Map processShapeFileFeatureRequest(String shapeFileId, String featureIndex) {
+        Map retMap = new HashMap()
+
+        try {
+            File shpFileDir = new File(System.getProperty("java.io.tmpdir"), shapeFileId)
+            Geometry geoJson = SpatialUtils.getShapeFileFeaturesAsGeometry(shpFileDir, featureIndex)
+
+            if (geoJson == null) {
+                retMap.put("error", "Invalid geometry")
+                return retMap
+            }
+            else {
+                if (geoJson.getCoordinates().flatten().size() > grailsApplication.config.getProperty("shapefile.simplify.threshhold", Integer, 50_000)) {
+                    geoJson = GeometryUtils.simplifyGeometry(geoJson, grailsApplication.config.getProperty("shapefile.simplify.tolerance", Double, 0.0001))
+                }
+
+                retMap.put("geoJson", GeometryUtils.geometryToGeoJsonMap(geoJson, grailsApplication.config.getProperty("shapefile.geojson.decimal", Integer, 20)))
+            }
+        } catch (Exception ex) {
+            log.error("Error processing shapefile feature request", ex)
+            retMap.put("error", ex.getMessage())
+        }
+
+        return retMap
+    }
+
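+    /**
+     * Unzips the uploaded shapefile into a temporary directory and builds the response map:
+     * the generated shp_id plus, keyed by feature index, a map of each feature's attributes.
+     */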
+    private static Map handleZippedShapeFile(File zippedShp) throws IOException {
+        // Use linked hash map to maintain key ordering
+        Map retMap = new LinkedHashMap()
+
+        Pair<String, File> idFilePair = SpatialConversionUtils.extractZippedShapeFile(zippedShp)
+        String uploadedShpId = idFilePair.getLeft()
+        File shpFile = idFilePair.getRight()
+
+        retMap.put("shp_id", uploadedShpId)
+
+        List<List<Pair<String, Object>>> manifestData = SpatialConversionUtils.getShapeFileManifest(shpFile)
+
+        int featureIndex = 0
+        for (List<Pair<String, Object>> featureData : manifestData) {
+            // Use linked hash map to maintain key ordering
+            Map featureDataMap = new LinkedHashMap()
+
+            for (Pair<String, Object> fieldData : featureData) {
+                featureDataMap.put(fieldData.getLeft(), fieldData.getRight())
+            }
+
+            retMap.put(featureIndex, featureDataMap)
+
+            featureIndex++
+        }
+
+        return retMap
+    }
+}
diff --git a/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy b/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy
index 4e5d91d5d..45a44e89c 100644
--- a/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy
+++ b/grails-app/controllers/au/org/ala/ecodata/UrlMappings.groovy
@@ -42,6 +42,9 @@ class UrlMappings {
 
         "/ws/output/getOutputSpeciesUUID/"(controller: "output"){ action = [GET:"getOutputSpeciesUUID"] }
 
+        "/ws/shapefile" (controller: "spatial"){ action = [POST:"uploadShapeFile"] }
+        "/ws/shapefile/geojson/$shapeFileId/$featureId"(controller: "spatial"){ action = [GET:"getShapeFileFeatureGeoJson"] }
+
         "/ws/activitiesForProject/$id" {
             controller = 'activity'
             action = 'activitiesForProject'
diff --git a/src/integration-test/groovy/au/org/ala/ecodata/SpatialControllerIntegrationSpec.groovy b/src/integration-test/groovy/au/org/ala/ecodata/SpatialControllerIntegrationSpec.groovy
new file mode 100644
index 000000000..1e17d86a7
--- /dev/null
+++ b/src/integration-test/groovy/au/org/ala/ecodata/SpatialControllerIntegrationSpec.groovy
@@ -0,0 +1,74 @@
+package au.org.ala.ecodata
+
+import grails.testing.mixin.integration.Integration
+import grails.util.GrailsWebMockUtil
+import groovy.json.JsonSlurper
+import org.apache.http.HttpStatus
+import org.grails.plugins.testing.GrailsMockHttpServletRequest
+import org.grails.plugins.testing.GrailsMockHttpServletResponse
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.mock.web.MockMultipartFile
+import org.springframework.web.context.WebApplicationContext
+import spock.lang.Specification
+
+@Integration
+class SpatialControllerIntegrationSpec extends Specification {
+
+    @Autowired
+    SpatialController spatialController
+
+    @Autowired
+    WebApplicationContext ctx
+
+    def setup() {
+        setRequestResponse()
+    }
+
+    def cleanup() {
+    }
+
+    def setRequestResponse() {
+        GrailsMockHttpServletRequest grailsMockHttpServletRequest = new GrailsMockHttpServletRequest()
+        GrailsMockHttpServletResponse grailsMockHttpServletResponse = new GrailsMockHttpServletResponse()
+        GrailsWebMockUtil.bindMockWebRequest(ctx, grailsMockHttpServletRequest, grailsMockHttpServletResponse)
+    }
+
+    void "test uploadShapeFile with resource zip file"() {
+        given:
+        // Read the zip file from resources
+        def zipFileResourceStream = spatialController.class.getResourceAsStream("/projectExtent.zip")
+        byte[] zipFileBytes = zipFileResourceStream.bytes
+
+        // Mock the request
+        MockMultipartFile mockMultipartFile = new MockMultipartFile("file", "projectExtent.zip", "application/zip", zipFileBytes)
+        spatialController.request.addFile(mockMultipartFile)
+        spatialController.request.method = 'POST'
+
+        when:
+        // Call the method
+        spatialController.uploadShapeFile()
+
+        then:
+        // Verify the response
+        spatialController.response.status == HttpStatus.SC_OK
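+        // The response is the shapefile manifest: the generated shp_id plus, per feature index, the feature's attributes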
+        println spatialController.response.contentAsString
+        def responseContent = new JsonSlurper().parseText(spatialController.response.contentAsString)
+        responseContent.shp_id != null
+        responseContent["0"].siteId == "340cfe6a-f230-4bb9-a034-23e9bff125c7"
+        responseContent["0"].name == "Project area for Southern Tablelands Koala Habitat Restoration Project"
+
+        when:
+        setRequestResponse()
+        spatialController.request.method = 'GET'
+        spatialController.params.shapeFileId = responseContent.shp_id
+        spatialController.params.featureId = "0"
+        spatialController.getShapeFileFeatureGeoJson()
+
+        then:
+        spatialController.response.status == HttpStatus.SC_OK
+        println spatialController.response.contentAsString
+        def responseJSON = new JsonSlurper().parseText(spatialController.response.contentAsString)
+        responseJSON.geoJson != null
+        responseJSON.geoJson.type == "MultiPolygon"
+    }
+}
\ No newline at end of file
diff --git a/src/integration-test/resources/projectExtent.zip b/src/integration-test/resources/projectExtent.zip
new file mode 100644
index 000000000..52e846f76
Binary files /dev/null and b/src/integration-test/resources/projectExtent.zip differ
diff --git a/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy b/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy
index bda514e80..1bb298f82 100644
--- a/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy
+++ b/src/main/groovy/au/org/ala/ecodata/GeometryUtils.groovy
@@ -247,11 +247,15 @@ class GeometryUtils {
         new GeometryJSON().read(json)
     }
 
-    static Map geometryToGeoJsonMap(Geometry input) {
-        ByteArrayOutputStream byteOut = new ByteArrayOutputStream()
-        new GeometryJSON().write(input, new OutputStreamWriter(byteOut, 'UTF-8'))
+    static Map geometryToGeoJsonMap(Geometry input, int decimals = 4) {
+        String geoJson = geometryToGeoJsonString(input, decimals)
+        JSON.parse(geoJson)
+    }
 
-        JSON.parse(byteOut.toString('UTF-8'))
+    static String geometryToGeoJsonString(Geometry input, int decimals = 4) {
+        ByteArrayOutputStream byteOut = new ByteArrayOutputStream()
+        new GeometryJSON(decimals).write(input, new OutputStreamWriter(byteOut, 'UTF-8'))
+        byteOut.toString('UTF-8')
     }
 
     /**
@@ -261,13 +265,15 @@ class GeometryUtils {
      * @return
     */
     static Map simplify(Map geoJson, double tolerance) {
-        Geometry input = geoJsonMapToGeometry(geoJson)
-
-        Geometry result = TopologyPreservingSimplifier.simplify(input, tolerance)
+        Geometry result = simplifyGeometry(geoJsonMapToGeometry(geoJson), tolerance)
 
         geometryToGeoJsonMap(result)
     }
 
+    static Geometry simplifyGeometry(Geometry input, double tolerance) {
+        TopologyPreservingSimplifier.simplify(input, tolerance)
+    }
+
 
     /**
      * Iterates through the supplied features and determines which features are neighbours using an
diff --git a/src/main/groovy/au/org/ala/ecodata/spatial/SpatialConversionUtils.groovy b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialConversionUtils.groovy
new file mode 100644
index 000000000..697c8e4b1
--- /dev/null
+++ b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialConversionUtils.groovy
@@ -0,0 +1,112 @@
+package au.org.ala.ecodata.spatial
+
+import com.google.common.io.Files
+import groovy.transform.CompileStatic
+import groovy.util.logging.Slf4j
+import org.apache.commons.io.IOUtils
+import org.apache.commons.lang3.tuple.Pair
+import org.geotools.data.FileDataStore
+import org.geotools.data.FileDataStoreFinder
+import org.geotools.data.simple.SimpleFeatureCollection
+import org.geotools.data.simple.SimpleFeatureIterator
+import org.geotools.data.simple.SimpleFeatureSource
+import org.opengis.feature.Property
+import org.opengis.feature.simple.SimpleFeature
+import org.opengis.feature.type.GeometryType
+
+import java.util.zip.ZipEntry
+import java.util.zip.ZipFile
+
+/**
+ * Utilities for converting spatial data between formats
+ *
+ * @author ChrisF
+ */
+@Slf4j
+@CompileStatic
+class SpatialConversionUtils {
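+    /**
+     * Extracts a zipped shapefile into a new temporary directory.
+     *
+     * @return a Pair of (shapefile id, i.e. the temp directory name) and (the extracted .shp file),
+     *         or null if the archive does not contain a .shp entry.
+     */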
+    static Pair<String, File> extractZippedShapeFile(File zippedShpFile) throws IOException {
+
+        File tempDir = Files.createTempDir()
+
+        // Unpack the zipped shape file into the temp directory
+        ZipFile zf = null
+        File shpFile = null
+        try {
+            zf = new ZipFile(zippedShpFile)
+
+            boolean shpPresent = false
+            boolean shxPresent = false
+            boolean dbfPresent = false
+
+            Enumeration<? extends ZipEntry> entries = zf.entries()
+
+            while (entries.hasMoreElements()) {
+                ZipEntry entry = entries.nextElement()
+                InputStream inStream = zf.getInputStream(entry)
+                File f = new File(tempDir, entry.getName())
+                if (!f.getName().startsWith(".")) {
+                    if (entry.isDirectory()) {
+                        f.mkdirs()
+                    } else {
+                        FileOutputStream outStream = new FileOutputStream(f)
+                        IOUtils.copy(inStream, outStream)
+
+                        if (entry.getName().endsWith(".shp")) {
+                            shpPresent = true
+                            shpFile = f
+                        } else if (entry.getName().endsWith(".shx") && !f.getName().startsWith("/")) {
+                            shxPresent = true
+                        } else if (entry.getName().endsWith(".dbf") && !f.getName().startsWith("/")) {
+                            dbfPresent = true
+                        }
+                    }
+                }
+            }
+
+            if (!shpPresent || !shxPresent || !dbfPresent) {
+                throw new IllegalArgumentException("Invalid archive. Must contain .shp, .shx and .dbf at a minimum.")
+            }
+        } catch (Exception e) {
+            log.error(e.getMessage(), e)
+        } finally {
+            if (zf != null) {
+                try {
+                    zf.close()
+                } catch (Exception e) {
+                    log.error(e.getMessage(), e)
+                }
+            }
+        }
+
+        if (shpFile == null) {
+            return null
+        } else {
+            return Pair.of(shpFile.getParentFile().getName(), shpFile)
+        }
+    }
+
+    /**
+     * Returns, for each feature in the shapefile, a list of its non-geometry attribute name/value pairs.
+     */
+    static List<List<Pair<String, Object>>> getShapeFileManifest(File shpFile) throws IOException {
+        List<List<Pair<String, Object>>> manifestData = new ArrayList<List<Pair<String, Object>>>()
+
+        FileDataStore store = FileDataStoreFinder.getDataStore(shpFile)
+
+        SimpleFeatureSource featureSource = store.getFeatureSource(store.getTypeNames()[0])
+        SimpleFeatureCollection featureCollection = featureSource.getFeatures()
+        SimpleFeatureIterator it = featureCollection.features()
+
+        while (it.hasNext()) {
+            SimpleFeature feature = it.next()
+            List<Pair<String, Object>> pairList = new ArrayList<Pair<String, Object>>()
+            for (Property prop : feature.getProperties()) {
+                if (!(prop.getType() instanceof GeometryType)) {
+                    Pair<String, Object> pair = Pair.of(prop.getName().toString(), feature.getAttribute(prop.getName()))
+                    pairList.add(pair)
+                }
+            }
+            manifestData.add(pairList)
+        }
+
+        return manifestData
+    }
+}
+
diff --git a/src/main/groovy/au/org/ala/ecodata/spatial/SpatialUtils.groovy b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialUtils.groovy
new file mode 100755
index 000000000..948b2290a
--- /dev/null
+++ b/src/main/groovy/au/org/ala/ecodata/spatial/SpatialUtils.groovy
@@ -0,0 +1,127 @@
+package au.org.ala.ecodata.spatial
+
+import groovy.transform.CompileStatic
+import groovy.util.logging.Slf4j
+import org.geotools.data.FileDataStore
+import org.geotools.data.FileDataStoreFinder
+import org.geotools.data.simple.SimpleFeatureCollection
+import org.geotools.data.simple.SimpleFeatureIterator
+import org.geotools.data.simple.SimpleFeatureSource
+import org.geotools.geometry.jts.JTS
+import org.geotools.geometry.jts.JTSFactoryFinder
+import org.geotools.referencing.CRS
+import org.geotools.referencing.crs.DefaultGeographicCRS
+import org.locationtech.jts.geom.Geometry
+import org.locationtech.jts.geom.GeometryCollection
+import org.locationtech.jts.geom.GeometryFactory
+import org.opengis.feature.simple.SimpleFeature
+import org.opengis.referencing.crs.CoordinateReferenceSystem
+
+@CompileStatic
+@Slf4j
+class SpatialUtils {
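+    /**
+     * Reads the .shp file in the supplied directory and returns the requested features
+     * (featureIndexes is a comma-separated list of feature indexes, or "all") merged into a
+     * single Geometry, transformed to WGS84 where the source CRS can be determined.
+     */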
+ */ +package au.org.ala.ecodata.spatial + + +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import org.geotools.data.FileDataStore +import org.geotools.data.FileDataStoreFinder +import org.geotools.data.simple.SimpleFeatureCollection +import org.geotools.data.simple.SimpleFeatureIterator +import org.geotools.data.simple.SimpleFeatureSource +import org.geotools.geometry.jts.JTS +import org.geotools.geometry.jts.JTSFactoryFinder +import org.geotools.referencing.CRS +import org.geotools.referencing.crs.DefaultGeographicCRS +import org.locationtech.jts.geom.Geometry +import org.locationtech.jts.geom.GeometryCollection +import org.locationtech.jts.geom.GeometryFactory +import org.opengis.feature.simple.SimpleFeature +import org.opengis.referencing.crs.CoordinateReferenceSystem + +@CompileStatic +@Slf4j +class SpatialUtils { + static Geometry getShapeFileFeaturesAsGeometry(File shpFileDir, String featureIndexes) throws IOException { + + if (!shpFileDir.exists() || !shpFileDir.isDirectory()) { + throw new IllegalArgumentException("Supplied directory does not exist or is not a directory") + } + + List geometries = new ArrayList() + FileDataStore store = null + SimpleFeatureIterator it = null + + try { + + File shpFile = null + for (File f : shpFileDir.listFiles()) { + if (f.getName().endsWith(".shp")) { + shpFile = f + break + } + } + + if (shpFile == null) { + throw new IllegalArgumentException("No .shp file present in directory") + } + + store = FileDataStoreFinder.getDataStore(shpFile) + + SimpleFeatureSource featureSource = store.getFeatureSource(store.getTypeNames()[0]) + SimpleFeatureCollection featureCollection = featureSource.getFeatures() + it = featureCollection.features() + + //transform CRS to the same as the shapefile (at least try) + //default to 4326 + CoordinateReferenceSystem crs = null + try { + crs = store.getSchema().getCoordinateReferenceSystem() + if (crs == null) { + //attempt to parse prj + try { + File prjFile = new File(shpFile.getPath().substring(0, shpFile.getPath().length() - 3) + "prj") + if (prjFile.exists()) { + String prj = prjFile.text + + if (prj == "PROJCS[\"WGS_1984_Web_Mercator_Auxiliary_Sphere\",GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]],PROJECTION[\"Mercator_Auxiliary_Sphere\"],PARAMETER[\"False_Easting\",0.0],PARAMETER[\"False_Northing\",0.0],PARAMETER[\"Central_Meridian\",0.0],PARAMETER[\"Standard_Parallel_1\",0.0],PARAMETER[\"Auxiliary_Sphere_Type\",0.0],UNIT[\"Meter\",1.0]]") { + //support for arcgis online default shp exports + crs = CRS.decode("EPSG:3857") + } else { + crs = CRS.parseWKT(prjFile.text) + } + } + } catch (Exception ignored) { + } + + if (crs == null) { + crs = DefaultGeographicCRS.WGS84 + } + } + } catch (Exception ignored) { + } + + int i = 0 + boolean all = "all".equalsIgnoreCase(featureIndexes) + def indexes = [] + if (!all) featureIndexes.split(",").each { indexes.push(it.toInteger()) } + while (it.hasNext()) { + SimpleFeature feature = (SimpleFeature) it.next() + if (all || indexes.contains(i)) { + geometries.add(feature.getDefaultGeometry() as Geometry) + } + i++ + } + + Geometry mergedGeometry + + if (geometries.size() == 1) { + mergedGeometry = geometries.get(0) + } else { + GeometryFactory factory = JTSFactoryFinder.getGeometryFactory(null) + GeometryCollection geometryCollection = (GeometryCollection) factory.buildGeometry(geometries) + + // note the following geometry collection may be 
+                // note the following geometry collection may be invalid (say with overlapping polygons)
+                mergedGeometry = geometryCollection.union()
+            }
+
+            try {
+                return JTS.transform(mergedGeometry, CRS.findMathTransform(crs, DefaultGeographicCRS.WGS84, true))
+            } catch (Exception ignored) {
+                return mergedGeometry
+            }
+        } catch (Exception e) {
+            throw e
+        } finally {
+            if (it != null) {
+                it.close()
+            }
+            if (store != null) {
+                store.dispose()
+            }
+        }
+    }
+}