diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml index 89b418c..ed92309 100644 --- a/.github/workflows/build-pipeline.yml +++ b/.github/workflows/build-pipeline.yml @@ -24,7 +24,7 @@ jobs: - uses: actions/setup-java@v2 with: distribution: 'adopt' - java-version: '8.0.232' + java-version: '11.0.6' - uses: gradle/gradle-build-action@v1 with: gradle-version: 8.0.1 diff --git a/CHANGELOG.md b/CHANGELOG.md index ec43a8b..25e586f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,15 +7,39 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] ### Added +- **PODAAC-5876** + - Update to use CMA 2.0.0, thus allowing 2.0.3 layer for lambda + - Update build to use java 11 ### Deprecated ### Removed ### Fixed -- **PODAAC-5291** - - Assure the UMMG RelatedUrls arry in the following order - - http/https scientific data - - other http/https files - - s3 scientific data - - other s3 files +### Security + +## [8.5.0] +### Added +- **PODAAC-5594** + - Support BasinID +- **PODAAC-5770** + - use meta.isoXMLSpatialType to configure the collection should process the combination of footprint, orbit and bbox +- **PODAAC-5717** + - Upgrade to UMMG 1.6.5 + - support empty Pass in Cycle/Pass/Tile string +### Deprecated +### Removed +### Fixed +- **PODAAC-5708** + - .nc.iso.xml Polygon divided over IDL +### Security + +- +## [8.4.0] +### Added +- Update metadata aggregator to add description to image variables from image processor and test tig forge processor +### Deprecated +### Removed +### Fixed +- **PODAAC-5614** + - Strip leading zeros from cycle and pass in validity check ### Security - Snyk: Security upgrade com.amazonaws:aws-java-sdk-s3 from 1.12.378 to 1.12.386 @@ -530,4 +554,4 @@ Note: This is the first release where we are consolidating DMAS changes with Cum ### Fixed -### Security \ No newline at end of file +### Security diff --git a/README.md b/README.md index 80175c3..5ef0f06 100644 --- a/README.md +++ b/README.md @@ -54,8 +54,9 @@ gradle -x test build ** -r --constructors-required-only : generate constructor for required field only ** -R remove old output ```aidl - jsonschema2pojo -s ./UMM-G1.6.3.json --target java-gen -p gov.nasa.cumulus.metadata.umm.model -a GSON -r -fdt true -R - jsonschema2pojo -s ./UMM-G1.6.3.json --target java-gen -p gov.nasa.cumulus.metadata.umm.model -a GSON -fdt + + jsonschema2pojo -s ./ummg165.json --target java-gen -p gov.nasa.cumulus.metadata.umm.generated -a GSON -fdt -r -R + jsonschema2pojo -s ./ummg165.json --target java-gen -p gov.nasa.cumulus.metadata.umm.generated -a GSON -fdt jsonshcema2pojo maven plugin is also configured within the pom.xml file mvn compile // call plugin goal to generate pojo classes diff --git a/VERSION b/VERSION index 2bf50aa..acd405b 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -8.3.0 +8.6.0 diff --git a/build.gradle b/build.gradle index cf5189b..fe36db4 100755 --- a/build.gradle +++ b/build.gradle @@ -1,6 +1,6 @@ apply plugin: 'java' -sourceCompatibility = 1.8 -targetCompatibility = 1.8 +sourceCompatibility = 11 +targetCompatibility = 11 dependencies { implementation fileTree(dir: 'target/dependency/', include: '*.jar') diff --git a/pom.xml b/pom.xml index 455cb99..8593937 100755 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,7 @@ 4.0.0 gov.nasa.podaac cumulus-metadata-aggregator - 8.3.0-alpha.6-SNAPSHOT + 8.6.0-alpha.1-SNAPSHOT jar Metadata-Aggregator @@ -22,8 +22,8 @@ TODO - 1.8 - 1.8 + 11 + 11 UTF-8 UTF-8 @@ -39,18 +39,18 @@ gov.nasa.earthdata 
cumulus-message-adapter - 1.3.9 + 2.0.0 com.amazonaws aws-java-sdk-s3 - 1.12.440 + 1.12.544 com.amazonaws aws-java-sdk-secretsmanager - 1.12.440 + 1.12.544 org.apache.httpcomponents @@ -133,6 +133,12 @@ 4.5.1 test + + + javax.xml.bind + jaxb-api + 2.3.1 + src/main/java @@ -172,7 +178,7 @@ org.jsonschema2pojo jsonschema2pojo-maven-plugin - 1.0.2 + 1.2.1 ${basedir}/src/main/resources/jsonschema src/main/java diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/CMRLambdaRestClient.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/CMRLambdaRestClient.java index 87f96c8..a059372 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/CMRLambdaRestClient.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/CMRLambdaRestClient.java @@ -337,6 +337,8 @@ public HttpResponse validateUMMG(String provider, String granuleId, String strU */ public boolean isUMMGSpatialValid(String provider, String granuleId, String strUMMG) throws URISyntaxException, IOException, ParseException { + AdapterLogger.LogInfo(this.className + " UMMG validation provider: "+ provider + " granuleId: " + granuleId + + " ummg: " + strUMMG); HttpResponse httpResponse = validateUMMG(provider, granuleId, strUMMG); int statusCode = httpResponse.getStatusLine().getStatusCode(); InputStream inputStream = httpResponse.getEntity().getContent(); diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/Constants.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/Constants.java index a23ac6e..7b74d13 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/Constants.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/Constants.java @@ -6,5 +6,5 @@ public static class Metadata{ public static final String START_ORBIT = "startorbit"; public static final String END_ORBIT = "endorbit"; } - public static final String UMMG_VERSION = "1.6.3"; + public static final String UMMG_VERSION = "1.6.5"; } diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoGranule.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoGranule.java index 44265c2..a827a46 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoGranule.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoGranule.java @@ -21,6 +21,7 @@ public class IsoGranule extends UMMGranule { private String PGEVersionClass; private IsoType isoType; + private int orientation; public IsoGranule() { this.identifiers = new HashMap<>(); @@ -107,6 +108,14 @@ public void setPolygon(String polygon) { this.polygon = polygon; } + public int getOrientation() { + return this.orientation; + } + + public void setOrientation(int orientation) { + this.orientation = orientation; + } + public void addIdentifier(String name, String value) { this.identifiers.put(name, value); } diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoMendsXPath.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoMendsXPath.java index 2149a33..65dcea0 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoMendsXPath.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/IsoMendsXPath.java @@ -34,6 +34,7 @@ public final class IsoMendsXPath extends IsoXPath { // AscendingCrossing, StartLatitude, StartDirection, EndLatitude, EndDirection public static final String ORBIT = "/gmi:MI_Metadata/gmd:identificationInfo/gmd:MD_DataIdentification/gmd:extent/gmd:EX_Extent/gmd:geographicElement/gmd:EX_GeographicDescription[@id=\"Orbit\"]/gmd:geographicIdentifier/gmd:MD_Identifier/gmd:code/gco:CharacterString"; + 
/** list of coordinates representing footprint */ public static final String GRANULE_INPUT = "/gmi:MI_Metadata/gmd:dataQualityInfo/gmd:DQ_DataQuality/gmd:lineage/gmd:LI_Lineage/gmd:source/gmi:LE_Source[gmd:description/gco:CharacterString[text()=\"GranuleInput\"]]/gmd:sourceCitation/gmd:CI_Citation/gmd:title/gmx:FileName"; public static final String CYCLE_PASS_TILE_SCENE = "/gmi:MI_Metadata/gmd:identificationInfo/gmd:MD_DataIdentification/gmd:extent/gmd:EX_Extent/gmd:geographicElement/gmd:EX_GeographicDescription[@id=\"SWOTTrack\"]/gmd:geographicIdentifier/gmd:MD_Identifier/gmd:code/gco:CharacterString"; diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataAggregatorLambda.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataAggregatorLambda.java index 52263b2..97d24cd 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataAggregatorLambda.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataAggregatorLambda.java @@ -11,12 +11,14 @@ import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; import java.util.ArrayList; +import java.util.HashSet; import java.util.Hashtable; import java.util.List; import gov.nasa.cumulus.metadata.aggregator.processor.DMRPPProcessor; import gov.nasa.cumulus.metadata.aggregator.processor.FootprintProcessor; import gov.nasa.cumulus.metadata.aggregator.processor.ImageProcessor; +import gov.nasa.cumulus.metadata.state.MENDsIsoXMLSpatialTypeEnum; import gov.nasa.cumulus.metadata.state.WorkflowTypeEnum; import gov.nasa.cumulus.metadata.util.S3Utils; import org.apache.commons.io.FileUtils; @@ -55,6 +57,10 @@ public String PerformFunction(String input, Context context) throws Exception { * this will help the logic in postIngestProcess function. 
*/ this.setWorkFlowType((String) config.get("stateMachine")); + // This is a switch to determine, shall footprint, orbit or boundingbox shall be processed from iso.xml + // while ingesting swot collections + JSONArray isoXMLSpatialTypeJsonArray = (JSONArray) config.get("isoXMLSpatialType"); + HashSet isoXMLSpatialTypeHashSet = createIsoXMLSpatialTypeSet(isoXMLSpatialTypeJsonArray); String isoRegex = (String) config.get("isoRegex"); @@ -149,7 +155,9 @@ public String PerformFunction(String input, Context context) throws Exception { MetadataFilesToEcho mtfe; boolean isIsoFile = (iso != null); - mtfe = new MetadataFilesToEcho(isIsoFile); + mtfe = new MetadataFilesToEcho(isIsoFile, isoXMLSpatialTypeHashSet); + //set the name/granuleId + mtfe.getGranule().setName(granuleId); mtfe.setDatasetValues(collectionName, collectionVersion, rangeIs360, boundingBox, additionalAttributes); if (granules != null && granules.size() > 0) { mtfe.setGranuleFileSizeAndChecksum(granules); @@ -182,10 +190,6 @@ public String PerformFunction(String input, Context context) throws Exception { } } - //set the name - mtfe.getGranule().setName(granuleId); - - //write UMM-G to file try { mtfe.writeJson("/tmp/" + granuleId + ".cmr.json"); @@ -234,6 +238,33 @@ public String PerformFunction(String input, Context context) throws Exception { return returnable.toJSONString(); } + public HashSet createIsoXMLSpatialTypeSet(JSONArray isoXMLSpatialTypeConfigJSONArray) throws IllegalArgumentException{ + HashSet isoSpatialTypes = new HashSet<>(); + // if not containing isoXMLTypes, then return an empty HashSet + if(isoXMLSpatialTypeConfigJSONArray == null || isoXMLSpatialTypeConfigJSONArray.size()==0) { + return isoSpatialTypes; + } + isoXMLSpatialTypeConfigJSONArray.forEach(item -> { + String t = (String) item; + MENDsIsoXMLSpatialTypeEnum en = MENDsIsoXMLSpatialTypeEnum.getEnum(getIsoXMLSpatialTypeStr(t)); + isoSpatialTypes.add(en); + }); + AdapterLogger.LogDebug(this.className + " isoSpatialTypes HashSet: " + isoSpatialTypes); + return isoSpatialTypes; + } + + public String getIsoXMLSpatialTypeStr(String token) { + final String trimmedToken = StringUtils.trim(token); + String s; + try { + s = MENDsIsoXMLSpatialTypeEnum.getEnumValuList().stream() + .filter(e -> StringUtils.equals(trimmedToken, e)).findFirst().get(); + } catch (java.util.NoSuchElementException e) { + s = ""; + } + return s; + } + /** * get S3 fileStaging direction from S3 full key * diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataFilesToEcho.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataFilesToEcho.java index 92e8dff..b1a0a59 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataFilesToEcho.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/MetadataFilesToEcho.java @@ -10,6 +10,7 @@ import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import javax.xml.bind.DatatypeConverter; import javax.xml.parsers.DocumentBuilder; @@ -18,11 +19,13 @@ import javax.xml.xpath.*; import gov.nasa.cumulus.metadata.aggregator.factory.UmmgPojoFactory; +import gov.nasa.cumulus.metadata.state.MENDsIsoXMLSpatialTypeEnum; import gov.nasa.cumulus.metadata.umm.generated.AdditionalAttributeType; import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType; import gov.nasa.cumulus.metadata.umm.generated.TrackType; import gov.nasa.cumulus.metadata.util.BoundingTools; import gov.nasa.cumulus.metadata.util.JSONUtils; +import 
gov.nasa.cumulus.metadata.util.MENDsISOXmlUtiils; import gov.nasa.podaac.inventory.model.Granule; import gov.nasa.podaac.inventory.model.GranuleCharacter; import gov.nasa.podaac.inventory.model.DatasetElement; @@ -51,6 +54,9 @@ import gov.nasa.podaac.inventory.api.Constant.GranuleArchiveType; import gov.nasa.cumulus.metadata.umm.model.UMMGranuleArchive; import org.xml.sax.SAXException; + +import com.vividsolutions.jts.algorithm.CGAlgorithms; + import cumulus_message_adapter.message_parser.AdapterLogger; public class MetadataFilesToEcho { @@ -62,6 +68,7 @@ public class MetadataFilesToEcho { boolean isIsoFile = false; JSONObject additionalAttributes = null; UmmgPojoFactory ummgPojoFactory = UmmgPojoFactory.getInstance(); + HashSet isoXMLSpatialTypeEnumHashSet = new HashSet<>(); public MetadataFilesToEcho() { this(false); @@ -75,6 +82,15 @@ public MetadataFilesToEcho(boolean isIso) { this.granule = new UMMGranule(); } + public MetadataFilesToEcho(boolean isIso, HashSet inputIsoXMLSpatialTypeHashSet) { + this.isIsoFile = isIso; + if (isIsoFile) + this.granule = new IsoGranule(); + else + this.granule = new UMMGranule(); + this.isoXMLSpatialTypeEnumHashSet = inputIsoXMLSpatialTypeHashSet; + } + //this method reads the configuration file (per dataset) sent to this class (.cfg) public void readConfiguration(String file) throws IOException, ParseException { JSONParser parser = new JSONParser(); @@ -260,14 +276,9 @@ public void readCommonMetadataFile(String file, String s3Location) throws IOExce public void setGranuleFileSizeAndChecksum(JSONArray input_granules) { JSONArray files = (JSONArray)((JSONObject)input_granules.get(0)).get("files"); - + AdapterLogger.LogDebug(this.className + " setGranuleFileSizeAndChecksum files[]:" + files); for(Object f: files){ JSONObject file = (JSONObject)f; - AdapterLogger.LogDebug(this.className + " UMM-G GranuleArchive filename:" + (String)file.get("fileName")); - AdapterLogger.LogDebug(this.className + " UMM-G GranuleArchive filesize:" + ((Double) file.get("size")).longValue()); - AdapterLogger.LogDebug(this.className + " UMM-G GranuleArchive checksum:" + (String)file.get("checksum")); - AdapterLogger.LogDebug(this.className + " UMM-G GranuleArchive checksumType:" + (String)file.get("checksumType")); - AdapterLogger.LogDebug(this.className + " UMM-G GranuleArchive type:" + (String)file.get("type")); UMMGranuleArchive uga = new UMMGranuleArchive(); uga.setName((String)file.get("fileName")); uga.setFileSize(((Double) file.get("size")).longValue()); @@ -411,97 +422,96 @@ private void parseRequiredFields(Document doc, XPath xpath, IsoType iso) throws } } - public IsoGranule readIsoMendsMetadataFile(String s3Location, Document doc, XPath xpath) throws XPathExpressionException { - - if (xpath.evaluate(IsoMendsXPath.NORTH_BOUNDING_COORDINATE, doc) != "") { - setGranuleBoundingBox( - Double.parseDouble(xpath.evaluate(IsoMendsXPath.NORTH_BOUNDING_COORDINATE, doc)), - Double.parseDouble(xpath.evaluate(IsoMendsXPath.SOUTH_BOUNDING_COORDINATE, doc)), - Double.parseDouble(xpath.evaluate(IsoMendsXPath.EAST_BOUNDING_COORDINATE, doc)), - Double.parseDouble(xpath.evaluate(IsoMendsXPath.WEST_BOUNDING_COORDINATE, doc))); - } - ((IsoGranule) granule).setPolygon(xpath.evaluate(IsoMendsXPath.POLYGON, doc)); + public IsoGranule readIsoMendsMetadataFile(String s3Location, Document doc, XPath xpath) throws XPathExpressionException { + if (MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.NORTH_BOUNDING_COORDINATE, "IsoMendsXPath.NORTH_BOUNDING_COORDINATE")!= 
"") { + setGranuleBoundingBox( + Double.parseDouble(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.NORTH_BOUNDING_COORDINATE, "IsoMendsXPath.NORTH_BOUNDING_COORDINATE")), + Double.parseDouble(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.SOUTH_BOUNDING_COORDINATE, "IsoMendsXPath.SOUTH_BOUNDING_COORDINATE")), + Double.parseDouble(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.EAST_BOUNDING_COORDINATE, "IsoMendsXPath.EAST_BOUNDING_COORDINATE")), + Double.parseDouble(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.WEST_BOUNDING_COORDINATE, "IsoMendsXPath.WEST_BOUNDING_COORDINATE"))); + } + ((IsoGranule) granule).setPolygon(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.POLYGON, "IsoMendsXPath.POLYGON")); + ((IsoGranule) granule).setOrientation(CGAlgorithms.COUNTERCLOCKWISE); + NodeList nodes = (NodeList) xpath.evaluate(IsoMendsXPath.DATA_FILE, doc, XPathConstants.NODESET); + for (int i = 0; i < nodes.getLength(); i++) { + Element dataFile = (Element) nodes.item(i); + + String description = xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_DESCRIPTION, dataFile); + Pattern p = Pattern.compile("Size:\\s(.*)\\sSizeUnit:\\s(.*)\\sChecksumValue:\\s(.*)\\sChecksumAlgorithm:\\s(.*)\\sDescription:\\s(.*)"); + Matcher m = p.matcher(description); + if (m.find()) { + String type = m.group(5); + if (type.equals("Science data file") || type.equals("ISO/Archive metadata file") + || type.equals("Quicklook Image of the Science data file")) { + String fileFormat = xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_FORMAT, dataFile); + if (type.equals("Science data file")) { + granule.setDataFormat(fileFormat); + } - NodeList nodes = (NodeList) xpath.evaluate(IsoMendsXPath.DATA_FILE, doc, XPathConstants.NODESET); - for (int i = 0; i < nodes.getLength(); i++) { - Element dataFile = (Element) nodes.item(i); + IsoGranuleArchive ga = new IsoGranuleArchive(); + ga.setType(fileFormat); + ga.setFileSize(Long.parseLong(m.group(1))); + ga.setSizeUnit(m.group(2)); + ga.setName(xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_NAME, dataFile)); + ga.setMimeType(xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_MIME_TYPE, dataFile)); + ga.setChecksum(m.group(3)); + ga.setChecksumAlgorithm(m.group(4)); + granule.add(ga); + } + } + } - String description = xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_DESCRIPTION, dataFile); - Pattern p = Pattern.compile("Size:\\s(.*)\\sSizeUnit:\\s(.*)\\sChecksumValue:\\s(.*)\\sChecksumAlgorithm:\\s(.*)\\sDescription:\\s(.*)"); - Matcher m = p.matcher(description); - if (m.find()) { - String type = m.group(5); - if (type.equals("Science data file") || type.equals("ISO/Archive metadata file") - || type.equals("Quicklook Image of the Science data file")) { - String fileFormat = xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_FORMAT, dataFile); - if (type.equals("Science data file")) { - granule.setDataFormat(fileFormat); - } - - IsoGranuleArchive ga = new IsoGranuleArchive(); - ga.setType(fileFormat); - ga.setFileSize(Long.parseLong(m.group(1))); - ga.setSizeUnit(m.group(2)); - ga.setName(xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_NAME, dataFile)); - ga.setMimeType(xpath.evaluate(IsoMendsXPath.DATA_FILE_FILE_MIME_TYPE, dataFile)); - ga.setChecksum(m.group(3)); - ga.setChecksumAlgorithm(m.group(4)); - granule.add(ga); - } - } - } + ((IsoGranule) granule).setProducerGranuleId(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, 
IsoMendsXPath.PRODUCER_GRANULE_ID, "IsoMendsXPath.PRODUCER_GRANULE_ID")); + ((IsoGranule) granule).setCrid(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.CRID, "IsoMendsXPath.CRID")); - ((IsoGranule) granule).setProducerGranuleId(xpath.evaluate(IsoMendsXPath.PRODUCER_GRANULE_ID, doc)); - ((IsoGranule) granule).setCrid(xpath.evaluate(IsoMendsXPath.CRID, doc)); + NodeList identifiers = (NodeList) xpath.evaluate(IsoMendsXPath.IDENTIFIERS, doc, XPathConstants.NODESET); + for (int i = 0; i < identifiers.getLength(); i++) { + Element identifier = (Element) identifiers.item(i); + String identifierDescription = xpath.evaluate(IsoMendsXPath.IDENTIFIER_DESCRIPTION, identifier); + ((IsoGranule) granule).addIdentifier(identifierDescription.substring(identifierDescription.indexOf(" ") + 1), xpath.evaluate(IsoMendsXPath.IDENTIFIER_CODE, identifier)); + } - NodeList identifiers = (NodeList) xpath.evaluate(IsoMendsXPath.IDENTIFIERS, doc, XPathConstants.NODESET); - for (int i = 0; i < identifiers.getLength(); i++) { - Element identifier = (Element) identifiers.item(i); - String identifierDescription = xpath.evaluate(IsoMendsXPath.IDENTIFIER_DESCRIPTION, identifier); - ((IsoGranule) granule).addIdentifier(identifierDescription.substring(identifierDescription.indexOf(" ") + 1), xpath.evaluate(IsoMendsXPath.IDENTIFIER_CODE, identifier)); - } + String reprocessingPlanned = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.REPROCESSING_PLANNED, "IsoMendsXPath.REPROCESSING_PLANNED"); + ((IsoGranule) granule).setReprocessingPlanned(reprocessingPlanned.substring(reprocessingPlanned.indexOf(" ") + 1)); - String reprocessingPlanned = xpath.evaluate(IsoMendsXPath.REPROCESSING_PLANNED, doc); - ((IsoGranule) granule).setReprocessingPlanned(reprocessingPlanned.substring(reprocessingPlanned.indexOf(" ") + 1)); + String reprocessingActual = xpath.evaluate(IsoMendsXPath.REPROCESSING_ACTUAL, doc); + ((IsoGranule) granule).setReprocessingActual(reprocessingActual.substring(reprocessingActual.indexOf(" ") + 1)); - String reprocessingActual = xpath.evaluate(IsoMendsXPath.REPROCESSING_ACTUAL, doc); - ((IsoGranule) granule).setReprocessingActual(reprocessingActual.substring(reprocessingActual.indexOf(" ") + 1)); + ((IsoGranule) granule).setParameterName(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.PARAMETER_NAME, "IsoMendsXPath.PARAMETER_NAME")); + String qaPercentMissingData = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.QA_PERCENT_MISSING_DATA, "IsoMendsXPath.QA_PERCENT_MISSING_DATA"); + if (qaPercentMissingData != "" && BoundingTools.isParseable(qaPercentMissingData)) { + ((IsoGranule) granule).setQAPercentMissingData(Double.parseDouble(qaPercentMissingData)); + } + String qaPercentOutOfBoundsData = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.QA_PERCENT_OUT_OF_BOUNDS_DATA, "IsoMendsXPath.QA_PERCENT_OUT_OF_BOUNDS_DATA"); + if (qaPercentOutOfBoundsData != "" && BoundingTools.isParseable(qaPercentOutOfBoundsData)) { + ((IsoGranule) granule).setQAPercentOutOfBoundsData(Double.parseDouble(qaPercentOutOfBoundsData)); + } - ((IsoGranule) granule).setParameterName(xpath.evaluate(IsoMendsXPath.PARAMETER_NAME, doc)); - String qaPercentMissingData = xpath.evaluate(IsoMendsXPath.QA_PERCENT_MISSING_DATA, doc); - if (qaPercentMissingData != "" && BoundingTools.isParseable(qaPercentMissingData)) { - ((IsoGranule) 
granule).setQAPercentMissingData(Double.parseDouble(qaPercentMissingData)); - } - String qaPercentOutOfBoundsData = xpath.evaluate(IsoMendsXPath.QA_PERCENT_OUT_OF_BOUNDS_DATA, doc); - if (qaPercentOutOfBoundsData != "" && BoundingTools.isParseable(qaPercentOutOfBoundsData)) { - ((IsoGranule) granule).setQAPercentOutOfBoundsData(Double.parseDouble(qaPercentOutOfBoundsData)); - } + ((IsoGranule) granule).setOrbit(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.ORBIT, "IsoMendsXPath.ORBIT")); + ((IsoGranule) granule).setSwotTrack(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.SWOT_TRACK, "IsoMendsXPath.SWOT_TRACK")); - ((IsoGranule) granule).setOrbit(xpath.evaluate(IsoMendsXPath.ORBIT, doc)); - ((IsoGranule) granule).setSwotTrack(xpath.evaluate(IsoMendsXPath.SWOT_TRACK, doc)); + Source source = new Source(); + source.setSourceShortName(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.PLATFORM, "IsoMendsXPath.PLATFORM")); - Source source = new Source(); - source.setSourceShortName(xpath.evaluate(IsoMendsXPath.PLATFORM, doc)); + Sensor sensor = new Sensor(); + sensor.setSensorShortName(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.INSTRUMENT, "IsoMendsXPath.INSTRUMENT")); - Sensor sensor = new Sensor(); - sensor.setSensorShortName(xpath.evaluate(IsoMendsXPath.INSTRUMENT, doc)); + DatasetSource datasetSource = new DatasetSource(); + DatasetSource.DatasetSourcePK datasetSourcePK = new DatasetSource.DatasetSourcePK(); + datasetSourcePK.setSource(source); + datasetSourcePK.setSensor(sensor); + datasetSource.setDatasetSourcePK(datasetSourcePK); - DatasetSource datasetSource = new DatasetSource(); - DatasetSource.DatasetSourcePK datasetSourcePK = new DatasetSource.DatasetSourcePK(); - datasetSourcePK.setSource(source); - datasetSourcePK.setSensor(sensor); - datasetSource.setDatasetSourcePK(datasetSourcePK); + dataset.add(datasetSource); - dataset.add(datasetSource); - - NodeList inputGranules = (NodeList) xpath.evaluate(IsoMendsXPath.GRANULE_INPUT, doc, XPathConstants.NODESET); - for (int i = 0; i < inputGranules.getLength(); i++) { - ((IsoGranule) granule).addInputGranule(inputGranules.item(i).getTextContent().trim()); - } + NodeList inputGranules = (NodeList) xpath.evaluate(IsoMendsXPath.GRANULE_INPUT, doc, XPathConstants.NODESET); + for (int i = 0; i < inputGranules.getLength(); i++) { + ((IsoGranule) granule).addInputGranule(inputGranules.item(i).getTextContent().trim()); + } - ((IsoGranule) granule).setPGEVersionClass(xpath.evaluate(IsoMendsXPath.PGE_VERSION_CLASS, doc)); + ((IsoGranule) granule).setPGEVersionClass(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.PGE_VERSION_CLASS, "IsoMendsXPath.PGE_VERSION_CLASS")); // Process ISO cycle, pass and tile - String cyclePassTileSceneStr =StringUtils.trim(xpath.evaluate(IsoMendsXPath.CYCLE_PASS_TILE_SCENE, doc)); + String cyclePassTileSceneStr =StringUtils.trim(MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.CYCLE_PASS_TILE_SCENE, "IsoMendsXPath.CYCLE_PASS_TILE_SCENE")); if(!StringUtils.isBlank(cyclePassTileSceneStr)) { try { createIsoCyclePassTile(cyclePassTileSceneStr); @@ -530,29 +540,30 @@ public IsoGranule readIsoMendsMetadataFile(String s3Location, Document doc, XPat additionalAttributes.remove("publishAll"); ((IsoGranule) granule).setDynamicAttributeNameMapping(additionalAttributes); } - - - String mgrsId = xpath.evaluate(IsoMendsXPath.MGRS_ID, doc); - if (mgrsId 
!= null && !mgrsId.equals("")) { - // If MGRS_ID field is not null, set as additional attribute - AdditionalAttributeType mgrsAttr = new AdditionalAttributeType("MGRS_TILE_ID", Collections.singletonList(mgrsId)); - - List additionalAttributeTypes = ((IsoGranule) granule).getAdditionalAttributeTypes(); - if (additionalAttributeTypes == null) { - additionalAttributeTypes = Collections.singletonList(mgrsAttr); - } else { - additionalAttributeTypes.add(mgrsAttr); - } - - JSONObject dynamicAttributeNameMapping = ((IsoGranule) granule).getDynamicAttributeNameMapping(); - if (dynamicAttributeNameMapping == null) { - ((IsoGranule) granule).setDynamicAttributeNameMapping(additionalAttributes); - } else { - dynamicAttributeNameMapping.put("MGRS_TILE_ID", Collections.singletonList(mgrsId)); - } - ((IsoGranule) granule).setAdditionalAttributeTypes(additionalAttributeTypes); - ((IsoGranule) granule).setDynamicAttributeNameMapping(dynamicAttributeNameMapping); - } + + + String mgrsId = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc, xpath, IsoMendsXPath.MGRS_ID, "IsoMendsXPath.MGRS_ID"); + if (mgrsId != null && !mgrsId.equals("")) { + // If MGRS_ID field is not null, set as additional attribute + AdditionalAttributeType mgrsAttr = new AdditionalAttributeType(); + mgrsAttr.setName("MGRS_TILE_ID");mgrsAttr.setValues( Collections.singletonList(mgrsId)); + + List additionalAttributeTypes = ((IsoGranule) granule).getAdditionalAttributeTypes(); + if (additionalAttributeTypes == null) { + additionalAttributeTypes = Collections.singletonList(mgrsAttr); + } else { + additionalAttributeTypes.add(mgrsAttr); + } + + JSONObject dynamicAttributeNameMapping = ((IsoGranule) granule).getDynamicAttributeNameMapping(); + if (dynamicAttributeNameMapping == null) { + ((IsoGranule) granule).setDynamicAttributeNameMapping(additionalAttributes); + } else { + dynamicAttributeNameMapping.put("MGRS_TILE_ID", Collections.singletonList(mgrsId)); + } + ((IsoGranule) granule).setAdditionalAttributeTypes(additionalAttributeTypes); + ((IsoGranule) granule).setDynamicAttributeNameMapping(dynamicAttributeNameMapping); + } return ((IsoGranule) granule); } @@ -661,8 +672,11 @@ public IsoGranule createIsoCyclePassTile(String cyclePassTileStr) { * This block of code supports multiple cycles. In theory, during cycle transition, it is possible * a granule consists 2 cycles. However, UMMG json schema does support one at the time. */ + ArrayList basinIdStrs = new ArrayList<>(); for(String cps : cyclePassStrs) { try { + // Extract BasinID and the store into IsoGranule object + ((IsoGranule)granule).setBasinIds(getBasinIds(cps)); trackType = createTrackType(cps, p_cycle); } catch (Exception e) { AdapterLogger.LogError(this.className + " Creating TrackType with exception: " + UMMUtils.getStackTraceAsString(e)); @@ -671,16 +685,34 @@ public IsoGranule createIsoCyclePassTile(String cyclePassTileStr) { UmmgPojoFactory ummgPojoFactory = UmmgPojoFactory.getInstance(); additionalAttributeTypes= ummgPojoFactory.trackTypeToAdditionalAttributeTypes(trackType); + // Join array lists + additionalAttributeTypes.addAll( + ummgPojoFactory.basinIdsToAdditionalAttributeTypes(((IsoGranule)granule).getBasinIds())); + } // It is possible after all the above processing, cycle is present but passes is not (no pass in passes array) // That is, we shall NOT create trackType at all. 
Otherwise, CMR will throw validation error - if (trackType.getCycle()!=null && trackType.getPasses()!=null && trackType.getPasses().size() >0) { + // from UMMG Schema 1.6.5, TrackType contains Cycle and Passes and ONLY Cycle is required + if (trackType.getCycle()!=null) { ((IsoGranule) granule).setTrackType(trackType); ((IsoGranule) granule).setAdditionalAttributeTypes(additionalAttributeTypes); } return (IsoGranule)granule; } + public List getBasinIds(String cyclePassStr) { + Pattern p_basinId = Pattern.compile("\\s*BASINID\\s*:\\s*\\d+\\s*?"); + Matcher m_basinId = p_basinId.matcher(cyclePassStr); + ArrayListbasinIdStrs = new ArrayList<>(); + while(m_basinId.find()) { + String basinIdStr = m_basinId.group(); + AdapterLogger.LogInfo("BasinId:" + basinIdStr); + String tokens[] = basinIdStr.split(":"); + basinIdStrs.add(StringUtils.trim(tokens[1])); + } + return basinIdStrs; + } + public TrackType createTrackType(String cyclePassTileStr, Pattern p_cycle) { Matcher m_cycle = p_cycle.matcher(cyclePassTileStr); String cycleStr=null; @@ -712,16 +744,18 @@ public TrackType createTrackType(String cyclePassTileStr, Pattern p_cycle) { StringUtils.replace(passTilesStr,"[",""),"]",""); passTilesStr = passTilesStr.replaceAll("TILES\\s*:\\s*?", ""); String[] passTiles = StringUtils.split(passTilesStr, ","); - String passStr = StringUtils.trim(passTiles[0]); - trackPassTileType.setPass(NumberUtils.createInteger(UMMUtils.removeStrLeadingZeros(passStr))); - try { - List tiles = getTiles(StringUtils.trim(passTiles[1])); - trackPassTileType.setTiles(tiles); - } catch (Exception e) { - AdapterLogger.LogWarning(this.className + " Continue processing after tile processing failed with " + - "exception: " + UMMUtils.getStackTraceAsString(e)); + if(!StringUtils.isEmpty(passTilesStr) && passTiles.length >0) { + String passStr = StringUtils.trim(passTiles[0]); + trackPassTileType.setPass(NumberUtils.createInteger(UMMUtils.removeStrLeadingZeros(passStr))); + try { + List tiles = getTiles(StringUtils.trim(passTiles[1])); + trackPassTileType.setTiles(tiles); + } catch (Exception e) { + AdapterLogger.LogWarning(this.className + " Continue processing after tile processing failed with " + + "exception: " + UMMUtils.getStackTraceAsString(e)); + } + trackPassTileTypes.add(trackPassTileType); } - trackPassTileTypes.add(trackPassTileType); } trackType.setPasses(trackPassTileTypes); return trackType; @@ -788,6 +822,7 @@ private void readIsoSmapMetadataFile(String s3Location, Document doc, XPath xpat ((IsoGranule) granule).setSwotTrack(xpath.evaluate(IsoSmapXPath.SWOT_TRACK, doc)); ((IsoGranule) granule).setPolygon(xpath.evaluate(IsoSmapXPath.POLYGON, doc)); + ((IsoGranule) granule).setOrientation(CGAlgorithms.CLOCKWISE); Source source = new Source(); source.setSourceShortName(xpath.evaluate(IsoSmapXPath.PLATFORM, doc)); @@ -829,7 +864,7 @@ public void readSwotArchiveXmlFile(String file) throws ParserConfigurationExcept // No spatial extent exists for SWOT L0 data so set as global setGranuleBoundingBox(90.0, -90.0, 180.0, -180.0); } - + /** * Parse metadata from SWOT Cal/Val XML file * @param file path to SWOT Cal/Val XML file on local file system @@ -840,19 +875,19 @@ public void readSwotCalValXmlFile(String file) throws ParserConfigurationExcepti docBuilderFactory.setNamespaceAware(true); DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder(); Document doc = docBuilder.parse(new File(file)); - + XPath xpath = XPathFactory.newInstance().newXPath(); xpath.setNamespaceContext(new NamespaceResolver(doc)); - + String 
startTime = xpath.evaluate(SwotCalValXmlPath.BEGINNING_DATE_TIME, doc); String stopTime = xpath.evaluate(SwotCalValXmlPath.ENDING_DATE_TIME, doc); String createTime = xpath.evaluate(SwotCalValXmlPath.CREATION_DATE_TIME, doc); - + String north = xpath.evaluate(SwotCalValXmlPath.NORTH_BOUNDING_COORDINATE, doc); String south = xpath.evaluate(SwotCalValXmlPath.SOUTH_BOUNDING_COORDINATE, doc); String east = xpath.evaluate(SwotCalValXmlPath.EAST_BOUNDING_COORDINATE, doc); String west = xpath.evaluate(SwotCalValXmlPath.WEST_BOUNDING_COORDINATE, doc); - + try { granule.setStartTime(DatatypeConverter.parseDateTime(startTime).getTime()); granule.setStopTime(DatatypeConverter.parseDateTime(stopTime).getTime()); @@ -861,7 +896,7 @@ public void readSwotCalValXmlFile(String file) throws ParserConfigurationExcepti throw new IllegalArgumentException(String.format("Failed to parse datetime start=%s stop=%s create=%s", startTime, stopTime, createTime), exception); } - + try { setGranuleBoundingBox(Double.parseDouble(north), Double.parseDouble(south), @@ -916,8 +951,8 @@ public TrackType createTrackType(String cycleStr, String passStr, String tileStr on the other hand, any exceptions caused by cycle or pass should be thrown all the way up and break the ingestion */ try { - if (NumberUtils.createInteger(StringUtils.trim(cycleStr)) == null || - NumberUtils.createInteger(StringUtils.trim(passStr)) == null) { + if (NumberUtils.createInteger(UMMUtils.removeStrLeadingZeros(StringUtils.trim(cycleStr))) == null || + NumberUtils.createInteger(UMMUtils.removeStrLeadingZeros(StringUtils.trim(passStr))) == null) { return null; } } catch(NumberFormatException nfe) { // if either cycle or pass are un-processable, then return null @@ -988,6 +1023,7 @@ public void readSentinelManifest(String file) throws ParserConfigurationExceptio } } catch (XPathExpressionException e) { // Ignore if unable to parse for footprint since it isn't required for ingest + AdapterLogger.LogWarning(this.className + " Not able to extract footprint from SentinelManifest: " + e); } String cycle = StringUtils.trim(xpath.evaluate(ManifestXPath.CYCLE, doc)); @@ -1042,7 +1078,7 @@ private void setGranuleBoundingBox(double north, double south, double east, doub public JSONObject createJson() throws ParseException, IOException, URISyntaxException { granule.setIngestTime(new Date()); - UMMGranuleFile granuleFile = new UMMGranuleFile(granule, dataset, rangeIs360); + UMMGranuleFile granuleFile = new UMMGranuleFile(granule, dataset, rangeIs360, this.isoXMLSpatialTypeEnumHashSet); JSONObject granuleJson = granuleFile.defineGranule(); return granuleJson; } @@ -1051,7 +1087,6 @@ public void writeJson(String outputLocation) throws IOException, ParseException, URISyntaxException{ JSONObject granuleJson = createJson(); JSONUtils.cleanJSON(granuleJson); - granuleJson = JSONUtils.sortRelatedUrls(granuleJson); FileUtils.writeStringToFile(new File(outputLocation), granuleJson.toJSONString()); } diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranule.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranule.java index bf43d6f..77e0d14 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranule.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranule.java @@ -17,6 +17,7 @@ public class UMMGranule extends Granule { /** * A generated type which represents the pass and associate Tiles under Track */ + private List basinIds; private List additionalAttributeTypes; private Integer orbitNumber; private Integer 
startOrbit; @@ -70,6 +71,14 @@ public void setTrackType(TrackType trackType) { this.trackType = trackType; } + public List getBasinIds() { + return basinIds; + } + + public void setBasinIds(List basinIds) { + this.basinIds = basinIds; + } + public List getAdditionalAttributeTypes() { return additionalAttributeTypes; } diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranuleFile.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranuleFile.java index 8056e9b..7a78994 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranuleFile.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/UMMGranuleFile.java @@ -4,6 +4,7 @@ import com.vividsolutions.jts.geom.*; import com.vividsolutions.jts.algorithm.CGAlgorithms; import cumulus_message_adapter.message_parser.AdapterLogger; +import gov.nasa.cumulus.metadata.state.MENDsIsoXMLSpatialTypeEnum; import gov.nasa.cumulus.metadata.umm.adapter.UMMGCollectionAdapter; import gov.nasa.cumulus.metadata.umm.adapter.UMMGListAdapter; import gov.nasa.cumulus.metadata.umm.adapter.UMMGMapAdapter; @@ -56,11 +57,18 @@ public class UMMGranuleFile { * if processed the footprint through S6 manifest xml's tag */ boolean isLineFormattedPolygon = false; + HashSet isoXMLSpatialTypeEnumHashSet = new HashSet<>(); public UMMGranuleFile(Granule granule, Dataset dataset, boolean rangeIs360) { + this(granule, dataset,rangeIs360, new HashSet() ); + } + + public UMMGranuleFile(Granule granule, Dataset dataset, boolean rangeIs360, + HashSet inputIsoXMLSpatialTypeEnumHashSet) { this.granule = granule; this.dataset = dataset; this.rangeIs360 = rangeIs360; + this.isoXMLSpatialTypeEnumHashSet =inputIsoXMLSpatialTypeEnumHashSet; } public JSONObject defineGranule() @@ -200,6 +208,7 @@ public JSONObject defineGranule() * Only when having gone through S6A Line to Polygon processing, then call UMMGPostProcessing. */ if(this.isLineFormattedPolygon) { + AdapterLogger.LogInfo(this.className + " Start post processing of UMMG by posting UMMG to CMR. 
If failed, put GBBox into UMMG"); granuleJson = UMMGPostProcessing(granuleJson); } @@ -396,10 +405,19 @@ private JSONObject exportTemporal() { JSONObject range = new JSONObject(); range.put("BeginningDateTime", TimeConversion.convertDate(granule.getStartTime()).toString()); range.put("EndingDateTime", TimeConversion.convertDate(granule.getStopTime()).toString()); + temporal.put("RangeDateTime", range); return temporal; } + /** + * PODAAC-4713 + * SMAP collection going through LP DAAC should: + * create GPolygon of posList appears in iso.xml + * UMMG should not include bounding box if GPolygon appeared under SpatialExtent + * @param granule + * @return + */ private boolean shouldAddBBx(Granule granule) { boolean shouldAddBBx = false; if(granule !=null && granule instanceof gov.nasa.cumulus.metadata.aggregator.UMMGranule) { @@ -420,6 +438,7 @@ private JSONObject exportSpatial() throws ParseException{ JSONObject geometry = new JSONObject(); JSONObject horizontalSpatialDomain = new JSONObject(); Boolean foundOrbitalData = false; + boolean isoBBoxAlreadyProcessed = false; spatialExtent.put("HorizontalSpatialDomain", horizontalSpatialDomain); if (granule instanceof IsoGranule) { @@ -431,21 +450,42 @@ private JSONObject exportSpatial() throws ParseException{ polygonCoordinatesArrayList.add(polygonCoordinates); addPolygons(geometry, polygonCoordinatesArrayList, true); } - // Export Orbit - // Commented out for now since UMM v1.5 only allows for either Geometry or Orbit not both - JSONObject orbit = new JSONObject(); - horizontalSpatialDomain.put("Orbit", orbit); - Pattern p = Pattern.compile("AscendingCrossing:\\s?(.*)\\s?StartLatitude:\\s?(.*)\\s?StartDirection:\\s?(.*)\\s?EndLatitude:\\s?(.*)\\s?EndDirection:\\s?(.*)"); - Matcher m = p.matcher(((IsoGranule) granule).getOrbit()); - foundOrbitalData = m.find(); - if (foundOrbitalData && BoundingTools.allParsable(m.group(1), m.group(2), m.group(4))) { - orbit.put("AscendingCrossing", UMMUtils.longitudeTypeNormalizer(Double.parseDouble(m.group(1)))); - orbit.put("StartLatitude", Double.parseDouble(m.group(2))); - orbit.put("StartDirection", m.group(3).trim()); - orbit.put("EndLatitude", Double.parseDouble(m.group(4))); - orbit.put("EndDirection", m.group(5).trim()); + /** + * Export Footprint, Orbit or Bounding Box + * UMM v1.5 only allows for either Geometry or Orbit not both. Only process orbit if the orbitString stored + * (during MetatdataFilesToEcho.readIsoxxxx()) is not empty or null + */ + if(this.isoXMLSpatialTypeEnumHashSet.contains(MENDsIsoXMLSpatialTypeEnum.FOOTPRINT)) { + AdapterLogger.LogDebug(this.className + "UMMGranuleFile.exportSpatial FOOTPRINT Processing"); + String polygon = ((IsoGranule) granule).getPolygon(); + AdapterLogger.LogInfo(this.className + " nc.iso.xml footprint processing ... 
"); + this.isLineFormattedPolygon = true; + geometry = line2Polygons(geometry,polygon); + } + if(this.isoXMLSpatialTypeEnumHashSet.contains(MENDsIsoXMLSpatialTypeEnum.ORBIT)) { + AdapterLogger.LogDebug(this.className + "UMMGranuleFile.exportSpatial ORBIT Processing"); + String orbitStr = ((IsoGranule) granule).getOrbit(); + if (!StringUtils.isEmpty(orbitStr)) { + JSONObject orbit = new JSONObject(); + horizontalSpatialDomain.put("Orbit", orbit); + Pattern p = Pattern.compile("AscendingCrossing:\\s?(.*)\\s?StartLatitude:\\s?(.*)\\s?StartDirection:\\s?(.*)\\s?EndLatitude:\\s?(.*)\\s?EndDirection:\\s?(.*)"); + Matcher m = p.matcher(orbitStr); + foundOrbitalData = m.find(); + if (foundOrbitalData && BoundingTools.allParsable(m.group(1), m.group(2), m.group(4))) { + orbit.put("AscendingCrossing", UMMUtils.longitudeTypeNormalizer(Double.parseDouble(m.group(1)))); + orbit.put("StartLatitude", Double.parseDouble(m.group(2))); + orbit.put("StartDirection", m.group(3).trim()); + orbit.put("EndLatitude", Double.parseDouble(m.group(4))); + orbit.put("EndDirection", m.group(5).trim()); + } + } + } + if(this.isoXMLSpatialTypeEnumHashSet.contains(MENDsIsoXMLSpatialTypeEnum.BBOX)) { + // Extract the stored IsoGranule bounding box and put into SpatialExtent + AdapterLogger.LogDebug(this.className + "UMMGranuleFile.exportSpatial BBOX Processing"); + isoBBoxAlreadyProcessed = true; + horizontalSpatialDomain = this.appendBoundingRectangles(geometry, horizontalSpatialDomain); } - // Export track if (((IsoGranule) granule).getSwotTrack() != "") { JSONObject track = new JSONObject(); @@ -476,10 +516,11 @@ private JSONObject exportSpatial() throws ParseException{ } } } - } + } // end of processing IsoGranule // We can only include orbital or bounding-box data, not both - if (foundOrbitalData == false) { + // if iso Bounding Box already processed in logic above, then don't enter this block + if (foundOrbitalData == false && !isoBBoxAlreadyProcessed) { horizontalSpatialDomain.put("Geometry", geometry); @@ -519,7 +560,7 @@ private JSONObject exportSpatial() throws ParseException{ // first, check to see if any of the spatial values are bad/invalid if (BoundingTools.coordsInvalid(north, south, east, west)) { - log.warn("Bounding coordinates invalid: \'North: " + north + + AdapterLogger.LogWarning("Bounding coordinates invalid: \'North: " + north + ", \'South: " + south + ", \'West: " + west + ", \'East: " + east + @@ -587,12 +628,14 @@ private JSONObject exportSpatial() throws ParseException{ horizontalSpatialDomain.put("Track", createUMMGTrack((UMMGranule) granule)); } } - - // Export footprint if it exists - + // this block of code process Sentinal6 iso.xml polygon which is stored within + // granule.getGranuleCharacterSet().add(createGranuleCharacter(line,"line")) + // This is NOT swot iso.xml which stores everything within IsoGranule pojo Set granuleCharacters = granule.getGranuleCharacterSet(); for (GranuleCharacter granuleCharacter : granuleCharacters) { if (granuleCharacter.getDatasetElement().getElementDD().getShortName().equals("line")) { + AdapterLogger.LogInfo(this.className + " Start processing line2Polygons : " + granuleCharacter.getValue() ); + this.isLineFormattedPolygon = true; geometry = line2Polygons(geometry,granuleCharacter.getValue()); break; } @@ -600,6 +643,30 @@ private JSONObject exportSpatial() throws ParseException{ return spatialExtent; } + public JSONObject appendBoundingRectangles (JSONObject geometry, JSONObject horizontalSpatialDomain) { + double north = 0, south = 0, east = 0, west = 
0; + east = ((IsoGranule) granule).getBbxEasternLongitude() != null ? + ((IsoGranule) granule).getBbxEasternLongitude() : 0; + west = ((IsoGranule) granule).getBbxWesternLongitude() != null? + ((IsoGranule) granule).getBbxWesternLongitude() : 0; + north = ((IsoGranule) granule).getBbxNorthernLatitude() != null? + ((IsoGranule) granule).getBbxNorthernLatitude() : 0; + south = ((IsoGranule) granule).getBbxSouthernLatitude() != null? + ((IsoGranule) granule).getBbxSouthernLatitude() : 0; + if(BoundingTools.coordsInvalid(north, south, east, west)) { + west = -180.0; + east = -179.0; + north = -89.0; + south = -90.0; + } + horizontalSpatialDomain.put("Geometry", geometry); + JSONArray boundingRectangles = new JSONArray(); + geometry.put("BoundingRectangles", boundingRectangles); + boundingRectangles.add(createBoundingBoxJson(new BigDecimal(north), new BigDecimal(south), + new BigDecimal(east), new BigDecimal(west))); + return horizontalSpatialDomain; + } + public JSONObject createUMMGTrack(UMMGranule ummGranule) throws ParseException { Gson gsonBuilder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation() .registerTypeHierarchyAdapter(Collection.class, new UMMGCollectionAdapter()) @@ -711,6 +778,9 @@ public JSONObject addPolygons(JSONObject geometry, ArrayList geo = inputPolygons.get(i); @@ -719,9 +789,16 @@ public JSONObject addPolygons(JSONObject geometry, ArrayList counterClockwiseCoordinates = Arrays.asList( - UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, geo.toArray(new Coordinate[geo.size()])) - ); + + Coordinate[] coordinates_array = geo.toArray(new Coordinate[geo.size()]); + List orientedCoordinates; + if (orientation != desired_orientation) { + orientedCoordinates = Arrays.asList( + UMMUtils.ensureOrientation(desired_orientation, orientation, coordinates_array) + ); + } else { + orientedCoordinates = Arrays.asList(coordinates_array); + } // valid polygon by vividsolution again if(polygon.isValid() || invalidOK) { @@ -730,9 +807,9 @@ public JSONObject addPolygons(JSONObject geometry, ArrayList coordinates) { * @return an array of coordinates in the desired counterclockwise or clockwise sequence */ public static Coordinate[] ensureOrientation( - final int desiredOrientation, final Coordinate... coord) { + final int desiredOrientation, int inputOrientation, final Coordinate... coord) { if (coord.length == 0) { return coord; } - final int orientation = CGAlgorithms.isCCW(coord) ? CGAlgorithms.COUNTERCLOCKWISE + if (inputOrientation != CGAlgorithms.COUNTERCLOCKWISE && inputOrientation != CGAlgorithms.CLOCKWISE) { + inputOrientation = CGAlgorithms.isCCW(coord) ? 
CGAlgorithms.COUNTERCLOCKWISE : CGAlgorithms.CLOCKWISE; - - if (orientation != desiredOrientation) { + } + if (inputOrientation != desiredOrientation) { final Coordinate[] reverse = coord.clone(); reverse(reverse); diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/factory/UmmgPojoFactory.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/factory/UmmgPojoFactory.java index 2ca4091..de4e2c8 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/factory/UmmgPojoFactory.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/factory/UmmgPojoFactory.java @@ -1,5 +1,6 @@ package gov.nasa.cumulus.metadata.aggregator.factory; +import gov.nasa.cumulus.metadata.aggregator.IsoGranule; import gov.nasa.cumulus.metadata.umm.generated.AdditionalAttributeType; import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType; import gov.nasa.cumulus.metadata.umm.generated.TrackType; @@ -47,7 +48,8 @@ public TrackType createTrackType(Integer cycle, List trackPas return trackType; } - public List trackTypeToAdditionalAttributeTypes(TrackType trackType) { + public List trackTypeToAdditionalAttributeTypes( + TrackType trackType) { List trackPassTileTypes = trackType.getPasses(); trackPassTileTypes = trackPassTileTypes.stream().filter(trackPassTileType -> trackPassTileType.getPass()!=null @@ -63,4 +65,17 @@ public List trackTypeToAdditionalAttributeTypes(TrackTy }); return additionalAttributeTypes; } + + public List basinIdsToAdditionalAttributeTypes( + List basinIdStrs) { + ArrayList additionalAttributeTypes = new ArrayList<>(); + if(basinIdStrs!=null && basinIdStrs.size() >0) { + AdditionalAttributeType additionalAttributeType = new AdditionalAttributeType(); + additionalAttributeType.setName("BasinID"); + additionalAttributeType.setValues(basinIdStrs); + additionalAttributeTypes.add(additionalAttributeType); + } + return (additionalAttributeTypes); + + } } diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/FootprintProcessor.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/FootprintProcessor.java index c584b01..b83776d 100755 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/FootprintProcessor.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/FootprintProcessor.java @@ -341,8 +341,8 @@ public ExclusiveZoneType getExclusiveZones(Geometry geometry) { boundary.setPoints(points); boundaries.add(boundary); } - - ExclusiveZoneType exclusiveZoneType = new ExclusiveZoneType(boundaries); + ExclusiveZoneType exclusiveZoneType = new ExclusiveZoneType(); + exclusiveZoneType.setBoundaries(boundaries); return exclusiveZoneType; } diff --git a/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/ImageProcessor.java b/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/ImageProcessor.java index a01bf85..5fa57f2 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/ImageProcessor.java +++ b/src/main/java/gov/nasa/cumulus/metadata/aggregator/processor/ImageProcessor.java @@ -3,7 +3,6 @@ import com.google.gson.*; import cumulus_message_adapter.message_parser.AdapterLogger; import gov.nasa.cumulus.metadata.umm.generated.RelatedUrlType; -import gov.nasa.cumulus.metadata.util.JSONUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.client.utils.URIBuilder; @@ -11,6 +10,8 @@ import java.math.BigInteger; import java.net.URISyntaxException; import java.nio.file.Paths; +import java.util.Iterator; + public class ImageProcessor extends ProcessorBase{ private 
final String className = this.getClass().getName(); @@ -34,7 +35,7 @@ public String process(String input, String ummgStr, String region, String revisi this.region = region; decodeVariables(input); this.workingDir = createWorkDir(); - String newCMRStr = appendImageUrls(input, ummgStr); + String newCMRStr = appendImageUrl(input, ummgStr); String cmrFileName = buildCMRFileName(this.granuleId, this.executionId); long cmrFileSize = uploadCMRJson(cmrBucket, cmrDir, this.collectionName, cmrFileName, newCMRStr); @@ -54,15 +55,16 @@ public String process(String input, String ummgStr, String region, String revisi } } - public String appendImageUrls(String input, String cmrString) + public String appendImageUrl(String input, String cmrString) throws IOException, URISyntaxException { try { Gson gsonBuilder = getGsonBuilder(); JsonObject cmrJsonObj = JsonParser.parseString(cmrString).getAsJsonObject(); JsonArray relatedUrls = cmrJsonObj.getAsJsonArray("RelatedUrls"); - // If cmrJson does not include relatedURLs jsonArray, then create one and then attached to cmrJsonObj + // If cmrJson does not included relatedURLs jsonArray, then create one and then attached to cmrJsonObj if (relatedUrls == null) { relatedUrls = new JsonArray(); + cmrJsonObj.add("RelatedUrls", relatedUrls); } JsonObject inputJsonObj = JsonParser.parseString(input).getAsJsonObject(); @@ -74,46 +76,33 @@ public String appendImageUrls(String input, String cmrString) files = granule.get("files").getAsJsonArray(); JsonArray files = granule.get("files").getAsJsonArray(); - /* At this point, the RelatedUrls array has been sorted with - http/https scientific data - other http files - s3 scientific data - other s3 file - */ - // first split relatedUrls to 2 arrays, one including http/https resources and - // the other array contain items which are NOT http/https resources - JsonArray httpArray = new JsonArray(); - JsonArray otherItemsArray = new JsonArray(); - String[] httpStrs = {"http", "https"}; - relatedUrls.forEach(e -> { - if(JSONUtils.isStrStarsWithIgnoreCase(e.getAsJsonObject().get("URL").getAsString(), httpStrs)) { - httpArray.add(e); - } else { - otherItemsArray.add(e); - } - }); + for (JsonElement f : files) { JsonObject fileObj = f.getAsJsonObject(); String filename = StringUtils.trim(f.getAsJsonObject().get("fileName").getAsString()); if(isImageFile(filename)) { + String downloadUrl = getImageDownloadUrl(distribution_endpoint, fileObj.get("bucket").getAsString(), fileObj.get("key").getAsString()); - if(!isDownloadUrlAlreadyExist(relatedUrls, downloadUrl)) { - RelatedUrlType relatedUrlType = new RelatedUrlType(); - relatedUrlType.setUrl(downloadUrl); - relatedUrlType.setType(RelatedUrlType.RelatedUrlTypeEnum.GET_RELATED_VISUALIZATION); - relatedUrlType.setSubtype(RelatedUrlType.RelatedUrlSubTypeEnum.DIRECT_DOWNLOAD); - relatedUrlType.setMimeType(getImageMimeType(filename)); - - String relatedUrlTypeStr = gsonBuilder.toJson(relatedUrlType); - // append image Url to the end of the http array which has all http/https resources - httpArray.add(JsonParser.parseString(relatedUrlTypeStr)); + + // remove the related url if it already exists in related urls + removeExistingUrls(relatedUrls, downloadUrl); + + // add related url for the new images in case something has changed + RelatedUrlType relatedUrlType = new RelatedUrlType(); + relatedUrlType.setUrl(downloadUrl); + relatedUrlType.setType(RelatedUrlType.RelatedUrlTypeEnum.GET_RELATED_VISUALIZATION); + relatedUrlType.setSubtype(RelatedUrlType.RelatedUrlSubTypeEnum.DIRECT_DOWNLOAD); + 
relatedUrlType.setMimeType(getImageMimeType(filename)); + + if(fileObj.has("description")){ + relatedUrlType.setDescription(fileObj.get("description").getAsString()); } + + String relatedUrlTypeStr = gsonBuilder.toJson(relatedUrlType); + relatedUrls.add(JsonParser.parseString(relatedUrlTypeStr)); } } - // now add all elements from the other array into the end of the httpArray - otherItemsArray.forEach(e -> httpArray.add(e)); // append otherItmesArray on the end of httpArray - cmrJsonObj.add("RelatedUrls", httpArray); String newCMRStr = gsonBuilder.toJson(cmrJsonObj); return newCMRStr; } catch (URISyntaxException ipe) { @@ -127,11 +116,23 @@ public boolean isDownloadUrlAlreadyExist(JsonArray relatedUrls, String downloadU downloadUrl = StringUtils.trim(downloadUrl); for (JsonElement relatdUrl : relatedUrls) { String umg_downloadUrl = StringUtils.trim(relatdUrl.getAsJsonObject().get("URL").getAsString()); - if(StringUtils.compare(umg_downloadUrl, downloadUrl) ==0) return true; + if(StringUtils.compare(umg_downloadUrl, downloadUrl) == 0) return true; } return false; } + public void removeExistingUrls(JsonArray relatedUrls, String downloadUrl) { + downloadUrl = StringUtils.trim(downloadUrl); + Iterator iterator = relatedUrls.iterator(); + while (iterator.hasNext()) { + JsonElement relatedUrlElement = iterator.next(); + String umg_downloadUrl = StringUtils.trim(relatedUrlElement.getAsJsonObject().get("URL").getAsString()); + if (StringUtils.compareIgnoreCase(umg_downloadUrl, downloadUrl) == 0) { + iterator.remove(); + } + } + } + public boolean isImageFile(String filename) { return StringUtils.endsWith(filename, ".jpg") || StringUtils.endsWith(filename, ".png") || diff --git a/src/main/java/gov/nasa/cumulus/metadata/state/MENDsIsoXMLSpatialTypeEnum.java b/src/main/java/gov/nasa/cumulus/metadata/state/MENDsIsoXMLSpatialTypeEnum.java new file mode 100644 index 0000000..cd68a93 --- /dev/null +++ b/src/main/java/gov/nasa/cumulus/metadata/state/MENDsIsoXMLSpatialTypeEnum.java @@ -0,0 +1,46 @@ +package gov.nasa.cumulus.metadata.state; + +import org.apache.commons.lang3.StringUtils; + +import java.util.Arrays; +import java.util.List; + +public enum MENDsIsoXMLSpatialTypeEnum { + + FOOTPRINT("footprint"), ORBIT("orbit"), BBOX("bbox"), NONE("none"); + private String Status; + private static String FOOTPRINT_STR="footprint"; + private static String ORBIT_STR="orbit"; + private static String BBOX_STR="bbox"; + private MENDsIsoXMLSpatialTypeEnum(String type) { + this.Status = type; + } + + public String toString() + { + return this.Status; + } + + public static List getEnumValuList() { + final List isoXMLSpatialTypeList = Arrays.asList(FOOTPRINT_STR, BBOX_STR, ORBIT_STR); + return isoXMLSpatialTypeList; + } + + public static MENDsIsoXMLSpatialTypeEnum getEnum(String val) + { + val= StringUtils.trim(val); + if(StringUtils.equals(val, FOOTPRINT_STR)) + return FOOTPRINT; + else if (StringUtils.equals(val, ORBIT_STR)) + return ORBIT; + else if (StringUtils.equals(val, BBOX_STR)) + return BBOX; + else + return NONE; + } + + public String getID() + { + return this.Status; + } +} diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/AdditionalAttributeType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/AdditionalAttributeType.java index 03331b2..f3d942c 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/AdditionalAttributeType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/AdditionalAttributeType.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import 
java.util.List; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * A reference to an additional attribute in the parent collection. The attribute reference may contain a granule specific value that will override the value in the parent collection for this granule. An attribute with the same name must exist in the parent collection. * */ +@Generated("jsonschema2pojo") public class AdditionalAttributeType { /** @@ -30,24 +32,6 @@ public class AdditionalAttributeType { @Expose private List values = new ArrayList(); - /** - * No args constructor for use in serialization - * - */ - public AdditionalAttributeType() { - } - - /** - * - * @param values - * @param name - */ - public AdditionalAttributeType(String name, List values) { - super(); - this.name = name; - this.values = values; - } - /** * The additional attribute's name. * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundaryType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundaryType.java index acbe0f5..6bf0573 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundaryType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundaryType.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * A boundary is set of points connected by straight lines representing a polygon on the earth. It takes a minimum of three points to make a boundary. Points must be specified in counter-clockwise order and closed (the first and last vertices are the same). * */ +@Generated("jsonschema2pojo") public class BoundaryType { /** @@ -22,22 +24,6 @@ public class BoundaryType { @Expose private List points = new ArrayList(); - /** - * No args constructor for use in serialization - * - */ - public BoundaryType() { - } - - /** - * - * @param points - */ - public BoundaryType(List points) { - super(); - this.points = points; - } - /** * * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundingRectangleType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundingRectangleType.java index 5a11216..dae33ad 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundingRectangleType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/BoundingRectangleType.java @@ -1,6 +1,7 @@ package gov.nasa.cumulus.metadata.umm.generated; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -9,6 +10,7 @@ * This entity holds the horizontal spatial coverage of a bounding box. 
* */ +@Generated("jsonschema2pojo") public class BoundingRectangleType { /** @@ -44,28 +46,6 @@ public class BoundingRectangleType { @Expose private Double southBoundingCoordinate; - /** - * No args constructor for use in serialization - * - */ - public BoundingRectangleType() { - } - - /** - * - * @param eastBoundingCoordinate - * @param northBoundingCoordinate - * @param southBoundingCoordinate - * @param westBoundingCoordinate - */ - public BoundingRectangleType(Double westBoundingCoordinate, Double northBoundingCoordinate, Double eastBoundingCoordinate, Double southBoundingCoordinate) { - super(); - this.westBoundingCoordinate = westBoundingCoordinate; - this.northBoundingCoordinate = northBoundingCoordinate; - this.eastBoundingCoordinate = eastBoundingCoordinate; - this.southBoundingCoordinate = southBoundingCoordinate; - } - /** * The longitude value of a spatially referenced point, in degrees. Longitude values range from -180 to 180. * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/ExclusiveZoneType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/ExclusiveZoneType.java index 4f4f595..6cbc703 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/ExclusiveZoneType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/ExclusiveZoneType.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * Contains the excluded boundaries from the GPolygon. * */ +@Generated("jsonschema2pojo") public class ExclusiveZoneType { /** @@ -22,22 +24,6 @@ public class ExclusiveZoneType { @Expose private List boundaries = new ArrayList(); - /** - * No args constructor for use in serialization - * - */ - public ExclusiveZoneType() { - } - - /** - * - * @param boundaries - */ - public ExclusiveZoneType(List boundaries) { - super(); - this.boundaries = boundaries; - } - /** * * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GPolygonType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GPolygonType.java index c80c932..b6b2d91 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GPolygonType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GPolygonType.java @@ -1,6 +1,7 @@ package gov.nasa.cumulus.metadata.umm.generated; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -9,6 +10,7 @@ * A GPolygon specifies an area on the earth represented by a main boundary with optional boundaries for regions excluded from the main boundary. * */ +@Generated("jsonschema2pojo") public class GPolygonType { /** @@ -27,24 +29,6 @@ public class GPolygonType { @Expose private ExclusiveZoneType exclusiveZone; - /** - * No args constructor for use in serialization - * - */ - public GPolygonType() { - } - - /** - * - * @param boundary - * @param exclusiveZone - */ - public GPolygonType(BoundaryType boundary, ExclusiveZoneType exclusiveZone) { - super(); - this.boundary = boundary; - this.exclusiveZone = exclusiveZone; - } - /** * A boundary is set of points connected by straight lines representing a polygon on the earth. It takes a minimum of three points to make a boundary. Points must be specified in counter-clockwise order and closed (the first and last vertices are the same). 
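The regenerated UMM-G POJOs above (and in the hunks that follow) gain the @Generated("jsonschema2pojo") marker and lose their hand-written no-args and all-args constructors, so callers rely on the implicit default constructor and setters while Gson populates the @Expose/@SerializedName fields reflectively. A small sketch under those assumptions (setter names follow the usual jsonschema2pojo pattern; the printed JSON shape is indicative only):

```java
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import gov.nasa.cumulus.metadata.umm.generated.BoundingRectangleType;

// With the explicit constructors removed, the implicit no-args constructor plus
// setters are used to build the POJO; Gson serializes the @Expose'd fields under
// their @SerializedName keys.
public class BoundingRectangleSketch {
    public static void main(String[] args) {
        BoundingRectangleType bbox = new BoundingRectangleType();
        bbox.setWestBoundingCoordinate(-180.0);
        bbox.setNorthBoundingCoordinate(90.0);
        bbox.setEastBoundingCoordinate(180.0);
        bbox.setSouthBoundingCoordinate(-90.0);

        Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
        // Expected shape (keys per the schema): {"WestBoundingCoordinate":-180.0, ...}
        System.out.println(gson.toJson(bbox));
    }
}
```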
* (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GeometryType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GeometryType.java index 0ff2cd2..80f2c66 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GeometryType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/GeometryType.java @@ -3,6 +3,7 @@ import java.util.LinkedHashSet; import java.util.Set; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This entity holds the geometry representing the spatial coverage information of a granule. * */ +@Generated("jsonschema2pojo") public class GeometryType { /** @@ -42,28 +44,6 @@ public class GeometryType { @Expose private Set lines = new LinkedHashSet(); - /** - * No args constructor for use in serialization - * - */ - public GeometryType() { - } - - /** - * - * @param boundingRectangles - * @param lines - * @param points - * @param gPolygons - */ - public GeometryType(Set points, Set boundingRectangles, Set gPolygons, Set lines) { - super(); - this.points = points; - this.boundingRectangles = boundingRectangles; - this.gPolygons = gPolygons; - this.lines = lines; - } - /** * The horizontal spatial coverage of a point. * diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/HorizontalSpatialDomainType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/HorizontalSpatialDomainType.java index 279c1e2..9c5c7c5 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/HorizontalSpatialDomainType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/HorizontalSpatialDomainType.java @@ -1,6 +1,7 @@ package gov.nasa.cumulus.metadata.umm.generated; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -9,6 +10,7 @@ * Information about a granule with horizontal spatial coverage. * */ +@Generated("jsonschema2pojo") public class HorizontalSpatialDomainType { /** @@ -40,28 +42,6 @@ public class HorizontalSpatialDomainType { @Expose private TrackType track; - /** - * No args constructor for use in serialization - * - */ - public HorizontalSpatialDomainType() { - } - - /** - * - * @param zoneIdentifier - * @param geometry - * @param orbit - * @param track - */ - public HorizontalSpatialDomainType(String zoneIdentifier, GeometryType geometry, OrbitType orbit, TrackType track) { - super(); - this.zoneIdentifier = zoneIdentifier; - this.geometry = geometry; - this.orbit = orbit; - this.track = track; - } - /** * The appropriate numeric or alpha code used to identify the various zones in the granule's grid coordinate system. * diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/LineType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/LineType.java index 3b28fe2..31b4e12 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/LineType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/LineType.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This entity holds the horizontal spatial coverage of a line. A line area contains at lease two points. 
* */ +@Generated("jsonschema2pojo") public class LineType { /** @@ -22,22 +24,6 @@ public class LineType { @Expose private List points = new ArrayList(); - /** - * No args constructor for use in serialization - * - */ - public LineType() { - } - - /** - * - * @param points - */ - public LineType(List points) { - super(); - this.points = points; - } - /** * * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/OrbitType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/OrbitType.java index 3635a06..20feb89 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/OrbitType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/OrbitType.java @@ -3,6 +3,7 @@ import java.util.HashMap; import java.util.Map; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This entity stores orbital coverage information of the granule. This coverage is an alternative way of expressing granule spatial coverage. This information supports orbital backtrack searching on a granule. * */ +@Generated("jsonschema2pojo") public class OrbitType { /** @@ -54,30 +56,6 @@ public class OrbitType { @Expose private OrbitType.OrbitDirectionTypeEnum endDirection; - /** - * No args constructor for use in serialization - * - */ - public OrbitType() { - } - - /** - * - * @param startLatitude - * @param ascendingCrossing - * @param endDirection - * @param startDirection - * @param endLatitude - */ - public OrbitType(Double ascendingCrossing, Double startLatitude, OrbitType.OrbitDirectionTypeEnum startDirection, Double endLatitude, OrbitType.OrbitDirectionTypeEnum endDirection) { - super(); - this.ascendingCrossing = ascendingCrossing; - this.startLatitude = startLatitude; - this.startDirection = startDirection; - this.endLatitude = endLatitude; - this.endDirection = endDirection; - } - /** * The longitude value of a spatially referenced point, in degrees. Longitude values range from -180 to 180. * (Required) @@ -228,6 +206,7 @@ public boolean equals(Object other) { * Orbit start and end direction. A for ascending orbit and D for descending. * */ + @Generated("jsonschema2pojo") public enum OrbitDirectionTypeEnum { @SerializedName("A") @@ -243,7 +222,7 @@ public enum OrbitDirectionTypeEnum { } } - private OrbitDirectionTypeEnum(String value) { + OrbitDirectionTypeEnum(String value) { this.value = value; } diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/PointType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/PointType.java index 7a93382..8326a6c 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/PointType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/PointType.java @@ -1,6 +1,7 @@ package gov.nasa.cumulus.metadata.umm.generated; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -9,6 +10,7 @@ * The longitude and latitude values of a spatially referenced point in degrees. 
* */ +@Generated("jsonschema2pojo") public class PointType { /** @@ -28,24 +30,6 @@ public class PointType { @Expose private Double latitude; - /** - * No args constructor for use in serialization - * - */ - public PointType() { - } - - /** - * - * @param latitude - * @param longitude - */ - public PointType(Double longitude, Double latitude) { - super(); - this.longitude = longitude; - this.latitude = latitude; - } - /** * The longitude value of a spatially referenced point, in degrees. Longitude values range from -180 to 180. * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/RelatedUrlType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/RelatedUrlType.java index b159e07..4d9649b 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/RelatedUrlType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/RelatedUrlType.java @@ -3,6 +3,7 @@ import java.util.HashMap; import java.util.Map; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This entity holds all types of online URL associated with the granule such as guide document or ordering site etc. * */ +@Generated("jsonschema2pojo") public class RelatedUrlType { /** @@ -39,9 +41,13 @@ public class RelatedUrlType { @SerializedName("Description") @Expose private String description; + /** + * The format that granule data confirms to. While the value is listed as open to any text, CMR requires that it confirm to one of the values on the GranuleDataFormat values in the Keyword Management System: https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/GranuleDataFormat + * + */ @SerializedName("Format") @Expose - private RelatedUrlType.DataFormatEnum format; + private String format; @SerializedName("MimeType") @Expose private RelatedUrlType.MimeTypeEnum mimeType; @@ -60,36 +66,6 @@ public class RelatedUrlType { @Expose private RelatedUrlType.FileSizeUnitEnum sizeUnit; - /** - * No args constructor for use in serialization - * - */ - public RelatedUrlType() { - } - - /** - * - * @param size - * @param subtype - * @param format - * @param description - * @param sizeUnit - * @param mimeType - * @param type - * @param url - */ - public RelatedUrlType(String url, RelatedUrlType.RelatedUrlTypeEnum type, RelatedUrlType.RelatedUrlSubTypeEnum subtype, String description, RelatedUrlType.DataFormatEnum format, RelatedUrlType.MimeTypeEnum mimeType, Double size, RelatedUrlType.FileSizeUnitEnum sizeUnit) { - super(); - this.url = url; - this.type = type; - this.subtype = subtype; - this.description = description; - this.format = format; - this.mimeType = mimeType; - this.size = size; - this.sizeUnit = sizeUnit; - } - /** * The URL for the relevant resource. * (Required) @@ -150,11 +126,19 @@ public void setDescription(String description) { this.description = description; } - public RelatedUrlType.DataFormatEnum getFormat() { + /** + * The format that granule data confirms to. While the value is listed as open to any text, CMR requires that it confirm to one of the values on the GranuleDataFormat values in the Keyword Management System: https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/GranuleDataFormat + * + */ + public String getFormat() { return format; } - public void setFormat(RelatedUrlType.DataFormatEnum format) { + /** + * The format that granule data confirms to. 
While the value is listed as open to any text, CMR requires that it confirm to one of the values on the GranuleDataFormat values in the Keyword Management System: https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/GranuleDataFormat + * + */ + public void setFormat(String format) { this.format = format; } @@ -268,110 +252,12 @@ public boolean equals(Object other) { return (((((((((this.size == rhs.size)||((this.size!= null)&&this.size.equals(rhs.size)))&&((this.subtype == rhs.subtype)||((this.subtype!= null)&&this.subtype.equals(rhs.subtype))))&&((this.format == rhs.format)||((this.format!= null)&&this.format.equals(rhs.format))))&&((this.description == rhs.description)||((this.description!= null)&&this.description.equals(rhs.description))))&&((this.sizeUnit == rhs.sizeUnit)||((this.sizeUnit!= null)&&this.sizeUnit.equals(rhs.sizeUnit))))&&((this.mimeType == rhs.mimeType)||((this.mimeType!= null)&&this.mimeType.equals(rhs.mimeType))))&&((this.type == rhs.type)||((this.type!= null)&&this.type.equals(rhs.type))))&&((this.url == rhs.url)||((this.url!= null)&&this.url.equals(rhs.url)))); } - public enum DataFormatEnum { - - @SerializedName("ASCII") - ASCII("ASCII"), - @SerializedName("BINARY") - BINARY("BINARY"), - @SerializedName("BMP") - BMP("BMP"), - @SerializedName("BUFR") - BUFR("BUFR"), - @SerializedName("CSV") - CSV("CSV"), - @SerializedName("GEOTIFF") - GEOTIFF("GEOTIFF"), - @SerializedName("GIF") - GIF("GIF"), - @SerializedName("GEOTIFFINT16") - GEOTIFFINT_16("GEOTIFFINT16"), - @SerializedName("GEOTIFFFLOAT32") - GEOTIFFFLOAT_32("GEOTIFFFLOAT32"), - @SerializedName("GRIB") - GRIB("GRIB"), - @SerializedName("GZIP") - GZIP("GZIP"), - @SerializedName("HDF4") - HDF_4("HDF4"), - @SerializedName("HDF5") - HDF_5("HDF5"), - @SerializedName("HDF-EOS2") - HDF_EOS_2("HDF-EOS2"), - @SerializedName("HDF-EOS5") - HDF_EOS_5("HDF-EOS5"), - @SerializedName("HTML") - HTML("HTML"), - @SerializedName("ICARTT") - ICARTT("ICARTT"), - @SerializedName("JPEG") - JPEG("JPEG"), - @SerializedName("JSON") - JSON("JSON"), - @SerializedName("KML") - KML("KML"), - @SerializedName("NETCDF-3") - NETCDF_3("NETCDF-3"), - @SerializedName("NETCDF-4") - NETCDF_4("NETCDF-4"), - @SerializedName("NETCDF-CF") - NETCDF_CF("NETCDF-CF"), - @SerializedName("PNG") - PNG("PNG"), - @SerializedName("PNG24") - PNG_24("PNG24"), - @SerializedName("TAR") - TAR("TAR"), - @SerializedName("TIFF") - TIFF("TIFF"), - @SerializedName("XLSX") - XLSX("XLSX"), - @SerializedName("XML") - XML("XML"), - @SerializedName("ZIP") - ZIP("ZIP"), - @SerializedName("DMRPP") - DMRPP("DMRPP"), - @SerializedName("Not provided") - NOT_PROVIDED("Not provided"); - private final String value; - private final static Map CONSTANTS = new HashMap(); - - static { - for (RelatedUrlType.DataFormatEnum c: values()) { - CONSTANTS.put(c.value, c); - } - } - - private DataFormatEnum(String value) { - this.value = value; - } - - @Override - public String toString() { - return this.value; - } - - public String value() { - return this.value; - } - - public static RelatedUrlType.DataFormatEnum fromValue(String value) { - RelatedUrlType.DataFormatEnum constant = CONSTANTS.get(value); - if (constant == null) { - throw new IllegalArgumentException(value); - } else { - return constant; - } - } - - } - /** * The unit of the file size. 
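With the change above, RelatedUrlType.Format becomes a plain String and the generated DataFormatEnum is deleted; the value is free text that CMR still expects to conform to a GCMD GranuleDataFormat keyword. A hedged sketch of a caller after this change (the URL is a placeholder, and GET_DATA is assumed to remain in the regenerated RelatedUrlTypeEnum):

```java
import gov.nasa.cumulus.metadata.umm.generated.RelatedUrlType;

// Format is now free text instead of DataFormatEnum; the caller supplies a
// GCMD GranuleDataFormat keyword directly.
public class RelatedUrlFormatSketch {
    public static void main(String[] args) {
        RelatedUrlType url = new RelatedUrlType();
        url.setUrl("https://example.test/granule.nc");              // placeholder URL
        url.setType(RelatedUrlType.RelatedUrlTypeEnum.GET_DATA);    // assumed still generated
        url.setFormat("NETCDF-4");                                   // was DataFormatEnum.NETCDF_4
        System.out.println(url.getFormat());
    }
}
```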
* */ + @Generated("jsonschema2pojo") public enum FileSizeUnitEnum { @SerializedName("KB") @@ -395,7 +281,7 @@ public enum FileSizeUnitEnum { } } - private FileSizeUnitEnum(String value) { + FileSizeUnitEnum(String value) { this.value = value; } @@ -419,6 +305,7 @@ public static RelatedUrlType.FileSizeUnitEnum fromValue(String value) { } + @Generated("jsonschema2pojo") public enum MimeTypeEnum { @SerializedName("application/json") @@ -486,7 +373,7 @@ public enum MimeTypeEnum { } } - private MimeTypeEnum(String value) { + MimeTypeEnum(String value) { this.value = value; } @@ -510,6 +397,7 @@ public static RelatedUrlType.MimeTypeEnum fromValue(String value) { } + @Generated("jsonschema2pojo") public enum RelatedUrlSubTypeEnum { @SerializedName("MOBILE APP") @@ -657,7 +545,7 @@ public enum RelatedUrlSubTypeEnum { } } - private RelatedUrlSubTypeEnum(String value) { + RelatedUrlSubTypeEnum(String value) { this.value = value; } @@ -681,6 +569,7 @@ public static RelatedUrlType.RelatedUrlSubTypeEnum fromValue(String value) { } + @Generated("jsonschema2pojo") public enum RelatedUrlTypeEnum { @SerializedName("DOWNLOAD SOFTWARE") @@ -710,7 +599,7 @@ public enum RelatedUrlTypeEnum { } } - private RelatedUrlTypeEnum(String value) { + RelatedUrlTypeEnum(String value) { this.value = value; } diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/SpatialExtentType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/SpatialExtentType.java index 3eb0da3..dcb6012 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/SpatialExtentType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/SpatialExtentType.java @@ -3,6 +3,7 @@ import java.util.LinkedHashSet; import java.util.Set; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This class contains attributes which describe the spatial extent of a granule. Spatial Extent includes any or all of Granule Localities, Horizontal Spatial Domain, and Vertical Spatial Domain. * */ +@Generated("jsonschema2pojo") public class SpatialExtentType { /** @@ -35,26 +37,6 @@ public class SpatialExtentType { @Expose private Set verticalSpatialDomains = new LinkedHashSet(); - /** - * No args constructor for use in serialization - * - */ - public SpatialExtentType() { - } - - /** - * - * @param verticalSpatialDomains - * @param granuleLocalities - * @param horizontalSpatialDomain - */ - public SpatialExtentType(Set granuleLocalities, HorizontalSpatialDomainType horizontalSpatialDomain, Set verticalSpatialDomains) { - super(); - this.granuleLocalities = granuleLocalities; - this.horizontalSpatialDomain = horizontalSpatialDomain; - this.verticalSpatialDomains = verticalSpatialDomains; - } - /** * This entity stores information used at the granule level to describe the labeling of granules with compounded time/space text values and which are subsequently used to define more phenomenological-based granules, thus the locality type and description are contained. 
* diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackPassTileType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackPassTileType.java index cf1d2f7..3879737 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackPassTileType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackPassTileType.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This element stores a track pass and its tile information. It will allow a user to search by pass number and their tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granules spatial extent. * */ +@Generated("jsonschema2pojo") public class TrackPassTileType { /** @@ -29,24 +31,6 @@ public class TrackPassTileType { @Expose private List tiles = new ArrayList(); - /** - * No args constructor for use in serialization - * - */ - public TrackPassTileType() { - } - - /** - * - * @param tiles - * @param pass - */ - public TrackPassTileType(Integer pass, List tiles) { - super(); - this.pass = pass; - this.tiles = tiles; - } - /** * A pass number identifies a subset of a granule's spatial extent. This element holds a pass number that exists in the granule and will allow a user to search by pass number that is contained within a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. * (Required) diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackType.java index 5f862a3..54c17da 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/TrackType.java @@ -3,6 +3,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This element stores track information of the granule. Track information is used to allow a user to search for granules whose spatial extent is based on an orbital cycle, pass, and tile mapping. Though it is derived from the SWOT mission requirements, it is intended that this element type be generic enough so that other missions can make use of it. While track information is a type of spatial domain, it is expected that the metadata provider will provide geometry information that matches the spatial extent of the track information. * */ +@Generated("jsonschema2pojo") public class TrackType { /** @@ -23,31 +25,12 @@ public class TrackType { private Integer cycle; /** * A pass number identifies a subset of a granule's spatial extent. This element holds a list of pass numbers and their tiles that exist in the granule. It will allow a user to search by pass number and its tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granule's spatial extent. 
- * (Required) * */ @SerializedName("Passes") @Expose private List passes = new ArrayList(); - /** - * No args constructor for use in serialization - * - */ - public TrackType() { - } - - /** - * - * @param passes - * @param cycle - */ - public TrackType(Integer cycle, List passes) { - super(); - this.cycle = cycle; - this.passes = passes; - } - /** * An integer that represents a specific set of orbital spatial extents defined by passes and tiles. Though intended to be generic, this comes from a SWOT mission requirement where each cycle represents a set of 1/2 orbits. Each 1/2 orbit is called a 'pass'. During science mode, a cycle represents 21 days of 14 full orbits or 588 passes. * (Required) @@ -68,7 +51,6 @@ public void setCycle(Integer cycle) { /** * A pass number identifies a subset of a granule's spatial extent. This element holds a list of pass numbers and their tiles that exist in the granule. It will allow a user to search by pass number and its tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granule's spatial extent. - * (Required) * */ public List getPasses() { @@ -77,7 +59,6 @@ public List getPasses() { /** * A pass number identifies a subset of a granule's spatial extent. This element holds a list of pass numbers and their tiles that exist in the granule. It will allow a user to search by pass number and its tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granule's spatial extent. - * (Required) * */ public void setPasses(List passes) { diff --git a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/VerticalSpatialDomainType.java b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/VerticalSpatialDomainType.java index a5aeaff..1bfaed7 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/umm/generated/VerticalSpatialDomainType.java +++ b/src/main/java/gov/nasa/cumulus/metadata/umm/generated/VerticalSpatialDomainType.java @@ -3,6 +3,7 @@ import java.util.HashMap; import java.util.Map; +import javax.annotation.processing.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @@ -11,6 +12,7 @@ * This entity contains the type and value for the granule's vertical spatial domain. 
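Because the (Required) marker on TrackType.Passes is dropped above, a track may now carry only a cycle, with passes and tiles optional. A minimal sketch assuming the usual generated setter names (setCycle, setPasses, setPass, setTiles) and made-up values:

```java
import java.util.List;

import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType;
import gov.nasa.cumulus.metadata.umm.generated.TrackType;

public class TrackTypeSketch {
    public static void main(String[] args) {
        // Cycle-only track: valid now that Passes is no longer required.
        TrackType cycleOnly = new TrackType();
        cycleOnly.setCycle(481);

        // Track with one pass and its tiles (values are made up).
        TrackPassTileType pass = new TrackPassTileType();
        pass.setPass(21);
        pass.setTiles(List.of("4L", "4R"));

        TrackType withPasses = new TrackType();
        withPasses.setCycle(481);
        withPasses.setPasses(List.of(pass));
    }
}
```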
* */ +@Generated("jsonschema2pojo") public class VerticalSpatialDomainType { @SerializedName("Type") @@ -45,30 +47,6 @@ public class VerticalSpatialDomainType { @Expose private VerticalSpatialDomainType.Unit unit; - /** - * No args constructor for use in serialization - * - */ - public VerticalSpatialDomainType() { - } - - /** - * - * @param minimumValue - * @param unit - * @param type - * @param value - * @param maximumValue - */ - public VerticalSpatialDomainType(VerticalSpatialDomainType.VerticalSpatialDomainTypeEnum type, String value, String minimumValue, String maximumValue, VerticalSpatialDomainType.Unit unit) { - super(); - this.type = type; - this.value = value; - this.minimumValue = minimumValue; - this.maximumValue = maximumValue; - this.unit = unit; - } - public VerticalSpatialDomainType.VerticalSpatialDomainTypeEnum getType() { return type; } @@ -201,6 +179,7 @@ public boolean equals(Object other) { * Describes the unit of the vertical extent value. * */ + @Generated("jsonschema2pojo") public enum Unit { @SerializedName("Fathoms") @@ -232,7 +211,7 @@ public enum Unit { } } - private Unit(String value) { + Unit(String value) { this.value = value; } @@ -256,6 +235,7 @@ public static VerticalSpatialDomainType.Unit fromValue(String value) { } + @Generated("jsonschema2pojo") public enum VerticalSpatialDomainTypeEnum { @SerializedName("Atmosphere Layer") @@ -275,7 +255,7 @@ public enum VerticalSpatialDomainTypeEnum { } } - private VerticalSpatialDomainTypeEnum(String value) { + VerticalSpatialDomainTypeEnum(String value) { this.value = value; } diff --git a/src/main/java/gov/nasa/cumulus/metadata/util/JSONUtils.java b/src/main/java/gov/nasa/cumulus/metadata/util/JSONUtils.java index f12d994..c08836f 100644 --- a/src/main/java/gov/nasa/cumulus/metadata/util/JSONUtils.java +++ b/src/main/java/gov/nasa/cumulus/metadata/util/JSONUtils.java @@ -3,14 +3,11 @@ import com.google.gson.JsonObject; import com.google.gson.JsonArray; import gov.nasa.cumulus.metadata.aggregator.UMMUtils; -import gov.nasa.cumulus.metadata.umm.generated.RelatedUrlType; -import org.apache.commons.lang3.StringUtils; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; -import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Iterator; @@ -124,112 +121,4 @@ public static JSONArray GsonArrayToJSONArray(JsonArray input) throws ParseExcept return jarray; } - public static JSONObject sortRelatedUrls(JSONObject input)throws ParseException { - /** - * JSONArray extends ArrayList implements List, JSONAware, JSONStreamAware - * so JSONArray maintains insert order - */ - JSONArray unsortedRelatedUrls = (JSONArray)input.get("RelatedUrls"); - JSONArray sortedRelatedUrls = new JSONArray(); - ArrayList toBeRemovedItems = new ArrayList<>(); - // first, extract URL starts with http/https and Type == GET DATA to added into sortedRelatedUrls - String[] httpStrs = {"http", "https"}; - for(int i =0; i< unsortedRelatedUrls.size(); i++) { - JSONObject relatedUrl = (JSONObject) unsortedRelatedUrls.get(i); - if( isStrStarsWithIgnoreCase((String)relatedUrl.get("URL"),httpStrs) - && - isGETDataType((String)relatedUrl.get("Type")) - ) { - sortedRelatedUrls.add(relatedUrl); - toBeRemovedItems.add(relatedUrl); - } - } - unsortedRelatedUrls = shrinkUnsortedRelatedUrls(unsortedRelatedUrls, toBeRemovedItems); - toBeRemovedItems.clear(); - - // other http/https files - for(int i =0; i< unsortedRelatedUrls.size(); i++) { - 
JSONObject relatedUrl = (JSONObject) unsortedRelatedUrls.get(i); - if(isStrStarsWithIgnoreCase((String)relatedUrl.get("URL"),httpStrs)) { - sortedRelatedUrls.add(relatedUrl); - toBeRemovedItems.add(relatedUrl); - } - } - unsortedRelatedUrls = shrinkUnsortedRelatedUrls(unsortedRelatedUrls, toBeRemovedItems); - toBeRemovedItems.clear(); - - // s3 link to scientific data - for(int i =0; i< unsortedRelatedUrls.size(); i++) { - JSONObject relatedUrl = (JSONObject) unsortedRelatedUrls.get(i); - if(isGETDataType((String)relatedUrl.get("Type")) - && - isStrStarsWithIgnoreCase((String)relatedUrl.get("URL"), "s3://")) { - sortedRelatedUrls.add(relatedUrl); - toBeRemovedItems.add(relatedUrl); - } - } - unsortedRelatedUrls = shrinkUnsortedRelatedUrls(unsortedRelatedUrls, toBeRemovedItems); - toBeRemovedItems.clear(); - // other s3 links - for(int i =0; i< unsortedRelatedUrls.size(); i++) { - JSONObject relatedUrl = (JSONObject) unsortedRelatedUrls.get(i); - if(isStrStarsWithIgnoreCase((String)relatedUrl.get("URL"), "s3://")) { - sortedRelatedUrls.add(relatedUrl); - toBeRemovedItems.add(relatedUrl); - } - } - unsortedRelatedUrls = shrinkUnsortedRelatedUrls(unsortedRelatedUrls, toBeRemovedItems); - toBeRemovedItems.clear(); - - // left of item in unsorted array - for(Object e: unsortedRelatedUrls) { - JSONObject relatedUrl = (JSONObject) e; - sortedRelatedUrls.add(relatedUrl); - } - input.remove("RelatedUrls"); - input.put("RelatedUrls", sortedRelatedUrls); - return input; - } - - public static JSONArray shrinkUnsortedRelatedUrls(JSONArray unsortedRelatedUrls, - ArrayList toBeRemovedItems){ - toBeRemovedItems.forEach(item -> { - unsortedRelatedUrls.remove(item); - }); - return unsortedRelatedUrls; - } - - /** - * - * @param s : The string to be verified - * @param startStr : verifies the input s is starting with startStr ignoring case - * return true/false - */ - public static boolean isStrStarsWithIgnoreCase(String s, String startStr) { - s = StringUtils.trim(s); - return StringUtils.startsWithIgnoreCase(s,startStr); - } - - /** - * - * @param s: The string to be verified - * @param startStrs: verifies the input s is starting with at least one item in startStrs[] ignoring case - * @return true/false - */ - public static boolean isStrStarsWithIgnoreCase(String s, String[] startStrs) { - s = StringUtils.trim(s); - for(String elementStr:startStrs) { - if(StringUtils.startsWithIgnoreCase(s, elementStr)){ - return true; - } - } - return false; - } - - public static boolean isGETDataType(String s) { - s = StringUtils.trim(s); - return StringUtils.equalsIgnoreCase(s,RelatedUrlType.RelatedUrlTypeEnum.GET_DATA.value()); - } - - } diff --git a/src/main/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiils.java b/src/main/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiils.java new file mode 100644 index 0000000..2ab7391 --- /dev/null +++ b/src/main/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiils.java @@ -0,0 +1,72 @@ +package gov.nasa.cumulus.metadata.util; + +import cumulus_message_adapter.message_parser.AdapterLogger; +import org.w3c.dom.Document; + +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathExpressionException; + +public class MENDsISOXmlUtiils { + /** + * extract a string from xml document. swallow exception if there is any. + * If exception is swallowed, return empty string. + * Another extractXPathValueThrowsException shall be implemented whenever needed, which + * in another case should throw exception instead of swallow. 
+ * @return + */ + /** + * + * @param doc + * @param xpath + * @param pathStr : the xml path in string format + * @param pathTagStr :the tag form of the xml path string. Ex IsoMendsXPath.ADDITIONAL_ATTRIBUTES_BLOCK. + * This is for logging and support purpose so the developer can quickly identify what field(s) + * is causing problem + * @return : extracted string. Or the extractedString default is "" which is empty string. Hence, + * any exception would cause this function to return an empty string + */ + public static String extractXPathValueSwallowException(Document doc, XPath xpath, String pathStr, String pathTagStr) { + String extractedStr = ""; //default to empty string. + try { + extractedStr = xpath.evaluate(pathStr, doc); + } catch (XPathExpressionException xPathExpressionException) { + AdapterLogger.LogError("extractXPathValueSwallowException error while extracting: " + pathTagStr + + " path string value:"+ pathStr + + " Exception:" +xPathExpressionException); + } catch (Exception genericException) { + AdapterLogger.LogError("extractXPathValueSwallowException error while extracting: "+ pathTagStr + + " path string value:"+ pathStr + + " Exception:" +genericException); + } + return extractedStr; + } + + /** + * extract a string from xml document. throws exception if there is any. + * @param doc + * @param xpath + * @param pathStr + * @param pathTagStr + * @return + * @throws Exception + */ + public static String extractXPathValueThrowsException(Document doc, XPath xpath, String pathStr, String pathTagStr) + throws Exception{ + String extractedStr = ""; + try { + extractedStr = xpath.evaluate(pathStr, doc); + } catch (XPathExpressionException xPathExpressionException) { + AdapterLogger.LogError("extractXPathValueSwallowException error while extracting: " + pathTagStr + + " path string value:"+ pathStr + + " Exception:" +xPathExpressionException); + throw xPathExpressionException; + } catch (Exception genericException) { + AdapterLogger.LogError("extractXPathValueSwallowException error while extracting: "+ pathTagStr + + " path string value:"+ pathStr + + " Exception:" +genericException); + throw genericException; + } + return extractedStr; + } + +} diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java index 4581de1..8f75173 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java @@ -95,7 +95,7 @@ public Date getReleaseDate() { */ public void setReleaseDate(Date releaseDate) { this.releaseDate = releaseDate; - this.releaseDateLong = new Long(releaseDate.getTime()); + this.releaseDateLong = releaseDate.getTime(); } public Long getReleaseDateLong() { diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java index 160f33f..7bf2fe1 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java @@ -147,7 +147,7 @@ public Date getStartTime() { */ public void setStartTime(Date startTime) { this.startTime = startTime; - this.startTimeLong = new Long(startTime.getTime()); + this.startTimeLong = startTime.getTime(); } /** * @return the stopTime @@ -160,7 +160,7 @@ public Date getStopTime() { */ public void setStopTime(Date stopTime) { this.stopTime = stopTime; - this.stopTimeLong = new Long(stopTime.getTime()); + 
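Usage sketch for the new MENDsISOXmlUtiils helpers above: the swallow variant logs through AdapterLogger and returns an empty string when the XPath cannot be evaluated, so optional ISO fields do not abort granule processing, while the throwing variant is meant for fields that must be present. The file name and XPath below are placeholders.

```java
import java.io.File;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathFactory;

import org.w3c.dom.Document;

import gov.nasa.cumulus.metadata.util.MENDsISOXmlUtiils;

public class IsoXPathSketch {
    public static void main(String[] args) throws Exception {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true); // ISO 19115 records use gmi/gmd/gco namespaces
        Document doc = dbf.newDocumentBuilder().parse(new File("granule.nc.iso.xml"));

        XPath xpath = XPathFactory.newInstance().newXPath();
        // A NamespaceContext for the gmi/gmd/gco prefixes would normally be set here;
        // without it the helper simply logs the failure and returns "".
        String value = MENDsISOXmlUtiils.extractXPathValueSwallowException(
                doc, xpath, "/gmi:MI_Metadata/gmd:fileIdentifier/gco:CharacterString", "FILE_IDENTIFIER");
        System.out.println(value.isEmpty() ? "value not present" : value);
    }
}
```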
this.stopTimeLong = stopTime.getTime(); } public Long getStartTimeLong() { diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java index 42ac1a9..fe6eee1 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java @@ -34,7 +34,7 @@ public DatasetDateTime() { public DatasetDateTime(DatasetElement element, Date keyValue) { this.datasetElement = element; this.value = keyValue; - this.valueLong = new Long(value.getTime()); + this.valueLong = keyValue.getTime(); } public DatasetDateTime(DatasetElement element, Long keyValue) { @@ -74,7 +74,7 @@ public Long getValueLong() { public void setValue(Date value) { this.value = value; - this.valueLong = new Long(value.getTime()); + this.valueLong = value.getTime(); } diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java index 9c74f09..26e338c 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java @@ -78,7 +78,7 @@ public Date getEchoSubmitDate() { } public void setEchoSubmitDate(Date echoSubmitDate) { this.echoSubmitDate = echoSubmitDate; - this.echoSubmitDateLong = new Long(echoSubmitDate.getTime()); + this.echoSubmitDateLong = echoSubmitDate.getTime(); } public Long getEchoSubmitDateLong() { @@ -103,14 +103,14 @@ public Date getCreationDate() { } public void setCreationDate(Date creationDate) { this.creationDate = creationDate; - this.creationDateLong = new Long(creationDate.getTime()); + this.creationDateLong = creationDate.getTime(); } public Date getLastRevisionDate() { return lastRevisionDate; } public void setLastRevisionDate(Date lastRevisionDate) { this.lastRevisionDate = lastRevisionDate; - this.lastRevisionDateLong = new Long(lastRevisionDate.getTime()); + this.lastRevisionDateLong = lastRevisionDate.getTime(); } diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java index c4ffa29..eed0475 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java @@ -54,7 +54,7 @@ public Date getSoftwareDate() { } public void setSoftwareDate(Date softwareDate) { this.softwareDate = softwareDate; - this.softwareDateLong = new Long(softwareDate.getTime()); + this.softwareDateLong = softwareDate.getTime(); } public Long getSoftwareDateLong() { diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java index 5d22f91..ef42a9a 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java @@ -79,7 +79,7 @@ public Date getVersionDate() { } public void setVersionDate(Date versionDate) { this.versionDate = versionDate; - this.versionDateLong = new Long(versionDate.getTime()); + this.versionDateLong = versionDate.getTime(); } public Long getVersionDateLong() { diff --git a/src/main/java/gov/nasa/podaac/inventory/model/Granule.java b/src/main/java/gov/nasa/podaac/inventory/model/Granule.java index 4c2e843..802e90b 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/Granule.java +++ 
b/src/main/java/gov/nasa/podaac/inventory/model/Granule.java @@ -78,39 +78,39 @@ public Granule(String name, Date startTime, Date requestedTime, Date acquiredTim this.name = name; try{ - this.startTimeLong = new Long(startTime.getTime()); + this.startTimeLong = startTime.getTime(); }catch(NullPointerException npe) { this.startTimeLong = null; } try{ - this.stopTimeLong = new Long(stopTime.getTime()); + this.stopTimeLong = stopTime.getTime(); }catch(NullPointerException npe) { this.stopTimeLong = null; } try{ - this.createTimeLong = new Long(createTime.getTime()); + this.createTimeLong = createTime.getTime(); }catch(NullPointerException npe) { this.createTimeLong = null; } try{ - this.ingestTimeLong = new Long(ingestTime.getTime()); + this.ingestTimeLong = ingestTime.getTime(); }catch(NullPointerException npe) { this.ingestTimeLong = null; } this.version = version; try{ - this.requestedTimeLong = new Long(requestedTime.getTime()); + this.requestedTimeLong = requestedTime.getTime(); }catch(NullPointerException npe) { this.requestedTimeLong = null; } try{ - this.acquiredTimeLong = new Long(acquiredTime.getTime()); + this.acquiredTimeLong = acquiredTime.getTime(); }catch(NullPointerException npe) { this.acquiredTimeLong = null; @@ -121,7 +121,7 @@ public Granule(String name, Date startTime, Date requestedTime, Date acquiredTim this.checksumType = checksumType; this.status = status; try{ - this.archiveTimeLong = new Long(archiveTime.getTime()); + this.archiveTimeLong = archiveTime.getTime(); }catch(NullPointerException npe) { this.archiveTimeLong = null; @@ -173,14 +173,6 @@ public String getName() { public void setName(String name) { this.name = name; } - //officialName -// public String getOfficialName() { -// return officialName; -// } -// -// public void setOfficialName(String name) { -// this.officialName = name; -// } public String getRootPath() { return rootPath; } @@ -205,7 +197,7 @@ public Date getStartTime() { public void setStartTime(Date startTime) { this.startTime = startTime; - this.startTimeLong = new Long(startTime.getTime()); + this.startTimeLong = startTime.getTime(); } public Date getStopTime() { @@ -214,7 +206,7 @@ public Date getStopTime() { public void setStopTime(Date stopTime) { this.stopTime = stopTime; - this.stopTimeLong = new Long(stopTime.getTime()); + this.stopTimeLong = stopTime.getTime(); } @@ -224,7 +216,7 @@ public Date getAcquiredTime() { public void setAcquiredTime(Date acquiredTime) { this.acquiredTime = acquiredTime; - this.acquiredTimeLong = new Long(acquiredTime.getTime()); + this.acquiredTimeLong = acquiredTime.getTime(); } public Date getRequestedTime() { return requestedTime; @@ -232,7 +224,7 @@ public Date getRequestedTime() { public void setRequestedTime(Date requestedTime) { this.requestedTime = requestedTime; - this.requestedTimeLong = new Long(requestedTime.getTime()); + this.requestedTimeLong = requestedTime.getTime(); } public Date getArchiveTime() { @@ -241,7 +233,7 @@ public Date getArchiveTime() { public void setArchiveTime(Date archiveTime) { this.archiveTime = archiveTime; - this.archiveTimeLong = new Long(archiveTime.getTime()); + this.archiveTimeLong = archiveTime.getTime(); } public Date getCreateTime() { @@ -250,7 +242,7 @@ public Date getCreateTime() { public void setCreateTime(Date createTime) { this.createTime = createTime; - this.createTimeLong = new Long(createTime.getTime()); + this.createTimeLong = createTime.getTime(); } public Date getIngestTime() { @@ -259,7 +251,7 @@ public Date getIngestTime() { public void 
setIngestTime(Date ingestTime) { this.ingestTime = ingestTime; - this.ingestTimeLong = new Long(ingestTime.getTime()); + this.ingestTimeLong = ingestTime.getTime(); } public Date getVerifyTime() { @@ -268,7 +260,7 @@ public Date getVerifyTime() { public void setVerifyTime(Date verifyTime) { this.verifyTime = verifyTime; - this.verifyTimeLong = new Long(verifyTime.getTime()); + this.verifyTimeLong = verifyTime.getTime(); } public Integer getVersion() { diff --git a/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java b/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java index 1d50b80..0532e30 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java @@ -27,7 +27,7 @@ public GranuleDateTime(DatasetElement element, Date keyValue) { this.datasetElement = element; this.value = keyValue; try{ - this.valueLong = new Long(keyValue.getTime()); + this.valueLong = keyValue.getTime(); }catch(NullPointerException npe) { this.valueLong = null; @@ -75,7 +75,7 @@ public Long getValueLong() { public void setValue(Date value) { this.value = value; - this.valueLong = new Long(value.getTime()); + this.valueLong = value.getTime(); } @Override diff --git a/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java b/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java index d9ad422..b6aeaef 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java @@ -83,14 +83,14 @@ public Date getCreationDate() { } public void setCreationDate(Date creationDate) { this.creationDate = creationDate; - this.creationDateLong = new Long(creationDate.getTime()); + this.creationDateLong = creationDate.getTime(); } public Date getLastRevisionDate() { return lastRevisionDate; } public void setLastRevisionDate(Date lastRevisionDate) { this.lastRevisionDate = lastRevisionDate; - this.lastRevisionDateLong = new Long(lastRevisionDate.getTime()); + this.lastRevisionDateLong = lastRevisionDate.getTime(); } public String getRevisionHistory() { return revisionHistory; @@ -103,7 +103,7 @@ public Date getEchoSubmitDate() { } public void setEchoSubmitDate(Date echoSubmitDate) { this.echoSubmitDate = echoSubmitDate; - this.echoSubmitDateLong = new Long(echoSubmitDate.getTime()); + this.echoSubmitDateLong = echoSubmitDate.getTime(); } public Long getCreationDateLong() { diff --git a/src/main/java/gov/nasa/podaac/inventory/model/Provider.java b/src/main/java/gov/nasa/podaac/inventory/model/Provider.java index 9a953dd..1bf642e 100644 --- a/src/main/java/gov/nasa/podaac/inventory/model/Provider.java +++ b/src/main/java/gov/nasa/podaac/inventory/model/Provider.java @@ -26,7 +26,7 @@ public Integer getProviderId() { return providerId; } public void setProviderId(Integer providerId) { - this.providerId = new Integer(providerId); + this.providerId = providerId; } public String getShortName() { return shortName; diff --git a/src/main/resources/jsonschema/UMM-G1.6.5.json b/src/main/resources/jsonschema/UMM-G1.6.5.json new file mode 100644 index 0000000..9a5b443 --- /dev/null +++ b/src/main/resources/jsonschema/UMM-G1.6.5.json @@ -0,0 +1,1310 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "title": "UMM-G", + "type": "object", + "additionalProperties": false, + "properties": { + "GranuleUR": { + "description": "The Universal 
Reference ID of the granule referred by the data provider. This ID is unique per data provider.", + "type": "string", + "minLength": 1, + "maxLength": 250 + }, + "ProviderDates": { + "description": "Dates related to activities involving the the granule and the data provider database with the exception for Delete. For Create, Update, and Insert the date is the date that the granule file is created, updated, or inserted into the provider database by the provider. Delete is the date that the CMR should delete the granule metadata record from its repository.", + "type": "array", + "items": { + "$ref": "#/definitions/ProviderDateType" + }, + "minItems": 1, + "maxItems": 4, + "uniqueItems":true + }, + "CollectionReference": { + "description": "The collection metadata record's short name and version, or entry title to which this granule metadata record belongs.", + "$ref": "#/definitions/CollectionReferenceType" + }, + "AccessConstraints": { + "description": "Allows the author to constrain access to the granule. Some words that may be used in this element's value include: Public, In-house, Limited, None. The value field is used for special ACL rules (Access Control Lists (http://en.wikipedia.org/wiki/Access_control_list)). For example it can be used to hide metadata when it isn't ready for public consumption.", + "$ref": "#/definitions/AccessConstraintsType" + }, + "DataGranule": { + "description": "This entity stores basic descriptive characteristics associated with a granule.", + "$ref": "#/definitions/DataGranuleType" + }, + "PGEVersionClass": { + "description": "This entity stores basic descriptive characteristics related to the Product Generation Executable associated with a granule.", + "$ref": "#/definitions/PGEVersionClassType" + }, + "TemporalExtent": { + "description": "This class contains attributes which describe the temporal extent of a granule. Temporal Extent includes either a Range Date Time, or a Single Date Time", + "$ref": "#/definitions/TemporalExtentType" + }, + "SpatialExtent": { + "description": "This class contains attributes which describe the spatial extent of a granule. Spatial Extent includes any or all of Granule Localities, Horizontal Spatial Domain, and Vertical Spatial Domain.", + "$ref": "#/definitions/SpatialExtentType" + }, + "OrbitCalculatedSpatialDomains": { + "description": "This entity is used to store the characteristics of the orbit calculated spatial domain to include the model name, orbit number, start and stop orbit number, equator crossing date and time, and equator crossing longitude.", + "type": "array", + "items": { + "$ref": "#/definitions/OrbitCalculatedSpatialDomainType" + }, + "minItems": 1, + "uniqueItems":true + }, + "MeasuredParameters": { + "description": "This entity contains the name of the geophysical parameter expressed in the data as well as associated quality flags and quality statistics. The quality statistics element contains measures of quality for the granule. The parameters used to set these measures are not preset and will be determined by the data producer. Each set of measures can occur many times either for the granule as a whole or for individual parameters. 
The quality flags contain the science, operational and automatic quality flags which indicate the overall quality assurance levels of specific parameter values within a granule.", + "type": "array", + "items": { + "$ref": "#/definitions/MeasuredParameterType" + }, + "minItems": 1, + "uniqueItems":true + }, + "Platforms": { + "description": "A reference to a platform in the parent collection that is associated with the acquisition of the granule. The platform must exist in the parent collection. For example, Platform types may include (but are not limited to): ADEOS-II, AEM-2, Terra, Aqua, Aura, BALLOONS, BUOYS, C-130, DEM, DMSP-F1,etc.", + "type": "array", + "items": { + "$ref": "#/definitions/PlatformType" + }, + "minItems": 1, + "uniqueItems":true + }, + "Projects": { + "description": "The name of the scientific program, field campaign, or project from which the data were collected. This element is intended for the non-space assets such as aircraft, ground systems, balloons, sondes, ships, etc. associated with campaigns. This element may also cover a long term project that continuously creates new data sets — like MEaSUREs from ISCCP and NVAP or CMARES from MISR. Project also includes the Campaign sub-element to support multiple campaigns under the same project.", + "type": "array", + "items": { + "$ref": "#/definitions/ProjectType" + }, + "minItems": 1, + "uniqueItems": true + }, + "AdditionalAttributes": { + "description": "Reference to an additional attribute in the parent collection. The attribute reference may contain a granule specific value that will override the value in the parent collection for this granule. An attribute with the same name must exist in the parent collection.", + "type": "array", + "items": { + "$ref": "#/definitions/AdditionalAttributeType" + }, + "minItems": 1, + "uniqueItems": true + }, + "InputGranules": { + "description": "This entity contains the identification of the input granule(s) for a specific granule.", + "type": "array", + "items": { + "type": "string", + "minLength": 1, + "maxLength": 500 + }, + "minItems": 1, + "uniqueItems": true + }, + "TilingIdentificationSystem": { + "description": "This entity stores the tiling identification system for the granule. The tiling identification system information is an alternative way to express granule's spatial coverage based on a certain two dimensional coordinate system defined by the providers. The name must match the name in the parent collection.", + "$ref": "#/definitions/TilingIdentificationSystemType" + }, + "CloudCover": { + "description": "A percentage value indicating how much of the area of a granule (the EOSDIS data unit) has been obscured by clouds. 
It is worth noting that there are many different measures of cloud cover within the EOSDIS data holdings and that the cloud cover parameter that is represented in the archive is dataset-specific.", + "type": "number" + }, + "RelatedUrls": { + "description": "This element describes any data/service related URLs that include project home pages, services, related data archives/servers, metadata extensions, direct links to online software packages, web mapping services, links to images, or other data.", + "type": "array", + "items": { + "$ref": "#/definitions/RelatedUrlType" + }, + "minItems": 1 + }, + "NativeProjectionNames": { + "description": "Represents the native projection of the granule if the granule has a native projection.", + "type": "array", + "items": { + "$ref": "#/definitions/ProjectionNameType" + } + }, + "GridMappingNames": { + "description": "Represents the native grid mapping of the granule, if the granule is gridded.", + "type": "array", + "items": { + "$ref": "#/definitions/GridMappingNameType" + } + }, + "MetadataSpecification": { + "description": "Requires the user to add in schema information into every granule record. It includes the schema's name, version, and URL location. The information is controlled through enumerations at the end of this schema.", + "$ref": "#/definitions/MetadataSpecificationType" + } + }, + "required": ["GranuleUR", "ProviderDates", "CollectionReference", "MetadataSpecification"], + + + + "definitions": { + "ProviderDateType": { + "type": "object", + "additionalProperties": false, + "description": "Specifies the date and its type that the provider uses for the granule. For Create, Update, and Insert the date is the date that the granule file is created, updated, or inserted into the provider database by the provider. Delete is the date that the CMR should delete the granule metadata record from its repository.", + "properties": { + "Date": { + "description": "This is the date that an event associated with the granule occurred.", + "format": "date-time", + "type": "string" + }, + "Type": { + "description": "This is the type of event associated with the date. For example, Creation or Upate.", + "$ref": "#/definitions/ProviderDateTypeEnum" + } + }, + "required": ["Date", "Type"] + }, + "CollectionReferenceType": { + "type": "object", + "description": "A reference to a collection metadata record's short name and version, or entry title to which this granule metadata record belongs.", + "oneOf": [{ + "additionalProperties": false, + "properties": { + "ShortName": { + "description": "The collection's short name as per the UMM-C.", + "type": "string", + "minLength": 1, + "maxLength": 85 + }, + "Version": { + "description": "The collection's version as per the UMM-C.", + "type": "string", + "minLength": 1, + "maxLength": 80 + } + }, + "required": ["ShortName", "Version"] + }, + { + "additionalProperties": false, + "properties": { + "EntryTitle": { + "description": "The collections entry title as per the UMM-C.", + "type": "string", + "minLength": 1, + "maxLength": 1030 + } + }, + "required": ["EntryTitle"] + }] + }, + "AccessConstraintsType": { + "type": "object", + "additionalProperties": false, + "description": "Information about any physical constraints for accessing the data set.", + "properties": { + "Description": { + "description": "Free-text description of the constraint. In ECHO 10, this field is called RestrictionComment. 
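For reference, the smallest record the new 1.6.5 schema accepts at the top level needs only the four required properties listed above (GranuleUR, ProviderDates, CollectionReference, MetadataSpecification). The sketch below builds such a record with Gson; every value, the ProviderDates Type, and the member names inside MetadataSpecification are illustrative assumptions rather than values taken from the schema text shown here.

```java
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

public class MinimalUmmgSketch {
    public static void main(String[] args) {
        JsonObject ummg = new JsonObject();
        ummg.addProperty("GranuleUR", "EXAMPLE_GRANULE_r1");

        // One ProviderDates entry: Date and Type are the required members.
        JsonObject providerDate = new JsonObject();
        providerDate.addProperty("Date", "2023-09-01T00:00:00Z");
        providerDate.addProperty("Type", "Insert");
        JsonArray providerDates = new JsonArray();
        providerDates.add(providerDate);
        ummg.add("ProviderDates", providerDates);

        // CollectionReference via the ShortName/Version branch of the oneOf.
        JsonObject collectionRef = new JsonObject();
        collectionRef.addProperty("ShortName", "EXAMPLE_COLLECTION");
        collectionRef.addProperty("Version", "1.0");
        ummg.add("CollectionReference", collectionRef);

        // MetadataSpecification: member names and URL are assumptions.
        JsonObject spec = new JsonObject();
        spec.addProperty("Name", "UMM-G");
        spec.addProperty("Version", "1.6.5");
        spec.addProperty("URL", "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.5");
        ummg.add("MetadataSpecification", spec);

        System.out.println(ummg);
    }
}
```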
Additional detailed instructions on how to access the granule data may be entered in this field.", + "type": "string", + "minLength": 1, + "maxLength": 4000 + }, + "Value": { + "description": "Numeric value that is used with Access Control Language (ACLs) to restrict access to this granule. For example, a provider might specify a granule level ACL that hides all granules with a value element set to 15. In ECHO, this field is called RestrictionFlag.", + "type": "number" + } + }, + "required": ["Value"] + }, + "DataGranuleType": { + "type": "object", + "additionalProperties": false, + "description": "This entity stores the basic descriptive characteristics associated with a granule.", + "properties": { + "ArchiveAndDistributionInformation": { + "description": "A list of the file(s) or file package(s) that make up the granule. A file package is something like a tar or zip file.", + "type": "array", + "items": { + "$ref": "#/definitions/ArchiveAndDistributionInformationType" + }, + "minItems": 1, + "uniqueItems":true + }, + "ReprocessingPlanned": { + "description": "Granule level, stating what reprocessing may be performed on this granule.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "ReprocessingActual": { + "description": "Granule level, stating what reprocessing has been performed on this granule.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "DayNightFlag": { + "description": "This attribute is used to identify if a granule was collected during the day, night (between sunset and sunrise) or both.", + "type": "string", + "enum": ["Day", "Night", "Both", "Unspecified"] + }, + "ProductionDateTime": { + "description": "The date and time a specific granule was produced by a PGE.", + "format": "date-time", + "type": "string" + }, + "Identifiers": { + "description": "This holds any granule identifiers the provider wishes to provide.", + "type": "array", + "items": {"$ref": "#/definitions/IdentifierType"}, + "minItems": 1, + "uniqueItems":true + } + }, + "required": ["DayNightFlag", "ProductionDateTime"] + }, + "ArchiveAndDistributionInformationType": { + "description": "This set of elements describes a file package or a file that contains other files. Normally this is either a tar or a zip file.", + "anyOf": [{"$ref": "#/definitions/FilePackageType"}, {"$ref": "#/definitions/FileType"}] + }, + "FilePackageType": { + "type": "object", + "additionalProperties": false, + "description": "This set of elements describes a file package or a file that contains other files. Normally this is either a tar or a zip file.", + "properties": { + "Name": { + "description": "This field describes the name of the actual file.", + "$ref": "#/definitions/FileNameType" + }, + "SizeInBytes": { + "description": "The size in Bytes of the volume of data contained in the granule. Bytes are defined as eight bits. Please use this element instead of or inclusive with the Size element. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.", + "type": "integer" + }, + "Size": { + "description": "The size of the volume of data contained in the granule. Please use the SizeInBytes element either instead of this one or inclusive of this one. 
The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.", + "type": "number" + }, + "SizeUnit": { + "description": "The unit of the file size.", + "$ref": "#/definitions/FileSizeUnitEnum" + }, + "Format": { + "description": "This element defines a single format for a distributable artifact.", + "$ref": "#/definitions/DataFormatType" + }, + "MimeType": { + "description": "The mime type of the resource.", + "$ref": "#/definitions/MimeTypeEnum" + }, + "Checksum": { + "description": "Allows the provider to provide the checksum value for the file.", + "$ref": "#/definitions/ChecksumType" + }, + "Files": { + "description": "Allows the provider to add the list of the files that are included in this one.", + "type": "array", + "items": {"$ref": "#/definitions/FileType"}, + "uniqueItems": true, + "minItems": 1 + } + }, + "required": ["Name"], + "dependencies": { + "Size": ["SizeUnit"] + } + }, + "FileType": { + "type": "object", + "additionalProperties": false, + "description": "This set of elements describes a file. The file can be a part of the entire granule or is the granule.", + "properties": { + "Name": { + "description": "This field describes the name of the actual file.", + "$ref": "#/definitions/FileNameType" + }, + "SizeInBytes": { + "description": "The size in Bytes of the volume of data contained in the granule. Bytes are defined as eight bits. Please use this element instead of or inclusive with the Size element. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.", + "type": "integer" + }, + "Size": { + "description": "The size of the volume of data contained in the granule. Please use the SizeInBytes element either instead of this one or inclusive of this one. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. 
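A plausible DataGranule fragment with one ArchiveAndDistributionInformation entry is sketched below; the sizes, checksum, and Format keyword are illustrative (Format is assumed to be a valid GCMD GranuleDataFormat keyword). Note the dependency above: whenever Size is present, SizeUnit becomes mandatory.

```json
"DataGranule": {
  "DayNightFlag": "Unspecified",
  "ProductionDateTime": "2020-02-29T12:20:15.000Z",
  "ArchiveAndDistributionInformation": [
    {
      "Name": "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc",
      "SizeInBytes": 334411,
      "Size": 0.33,
      "SizeUnit": "MB",
      "Format": "netCDF-4",
      "Checksum": { "Value": "d41d8cd98f00b204e9800998ecf8427e", "Algorithm": "MD5" }
    }
  ]
}
```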
For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.", + "type": "number" + }, + "SizeUnit": { + "description": "The unit of the file size.", + "$ref": "#/definitions/FileSizeUnitEnum" + }, + "Format": { + "description": "This element defines a single format for a distributable artifact.", + "$ref": "#/definitions/DataFormatType" + }, + "FormatType": { + "description": "Allows the provider to state whether the distributable item's format is its native format or another supported format.", + "type": "string", + "enum": ["Native", "Supported", "NA"] + }, + "MimeType": { + "description": "The mime type of the resource.", + "$ref": "#/definitions/MimeTypeEnum" + }, + "Checksum": { + "description": "Allows the provider to provide the checksum value for the file.", + "$ref": "#/definitions/ChecksumType" + } + }, + "required": ["Name"], + "dependencies": { + "Size": ["SizeUnit"] + } + }, + "IdentifierType" :{ + "type": "object", + "description": "This entity stores an identifier. If the identifier is part of the enumeration then use it. If the enumeration is 'Other', the provider must specify the identifier's name.", + "oneOf": [{ + "additionalProperties": false, + "properties": { + "Identifier": { + "description": "The identifier value.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "IdentifierType": { + "description": "The enumeration of known identifier types.", + "type": "string", + "enum": ["ProducerGranuleId", "LocalVersionId", "FeatureId", "CRID"] + }, + "IdentifierName": { + "description": "The name of the identifier.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + } + }, + "required": ["Identifier","IdentifierType"] + }, + { + "additionalProperties": false, + "properties": { + "Identifier": { + "description": "The identifier value.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "IdentifierType": { + "description": "The enumeration of known identifier types.", + "type": "string", + "enum": ["Other"] + }, + "IdentifierName": { + "description": "The Name of identifier.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + } + }, + "required": ["Identifier","IdentifierType","IdentifierName"] + }] + }, + "PGEVersionClassType": { + "type": "object", + "additionalProperties": false, + "description": "This entity stores basic descriptive characteristics related to the Product Generation Executable associated with a granule.", + "properties": { + "PGEName": { + "description": "Name of product generation executable.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "PGEVersion": { + "description": "Version of the product generation executable that produced the granule.", + "type": "string", + "minLength": 1, + "maxLength": 50 + } + }, + "required": ["PGEVersion"] + }, + "TemporalExtentType": { + "type": "object", + "description": "Information which describes the temporal extent of a specific granule.", + "oneOf": [{ + "additionalProperties": false, + "properties": { + "RangeDateTime": { + "description": "Stores the data acquisition start and end date/time for a granule.", + "$ref": "#/definitions/RangeDateTimeType" + } + }, + "required": ["RangeDateTime"] + }, { + "additionalProperties": false, + "properties": { + "SingleDateTime": { + "description": "Stores the data acquisition date/time for a granule.", + "format": 
"date-time", + "type": "string" + } + }, + "required": ["SingleDateTime"] + }] + }, + "RangeDateTimeType": { + "type": "object", + "additionalProperties": false, + "description": "Stores the data acquisition start and end date/time for a granule.", + "properties": { + "BeginningDateTime": { + "description": "The time when the temporal coverage period being described began.", + "format": "date-time", + "type": "string" + }, + "EndingDateTime": { + "description": "The time when the temporal coverage period being described ended.", + "format": "date-time", + "type": "string" + } + }, + "required": ["BeginningDateTime"] + }, + "SpatialExtentType": { + "type": "object", + "additionalProperties": false, + "description": "This class contains attributes which describe the spatial extent of a granule. Spatial Extent includes any or all of Granule Localities, Horizontal Spatial Domain, and Vertical Spatial Domain.", + "properties": { + "GranuleLocalities": { + "description": "This entity stores information used at the granule level to describe the labeling of granules with compounded time/space text values and which are subsequently used to define more phenomenological-based granules, thus the locality type and description are contained.", + "type": "array", + "items": {"$ref": "#/definitions/GranuleLocalityType"}, + "minItems": 1, + "uniqueItems": true + }, + "HorizontalSpatialDomain": { + "description": "This represents the granule horizontal spatial domain information.", + "$ref": "#/definitions/HorizontalSpatialDomainType" + }, + "VerticalSpatialDomains": { + "description": "This represents the domain value and type for the granule's vertical spatial domain.", + "type": "array", + "items": {"$ref": "#/definitions/VerticalSpatialDomainType"}, + "minItems":1, + "uniqueItems":true + } + }, + "anyOf": [{ + "required": ["GranuleLocalities"] + }, { + "required": ["HorizontalSpatialDomain"] + }, { + "required": ["VerticalSpatialDomains"] + }] + }, + "HorizontalSpatialDomainType": { + "type": "object", + "description": "Information about a granule with horizontal spatial coverage.", + "additionalProperties": false, + "properties": { + "ZoneIdentifier": { + "description": "The appropriate numeric or alpha code used to identify the various zones in the granule's grid coordinate system.", + "$ref": "#/definitions/ZoneIdentifierType" + }, + "Geometry": { + "description": "This entity holds the geometry representing the spatial coverage information of a granule.", + "$ref": "#/definitions/GeometryType" + }, + "Orbit": { + "description": "This entity stores orbital coverage information of the granule. This coverage is an alternative way of expressing granule spatial coverage. This information supports orbital backtrack searching on a granule.", + "$ref": "#/definitions/OrbitType" + }, + "Track": { + "description": "This element stores track information of the granule. Track information is used to allow a user to search for granules whose spatial extent is based on an orbital cycle, pass, and tile mapping. Though it is derived from the SWOT mission requirements, it is intended that this element type be generic enough so that other missions can make use of it. 
While track information is a type of spatial domain, it is expected that the metadata provider will provide geometry information that matches the spatial extent of the track information.", + "$ref": "#/definitions/TrackType" + } + }, + "oneOf": [{ + "required": ["Geometry"] + }, { + "required": ["Orbit"] + }] + }, + "GeometryType": { + "type": "object", + "additionalProperties": false, + "description": "This entity holds the geometry representing the spatial coverage information of a granule.", + "properties": { + "Points": { + "description": "The horizontal spatial coverage of a point.", + "type": "array", + "items": { + "$ref": "#/definitions/PointType" + }, + "minItems": 1, + "uniqueItems": true + }, + "BoundingRectangles": { + "description": "This entity holds the horizontal spatial coverage of a bounding box.", + "type": "array", + "items": { + "$ref": "#/definitions/BoundingRectangleType" + }, + "minItems": 1, + "uniqueItems": true + }, + "GPolygons": { + "description": "A GPolygon specifies an area on the earth represented by a main boundary with optional boundaries for regions excluded from the main boundary.", + "type": "array", + "items": { + "$ref": "#/definitions/GPolygonType" + }, + "minItems": 1, + "uniqueItems": true + }, + "Lines": { + "description": "This entity holds the horizontal spatial coverage of a line. A line area contains at least two points.", + "type": "array", + "items": { + "$ref": "#/definitions/LineType" + }, + "minItems": 1, + "uniqueItems": true + } + }, + "anyOf": [{ + "required": ["Points"] + }, { + "required": ["BoundingRectangles"] + }, { + "required": ["GPolygons"] + }, { + "required": ["Lines"] + }] + }, + "PointType": { + "type": "object", + "additionalProperties": false, + "description": "The longitude and latitude values of a spatially referenced point in degrees.", + "properties": { + "Longitude": { + "$ref": "#/definitions/LongitudeType" + }, + "Latitude": { + "$ref": "#/definitions/LatitudeType" + } + }, + "required": ["Longitude", "Latitude"] + }, + "BoundingRectangleType": { + "type": "object", + "additionalProperties": false, + "description": "This entity holds the horizontal spatial coverage of a bounding box.", + "properties": { + "WestBoundingCoordinate": { + "$ref": "#/definitions/LongitudeType" + }, + "NorthBoundingCoordinate": { + "$ref": "#/definitions/LatitudeType" + }, + "EastBoundingCoordinate": { + "$ref": "#/definitions/LongitudeType" + }, + "SouthBoundingCoordinate": { + "$ref": "#/definitions/LatitudeType" + } + }, + "required": ["WestBoundingCoordinate", "NorthBoundingCoordinate", "EastBoundingCoordinate", "SouthBoundingCoordinate"] + }, + "GPolygonType": { + "type": "object", + "additionalProperties": false, + "description": "A GPolygon specifies an area on the earth represented by a main boundary with optional boundaries for regions excluded from the main boundary.", + "properties": { + "Boundary": { + "$ref": "#/definitions/BoundaryType" + }, + "ExclusiveZone": { + "$ref": "#/definitions/ExclusiveZoneType" + } + }, + "required": ["Boundary"] + }, + "BoundaryType": { + "type": "object", + "additionalProperties": false, + "description": "A boundary is set of points connected by straight lines representing a polygon on the earth. It takes a minimum of three points to make a boundary. 
Points must be specified in counter-clockwise order and closed (the first and last vertices are the same).", + "properties": { + "Points": { + "type": "array", + "items": { + "$ref": "#/definitions/PointType" + }, + "minItems": 3 + } + }, + "required": ["Points"] + }, + "ExclusiveZoneType": { + "type": "object", + "additionalProperties": false, + "description": "Contains the excluded boundaries from the GPolygon.", + "properties": { + "Boundaries": { + "type": "array", + "items": { + "$ref": "#/definitions/BoundaryType" + }, + "minItems": 1 + } + }, + "required": ["Boundaries"] + }, + "LineType": { + "type": "object", + "additionalProperties": false, + "description": "This entity holds the horizontal spatial coverage of a line. A line area contains at lease two points.", + "properties": { + "Points": { + "type": "array", + "items": { + "$ref": "#/definitions/PointType" + }, + "minItems": 2 + } + }, + "required": ["Points"] + }, + "OrbitType":{ + "type": "object", + "additionalProperties": false, + "description": "This entity stores orbital coverage information of the granule. This coverage is an alternative way of expressing granule spatial coverage. This information supports orbital backtrack searching on a granule.", + "properties": { + "AscendingCrossing": { + "description": "Equatorial crossing on the ascending pass in decimal degrees longitude. The convention we've been using is it's the first included ascending crossing if one is included, and the prior ascending crossing if none is included (e.g. descending half orbits).", + "$ref": "#/definitions/LongitudeType" + }, + "StartLatitude": { + "description": "Granule's starting latitude.", + "$ref": "#/definitions/LatitudeType" + }, + "StartDirection": { + "description": "Ascending or descending. Valid input: 'A' or 'D'", + "$ref": "#/definitions/OrbitDirectionTypeEnum" + }, + "EndLatitude": { + "description": "Granule's ending latitude.", + "$ref": "#/definitions/LatitudeType" + }, + "EndDirection": { + "description": "Ascending or descending. Valid input: 'A' or 'D'", + "$ref": "#/definitions/OrbitDirectionTypeEnum" + } + }, + "required": ["AscendingCrossing", "StartLatitude", "StartDirection", "EndLatitude", "EndDirection"] + }, + "TrackType": { + "type": "object", + "additionalProperties": false, + "description": "This element stores track information of the granule. Track information is used to allow a user to search for granules whose spatial extent is based on an orbital cycle, pass, and tile mapping. Though it is derived from the SWOT mission requirements, it is intended that this element type be generic enough so that other missions can make use of it. While track information is a type of spatial domain, it is expected that the metadata provider will provide geometry information that matches the spatial extent of the track information.", + "properties": { + "Cycle": { + "description": "An integer that represents a specific set of orbital spatial extents defined by passes and tiles. Though intended to be generic, this comes from a SWOT mission requirement where each cycle represents a set of 1/2 orbits. Each 1/2 orbit is called a 'pass'. During science mode, a cycle represents 21 days of 14 full orbits or 588 passes.", + "type": "integer" + }, + "Passes": { + "description": "A pass number identifies a subset of a granule's spatial extent. This element holds a list of pass numbers and their tiles that exist in the granule. It will allow a user to search by pass number and its tiles that are contained with in a cycle number. 
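HorizontalSpatialDomainType above is likewise a oneOf: a granule supplies either a Geometry or an Orbit. A backtrack-style Orbit sketch with illustrative values (the crossing and latitudes must stay within the LongitudeType/LatitudeType ranges defined near the end of the schema):

```json
"HorizontalSpatialDomain": {
  "Orbit": {
    "AscendingCrossing": -179.99,
    "StartLatitude": -65.65,
    "StartDirection": "A",
    "EndLatitude": 65.02,
    "EndDirection": "D"
  }
}
```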
While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granule's spatial extent.", + "type": "array", + "items": { + "$ref": "#/definitions/TrackPassTileType" + }, + "minItems": 1 + } + }, + "required": ["Cycle"] + }, + "TrackPassTileType": { + "type": "object", + "additionalProperties": false, + "description": "This element stores a track pass and its tile information. It will allow a user to search by pass number and their tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granules spatial extent.", + "properties": { + "Pass": { + "description": "A pass number identifies a subset of a granule's spatial extent. This element holds a pass number that exists in the granule and will allow a user to search by pass number that is contained within a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit.", + "type": "integer" + }, + "Tiles": { + "description": "A tile is a subset of a pass' spatial extent. This element holds a list of tile identifiers that exist in the granule and will allow a user to search by tile identifier that is contained within a pass number within a cycle number. Though intended to be generic, this comes from a SWOT mission requirement where a tile is a spatial extent that encompasses either a square scanning swath to the left or right of the ground track or a rectangle that includes a full scanning swath both to the left and right of the ground track.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1 + } + }, + "required": ["Pass"] + }, + "VerticalSpatialDomainType": { + "type": "object", + "additionalProperties": false, + "description": "This entity contains the type and value for the granule's vertical spatial domain.", + "properties": { + "Type": { + "description": "Describes the type of the area of vertical space covered by the granule locality.", + "$ref": "#/definitions/VerticalSpatialDomainTypeEnum" + }, + "Value": { + "description": "Describes the extent of the area of vertical space covered by the granule. Use this for Atmosphere profiles or for a specific value.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "MinimumValue": { + "description": "Describes the extent of the area of vertical space covered by the granule. Use this and MaximumValue to represent a range of values (Min and Max).", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "MaximumValue": { + "description": "Describes the extent of the area of vertical space covered by the granule. 
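The Track element is what the Cycle/Pass/Tile string parsing in MetadataFilesToEcho (tested later in this diff) populates; the sketch below mirrors the test input "Cycle: 406, Pass: [40, Tiles: 4-5L 4-5R] [41, Tiles: 6R 6L]". Because of the oneOf on HorizontalSpatialDomainType, a Track must still be accompanied by a Geometry or an Orbit.

```json
"Track": {
  "Cycle": 406,
  "Passes": [
    { "Pass": 40, "Tiles": ["4L", "5L", "4R", "5R"] },
    { "Pass": 41, "Tiles": ["6R", "6L"] }
  ]
}
```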
Use this and MinimumValue to represent a range of values (Min and Max).", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "Unit": { + "description": "Describes the unit of the vertical extent value.", + "type": "string", + "enum": ["Fathoms", "Feet", "HectoPascals", "Kilometers", "Meters", "Millibars","PoundsPerSquareInch", "Atmosphere", "InchesOfMercury", "InchesOfWater"] + } + }, + "oneOf": [{ + "required": ["Type", "Value"] + }, { + "required":["Type","MinimumValue", "MaximumValue"] + }], + "allOf": [{ + "not": { + "required": ["Value", "MinimumValue"] + } + }, { + "not": { + "required": ["Value", "MaximumValue"] + } + }] + }, + "OrbitCalculatedSpatialDomainType": { + "type": "object", + "additionalProperties": false, + "description": "This entity is used to store the characteristics of the orbit calculated spatial domain to include the model name, orbit number, start and stop orbit number, equator crossing date and time, and equator crossing longitude.", + "properties": { + "OrbitalModelName": { + "description": "The reference to the orbital model to be used to calculate the geo-location of this data in order to determine global spatial extent.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "OrbitNumber": { + "description": "The orbit number to be used in calculating the spatial extent of this data.", + "type": "integer" + }, + "BeginOrbitNumber": { + "description": "Orbit number at the start of the data granule.", + "type": "integer" + }, + "EndOrbitNumber": { + "description": "Orbit number at the end of the data granule.", + "type": "integer" + }, + "EquatorCrossingLongitude": { + "description": "This attribute represents the terrestrial longitude of the descending equator crossing.", + "$ref": "#/definitions/LongitudeType" + }, + "EquatorCrossingDateTime": { + "description": "This attribute represents the date and time of the descending equator crossing.", + "format": "date-time", + "type": "string" + } + }, + "anyOf": [{ + "required": ["OrbitalModelName"] + }, { + "required": ["EquatorCrossingLongitude"] + }, { + "required": ["EquatorCrossingDateTime"] + }, { + "required": ["OrbitNumber"] + }, { + "required": ["BeginOrbitNumber", "EndOrbitNumber"] + }], + "allOf": [{ + "not": { + "required": ["OrbitNumber", "BeginOrbitNumber"] + } + }, { + "not": { + "required": ["OrbitNumber", "EndOrbitNumber"] + } + }] + }, + "MeasuredParameterType": { + "type": "object", + "additionalProperties": false, + "description": "This entity contains the name of the geophysical parameter expressed in the data as well as associated quality flags and quality statistics. The quality statistics element contains measures of quality for the granule. The parameters used to set these measures are not preset and will be determined by the data producer. Each set of measures can occur many times either for the granule as a whole or for individual parameters. 
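The oneOf/allOf pair above means a vertical domain carries either a single Value or a MinimumValue/MaximumValue pair, never a mix of the two. A depth-range sketch with illustrative numbers:

```json
"VerticalSpatialDomains": [
  {
    "Type": "Depth",
    "MinimumValue": "0",
    "MaximumValue": "10",
    "Unit": "Meters"
  }
]
```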
The quality flags contain the science, operational and automatic quality flags which indicate the overall quality assurance levels of specific parameter values within a granule.", + "properties": { + "ParameterName": { + "description": "The measured science parameter expressed in the data granule.", + "type": "string", + "minLength": 1, + "maxLength": 250 + }, + "QAStats": { + "description": "The associated quality statistics.", + "$ref": "#/definitions/QAStatsType" + }, + "QAFlags": { + "description": "The associated quality flags.", + "$ref": "#/definitions/QAFlagsType" + } + }, + "required": ["ParameterName"] + }, + "QAStatsType": { + "type": "object", + "additionalProperties": false, + "description": "The quality statistics element contains measures of quality for the granule. The parameters used to set these measures are not preset and will be determined by the data producer. Each set of measures can occur many times either for the granule as a whole or for individual parameters.", + "properties": { + "QAPercentMissingData": { + "description": "Granule level % missing data. This attribute can be repeated for individual parameters within a granule.", + "type": "number", + "minimum": 0, + "maximum": 100 + }, + "QAPercentOutOfBoundsData": { + "description": "Granule level % out of bounds data. This attribute can be repeated for individual parameters within a granule.", + "type": "number", + "minimum": 0, + "maximum": 100 + }, + "QAPercentInterpolatedData": { + "description": "Granule level % interpolated data. This attribute can be repeated for individual parameters within a granule.", + "type": "number", + "minimum": 0, + "maximum": 100 + }, + "QAPercentCloudCover": { + "description": "This attribute is used to characterize the cloud cover amount of a granule. This attribute may be repeated for individual parameters within a granule. (Note - there may be more than one way to define a cloud or it's effects within a product containing several parameters; i.e. this attribute may be parameter specific).", + "type": "number", + "minimum": 0, + "maximum": 100 + } + }, + "anyOf": [{ + "required": ["QAPercentMissingData"] + }, { + "required": ["QAPercentOutOfBoundsData"] + }, { + "required": ["QAPercentInterpolatedData"] + }, { + "required": ["QAPercentCloudCover"] + }] + }, + "QAFlagsType": { + "type": "object", + "additionalProperties": false, + "description": "The quality flags contain the science, operational and automatic quality flags which indicate the overall quality assurance levels of specific parameter values within a granule.", + "properties": { + "AutomaticQualityFlag": { + "description": "The granule level flag applying generally to the granule and specifically to parameters the granule level. When applied to parameter, the flag refers to the quality of that parameter for the granule (as applicable). The parameters determining whether the flag is set are defined by the developer and documented in the Quality Flag Explanation.", + "type": "string", + "enum": ["Passed", "Failed", "Suspect", "Undetermined"] + }, + "AutomaticQualityFlagExplanation": { + "description": "A text explanation of the criteria used to set automatic quality flag; including thresholds or other criteria.", + "type": "string", + "minLength": 1, + "maxLength": 2048 + }, + "OperationalQualityFlag": { + "description": "The granule level flag applying both generally to a granule and specifically to parameters at the granule level. 
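One MeasuredParameterType entry might look like the sketch below; the parameter name is illustrative, and the QAStats and QAFlags blocks each only need to satisfy one member of their anyOf lists.

```json
{
  "ParameterName": "sea_surface_temperature",
  "QAStats": {
    "QAPercentMissingData": 5.0,
    "QAPercentOutOfBoundsData": 0.0
  },
  "QAFlags": {
    "AutomaticQualityFlag": "Passed",
    "AutomaticQualityFlagExplanation": "Passed all automated checks."
  }
}
```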
When applied to parameter, the flag refers to the quality of that parameter for the granule (as applicable). The parameters determining whether the flag is set are defined by the developers and documented in the QualityFlagExplanation.", + "type": "string", + "enum": ["Passed", "Failed", "Being Investigated", "Not Investigated", "Inferred Passed", "Inferred Failed", "Suspect", "Undetermined"] + }, + "OperationalQualityFlagExplanation": { + "description": "A text explanation of the criteria used to set operational quality flag; including thresholds or other criteria.", + "type": "string", + "minLength": 1, + "maxLength": 2048 + }, + "ScienceQualityFlag": { + "description": "Granule level flag applying to a granule, and specifically to parameters. When applied to parameter, the flag refers to the quality of that parameter for the granule (as applicable). The parameters determining whether the flag is set are defined by the developers and documented in the Quality Flag Explanation.", + "type": "string", + "enum": ["Passed", "Failed", "Being Investigated", "Not Investigated", "Inferred Passed", "Inferred Failed", "Suspect", "Hold", "Undetermined"] + }, + "ScienceQualityFlagExplanation": { + "description": "A text explanation of the criteria used to set science quality flag; including thresholds or other criteria.", + "type": "string", + "minLength": 1, + "maxLength": 2048 + } + }, + "anyOf": [{ + "required": ["AutomaticQualityFlag"] + }, { + "required": ["OperationalQualityFlag"] + }, { + "required": ["ScienceQualityFlag"] + }] + }, + "PlatformType": { + "type": "object", + "additionalProperties": false, + "description": "A reference to a platform in the parent collection that is associated with the acquisition of the granule. The platform must exist in the parent collection. For example, Platform types may include (but are not limited to): ADEOS-II, AEM-2, Terra, Aqua, Aura, BALLOONS, BUOYS, C-130, DEM, DMSP-F1,etc.", + "properties": { + "ShortName": { + "$ref": "#/definitions/ShortNameType" + }, + "Instruments": { + "description": "References to the devices in the parent collection that were used to measure or record data, including direct human observation.", + "type": "array", + "items": { + "$ref": "#/definitions/InstrumentType" + }, + "minItems": 1 + } + }, + "required": ["ShortName"] + }, + "InstrumentType": { + "type": "object", + "additionalProperties": false, + "description": "A reference to the device in the parent collection that was used to measure or record data, including direct human observation. In cases where instruments have a single composed of child instrument (sensor) or the instrument and composed of child instrument (sensor) are used synonymously (e.g. AVHRR) the both Instrument and composed of child instrument should be recorded. The child instrument information is represented by child entities. The instrument reference may contain granule specific characteristics and operation modes. These characteristics and modes are not checked against the referenced instrument.", + "properties": { + "ShortName": { + "$ref": "#/definitions/ShortNameType" + }, + "Characteristics": { + "description": "This entity is used to define item additional attributes (unprocessed, custom data).", + "type": "array", + "items": { + "$ref": "#/definitions/CharacteristicType" + }, + "minItems": 1, + "uniqueItems": true + }, + "ComposedOf": { + "description": "References to instrument subcomponents in the parent collection's instrument used by various sources in the granule. 
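A Platforms entry is only a reference back to the parent collection, so the minimal sketch below is already valid; the platform and instrument short names are illustrative, chosen in the spirit of the MODIS Aqua fixtures used in the tests.

```json
"Platforms": [
  {
    "ShortName": "Aqua",
    "Instruments": [
      { "ShortName": "MODIS" }
    ]
  }
]
```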
An instrument subcomponent reference may contain characteristics specific to the granule.", + "type": "array", + "items": { + "$ref": "#/definitions/InstrumentType" + }, + "minItems": 1, + "uniqueItems": true + }, + "OperationalModes": { + "description": "This entity identifies the instrument's operational modes for a specific collection associated with the channel, wavelength, and FOV (e.g., launch, survival, initialization, safe, diagnostic, standby, crosstrack, biaxial, solar calibration).", + "type": "array", + "items": { + "type": "string", + "minLength": 1, + "maxLength": 20 + }, + "minItems": 1, + "uniqueItems": true + } + }, + "required": ["ShortName"] + }, + "CharacteristicType": { + "type": "object", + "additionalProperties": false, + "description": "This entity is used to reference characteristics defined in the parent collection.", + "properties": { + "Name": { + "description": "The name of the characteristic attribute.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "Value": { + "description": "The value of the Characteristic attribute.", + "type": "string", + "minLength": 1, + "maxLength": 80 + } + }, + "required": ["Name", "Value"] + }, + "ProjectType": { + "type": "object", + "additionalProperties": false, + "description": "Information describing the scientific endeavor with which the granule is associated.", + "properties": { + "ShortName": { + "description": "The unique identifier by which a project is known. The project is the scientific endeavor associated with the acquisition of the collection.", + "type": "string", + "minLength": 1, + "maxLength": 40 + }, + "Campaigns": { + "description": "The name of the campaign/experiment (e.g. Global climate observing system).", + "type": "array", + "items": { + "$ref": "#/definitions/CampaignType" + }, + "minItems": 1, + "uniqueItems": true + } + }, + "required": ["ShortName"] + }, + "CampaignType": { + "description": "Information describing campaign names with which the granule is associated.", + "type": "string", + "minLength": 1, + "maxLength": 40 + }, + "AdditionalAttributeType": { + "type": "object", + "additionalProperties": false, + "description": "A reference to an additional attribute in the parent collection. The attribute reference may contain a granule specific value that will override the value in the parent collection for this granule. An attribute with the same name must exist in the parent collection.", + "properties": { + "Name": { + "description": "The additional attribute's name.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "Values": { + "description": "Values of the additional attribute.", + "type": "array", + "items": { + "type": "string", + "minLength": 1, + "maxLength": 500 + }, + "minItems": 1 + } + }, + "required": ["Name", "Values"] + }, + "TilingIdentificationSystemType": { + "type": "object", + "additionalProperties": false, + "description": "This entity stores the tiling identification system for the granule. The tiling identification system information is an alternative way to express granule's spatial coverage based on a certain two dimensional coordinate system defined by the providers. 
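AdditionalAttributeType is the element that carries the BasinID support called out in the CHANGELOG and exercised in MetadataFilesToEchoTest later in this diff; a matching sketch:

```json
"AdditionalAttributes": [
  { "Name": "BasinID", "Values": ["123"] }
]
```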
The name must match the name in the parent collection.", + "properties": { + "TilingIdentificationSystemName": { + "$ref": "#/definitions/TilingIdentificationSystemNameEnum" + }, + "Coordinate1": { + "$ref": "#/definitions/TilingCoordinateType" + }, + "Coordinate2": { + "$ref": "#/definitions/TilingCoordinateType" + } + }, + "required": ["TilingIdentificationSystemName", "Coordinate1", "Coordinate2"] + }, + "TilingCoordinateType": { + "type": "object", + "additionalProperties": false, + "description": "Defines the minimum and maximum value for one dimension of a two dimensional coordinate system.", + "properties": { + "MinimumValue": { + "type": "number" + }, + "MaximumValue": { + "type": "number" + } + }, + "required": ["MinimumValue"] + }, + "RelatedUrlType": { + "type": "object", + "additionalProperties": false, + "description": "This entity holds all types of online URL associated with the granule such as guide document or ordering site etc.", + "properties": { + "URL": { + "description": "The URL for the relevant resource.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "Type": { + "description": "A keyword describing the type of the online resource to this resource.", + "$ref": "#/definitions/RelatedUrlTypeEnum" + }, + "Subtype": { + "description": "A keyword that provides more detailed information than Type of the online resource to this resource. For example if the Type=VIEW RELATED INFORMATION then the Subtype can be USER'S GUIDE or GENERAL DOCUMENTATION", + "$ref": "#/definitions/RelatedUrlSubTypeEnum" + }, + "Description": { + "description": "Description of the web page at this URL.", + "type": "string", + "minLength": 1, + "maxLength": 4000 + }, + "Format": { + "description": "The format of the resource.", + "$ref": "#/definitions/DataFormatType" + }, + "MimeType": { + "description": "The mime type of the resource.", + "$ref": "#/definitions/MimeTypeEnum" + }, + "Size": { + "description": "The size of the resource.", + "type": "number" + }, + "SizeUnit": { + "description": "Unit of information, together with Size determines total size in bytes of the resource.", + "$ref": "#/definitions/FileSizeUnitEnum" + } + }, + "required": ["URL", "Type"], + "dependencies": { + "Size": ["SizeUnit"] + } + }, + "ChecksumType": { + "type": "object", + "additionalProperties": false, + "description": "Allows the provider to provide a checksum value and checksum algorithm name to allow the user to calculate the checksum.", + "properties": { + "Value": { + "description": "Describes the checksum value for a file.", + "type": "string", + "minLength": 1, + "maxLength": 128 + }, + "Algorithm": { + "description": "The algorithm name by which the checksum was calulated. This allows the user to re-calculate the checksum to verify the integrity of the downloaded data.", + "type": "string", + "enum": ["Adler-32", "BSD checksum", "Fletcher-32", "Fletcher-64", "MD5", "POSIX", "SHA-1", "SHA-2", "SHA-256", "SHA-384", "SHA-512", "SM3", "SYSV"] + } + }, + "required": ["Value", "Algorithm"] + }, + "ProjectionNameType": { + "description": "Represents the native projection of the granule if the granule has a native projection. 
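RelatedUrlType is the element the ImageProcessor tests below append to when image links and their descriptions are added. A sketch with two entries modeled on the test fixtures; the MimeType and the first Description are assumptions:

```json
"RelatedUrls": [
  {
    "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-protected/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc",
    "Type": "GET DATA",
    "Description": "Download the granule data."
  },
  {
    "URL": "https://distribution_endpoint.jpl.nasa.gov/s3distribute/dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/sst.png",
    "Type": "GET RELATED VISUALIZATION",
    "Description": "sst",
    "MimeType": "image/png"
  }
]
```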
The projection name must match the projection that has been defined in the parent collection.", + "type": "string", + "enum": ["Geographic", "Mercator", "Spherical Mercator", "Space Oblique Mercator", "Universal Transverse Mercator", "Military Grid Reference", "MODIS Sinusoidal System", "Sinusoidal", "Lambert Equal Area", "NSIDC EASE Grid North and South (Lambert EA)", "NSIDC EASE Grid Global", "EASE Grid 2.0 N. Polar", "Plate Carree", "Polar Stereographic", "WELD Albers Equal Area", "Canadian Albers Equal Area Conic", "Lambert Conformal Conic", "State Plane Coordinates", "Albers Equal Area Conic", "Transverse Mercator", "Lambert Azimuthal Equal Area", "UTM Northern Hemisphere", "NAD83 / UTM zone 17N", "UTM Southern Hemisphere", "Cylindrical"] + }, + "GridMappingNameType": { + "description": "Represents the native grid mapping of the granule, if the granule is gridded. The grid name must match a grid that has been defined in the parent collection.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "ProviderDateTypeEnum": { + "description": "The types of dates that a metadata record can have.", + "type": "string", + "enum": ["Create", "Insert", "Update", "Delete"] + }, + "FileNameType": { + "description": "This field describes the name of the actual file.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "FileSizeUnitEnum": { + "description": "The unit of the file size.", + "type": "string", + "enum": ["KB", "MB", "GB", "TB", "PB", "NA"] + }, + "DistributionMediaType": { + "description": "This element defines the media by which the end user can obtain the distributable item. Each media type is listed separately. Examples of media include: CD-ROM, 9 track tape, diskettes, hard drives, online, transparencies, hardcopy, etc.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "GranuleLocalityType" :{ + "description": "Provides name which spatial/temporal entity is known. This could change on a granule by granule basis. This attribute is paralleled by the AggregationType which applies at the collection level although locality has a more restricted usage. Several locality measures could be included in each granule.", + "type": "string", + "minLength": 1, + "maxLength": 1024 + }, + "LatitudeType": { + "description": "The latitude value of a spatially referenced point, in degrees. Latitude values range from -90 to 90.", + "type": "number", + "minimum": -90, + "maximum": 90 + }, + "LongitudeType": { + "description": "The longitude value of a spatially referenced point, in degrees. Longitude values range from -180 to 180.", + "type": "number", + "minimum": -180, + "maximum": 180 + }, + "OrbitDirectionTypeEnum": { + "description": "Orbit start and end direction. 
A for ascending orbit and D for descending.", + "type": "string", + "enum": ["A", "D"] + }, + "ZoneIdentifierType": { + "description": "The appropriate numeric or alpha code used to identify the various zones in the granule's grid coordinate system.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "VerticalSpatialDomainTypeEnum": { + "type": "string", + "enum": ["Atmosphere Layer","Pressure", "Altitude", "Depth"] + }, + "ShortNameType": { + "description": "The unique name of the platform or instrument.", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "TilingIdentificationSystemNameEnum": { + "type": "string", + "enum": ["CALIPSO", "MISR", "MODIS Tile EASE", "MODIS Tile SIN", "SMAP Tile EASE", "WELD Alaska Tile", "WELD CONUS Tile", "WRS-1", "WRS-2"] + }, + "RelatedUrlTypeEnum": { + "type": "string", + "enum": ["DOWNLOAD SOFTWARE", "EXTENDED METADATA", "GET DATA", "GET DATA VIA DIRECT ACCESS", "GET RELATED VISUALIZATION", "GOTO WEB TOOL", "PROJECT HOME PAGE", "USE SERVICE API", "VIEW RELATED INFORMATION"] + }, + "RelatedUrlSubTypeEnum": { + "type": "string", + "enum": ["MOBILE APP", "APPEARS", "DATA COLLECTION BUNDLE", "DATA TREE", "DATACAST URL", "DIRECT DOWNLOAD", "EOSDIS DATA POOL", "Earthdata Search", "GIOVANNI", "GoLIVE Portal", + "IceBridge Portal", "LAADS", "LANCE", "MIRADOR", "MODAPS", "NOAA CLASS", "NOMADS", "Order", "PORTAL", "Subscribe", "USGS EARTH EXPLORER", "VERTEX", "VIRTUAL COLLECTION", + "MAP", "WORLDVIEW", "LIVE ACCESS SERVER (LAS)", "MAP VIEWER", "SIMPLE SUBSET WIZARD (SSW)", "SUBSETTER", "GRADS DATA SERVER (GDS)", "MAP SERVICE", "OPENDAP DATA", + "OpenSearch", "SERVICE CHAINING", "TABULAR DATA STREAM (TDS)", "THREDDS DATA", "WEB COVERAGE SERVICE (WCS)", "WEB FEATURE SERVICE (WFS)", "WEB MAP SERVICE (WMS)", + "WEB MAP TILE SERVICE (WMTS)", "ALGORITHM DOCUMENTATION", "ALGORITHM THEORETICAL BASIS DOCUMENT (ATBD)", "ANOMALIES", "CASE STUDY", "DATA CITATION POLICY", "DATA QUALITY", + "DATA RECIPE", "DELIVERABLES CHECKLIST", "GENERAL DOCUMENTATION", "HOW-TO", "IMPORTANT NOTICE","INSTRUMENT/SENSOR CALIBRATION DOCUMENTATION", "MICRO ARTICLE", + "PI DOCUMENTATION", "PROCESSING HISTORY", "PRODUCT HISTORY", "PRODUCT QUALITY ASSESSMENT", "PRODUCT USAGE", "PRODUCTION HISTORY", "PUBLICATIONS", "READ-ME", + "REQUIREMENTS AND DESIGN", "SCIENCE DATA PRODUCT SOFTWARE DOCUMENTATION", "SCIENCE DATA PRODUCT VALIDATION", "USER FEEDBACK PAGE", "USER'S GUIDE", + "DMR++", "DMR++ MISSING DATA"] + }, + "MimeTypeEnum": { + "type": "string", + "enum": ["application/json", "application/xml", "application/x-netcdf", "application/x-hdfeos", "application/gml+xml", + "application/vnd.google-earth.kml+xml", "image/gif", "image/tiff", "image/bmp", "text/csv", + "text/xml", "application/pdf", "application/x-hdf", "application/x-hdf5", + "application/octet-stream", "application/vnd.google-earth.kmz", "image/jpeg", "image/png", + "image/vnd.collada+xml", "text/html", "text/plain", "application/zip", "application/gzip", "application/tar", + "application/tar+gzip", "application/tar+zip", "application/vnd.opendap.dap4.dmrpp+xml", "Not provided"] + }, + "DataFormatType": { + "description": "The format that granule data confirms to. 
While the value is listed as open to any text, CMR requires that it confirm to one of the values on the GranuleDataFormat values in the Keyword Management System: https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/GranuleDataFormat", + "type": "string", + "minLength": 1, + "maxLength": 80 + }, + "MetadataSpecificationType": + { + "type": "object", + "additionalProperties": false, + "description": "This object requires any metadata record that is validated by this schema to provide information about the schema.", + "properties": { + "URL": { + "description": "This element represents the URL where the schema lives. The schema can be downloaded.", + "type": "string", + "enum": ["https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4"] + }, + "Name": { + "description": "This element represents the name of the schema.", + "type": "string", + "enum": ["UMM-G"] + }, + "Version": { + "description": "This element represents the version of the schema.", + "type": "string", + "enum": ["1.6.4"] + } + }, + "required": ["URL", "Name", "Version"] + } + } +} \ No newline at end of file diff --git a/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java b/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java index 897be71..bce683e 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java +++ b/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java @@ -112,7 +112,7 @@ public void testEnsureClockwise2CounterclockwisePolygon() throws ParseException Geometry geometry = wktReader.read(clockwisePolygonWKT); Coordinate[] coordinates = geometry.getCoordinates(); // the original input array's trailing 3 coordinates will become leading 3 coordinates - Coordinate[] reversedCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, coordinates); + Coordinate[] reversedCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, 0, coordinates); assertTrue(reversedCoordinates[0].x == Double.valueOf(45.261678) && reversedCoordinates[0].y == Double.valueOf(-65.651128)); assertTrue(reversedCoordinates[1].x == Double.valueOf(45.36766) && @@ -132,7 +132,7 @@ public void testEnsureCounterclockwise2CounterclockwisePolygon() throws ParseExc WKTReader wktReader = new WKTReader(); Geometry geometry = wktReader.read(clockwisePolygonWKT); Coordinate[] coordinates = geometry.getCoordinates(); - Coordinate[] sameSequenceCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, coordinates); + Coordinate[] sameSequenceCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, 0, coordinates); assertTrue(sameSequenceCoordinates[0].x == Double.valueOf(-66.1897) && sameSequenceCoordinates[0].y == Double.valueOf(63.1972)); assertTrue(sameSequenceCoordinates[1].x == Double.valueOf(-83.1304) && diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java b/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java index d1e80fd..86ce1d0 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java +++ b/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java @@ -9,7 +9,9 @@ AggregatorRelease_4_3_0_Test.class, gov.nasa.cumulus.metadata.test.MetadataFilesToEchoTest.class, gov.nasa.cumulus.metadata.test.UMMTest.class, - + gov.nasa.cumulus.metadata.test.ImageProcessorTest.class, + gov.nasa.cumulus.metadata.test.FootprintProcessorTest.class, + }) public class AggregatorTestSuite { // the class remains completely empty, diff --git 
a/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java index bd3daab..49bd9c6 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java +++ b/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java @@ -76,10 +76,10 @@ public void testGetImageDownloadUrl() { try { ImageProcessor imageProcessor = new ImageProcessor(); String downloadUri = imageProcessor.getImageDownloadUrl("https://distribution/xxx/bb/download", - "my-public-bucket", "/collection_name/granuleId/Image1.jpg"); + "my-public-bucket","/collection_name/granuleId/Image1.jpg"); assertEquals(downloadUri, "https://distribution/xxx/bb/download/my-public-bucket/collection_name/granuleId/Image1.jpg"); - } catch (URISyntaxException uriSyntaxException) { + } catch (URISyntaxException uriSyntaxException) { System.out.println(uriSyntaxException); fail(); } @@ -88,24 +88,26 @@ public void testGetImageDownloadUrl() { /** * This test purposely make getImageDownloadUrl throwing URISyntaxException * by passing illegal character '^' as distribution_url. - *

+ * * fail() will force the test case to fail. Since the test is to force URISyntaxException * to be thrown, it is a failed case if not thrown. + * */ @Test public void testGetImageDownloadUrl_URISyntaxException() { try { ImageProcessor imageProcessor = new ImageProcessor(); String downloadUri = imageProcessor.getImageDownloadUrl("https://distribution/xxx/bb/download^12334", - "my-public-bucket", "s3://my-public-bucket/collection_name/granuleId/image1.jpg"); + "my-public-bucket","s3://my-public-bucket/collection_name/granuleId/image1.jpg"); fail(); - } catch (URISyntaxException uriSyntaxException) { + } catch (URISyntaxException uriSyntaxException) { assertTrue(true); } } @Test public void testAppendImageUrl() { + try { /** * From the input message , the distribution_endpoint is set to be: @@ -115,7 +117,7 @@ public void testAppendImageUrl() { * "s3://dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/standard-deviation.jpg", */ ImageProcessor imageProcessor = new ImageProcessor(); - String newCMRStr = imageProcessor.appendImageUrls(cmaString, cmrString); + String newCMRStr = imageProcessor.appendImageUrl(cmaString, cmrString); JsonObject cmrJsonObj = JsonParser.parseString(newCMRStr).getAsJsonObject(); JsonArray relatedUrls = cmrJsonObj.getAsJsonArray("RelatedUrls"); int count = findTimesOfAppearance(relatedUrls, @@ -126,18 +128,28 @@ public void testAppendImageUrl() { "https://distribution_endpoint.jpl.nasa.gov/s3distribute/dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/standard-deviation.jpg"); assertEquals(count, 1); + // test description + for (JsonElement relatedUrl : relatedUrls) { + JsonObject fileObj = relatedUrl.getAsJsonObject(); + String ummg_downloadUrl = StringUtils.trim(fileObj.get("URL").getAsString()); + if(ummg_downloadUrl.equals("https://distribution_endpoint.jpl.nasa.gov/s3distribute/dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/sst.png")){ + assertEquals(fileObj.get("Description").getAsString(), "sst"); + } + } + } catch (URISyntaxException | IOException pe) { System.out.println("testAppendImageUrl Error:" + pe); pe.printStackTrace(); } } + int findTimesOfAppearance(JsonArray relatedUrls, String downloadUrl) { int count = 0; downloadUrl = StringUtils.trim(downloadUrl); for (JsonElement relatedUrl : relatedUrls) { String ummg_downloadUrl = StringUtils.trim(relatedUrl.getAsJsonObject().get("URL").getAsString()); - if (StringUtils.compare(ummg_downloadUrl, downloadUrl) == 0) count++; + if(StringUtils.compare(ummg_downloadUrl, downloadUrl) ==0) count ++; } return count; } @@ -145,7 +157,7 @@ int findTimesOfAppearance(JsonArray relatedUrls, String downloadUrl) { @Test public void testIsDownloadUrlAlreadyExist() { ImageProcessor imageProcessor = new ImageProcessor(); - JsonObject cmrJsonObj = JsonParser.parseString(cmrString).getAsJsonObject(); + JsonObject cmrJsonObj = new JsonParser().parse(cmrString).getAsJsonObject(); JsonArray relatedUrls = cmrJsonObj.getAsJsonArray("RelatedUrls"); boolean isAlreadyExist = imageProcessor.isDownloadUrlAlreadyExist(relatedUrls, "https://jh72u371y2.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/MODIS_A-JPL-L2P-v2019.0/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.sses_standard_deviation.png"); @@ -161,19 +173,19 @@ public void testIsDownloadUrlAlreadyExist() { @Test public void testCreateOutputMessage() { ImageProcessor processor = new ImageProcessor(); - String output = processor.createOutputMessage(cmaString, 334411, + String output = processor.createOutputMessage(cmaString, 
334411, new BigInteger("3244"), "granuleId-3344-22.cmr.json", "my-private", "CMR", "collectionName"); JsonElement jsonElement = JsonParser.parseString(output); JsonArray granules = jsonElement.getAsJsonObject().get("output").getAsJsonArray(); JsonArray files = granules.get(0).getAsJsonObject().get("files").getAsJsonArray(); - JsonObject foundCMR = processor.getFileJsonObjByFileTrailing(files, ".cmr.json"); + JsonObject foundCMR = processor.getFileJsonObjByFileTrailing(files, ".cmr.json"); assertEquals(foundCMR.get("bucket").getAsString(), "my-private"); assertEquals(foundCMR.get("key").getAsString(), "CMR/collectionName/granuleId-3344-22.cmr.json"); assertEquals(foundCMR.get("fileName").getAsString(), "granuleId-3344-22.cmr.json"); - Long cmrFileSize = foundCMR.get("size").getAsLong(); - BigInteger revisionId = jsonElement.getAsJsonObject().get("cmrRevisionId").getAsBigInteger(); + Long cmrFileSize = foundCMR.get("size").getAsLong(); + BigInteger revisionId = jsonElement.getAsJsonObject().get("cmrRevisionId").getAsBigInteger(); assertEquals(334411, cmrFileSize.longValue()); assertEquals(revisionId.compareTo(new BigInteger("3244")), 0); } diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java index 91d2979..82acfc6 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java +++ b/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java @@ -4,21 +4,18 @@ import java.io.File; import java.io.IOException; -import java.nio.file.Files; import java.util.ArrayList; import com.google.gson.JsonParser; import com.google.gson.JsonObject; -import gov.nasa.cumulus.metadata.umm.generated.RelatedUrlType; import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType; import gov.nasa.cumulus.metadata.umm.generated.TrackType; import gov.nasa.cumulus.metadata.util.JSONUtils; import org.apache.commons.io.FileUtils; import org.json.simple.JSONArray; import org.json.simple.JSONObject; -import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.junit.Test; @@ -44,53 +41,4 @@ public void testGsonToJSONObj() throws IOException, ParseException{ assertEquals(productionDateTime, "2020-02-29T12:20:15.000Z"); } - @Test - public void testIsStartWithStrings() { - String elements[] = {"http", "https"}; - String httpStr = "http://distribution_url/resource.nc"; - assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), true); - httpStr = "https://distribution_url/resource.nc"; - assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), true); - httpStr = " http://distribution_url/resource.nc"; // test with space - assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), true); - - httpStr = " s3://my_bucket/my_folder/resource.nc"; // test with space - assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), false); - - - } - @Test - public void testIsGETDataType() { - assertEquals(JSONUtils.isGETDataType("GET DATA"), true); - assertEquals(JSONUtils.isGETDataType(" GET DATA "), true); - assertEquals(JSONUtils.isGETDataType(" Get data "), true); - //test with space - assertEquals(JSONUtils.isGETDataType(" GEET Type "), false); - } - - @Test - public void testRelatedUrlsSorting() throws ParseException{ - String cmr_filename= "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json"; - try { - ClassLoader classLoader = getClass().getClassLoader(); - File inputCMRJsonFile = new 
File(classLoader.getResource(cmr_filename).getFile()); - String cmrString = new String(Files.readAllBytes(inputCMRJsonFile.toPath())); - JSONParser parser = new JSONParser(); - JSONObject json = (JSONObject) parser.parse(cmrString); - json = JSONUtils.sortRelatedUrls(json); - //check the first item must be http/https resource scientific data - JSONArray relatedUrlsArray = (JSONArray)json.get("RelatedUrls"); - JSONObject firstJSONObject = (JSONObject)relatedUrlsArray.get(0); - assertEquals(firstJSONObject.get("URL").toString(), "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-protected/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc"); - assertEquals(firstJSONObject.get("Type").toString(), RelatedUrlType.RelatedUrlTypeEnum.GET_DATA.value()); - //check the 6th item must be http/https resource scientific data - JSONObject sixthJSONObject = (JSONObject)relatedUrlsArray.get(6); - assertEquals(sixthJSONObject.get("URL").toString(), "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc"); - assertEquals(sixthJSONObject.get("Type").toString(), RelatedUrlType.RelatedUrlTypeEnum.GET_DATA.value()); - } catch (IOException ioe) { - System.out.println("Test initialization failed: " + ioe); - ioe.printStackTrace(); - } - } - } diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java index f47cc1d..6648be2 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java +++ b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java @@ -1,6 +1,8 @@ package gov.nasa.cumulus.metadata.test; import gov.nasa.cumulus.metadata.aggregator.MetadataAggregatorLambda; +import gov.nasa.cumulus.metadata.state.MENDsIsoXMLSpatialTypeEnum; +import org.json.simple.JSONArray; import org.json.simple.parser.ParseException; import org.junit.Before; import org.junit.Test; @@ -8,8 +10,10 @@ import java.io.File; import java.io.IOException; import java.nio.file.Files; +import java.util.HashSet; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class MetadataAggregatorLambdaTest { @@ -37,4 +41,42 @@ public void testGetConceptId() throws ParseException { assertEquals(conceptId, "G1238611022-POCUMULUS"); } + @Test + public void testGetIsoXMLSpatialTypeStr() { + MetadataAggregatorLambda lambda = new MetadataAggregatorLambda(); + assertEquals(lambda.getIsoXMLSpatialTypeStr("footprint"), "footprint"); + assertEquals(lambda.getIsoXMLSpatialTypeStr("orbit"), "orbit"); + assertEquals(lambda.getIsoXMLSpatialTypeStr("bbox"), "bbox"); + assertEquals(lambda.getIsoXMLSpatialTypeStr("xxxx"), ""); + } + + @Test + public void testCreateIsoXMLSpatialTypeSet() { + MetadataAggregatorLambda lambda = new MetadataAggregatorLambda(); + org.json.simple.JSONArray array = new JSONArray(); + array.add("footprint"); + array.add("orbit"); + //HashSet h = lambda.createIsoXMLSpatialTypeSet("[footprint,orbit]"); + HashSet h = lambda.createIsoXMLSpatialTypeSet(array); + assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.FOOTPRINT)); + assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.ORBIT)); + assertFalse(h.contains(MENDsIsoXMLSpatialTypeEnum.BBOX)); + assertFalse(h.contains(MENDsIsoXMLSpatialTypeEnum.NONE)); + + array.clear(); + array.add("footprint"); + array.add("orbit"); + array.add("bbox"); + array.add("eebb"); + array.add("ccmm"); + 
//h = lambda.createIsoXMLSpatialTypeSet("[footprint,orbit,bbox,eebb,ccmm]"); + h = lambda.createIsoXMLSpatialTypeSet(array); + assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.FOOTPRINT)); + assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.ORBIT)); + assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.BBOX)); + assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.NONE)); + // the last 2 items in the input array each resolve to NONE and overwrite each other in the HashSet + assertTrue(h.size()==4); + } + } diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java index 85ce95d..b65753a 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java +++ b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java @@ -11,6 +11,7 @@ import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; import com.google.gson.JsonObject; +import cumulus_message_adapter.message_parser.AdapterLogger; import gov.nasa.cumulus.metadata.aggregator.*; import gov.nasa.cumulus.metadata.umm.adapter.UMMGCollectionAdapter; @@ -20,6 +21,8 @@ import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType; import gov.nasa.cumulus.metadata.umm.generated.TrackType; +import gov.nasa.podaac.inventory.model.GranuleCharacter; +import org.apache.commons.lang3.StringUtils; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; @@ -321,7 +324,47 @@ public void testMarshellCyclePassTileSceneStrToAchiveType() throws IOException, List tileValues = additionalAttributeType.getValues(); //7F, 8F, 9F, 10F assertEquals(tileValues.get(0), "7F"); assertEquals(tileValues.get(3), "10F"); + /** + * Following section tests an empty Pass. Ex.
Cycle: 001, Pass: [] + */ + input = "Cycle: 001, Pass: []"; + isoGranule = mfte.createIsoCyclePassTile(input); + trackType = isoGranule.getTrackType(); + assertEquals(trackType.getCycle(), new Integer("1")); + + /** + * Cycle: 483 Pass: [10, Tiles: 72-84R 111-111R 72-84L 110-111L] + */ + input = "Cycle: 483 Pass: [10, Tiles: 72-84R 111-111R 72-84L 110-111L]"; + isoGranule = mfte.createIsoCyclePassTile(input); + trackType = isoGranule.getTrackType(); + assertEquals(trackType.getCycle(), new Integer("483")); + + List passes = trackType.getPasses(); + tiles = passes.get(0).getTiles(); + assertEquals(tiles.size(), 29); + assertEquals(tiles.get(0), "72R"); + assertEquals(tiles.get(28), "111L"); + input = "Cycle: 406, Pass: [40, Tiles: 4-5L 4-5R] [41, Tiles: 6R 6L], BasinID: 123"; + isoGranule = mfte.createIsoCyclePassTile(input); + trackType = isoGranule.getTrackType(); + assertEquals(trackType.getCycle(), new Integer("406")); + + passes = trackType.getPasses(); + tiles = passes.get(0).getTiles(); + assertEquals(tiles.size(), 4); + assertEquals(tiles.get(0), "4L"); + assertEquals(tiles.get(1), "5L"); + tiles = passes.get(1).getTiles(); + assertEquals(tiles.size(), 2); + assertEquals(tiles.get(0), "6R"); + assertEquals(tiles.get(1), "6L"); + additionalAttributeTypes = isoGranule.getAdditionalAttributeTypes(); + additionalAttributeType = additionalAttributeTypes.get(2); + assertEquals(additionalAttributeType.getName(), "BasinID"); + List basinIdStrs = additionalAttributeType.getValues(); + assertEquals(basinIdStrs.get(0), "123"); } @Test @@ -373,6 +416,27 @@ public void testReadIsoMendsMetadataFile() throws IOException, ParseException, X assertEquals(tiles.get(6), "8R"); List additionalAttributeTypes = isoGranule.getAdditionalAttributeTypes(); assertEquals(additionalAttributeTypes.size(), 3); + + /** + * Test the behavior of reading SWOT ISO MENDS Orbit and Footprint + */ + file = new File(classLoader.getResource("SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml").getFile()); + cfgFile = new File(classLoader.getResource("MODIS_T-JPL-L2P-v2014.0.cmr.cfg").getFile()); + mfte = new MetadataFilesToEcho(true); + + + mfte.readConfiguration(cfgFile.getAbsolutePath()); + doc = mfte.makeDoc(file.getAbsolutePath()); + xpath = mfte.makeXpath(doc); + isoGranule = mfte.readIsoMendsMetadataFile("s3://mybucket/mygranule.nc", doc, xpath); + isoGranule.getOrbit(); + Set granuleCharacters = isoGranule.getGranuleCharacterSet(); + for (GranuleCharacter granuleCharacter : granuleCharacters) { + if (granuleCharacter.getDatasetElement().getElementDD().getShortName().equals("line")) { + assertTrue(StringUtils.equals("46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 
-179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778", + StringUtils.trim(granuleCharacter.getValue()))); + } + } } @@ -401,6 +465,33 @@ public void testReadIsoMendsMetadataFile_Pass_Cycle_LeadingZeros() throws IOExce List additionalAttributeTypes = isoGranule.getAdditionalAttributeTypes(); assertEquals(additionalAttributeTypes.size(), 0); } + + @Test + public void testReadSwotArchoveMetadataFile_Pass_Cycle_LeadingZeros() throws IOException, ParseException, XPathExpressionException, ParserConfigurationException, SAXException{ + ClassLoader classLoader = getClass().getClassLoader(); + File file = new File(classLoader.getResource("SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml").getFile()); + File cfgFile = new File(classLoader.getResource("MODIS_T-JPL-L2P-v2014.0.cmr.cfg").getFile()); + MetadataFilesToEcho mfte = new MetadataFilesToEcho(true); + + Document doc = null; + XPath xpath = null; + mfte.readConfiguration(cfgFile.getAbsolutePath()); + doc = mfte.makeDoc(file.getAbsolutePath()); + xpath = mfte.makeXpath(doc); + mfte.readSwotArchiveXmlFile(file.getAbsolutePath()); + UMMGranule granule = (UMMGranule) mfte.getGranule(); + // Verify the values here: + TrackType trackType = granule.getTrackType(); + assertEquals(trackType.getCycle(), new Integer(403)); + List trackPassTileTypes = trackType.getPasses(); + assertEquals(trackPassTileTypes.size(), 1); + TrackPassTileType trackPassTileType = trackPassTileTypes.get(0); + assertEquals(trackPassTileType.getPass(), new Integer(8)); + List tiles = trackPassTileType.getTiles(); + assertEquals(tiles.size(), 1); + List additionalAttributeTypes = granule.getAdditionalAttributeTypes(); + assertEquals(additionalAttributeTypes.size(), 1); + } @Test public void testReadIsoMendsMetadataFileAdditionalFields_publishAll() throws ParseException, IOException, URISyntaxException, XPathExpressionException, ParserConfigurationException, SAXException { diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java index 901569e..646c28c 100644 --- a/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java +++ b/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java @@ -94,7 +94,7 @@ public void testIsoRequiredFields() throws IOException, ParseException, XPathExp fail("Did not find exactly one Insert and one Update field in ProviderDates"); } assertNotNull(umm.get("MetadataSpecification")); - testMetadataSpec(umm, "1.6.3"); + testMetadataSpec(umm, "1.6.5"); // These tests are based on testCollection.config, and will need // to be changed if the test resource changes. 
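Looping back to the Cycle/Pass/Tiles strings exercised above: the sketch below illustrates, under the assumption that a range token such as "72-84R" expands to one tile per number with its side suffix kept, how "72-84R 111-111R 72-84L 110-111L" can yield 29 tiles and "4-5L 4-5R" four. The TileRangeSketch class and expand method are hypothetical illustrations, not the aggregator's API.

import java.util.ArrayList;
import java.util.List;

class TileRangeSketch {
    // Expands tokens like "72-84R" into 72R..84R and passes single tiles like "6L" through.
    static List<String> expand(String tilesField) {
        List<String> tiles = new ArrayList<>();
        for (String token : tilesField.trim().split("\\s+")) {
            String side = token.substring(token.length() - 1);        // trailing "R", "L" or "F"
            String numbers = token.substring(0, token.length() - 1);  // "72-84", "111-111", "6"
            String[] bounds = numbers.split("-");
            int start = Integer.parseInt(bounds[0]);
            int end = Integer.parseInt(bounds[bounds.length - 1]);
            for (int i = start; i <= end; i++) {
                tiles.add(i + side);
            }
        }
        return tiles;
    }
}

With these assumptions, expand("72-84R 111-111R 72-84L 110-111L") returns 29 entries starting with "72R" and ending with "111L", and expand("4-5L 4-5R") returns ["4L", "5L", "4R", "5R"], consistent with the assertions in the tests above.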
JSONObject cr = (JSONObject)umm.get("CollectionReference"); @@ -129,10 +129,10 @@ public void testIso2UmmMappings() throws XPathExpressionException, ParserConfigurationException, IOException, SAXException, ParseException, URISyntaxException { /* - * These tests are based on the ISO file located in the + * These tests are based on the ISO file located in the * src/test/resources directory. They validate the mapping of ISO to * UMM-G. If the underlying ISO file changes, these tests will need to - * be updated. + * be updated. */ //given an ISO file... //Granule_ISOMENDS_SWOT_Sample_L1_HR_TileBased_20181202_edit2.xml @@ -140,38 +140,38 @@ public void testIso2UmmMappings() String testFile = "Granule_ISOMENDS_SWOT_Sample_L1_HR_TileBased_20181202_edit2.xml"; String testFilePath = testDir + File.separator + testFile; - + String testConfigFile = "testCollection.config"; String testConfigFilePath = testDir + File.separator + testConfigFile; - + String granuleId = "SWOT_L1B_HR_SLC_001_005_001L_20210612T072103_20210612T07215_PGA200_03"; - + MetadataFilesToEcho mtfe = new MetadataFilesToEcho(true); - + mtfe.readConfiguration(testConfigFilePath); mtfe.readIsoMetadataFile(testFilePath, "s3://public/datafile.nc"); - + mtfe.getGranule().setName(granuleId); //write UMM-G to file mtfe.writeJson( testDir + "/" + granuleId + ".cmr.json"); - //the CMR file should have the following values... - + //the CMR file should have the following values... + JSONParser parser = new JSONParser(); Object obj = parser.parse(new FileReader(testDir + "/" + granuleId + ".cmr.json")); JSONObject umm = (JSONObject) obj; - - + + System.out.println(String.format("GranuleUR is not provided by ISO XML, " + "defined and supplied via datafile name - suffix: %s", granuleId)); assertEquals(granuleId,umm.get("GranuleUR")); - + //InputGranules JSONArray a = (JSONArray) umm.get("InputGranules"); - String[] _inputs = + String[] _inputs = { "SWOT_L0B_HR_Frame_001_005_011F_20210612T072103_20210612T072113_PGA200_03.nc", - "SWOT_L0B_HR_Frame_001_005_012F_20210612T072113_20210612T072123_PGA200_01.nc", + "SWOT_L0B_HR_Frame_001_005_012F_20210612T072113_20210612T072123_PGA200_01.nc", "SWOT_L0B_HR_Frame_001_005_012F_20210612T072113_20210612T072123_PGA200_01.rc.xml" }; ArrayList inputs = new ArrayList(3); @@ -183,22 +183,22 @@ public void testIso2UmmMappings() fail("input array does not contain "+a.get(i)); } } - - + + //TemporalExtent/RangeDateTime - + JSONObject rdt = (JSONObject)((JSONObject) umm.get("TemporalExtent")).get("RangeDateTime"); assertEquals((String)rdt.get("BeginningDateTime"), "2018-07-17T00:00:00.000Z"); assertEquals((String)rdt.get("EndingDateTime"), "2018-07-17T23:59:59.999Z"); - + //MetadataSpecification testMetadataSpec(umm, "1.6.3"); - + //Platforms JSONObject p = (JSONObject) ((JSONArray)umm.get("Platforms")).get(0); assertEquals(p.get("ShortName"),"SWOT"); assertEquals(((JSONObject)((JSONArray)p.get("Instruments")).get(0)).get("ShortName"),"KaRIn"); - + //ProviderDates /* * These are generated by the mtfe code, and so we don't test them for an exact date. 
@@ -214,14 +214,14 @@ else if(date.get("Type").equals("Update")){ else fail(); }*/ - + //MeasuredParameters JSONObject param = (JSONObject)((JSONArray)umm.get("MeasuredParameters")).get(0); assertEquals("amplitude_hh", param.get("ParameterName")); - + assertEquals(20.5, ((JSONObject)param.get("QAStats")).get("QAPercentMissingData")); assertEquals(10.5, ((JSONObject)param.get("QAStats")).get("QAPercentOutOfBoundsData")); - + //SpatialExtent JSONObject hsd = (JSONObject)((JSONObject)umm.get("SpatialExtent")).get("HorizontalSpatialDomain"); JSONObject orbit = (JSONObject) hsd.get("Orbit"); @@ -236,13 +236,13 @@ else if(date.get("Type").equals("Update")){ TODO - convert this into a split test, one for ISO with orbit, and one for ISO without JSONObject geom = (JSONObject) hsd.get("Geometry"); - + //Geometry/GPolygons JSONObject bndry = (JSONObject)((JSONObject)((JSONArray) geom.get("GPolygons")).get(0)).get("Boundary"); JSONArray pnts = (JSONArray) bndry.get("Points"); - + for(int i=0; i< pnts.size(); i++){ - + JSONObject pt = (JSONObject) pnts.get(i); if(((Double)pt.get("Latitude")).equals(new Double(-11))){ assertEquals(((Double)pt.get("Longitude")),new Double(-17)); @@ -262,16 +262,16 @@ else if(date.get("Type").equals("Update")){ assertEquals(br.get("EastBoundingCoordinate"), new Double(179.999)); assertEquals(br.get("NorthBoundingCoordinate"), new Double(85.045)); */ - + //Track JSONObject track = (JSONObject) hsd.get("Track"); assertEquals(track.get("Cycle"), new Long(5)); JSONArray passes = (JSONArray) track.get("Passes"); - + ArrayList passVals = new ArrayList(Arrays.asList(new Long(40), new Long(41), new Long(42))); ArrayList tileVals= new ArrayList(Arrays.asList("4L","5L","5R", "6R", "7F")); - - + + for(int i = 0; i < passes.size(); i++){ JSONObject pass = (JSONObject) passes.get(i); assertTrue(passVals.contains(pass.get("Pass"))); @@ -281,15 +281,15 @@ else if(date.get("Type").equals("Update")){ assertTrue(tileVals.contains(tiles.get(j))); } } - + //PGEVersionClass JSONObject pgev = (JSONObject) umm.get("PGEVersionClass"); assertEquals("PGE_L1B_HR_SLC", pgev.get("PGEName")); assertEquals("1.1.4", pgev.get("PGEVersion")); - + //DataGranule JSONObject dg = (JSONObject)umm.get("DataGranule"); - + //DataGranule/ArchiveAndDistributionInformation JSONArray files = (JSONArray) dg.get("ArchiveAndDistributionInformation"); for(int i = 0; i < files.size(); i++){ @@ -322,11 +322,11 @@ else if(f.get("Name").equals("SWOT_L1B_HR_SLC_001_005_001L_20210612T072103_20210 fail("Could not find file with name " + f.get("Name")); } } - - + + //DataGranule/DayNightFlag assertEquals("Unspecified",dg.get("DayNightFlag")); - + //DataGranule/Identifiers JSONArray ids = (JSONArray) dg.get("Identifiers"); for (int i =0; i< ids.size(); i++){ @@ -338,7 +338,7 @@ else if(id.get("IdentifierType").equals("CRID")){ assertEquals("PGA200",id.get("Identifier")); } else if(id.get("IdentifierType").equals("Other")){ - + if(id.get("IdentifierName").equals("SASVersionId")){ assertEquals("7.8.9",id.get("Identifier")); }else if(id.get("IdentifierName").equals("PGEVersionId")){ @@ -355,16 +355,16 @@ else if(id.get("IdentifierType").equals("Other")){ fail("Could not find identifier " + id.get("IdentifierType")); } } - + assertEquals("One Post-Calibration bulk reprocessing and one End-of-mission bulk reprocessing",dg.get("ReprocessingPlanned")); assertEquals("2018-07-19T12:01:01.000Z",dg.get("ProductionDateTime")); - + //CollectionReference JSONObject cr = (JSONObject)umm.get("CollectionReference"); 
assertEquals("1",cr.get("Version")); assertEquals("L1B_HR_SLC",cr.get("ShortName")); - - + + /* * "RelatedUrls": [ { @@ -382,112 +382,112 @@ else if(id.get("IdentifierType").equals("Other")){ //fail("Not yet implemented"); } - @Test - public void testSentinelManifest2UmmMappings() - throws XPathExpressionException, ParserConfigurationException, - IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { - String testFile = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6.xfdumanifest.xml"; - String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config"; - String granuleId = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6"; - - JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); - - //TemporalExtent/RangeDateTime - JSONObject rdt = (JSONObject) ((JSONObject) umm.get("TemporalExtent" )).get("RangeDateTime" ); - assertEquals((String) rdt.get("BeginningDateTime" ), "2021-04-14T00:14:38.000Z" ); - assertEquals((String) rdt.get("EndingDateTime" ), "2021-04-14T00:21:49.532Z" ); - - //SpatialExtent - JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); - - //Track - JSONObject track = (JSONObject) hsd.get("Track" ); - assertEquals(track.get("Cycle" ), new Long(2)); - JSONArray passes = (JSONArray) track.get("Passes" ); - assertEquals(((JSONObject) passes.get(0)).get("Pass"), new Long(127)); - - JSONObject geom = (JSONObject) hsd.get("Geometry" ); - //Footprint - // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to - // 2 polygons - Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); - JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); - - JSONObject firstPoint = (JSONObject) pnts.get(0); - assertEquals(new Double(-45.4871), ((Double) firstPoint.get("Latitude" ))); - assertEquals(new Double(-132.544), ((Double) firstPoint.get("Longitude" ))); - - JSONObject midPoint = (JSONObject) pnts.get(3); - assertEquals(new Double( -51.5451), ((Double) midPoint.get("Latitude" ))); - assertEquals(new Double(-139.042), ((Double) midPoint.get("Longitude" ))); - - JSONObject lastPoint = (JSONObject) pnts.get(5); - assertEquals(new Double(-45.4871), ((Double) lastPoint.get("Latitude" ))); - assertEquals(new Double(-132.544), ((Double) lastPoint.get("Longitude" ))); - - //DataGranule - JSONObject dg = (JSONObject) umm.get("DataGranule" ); - assertEquals("2020-04-29T14:33:31.000Z", dg.get("ProductionDateTime" )); - - //CollectionReference - JSONObject cr = (JSONObject) umm.get("CollectionReference" ); - assertEquals("E", cr.get("Version" )); - assertEquals("JASON_CS_S6A_L0_ALT_ACQ", cr.get("ShortName" )); - - JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0); - assertEquals("ProviderDataSource", productName.get("Name")); - assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0)); - } - - @Test - public void testSentinelManifestOverIDL() - throws XPathExpressionException, ParserConfigurationException, - IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { - // this test file will split to 3 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon - String testFile = "S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02.xfdumanifest.xml"; - String testConfigFile = 
"JASON_CS_S6A_L0_ALT_ACQ.config"; - String granuleId ="S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02"; - - JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); - - //SpatialExtent - JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); - - JSONObject geom = (JSONObject) hsd.get("Geometry" ); - //Footprint - // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to - // 2 polygons - Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); - JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); - - JSONObject firstPoint = (JSONObject) pnts.get(0); - assertEquals(Double.valueOf(66.644644), ((Double) firstPoint.get("Latitude" ))); - assertEquals(Double.valueOf(140.378601), ((Double) firstPoint.get("Longitude" ))); - - JSONObject midPoint = (JSONObject) pnts.get(3); - assertEquals(Double.valueOf(58.947656), ((Double) midPoint.get("Latitude" ))); - assertEquals(Double.valueOf(180.0), ((Double) midPoint.get("Longitude" ))); - - JSONObject lastPoint = (JSONObject) pnts.get(5); - assertEquals(Double.valueOf(63.594104), ((Double) lastPoint.get("Latitude" ))); - assertEquals(Double.valueOf(168.727685), ((Double) lastPoint.get("Longitude" ))); - - //2nd polygon - boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(1))).get("Boundary"); - pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); - - firstPoint = (JSONObject) pnts.get(0); - assertEquals(Double.valueOf(59.804021000000006), ((Double) firstPoint.get("Latitude" ))); - assertEquals(Double.valueOf(-180), ((Double) firstPoint.get("Longitude" ))); - - midPoint = (JSONObject) pnts.get(15); - assertEquals(Double.valueOf(-66.647778), ((Double) midPoint.get("Latitude" ))); - assertEquals(Double.valueOf(-53.840211), ((Double) midPoint.get("Longitude" ))); - - lastPoint = (JSONObject) pnts.get(29); - assertEquals(Double.valueOf(56.013938), ((Double) lastPoint.get("Latitude" ))); - assertEquals(Double.valueOf(-171.655155), ((Double) lastPoint.get("Longitude" ))); - } +// @Test +// public void testSentinelManifest2UmmMappings() +// throws XPathExpressionException, ParserConfigurationException, +// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { +// String testFile = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6.xfdumanifest.xml"; +// String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config"; +// String granuleId = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6"; +// +// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); +// +// //TemporalExtent/RangeDateTime +// JSONObject rdt = (JSONObject) ((JSONObject) umm.get("TemporalExtent" )).get("RangeDateTime" ); +// assertEquals((String) rdt.get("BeginningDateTime" ), "2021-04-14T00:14:38.000Z" ); +// assertEquals((String) rdt.get("EndingDateTime" ), "2021-04-14T00:21:49.532Z" ); +// +// //SpatialExtent +// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); +// +// //Track +// JSONObject track = (JSONObject) hsd.get("Track" ); +// assertEquals(track.get("Cycle" ), new Long(2)); +// JSONArray passes = (JSONArray) track.get("Passes" ); +// assertEquals(((JSONObject) passes.get(0)).get("Pass"), new Long(127)); +// +// JSONObject geom = (JSONObject) hsd.get("Geometry" ); +// 
//Footprint +// // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to +// // 2 polygons +// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); +// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); +// +// JSONObject firstPoint = (JSONObject) pnts.get(0); +// assertEquals(new Double(-45.4871), ((Double) firstPoint.get("Latitude" ))); +// assertEquals(new Double(-132.544), ((Double) firstPoint.get("Longitude" ))); +// +// JSONObject midPoint = (JSONObject) pnts.get(3); +// assertEquals(new Double( -51.5451), ((Double) midPoint.get("Latitude" ))); +// assertEquals(new Double(-139.042), ((Double) midPoint.get("Longitude" ))); +// +// JSONObject lastPoint = (JSONObject) pnts.get(5); +// assertEquals(new Double(-45.4871), ((Double) lastPoint.get("Latitude" ))); +// assertEquals(new Double(-132.544), ((Double) lastPoint.get("Longitude" ))); +// +// //DataGranule +// JSONObject dg = (JSONObject) umm.get("DataGranule" ); +// assertEquals("2020-04-29T14:33:31.000Z", dg.get("ProductionDateTime" )); +// +// //CollectionReference +// JSONObject cr = (JSONObject) umm.get("CollectionReference" ); +// assertEquals("E", cr.get("Version" )); +// assertEquals("JASON_CS_S6A_L0_ALT_ACQ", cr.get("ShortName" )); +// +// JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0); +// assertEquals("ProviderDataSource", productName.get("Name")); +// assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0)); +// } + +// @Test +// public void testSentinelManifestOverIDL() +// throws XPathExpressionException, ParserConfigurationException, +// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { +// // this test file will split to 3 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon +// String testFile = "S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02.xfdumanifest.xml"; +// String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config"; +// String granuleId ="S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02"; +// +// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); +// +// //SpatialExtent +// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); +// +// JSONObject geom = (JSONObject) hsd.get("Geometry" ); +// //Footprint +// // In this case, we have a small polygon which does NOT cross dateline. 
Hence, it will not be divided to +// // 2 polygons +// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); +// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); +// +// JSONObject firstPoint = (JSONObject) pnts.get(0); +// assertEquals(Double.valueOf(66.644644), ((Double) firstPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(140.378601), ((Double) firstPoint.get("Longitude" ))); +// +// JSONObject midPoint = (JSONObject) pnts.get(3); +// assertEquals(Double.valueOf(58.947656), ((Double) midPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(180.0), ((Double) midPoint.get("Longitude" ))); +// +// JSONObject lastPoint = (JSONObject) pnts.get(5); +// assertEquals(Double.valueOf(63.594104), ((Double) lastPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(168.727685), ((Double) lastPoint.get("Longitude" ))); +// +// //2nd polygon +// boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(1))).get("Boundary"); +// pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); +// +// firstPoint = (JSONObject) pnts.get(0); +// assertEquals(Double.valueOf(59.804021000000006), ((Double) firstPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(-180), ((Double) firstPoint.get("Longitude" ))); +// +// midPoint = (JSONObject) pnts.get(15); +// assertEquals(Double.valueOf(-66.647778), ((Double) midPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(-53.840211), ((Double) midPoint.get("Longitude" ))); +// +// lastPoint = (JSONObject) pnts.get(29); +// assertEquals(Double.valueOf(56.013938), ((Double) lastPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(-171.655155), ((Double) lastPoint.get("Longitude" ))); +// } @Test public void testSentinelManifestL0TooFewCoordinates() @@ -513,39 +513,39 @@ public void testSentinelManifestL0TooFewCoordinates() assertEquals(gbbx.get("NorthBoundingCoordinate"), Double.valueOf(90.00)); } - @Test - public void testSentinelManifestNotOverIDL() - throws XPathExpressionException, ParserConfigurationException, - IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { - // this test file will split to 1 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon - String testFile = "S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02.xfdumanifest.xml"; - String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config"; - String granuleId ="S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02"; - - JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); - - //SpatialExtent - JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); - - JSONObject geom = (JSONObject) hsd.get("Geometry" ); - //Footprint - // In this case, we have a small polygon which does NOT cross dateline. 
Hence, it will not be divided to - // 2 polygons - Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); - JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); - - JSONObject firstPoint = (JSONObject) pnts.get(0); - assertEquals(Double.valueOf(-65.649768), ((Double) firstPoint.get("Latitude" ))); - assertEquals(Double.valueOf(-25.561001), ((Double) firstPoint.get("Longitude" ))); - - JSONObject midPoint = (JSONObject) pnts.get(16); - assertEquals(Double.valueOf(65.64749), ((Double) midPoint.get("Latitude" ))); - assertEquals(Double.valueOf(140.321732), ((Double) midPoint.get("Longitude" ))); - - JSONObject lastPoint = (JSONObject) pnts.get(31); - assertEquals(Double.valueOf(-62.663981), ((Double) lastPoint.get("Latitude" ))); - assertEquals(Double.valueOf(2.525361), ((Double) lastPoint.get("Longitude" ))); - } +// @Test +// public void testSentinelManifestNotOverIDL() +// throws XPathExpressionException, ParserConfigurationException, +// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { +// // this test file will split to 1 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon +// String testFile = "S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02.xfdumanifest.xml"; +// String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config"; +// String granuleId ="S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02"; +// +// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); +// +// //SpatialExtent +// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); +// +// JSONObject geom = (JSONObject) hsd.get("Geometry" ); +// //Footprint +// // In this case, we have a small polygon which does NOT cross dateline. 
Hence, it will not be divided to +// // 2 polygons +// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); +// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); +// +// JSONObject firstPoint = (JSONObject) pnts.get(0); +// assertEquals(Double.valueOf(-65.649768), ((Double) firstPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(-25.561001), ((Double) firstPoint.get("Longitude" ))); +// +// JSONObject midPoint = (JSONObject) pnts.get(16); +// assertEquals(Double.valueOf(65.64749), ((Double) midPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(140.321732), ((Double) midPoint.get("Longitude" ))); +// +// JSONObject lastPoint = (JSONObject) pnts.get(31); +// assertEquals(Double.valueOf(-62.663981), ((Double) lastPoint.get("Latitude" ))); +// assertEquals(Double.valueOf(2.525361), ((Double) lastPoint.get("Longitude" ))); +// } @Test /** @@ -607,42 +607,42 @@ public void testSentinelAuxManifest2UmmMappings() assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0)); } - @Test - public void testSentinelManifestL1Footprint() - throws XPathExpressionException, ParserConfigurationException, - IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { - String testFile = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6.xfdumanifest.xml"; - String testConfigFile = "JASON_CS_S6A_L1_ALT_ECHO_AX.config"; - String granuleId = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6"; - JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); - - //SpatialExtent - JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); - - JSONObject geom = (JSONObject) hsd.get("Geometry" ); - assertNull(geom.get("BoundingRectangles")); - - //Footprint - // In this case, we have a small polygon which does NOT cross dateline. 
Hence, it will not be divided to - // 2 polygons - Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); - JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); - - assertEquals(33, pnts.size()); - - DecimalFormat decimalFormat = new DecimalFormat("###.###"); - JSONObject firstPoint = (JSONObject) pnts.get(0); - assertEquals("-64.654", decimalFormat.format(firstPoint.get("Latitude"))); - assertEquals("-167.571", decimalFormat.format(firstPoint.get("Longitude"))); - - JSONObject lastPoint = (JSONObject) pnts.get(31); - assertEquals("-58.844", decimalFormat.format(lastPoint.get("Latitude"))); - assertEquals("-144.155", decimalFormat.format(lastPoint.get("Longitude"))); - - JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0); - assertEquals("ProviderDataSource", productName.get("Name")); - assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0)); - } +// @Test +// public void testSentinelManifestL1Footprint() +// throws XPathExpressionException, ParserConfigurationException, +// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException { +// String testFile = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6.xfdumanifest.xml"; +// String testConfigFile = "JASON_CS_S6A_L1_ALT_ECHO_AX.config"; +// String granuleId = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6"; +// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId); +// +// //SpatialExtent +// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" ); +// +// JSONObject geom = (JSONObject) hsd.get("Geometry" ); +// assertNull(geom.get("BoundingRectangles")); +// +// //Footprint +// // In this case, we have a small polygon which does NOT cross dateline. 
Hence, it will not be divided to +// // 2 polygons +// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary"); +// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points"); +// +// assertEquals(33, pnts.size()); +// +// DecimalFormat decimalFormat = new DecimalFormat("###.###"); +// JSONObject firstPoint = (JSONObject) pnts.get(0); +// assertEquals("-64.654", decimalFormat.format(firstPoint.get("Latitude"))); +// assertEquals("-167.571", decimalFormat.format(firstPoint.get("Longitude"))); +// +// JSONObject lastPoint = (JSONObject) pnts.get(31); +// assertEquals("-58.844", decimalFormat.format(lastPoint.get("Latitude"))); +// assertEquals("-144.155", decimalFormat.format(lastPoint.get("Longitude"))); +// +// JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0); +// assertEquals("ProviderDataSource", productName.get("Name")); +// assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0)); +// } @Test public void testSwotL02UmmMappings() diff --git a/src/test/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiilsTest.java b/src/test/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiilsTest.java new file mode 100644 index 0000000..c8f7637 --- /dev/null +++ b/src/test/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiilsTest.java @@ -0,0 +1,70 @@ +package gov.nasa.cumulus.metadata.util; + +import gov.nasa.cumulus.metadata.aggregator.IsoMendsXPath; +import gov.nasa.cumulus.metadata.aggregator.MetadataFilesToEcho; +import gov.nasa.cumulus.metadata.aggregator.NamespaceResolver; +import org.apache.commons.lang3.StringUtils; +import org.junit.Before; +import org.junit.Test; +import org.w3c.dom.Document; +import org.xml.sax.SAXException; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; +import java.io.File; +import java.io.IOException; + +import static org.junit.Assert.assertEquals; + +public class MENDsISOXmlUtiilsTest { + File MENDsISOFile = null; + @Before + public void initialize() { + ClassLoader classLoader = getClass().getClassLoader(); + MENDsISOFile = new File(classLoader.getResource("SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml").getFile()); + } + + @Test + public void testExtractXPathValueSwallowException() throws ParserConfigurationException, SAXException, IOException{ + Document doc = null; + XPath xpath = null; + MetadataFilesToEcho mfte = new MetadataFilesToEcho(true); + doc = mfte.makeDoc(MENDsISOFile.getAbsolutePath()); + xpath = mfte.makeXpath(doc); + + String polygonStr = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc,xpath, IsoMendsXPath.POLYGON, "IsoMendsXPath.POLYGON"); + assertEquals(StringUtils.trim(polygonStr), StringUtils.trim("46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 
-175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778")); + + // added more characters in the end of XPath definition to make it find nothing + polygonStr = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc,xpath, IsoMendsXPath.POLYGON+"aabbccdd", "IsoMendsXPath.POLYGON"); + assertEquals(StringUtils.trim(polygonStr), ""); + } + + @Test + public void testExtractXPathValueThrowException() throws ParserConfigurationException, SAXException, IOException{ + Document doc = null; + XPath xpath = null; + MetadataFilesToEcho mfte = new MetadataFilesToEcho(true); + doc = mfte.makeDoc(MENDsISOFile.getAbsolutePath()); + xpath = mfte.makeXpath(doc); + String polygonStr=""; + try { + polygonStr = MENDsISOXmlUtiils.extractXPathValueThrowsException(doc, xpath, IsoMendsXPath.POLYGON, "IsoMendsXPath.POLYGON"); + assertEquals(StringUtils.trim(polygonStr), StringUtils.trim("46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778")); + } catch (Exception e) { + System.out.println("caught generic Exception: " + e); + } + // added more 
characters in the end of XPath definition to make it find nothing + try { + polygonStr = MENDsISOXmlUtiils.extractXPathValueThrowsException(doc, xpath, "IsoMendsXPath.POLYGON", "IsoMendsXPath.POLYGON"); + } catch (XPathExpressionException xPathExpressionException) { + System.out.println("caught XPathExpressionException: " + xPathExpressionException); + } catch (Exception e) { + System.out.println("caught generic Exception: " + e); + } + + } + +} diff --git a/src/test/resources/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json b/src/test/resources/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json deleted file mode 100644 index 5506e08..0000000 --- a/src/test/resources/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "TemporalExtent": { - "RangeDateTime": { - "EndingDateTime": "2020-01-01T00:04:57.000Z", - "BeginningDateTime": "2020-01-01T00:00:00.000Z" - } - }, - "MetadataSpecification": { - "Version": "1.6", - "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6", - "Name": "UMM-G" - }, - "GranuleUR": "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0", - "ProviderDates": [ - { - "Type": "Insert", - "Date": "2020-07-17T23:10:21.470Z" - }, - { - "Type": "Update", - "Date": "2020-07-17T23:10:21.484Z" - } - ], - "SpatialExtent": { - "HorizontalSpatialDomain": { - "Geometry": { - "BoundingRectangles": [ - { - "WestBoundingCoordinate": 123.165, - "SouthBoundingCoordinate": -89.989, - "EastBoundingCoordinate": 180, - "NorthBoundingCoordinate": -66.906 - }, - { - "WestBoundingCoordinate": -180, - "SouthBoundingCoordinate": -89.989, - "EastBoundingCoordinate": -74.116, - "NorthBoundingCoordinate": -66.906 - } - ] - } - } - }, - "DataGranule": { - "ArchiveAndDistributionInformation": [ - { - "SizeUnit": "MB", - "Size": 17.387483596801758, - "Name": "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc" - } - ], - "DayNightFlag": "Unspecified", - "ProductionDateTime": "2020-02-29T12:20:15.000Z" - }, - "CollectionReference": { - "Version": "2019.0", - "ShortName": "MODIS_A-JPL-L2P-v2019.0" - }, - "RelatedUrls": [ - { - "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc.md5", - "Description": "File to download", - "Type": "EXTENDED METADATA" - }, - { - "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-protected/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc", - "Description": "The base directory location for the granule.", - "Type": "GET DATA" - }, - { - "URL": "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc", - "Description": "The base directory location for the granule.", - "Type": "GET DATA" - }, - { - "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.cmr.json", - "Description": "File to download", - "Type": "EXTENDED METADATA" - }, - { - "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/s3credentials", - "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", - "Type": "VIEW RELATED INFORMATION" - }, - { - "URL": 
"https://opendap.uat.earthdata.nasa.gov/providers/POCUMULUS/collections/GHRSST%20Level%202P%20Global%20Sea%20Surface%20Skin%20Temperature%20from%20the%20Moderate%20Resolution%20Imaging%20Spectroradiometer%20(MODIS)%20on%20the%20NASA%20Aqua%20satellite%20(GDS2)/granules/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0", - "Type": "USE SERVICE API", - "Subtype": "OPENDAP DATA", - "Description": "OPeNDAP request URL" - }, - { - "URL": "https://jh72u371y2.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/MODIS_A-JPL-L2P-v2019.0/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.sses_standard_deviation.png", - "Type": "GET RELATED VISUALIZATION", - "Subtype": "DIRECT DOWNLOAD", - "MimeType": "image/png" - }, - { - "URL": "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc.md5", - "Description": "The base directory location for the granule.", - "Type": "EXTENDED METADATA" - }, - { - "URL": "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.iso.xml", - "Description": "The base directory location for the granule.", - "Type": "EXTENDED METADATA" - } - ] -} \ No newline at end of file diff --git a/src/test/resources/SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml b/src/test/resources/SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml new file mode 100644 index 0000000..4a780cd --- /dev/null +++ b/src/test/resources/SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml @@ -0,0 +1,86 @@ + + + + + + test + + SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.nc + + + test + test + + test + + + + SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.nc + 2020-01-18T11:16:35.056934Z + unknown + 1075 + PIA0 + 01 + Size: 123456789 SizeUnit: B + 403 + 008 + unknown + + + SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.met.json + Size: 123 SizeUnit: B + + + SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.rc.xml + Size: 4567 SizeUnit: B + + + SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.log + Size: unknown SizeUnit: B + + + SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.archive.xml + Size: unknown SizeUnit: B + + + 2020-01-17T15:04:58.187000Z + 2020-01-17T15:56:23.716000Z + + test + test + + + diff --git a/src/test/resources/SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml b/src/test/resources/SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml new file mode 100644 index 0000000..30c8c11 --- /dev/null +++ b/src/test/resources/SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml @@ -0,0 +1,2375 @@ + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip + + + + eng + + + + utf8 + + + + dataset + + + + + + 2023-06-16T13:18:49.250930Z + + + + ISO 19115-2 Geographic Information - Metadata Part 2 Extensions for imagery and gridded data + + + ISO 19115-2:2009(E) + + + + + + + + + + + + + + + + + 2023-06-16T13:18:49.250930Z + + + creation + + + + + + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip + + + gov.nasa.esdis.umm.producergranuleid + + + ProducerGranuleId + + + + + + + + + + + + + + TGB0 + + + gov.nasa.esdis.umm.crid + + + CRID + + + + + + + + ICV01 CollectionVersion filler + + + gov.nasa.esdis.umm.otherid + + + OtherId: ScienceAlgorithmVersionId + + + + 
+ + + 4.3.0 + + + gov.nasa.esdis.umm.otherid + + + OtherId: PGEVersionId + + + + + + + 1.0 + + + gov.nasa.esdis.umm.otherid + + + OtherId: SASVersionId + + + + + + + 01 + + + gov.nasa.esdis.umm.otherid + + + OtherId: ProductCounter + + + + + + + + + + + + + + + + + asNeeded + + + ReprocessingPlanned: None + + + + + + + + + https://webmap.ornl.gov/sdat/pimg/TBD + + + Size: TBD> SizeUnit: KB Description: TBD + + + Format: PNG MimeType: image/png + + + + + + + + https://webmap.ornl.gov/sdat/pimg/TBD + + + Size: TBD> SizeUnit: KB Description: TBD + + + Format: PNG MimeType: image/png + + + + + + + + + + + SWOT + + + Surface Water Ocean Topography + + + + project + + + + + NASA Project Keywords + + + + + + NASA + + + User Support Office + + + + + + + https://support.earthdata.nasa.gov/ + + + Earthdata Support + + + File an issue or provide feedback + + + information + + + + + + + custodian + + + + + + + + + + + + + Commissioning Phase + + + Calibration Phase + + + Science Ops Phase + + + campaign + + + + + + + + + Space-based Platforms + + + Earth Observation Satellites + + + SWOT + + + platform + + + + + NASA Platform Keywords + + + + + + NASA + + + User Support Office + + + + + + + https://support.earthdata.nasa.gov/ + + + Earthdata Support + + + File an issue or provide feedback + + + information + + + + + + + custodian + + + + + + + + + + + + + Earth Remote Sensing Instruments + + + Active Remote Sensing + + + Imaging Radars + + + KaRIn + + + instrument + + + + + NASA Instrument Keywords + + + + + + NASA + + + User Support Office + + + + + + + https://support.earthdata.nasa.gov/ + + + Earthdata Support + + + File an issue or provide feedback + + + information + + + + + + + custodian + + + + + + + + + + + + + + + + + + PGE_L2_HR_RiverAvg + + + gov.nasa.esdis.umm.collectionshortname + + + CollectionShortName + + + + + LargerWorkCitation + + + + + + + + + + ICV01 CollectionVersion filler + + + gov.nasa.esdis.umm.collectionversion + + + CollectionVersion + + + + + LargerWorkCitation + + + + + + + + + + + + + + + + + + + + https://swot.jpl.nasa.gov/ + + + SWOT Project Homepage + + + information + + + + + + + + + + + + + + + + + + + + + + + + + + + + + https://swot.jpl.nasa.gov/ + + + SWOT Project Homepage + + + information + + + + + + + + + + + + + + + + eng + + + + utf8 + + + + + + + + + + + + + + + + + + + + + + + 46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 
69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778 + + + + + + + + + + + + + + + + 129.8388049355948 + + + 147.8095258158062 + + + 59.54084540610198 + + + 71.2293826683445 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cycle: 487, Pass: [4], BasinID: 35 + + + gov.nasa.esdis.umm.swottrack + + + SWOTTrack + + + + + + + + + + + + + 2023-04-10T20:00:18.457000Z + 2023-04-11T19:50:56.521000Z + + + + + + + + + + + + + + + + + + + MeasuredParameters + + + physicalMeasurement + + + + + + + + N/A + + + + + + MeasuredParameters + + + + + + + + + + + + + qualityInformation + + + QAPercentMissingData + + + float + + + + + N/A + + + + + + + qualityInformation + + + + QAPercentOutOfBoundsData + + + float + + + N/A + + + N/A + + + + + N/A + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + https://daac.ornl.gov/daacdata/islscp_ii/vegetation/erbe_albedo_monthly_xdeg/data/erbe_albedo_1deg_1986.zip + + + + + Type: GET DATA Format: ZIP MimeType: application/zip Size: 395.673 SizeUnit: KB Description: This link provides direct download access to the granule. + + + download + + + + + + + + + + + + + + + + + dataset + + + + + + + + + + + + + + + + + ReprocessingActual: To be determined + + + + + + + + PGEVersionClass + + + + + + + PGEName: PGE_L2_HR_RiverAvg PGEVersion: 4.3.0 + + + gov.nasa.esdis.umm.pgeversionclass + + + PGEVersionClass + + + + + + + + + + + + + + ProductionDateTime + + + 2023-06-16T13:18:49.250930Z + + + + + + + + + + + + + + + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip + + + + Size: unknown SizeUnit: KB ChecksumValue: unknown ChecksumAlgorithm: unknown Description: dataset in a ZIP file + + + application/zip + + + + + ZIP + + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip + + + Size: 2661782 SizeUnit: B ChecksumValue: 9bfc923313717b1c163eeefc36783748 ChecksumAlgorithm: MD5 + + + application/zip + + + + + ZIP + + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml + + + Size: unknown SizeUnit: unknown ChecksumValue: unknown ChecksumAlgorithm: unknown + + + text/xml + + + + + XML + + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.rc.xml + + + Size: 1228 SizeUnit: B ChecksumValue: 65cae8e2682a4d3be646f8bd08296cee ChecksumAlgorithm: MD5 + + + text/xml + + + + + XML + + + + + + + + + + + SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.png + + + Size: 32503 SizeUnit: B ChecksumValue: 2972b99c9de9e8bf908582012fcdb0ba ChecksumAlgorithm: MD5 + + + image/png + + + + + PNG + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SWOT + + + gov.nasa.esdis.umm.platformshortname + + + PlatformShortName + + + + + + + + + + + + + KaRIn + + + gov.nasa.esdis.umm.instrumentshortname + + + InstrumentShortName + + + + + + + + + + + + + + + + instrumentInformation + + + ICV23 KaRIn Instrument_Characteristics name 1 + + + + + ICV24 KaRIn Instrument_Characteristics value 1 + + + + + + + 
instrumentInformation + + + ICV25 KaRIn Instrument_Characteristics name 2 + + + + + ICV26 KaRIn Instrument_Characteristics value 2 + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/test/resources/cumulus_message_input_example.json b/src/test/resources/cumulus_message_input_example.json index af7d66f..bb3971b 100644 --- a/src/test/resources/cumulus_message_input_example.json +++ b/src/test/resources/cumulus_message_input_example.json @@ -40,7 +40,8 @@ "key": "dataset-image/MODIS_A-JPL-L2P-v2019.0/sst.png", "size": 7152, "fileName": "sst.png", - "type": "metadata" + "type": "metadata", + "description": "sst" }, { "bucket": "dyen-cumulus-public",