toBeRemovedItems){
- toBeRemovedItems.forEach(item -> {
- unsortedRelatedUrls.remove(item);
- });
- return unsortedRelatedUrls;
- }
-
- /**
- *
- * @param s : The string to be verified
- * @param startStr : verifies the input s is starting with startStr ignoring case
- * return true/false
- */
- public static boolean isStrStarsWithIgnoreCase(String s, String startStr) {
- s = StringUtils.trim(s);
- return StringUtils.startsWithIgnoreCase(s,startStr);
- }
-
- /**
- *
- * @param s: The string to be verified
- * @param startStrs: verifies the input s is starting with at least one item in startStrs[] ignoring case
- * @return true/false
- */
- public static boolean isStrStarsWithIgnoreCase(String s, String[] startStrs) {
- s = StringUtils.trim(s);
- for(String elementStr:startStrs) {
- if(StringUtils.startsWithIgnoreCase(s, elementStr)){
- return true;
- }
- }
- return false;
- }
-
- public static boolean isGETDataType(String s) {
- s = StringUtils.trim(s);
- return StringUtils.equalsIgnoreCase(s,RelatedUrlType.RelatedUrlTypeEnum.GET_DATA.value());
- }
-
-
}
diff --git a/src/main/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiils.java b/src/main/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiils.java
new file mode 100644
index 0000000..2ab7391
--- /dev/null
+++ b/src/main/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiils.java
@@ -0,0 +1,72 @@
+package gov.nasa.cumulus.metadata.util;
+
+import cumulus_message_adapter.message_parser.AdapterLogger;
+import org.w3c.dom.Document;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathExpressionException;
+
+public class MENDsISOXmlUtiils {
+ /**
+ * extract a string from xml document. swallow exception if there is any.
+ * If exception is swallowed, return empty string.
+ * Another extractXPathValueThrowsException shall be implemented whenever needed, which
+ * in another case should throw exception instead of swallow.
+ * @return
+ */
+ /**
+ *
+ * @param doc
+ * @param xpath
+ * @param pathStr : the xml path in string format
+ * @param pathTagStr :the tag form of the xml path string. Ex IsoMendsXPath.ADDITIONAL_ATTRIBUTES_BLOCK.
+ * This is for logging and support purpose so the developer can quickly identify what field(s)
+ * is causing problem
+ * @return : extracted string. Or the extractedString default is "" which is empty string. Hence,
+ * any exception would cause this function to return an empty string
+ */
+ public static String extractXPathValueSwallowException(Document doc, XPath xpath, String pathStr, String pathTagStr) {
+ String extractedStr = ""; //default to empty string.
+ try {
+ extractedStr = xpath.evaluate(pathStr, doc);
+ } catch (XPathExpressionException xPathExpressionException) {
+ AdapterLogger.LogError("extractXPathValueSwallowException error while extracting: " + pathTagStr
+ + " path string value:"+ pathStr
+ + " Exception:" +xPathExpressionException);
+ } catch (Exception genericException) {
+ AdapterLogger.LogError("extractXPathValueSwallowException error while extracting: "+ pathTagStr
+ + " path string value:"+ pathStr
+ + " Exception:" +genericException);
+ }
+ return extractedStr;
+ }
+
+ /**
+ * extract a string from xml document. throws exception if there is any.
+ * @param doc
+ * @param xpath
+ * @param pathStr
+ * @param pathTagStr
+ * @return
+ * @throws Exception
+ */
+    public static String extractXPathValueThrowsException(Document doc, XPath xpath, String pathStr, String pathTagStr)
+            throws Exception{
+        String extractedStr = "";
+        try {
+            extractedStr = xpath.evaluate(pathStr, doc);
+        } catch (XPathExpressionException xPathExpressionException) {
+            AdapterLogger.LogError("extractXPathValueThrowsException error while extracting: " + pathTagStr
+                    + " path string value:"+ pathStr
+                    + " Exception:" +xPathExpressionException);
+            throw xPathExpressionException;
+        } catch (Exception genericException) {
+            AdapterLogger.LogError("extractXPathValueThrowsException error while extracting: "+ pathTagStr
+                    + " path string value:"+ pathStr
+                    + " Exception:" +genericException);
+            throw genericException;
+        }
+        return extractedStr;
+    }
+
+}
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java
index 4581de1..8f75173 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCitation.java
@@ -95,7 +95,7 @@ public Date getReleaseDate() {
*/
public void setReleaseDate(Date releaseDate) {
this.releaseDate = releaseDate;
- this.releaseDateLong = new Long(releaseDate.getTime());
+ this.releaseDateLong = releaseDate.getTime();
}
public Long getReleaseDateLong() {
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java
index 160f33f..7bf2fe1 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetCoverage.java
@@ -147,7 +147,7 @@ public Date getStartTime() {
*/
public void setStartTime(Date startTime) {
this.startTime = startTime;
- this.startTimeLong = new Long(startTime.getTime());
+ this.startTimeLong = startTime.getTime();
}
/**
* @return the stopTime
@@ -160,7 +160,7 @@ public Date getStopTime() {
*/
public void setStopTime(Date stopTime) {
this.stopTime = stopTime;
- this.stopTimeLong = new Long(stopTime.getTime());
+ this.stopTimeLong = stopTime.getTime();
}
public Long getStartTimeLong() {
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java
index 42ac1a9..fe6eee1 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetDateTime.java
@@ -34,7 +34,7 @@ public DatasetDateTime() {
public DatasetDateTime(DatasetElement element, Date keyValue) {
this.datasetElement = element;
this.value = keyValue;
- this.valueLong = new Long(value.getTime());
+ this.valueLong = keyValue.getTime();
}
public DatasetDateTime(DatasetElement element, Long keyValue) {
@@ -74,7 +74,7 @@ public Long getValueLong() {
public void setValue(Date value) {
this.value = value;
- this.valueLong = new Long(value.getTime());
+ this.valueLong = value.getTime();
}
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java
index 9c74f09..26e338c 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetMetaHistory.java
@@ -78,7 +78,7 @@ public Date getEchoSubmitDate() {
}
public void setEchoSubmitDate(Date echoSubmitDate) {
this.echoSubmitDate = echoSubmitDate;
- this.echoSubmitDateLong = new Long(echoSubmitDate.getTime());
+ this.echoSubmitDateLong = echoSubmitDate.getTime();
}
public Long getEchoSubmitDateLong() {
@@ -103,14 +103,14 @@ public Date getCreationDate() {
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
- this.creationDateLong = new Long(creationDate.getTime());
+ this.creationDateLong = creationDate.getTime();
}
public Date getLastRevisionDate() {
return lastRevisionDate;
}
public void setLastRevisionDate(Date lastRevisionDate) {
this.lastRevisionDate = lastRevisionDate;
- this.lastRevisionDateLong = new Long(lastRevisionDate.getTime());
+ this.lastRevisionDateLong = lastRevisionDate.getTime();
}
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java
index c4ffa29..eed0475 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetSoftware.java
@@ -54,7 +54,7 @@ public Date getSoftwareDate() {
}
public void setSoftwareDate(Date softwareDate) {
this.softwareDate = softwareDate;
- this.softwareDateLong = new Long(softwareDate.getTime());
+ this.softwareDateLong = softwareDate.getTime();
}
public Long getSoftwareDateLong() {
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java b/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java
index 5d22f91..ef42a9a 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/DatasetVersion.java
@@ -79,7 +79,7 @@ public Date getVersionDate() {
}
public void setVersionDate(Date versionDate) {
this.versionDate = versionDate;
- this.versionDateLong = new Long(versionDate.getTime());
+ this.versionDateLong = versionDate.getTime();
}
public Long getVersionDateLong() {
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/Granule.java b/src/main/java/gov/nasa/podaac/inventory/model/Granule.java
index 4c2e843..802e90b 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/Granule.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/Granule.java
@@ -78,39 +78,39 @@ public Granule(String name, Date startTime, Date requestedTime, Date acquiredTim
this.name = name;
try{
- this.startTimeLong = new Long(startTime.getTime());
+ this.startTimeLong = startTime.getTime();
}catch(NullPointerException npe)
{
this.startTimeLong = null;
}
try{
- this.stopTimeLong = new Long(stopTime.getTime());
+ this.stopTimeLong = stopTime.getTime();
}catch(NullPointerException npe)
{
this.stopTimeLong = null;
}
try{
- this.createTimeLong = new Long(createTime.getTime());
+ this.createTimeLong = createTime.getTime();
}catch(NullPointerException npe)
{
this.createTimeLong = null;
}
try{
- this.ingestTimeLong = new Long(ingestTime.getTime());
+ this.ingestTimeLong = ingestTime.getTime();
}catch(NullPointerException npe)
{
this.ingestTimeLong = null;
}
this.version = version;
try{
- this.requestedTimeLong = new Long(requestedTime.getTime());
+ this.requestedTimeLong = requestedTime.getTime();
}catch(NullPointerException npe)
{
this.requestedTimeLong = null;
}
try{
- this.acquiredTimeLong = new Long(acquiredTime.getTime());
+ this.acquiredTimeLong = acquiredTime.getTime();
}catch(NullPointerException npe)
{
this.acquiredTimeLong = null;
@@ -121,7 +121,7 @@ public Granule(String name, Date startTime, Date requestedTime, Date acquiredTim
this.checksumType = checksumType;
this.status = status;
try{
- this.archiveTimeLong = new Long(archiveTime.getTime());
+ this.archiveTimeLong = archiveTime.getTime();
}catch(NullPointerException npe)
{
this.archiveTimeLong = null;
@@ -173,14 +173,6 @@ public String getName() {
public void setName(String name) {
this.name = name;
}
- //officialName
-// public String getOfficialName() {
-// return officialName;
-// }
-//
-// public void setOfficialName(String name) {
-// this.officialName = name;
-// }
public String getRootPath() {
return rootPath;
}
@@ -205,7 +197,7 @@ public Date getStartTime() {
public void setStartTime(Date startTime) {
this.startTime = startTime;
- this.startTimeLong = new Long(startTime.getTime());
+ this.startTimeLong = startTime.getTime();
}
public Date getStopTime() {
@@ -214,7 +206,7 @@ public Date getStopTime() {
public void setStopTime(Date stopTime) {
this.stopTime = stopTime;
- this.stopTimeLong = new Long(stopTime.getTime());
+ this.stopTimeLong = stopTime.getTime();
}
@@ -224,7 +216,7 @@ public Date getAcquiredTime() {
public void setAcquiredTime(Date acquiredTime) {
this.acquiredTime = acquiredTime;
- this.acquiredTimeLong = new Long(acquiredTime.getTime());
+ this.acquiredTimeLong = acquiredTime.getTime();
}
public Date getRequestedTime() {
return requestedTime;
@@ -232,7 +224,7 @@ public Date getRequestedTime() {
public void setRequestedTime(Date requestedTime) {
this.requestedTime = requestedTime;
- this.requestedTimeLong = new Long(requestedTime.getTime());
+ this.requestedTimeLong = requestedTime.getTime();
}
public Date getArchiveTime() {
@@ -241,7 +233,7 @@ public Date getArchiveTime() {
public void setArchiveTime(Date archiveTime) {
this.archiveTime = archiveTime;
- this.archiveTimeLong = new Long(archiveTime.getTime());
+ this.archiveTimeLong = archiveTime.getTime();
}
public Date getCreateTime() {
@@ -250,7 +242,7 @@ public Date getCreateTime() {
public void setCreateTime(Date createTime) {
this.createTime = createTime;
- this.createTimeLong = new Long(createTime.getTime());
+ this.createTimeLong = createTime.getTime();
}
public Date getIngestTime() {
@@ -259,7 +251,7 @@ public Date getIngestTime() {
public void setIngestTime(Date ingestTime) {
this.ingestTime = ingestTime;
- this.ingestTimeLong = new Long(ingestTime.getTime());
+ this.ingestTimeLong = ingestTime.getTime();
}
public Date getVerifyTime() {
@@ -268,7 +260,7 @@ public Date getVerifyTime() {
public void setVerifyTime(Date verifyTime) {
this.verifyTime = verifyTime;
- this.verifyTimeLong = new Long(verifyTime.getTime());
+ this.verifyTimeLong = verifyTime.getTime();
}
public Integer getVersion() {
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java b/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java
index 1d50b80..0532e30 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/GranuleDateTime.java
@@ -27,7 +27,7 @@ public GranuleDateTime(DatasetElement element, Date keyValue) {
this.datasetElement = element;
this.value = keyValue;
try{
- this.valueLong = new Long(keyValue.getTime());
+ this.valueLong = keyValue.getTime();
}catch(NullPointerException npe)
{
this.valueLong = null;
@@ -75,7 +75,7 @@ public Long getValueLong() {
public void setValue(Date value) {
this.value = value;
- this.valueLong = new Long(value.getTime());
+ this.valueLong = value.getTime();
}
@Override
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java b/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java
index d9ad422..b6aeaef 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/GranuleMetaHistory.java
@@ -83,14 +83,14 @@ public Date getCreationDate() {
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
- this.creationDateLong = new Long(creationDate.getTime());
+ this.creationDateLong = creationDate.getTime();
}
public Date getLastRevisionDate() {
return lastRevisionDate;
}
public void setLastRevisionDate(Date lastRevisionDate) {
this.lastRevisionDate = lastRevisionDate;
- this.lastRevisionDateLong = new Long(lastRevisionDate.getTime());
+ this.lastRevisionDateLong = lastRevisionDate.getTime();
}
public String getRevisionHistory() {
return revisionHistory;
@@ -103,7 +103,7 @@ public Date getEchoSubmitDate() {
}
public void setEchoSubmitDate(Date echoSubmitDate) {
this.echoSubmitDate = echoSubmitDate;
- this.echoSubmitDateLong = new Long(echoSubmitDate.getTime());
+ this.echoSubmitDateLong = echoSubmitDate.getTime();
}
public Long getCreationDateLong() {
diff --git a/src/main/java/gov/nasa/podaac/inventory/model/Provider.java b/src/main/java/gov/nasa/podaac/inventory/model/Provider.java
index 9a953dd..1bf642e 100644
--- a/src/main/java/gov/nasa/podaac/inventory/model/Provider.java
+++ b/src/main/java/gov/nasa/podaac/inventory/model/Provider.java
@@ -26,7 +26,7 @@ public Integer getProviderId() {
return providerId;
}
public void setProviderId(Integer providerId) {
- this.providerId = new Integer(providerId);
+ this.providerId = providerId;
}
public String getShortName() {
return shortName;
diff --git a/src/main/resources/jsonschema/UMM-G1.6.5.json b/src/main/resources/jsonschema/UMM-G1.6.5.json
new file mode 100644
index 0000000..9a5b443
--- /dev/null
+++ b/src/main/resources/jsonschema/UMM-G1.6.5.json
@@ -0,0 +1,1310 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.5",
+ "title": "UMM-G",
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "GranuleUR": {
+ "description": "The Universal Reference ID of the granule referred by the data provider. This ID is unique per data provider.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 250
+ },
+ "ProviderDates": {
+      "description": "Dates related to activities involving the granule and the data provider database with the exception for Delete. For Create, Update, and Insert the date is the date that the granule file is created, updated, or inserted into the provider database by the provider. Delete is the date that the CMR should delete the granule metadata record from its repository.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ProviderDateType"
+ },
+ "minItems": 1,
+ "maxItems": 4,
+ "uniqueItems":true
+ },
+ "CollectionReference": {
+ "description": "The collection metadata record's short name and version, or entry title to which this granule metadata record belongs.",
+ "$ref": "#/definitions/CollectionReferenceType"
+ },
+ "AccessConstraints": {
+ "description": "Allows the author to constrain access to the granule. Some words that may be used in this element's value include: Public, In-house, Limited, None. The value field is used for special ACL rules (Access Control Lists (http://en.wikipedia.org/wiki/Access_control_list)). For example it can be used to hide metadata when it isn't ready for public consumption.",
+ "$ref": "#/definitions/AccessConstraintsType"
+ },
+ "DataGranule": {
+ "description": "This entity stores basic descriptive characteristics associated with a granule.",
+ "$ref": "#/definitions/DataGranuleType"
+ },
+ "PGEVersionClass": {
+ "description": "This entity stores basic descriptive characteristics related to the Product Generation Executable associated with a granule.",
+ "$ref": "#/definitions/PGEVersionClassType"
+ },
+ "TemporalExtent": {
+ "description": "This class contains attributes which describe the temporal extent of a granule. Temporal Extent includes either a Range Date Time, or a Single Date Time",
+ "$ref": "#/definitions/TemporalExtentType"
+ },
+ "SpatialExtent": {
+ "description": "This class contains attributes which describe the spatial extent of a granule. Spatial Extent includes any or all of Granule Localities, Horizontal Spatial Domain, and Vertical Spatial Domain.",
+ "$ref": "#/definitions/SpatialExtentType"
+ },
+ "OrbitCalculatedSpatialDomains": {
+ "description": "This entity is used to store the characteristics of the orbit calculated spatial domain to include the model name, orbit number, start and stop orbit number, equator crossing date and time, and equator crossing longitude.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/OrbitCalculatedSpatialDomainType"
+ },
+ "minItems": 1,
+ "uniqueItems":true
+ },
+ "MeasuredParameters": {
+ "description": "This entity contains the name of the geophysical parameter expressed in the data as well as associated quality flags and quality statistics. The quality statistics element contains measures of quality for the granule. The parameters used to set these measures are not preset and will be determined by the data producer. Each set of measures can occur many times either for the granule as a whole or for individual parameters. The quality flags contain the science, operational and automatic quality flags which indicate the overall quality assurance levels of specific parameter values within a granule.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/MeasuredParameterType"
+ },
+ "minItems": 1,
+ "uniqueItems":true
+ },
+ "Platforms": {
+ "description": "A reference to a platform in the parent collection that is associated with the acquisition of the granule. The platform must exist in the parent collection. For example, Platform types may include (but are not limited to): ADEOS-II, AEM-2, Terra, Aqua, Aura, BALLOONS, BUOYS, C-130, DEM, DMSP-F1,etc.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlatformType"
+ },
+ "minItems": 1,
+ "uniqueItems":true
+ },
+ "Projects": {
+ "description": "The name of the scientific program, field campaign, or project from which the data were collected. This element is intended for the non-space assets such as aircraft, ground systems, balloons, sondes, ships, etc. associated with campaigns. This element may also cover a long term project that continuously creates new data sets — like MEaSUREs from ISCCP and NVAP or CMARES from MISR. Project also includes the Campaign sub-element to support multiple campaigns under the same project.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ProjectType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "AdditionalAttributes": {
+ "description": "Reference to an additional attribute in the parent collection. The attribute reference may contain a granule specific value that will override the value in the parent collection for this granule. An attribute with the same name must exist in the parent collection.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/AdditionalAttributeType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "InputGranules": {
+ "description": "This entity contains the identification of the input granule(s) for a specific granule.",
+ "type": "array",
+ "items": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 500
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "TilingIdentificationSystem": {
+ "description": "This entity stores the tiling identification system for the granule. The tiling identification system information is an alternative way to express granule's spatial coverage based on a certain two dimensional coordinate system defined by the providers. The name must match the name in the parent collection.",
+ "$ref": "#/definitions/TilingIdentificationSystemType"
+ },
+ "CloudCover": {
+ "description": "A percentage value indicating how much of the area of a granule (the EOSDIS data unit) has been obscured by clouds. It is worth noting that there are many different measures of cloud cover within the EOSDIS data holdings and that the cloud cover parameter that is represented in the archive is dataset-specific.",
+ "type": "number"
+ },
+ "RelatedUrls": {
+ "description": "This element describes any data/service related URLs that include project home pages, services, related data archives/servers, metadata extensions, direct links to online software packages, web mapping services, links to images, or other data.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/RelatedUrlType"
+ },
+ "minItems": 1
+ },
+ "NativeProjectionNames": {
+ "description": "Represents the native projection of the granule if the granule has a native projection.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ProjectionNameType"
+ }
+ },
+ "GridMappingNames": {
+ "description": "Represents the native grid mapping of the granule, if the granule is gridded.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/GridMappingNameType"
+ }
+ },
+ "MetadataSpecification": {
+ "description": "Requires the user to add in schema information into every granule record. It includes the schema's name, version, and URL location. The information is controlled through enumerations at the end of this schema.",
+ "$ref": "#/definitions/MetadataSpecificationType"
+ }
+ },
+ "required": ["GranuleUR", "ProviderDates", "CollectionReference", "MetadataSpecification"],
+
+
+
+ "definitions": {
+ "ProviderDateType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Specifies the date and its type that the provider uses for the granule. For Create, Update, and Insert the date is the date that the granule file is created, updated, or inserted into the provider database by the provider. Delete is the date that the CMR should delete the granule metadata record from its repository.",
+ "properties": {
+ "Date": {
+ "description": "This is the date that an event associated with the granule occurred.",
+ "format": "date-time",
+ "type": "string"
+ },
+ "Type": {
+          "description": "This is the type of event associated with the date. For example, Creation or Update.",
+ "$ref": "#/definitions/ProviderDateTypeEnum"
+ }
+ },
+ "required": ["Date", "Type"]
+ },
+ "CollectionReferenceType": {
+ "type": "object",
+ "description": "A reference to a collection metadata record's short name and version, or entry title to which this granule metadata record belongs.",
+ "oneOf": [{
+ "additionalProperties": false,
+ "properties": {
+ "ShortName": {
+ "description": "The collection's short name as per the UMM-C.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 85
+ },
+ "Version": {
+ "description": "The collection's version as per the UMM-C.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ }
+ },
+ "required": ["ShortName", "Version"]
+ },
+ {
+ "additionalProperties": false,
+ "properties": {
+ "EntryTitle": {
+ "description": "The collections entry title as per the UMM-C.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1030
+ }
+ },
+ "required": ["EntryTitle"]
+ }]
+ },
+ "AccessConstraintsType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Information about any physical constraints for accessing the data set.",
+ "properties": {
+ "Description": {
+ "description": "Free-text description of the constraint. In ECHO 10, this field is called RestrictionComment. Additional detailed instructions on how to access the granule data may be entered in this field.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 4000
+ },
+ "Value": {
+ "description": "Numeric value that is used with Access Control Language (ACLs) to restrict access to this granule. For example, a provider might specify a granule level ACL that hides all granules with a value element set to 15. In ECHO, this field is called RestrictionFlag.",
+ "type": "number"
+ }
+ },
+ "required": ["Value"]
+ },
+ "DataGranuleType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity stores the basic descriptive characteristics associated with a granule.",
+ "properties": {
+ "ArchiveAndDistributionInformation": {
+ "description": "A list of the file(s) or file package(s) that make up the granule. A file package is something like a tar or zip file.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ArchiveAndDistributionInformationType"
+ },
+ "minItems": 1,
+ "uniqueItems":true
+ },
+ "ReprocessingPlanned": {
+ "description": "Granule level, stating what reprocessing may be performed on this granule.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "ReprocessingActual": {
+ "description": "Granule level, stating what reprocessing has been performed on this granule.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "DayNightFlag": {
+ "description": "This attribute is used to identify if a granule was collected during the day, night (between sunset and sunrise) or both.",
+ "type": "string",
+ "enum": ["Day", "Night", "Both", "Unspecified"]
+ },
+ "ProductionDateTime": {
+ "description": "The date and time a specific granule was produced by a PGE.",
+ "format": "date-time",
+ "type": "string"
+ },
+ "Identifiers": {
+ "description": "This holds any granule identifiers the provider wishes to provide.",
+ "type": "array",
+ "items": {"$ref": "#/definitions/IdentifierType"},
+ "minItems": 1,
+ "uniqueItems":true
+ }
+ },
+ "required": ["DayNightFlag", "ProductionDateTime"]
+ },
+ "ArchiveAndDistributionInformationType": {
+ "description": "This set of elements describes a file package or a file that contains other files. Normally this is either a tar or a zip file.",
+ "anyOf": [{"$ref": "#/definitions/FilePackageType"}, {"$ref": "#/definitions/FileType"}]
+ },
+ "FilePackageType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This set of elements describes a file package or a file that contains other files. Normally this is either a tar or a zip file.",
+ "properties": {
+ "Name": {
+ "description": "This field describes the name of the actual file.",
+ "$ref": "#/definitions/FileNameType"
+ },
+ "SizeInBytes": {
+ "description": "The size in Bytes of the volume of data contained in the granule. Bytes are defined as eight bits. Please use this element instead of or inclusive with the Size element. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.",
+ "type": "integer"
+ },
+ "Size": {
+ "description": "The size of the volume of data contained in the granule. Please use the SizeInBytes element either instead of this one or inclusive of this one. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.",
+ "type": "number"
+ },
+ "SizeUnit": {
+ "description": "The unit of the file size.",
+ "$ref": "#/definitions/FileSizeUnitEnum"
+ },
+ "Format": {
+ "description": "This element defines a single format for a distributable artifact.",
+ "$ref": "#/definitions/DataFormatType"
+ },
+ "MimeType": {
+ "description": "The mime type of the resource.",
+ "$ref": "#/definitions/MimeTypeEnum"
+ },
+ "Checksum": {
+ "description": "Allows the provider to provide the checksum value for the file.",
+ "$ref": "#/definitions/ChecksumType"
+ },
+ "Files": {
+ "description": "Allows the provider to add the list of the files that are included in this one.",
+ "type": "array",
+ "items": {"$ref": "#/definitions/FileType"},
+ "uniqueItems": true,
+ "minItems": 1
+ }
+ },
+ "required": ["Name"],
+ "dependencies": {
+ "Size": ["SizeUnit"]
+ }
+ },
+ "FileType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This set of elements describes a file. The file can be a part of the entire granule or is the granule.",
+ "properties": {
+ "Name": {
+ "description": "This field describes the name of the actual file.",
+ "$ref": "#/definitions/FileNameType"
+ },
+ "SizeInBytes": {
+ "description": "The size in Bytes of the volume of data contained in the granule. Bytes are defined as eight bits. Please use this element instead of or inclusive with the Size element. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.",
+ "type": "integer"
+ },
+ "Size": {
+ "description": "The size of the volume of data contained in the granule. Please use the SizeInBytes element either instead of this one or inclusive of this one. The issue with the size element is that if CMR data providers use a unit other than Bytes, end users don't know how the granule size was calculated. For example, if the unit was MegaBytes, the size could be calculated by using 1000xE2 Bytes (MegaBytes) or 1024xE2 Bytes (mebibytes) and therefore there is no systematic way to know the actual size of a granule by using the granule metadata record.",
+ "type": "number"
+ },
+ "SizeUnit": {
+ "description": "The unit of the file size.",
+ "$ref": "#/definitions/FileSizeUnitEnum"
+ },
+ "Format": {
+ "description": "This element defines a single format for a distributable artifact.",
+ "$ref": "#/definitions/DataFormatType"
+ },
+ "FormatType": {
+ "description": "Allows the provider to state whether the distributable item's format is its native format or another supported format.",
+ "type": "string",
+ "enum": ["Native", "Supported", "NA"]
+ },
+ "MimeType": {
+ "description": "The mime type of the resource.",
+ "$ref": "#/definitions/MimeTypeEnum"
+ },
+ "Checksum": {
+ "description": "Allows the provider to provide the checksum value for the file.",
+ "$ref": "#/definitions/ChecksumType"
+ }
+ },
+ "required": ["Name"],
+ "dependencies": {
+ "Size": ["SizeUnit"]
+ }
+ },
+ "IdentifierType" :{
+ "type": "object",
+ "description": "This entity stores an identifier. If the identifier is part of the enumeration then use it. If the enumeration is 'Other', the provider must specify the identifier's name.",
+ "oneOf": [{
+ "additionalProperties": false,
+ "properties": {
+ "Identifier": {
+ "description": "The identifier value.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "IdentifierType": {
+ "description": "The enumeration of known identifier types.",
+ "type": "string",
+ "enum": ["ProducerGranuleId", "LocalVersionId", "FeatureId", "CRID"]
+ },
+ "IdentifierName": {
+ "description": "The name of the identifier.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ }
+ },
+ "required": ["Identifier","IdentifierType"]
+ },
+ {
+ "additionalProperties": false,
+ "properties": {
+ "Identifier": {
+ "description": "The identifier value.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "IdentifierType": {
+ "description": "The enumeration of known identifier types.",
+ "type": "string",
+ "enum": ["Other"]
+ },
+ "IdentifierName": {
+ "description": "The name of the identifier.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ }
+ },
+ "required": ["Identifier","IdentifierType","IdentifierName"]
+ }]
+ },
+ "PGEVersionClassType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity stores basic descriptive characteristics related to the Product Generation Executable associated with a granule.",
+ "properties": {
+ "PGEName": {
+ "description": "Name of product generation executable.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "PGEVersion": {
+ "description": "Version of the product generation executable that produced the granule.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 50
+ }
+ },
+ "required": ["PGEVersion"]
+ },
+ "TemporalExtentType": {
+ "type": "object",
+ "description": "Information which describes the temporal extent of a specific granule.",
+ "oneOf": [{
+ "additionalProperties": false,
+ "properties": {
+ "RangeDateTime": {
+ "description": "Stores the data acquisition start and end date/time for a granule.",
+ "$ref": "#/definitions/RangeDateTimeType"
+ }
+ },
+ "required": ["RangeDateTime"]
+ }, {
+ "additionalProperties": false,
+ "properties": {
+ "SingleDateTime": {
+ "description": "Stores the data acquisition date/time for a granule.",
+ "format": "date-time",
+ "type": "string"
+ }
+ },
+ "required": ["SingleDateTime"]
+ }]
+ },
+ "RangeDateTimeType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Stores the data acquisition start and end date/time for a granule.",
+ "properties": {
+ "BeginningDateTime": {
+ "description": "The time when the temporal coverage period being described began.",
+ "format": "date-time",
+ "type": "string"
+ },
+ "EndingDateTime": {
+ "description": "The time when the temporal coverage period being described ended.",
+ "format": "date-time",
+ "type": "string"
+ }
+ },
+ "required": ["BeginningDateTime"]
+ },
+ "SpatialExtentType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This class contains attributes which describe the spatial extent of a granule. Spatial Extent includes any or all of Granule Localities, Horizontal Spatial Domain, and Vertical Spatial Domain.",
+ "properties": {
+ "GranuleLocalities": {
+ "description": "This entity stores information used at the granule level to describe the labeling of granules with compounded time/space text values and which are subsequently used to define more phenomenological-based granules, thus the locality type and description are contained.",
+ "type": "array",
+ "items": {"$ref": "#/definitions/GranuleLocalityType"},
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "HorizontalSpatialDomain": {
+ "description": "This represents the granule horizontal spatial domain information.",
+ "$ref": "#/definitions/HorizontalSpatialDomainType"
+ },
+ "VerticalSpatialDomains": {
+ "description": "This represents the domain value and type for the granule's vertical spatial domain.",
+ "type": "array",
+ "items": {"$ref": "#/definitions/VerticalSpatialDomainType"},
+ "minItems":1,
+ "uniqueItems":true
+ }
+ },
+ "anyOf": [{
+ "required": ["GranuleLocalities"]
+ }, {
+ "required": ["HorizontalSpatialDomain"]
+ }, {
+ "required": ["VerticalSpatialDomains"]
+ }]
+ },
+ "HorizontalSpatialDomainType": {
+ "type": "object",
+ "description": "Information about a granule with horizontal spatial coverage.",
+ "additionalProperties": false,
+ "properties": {
+ "ZoneIdentifier": {
+ "description": "The appropriate numeric or alpha code used to identify the various zones in the granule's grid coordinate system.",
+ "$ref": "#/definitions/ZoneIdentifierType"
+ },
+ "Geometry": {
+ "description": "This entity holds the geometry representing the spatial coverage information of a granule.",
+ "$ref": "#/definitions/GeometryType"
+ },
+ "Orbit": {
+ "description": "This entity stores orbital coverage information of the granule. This coverage is an alternative way of expressing granule spatial coverage. This information supports orbital backtrack searching on a granule.",
+ "$ref": "#/definitions/OrbitType"
+ },
+ "Track": {
+ "description": "This element stores track information of the granule. Track information is used to allow a user to search for granules whose spatial extent is based on an orbital cycle, pass, and tile mapping. Though it is derived from the SWOT mission requirements, it is intended that this element type be generic enough so that other missions can make use of it. While track information is a type of spatial domain, it is expected that the metadata provider will provide geometry information that matches the spatial extent of the track information.",
+ "$ref": "#/definitions/TrackType"
+ }
+ },
+ "oneOf": [{
+ "required": ["Geometry"]
+ }, {
+ "required": ["Orbit"]
+ }]
+ },
+ "GeometryType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity holds the geometry representing the spatial coverage information of a granule.",
+ "properties": {
+ "Points": {
+ "description": "The horizontal spatial coverage of a point.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PointType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "BoundingRectangles": {
+ "description": "This entity holds the horizontal spatial coverage of a bounding box.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/BoundingRectangleType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "GPolygons": {
+ "description": "A GPolygon specifies an area on the earth represented by a main boundary with optional boundaries for regions excluded from the main boundary.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/GPolygonType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "Lines": {
+ "description": "This entity holds the horizontal spatial coverage of a line. A line area contains at least two points.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/LineType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ },
+ "anyOf": [{
+ "required": ["Points"]
+ }, {
+ "required": ["BoundingRectangles"]
+ }, {
+ "required": ["GPolygons"]
+ }, {
+ "required": ["Lines"]
+ }]
+ },
+ "PointType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "The longitude and latitude values of a spatially referenced point in degrees.",
+ "properties": {
+ "Longitude": {
+ "$ref": "#/definitions/LongitudeType"
+ },
+ "Latitude": {
+ "$ref": "#/definitions/LatitudeType"
+ }
+ },
+ "required": ["Longitude", "Latitude"]
+ },
+ "BoundingRectangleType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity holds the horizontal spatial coverage of a bounding box.",
+ "properties": {
+ "WestBoundingCoordinate": {
+ "$ref": "#/definitions/LongitudeType"
+ },
+ "NorthBoundingCoordinate": {
+ "$ref": "#/definitions/LatitudeType"
+ },
+ "EastBoundingCoordinate": {
+ "$ref": "#/definitions/LongitudeType"
+ },
+ "SouthBoundingCoordinate": {
+ "$ref": "#/definitions/LatitudeType"
+ }
+ },
+ "required": ["WestBoundingCoordinate", "NorthBoundingCoordinate", "EastBoundingCoordinate", "SouthBoundingCoordinate"]
+ },
+ "GPolygonType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "A GPolygon specifies an area on the earth represented by a main boundary with optional boundaries for regions excluded from the main boundary.",
+ "properties": {
+ "Boundary": {
+ "$ref": "#/definitions/BoundaryType"
+ },
+ "ExclusiveZone": {
+ "$ref": "#/definitions/ExclusiveZoneType"
+ }
+ },
+ "required": ["Boundary"]
+ },
+ "BoundaryType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "A boundary is set of points connected by straight lines representing a polygon on the earth. It takes a minimum of three points to make a boundary. Points must be specified in counter-clockwise order and closed (the first and last vertices are the same).",
+ "properties": {
+ "Points": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PointType"
+ },
+ "minItems": 3
+ }
+ },
+ "required": ["Points"]
+ },
+ "ExclusiveZoneType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Contains the excluded boundaries from the GPolygon.",
+ "properties": {
+ "Boundaries": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/BoundaryType"
+ },
+ "minItems": 1
+ }
+ },
+ "required": ["Boundaries"]
+ },
+ "LineType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity holds the horizontal spatial coverage of a line. A line area contains at least two points.",
+ "properties": {
+ "Points": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PointType"
+ },
+ "minItems": 2
+ }
+ },
+ "required": ["Points"]
+ },
+ "OrbitType":{
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity stores orbital coverage information of the granule. This coverage is an alternative way of expressing granule spatial coverage. This information supports orbital backtrack searching on a granule.",
+ "properties": {
+ "AscendingCrossing": {
+ "description": "Equatorial crossing on the ascending pass in decimal degrees longitude. The convention we've been using is it's the first included ascending crossing if one is included, and the prior ascending crossing if none is included (e.g. descending half orbits).",
+ "$ref": "#/definitions/LongitudeType"
+ },
+ "StartLatitude": {
+ "description": "Granule's starting latitude.",
+ "$ref": "#/definitions/LatitudeType"
+ },
+ "StartDirection": {
+ "description": "Ascending or descending. Valid input: 'A' or 'D'",
+ "$ref": "#/definitions/OrbitDirectionTypeEnum"
+ },
+ "EndLatitude": {
+ "description": "Granule's ending latitude.",
+ "$ref": "#/definitions/LatitudeType"
+ },
+ "EndDirection": {
+ "description": "Ascending or descending. Valid input: 'A' or 'D'",
+ "$ref": "#/definitions/OrbitDirectionTypeEnum"
+ }
+ },
+ "required": ["AscendingCrossing", "StartLatitude", "StartDirection", "EndLatitude", "EndDirection"]
+ },
+ "TrackType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This element stores track information of the granule. Track information is used to allow a user to search for granules whose spatial extent is based on an orbital cycle, pass, and tile mapping. Though it is derived from the SWOT mission requirements, it is intended that this element type be generic enough so that other missions can make use of it. While track information is a type of spatial domain, it is expected that the metadata provider will provide geometry information that matches the spatial extent of the track information.",
+ "properties": {
+ "Cycle": {
+ "description": "An integer that represents a specific set of orbital spatial extents defined by passes and tiles. Though intended to be generic, this comes from a SWOT mission requirement where each cycle represents a set of 1/2 orbits. Each 1/2 orbit is called a 'pass'. During science mode, a cycle represents 21 days of 14 full orbits or 588 passes.",
+ "type": "integer"
+ },
+ "Passes": {
+ "description": "A pass number identifies a subset of a granule's spatial extent. This element holds a list of pass numbers and their tiles that exist in the granule. It will allow a user to search by pass number and its tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granule's spatial extent.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/TrackPassTileType"
+ },
+ "minItems": 1
+ }
+ },
+ "required": ["Cycle"]
+ },
+ "TrackPassTileType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This element stores a track pass and its tile information. It will allow a user to search by pass number and their tiles that are contained with in a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit. This element will then hold a list of 1/2 orbits and their tiles that together represent the granules spatial extent.",
+ "properties": {
+ "Pass": {
+ "description": "A pass number identifies a subset of a granule's spatial extent. This element holds a pass number that exists in the granule and will allow a user to search by pass number that is contained within a cycle number. While trying to keep this generic for all to use, this comes from a SWOT requirement where a pass represents a 1/2 orbit.",
+ "type": "integer"
+ },
+ "Tiles": {
+ "description": "A tile is a subset of a pass' spatial extent. This element holds a list of tile identifiers that exist in the granule and will allow a user to search by tile identifier that is contained within a pass number within a cycle number. Though intended to be generic, this comes from a SWOT mission requirement where a tile is a spatial extent that encompasses either a square scanning swath to the left or right of the ground track or a rectangle that includes a full scanning swath both to the left and right of the ground track.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "minItems": 1
+ }
+ },
+ "required": ["Pass"]
+ },
+ "VerticalSpatialDomainType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity contains the type and value for the granule's vertical spatial domain.",
+ "properties": {
+ "Type": {
+ "description": "Describes the type of the area of vertical space covered by the granule locality.",
+ "$ref": "#/definitions/VerticalSpatialDomainTypeEnum"
+ },
+ "Value": {
+ "description": "Describes the extent of the area of vertical space covered by the granule. Use this for Atmosphere profiles or for a specific value.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "MinimumValue": {
+ "description": "Describes the extent of the area of vertical space covered by the granule. Use this and MaximumValue to represent a range of values (Min and Max).",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "MaximumValue": {
+ "description": "Describes the extent of the area of vertical space covered by the granule. Use this and MinimumValue to represent a range of values (Min and Max).",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "Unit": {
+ "description": "Describes the unit of the vertical extent value.",
+ "type": "string",
+ "enum": ["Fathoms", "Feet", "HectoPascals", "Kilometers", "Meters", "Millibars","PoundsPerSquareInch", "Atmosphere", "InchesOfMercury", "InchesOfWater"]
+ }
+ },
+ "oneOf": [{
+ "required": ["Type", "Value"]
+ }, {
+ "required":["Type","MinimumValue", "MaximumValue"]
+ }],
+ "allOf": [{
+ "not": {
+ "required": ["Value", "MinimumValue"]
+ }
+ }, {
+ "not": {
+ "required": ["Value", "MaximumValue"]
+ }
+ }]
+ },
+ "OrbitCalculatedSpatialDomainType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity is used to store the characteristics of the orbit calculated spatial domain to include the model name, orbit number, start and stop orbit number, equator crossing date and time, and equator crossing longitude.",
+ "properties": {
+ "OrbitalModelName": {
+ "description": "The reference to the orbital model to be used to calculate the geo-location of this data in order to determine global spatial extent.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "OrbitNumber": {
+ "description": "The orbit number to be used in calculating the spatial extent of this data.",
+ "type": "integer"
+ },
+ "BeginOrbitNumber": {
+ "description": "Orbit number at the start of the data granule.",
+ "type": "integer"
+ },
+ "EndOrbitNumber": {
+ "description": "Orbit number at the end of the data granule.",
+ "type": "integer"
+ },
+ "EquatorCrossingLongitude": {
+ "description": "This attribute represents the terrestrial longitude of the descending equator crossing.",
+ "$ref": "#/definitions/LongitudeType"
+ },
+ "EquatorCrossingDateTime": {
+ "description": "This attribute represents the date and time of the descending equator crossing.",
+ "format": "date-time",
+ "type": "string"
+ }
+ },
+ "anyOf": [{
+ "required": ["OrbitalModelName"]
+ }, {
+ "required": ["EquatorCrossingLongitude"]
+ }, {
+ "required": ["EquatorCrossingDateTime"]
+ }, {
+ "required": ["OrbitNumber"]
+ }, {
+ "required": ["BeginOrbitNumber", "EndOrbitNumber"]
+ }],
+ "allOf": [{
+ "not": {
+ "required": ["OrbitNumber", "BeginOrbitNumber"]
+ }
+ }, {
+ "not": {
+ "required": ["OrbitNumber", "EndOrbitNumber"]
+ }
+ }]
+ },
+ "MeasuredParameterType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity contains the name of the geophysical parameter expressed in the data as well as associated quality flags and quality statistics. The quality statistics element contains measures of quality for the granule. The parameters used to set these measures are not preset and will be determined by the data producer. Each set of measures can occur many times either for the granule as a whole or for individual parameters. The quality flags contain the science, operational and automatic quality flags which indicate the overall quality assurance levels of specific parameter values within a granule.",
+ "properties": {
+ "ParameterName": {
+ "description": "The measured science parameter expressed in the data granule.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 250
+ },
+ "QAStats": {
+ "description": "The associated quality statistics.",
+ "$ref": "#/definitions/QAStatsType"
+ },
+ "QAFlags": {
+ "description": "The associated quality flags.",
+ "$ref": "#/definitions/QAFlagsType"
+ }
+ },
+ "required": ["ParameterName"]
+ },
+ "QAStatsType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "The quality statistics element contains measures of quality for the granule. The parameters used to set these measures are not preset and will be determined by the data producer. Each set of measures can occur many times either for the granule as a whole or for individual parameters.",
+ "properties": {
+ "QAPercentMissingData": {
+ "description": "Granule level % missing data. This attribute can be repeated for individual parameters within a granule.",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 100
+ },
+ "QAPercentOutOfBoundsData": {
+ "description": "Granule level % out of bounds data. This attribute can be repeated for individual parameters within a granule.",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 100
+ },
+ "QAPercentInterpolatedData": {
+ "description": "Granule level % interpolated data. This attribute can be repeated for individual parameters within a granule.",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 100
+ },
+ "QAPercentCloudCover": {
+ "description": "This attribute is used to characterize the cloud cover amount of a granule. This attribute may be repeated for individual parameters within a granule. (Note - there may be more than one way to define a cloud or it's effects within a product containing several parameters; i.e. this attribute may be parameter specific).",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 100
+ }
+ },
+ "anyOf": [{
+ "required": ["QAPercentMissingData"]
+ }, {
+ "required": ["QAPercentOutOfBoundsData"]
+ }, {
+ "required": ["QAPercentInterpolatedData"]
+ }, {
+ "required": ["QAPercentCloudCover"]
+ }]
+ },
+ "QAFlagsType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "The quality flags contain the science, operational and automatic quality flags which indicate the overall quality assurance levels of specific parameter values within a granule.",
+ "properties": {
+ "AutomaticQualityFlag": {
+ "description": "The granule level flag applying generally to the granule and specifically to parameters the granule level. When applied to parameter, the flag refers to the quality of that parameter for the granule (as applicable). The parameters determining whether the flag is set are defined by the developer and documented in the Quality Flag Explanation.",
+ "type": "string",
+ "enum": ["Passed", "Failed", "Suspect", "Undetermined"]
+ },
+ "AutomaticQualityFlagExplanation": {
+ "description": "A text explanation of the criteria used to set automatic quality flag; including thresholds or other criteria.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 2048
+ },
+ "OperationalQualityFlag": {
+ "description": "The granule level flag applying both generally to a granule and specifically to parameters at the granule level. When applied to parameter, the flag refers to the quality of that parameter for the granule (as applicable). The parameters determining whether the flag is set are defined by the developers and documented in the QualityFlagExplanation.",
+ "type": "string",
+ "enum": ["Passed", "Failed", "Being Investigated", "Not Investigated", "Inferred Passed", "Inferred Failed", "Suspect", "Undetermined"]
+ },
+ "OperationalQualityFlagExplanation": {
+ "description": "A text explanation of the criteria used to set operational quality flag; including thresholds or other criteria.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 2048
+ },
+ "ScienceQualityFlag": {
+ "description": "Granule level flag applying to a granule, and specifically to parameters. When applied to parameter, the flag refers to the quality of that parameter for the granule (as applicable). The parameters determining whether the flag is set are defined by the developers and documented in the Quality Flag Explanation.",
+ "type": "string",
+ "enum": ["Passed", "Failed", "Being Investigated", "Not Investigated", "Inferred Passed", "Inferred Failed", "Suspect", "Hold", "Undetermined"]
+ },
+ "ScienceQualityFlagExplanation": {
+ "description": "A text explanation of the criteria used to set science quality flag; including thresholds or other criteria.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 2048
+ }
+ },
+ "anyOf": [{
+ "required": ["AutomaticQualityFlag"]
+ }, {
+ "required": ["OperationalQualityFlag"]
+ }, {
+ "required": ["ScienceQualityFlag"]
+ }]
+ },
+ "PlatformType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "A reference to a platform in the parent collection that is associated with the acquisition of the granule. The platform must exist in the parent collection. For example, Platform types may include (but are not limited to): ADEOS-II, AEM-2, Terra, Aqua, Aura, BALLOONS, BUOYS, C-130, DEM, DMSP-F1,etc.",
+ "properties": {
+ "ShortName": {
+ "$ref": "#/definitions/ShortNameType"
+ },
+ "Instruments": {
+ "description": "References to the devices in the parent collection that were used to measure or record data, including direct human observation.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/InstrumentType"
+ },
+ "minItems": 1
+ }
+ },
+ "required": ["ShortName"]
+ },
+ "InstrumentType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "A reference to the device in the parent collection that was used to measure or record data, including direct human observation. In cases where instruments have a single composed of child instrument (sensor) or the instrument and composed of child instrument (sensor) are used synonymously (e.g. AVHRR) the both Instrument and composed of child instrument should be recorded. The child instrument information is represented by child entities. The instrument reference may contain granule specific characteristics and operation modes. These characteristics and modes are not checked against the referenced instrument.",
+ "properties": {
+ "ShortName": {
+ "$ref": "#/definitions/ShortNameType"
+ },
+ "Characteristics": {
+ "description": "This entity is used to define item additional attributes (unprocessed, custom data).",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/CharacteristicType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "ComposedOf": {
+ "description": "References to instrument subcomponents in the parent collection's instrument used by various sources in the granule. An instrument subcomponent reference may contain characteristics specific to the granule.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/InstrumentType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "OperationalModes": {
+ "description": "This entity identifies the instrument's operational modes for a specific collection associated with the channel, wavelength, and FOV (e.g., launch, survival, initialization, safe, diagnostic, standby, crosstrack, biaxial, solar calibration).",
+ "type": "array",
+ "items": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 20
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ },
+ "required": ["ShortName"]
+ },
+ "CharacteristicType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity is used to reference characteristics defined in the parent collection.",
+ "properties": {
+ "Name": {
+ "description": "The name of the characteristic attribute.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "Value": {
+ "description": "The value of the Characteristic attribute.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ }
+ },
+ "required": ["Name", "Value"]
+ },
+ "ProjectType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Information describing the scientific endeavor with which the granule is associated.",
+ "properties": {
+ "ShortName": {
+ "description": "The unique identifier by which a project is known. The project is the scientific endeavor associated with the acquisition of the collection.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 40
+ },
+ "Campaigns": {
+ "description": "The name of the campaign/experiment (e.g. Global climate observing system).",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/CampaignType"
+ },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ },
+ "required": ["ShortName"]
+ },
+ "CampaignType": {
+ "description": "Information describing campaign names with which the granule is associated.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 40
+ },
+ "AdditionalAttributeType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "A reference to an additional attribute in the parent collection. The attribute reference may contain a granule specific value that will override the value in the parent collection for this granule. An attribute with the same name must exist in the parent collection.",
+ "properties": {
+ "Name": {
+ "description": "The additional attribute's name.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "Values": {
+ "description": "Values of the additional attribute.",
+ "type": "array",
+ "items": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 500
+ },
+ "minItems": 1
+ }
+ },
+ "required": ["Name", "Values"]
+ },
+ "TilingIdentificationSystemType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity stores the tiling identification system for the granule. The tiling identification system information is an alternative way to express granule's spatial coverage based on a certain two dimensional coordinate system defined by the providers. The name must match the name in the parent collection.",
+ "properties": {
+ "TilingIdentificationSystemName": {
+ "$ref": "#/definitions/TilingIdentificationSystemNameEnum"
+ },
+ "Coordinate1": {
+ "$ref": "#/definitions/TilingCoordinateType"
+ },
+ "Coordinate2": {
+ "$ref": "#/definitions/TilingCoordinateType"
+ }
+ },
+ "required": ["TilingIdentificationSystemName", "Coordinate1", "Coordinate2"]
+ },
+ "TilingCoordinateType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Defines the minimum and maximum value for one dimension of a two dimensional coordinate system.",
+ "properties": {
+ "MinimumValue": {
+ "type": "number"
+ },
+ "MaximumValue": {
+ "type": "number"
+ }
+ },
+ "required": ["MinimumValue"]
+ },
+ "RelatedUrlType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This entity holds all types of online URL associated with the granule such as guide document or ordering site etc.",
+ "properties": {
+ "URL": {
+ "description": "The URL for the relevant resource.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "Type": {
+ "description": "A keyword describing the type of the online resource to this resource.",
+ "$ref": "#/definitions/RelatedUrlTypeEnum"
+ },
+ "Subtype": {
+ "description": "A keyword that provides more detailed information than Type of the online resource to this resource. For example if the Type=VIEW RELATED INFORMATION then the Subtype can be USER'S GUIDE or GENERAL DOCUMENTATION",
+ "$ref": "#/definitions/RelatedUrlSubTypeEnum"
+ },
+ "Description": {
+ "description": "Description of the web page at this URL.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 4000
+ },
+ "Format": {
+ "description": "The format of the resource.",
+ "$ref": "#/definitions/DataFormatType"
+ },
+ "MimeType": {
+ "description": "The mime type of the resource.",
+ "$ref": "#/definitions/MimeTypeEnum"
+ },
+ "Size": {
+ "description": "The size of the resource.",
+ "type": "number"
+ },
+ "SizeUnit": {
+ "description": "Unit of information, together with Size determines total size in bytes of the resource.",
+ "$ref": "#/definitions/FileSizeUnitEnum"
+ }
+ },
+ "required": ["URL", "Type"],
+ "dependencies": {
+ "Size": ["SizeUnit"]
+ }
+ },
+ "ChecksumType": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "Allows the provider to provide a checksum value and checksum algorithm name to allow the user to calculate the checksum.",
+ "properties": {
+ "Value": {
+ "description": "Describes the checksum value for a file.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 128
+ },
+ "Algorithm": {
+ "description": "The algorithm name by which the checksum was calculated. This allows the user to re-calculate the checksum to verify the integrity of the downloaded data.",
+ "type": "string",
+ "enum": ["Adler-32", "BSD checksum", "Fletcher-32", "Fletcher-64", "MD5", "POSIX", "SHA-1", "SHA-2", "SHA-256", "SHA-384", "SHA-512", "SM3", "SYSV"]
+ }
+ },
+ "required": ["Value", "Algorithm"]
+ },
+ "ProjectionNameType": {
+ "description": "Represents the native projection of the granule if the granule has a native projection. The projection name must match the projection that has been defined in the parent collection.",
+ "type": "string",
+ "enum": ["Geographic", "Mercator", "Spherical Mercator", "Space Oblique Mercator", "Universal Transverse Mercator", "Military Grid Reference", "MODIS Sinusoidal System", "Sinusoidal", "Lambert Equal Area", "NSIDC EASE Grid North and South (Lambert EA)", "NSIDC EASE Grid Global", "EASE Grid 2.0 N. Polar", "Plate Carree", "Polar Stereographic", "WELD Albers Equal Area", "Canadian Albers Equal Area Conic", "Lambert Conformal Conic", "State Plane Coordinates", "Albers Equal Area Conic", "Transverse Mercator", "Lambert Azimuthal Equal Area", "UTM Northern Hemisphere", "NAD83 / UTM zone 17N", "UTM Southern Hemisphere", "Cylindrical"]
+ },
+ "GridMappingNameType": {
+ "description": "Represents the native grid mapping of the granule, if the granule is gridded. The grid name must match a grid that has been defined in the parent collection.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "ProviderDateTypeEnum": {
+ "description": "The types of dates that a metadata record can have.",
+ "type": "string",
+ "enum": ["Create", "Insert", "Update", "Delete"]
+ },
+ "FileNameType": {
+ "description": "This field describes the name of the actual file.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "FileSizeUnitEnum": {
+ "description": "The unit of the file size.",
+ "type": "string",
+ "enum": ["KB", "MB", "GB", "TB", "PB", "NA"]
+ },
+ "DistributionMediaType": {
+ "description": "This element defines the media by which the end user can obtain the distributable item. Each media type is listed separately. Examples of media include: CD-ROM, 9 track tape, diskettes, hard drives, online, transparencies, hardcopy, etc.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "GranuleLocalityType" :{
+      "description": "Provides the name by which the spatial/temporal entity is known. This could change on a granule by granule basis. This attribute is paralleled by the AggregationType which applies at the collection level although locality has a more restricted usage. Several locality measures could be included in each granule.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1024
+ },
+ "LatitudeType": {
+ "description": "The latitude value of a spatially referenced point, in degrees. Latitude values range from -90 to 90.",
+ "type": "number",
+ "minimum": -90,
+ "maximum": 90
+ },
+ "LongitudeType": {
+ "description": "The longitude value of a spatially referenced point, in degrees. Longitude values range from -180 to 180.",
+ "type": "number",
+ "minimum": -180,
+ "maximum": 180
+ },
+ "OrbitDirectionTypeEnum": {
+ "description": "Orbit start and end direction. A for ascending orbit and D for descending.",
+ "type": "string",
+ "enum": ["A", "D"]
+ },
+ "ZoneIdentifierType": {
+ "description": "The appropriate numeric or alpha code used to identify the various zones in the granule's grid coordinate system.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "VerticalSpatialDomainTypeEnum": {
+ "type": "string",
+ "enum": ["Atmosphere Layer","Pressure", "Altitude", "Depth"]
+ },
+ "ShortNameType": {
+ "description": "The unique name of the platform or instrument.",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "TilingIdentificationSystemNameEnum": {
+ "type": "string",
+ "enum": ["CALIPSO", "MISR", "MODIS Tile EASE", "MODIS Tile SIN", "SMAP Tile EASE", "WELD Alaska Tile", "WELD CONUS Tile", "WRS-1", "WRS-2"]
+ },
+ "RelatedUrlTypeEnum": {
+ "type": "string",
+ "enum": ["DOWNLOAD SOFTWARE", "EXTENDED METADATA", "GET DATA", "GET DATA VIA DIRECT ACCESS", "GET RELATED VISUALIZATION", "GOTO WEB TOOL", "PROJECT HOME PAGE", "USE SERVICE API", "VIEW RELATED INFORMATION"]
+ },
+ "RelatedUrlSubTypeEnum": {
+ "type": "string",
+ "enum": ["MOBILE APP", "APPEARS", "DATA COLLECTION BUNDLE", "DATA TREE", "DATACAST URL", "DIRECT DOWNLOAD", "EOSDIS DATA POOL", "Earthdata Search", "GIOVANNI", "GoLIVE Portal",
+ "IceBridge Portal", "LAADS", "LANCE", "MIRADOR", "MODAPS", "NOAA CLASS", "NOMADS", "Order", "PORTAL", "Subscribe", "USGS EARTH EXPLORER", "VERTEX", "VIRTUAL COLLECTION",
+ "MAP", "WORLDVIEW", "LIVE ACCESS SERVER (LAS)", "MAP VIEWER", "SIMPLE SUBSET WIZARD (SSW)", "SUBSETTER", "GRADS DATA SERVER (GDS)", "MAP SERVICE", "OPENDAP DATA",
+ "OpenSearch", "SERVICE CHAINING", "TABULAR DATA STREAM (TDS)", "THREDDS DATA", "WEB COVERAGE SERVICE (WCS)", "WEB FEATURE SERVICE (WFS)", "WEB MAP SERVICE (WMS)",
+ "WEB MAP TILE SERVICE (WMTS)", "ALGORITHM DOCUMENTATION", "ALGORITHM THEORETICAL BASIS DOCUMENT (ATBD)", "ANOMALIES", "CASE STUDY", "DATA CITATION POLICY", "DATA QUALITY",
+ "DATA RECIPE", "DELIVERABLES CHECKLIST", "GENERAL DOCUMENTATION", "HOW-TO", "IMPORTANT NOTICE","INSTRUMENT/SENSOR CALIBRATION DOCUMENTATION", "MICRO ARTICLE",
+ "PI DOCUMENTATION", "PROCESSING HISTORY", "PRODUCT HISTORY", "PRODUCT QUALITY ASSESSMENT", "PRODUCT USAGE", "PRODUCTION HISTORY", "PUBLICATIONS", "READ-ME",
+ "REQUIREMENTS AND DESIGN", "SCIENCE DATA PRODUCT SOFTWARE DOCUMENTATION", "SCIENCE DATA PRODUCT VALIDATION", "USER FEEDBACK PAGE", "USER'S GUIDE",
+ "DMR++", "DMR++ MISSING DATA"]
+ },
+ "MimeTypeEnum": {
+ "type": "string",
+ "enum": ["application/json", "application/xml", "application/x-netcdf", "application/x-hdfeos", "application/gml+xml",
+ "application/vnd.google-earth.kml+xml", "image/gif", "image/tiff", "image/bmp", "text/csv",
+ "text/xml", "application/pdf", "application/x-hdf", "application/x-hdf5",
+ "application/octet-stream", "application/vnd.google-earth.kmz", "image/jpeg", "image/png",
+ "image/vnd.collada+xml", "text/html", "text/plain", "application/zip", "application/gzip", "application/tar",
+ "application/tar+gzip", "application/tar+zip", "application/vnd.opendap.dap4.dmrpp+xml", "Not provided"]
+ },
+ "DataFormatType": {
+      "description": "The format that granule data conforms to. While the value is listed as open to any text, CMR requires that it conform to one of the values on the GranuleDataFormat values in the Keyword Management System: https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/GranuleDataFormat",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 80
+ },
+ "MetadataSpecificationType":
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "This object requires any metadata record that is validated by this schema to provide information about the schema.",
+ "properties": {
+ "URL": {
+ "description": "This element represents the URL where the schema lives. The schema can be downloaded.",
+ "type": "string",
+ "enum": ["https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4"]
+ },
+ "Name": {
+ "description": "This element represents the name of the schema.",
+ "type": "string",
+ "enum": ["UMM-G"]
+ },
+ "Version": {
+ "description": "This element represents the version of the schema.",
+ "type": "string",
+ "enum": ["1.6.4"]
+ }
+ },
+ "required": ["URL", "Name", "Version"]
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java b/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java
index 897be71..bce683e 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/aggregator/UMMUtilsTest.java
@@ -112,7 +112,7 @@ public void testEnsureClockwise2CounterclockwisePolygon() throws ParseException
Geometry geometry = wktReader.read(clockwisePolygonWKT);
Coordinate[] coordinates = geometry.getCoordinates();
// the original input array's trailing 3 coordinates will become leading 3 coordinates
- Coordinate[] reversedCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, coordinates);
+ Coordinate[] reversedCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, 0, coordinates);
assertTrue(reversedCoordinates[0].x == Double.valueOf(45.261678) &&
reversedCoordinates[0].y == Double.valueOf(-65.651128));
assertTrue(reversedCoordinates[1].x == Double.valueOf(45.36766) &&
@@ -132,7 +132,7 @@ public void testEnsureCounterclockwise2CounterclockwisePolygon() throws ParseExc
WKTReader wktReader = new WKTReader();
Geometry geometry = wktReader.read(clockwisePolygonWKT);
Coordinate[] coordinates = geometry.getCoordinates();
- Coordinate[] sameSequenceCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, coordinates);
+ Coordinate[] sameSequenceCoordinates = UMMUtils.ensureOrientation(CGAlgorithms.COUNTERCLOCKWISE, 0, coordinates);
assertTrue(sameSequenceCoordinates[0].x == Double.valueOf(-66.1897) &&
sameSequenceCoordinates[0].y == Double.valueOf(63.1972));
assertTrue(sameSequenceCoordinates[1].x == Double.valueOf(-83.1304) &&
diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java b/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java
index d1e80fd..86ce1d0 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/test/AggregatorTestSuite.java
@@ -9,7 +9,9 @@
AggregatorRelease_4_3_0_Test.class,
gov.nasa.cumulus.metadata.test.MetadataFilesToEchoTest.class,
gov.nasa.cumulus.metadata.test.UMMTest.class,
-
+ gov.nasa.cumulus.metadata.test.ImageProcessorTest.class,
+ gov.nasa.cumulus.metadata.test.FootprintProcessorTest.class,
+
})
public class AggregatorTestSuite {
// the class remains completely empty,
diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java
index bd3daab..49bd9c6 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/test/ImageProcessorTest.java
@@ -76,10 +76,10 @@ public void testGetImageDownloadUrl() {
try {
ImageProcessor imageProcessor = new ImageProcessor();
String downloadUri = imageProcessor.getImageDownloadUrl("https://distribution/xxx/bb/download",
- "my-public-bucket", "/collection_name/granuleId/Image1.jpg");
+ "my-public-bucket","/collection_name/granuleId/Image1.jpg");
assertEquals(downloadUri,
"https://distribution/xxx/bb/download/my-public-bucket/collection_name/granuleId/Image1.jpg");
- } catch (URISyntaxException uriSyntaxException) {
+ } catch (URISyntaxException uriSyntaxException) {
System.out.println(uriSyntaxException);
fail();
}
@@ -88,24 +88,26 @@ public void testGetImageDownloadUrl() {
/**
* This test purposely make getImageDownloadUrl throwing URISyntaxException
* by passing illegal character '^' as distribution_url.
- *
+ *
* fail() will force the test case to fail. Since the test is to force URISyntaxException
* to be thrown, it is a failed case if not thrown.
+ *
*/
@Test
public void testGetImageDownloadUrl_URISyntaxException() {
try {
ImageProcessor imageProcessor = new ImageProcessor();
String downloadUri = imageProcessor.getImageDownloadUrl("https://distribution/xxx/bb/download^12334",
- "my-public-bucket", "s3://my-public-bucket/collection_name/granuleId/image1.jpg");
+ "my-public-bucket","s3://my-public-bucket/collection_name/granuleId/image1.jpg");
fail();
- } catch (URISyntaxException uriSyntaxException) {
+ } catch (URISyntaxException uriSyntaxException) {
assertTrue(true);
}
}
@Test
public void testAppendImageUrl() {
+
try {
/**
* From the input message , the distribution_endpoint is set to be:
@@ -115,7 +117,7 @@ public void testAppendImageUrl() {
* "s3://dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/standard-deviation.jpg",
*/
ImageProcessor imageProcessor = new ImageProcessor();
- String newCMRStr = imageProcessor.appendImageUrls(cmaString, cmrString);
+ String newCMRStr = imageProcessor.appendImageUrl(cmaString, cmrString);
JsonObject cmrJsonObj = JsonParser.parseString(newCMRStr).getAsJsonObject();
JsonArray relatedUrls = cmrJsonObj.getAsJsonArray("RelatedUrls");
int count = findTimesOfAppearance(relatedUrls,
@@ -126,18 +128,28 @@ public void testAppendImageUrl() {
"https://distribution_endpoint.jpl.nasa.gov/s3distribute/dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/standard-deviation.jpg");
assertEquals(count, 1);
+ // test description
+ for (JsonElement relatedUrl : relatedUrls) {
+ JsonObject fileObj = relatedUrl.getAsJsonObject();
+ String ummg_downloadUrl = StringUtils.trim(fileObj.get("URL").getAsString());
+ if(ummg_downloadUrl.equals("https://distribution_endpoint.jpl.nasa.gov/s3distribute/dyen-cumulus-public/dataset-image/MODIS_A-JPL-L2P-v2019.0/sst.png")){
+ assertEquals(fileObj.get("Description").getAsString(), "sst");
+ }
+ }
+
} catch (URISyntaxException | IOException pe) {
System.out.println("testAppendImageUrl Error:" + pe);
pe.printStackTrace();
}
}
+
int findTimesOfAppearance(JsonArray relatedUrls, String downloadUrl) {
int count = 0;
downloadUrl = StringUtils.trim(downloadUrl);
for (JsonElement relatedUrl : relatedUrls) {
String ummg_downloadUrl = StringUtils.trim(relatedUrl.getAsJsonObject().get("URL").getAsString());
- if (StringUtils.compare(ummg_downloadUrl, downloadUrl) == 0) count++;
+ if(StringUtils.compare(ummg_downloadUrl, downloadUrl) ==0) count ++;
}
return count;
}
@@ -145,7 +157,7 @@ int findTimesOfAppearance(JsonArray relatedUrls, String downloadUrl) {
@Test
public void testIsDownloadUrlAlreadyExist() {
ImageProcessor imageProcessor = new ImageProcessor();
- JsonObject cmrJsonObj = JsonParser.parseString(cmrString).getAsJsonObject();
+ JsonObject cmrJsonObj = new JsonParser().parse(cmrString).getAsJsonObject();
JsonArray relatedUrls = cmrJsonObj.getAsJsonArray("RelatedUrls");
boolean isAlreadyExist = imageProcessor.isDownloadUrlAlreadyExist(relatedUrls,
"https://jh72u371y2.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/MODIS_A-JPL-L2P-v2019.0/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.sses_standard_deviation.png");
@@ -161,19 +173,19 @@ public void testIsDownloadUrlAlreadyExist() {
@Test
public void testCreateOutputMessage() {
ImageProcessor processor = new ImageProcessor();
- String output = processor.createOutputMessage(cmaString, 334411,
+ String output = processor.createOutputMessage(cmaString, 334411,
new BigInteger("3244"), "granuleId-3344-22.cmr.json", "my-private",
"CMR", "collectionName");
JsonElement jsonElement = JsonParser.parseString(output);
JsonArray granules = jsonElement.getAsJsonObject().get("output").getAsJsonArray();
JsonArray files = granules.get(0).getAsJsonObject().get("files").getAsJsonArray();
- JsonObject foundCMR = processor.getFileJsonObjByFileTrailing(files, ".cmr.json");
+ JsonObject foundCMR = processor.getFileJsonObjByFileTrailing(files, ".cmr.json");
assertEquals(foundCMR.get("bucket").getAsString(), "my-private");
assertEquals(foundCMR.get("key").getAsString(), "CMR/collectionName/granuleId-3344-22.cmr.json");
assertEquals(foundCMR.get("fileName").getAsString(), "granuleId-3344-22.cmr.json");
- Long cmrFileSize = foundCMR.get("size").getAsLong();
- BigInteger revisionId = jsonElement.getAsJsonObject().get("cmrRevisionId").getAsBigInteger();
+ Long cmrFileSize = foundCMR.get("size").getAsLong();
+ BigInteger revisionId = jsonElement.getAsJsonObject().get("cmrRevisionId").getAsBigInteger();
assertEquals(334411, cmrFileSize.longValue());
assertEquals(revisionId.compareTo(new BigInteger("3244")), 0);
}
diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java
index 91d2979..82acfc6 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/test/JSONUtilsTest.java
@@ -4,21 +4,18 @@
import java.io.File;
import java.io.IOException;
-import java.nio.file.Files;
import java.util.ArrayList;
import com.google.gson.JsonParser;
import com.google.gson.JsonObject;
-import gov.nasa.cumulus.metadata.umm.generated.RelatedUrlType;
import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType;
import gov.nasa.cumulus.metadata.umm.generated.TrackType;
import gov.nasa.cumulus.metadata.util.JSONUtils;
import org.apache.commons.io.FileUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.Test;
@@ -44,53 +41,4 @@ public void testGsonToJSONObj() throws IOException, ParseException{
assertEquals(productionDateTime, "2020-02-29T12:20:15.000Z");
}
- @Test
- public void testIsStartWithStrings() {
- String elements[] = {"http", "https"};
- String httpStr = "http://distribution_url/resource.nc";
- assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), true);
- httpStr = "https://distribution_url/resource.nc";
- assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), true);
- httpStr = " http://distribution_url/resource.nc"; // test with space
- assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), true);
-
- httpStr = " s3://my_bucket/my_folder/resource.nc"; // test with space
- assertEquals(JSONUtils.isStrStarsWithIgnoreCase(httpStr, elements), false);
-
-
- }
- @Test
- public void testIsGETDataType() {
- assertEquals(JSONUtils.isGETDataType("GET DATA"), true);
- assertEquals(JSONUtils.isGETDataType(" GET DATA "), true);
- assertEquals(JSONUtils.isGETDataType(" Get data "), true);
- //test with space
- assertEquals(JSONUtils.isGETDataType(" GEET Type "), false);
- }
-
- @Test
- public void testRelatedUrlsSorting() throws ParseException{
- String cmr_filename= "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json";
- try {
- ClassLoader classLoader = getClass().getClassLoader();
- File inputCMRJsonFile = new File(classLoader.getResource(cmr_filename).getFile());
- String cmrString = new String(Files.readAllBytes(inputCMRJsonFile.toPath()));
- JSONParser parser = new JSONParser();
- JSONObject json = (JSONObject) parser.parse(cmrString);
- json = JSONUtils.sortRelatedUrls(json);
- //check the first item must be http/https resource scientific data
- JSONArray relatedUrlsArray = (JSONArray)json.get("RelatedUrls");
- JSONObject firstJSONObject = (JSONObject)relatedUrlsArray.get(0);
- assertEquals(firstJSONObject.get("URL").toString(), "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-protected/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc");
- assertEquals(firstJSONObject.get("Type").toString(), RelatedUrlType.RelatedUrlTypeEnum.GET_DATA.value());
- //check the 6th item must be http/https resource scientific data
- JSONObject sixthJSONObject = (JSONObject)relatedUrlsArray.get(6);
- assertEquals(sixthJSONObject.get("URL").toString(), "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc");
- assertEquals(sixthJSONObject.get("Type").toString(), RelatedUrlType.RelatedUrlTypeEnum.GET_DATA.value());
- } catch (IOException ioe) {
- System.out.println("Test initialization failed: " + ioe);
- ioe.printStackTrace();
- }
- }
-
}
diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java
index f47cc1d..6648be2 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataAggregatorLambdaTest.java
@@ -1,6 +1,8 @@
package gov.nasa.cumulus.metadata.test;
import gov.nasa.cumulus.metadata.aggregator.MetadataAggregatorLambda;
+import gov.nasa.cumulus.metadata.state.MENDsIsoXMLSpatialTypeEnum;
+import org.json.simple.JSONArray;
import org.json.simple.parser.ParseException;
import org.junit.Before;
import org.junit.Test;
@@ -8,8 +10,10 @@
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
+import java.util.HashSet;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class MetadataAggregatorLambdaTest {
@@ -37,4 +41,42 @@ public void testGetConceptId() throws ParseException {
assertEquals(conceptId, "G1238611022-POCUMULUS");
}
+ @Test
+ public void testGetIsoXMLSpatialTypeStr() {
+ MetadataAggregatorLambda lambda = new MetadataAggregatorLambda();
+ assertEquals(lambda.getIsoXMLSpatialTypeStr("footprint"), "footprint");
+ assertEquals(lambda.getIsoXMLSpatialTypeStr("orbit"), "orbit");
+ assertEquals(lambda.getIsoXMLSpatialTypeStr("bbox"), "bbox");
+ assertEquals(lambda.getIsoXMLSpatialTypeStr("xxxx"), "");
+ }
+
+ @Test
+ public void testCreateIsoXMLSpatialTypeSet() {
+ MetadataAggregatorLambda lambda = new MetadataAggregatorLambda();
+ org.json.simple.JSONArray array = new JSONArray();
+ array.add("footprint");
+ array.add("orbit");
+ //HashSet h = lambda.createIsoXMLSpatialTypeSet("[footprint,orbit]");
+ HashSet h = lambda.createIsoXMLSpatialTypeSet(array);
+ assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.FOOTPRINT));
+ assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.ORBIT));
+ assertFalse(h.contains(MENDsIsoXMLSpatialTypeEnum.BBOX));
+ assertFalse(h.contains(MENDsIsoXMLSpatialTypeEnum.NONE));
+
+ array.clear();
+ array.add("footprint");
+ array.add("orbit");
+ array.add("bbox");
+ array.add("eebb");
+ array.add("ccmm");
+ //h = lambda.createIsoXMLSpatialTypeSet("[footprint,orbit,bbox,eebb,ccmm]");
+ h = lambda.createIsoXMLSpatialTypeSet(array);
+ assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.FOOTPRINT));
+ assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.ORBIT));
+ assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.BBOX));
+ assertTrue(h.contains(MENDsIsoXMLSpatialTypeEnum.NONE));
+        // last 2 items in the input array will result in NONE added into the HashSet and overwrite each other
+ assertTrue(h.size()==4);
+ }
+
}
diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java
index 85ce95d..b65753a 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/test/MetadataFilesToEchoTest.java
@@ -11,6 +11,7 @@
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
+import cumulus_message_adapter.message_parser.AdapterLogger;
import gov.nasa.cumulus.metadata.aggregator.*;
import gov.nasa.cumulus.metadata.umm.adapter.UMMGCollectionAdapter;
@@ -20,6 +21,8 @@
import gov.nasa.cumulus.metadata.umm.generated.TrackPassTileType;
import gov.nasa.cumulus.metadata.umm.generated.TrackType;
+import gov.nasa.podaac.inventory.model.GranuleCharacter;
+import org.apache.commons.lang3.StringUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
@@ -321,7 +324,47 @@ public void testMarshellCyclePassTileSceneStrToAchiveType() throws IOException,
List tileValues = additionalAttributeType.getValues(); //7F, 8F, 9F, 10F
assertEquals(tileValues.get(0), "7F");
assertEquals(tileValues.get(3), "10F");
+ /**
+         * The following section tests an empty pass. Ex. Cycle: 001, Pass: []
+ */
+ input = "Cycle: 001, Pass: []";
+ isoGranule = mfte.createIsoCyclePassTile(input);
+ trackType = isoGranule.getTrackType();
+ assertEquals(trackType.getCycle(), new Integer("1"));
+
+ /**
+ * Cycle: 483 Pass: [10, Tiles: 72-84R 111-111R 72-84L 110-111L]
+ */
+ input = "Cycle: 483 Pass: [10, Tiles: 72-84R 111-111R 72-84L 110-111L]";
+ isoGranule = mfte.createIsoCyclePassTile(input);
+ trackType = isoGranule.getTrackType();
+ assertEquals(trackType.getCycle(), new Integer("483"));
+
+ List passes = trackType.getPasses();
+ tiles = passes.get(0).getTiles();
+ assertEquals(tiles.size(), 29);
+ assertEquals(tiles.get(0), "72R");
+ assertEquals(tiles.get(28), "111L");
+ input = "Cycle: 406, Pass: [40, Tiles: 4-5L 4-5R] [41, Tiles: 6R 6L], BasinID: 123";
+ isoGranule = mfte.createIsoCyclePassTile(input);
+ trackType = isoGranule.getTrackType();
+ assertEquals(trackType.getCycle(), new Integer("406"));
+
+ passes = trackType.getPasses();
+ tiles = passes.get(0).getTiles();
+ assertEquals(tiles.size(), 4);
+ assertEquals(tiles.get(0), "4L");
+ assertEquals(tiles.get(1), "5L");
+ tiles = passes.get(1).getTiles();
+ assertEquals(tiles.size(), 2);
+ assertEquals(tiles.get(0), "6R");
+ assertEquals(tiles.get(1), "6L");
+ additionalAttributeTypes = isoGranule.getAdditionalAttributeTypes();
+ additionalAttributeType = additionalAttributeTypes.get(2);
+ assertEquals(additionalAttributeType.getName(), "BasinID");
+ List basinIdStrs = additionalAttributeType.getValues();
+ assertEquals(basinIdStrs.get(0), "123");
}
@Test
@@ -373,6 +416,27 @@ public void testReadIsoMendsMetadataFile() throws IOException, ParseException, X
assertEquals(tiles.get(6), "8R");
List additionalAttributeTypes = isoGranule.getAdditionalAttributeTypes();
assertEquals(additionalAttributeTypes.size(), 3);
+
+ /**
+ * Test the behavior of reading SWOT ISO MENDS Orbit and Footprint
+ */
+ file = new File(classLoader.getResource("SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml").getFile());
+ cfgFile = new File(classLoader.getResource("MODIS_T-JPL-L2P-v2014.0.cmr.cfg").getFile());
+ mfte = new MetadataFilesToEcho(true);
+
+
+ mfte.readConfiguration(cfgFile.getAbsolutePath());
+ doc = mfte.makeDoc(file.getAbsolutePath());
+ xpath = mfte.makeXpath(doc);
+ isoGranule = mfte.readIsoMendsMetadataFile("s3://mybucket/mygranule.nc", doc, xpath);
+ isoGranule.getOrbit();
+ Set granuleCharacters = isoGranule.getGranuleCharacterSet();
+ for (GranuleCharacter granuleCharacter : granuleCharacters) {
+ if (granuleCharacter.getDatasetElement().getElementDD().getShortName().equals("line")) {
+ assertTrue(StringUtils.equals("46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778",
+ StringUtils.trim(granuleCharacter.getValue())));
+ }
+ }
}
@@ -401,6 +465,33 @@ public void testReadIsoMendsMetadataFile_Pass_Cycle_LeadingZeros() throws IOExce
List additionalAttributeTypes = isoGranule.getAdditionalAttributeTypes();
assertEquals(additionalAttributeTypes.size(), 0);
}
+
+ @Test
+ public void testReadSwotArchoveMetadataFile_Pass_Cycle_LeadingZeros() throws IOException, ParseException, XPathExpressionException, ParserConfigurationException, SAXException{
+ ClassLoader classLoader = getClass().getClassLoader();
+ File file = new File(classLoader.getResource("SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml").getFile());
+ File cfgFile = new File(classLoader.getResource("MODIS_T-JPL-L2P-v2014.0.cmr.cfg").getFile());
+ MetadataFilesToEcho mfte = new MetadataFilesToEcho(true);
+
+ Document doc = null;
+ XPath xpath = null;
+ mfte.readConfiguration(cfgFile.getAbsolutePath());
+ doc = mfte.makeDoc(file.getAbsolutePath());
+ xpath = mfte.makeXpath(doc);
+ mfte.readSwotArchiveXmlFile(file.getAbsolutePath());
+ UMMGranule granule = (UMMGranule) mfte.getGranule();
+ // Verify the values here:
+ TrackType trackType = granule.getTrackType();
+ assertEquals(trackType.getCycle(), new Integer(403));
+ List trackPassTileTypes = trackType.getPasses();
+ assertEquals(trackPassTileTypes.size(), 1);
+ TrackPassTileType trackPassTileType = trackPassTileTypes.get(0);
+ assertEquals(trackPassTileType.getPass(), new Integer(8));
+ List tiles = trackPassTileType.getTiles();
+ assertEquals(tiles.size(), 1);
+ List additionalAttributeTypes = granule.getAdditionalAttributeTypes();
+ assertEquals(additionalAttributeTypes.size(), 1);
+ }
@Test
public void testReadIsoMendsMetadataFileAdditionalFields_publishAll() throws ParseException, IOException, URISyntaxException, XPathExpressionException, ParserConfigurationException, SAXException {
diff --git a/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java b/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java
index 901569e..646c28c 100644
--- a/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java
+++ b/src/test/java/gov/nasa/cumulus/metadata/test/UMMTest.java
@@ -94,7 +94,7 @@ public void testIsoRequiredFields() throws IOException, ParseException, XPathExp
fail("Did not find exactly one Insert and one Update field in ProviderDates");
}
assertNotNull(umm.get("MetadataSpecification"));
- testMetadataSpec(umm, "1.6.3");
+ testMetadataSpec(umm, "1.6.5");
// These tests are based on testCollection.config, and will need
// to be changed if the test resource changes.
JSONObject cr = (JSONObject)umm.get("CollectionReference");
@@ -129,10 +129,10 @@ public void testIso2UmmMappings()
throws XPathExpressionException, ParserConfigurationException, IOException,
SAXException, ParseException, URISyntaxException {
/*
- * These tests are based on the ISO file located in the
+ * These tests are based on the ISO file located in the
* src/test/resources directory. They validate the mapping of ISO to
* UMM-G. If the underlying ISO file changes, these tests will need to
- * be updated.
+ * be updated.
*/
//given an ISO file...
//Granule_ISOMENDS_SWOT_Sample_L1_HR_TileBased_20181202_edit2.xml
@@ -140,38 +140,38 @@ public void testIso2UmmMappings()
String testFile = "Granule_ISOMENDS_SWOT_Sample_L1_HR_TileBased_20181202_edit2.xml";
String testFilePath = testDir + File.separator + testFile;
-
+
String testConfigFile = "testCollection.config";
String testConfigFilePath = testDir + File.separator + testConfigFile;
-
+
String granuleId = "SWOT_L1B_HR_SLC_001_005_001L_20210612T072103_20210612T07215_PGA200_03";
-
+
MetadataFilesToEcho mtfe = new MetadataFilesToEcho(true);
-
+
mtfe.readConfiguration(testConfigFilePath);
mtfe.readIsoMetadataFile(testFilePath, "s3://public/datafile.nc");
-
+
mtfe.getGranule().setName(granuleId);
//write UMM-G to file
mtfe.writeJson( testDir + "/" + granuleId + ".cmr.json");
- //the CMR file should have the following values...
-
+ //the CMR file should have the following values...
+
JSONParser parser = new JSONParser();
Object obj = parser.parse(new FileReader(testDir + "/" + granuleId + ".cmr.json"));
JSONObject umm = (JSONObject) obj;
-
-
+
+
System.out.println(String.format("GranuleUR is not provided by ISO XML, "
+ "defined and supplied via datafile name - suffix: %s", granuleId));
assertEquals(granuleId,umm.get("GranuleUR"));
-
+
//InputGranules
JSONArray a = (JSONArray) umm.get("InputGranules");
- String[] _inputs =
+ String[] _inputs =
{
"SWOT_L0B_HR_Frame_001_005_011F_20210612T072103_20210612T072113_PGA200_03.nc",
- "SWOT_L0B_HR_Frame_001_005_012F_20210612T072113_20210612T072123_PGA200_01.nc",
+ "SWOT_L0B_HR_Frame_001_005_012F_20210612T072113_20210612T072123_PGA200_01.nc",
"SWOT_L0B_HR_Frame_001_005_012F_20210612T072113_20210612T072123_PGA200_01.rc.xml"
};
ArrayList inputs = new ArrayList(3);
@@ -183,22 +183,22 @@ public void testIso2UmmMappings()
fail("input array does not contain "+a.get(i));
}
}
-
-
+
+
//TemporalExtent/RangeDateTime
-
+
JSONObject rdt = (JSONObject)((JSONObject) umm.get("TemporalExtent")).get("RangeDateTime");
assertEquals((String)rdt.get("BeginningDateTime"), "2018-07-17T00:00:00.000Z");
assertEquals((String)rdt.get("EndingDateTime"), "2018-07-17T23:59:59.999Z");
-
+
//MetadataSpecification
testMetadataSpec(umm, "1.6.3");
-
+
//Platforms
JSONObject p = (JSONObject) ((JSONArray)umm.get("Platforms")).get(0);
assertEquals(p.get("ShortName"),"SWOT");
assertEquals(((JSONObject)((JSONArray)p.get("Instruments")).get(0)).get("ShortName"),"KaRIn");
-
+
//ProviderDates
/*
* These are generated by the mtfe code, and so we don't test them for an exact date.
@@ -214,14 +214,14 @@ else if(date.get("Type").equals("Update")){
else
fail();
}*/
-
+
//MeasuredParameters
JSONObject param = (JSONObject)((JSONArray)umm.get("MeasuredParameters")).get(0);
assertEquals("amplitude_hh", param.get("ParameterName"));
-
+
assertEquals(20.5, ((JSONObject)param.get("QAStats")).get("QAPercentMissingData"));
assertEquals(10.5, ((JSONObject)param.get("QAStats")).get("QAPercentOutOfBoundsData"));
-
+
//SpatialExtent
JSONObject hsd = (JSONObject)((JSONObject)umm.get("SpatialExtent")).get("HorizontalSpatialDomain");
JSONObject orbit = (JSONObject) hsd.get("Orbit");
@@ -236,13 +236,13 @@ else if(date.get("Type").equals("Update")){
TODO - convert this into a split test, one for ISO with orbit, and one for ISO without
JSONObject geom = (JSONObject) hsd.get("Geometry");
-
+
//Geometry/GPolygons
JSONObject bndry = (JSONObject)((JSONObject)((JSONArray) geom.get("GPolygons")).get(0)).get("Boundary");
JSONArray pnts = (JSONArray) bndry.get("Points");
-
+
for(int i=0; i< pnts.size(); i++){
-
+
JSONObject pt = (JSONObject) pnts.get(i);
if(((Double)pt.get("Latitude")).equals(new Double(-11))){
assertEquals(((Double)pt.get("Longitude")),new Double(-17));
@@ -262,16 +262,16 @@ else if(date.get("Type").equals("Update")){
assertEquals(br.get("EastBoundingCoordinate"), new Double(179.999));
assertEquals(br.get("NorthBoundingCoordinate"), new Double(85.045));
*/
-
+
//Track
JSONObject track = (JSONObject) hsd.get("Track");
assertEquals(track.get("Cycle"), new Long(5));
JSONArray passes = (JSONArray) track.get("Passes");
-
+
ArrayList passVals = new ArrayList(Arrays.asList(new Long(40), new Long(41), new Long(42)));
ArrayList tileVals= new ArrayList(Arrays.asList("4L","5L","5R", "6R", "7F"));
-
-
+
+
for(int i = 0; i < passes.size(); i++){
JSONObject pass = (JSONObject) passes.get(i);
assertTrue(passVals.contains(pass.get("Pass")));
@@ -281,15 +281,15 @@ else if(date.get("Type").equals("Update")){
assertTrue(tileVals.contains(tiles.get(j)));
}
}
-
+
//PGEVersionClass
JSONObject pgev = (JSONObject) umm.get("PGEVersionClass");
assertEquals("PGE_L1B_HR_SLC", pgev.get("PGEName"));
assertEquals("1.1.4", pgev.get("PGEVersion"));
-
+
//DataGranule
JSONObject dg = (JSONObject)umm.get("DataGranule");
-
+
//DataGranule/ArchiveAndDistributionInformation
JSONArray files = (JSONArray) dg.get("ArchiveAndDistributionInformation");
for(int i = 0; i < files.size(); i++){
@@ -322,11 +322,11 @@ else if(f.get("Name").equals("SWOT_L1B_HR_SLC_001_005_001L_20210612T072103_20210
fail("Could not find file with name " + f.get("Name"));
}
}
-
-
+
+
//DataGranule/DayNightFlag
assertEquals("Unspecified",dg.get("DayNightFlag"));
-
+
//DataGranule/Identifiers
JSONArray ids = (JSONArray) dg.get("Identifiers");
for (int i =0; i< ids.size(); i++){
@@ -338,7 +338,7 @@ else if(id.get("IdentifierType").equals("CRID")){
assertEquals("PGA200",id.get("Identifier"));
}
else if(id.get("IdentifierType").equals("Other")){
-
+
if(id.get("IdentifierName").equals("SASVersionId")){
assertEquals("7.8.9",id.get("Identifier"));
}else if(id.get("IdentifierName").equals("PGEVersionId")){
@@ -355,16 +355,16 @@ else if(id.get("IdentifierType").equals("Other")){
fail("Could not find identifier " + id.get("IdentifierType"));
}
}
-
+
assertEquals("One Post-Calibration bulk reprocessing and one End-of-mission bulk reprocessing",dg.get("ReprocessingPlanned"));
assertEquals("2018-07-19T12:01:01.000Z",dg.get("ProductionDateTime"));
-
+
//CollectionReference
JSONObject cr = (JSONObject)umm.get("CollectionReference");
assertEquals("1",cr.get("Version"));
assertEquals("L1B_HR_SLC",cr.get("ShortName"));
-
-
+
+
/*
* "RelatedUrls": [
{
@@ -382,112 +382,112 @@ else if(id.get("IdentifierType").equals("Other")){
//fail("Not yet implemented");
}
- @Test
- public void testSentinelManifest2UmmMappings()
- throws XPathExpressionException, ParserConfigurationException,
- IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
- String testFile = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6.xfdumanifest.xml";
- String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config";
- String granuleId = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6";
-
- JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
-
- //TemporalExtent/RangeDateTime
- JSONObject rdt = (JSONObject) ((JSONObject) umm.get("TemporalExtent" )).get("RangeDateTime" );
- assertEquals((String) rdt.get("BeginningDateTime" ), "2021-04-14T00:14:38.000Z" );
- assertEquals((String) rdt.get("EndingDateTime" ), "2021-04-14T00:21:49.532Z" );
-
- //SpatialExtent
- JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
-
- //Track
- JSONObject track = (JSONObject) hsd.get("Track" );
- assertEquals(track.get("Cycle" ), new Long(2));
- JSONArray passes = (JSONArray) track.get("Passes" );
- assertEquals(((JSONObject) passes.get(0)).get("Pass"), new Long(127));
-
- JSONObject geom = (JSONObject) hsd.get("Geometry" );
- //Footprint
- // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
- // 2 polygons
- Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
- JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
-
- JSONObject firstPoint = (JSONObject) pnts.get(0);
- assertEquals(new Double(-45.4871), ((Double) firstPoint.get("Latitude" )));
- assertEquals(new Double(-132.544), ((Double) firstPoint.get("Longitude" )));
-
- JSONObject midPoint = (JSONObject) pnts.get(3);
- assertEquals(new Double( -51.5451), ((Double) midPoint.get("Latitude" )));
- assertEquals(new Double(-139.042), ((Double) midPoint.get("Longitude" )));
-
- JSONObject lastPoint = (JSONObject) pnts.get(5);
- assertEquals(new Double(-45.4871), ((Double) lastPoint.get("Latitude" )));
- assertEquals(new Double(-132.544), ((Double) lastPoint.get("Longitude" )));
-
- //DataGranule
- JSONObject dg = (JSONObject) umm.get("DataGranule" );
- assertEquals("2020-04-29T14:33:31.000Z", dg.get("ProductionDateTime" ));
-
- //CollectionReference
- JSONObject cr = (JSONObject) umm.get("CollectionReference" );
- assertEquals("E", cr.get("Version" ));
- assertEquals("JASON_CS_S6A_L0_ALT_ACQ", cr.get("ShortName" ));
-
- JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0);
- assertEquals("ProviderDataSource", productName.get("Name"));
- assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0));
- }
-
- @Test
- public void testSentinelManifestOverIDL()
- throws XPathExpressionException, ParserConfigurationException,
- IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
- // this test file will split to 3 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon
- String testFile = "S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02.xfdumanifest.xml";
- String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config";
- String granuleId ="S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02";
-
- JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
-
- //SpatialExtent
- JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
-
- JSONObject geom = (JSONObject) hsd.get("Geometry" );
- //Footprint
- // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
- // 2 polygons
- Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
- JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
-
- JSONObject firstPoint = (JSONObject) pnts.get(0);
- assertEquals(Double.valueOf(66.644644), ((Double) firstPoint.get("Latitude" )));
- assertEquals(Double.valueOf(140.378601), ((Double) firstPoint.get("Longitude" )));
-
- JSONObject midPoint = (JSONObject) pnts.get(3);
- assertEquals(Double.valueOf(58.947656), ((Double) midPoint.get("Latitude" )));
- assertEquals(Double.valueOf(180.0), ((Double) midPoint.get("Longitude" )));
-
- JSONObject lastPoint = (JSONObject) pnts.get(5);
- assertEquals(Double.valueOf(63.594104), ((Double) lastPoint.get("Latitude" )));
- assertEquals(Double.valueOf(168.727685), ((Double) lastPoint.get("Longitude" )));
-
- //2nd polygon
- boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(1))).get("Boundary");
- pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
-
- firstPoint = (JSONObject) pnts.get(0);
- assertEquals(Double.valueOf(59.804021000000006), ((Double) firstPoint.get("Latitude" )));
- assertEquals(Double.valueOf(-180), ((Double) firstPoint.get("Longitude" )));
-
- midPoint = (JSONObject) pnts.get(15);
- assertEquals(Double.valueOf(-66.647778), ((Double) midPoint.get("Latitude" )));
- assertEquals(Double.valueOf(-53.840211), ((Double) midPoint.get("Longitude" )));
-
- lastPoint = (JSONObject) pnts.get(29);
- assertEquals(Double.valueOf(56.013938), ((Double) lastPoint.get("Latitude" )));
- assertEquals(Double.valueOf(-171.655155), ((Double) lastPoint.get("Longitude" )));
- }
+// @Test
+// public void testSentinelManifest2UmmMappings()
+// throws XPathExpressionException, ParserConfigurationException,
+// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
+// String testFile = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6.xfdumanifest.xml";
+// String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config";
+// String granuleId = "S6A_P4_0__ACQ_____20210414T001438_20210414T002150_20200429T143331_0432_002_127_063_EUM__OPE_NR_TST.SEN6";
+//
+// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
+//
+// //TemporalExtent/RangeDateTime
+// JSONObject rdt = (JSONObject) ((JSONObject) umm.get("TemporalExtent" )).get("RangeDateTime" );
+// assertEquals((String) rdt.get("BeginningDateTime" ), "2021-04-14T00:14:38.000Z" );
+// assertEquals((String) rdt.get("EndingDateTime" ), "2021-04-14T00:21:49.532Z" );
+//
+// //SpatialExtent
+// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
+//
+// //Track
+// JSONObject track = (JSONObject) hsd.get("Track" );
+// assertEquals(track.get("Cycle" ), new Long(2));
+// JSONArray passes = (JSONArray) track.get("Passes" );
+// assertEquals(((JSONObject) passes.get(0)).get("Pass"), new Long(127));
+//
+// JSONObject geom = (JSONObject) hsd.get("Geometry" );
+// //Footprint
+// // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
+// // 2 polygons
+// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
+// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
+//
+// JSONObject firstPoint = (JSONObject) pnts.get(0);
+// assertEquals(new Double(-45.4871), ((Double) firstPoint.get("Latitude" )));
+// assertEquals(new Double(-132.544), ((Double) firstPoint.get("Longitude" )));
+//
+// JSONObject midPoint = (JSONObject) pnts.get(3);
+// assertEquals(new Double( -51.5451), ((Double) midPoint.get("Latitude" )));
+// assertEquals(new Double(-139.042), ((Double) midPoint.get("Longitude" )));
+//
+// JSONObject lastPoint = (JSONObject) pnts.get(5);
+// assertEquals(new Double(-45.4871), ((Double) lastPoint.get("Latitude" )));
+// assertEquals(new Double(-132.544), ((Double) lastPoint.get("Longitude" )));
+//
+// //DataGranule
+// JSONObject dg = (JSONObject) umm.get("DataGranule" );
+// assertEquals("2020-04-29T14:33:31.000Z", dg.get("ProductionDateTime" ));
+//
+// //CollectionReference
+// JSONObject cr = (JSONObject) umm.get("CollectionReference" );
+// assertEquals("E", cr.get("Version" ));
+// assertEquals("JASON_CS_S6A_L0_ALT_ACQ", cr.get("ShortName" ));
+//
+// JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0);
+// assertEquals("ProviderDataSource", productName.get("Name"));
+// assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0));
+// }
+
+// @Test
+// public void testSentinelManifestOverIDL()
+// throws XPathExpressionException, ParserConfigurationException,
+// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
+// // this test file will split to 3 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon
+// String testFile = "S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02.xfdumanifest.xml";
+// String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config";
+// String granuleId ="S6A_P4_2__LR_STD__ST_022_132_20210619T002429_20210619T012042_F02";
+//
+// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
+//
+// //SpatialExtent
+// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
+//
+// JSONObject geom = (JSONObject) hsd.get("Geometry" );
+// //Footprint
+// // In this case, the footprint crosses the dateline (IDL), so it is divided into
+// // multiple polygons; the 1st and 3rd segments are reconnected into one polygon.
+// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
+// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
+//
+// JSONObject firstPoint = (JSONObject) pnts.get(0);
+// assertEquals(Double.valueOf(66.644644), ((Double) firstPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(140.378601), ((Double) firstPoint.get("Longitude" )));
+//
+// JSONObject midPoint = (JSONObject) pnts.get(3);
+// assertEquals(Double.valueOf(58.947656), ((Double) midPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(180.0), ((Double) midPoint.get("Longitude" )));
+//
+// JSONObject lastPoint = (JSONObject) pnts.get(5);
+// assertEquals(Double.valueOf(63.594104), ((Double) lastPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(168.727685), ((Double) lastPoint.get("Longitude" )));
+//
+// //2nd polygon
+// boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(1))).get("Boundary");
+// pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
+//
+// firstPoint = (JSONObject) pnts.get(0);
+// assertEquals(Double.valueOf(59.804021000000006), ((Double) firstPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(-180), ((Double) firstPoint.get("Longitude" )));
+//
+// midPoint = (JSONObject) pnts.get(15);
+// assertEquals(Double.valueOf(-66.647778), ((Double) midPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(-53.840211), ((Double) midPoint.get("Longitude" )));
+//
+// lastPoint = (JSONObject) pnts.get(29);
+// assertEquals(Double.valueOf(56.013938), ((Double) lastPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(-171.655155), ((Double) lastPoint.get("Longitude" )));
+// }
@Test
public void testSentinelManifestL0TooFewCoordinates()
@@ -513,39 +513,39 @@ public void testSentinelManifestL0TooFewCoordinates()
assertEquals(gbbx.get("NorthBoundingCoordinate"), Double.valueOf(90.00));
}
- @Test
- public void testSentinelManifestNotOverIDL()
- throws XPathExpressionException, ParserConfigurationException,
- IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
- // this test file will split to 1 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon
- String testFile = "S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02.xfdumanifest.xml";
- String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config";
- String granuleId ="S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02";
-
- JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
-
- //SpatialExtent
- JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
-
- JSONObject geom = (JSONObject) hsd.get("Geometry" );
- //Footprint
- // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
- // 2 polygons
- Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
- JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
-
- JSONObject firstPoint = (JSONObject) pnts.get(0);
- assertEquals(Double.valueOf(-65.649768), ((Double) firstPoint.get("Latitude" )));
- assertEquals(Double.valueOf(-25.561001), ((Double) firstPoint.get("Longitude" )));
-
- JSONObject midPoint = (JSONObject) pnts.get(16);
- assertEquals(Double.valueOf(65.64749), ((Double) midPoint.get("Latitude" )));
- assertEquals(Double.valueOf(140.321732), ((Double) midPoint.get("Longitude" )));
-
- JSONObject lastPoint = (JSONObject) pnts.get(31);
- assertEquals(Double.valueOf(-62.663981), ((Double) lastPoint.get("Latitude" )));
- assertEquals(Double.valueOf(2.525361), ((Double) lastPoint.get("Longitude" )));
- }
+// @Test
+// public void testSentinelManifestNotOverIDL()
+// throws XPathExpressionException, ParserConfigurationException,
+// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
+// // this test file will split to 1 geos (over dateline) and we will reconnect the 1st and 3rd line to polygon
+// String testFile = "S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02.xfdumanifest.xml";
+// String testConfigFile = "JASON_CS_S6A_L0_ALT_ACQ.config";
+// String granuleId ="S6A_P4_2__LR_STD__ST_022_131_20210618T232816_20210619T002429_F02";
+//
+// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
+//
+// //SpatialExtent
+// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
+//
+// JSONObject geom = (JSONObject) hsd.get("Geometry" );
+// //Footprint
+// // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
+// // 2 polygons
+// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
+// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
+//
+// JSONObject firstPoint = (JSONObject) pnts.get(0);
+// assertEquals(Double.valueOf(-65.649768), ((Double) firstPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(-25.561001), ((Double) firstPoint.get("Longitude" )));
+//
+// JSONObject midPoint = (JSONObject) pnts.get(16);
+// assertEquals(Double.valueOf(65.64749), ((Double) midPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(140.321732), ((Double) midPoint.get("Longitude" )));
+//
+// JSONObject lastPoint = (JSONObject) pnts.get(31);
+// assertEquals(Double.valueOf(-62.663981), ((Double) lastPoint.get("Latitude" )));
+// assertEquals(Double.valueOf(2.525361), ((Double) lastPoint.get("Longitude" )));
+// }
@Test
/**
@@ -607,42 +607,42 @@ public void testSentinelAuxManifest2UmmMappings()
assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0));
}
- @Test
- public void testSentinelManifestL1Footprint()
- throws XPathExpressionException, ParserConfigurationException,
- IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
- String testFile = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6.xfdumanifest.xml";
- String testConfigFile = "JASON_CS_S6A_L1_ALT_ECHO_AX.config";
- String granuleId = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6";
- JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
-
- //SpatialExtent
- JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
-
- JSONObject geom = (JSONObject) hsd.get("Geometry" );
- assertNull(geom.get("BoundingRectangles"));
-
- //Footprint
- // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
- // 2 polygons
- Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
- JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
-
- assertEquals(33, pnts.size());
-
- DecimalFormat decimalFormat = new DecimalFormat("###.###");
- JSONObject firstPoint = (JSONObject) pnts.get(0);
- assertEquals("-64.654", decimalFormat.format(firstPoint.get("Latitude")));
- assertEquals("-167.571", decimalFormat.format(firstPoint.get("Longitude")));
-
- JSONObject lastPoint = (JSONObject) pnts.get(31);
- assertEquals("-58.844", decimalFormat.format(lastPoint.get("Latitude")));
- assertEquals("-144.155", decimalFormat.format(lastPoint.get("Longitude")));
-
- JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0);
- assertEquals("ProviderDataSource", productName.get("Name"));
- assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0));
- }
+// @Test
+// public void testSentinelManifestL1Footprint()
+// throws XPathExpressionException, ParserConfigurationException,
+// IOException, SAXException, ParseException, java.text.ParseException, URISyntaxException {
+// String testFile = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6.xfdumanifest.xml";
+// String testConfigFile = "JASON_CS_S6A_L1_ALT_ECHO_AX.config";
+// String granuleId = "S6A_P4_1B_LR______20210412T234541_20210413T004154_20200428T194602_3373_002_100_050_EUM__OPE_NT_TST.SEN6";
+// JSONObject umm = parseXfduManifest(testFile, testConfigFile, granuleId);
+//
+// //SpatialExtent
+// JSONObject hsd = (JSONObject) ((JSONObject) umm.get("SpatialExtent" )).get("HorizontalSpatialDomain" );
+//
+// JSONObject geom = (JSONObject) hsd.get("Geometry" );
+// assertNull(geom.get("BoundingRectangles"));
+//
+// //Footprint
+// // In this case, we have a small polygon which does NOT cross dateline. Hence, it will not be divided to
+// // 2 polygons
+// Object boundaryObj = ((JSONObject)(((JSONArray) geom.get("GPolygons")).get(0))).get("Boundary");
+// JSONArray pnts = (JSONArray) ((JSONObject)boundaryObj).get("Points");
+//
+// assertEquals(33, pnts.size());
+//
+// DecimalFormat decimalFormat = new DecimalFormat("###.###");
+// JSONObject firstPoint = (JSONObject) pnts.get(0);
+// assertEquals("-64.654", decimalFormat.format(firstPoint.get("Latitude")));
+// assertEquals("-167.571", decimalFormat.format(firstPoint.get("Longitude")));
+//
+// JSONObject lastPoint = (JSONObject) pnts.get(31);
+// assertEquals("-58.844", decimalFormat.format(lastPoint.get("Latitude")));
+// assertEquals("-144.155", decimalFormat.format(lastPoint.get("Longitude")));
+//
+// JSONObject productName = (JSONObject) ((JSONArray) umm.get("AdditionalAttributes")).get(0);
+// assertEquals("ProviderDataSource", productName.get("Name"));
+// assertEquals(granuleId, ((JSONArray) productName.get("Values")).get(0));
+// }
@Test
public void testSwotL02UmmMappings()
diff --git a/src/test/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiilsTest.java b/src/test/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiilsTest.java
new file mode 100644
index 0000000..c8f7637
--- /dev/null
+++ b/src/test/java/gov/nasa/cumulus/metadata/util/MENDsISOXmlUtiilsTest.java
@@ -0,0 +1,70 @@
+package gov.nasa.cumulus.metadata.util;
+
+import gov.nasa.cumulus.metadata.aggregator.IsoMendsXPath;
+import gov.nasa.cumulus.metadata.aggregator.MetadataFilesToEcho;
+import gov.nasa.cumulus.metadata.aggregator.NamespaceResolver;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+public class MENDsISOXmlUtiilsTest {
+ File MENDsISOFile = null;
+ @Before
+ public void initialize() {
+ ClassLoader classLoader = getClass().getClassLoader();
+ MENDsISOFile = new File(classLoader.getResource("SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml").getFile());
+ }
+
+ @Test
+ public void testExtractXPathValueSwallowException() throws ParserConfigurationException, SAXException, IOException{
+ Document doc = null;
+ XPath xpath = null;
+ MetadataFilesToEcho mfte = new MetadataFilesToEcho(true);
+ doc = mfte.makeDoc(MENDsISOFile.getAbsolutePath());
+ xpath = mfte.makeXpath(doc);
+
+ String polygonStr = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc,xpath, IsoMendsXPath.POLYGON, "IsoMendsXPath.POLYGON");
+ assertEquals(StringUtils.trim(polygonStr), StringUtils.trim("46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778"));
+
+ // Append extra characters to the end of the XPath expression so it matches nothing,
+ // verifying the exception is swallowed and an empty string is returned.
+ polygonStr = MENDsISOXmlUtiils.extractXPathValueSwallowException(doc,xpath, IsoMendsXPath.POLYGON+"aabbccdd", "IsoMendsXPath.POLYGON");
+ assertEquals(StringUtils.trim(polygonStr), "");
+ }
+
+ @Test
+ public void testExtractXPathValueThrowException() throws ParserConfigurationException, SAXException, IOException{
+ Document doc = null;
+ XPath xpath = null;
+ MetadataFilesToEcho mfte = new MetadataFilesToEcho(true);
+ doc = mfte.makeDoc(MENDsISOFile.getAbsolutePath());
+ xpath = mfte.makeXpath(doc);
+ String polygonStr="";
+ try {
+ polygonStr = MENDsISOXmlUtiils.extractXPathValueThrowsException(doc, xpath, IsoMendsXPath.POLYGON, "IsoMendsXPath.POLYGON");
+ assertEquals(StringUtils.trim(polygonStr), StringUtils.trim("46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778"));
+ } catch (Exception e) {
+ System.out.println("caught generic Exception: " + e);
+ }
+ // Pass a plain string (not a valid registered XPath constant) so evaluation finds nothing,
+ // verifying the method propagates the exception instead of swallowing it.
+ try {
+ polygonStr = MENDsISOXmlUtiils.extractXPathValueThrowsException(doc, xpath, "IsoMendsXPath.POLYGON", "IsoMendsXPath.POLYGON");
+ } catch (XPathExpressionException xPathExpressionException) {
+ System.out.println("caught XPathExpressionException: " + xPathExpressionException);
+ } catch (Exception e) {
+ System.out.println("caught generic Exception: " + e);
+ }
+
+ }
+
+}
diff --git a/src/test/resources/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json b/src/test/resources/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json
deleted file mode 100644
index 5506e08..0000000
--- a/src/test/resources/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0-unsortedUrls.cmr.json
+++ /dev/null
@@ -1,108 +0,0 @@
-{
- "TemporalExtent": {
- "RangeDateTime": {
- "EndingDateTime": "2020-01-01T00:04:57.000Z",
- "BeginningDateTime": "2020-01-01T00:00:00.000Z"
- }
- },
- "MetadataSpecification": {
- "Version": "1.6",
- "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6",
- "Name": "UMM-G"
- },
- "GranuleUR": "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0",
- "ProviderDates": [
- {
- "Type": "Insert",
- "Date": "2020-07-17T23:10:21.470Z"
- },
- {
- "Type": "Update",
- "Date": "2020-07-17T23:10:21.484Z"
- }
- ],
- "SpatialExtent": {
- "HorizontalSpatialDomain": {
- "Geometry": {
- "BoundingRectangles": [
- {
- "WestBoundingCoordinate": 123.165,
- "SouthBoundingCoordinate": -89.989,
- "EastBoundingCoordinate": 180,
- "NorthBoundingCoordinate": -66.906
- },
- {
- "WestBoundingCoordinate": -180,
- "SouthBoundingCoordinate": -89.989,
- "EastBoundingCoordinate": -74.116,
- "NorthBoundingCoordinate": -66.906
- }
- ]
- }
- }
- },
- "DataGranule": {
- "ArchiveAndDistributionInformation": [
- {
- "SizeUnit": "MB",
- "Size": 17.387483596801758,
- "Name": "20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc"
- }
- ],
- "DayNightFlag": "Unspecified",
- "ProductionDateTime": "2020-02-29T12:20:15.000Z"
- },
- "CollectionReference": {
- "Version": "2019.0",
- "ShortName": "MODIS_A-JPL-L2P-v2019.0"
- },
- "RelatedUrls": [
- {
- "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc.md5",
- "Description": "File to download",
- "Type": "EXTENDED METADATA"
- },
- {
- "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-protected/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc",
- "Description": "The base directory location for the granule.",
- "Type": "GET DATA"
- },
- {
- "URL": "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc",
- "Description": "The base directory location for the granule.",
- "Type": "GET DATA"
- },
- {
- "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.cmr.json",
- "Description": "File to download",
- "Type": "EXTENDED METADATA"
- },
- {
- "URL": "https://vtdmnpv139.execute-api.us-west-2.amazonaws.com:9000/DEV/s3credentials",
- "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access",
- "Type": "VIEW RELATED INFORMATION"
- },
- {
- "URL": "https://opendap.uat.earthdata.nasa.gov/providers/POCUMULUS/collections/GHRSST%20Level%202P%20Global%20Sea%20Surface%20Skin%20Temperature%20from%20the%20Moderate%20Resolution%20Imaging%20Spectroradiometer%20(MODIS)%20on%20the%20NASA%20Aqua%20satellite%20(GDS2)/granules/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0",
- "Type": "USE SERVICE API",
- "Subtype": "OPENDAP DATA",
- "Description": "OPeNDAP request URL"
- },
- {
- "URL": "https://jh72u371y2.execute-api.us-west-2.amazonaws.com:9000/DEV/dyen-cumulus-public/MODIS_A-JPL-L2P-v2019.0/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.sses_standard_deviation.png",
- "Type": "GET RELATED VISUALIZATION",
- "Subtype": "DIRECT DOWNLOAD",
- "MimeType": "image/png"
- },
- {
- "URL": "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc.md5",
- "Description": "The base directory location for the granule.",
- "Type": "EXTENDED METADATA"
- },
- {
- "URL": "s3://my-bucket/folder/20200101000000-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.iso.xml",
- "Description": "The base directory location for the granule.",
- "Type": "EXTENDED METADATA"
- }
- ]
-}
\ No newline at end of file
diff --git a/src/test/resources/SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml b/src/test/resources/SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml
new file mode 100644
index 0000000..4a780cd
--- /dev/null
+++ b/src/test/resources/SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.archive.xml
@@ -0,0 +1,86 @@
+
+
+
+
+
+ test
+
+ SWOT_INT_KCAL_Dyn_403_008_20230117T150452_20230117T155629_PIA0_01.nc
+
+
+ test
+ test
+
+ test
+
+
+
+ SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.nc
+ 2020-01-18T11:16:35.056934Z
+ unknown
+ 1075
+ PIA0
+ 01
+ Size: 123456789 SizeUnit: B
+ 403
+ 008
+ unknown
+
+
+ SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.met.json
+ Size: 123 SizeUnit: B
+
+
+ SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.rc.xml
+ Size: 4567 SizeUnit: B
+
+
+ SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.log
+ Size: unknown SizeUnit: B
+
+
+ SWOT_INT_KCAL_Dyn_403_008_20200117T000000_20200117T000000_PIA0_01.archive.xml
+ Size: unknown SizeUnit: B
+
+
+ 2020-01-17T15:04:58.187000Z
+ 2020-01-17T15:56:23.716000Z
+
+ test
+ test
+
+
+
diff --git a/src/test/resources/SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml b/src/test/resources/SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml
new file mode 100644
index 0000000..30c8c11
--- /dev/null
+++ b/src/test/resources/SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml
@@ -0,0 +1,2375 @@
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip
+
+
+
+ eng
+
+
+
+ utf8
+
+
+
+ dataset
+
+
+
+
+
+ 2023-06-16T13:18:49.250930Z
+
+
+
+ ISO 19115-2 Geographic Information - Metadata Part 2 Extensions for imagery and gridded data
+
+
+ ISO 19115-2:2009(E)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2023-06-16T13:18:49.250930Z
+
+
+ creation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip
+
+
+ gov.nasa.esdis.umm.producergranuleid
+
+
+ ProducerGranuleId
+
+
+
+
+
+
+
+
+
+
+
+
+
+ TGB0
+
+
+ gov.nasa.esdis.umm.crid
+
+
+ CRID
+
+
+
+
+
+
+
+ ICV01 CollectionVersion filler
+
+
+ gov.nasa.esdis.umm.otherid
+
+
+ OtherId: ScienceAlgorithmVersionId
+
+
+
+
+
+
+ 4.3.0
+
+
+ gov.nasa.esdis.umm.otherid
+
+
+ OtherId: PGEVersionId
+
+
+
+
+
+
+ 1.0
+
+
+ gov.nasa.esdis.umm.otherid
+
+
+ OtherId: SASVersionId
+
+
+
+
+
+
+ 01
+
+
+ gov.nasa.esdis.umm.otherid
+
+
+ OtherId: ProductCounter
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ asNeeded
+
+
+ ReprocessingPlanned: None
+
+
+
+
+
+
+
+
+ https://webmap.ornl.gov/sdat/pimg/TBD
+
+
+ Size: TBD> SizeUnit: KB Description: TBD
+
+
+ Format: PNG MimeType: image/png
+
+
+
+
+
+
+
+ https://webmap.ornl.gov/sdat/pimg/TBD
+
+
+ Size: TBD> SizeUnit: KB Description: TBD
+
+
+ Format: PNG MimeType: image/png
+
+
+
+
+
+
+
+
+
+
+ SWOT
+
+
+ Surface Water Ocean Topography
+
+
+
+ project
+
+
+
+
+ NASA Project Keywords
+
+
+
+
+
+ NASA
+
+
+ User Support Office
+
+
+
+
+
+
+ https://support.earthdata.nasa.gov/
+
+
+ Earthdata Support
+
+
+ File an issue or provide feedback
+
+
+ information
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+
+
+
+
+
+
+ Commissioning Phase
+
+
+ Calibration Phase
+
+
+ Science Ops Phase
+
+
+ campaign
+
+
+
+
+
+
+
+
+ Space-based Platforms
+
+
+ Earth Observation Satellites
+
+
+ SWOT
+
+
+ platform
+
+
+
+
+ NASA Platform Keywords
+
+
+
+
+
+ NASA
+
+
+ User Support Office
+
+
+
+
+
+
+ https://support.earthdata.nasa.gov/
+
+
+ Earthdata Support
+
+
+ File an issue or provide feedback
+
+
+ information
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+
+
+
+
+
+
+ Earth Remote Sensing Instruments
+
+
+ Active Remote Sensing
+
+
+ Imaging Radars
+
+
+ KaRIn
+
+
+ instrument
+
+
+
+
+ NASA Instrument Keywords
+
+
+
+
+
+ NASA
+
+
+ User Support Office
+
+
+
+
+
+
+ https://support.earthdata.nasa.gov/
+
+
+ Earthdata Support
+
+
+ File an issue or provide feedback
+
+
+ information
+
+
+
+
+
+
+ custodian
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ PGE_L2_HR_RiverAvg
+
+
+ gov.nasa.esdis.umm.collectionshortname
+
+
+ CollectionShortName
+
+
+
+
+ LargerWorkCitation
+
+
+
+
+
+
+
+
+
+ ICV01 CollectionVersion filler
+
+
+ gov.nasa.esdis.umm.collectionversion
+
+
+ CollectionVersion
+
+
+
+
+ LargerWorkCitation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ https://swot.jpl.nasa.gov/
+
+
+ SWOT Project Homepage
+
+
+ information
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ https://swot.jpl.nasa.gov/
+
+
+ SWOT Project Homepage
+
+
+ information
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ eng
+
+
+
+ utf8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 46.7666666666667 151.802777777778 51.353523932563 179.39615512424 51.3618572658963 179.44615512424 51.3673094007704 179.460468207465 51.3720831976997 179.470818074544 51.9544606526693 179.77399359809 51.962745836046 179.775655449761 65.0256 180.0 65.0243570963542 -179.993114725749 64.2422505696615 -173.124080403646 64.2416666666667 -173.0875 64.2589111328125 -172.942587619358 64.3993570963542 -172.234684583876 66.0076904296875 -169.718114556207 66.0260301378038 -169.70074496799 66.0760314941406 -169.659073554145 66.0902187771267 -169.657690429687 66.1322906494141 -169.675703599718 66.1409630669488 -169.684376017253 71.3826697455512 -175.542419433594 71.4159271240235 -175.726031833225 71.4173094007704 -175.740315416124 71.5993445502387 -178.950753445095 71.6086161295573 -179.125728691949 71.6076221042209 -179.174432712131 71.6005043877496 -179.364869689941 71.5840138753255 -179.63235405816 71.5756805419922 -179.756760321723 71.5339 180.0 71.5409488254123 179.982556491428 76.1909840901693 152.824263509115 76.7576266818576 149.457624986437 76.7590138753255 149.384906344944 76.2006429036458 138.826448059082 75.8756427341037 135.72644788954 75.8408372667101 135.68353644477 71.075 130.025 69.1791666666667 128.695833333333 69.1199666341146 128.666011216905 67.6083333333333 128.1375 67.59375 128.133802117242 66.4433797200521 128.049646674262 66.4350755479601 128.050353325738 66.4208333333333 128.054166666667 65.9953955756294 128.247048102485 55.5633509318034 135.546684095595 55.5125 135.604166666667 46.7844919840495 151.737613932292 46.7714508056641 151.764506530762 46.7672841389974 151.781173197428 46.7666666666667 151.802777777778
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 129.8388049355948
+
+
+ 147.8095258158062
+
+
+ 59.54084540610198
+
+
+ 71.2293826683445
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Cycle: 487, Pass: [4], BasinID: 35
+
+
+ gov.nasa.esdis.umm.swottrack
+
+
+ SWOTTrack
+
+
+
+
+
+
+
+
+
+
+
+
+ 2023-04-10T20:00:18.457000Z
+ 2023-04-11T19:50:56.521000Z
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ MeasuredParameters
+
+
+ physicalMeasurement
+
+
+
+
+
+
+
+ N/A
+
+
+
+
+
+ MeasuredParameters
+
+
+
+
+
+
+
+
+
+
+
+
+ qualityInformation
+
+
+ QAPercentMissingData
+
+
+ float
+
+
+
+
+ N/A
+
+
+
+
+
+
+ qualityInformation
+
+
+
+ QAPercentOutOfBoundsData
+
+
+ float
+
+
+ N/A
+
+
+ N/A
+
+
+
+
+ N/A
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ https://daac.ornl.gov/daacdata/islscp_ii/vegetation/erbe_albedo_monthly_xdeg/data/erbe_albedo_1deg_1986.zip
+
+
+
+
+ Type: GET DATA Format: ZIP MimeType: application/zip Size: 395.673 SizeUnit: KB Description: This link provides direct download access to the granule.
+
+
+ download
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ dataset
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ReprocessingActual: To be determined
+
+
+
+
+
+
+
+ PGEVersionClass
+
+
+
+
+
+
+ PGEName: PGE_L2_HR_RiverAvg PGEVersion: 4.3.0
+
+
+ gov.nasa.esdis.umm.pgeversionclass
+
+
+ PGEVersionClass
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ProductionDateTime
+
+
+ 2023-06-16T13:18:49.250930Z
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip
+
+
+
+ Size: unknown SizeUnit: KB ChecksumValue: unknown ChecksumAlgorithm: unknown Description: dataset in a ZIP file
+
+
+ application/zip
+
+
+
+
+ ZIP
+
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip
+
+
+ Size: 2661782 SizeUnit: B ChecksumValue: 9bfc923313717b1c163eeefc36783748 ChecksumAlgorithm: MD5
+
+
+ application/zip
+
+
+
+
+ ZIP
+
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.zip.iso.xml
+
+
+ Size: unknown SizeUnit: unknown ChecksumValue: unknown ChecksumAlgorithm: unknown
+
+
+ text/xml
+
+
+
+
+ XML
+
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.rc.xml
+
+
+ Size: 1228 SizeUnit: B ChecksumValue: 65cae8e2682a4d3be646f8bd08296cee ChecksumAlgorithm: MD5
+
+
+ text/xml
+
+
+
+
+ XML
+
+
+
+
+
+
+
+
+
+
+ SWOT_L2_HR_RiverAvg_487_SI_35_20230410T200018_20230411T195056_TGB0_01.png
+
+
+ Size: 32503 SizeUnit: B ChecksumValue: 2972b99c9de9e8bf908582012fcdb0ba ChecksumAlgorithm: MD5
+
+
+ image/png
+
+
+
+
+ PNG
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SWOT
+
+
+ gov.nasa.esdis.umm.platformshortname
+
+
+ PlatformShortName
+
+
+
+
+
+
+
+
+
+
+
+
+ KaRIn
+
+
+ gov.nasa.esdis.umm.instrumentshortname
+
+
+ InstrumentShortName
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ instrumentInformation
+
+
+ ICV23 KaRIn Instrument_Characteristics name 1
+
+
+
+
+ ICV24 KaRIn Instrument_Characteristics value 1
+
+
+
+
+
+
+ instrumentInformation
+
+
+ ICV25 KaRIn Instrument_Characteristics name 2
+
+
+
+
+ ICV26 KaRIn Instrument_Characteristics value 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/test/resources/cumulus_message_input_example.json b/src/test/resources/cumulus_message_input_example.json
index af7d66f..bb3971b 100644
--- a/src/test/resources/cumulus_message_input_example.json
+++ b/src/test/resources/cumulus_message_input_example.json
@@ -40,7 +40,8 @@
"key": "dataset-image/MODIS_A-JPL-L2P-v2019.0/sst.png",
"size": 7152,
"fileName": "sst.png",
- "type": "metadata"
+ "type": "metadata",
+ "description": "sst"
},
{
"bucket": "dyen-cumulus-public",