diff --git a/.env.example b/.env.example
index 1329b7af..7334c6fd 100644
--- a/.env.example
+++ b/.env.example
@@ -1,6 +1,7 @@
# .env used for port and service confiuration
# for service specific environment variables, see ./env_files/*.env
+# ports
API_PORT=8080
TELEMETRY_PORT=9090
LOCALSTACK_GATEWAY_PORT=9000
@@ -8,12 +9,24 @@ LOCALSTACK_UI_PORT=9001
KEYCLOAK_PORT=8090
RIVER_QUEUE_UI_PORT=9326
-INSTRUMENTATION_AUTH_JWT_MOCKED=
-INSTRUMENTATION_SURVEY123_IP_WHITELIST=
+# api
+INSTRUMENTATION_AUTH_JWT_MOCKED=false
+
+# sl-client
SLCLIENT_SEEDLINK_SERVER_URI=
+
+# task
TASK_THINGLOGIX_COGNITO_POOL=
TASK_THINGLOGIX_PROVIDER_NAME=
TASK_THINGLOGIX_API_GATEWAY_ENDPOINT=
TASK_THINGLOGIX_USER=
TASK_THINGLOGIX_PASSWORD=
TASK_THINGLOGIX_ACCOUNT_ID=
+
+# opendcs
+CDADATA_USERNAME=
+CDADATA_PASSWORD=
+CDABACKUP_USERNAME=
+CDABACKUP_PASSWORD=
+EDDN1_USERNAME=
+EDDN1_PASSWORD=
diff --git a/.gitignore b/.gitignore
index dde8a666..8184aa10 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,3 +36,5 @@ test.log
**/dist
go.work.sum
+
+**/.settings
diff --git a/api/internal/config/api.go b/api/internal/config/api.go
index 92ab7a7a..bb1d9a7b 100644
--- a/api/internal/config/api.go
+++ b/api/internal/config/api.go
@@ -23,6 +23,7 @@ type APIConfig struct {
AlertEventFlushWorkers int `env:"ALERT_EVENT_FLUSH_WORKERS" envDefault:"4"`
IrisFdsnProxyURL string `env:"IRIS_FDSN_PROXY_URL" envDefault:"https://service.iris.edu/fdsnws/station/1/query"`
CwmsProxyURL string `env:"CWMS_PROXY_URL" envDefault:"https://cwms-data.usace.army.mil/cwms-data/"`
+ OpenDCSWrapperURL string `env:"OPENDCS_WRAPPER_URL" envDefault:"http://opendcs:8080"`
}
// NewAPIConfig returns environment variable config
diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go
index 8bac8a3d..ba2838a4 100644
--- a/api/internal/db/batch.go
+++ b/api/internal/db/batch.go
@@ -658,6 +658,241 @@ func (b *EvaluationInstrumentCreateBatchBatchResults) Close() error {
return b.br.Close()
}
+const goesPlatformConfigFileCommit = `-- name: GoesPlatformConfigFileCommit :batchexec
+update goes_platform_config_file set
+ committed = true,
+ committed_at = $1,
+ committed_commit_id = $2
+where id = $3
+`
+
+type GoesPlatformConfigFileCommitBatchResults struct {
+ br pgx.BatchResults
+ tot int
+ closed bool
+}
+
+type GoesPlatformConfigFileCommitParams struct {
+ CommittedAt *time.Time `json:"committed_at"`
+ CommittedCommitID *uuid.UUID `json:"committed_commit_id"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults {
+ batch := &pgx.Batch{}
+ for _, a := range arg {
+ vals := []interface{}{
+ a.CommittedAt,
+ a.CommittedCommitID,
+ a.ID,
+ }
+ batch.Queue(goesPlatformConfigFileCommit, vals...)
+ }
+ br := q.db.SendBatch(ctx, batch)
+ return &GoesPlatformConfigFileCommitBatchResults{br, len(arg), false}
+}
+
+func (b *GoesPlatformConfigFileCommitBatchResults) Exec(f func(int, error)) {
+ defer b.br.Close()
+ for t := 0; t < b.tot; t++ {
+ if b.closed {
+ if f != nil {
+ f(t, ErrBatchAlreadyClosed)
+ }
+ continue
+ }
+ _, err := b.br.Exec()
+ if f != nil {
+ f(t, err)
+ }
+ }
+}
+
+func (b *GoesPlatformConfigFileCommitBatchResults) Close() error {
+ b.closed = true
+ return b.br.Close()
+}
+
+const goesPlatformRegistryUpsert = `-- name: GoesPlatformRegistryUpsert :batchexec
+insert into goes_platform_registry (
+ platform_key,
+ project_id,
+ goes_telemetry_source_id,
+ platform_id,
+ site_name,
+ commit_id,
+ updated_at
+) values ($1, $2, $3, $4, $5, $6, now())
+on conflict (platform_key) do update set
+ project_id = excluded.project_id,
+ goes_telemetry_source_id = excluded.goes_telemetry_source_id,
+ platform_id = excluded.platform_id,
+ site_name = excluded.site_name,
+ commit_id = excluded.commit_id,
+ updated_at = now()
+`
+
+type GoesPlatformRegistryUpsertBatchResults struct {
+ br pgx.BatchResults
+ tot int
+ closed bool
+}
+
+type GoesPlatformRegistryUpsertParams struct {
+ PlatformKey string `json:"platform_key"`
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ PlatformID *string `json:"platform_id"`
+ SiteName *string `json:"site_name"`
+ CommitID uuid.UUID `json:"commit_id"`
+}
+
+func (q *Queries) GoesPlatformRegistryUpsert(ctx context.Context, arg []GoesPlatformRegistryUpsertParams) *GoesPlatformRegistryUpsertBatchResults {
+ batch := &pgx.Batch{}
+ for _, a := range arg {
+ vals := []interface{}{
+ a.PlatformKey,
+ a.ProjectID,
+ a.GoesTelemetrySourceID,
+ a.PlatformID,
+ a.SiteName,
+ a.CommitID,
+ }
+ batch.Queue(goesPlatformRegistryUpsert, vals...)
+ }
+ br := q.db.SendBatch(ctx, batch)
+ return &GoesPlatformRegistryUpsertBatchResults{br, len(arg), false}
+}
+
+func (b *GoesPlatformRegistryUpsertBatchResults) Exec(f func(int, error)) {
+ defer b.br.Close()
+ for t := 0; t < b.tot; t++ {
+ if b.closed {
+ if f != nil {
+ f(t, ErrBatchAlreadyClosed)
+ }
+ continue
+ }
+ _, err := b.br.Exec()
+ if f != nil {
+ f(t, err)
+ }
+ }
+}
+
+func (b *GoesPlatformRegistryUpsertBatchResults) Close() error {
+ b.closed = true
+ return b.br.Close()
+}
+
+const goesTelemetryConfigMappingsCreateBatch = `-- name: GoesTelemetryConfigMappingsCreateBatch :batchexec
+insert into goes_telemetry_config_mappings (
+ goes_platform_config_file_id,
+ platform_sensor_key,
+ timeseries_id
+) values ($1, $2, $3)
+on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key
+do update set timeseries_id = excluded.timeseries_id
+`
+
+type GoesTelemetryConfigMappingsCreateBatchBatchResults struct {
+ br pgx.BatchResults
+ tot int
+ closed bool
+}
+
+type GoesTelemetryConfigMappingsCreateBatchParams struct {
+ GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"`
+ PlatformSensorKey string `json:"platform_sensor_key"`
+ TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+func (q *Queries) GoesTelemetryConfigMappingsCreateBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsCreateBatchParams) *GoesTelemetryConfigMappingsCreateBatchBatchResults {
+ batch := &pgx.Batch{}
+ for _, a := range arg {
+ vals := []interface{}{
+ a.GoesPlatformConfigFileID,
+ a.PlatformSensorKey,
+ a.TimeseriesID,
+ }
+ batch.Queue(goesTelemetryConfigMappingsCreateBatch, vals...)
+ }
+ br := q.db.SendBatch(ctx, batch)
+ return &GoesTelemetryConfigMappingsCreateBatchBatchResults{br, len(arg), false}
+}
+
+func (b *GoesTelemetryConfigMappingsCreateBatchBatchResults) Exec(f func(int, error)) {
+ defer b.br.Close()
+ for t := 0; t < b.tot; t++ {
+ if b.closed {
+ if f != nil {
+ f(t, ErrBatchAlreadyClosed)
+ }
+ continue
+ }
+ _, err := b.br.Exec()
+ if f != nil {
+ f(t, err)
+ }
+ }
+}
+
+func (b *GoesTelemetryConfigMappingsCreateBatchBatchResults) Close() error {
+ b.closed = true
+ return b.br.Close()
+}
+
+const goesTelemetryConfigMappingsDeleteBatch = `-- name: GoesTelemetryConfigMappingsDeleteBatch :batchexec
+delete from goes_telemetry_config_mappings
+where goes_platform_config_file_id = $1
+and platform_sensor_key = $2
+`
+
+type GoesTelemetryConfigMappingsDeleteBatchBatchResults struct {
+ br pgx.BatchResults
+ tot int
+ closed bool
+}
+
+type GoesTelemetryConfigMappingsDeleteBatchParams struct {
+ GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"`
+ PlatformSensorKey string `json:"platform_sensor_key"`
+}
+
+func (q *Queries) GoesTelemetryConfigMappingsDeleteBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsDeleteBatchParams) *GoesTelemetryConfigMappingsDeleteBatchBatchResults {
+ batch := &pgx.Batch{}
+ for _, a := range arg {
+ vals := []interface{}{
+ a.GoesPlatformConfigFileID,
+ a.PlatformSensorKey,
+ }
+ batch.Queue(goesTelemetryConfigMappingsDeleteBatch, vals...)
+ }
+ br := q.db.SendBatch(ctx, batch)
+ return &GoesTelemetryConfigMappingsDeleteBatchBatchResults{br, len(arg), false}
+}
+
+func (b *GoesTelemetryConfigMappingsDeleteBatchBatchResults) Exec(f func(int, error)) {
+ defer b.br.Close()
+ for t := 0; t < b.tot; t++ {
+ if b.closed {
+ if f != nil {
+ f(t, ErrBatchAlreadyClosed)
+ }
+ continue
+ }
+ _, err := b.br.Exec()
+ if f != nil {
+ f(t, err)
+ }
+ }
+}
+
+func (b *GoesTelemetryConfigMappingsDeleteBatchBatchResults) Close() error {
+ b.closed = true
+ return b.br.Close()
+}
+
const inclOptsCreateBatch = `-- name: InclOptsCreateBatch :batchexec
insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time)
values ($1, $2, $3, $4)
diff --git a/api/internal/db/copyfrom.go b/api/internal/db/copyfrom.go
new file mode 100644
index 00000000..a51db827
--- /dev/null
+++ b/api/internal/db/copyfrom.go
@@ -0,0 +1,45 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+// sqlc v1.29.0
+// source: copyfrom.go
+
+package db
+
+import (
+ "context"
+)
+
+// iteratorForGoesMappingSetEntryCreateBatch implements pgx.CopyFromSource.
+type iteratorForGoesMappingSetEntryCreateBatch struct {
+ rows []GoesMappingSetEntryCreateBatchParams
+ skippedFirstNextCall bool
+}
+
+func (r *iteratorForGoesMappingSetEntryCreateBatch) Next() bool {
+ if len(r.rows) == 0 {
+ return false
+ }
+ if !r.skippedFirstNextCall {
+ r.skippedFirstNextCall = true
+ return true
+ }
+ r.rows = r.rows[1:]
+ return len(r.rows) > 0
+}
+
+func (r iteratorForGoesMappingSetEntryCreateBatch) Values() ([]interface{}, error) {
+ return []interface{}{
+ r.rows[0].MappingSetID,
+ r.rows[0].GoesPlatformConfigFileID,
+ r.rows[0].PlatformSensorKey,
+ r.rows[0].TimeseriesID,
+ }, nil
+}
+
+func (r iteratorForGoesMappingSetEntryCreateBatch) Err() error {
+ return nil
+}
+
+func (q *Queries) GoesMappingSetEntryCreateBatch(ctx context.Context, arg []GoesMappingSetEntryCreateBatchParams) (int64, error) {
+ return q.db.CopyFrom(ctx, []string{"goes_mapping_set_entry"}, []string{"mapping_set_id", "goes_platform_config_file_id", "platform_sensor_key", "timeseries_id"}, &iteratorForGoesMappingSetEntryCreateBatch{rows: arg})
+}
diff --git a/api/internal/db/db.go b/api/internal/db/db.go
index b2f6bea4..037499be 100644
--- a/api/internal/db/db.go
+++ b/api/internal/db/db.go
@@ -15,6 +15,7 @@ type DBTX interface {
Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
Query(context.Context, string, ...interface{}) (pgx.Rows, error)
QueryRow(context.Context, string, ...interface{}) pgx.Row
+ CopyFrom(ctx context.Context, tableName pgx.Identifier, columnNames []string, rowSrc pgx.CopyFromSource) (int64, error)
SendBatch(context.Context, *pgx.Batch) pgx.BatchResults
}
diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go
new file mode 100644
index 00000000..3b0556a7
--- /dev/null
+++ b/api/internal/db/goes.sql_gen.go
@@ -0,0 +1,292 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+// sqlc v1.29.0
+// source: goes.sql
+
+package db
+
+import (
+ "context"
+
+ "github.com/google/uuid"
+)
+
+const goesPlatformConfigFileCommittedContentListCommitedForTelemetrySource = `-- name: GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource :many
+select committed_content::xml
+from goes_platform_config_file
+where goes_telemetry_source_id = $1
+and committed
+and not deleted
+`
+
+func (q *Queries) GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) {
+ rows, err := q.db.Query(ctx, goesPlatformConfigFileCommittedContentListCommitedForTelemetrySource, goesTelemetrySourceID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []string{}
+ for rows.Next() {
+ var committed_content string
+ if err := rows.Scan(&committed_content); err != nil {
+ return nil, err
+ }
+ items = append(items, committed_content)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesPlatformConfigFileCreate = `-- name: GoesPlatformConfigFileCreate :one
+insert into goes_platform_config_file (
+ goes_telemetry_source_id,
+ project_id,
+ name,
+ alias,
+ size_bytes,
+ content,
+ created_by
+) values (
+ $1,
+ $2,
+ $3,
+ $4,
+ $5,
+ $6::xml,
+ $7
+)
+returning id
+`
+
+type GoesPlatformConfigFileCreateParams struct {
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ ProjectID uuid.UUID `json:"project_id"`
+ Name string `json:"name"`
+ Alias string `json:"alias"`
+ SizeBytes int64 `json:"size_bytes"`
+ Content string `json:"content"`
+ CreatedBy uuid.UUID `json:"created_by"`
+}
+
+func (q *Queries) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) {
+ row := q.db.QueryRow(ctx, goesPlatformConfigFileCreate,
+ arg.GoesTelemetrySourceID,
+ arg.ProjectID,
+ arg.Name,
+ arg.Alias,
+ arg.SizeBytes,
+ arg.Content,
+ arg.CreatedBy,
+ )
+ var id uuid.UUID
+ err := row.Scan(&id)
+ return id, err
+}
+
+const goesPlatformConfigFileDelete = `-- name: GoesPlatformConfigFileDelete :exec
+update goes_platform_config_file set
+ deleted = true,
+ deleted_at = now(),
+ deleted_by = $1,
+ committed = false
+where id = $2
+`
+
+type GoesPlatformConfigFileDeleteParams struct {
+ DeletedBy *uuid.UUID `json:"deleted_by"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, arg GoesPlatformConfigFileDeleteParams) error {
+ _, err := q.db.Exec(ctx, goesPlatformConfigFileDelete, arg.DeletedBy, arg.ID)
+ return err
+}
+
+const goesPlatformConfigFileGet = `-- name: GoesPlatformConfigFileGet :one
+select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by, committed_content, committed_commit_id, deleted, deleted_at, deleted_by
+from goes_platform_config_file
+where id = $1
+and not deleted
+`
+
+func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) {
+ row := q.db.QueryRow(ctx, goesPlatformConfigFileGet, id)
+ var i GoesPlatformConfigFile
+ err := row.Scan(
+ &i.ID,
+ &i.GoesTelemetrySourceID,
+ &i.ProjectID,
+ &i.Name,
+ &i.Alias,
+ &i.SizeBytes,
+ &i.Content,
+ &i.Committed,
+ &i.CommittedAt,
+ &i.CreatedAt,
+ &i.CreatedBy,
+ &i.UpdatedAt,
+ &i.UpdatedBy,
+ &i.CommittedContent,
+ &i.CommittedCommitID,
+ &i.Deleted,
+ &i.DeletedAt,
+ &i.DeletedBy,
+ )
+ return i, err
+}
+
+const goesPlatformConfigFileListUncommittedForProject = `-- name: GoesPlatformConfigFileListUncommittedForProject :many
+select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by, committed_content, committed_commit_id, deleted, deleted_at, deleted_by
+from goes_platform_config_file
+where project_id = $1
+and not committed
+and not deleted
+`
+
+func (q *Queries) GoesPlatformConfigFileListUncommittedForProject(ctx context.Context, projectID uuid.UUID) ([]GoesPlatformConfigFile, error) {
+ rows, err := q.db.Query(ctx, goesPlatformConfigFileListUncommittedForProject, projectID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesPlatformConfigFile{}
+ for rows.Next() {
+ var i GoesPlatformConfigFile
+ if err := rows.Scan(
+ &i.ID,
+ &i.GoesTelemetrySourceID,
+ &i.ProjectID,
+ &i.Name,
+ &i.Alias,
+ &i.SizeBytes,
+ &i.Content,
+ &i.Committed,
+ &i.CommittedAt,
+ &i.CreatedAt,
+ &i.CreatedBy,
+ &i.UpdatedAt,
+ &i.UpdatedBy,
+ &i.CommittedContent,
+ &i.CommittedCommitID,
+ &i.Deleted,
+ &i.DeletedAt,
+ &i.DeletedBy,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesPlatformConfigFileUpdate = `-- name: GoesPlatformConfigFileUpdate :exec
+update goes_platform_config_file set
+ name = $1,
+ alias = $2,
+ size_bytes = $3,
+ content = $4::xml,
+ deleted = false,
+ deleted_at = null,
+ deleted_by = null
+where id = $5
+`
+
+type GoesPlatformConfigFileUpdateParams struct {
+ Name string `json:"name"`
+ Alias string `json:"alias"`
+ SizeBytes int64 `json:"size_bytes"`
+ Content string `json:"content"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error {
+ _, err := q.db.Exec(ctx, goesPlatformConfigFileUpdate,
+ arg.Name,
+ arg.Alias,
+ arg.SizeBytes,
+ arg.Content,
+ arg.ID,
+ )
+ return err
+}
+
+const goesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile = `-- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec
+delete from goes_telemetry_config_mappings
+where goes_platform_config_file_id = $1
+`
+
+func (q *Queries) GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx context.Context, goesPlatformConfigFileID uuid.UUID) error {
+ _, err := q.db.Exec(ctx, goesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile, goesPlatformConfigFileID)
+ return err
+}
+
+const goesTelemetryConfigMappingsList = `-- name: GoesTelemetryConfigMappingsList :many
+select goes_platform_config_file_id, platform_sensor_key, timeseries_id
+from goes_telemetry_config_mappings
+where goes_platform_config_file_id = $1
+and goes_platform_config_file_id in (
+ select id from goes_platform_config_file where deleted = false
+)
+order by platform_sensor_key
+`
+
+func (q *Queries) GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error) {
+ rows, err := q.db.Query(ctx, goesTelemetryConfigMappingsList, goesPlatformConfigFileID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesTelemetryConfigMappings{}
+ for rows.Next() {
+ var i GoesTelemetryConfigMappings
+ if err := rows.Scan(&i.GoesPlatformConfigFileID, &i.PlatformSensorKey, &i.TimeseriesID); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesTelemetryConfigSetUncommitted = `-- name: GoesTelemetryConfigSetUncommitted :exec
+update goes_platform_config_file set
+ committed = false
+where id = $1
+`
+
+func (q *Queries) GoesTelemetryConfigSetUncommitted(ctx context.Context, id uuid.UUID) error {
+ _, err := q.db.Exec(ctx, goesTelemetryConfigSetUncommitted, id)
+ return err
+}
+
+const goesTelemetrySourceList = `-- name: GoesTelemetrySourceList :many
+select id, name, files
+from v_goes_telemetry_source
+`
+
+func (q *Queries) GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error) {
+ rows, err := q.db.Query(ctx, goesTelemetrySourceList)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []VGoesTelemetrySource{}
+ for rows.Next() {
+ var i VGoesTelemetrySource
+ if err := rows.Scan(&i.ID, &i.Name, &i.Files); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
diff --git a/api/internal/db/goes_commit.sql_gen.go b/api/internal/db/goes_commit.sql_gen.go
new file mode 100644
index 00000000..b989584f
--- /dev/null
+++ b/api/internal/db/goes_commit.sql_gen.go
@@ -0,0 +1,568 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+// sqlc v1.29.0
+// source: goes_commit.sql
+
+package db
+
+import (
+ "context"
+ "encoding/json"
+
+ "github.com/google/uuid"
+)
+
+const goesCommitCreatePending = `-- name: GoesCommitCreatePending :one
+insert into goes_commit (
+ project_id,
+ goes_telemetry_source_id,
+ created_by,
+ status,
+ previous_commit_id,
+ idempotency_key,
+ mapping_set_id
+) values (
+ $1, $2, $3, 'pending', $4, $5, $6
+)
+returning id, project_id, goes_telemetry_source_id, created_at, created_by, status, opendcs_response, previous_commit_id, idempotency_key, mapping_set_id
+`
+
+type GoesCommitCreatePendingParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ CreatedBy uuid.UUID `json:"created_by"`
+ PreviousCommitID *uuid.UUID `json:"previous_commit_id"`
+ IdempotencyKey *string `json:"idempotency_key"`
+ MappingSetID *uuid.UUID `json:"mapping_set_id"`
+}
+
+func (q *Queries) GoesCommitCreatePending(ctx context.Context, arg GoesCommitCreatePendingParams) (GoesCommit, error) {
+ row := q.db.QueryRow(ctx, goesCommitCreatePending,
+ arg.ProjectID,
+ arg.GoesTelemetrySourceID,
+ arg.CreatedBy,
+ arg.PreviousCommitID,
+ arg.IdempotencyKey,
+ arg.MappingSetID,
+ )
+ var i GoesCommit
+ err := row.Scan(
+ &i.ID,
+ &i.ProjectID,
+ &i.GoesTelemetrySourceID,
+ &i.CreatedAt,
+ &i.CreatedBy,
+ &i.Status,
+ &i.OpendcsResponse,
+ &i.PreviousCommitID,
+ &i.IdempotencyKey,
+ &i.MappingSetID,
+ )
+ return i, err
+}
+
+const goesCommitGetActive = `-- name: GoesCommitGetActive :one
+select id, project_id, goes_telemetry_source_id, created_at, created_by, status, opendcs_response, previous_commit_id, idempotency_key, mapping_set_id
+from goes_commit
+where
+ project_id = $1
+ and goes_telemetry_source_id = $2
+ and status = 'active'
+order by created_at desc
+limit 1
+`
+
+type GoesCommitGetActiveParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+}
+
+func (q *Queries) GoesCommitGetActive(ctx context.Context, arg GoesCommitGetActiveParams) (GoesCommit, error) {
+ row := q.db.QueryRow(ctx, goesCommitGetActive, arg.ProjectID, arg.GoesTelemetrySourceID)
+ var i GoesCommit
+ err := row.Scan(
+ &i.ID,
+ &i.ProjectID,
+ &i.GoesTelemetrySourceID,
+ &i.CreatedAt,
+ &i.CreatedBy,
+ &i.Status,
+ &i.OpendcsResponse,
+ &i.PreviousCommitID,
+ &i.IdempotencyKey,
+ &i.MappingSetID,
+ )
+ return i, err
+}
+
+const goesCommitGetByID = `-- name: GoesCommitGetByID :one
+select id, project_id, goes_telemetry_source_id, created_at, created_by, status, opendcs_response, previous_commit_id, idempotency_key, mapping_set_id
+from goes_commit
+where id = $1
+`
+
+func (q *Queries) GoesCommitGetByID(ctx context.Context, id uuid.UUID) (GoesCommit, error) {
+ row := q.db.QueryRow(ctx, goesCommitGetByID, id)
+ var i GoesCommit
+ err := row.Scan(
+ &i.ID,
+ &i.ProjectID,
+ &i.GoesTelemetrySourceID,
+ &i.CreatedAt,
+ &i.CreatedBy,
+ &i.Status,
+ &i.OpendcsResponse,
+ &i.PreviousCommitID,
+ &i.IdempotencyKey,
+ &i.MappingSetID,
+ )
+ return i, err
+}
+
+const goesCommitMarkActive = `-- name: GoesCommitMarkActive :exec
+with target as (
+ select
+ c.id,
+ c.project_id,
+ c.goes_telemetry_source_id
+ from goes_commit c
+ where c.id = $2
+),
+cleared as (
+ update goes_commit c
+ set status = 'inactive'
+ where c.project_id = (select t.project_id from target t)
+ and c.goes_telemetry_source_id = (select t.goes_telemetry_source_id from target t)
+ and c.status = 'active'
+)
+update goes_commit c
+set status = 'active',
+ opendcs_response = $1::jsonb
+where c.id = (select t.id from target t)
+`
+
+type GoesCommitMarkActiveParams struct {
+ OpendcsResponse json.RawMessage `json:"opendcs_response"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesCommitMarkActive(ctx context.Context, arg GoesCommitMarkActiveParams) error {
+ _, err := q.db.Exec(ctx, goesCommitMarkActive, arg.OpendcsResponse, arg.ID)
+ return err
+}
+
+const goesCommitMarkFailed = `-- name: GoesCommitMarkFailed :exec
+update goes_commit set status = 'failed', opendcs_response = $1::jsonb
+where id = $2
+`
+
+type GoesCommitMarkFailedParams struct {
+ OpendcsResponse json.RawMessage `json:"opendcs_response"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesCommitMarkFailed(ctx context.Context, arg GoesCommitMarkFailedParams) error {
+ _, err := q.db.Exec(ctx, goesCommitMarkFailed, arg.OpendcsResponse, arg.ID)
+ return err
+}
+
+const goesMappingSetCreate = `-- name: GoesMappingSetCreate :one
+insert into goes_mapping_set (project_id, created_by, content_hash, idempotency_key)
+values ($1, $2, $3, $4)
+returning id, project_id, created_at, created_by, content_hash, idempotency_key
+`
+
+type GoesMappingSetCreateParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ CreatedBy uuid.UUID `json:"created_by"`
+ ContentHash string `json:"content_hash"`
+ IdempotencyKey *string `json:"idempotency_key"`
+}
+
+func (q *Queries) GoesMappingSetCreate(ctx context.Context, arg GoesMappingSetCreateParams) (GoesMappingSet, error) {
+ row := q.db.QueryRow(ctx, goesMappingSetCreate,
+ arg.ProjectID,
+ arg.CreatedBy,
+ arg.ContentHash,
+ arg.IdempotencyKey,
+ )
+ var i GoesMappingSet
+ err := row.Scan(
+ &i.ID,
+ &i.ProjectID,
+ &i.CreatedAt,
+ &i.CreatedBy,
+ &i.ContentHash,
+ &i.IdempotencyKey,
+ )
+ return i, err
+}
+
+const goesMappingSetEntriesList = `-- name: GoesMappingSetEntriesList :many
+select goes_platform_config_file_id, platform_sensor_key, timeseries_id
+from goes_mapping_set_entry
+where mapping_set_id = $1
+order by goes_platform_config_file_id, platform_sensor_key
+`
+
+type GoesMappingSetEntriesListRow struct {
+ GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"`
+ PlatformSensorKey string `json:"platform_sensor_key"`
+ TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+func (q *Queries) GoesMappingSetEntriesList(ctx context.Context, mappingSetID uuid.UUID) ([]GoesMappingSetEntriesListRow, error) {
+ rows, err := q.db.Query(ctx, goesMappingSetEntriesList, mappingSetID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesMappingSetEntriesListRow{}
+ for rows.Next() {
+ var i GoesMappingSetEntriesListRow
+ if err := rows.Scan(&i.GoesPlatformConfigFileID, &i.PlatformSensorKey, &i.TimeseriesID); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+type GoesMappingSetEntryCreateBatchParams struct {
+ MappingSetID uuid.UUID `json:"mapping_set_id"`
+ GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"`
+ PlatformSensorKey string `json:"platform_sensor_key"`
+ TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+const goesPlatformConfigFileCommitArtifactsUpdate = `-- name: GoesPlatformConfigFileCommitArtifactsUpdate :exec
+update goes_platform_config_file set
+ committed_content = $1::xml,
+ committed = true,
+ committed_at = now(),
+ committed_commit_id = $2
+where id = $3
+`
+
+type GoesPlatformConfigFileCommitArtifactsUpdateParams struct {
+ CommittedContent string `json:"committed_content"`
+ CommittedCommitID *uuid.UUID `json:"committed_commit_id"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesPlatformConfigFileCommitArtifactsUpdate(ctx context.Context, arg GoesPlatformConfigFileCommitArtifactsUpdateParams) error {
+ _, err := q.db.Exec(ctx, goesPlatformConfigFileCommitArtifactsUpdate, arg.CommittedContent, arg.CommittedCommitID, arg.ID)
+ return err
+}
+
+const goesPlatformConfigFileRestoreForRollback = `-- name: GoesPlatformConfigFileRestoreForRollback :exec
+update goes_platform_config_file set
+ content = $1::xml,
+ committed_content = $1::xml,
+ committed = true,
+ committed_at = now(),
+ committed_commit_id = $2,
+ deleted = false,
+ deleted_at = null,
+ deleted_by = null
+where id = $3
+`
+
+type GoesPlatformConfigFileRestoreForRollbackParams struct {
+ Content string `json:"content"`
+ CommittedCommitID *uuid.UUID `json:"committed_commit_id"`
+ ID uuid.UUID `json:"id"`
+}
+
+func (q *Queries) GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error {
+ _, err := q.db.Exec(ctx, goesPlatformConfigFileRestoreForRollback, arg.Content, arg.CommittedCommitID, arg.ID)
+ return err
+}
+
+const goesPlatformConfigFileSoftDeleteNotInSet = `-- name: GoesPlatformConfigFileSoftDeleteNotInSet :exec
+update goes_platform_config_file f set
+ deleted = true,
+ deleted_at = now(),
+ deleted_by = $3
+where f.project_id = $1
+and f.goes_telemetry_source_id = $2
+and not (f.id = any($4::uuid[]))
+and f.deleted = false
+`
+
+type GoesPlatformConfigFileSoftDeleteNotInSetParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ DeletedBy *uuid.UUID `json:"deleted_by"`
+ FileIds []uuid.UUID `json:"file_ids"`
+}
+
+func (q *Queries) GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error {
+ _, err := q.db.Exec(ctx, goesPlatformConfigFileSoftDeleteNotInSet,
+ arg.ProjectID,
+ arg.GoesTelemetrySourceID,
+ arg.DeletedBy,
+ arg.FileIds,
+ )
+ return err
+}
+
+const goesPlatformConfigFilesListForCommitByCommitID = `-- name: GoesPlatformConfigFilesListForCommitByCommitID :many
+select id, name, alias, committed_content::text as content
+from goes_platform_config_file
+where project_id = $1
+and goes_telemetry_source_id = $2
+and committed_commit_id = $3
+order by created_at asc
+`
+
+type GoesPlatformConfigFilesListForCommitByCommitIDParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ CommittedCommitID *uuid.UUID `json:"committed_commit_id"`
+}
+
+type GoesPlatformConfigFilesListForCommitByCommitIDRow struct {
+ ID uuid.UUID `json:"id"`
+ Name string `json:"name"`
+ Alias string `json:"alias"`
+ Content string `json:"content"`
+}
+
+func (q *Queries) GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) {
+ rows, err := q.db.Query(ctx, goesPlatformConfigFilesListForCommitByCommitID, arg.ProjectID, arg.GoesTelemetrySourceID, arg.CommittedCommitID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesPlatformConfigFilesListForCommitByCommitIDRow{}
+ for rows.Next() {
+ var i GoesPlatformConfigFilesListForCommitByCommitIDRow
+ if err := rows.Scan(
+ &i.ID,
+ &i.Name,
+ &i.Alias,
+ &i.Content,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesPlatformConfigFilesListUncommitted = `-- name: GoesPlatformConfigFilesListUncommitted :many
+select id, name, alias, content, deleted
+from goes_platform_config_file
+where project_id = $1
+and goes_telemetry_source_id = $2
+and not committed
+order by deleted desc, created_at asc
+`
+
+type GoesPlatformConfigFilesListUncommittedParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+}
+
+type GoesPlatformConfigFilesListUncommittedRow struct {
+ ID uuid.UUID `json:"id"`
+ Name string `json:"name"`
+ Alias string `json:"alias"`
+ Content string `json:"content"`
+ Deleted bool `json:"deleted"`
+}
+
+func (q *Queries) GoesPlatformConfigFilesListUncommitted(ctx context.Context, arg GoesPlatformConfigFilesListUncommittedParams) ([]GoesPlatformConfigFilesListUncommittedRow, error) {
+ rows, err := q.db.Query(ctx, goesPlatformConfigFilesListUncommitted, arg.ProjectID, arg.GoesTelemetrySourceID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesPlatformConfigFilesListUncommittedRow{}
+ for rows.Next() {
+ var i GoesPlatformConfigFilesListUncommittedRow
+ if err := rows.Scan(
+ &i.ID,
+ &i.Name,
+ &i.Alias,
+ &i.Content,
+ &i.Deleted,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesPlatformRegistryConflicts = `-- name: GoesPlatformRegistryConflicts :many
+select platform_key, project_id
+from goes_platform_registry
+where goes_telemetry_source_id = $1
+and platform_key = any($3::text[])
+and project_id <> $2
+`
+
+// GoesPlatformRegistryConflictsParams supplies the telemetry source, the
+// project performing the check, and the candidate platform keys.
+type GoesPlatformRegistryConflictsParams struct {
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ ProjectID uuid.UUID `json:"project_id"`
+ PlatformKeys []string `json:"platform_keys"`
+}
+
+// GoesPlatformRegistryConflictsRow identifies a conflicting platform key and
+// the other project that currently owns it.
+type GoesPlatformRegistryConflictsRow struct {
+ PlatformKey string `json:"platform_key"`
+ ProjectID uuid.UUID `json:"project_id"`
+}
+
+// GoesPlatformRegistryConflicts returns the subset of PlatformKeys that are
+// already registered on this telemetry source by a project other than
+// arg.ProjectID (sqlc-generated).
+func (q *Queries) GoesPlatformRegistryConflicts(ctx context.Context, arg GoesPlatformRegistryConflictsParams) ([]GoesPlatformRegistryConflictsRow, error) {
+ rows, err := q.db.Query(ctx, goesPlatformRegistryConflicts, arg.GoesTelemetrySourceID, arg.ProjectID, arg.PlatformKeys)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesPlatformRegistryConflictsRow{}
+ for rows.Next() {
+ var i GoesPlatformRegistryConflictsRow
+ if err := rows.Scan(&i.PlatformKey, &i.ProjectID); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesPlatformRegistryDeleteMissing = `-- name: GoesPlatformRegistryDeleteMissing :exec
+delete from goes_platform_registry r
+where r.project_id = $1
+and r.goes_telemetry_source_id = $2
+and not (r.platform_key = any($3::text[]))
+`
+
+// GoesPlatformRegistryDeleteMissingParams names the project/source scope and
+// the set of platform keys that should be kept.
+type GoesPlatformRegistryDeleteMissingParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ PlatformKeys []string `json:"platform_keys"`
+}
+
+// GoesPlatformRegistryDeleteMissing removes registry rows for the given
+// project and telemetry source whose platform_key is NOT in arg.PlatformKeys,
+// i.e. it prunes entries absent from the desired set (sqlc-generated).
+func (q *Queries) GoesPlatformRegistryDeleteMissing(ctx context.Context, arg GoesPlatformRegistryDeleteMissingParams) error {
+ _, err := q.db.Exec(ctx, goesPlatformRegistryDeleteMissing, arg.ProjectID, arg.GoesTelemetrySourceID, arg.PlatformKeys)
+ return err
+}
+
+const goesPlatformRegistryListByProject = `-- name: GoesPlatformRegistryListByProject :many
+select platform_key, platform_id, site_name
+from goes_platform_registry
+where project_id = $1
+and goes_telemetry_source_id = $2
+order by platform_key
+`
+
+// GoesPlatformRegistryListByProjectParams scopes the registry listing to one
+// project / telemetry source pair.
+type GoesPlatformRegistryListByProjectParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+}
+
+// GoesPlatformRegistryListByProjectRow is one registry entry; PlatformID and
+// SiteName are nullable in the schema, hence pointer types.
+type GoesPlatformRegistryListByProjectRow struct {
+ PlatformKey string `json:"platform_key"`
+ PlatformID *string `json:"platform_id"`
+ SiteName *string `json:"site_name"`
+}
+
+// GoesPlatformRegistryListByProject lists the project's registered platforms
+// for a telemetry source, ordered by platform_key (sqlc-generated).
+func (q *Queries) GoesPlatformRegistryListByProject(ctx context.Context, arg GoesPlatformRegistryListByProjectParams) ([]GoesPlatformRegistryListByProjectRow, error) {
+ rows, err := q.db.Query(ctx, goesPlatformRegistryListByProject, arg.ProjectID, arg.GoesTelemetrySourceID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesPlatformRegistryListByProjectRow{}
+ for rows.Next() {
+ var i GoesPlatformRegistryListByProjectRow
+ if err := rows.Scan(&i.PlatformKey, &i.PlatformID, &i.SiteName); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesTelemetryConfigMappingsListForFiles = `-- name: GoesTelemetryConfigMappingsListForFiles :many
+select goes_platform_config_file_id, platform_sensor_key, timeseries_id
+from goes_telemetry_config_mappings
+where goes_platform_config_file_id = any($1::uuid[])
+order by goes_platform_config_file_id, platform_sensor_key
+`
+
+// GoesTelemetryConfigMappingsListForFiles returns all sensor-to-timeseries
+// mappings belonging to any of the given platform config file ids, ordered by
+// file id then sensor key (sqlc-generated).
+func (q *Queries) GoesTelemetryConfigMappingsListForFiles(ctx context.Context, fileIds []uuid.UUID) ([]GoesTelemetryConfigMappings, error) {
+ rows, err := q.db.Query(ctx, goesTelemetryConfigMappingsListForFiles, fileIds)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ items := []GoesTelemetryConfigMappings{}
+ for rows.Next() {
+ var i GoesTelemetryConfigMappings
+ if err := rows.Scan(&i.GoesPlatformConfigFileID, &i.PlatformSensorKey, &i.TimeseriesID); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const goesTelemetryConfigMappingsReplaceForProjectFromMappingSet = `-- name: GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet :exec
+with file_ids as (
+ select id
+ from goes_platform_config_file
+ where project_id = $1
+ and goes_telemetry_source_id = $2
+)
+, del as (
+ delete from goes_telemetry_config_mappings m
+ using file_ids f
+ where m.goes_platform_config_file_id = f.id
+)
+insert into goes_telemetry_config_mappings (
+ goes_platform_config_file_id,
+ platform_sensor_key,
+ timeseries_id
+)
+select
+ e.goes_platform_config_file_id,
+ e.platform_sensor_key,
+ e.timeseries_id
+from goes_mapping_set_entry e
+join file_ids f on f.id = e.goes_platform_config_file_id
+where e.mapping_set_id = $3
+on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key
+do update set timeseries_id = excluded.timeseries_id
+`
+
+// GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams names the
+// project/source whose mappings are replaced and the mapping set to copy from.
+type GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams struct {
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ MappingSetID uuid.UUID `json:"mapping_set_id"`
+}
+
+// GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet replaces, in a
+// single statement, all mappings for the project's config files with the
+// entries of the given mapping set (sqlc-generated).
+// NOTE(review): the delete runs in a data-modifying CTE, so the main INSERT
+// cannot observe its deletions; re-inserted keys therefore hit the unique
+// constraint and fall into ON CONFLICT DO UPDATE — confirm this interplay is
+// intended against PostgreSQL's WITH-clause visibility rules.
+func (q *Queries) GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx context.Context, arg GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams) error {
+ _, err := q.db.Exec(ctx, goesTelemetryConfigMappingsReplaceForProjectFromMappingSet, arg.ProjectID, arg.GoesTelemetrySourceID, arg.MappingSetID)
+ return err
+}
diff --git a/api/internal/db/models.go b/api/internal/db/models.go
index 9280dab3..aa921ebc 100644
--- a/api/internal/db/models.go
+++ b/api/internal/db/models.go
@@ -643,6 +643,77 @@ type EvaluationInstrument struct {
InstrumentID *uuid.UUID `json:"instrument_id"`
}
+// GoesCommit records one commit of a project's platform config set to a GOES
+// telemetry source: its status, the raw OpenDCS response payload, an optional
+// link to the previous commit, and optional idempotency key / mapping set.
+type GoesCommit struct {
+ ID uuid.UUID `json:"id"`
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy uuid.UUID `json:"created_by"`
+ Status string `json:"status"`
+ OpendcsResponse []byte `json:"opendcs_response"`
+ PreviousCommitID *uuid.UUID `json:"previous_commit_id"`
+ IdempotencyKey *string `json:"idempotency_key"`
+ MappingSetID *uuid.UUID `json:"mapping_set_id"`
+}
+
+// GoesMappingSet is a content-hashed snapshot of sensor-to-timeseries mappings
+// for a project.
+type GoesMappingSet struct {
+ ID uuid.UUID `json:"id"`
+ ProjectID uuid.UUID `json:"project_id"`
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy uuid.UUID `json:"created_by"`
+ ContentHash string `json:"content_hash"`
+ IdempotencyKey *string `json:"idempotency_key"`
+}
+
+// GoesMappingSetEntry is one mapping within a mapping set; TimeseriesID is
+// nullable (an unmapped sensor key).
+type GoesMappingSetEntry struct {
+ MappingSetID uuid.UUID `json:"mapping_set_id"`
+ GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"`
+ PlatformSensorKey string `json:"platform_sensor_key"`
+ TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+// GoesPlatformConfigFile is an uploaded platform configuration XML file, with
+// working-copy Content plus commit bookkeeping (Committed, CommittedContent,
+// CommittedCommitID) and soft-delete audit fields.
+type GoesPlatformConfigFile struct {
+ ID uuid.UUID `json:"id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ ProjectID uuid.UUID `json:"project_id"`
+ Name string `json:"name"`
+ Alias string `json:"alias"`
+ SizeBytes int64 `json:"size_bytes"`
+ Content string `json:"content"`
+ Committed bool `json:"committed"`
+ CommittedAt *time.Time `json:"committed_at"`
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy uuid.UUID `json:"created_by"`
+ UpdatedAt *time.Time `json:"updated_at"`
+ UpdatedBy *uuid.UUID `json:"updated_by"`
+ CommittedContent interface{} `json:"committed_content"`
+ CommittedCommitID *uuid.UUID `json:"committed_commit_id"`
+ Deleted bool `json:"deleted"`
+ DeletedAt *time.Time `json:"deleted_at"`
+ DeletedBy *uuid.UUID `json:"deleted_by"`
+}
+
+// GoesPlatformRegistry claims a platform key for a project on a telemetry
+// source, stamped with the commit that last touched it.
+type GoesPlatformRegistry struct {
+ PlatformKey string `json:"platform_key"`
+ ProjectID uuid.UUID `json:"project_id"`
+ GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"`
+ PlatformID *string `json:"platform_id"`
+ SiteName *string `json:"site_name"`
+ CommitID uuid.UUID `json:"commit_id"`
+ UpdatedAt time.Time `json:"updated_at"`
+}
+
+// GoesTelemetryConfigMappings maps one platform sensor key within a config
+// file to an (optional) timeseries.
+type GoesTelemetryConfigMappings struct {
+ GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"`
+ PlatformSensorKey string `json:"platform_sensor_key"`
+ TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+// GoesTelemetrySource identifies a GOES telemetry (OpenDCS) instance.
+type GoesTelemetrySource struct {
+ ID uuid.UUID `json:"id"`
+ Name string `json:"name"`
+}
+
type Heartbeat struct {
Time time.Time `json:"time"`
}
@@ -1414,6 +1485,12 @@ type VEvaluation struct {
Instruments []InstrumentIDName `json:"instruments"`
}
+// VGoesTelemetrySource is the database-view row for a telemetry source with
+// its platform config files nested as Files.
+type VGoesTelemetrySource struct {
+ ID uuid.UUID `json:"id"`
+ Name string `json:"name"`
+ Files []VGoesTelemetrySourceFiles `json:"files"`
+}
+
type VInclMeasurement struct {
InstrumentID uuid.UUID `json:"instrument_id"`
Time time.Time `json:"time"`
diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go
index 79a71e7b..1fe56be2 100644
--- a/api/internal/db/overrides.go
+++ b/api/internal/db/overrides.go
@@ -91,6 +91,19 @@ type IDSlugName struct {
Name string `json:"name"`
}
+// IDName is a minimal id/name pair for embedding in view rows.
+type IDName struct {
+ ID uuid.UUID `json:"id"`
+ Name string `json:"name"`
+}
+
+// VGoesTelemetrySourceFiles is the file summary nested under a telemetry
+// source view row (metadata only — no XML content).
+type VGoesTelemetrySourceFiles struct {
+ IDName
+ ProjectID uuid.UUID `json:"project_id"`
+ Alias string `json:"alias"`
+ SizeBytes int64 `json:"size_bytes"`
+ Committed bool `json:"committed"`
+}
+
type InstrumentIDName struct {
InstrumentID uuid.UUID `json:"instrument_id"`
InstrumentName string `json:"instrument_name"`
diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go
index bc84971f..c7e53983 100644
--- a/api/internal/db/querier.go
+++ b/api/internal/db/querier.go
@@ -116,6 +116,38 @@ type Querier interface {
EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error)
EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error)
EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error
+ GoesCommitCreatePending(ctx context.Context, arg GoesCommitCreatePendingParams) (GoesCommit, error)
+ GoesCommitGetActive(ctx context.Context, arg GoesCommitGetActiveParams) (GoesCommit, error)
+ GoesCommitGetByID(ctx context.Context, id uuid.UUID) (GoesCommit, error)
+ GoesCommitMarkActive(ctx context.Context, arg GoesCommitMarkActiveParams) error
+ GoesCommitMarkFailed(ctx context.Context, arg GoesCommitMarkFailedParams) error
+ GoesMappingSetCreate(ctx context.Context, arg GoesMappingSetCreateParams) (GoesMappingSet, error)
+ GoesMappingSetEntriesList(ctx context.Context, mappingSetID uuid.UUID) ([]GoesMappingSetEntriesListRow, error)
+ GoesMappingSetEntryCreateBatch(ctx context.Context, arg []GoesMappingSetEntryCreateBatchParams) (int64, error)
+ GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults
+ GoesPlatformConfigFileCommitArtifactsUpdate(ctx context.Context, arg GoesPlatformConfigFileCommitArtifactsUpdateParams) error
+ GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error)
+ GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error)
+ GoesPlatformConfigFileDelete(ctx context.Context, arg GoesPlatformConfigFileDeleteParams) error
+ GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error)
+ GoesPlatformConfigFileListUncommittedForProject(ctx context.Context, projectID uuid.UUID) ([]GoesPlatformConfigFile, error)
+ GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error
+ GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error
+ GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error
+ GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error)
+ GoesPlatformConfigFilesListUncommitted(ctx context.Context, arg GoesPlatformConfigFilesListUncommittedParams) ([]GoesPlatformConfigFilesListUncommittedRow, error)
+ GoesPlatformRegistryConflicts(ctx context.Context, arg GoesPlatformRegistryConflictsParams) ([]GoesPlatformRegistryConflictsRow, error)
+ GoesPlatformRegistryDeleteMissing(ctx context.Context, arg GoesPlatformRegistryDeleteMissingParams) error
+ GoesPlatformRegistryListByProject(ctx context.Context, arg GoesPlatformRegistryListByProjectParams) ([]GoesPlatformRegistryListByProjectRow, error)
+ GoesPlatformRegistryUpsert(ctx context.Context, arg []GoesPlatformRegistryUpsertParams) *GoesPlatformRegistryUpsertBatchResults
+ GoesTelemetryConfigMappingsCreateBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsCreateBatchParams) *GoesTelemetryConfigMappingsCreateBatchBatchResults
+ GoesTelemetryConfigMappingsDeleteBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsDeleteBatchParams) *GoesTelemetryConfigMappingsDeleteBatchBatchResults
+ GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx context.Context, goesPlatformConfigFileID uuid.UUID) error
+ GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error)
+ GoesTelemetryConfigMappingsListForFiles(ctx context.Context, fileIds []uuid.UUID) ([]GoesTelemetryConfigMappings, error)
+ GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx context.Context, arg GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams) error
+ GoesTelemetryConfigSetUncommitted(ctx context.Context, id uuid.UUID) error
+ GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error)
HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error)
HeartbeatGetLatest(ctx context.Context) (time.Time, error)
HeartbeatList(ctx context.Context, resultLimit int32) ([]time.Time, error)
diff --git a/api/internal/dto/goes.go b/api/internal/dto/goes.go
new file mode 100644
index 00000000..79eec79e
--- /dev/null
+++ b/api/internal/dto/goes.go
@@ -0,0 +1,34 @@
+package dto
+
+import (
+ "time"
+
+ "github.com/danielgtaylor/huma/v2"
+ "github.com/google/uuid"
+)
+
+// GoesTelemetryConfigMappingDTO maps one platform sensor key to an optional
+// timeseries; a nil TimeseriesID leaves the sensor unmapped.
+type GoesTelemetryConfigMappingDTO struct {
+ PlatformSensorKey string `json:"platform_sensor_key"`
+ TimeseriesID *uuid.UUID `json:"timeseries_id" required:"false"`
+}
+
+// GoesPlatformConfigFileCommitDTO is the per-file commit result reported back
+// by the OpenDCS wrapper's commit callback.
+type GoesPlatformConfigFileCommitDTO struct {
+ ID uuid.UUID `json:"id"`
+ CommittedAt time.Time `json:"committed_at"`
+ CommitID uuid.UUID `json:"commit_id"`
+}
+
+// XMLPlatformConfigForm is the multipart upload form for a platform config
+// XML file; DryRun validates without persisting, and UpdateType controls how
+// existing mappings are reconciled on update.
+type XMLPlatformConfigForm struct {
+ PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"`
+ Alias string `form:"alias"`
+ DryRun bool `form:"dry_run"`
+ UpdateType XMLPlatformConfigUpdateType `form:"update_type" enum:"preserve_all,delete_not_found,delete_all" default:"preserve_all"`
+}
+
+// XMLPlatformConfigUpdateType enumerates mapping-reconciliation strategies for
+// config updates (see the enum values below).
+type XMLPlatformConfigUpdateType string
+
+const (
+ XMLPlatformConfigUpdateTypePreserveAll XMLPlatformConfigUpdateType = "preserve_all"
+ XMLPlatformConfigUpdateTypeDeleteNotFound XMLPlatformConfigUpdateType = "delete_not_found"
+ XMLPlatformConfigUpdateTypeDeleteAll XMLPlatformConfigUpdateType = "delete_all"
+)
diff --git a/api/internal/email/client.go b/api/internal/email/client.go
index 063e07c7..06e4e477 100644
--- a/api/internal/email/client.go
+++ b/api/internal/email/client.go
@@ -26,11 +26,11 @@ func (c *Client) Send(ctx context.Context, subject, textBody string, bcc []strin
// URLOpenerFunc turns a URL into a *Client.
type URLOpenerFunc func(ctx context.Context, u *url.URL) (*Client, error)
-var UrlOpeners = map[string]URLOpenerFunc{}
+var URLOpeners = map[string]URLOpenerFunc{}
// RegisterURLScheme registers an opener for scheme.
func RegisterURLScheme(sch string, opener URLOpenerFunc) {
- UrlOpeners[sch] = opener
+ URLOpeners[sch] = opener
}
// OpenURL parses rawurl, looks up its scheme, and invokes the opener.
@@ -39,7 +39,7 @@ func OpenURL(ctx context.Context, rawurl string) (*Client, error) {
if err != nil {
return nil, fmt.Errorf("email: parse URL: %w", err)
}
- opener, ok := UrlOpeners[u.Scheme]
+ opener, ok := URLOpeners[u.Scheme]
if !ok {
return nil, fmt.Errorf("email: unsupported scheme %q", u.Scheme)
}
diff --git a/api/internal/email/client_test.go b/api/internal/email/client_test.go
index 56f443c1..244a7605 100644
--- a/api/internal/email/client_test.go
+++ b/api/internal/email/client_test.go
@@ -70,7 +70,7 @@ func TestOpenURL_RegisterAndOpen_Success(t *testing.T) {
// Register and ensure clean-up
email.RegisterURLScheme(scheme, opener)
- defer delete(email.UrlOpeners, scheme)
+ defer delete(email.URLOpeners, scheme)
raw := scheme + "://foo.bar/baz?x=1"
cli, err := email.OpenURL(context.Background(), raw)
diff --git a/api/internal/email/email.go b/api/internal/email/email.go
index 9d2929a6..fdfda752 100644
--- a/api/internal/email/email.go
+++ b/api/internal/email/email.go
@@ -1,3 +1,4 @@
+// Package email provides structures and methods for formatting email content using templates.
package email
import (
diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go
new file mode 100644
index 00000000..c55d9ac7
--- /dev/null
+++ b/api/internal/handler/goes.go
@@ -0,0 +1,342 @@
+package handler
+
+import (
+ "context"
+ "crypto/subtle"
+ "errors"
+ "io"
+ "net/http"
+ "net/url"
+
+ "github.com/USACE/instrumentation-api/api/v4/internal/ctxkey"
+ "github.com/USACE/instrumentation-api/api/v4/internal/db"
+ "github.com/USACE/instrumentation-api/api/v4/internal/dto"
+ "github.com/USACE/instrumentation-api/api/v4/internal/httperr"
+ "github.com/USACE/instrumentation-api/api/v4/internal/service"
+ "github.com/USACE/instrumentation-api/api/v4/internal/util"
+ "github.com/danielgtaylor/huma/v2"
+)
+
+// goesTags groups all GOES telemetry operations in the OpenAPI spec.
+var goesTags = []string{"GOES Telemetry"}
+
+// TelemetrySourceIDParam binds the {telemetry_source_id} path segment.
+type TelemetrySourceIDParam struct {
+ TelemetrySourceID UUID `path:"telemetry_source_id"`
+}
+
+// TelemetryConfigIDParam binds the {telemetry_config_id} path segment.
+type TelemetryConfigIDParam struct {
+ TelemetryConfigID UUID `path:"telemetry_config_id"`
+}
+
+// RegisterGoesTelemetry registers all GOES telemetry routes on api: telemetry
+// source listing, platform configuration XML file CRUD, timeseries mappings,
+// validate/commit against the OpenDCS wrapper, and the internal endpoints the
+// wrapper calls back into.
+func (h *APIHandler) RegisterGoesTelemetry(api huma.API) {
+ huma.Register(api, huma.Operation{
+ Middlewares: h.Public,
+ OperationID: "goes-telemetry-client-list",
+ Method: http.MethodGet,
+ Path: "/domains/goes",
+ Description: "list of goes client instances (opendcs)",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct{}) (*Response[[]db.VGoesTelemetrySource], error) {
+ aa, err := h.DBService.GoesTelemetrySourceList(ctx)
+ if err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return NewResponse(aa), nil
+ })
+
+ // NOTE(review): registered with h.Public — confirm raw config file content
+ // should be readable without project membership.
+ huma.Register(api, huma.Operation{
+ Middlewares: h.Public,
+ OperationID: "goes-telemetry-config-get",
+ Method: http.MethodGet,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}",
+ Description: "gets a platform configuration xml file",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ TelemetryConfigIDParam
+ }) (*Response[db.GoesPlatformConfigFile], error) {
+ a, err := h.DBService.GoesPlatformConfigFileGet(ctx, input.TelemetryConfigID.UUID)
+ if err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return NewResponse(a), nil
+ })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.ProjectAdmin,
+ OperationID: "goes-telemetry-config-create",
+ Method: http.MethodPost,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}",
+ Description: "create a goes telemetry configuration",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ RawBody huma.MultipartFormFiles[dto.XMLPlatformConfigForm]
+ }) (*Response[service.DBImportResponse], error) {
+ // Profile is expected in context (presumably set by the auth/ProjectAdmin
+ // middleware); the type assertion panics if absent — TODO(review) confirm.
+ p := ctx.Value(ctxkey.Profile).(db.VProfile)
+ formData := input.RawBody.Data()
+ xmlDoc, err := io.ReadAll(formData.PlatformConfig)
+ if err != nil {
+ return nil, httperr.BadRequest(err)
+ }
+ if len(xmlDoc) == 0 {
+ return nil, httperr.BadRequest(errors.New("uploaded file is empty"))
+ }
+ // Default the display alias to the uploaded filename.
+ alias := formData.Alias
+ if alias == "" {
+ alias = formData.PlatformConfig.Filename
+ }
+ a, err := h.DBService.GoesPlatformConfigFileCreate(ctx, service.GoesPlatformConfigFileCreateParams{
+ DryRun: formData.DryRun,
+ GoesPlatformConfigFileCreateParams: db.GoesPlatformConfigFileCreateParams{
+ GoesTelemetrySourceID: input.TelemetrySourceID.UUID,
+ ProjectID: input.ProjectID.UUID,
+ Name: formData.PlatformConfig.Filename,
+ SizeBytes: formData.PlatformConfig.Size,
+ Alias: alias,
+ Content: string(xmlDoc),
+ CreatedBy: p.ID,
+ },
+ })
+ if err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return NewResponse(a), nil
+ })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.ProjectAdmin,
+ OperationID: "goes-telemetry-config-update",
+ Method: http.MethodPut,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}",
+ Description: "updates a goes telemetry configuration",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ TelemetryConfigIDParam
+ RawBody huma.MultipartFormFiles[dto.XMLPlatformConfigForm]
+ }) (*Response[service.DBImportResponse], error) {
+ formData := input.RawBody.Data()
+ xmlDoc, err := io.ReadAll(formData.PlatformConfig)
+ if err != nil {
+ return nil, httperr.BadRequest(err)
+ }
+ if len(xmlDoc) == 0 {
+ return nil, httperr.BadRequest(errors.New("uploaded file is empty"))
+ }
+
+ // Default the display alias to the uploaded filename.
+ alias := formData.Alias
+ if alias == "" {
+ alias = formData.PlatformConfig.Filename
+ }
+
+ a, err := h.DBService.GoesPlatformConfigFileUpdate(ctx, service.GoesPlatformConfigFileUpdateParams{
+ DryRun: formData.DryRun,
+ UpdateType: formData.UpdateType,
+ GoesPlatformConfigFileUpdateParams: db.GoesPlatformConfigFileUpdateParams{
+ ID: input.TelemetryConfigID.UUID,
+ Name: formData.PlatformConfig.Filename,
+ Alias: alias,
+ SizeBytes: formData.PlatformConfig.Size,
+ Content: string(xmlDoc),
+ },
+ })
+ if err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+
+ return NewResponse(a), nil
+ })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.ProjectAdmin,
+ OperationID: "goes-telemetry-config-delete",
+ Method: http.MethodDelete,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}",
+ Description: "soft-delete a goes telemetry configuration (removal will be applied on next commit)",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ TelemetryConfigIDParam
+ }) (*Response[struct{}], error) {
+ p := ctx.Value(ctxkey.Profile).(db.VProfile)
+ if err := h.DBService.GoesPlatformConfigFileDelete(ctx, db.GoesPlatformConfigFileDeleteParams{
+ DeletedBy: &p.ID,
+ ID: input.TelemetryConfigID.UUID,
+ }); err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return nil, nil
+ })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.Public,
+ OperationID: "goes-telemetry-config-mapping-list",
+ Method: http.MethodGet,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}/mappings",
+ Description: "lists goes telemetry timeseries mappings",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ TelemetryConfigIDParam
+ }) (*Response[[]db.GoesTelemetryConfigMappings], error) {
+ aa, err := h.DBService.GoesTelemetryConfigMappingsList(ctx, input.TelemetryConfigID.UUID)
+ if err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return NewResponse(aa), nil
+ })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.ProjectAdmin,
+ OperationID: "goes-update-mappings",
+ Method: http.MethodPut,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}/mappings",
+ Description: "updates goes telemetry timeseries mappings",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ TelemetryConfigIDParam
+ Body []dto.GoesTelemetryConfigMappingDTO `contentType:"application/json"`
+ }) (*Response[struct{}], error) {
+ if err := h.DBService.GoesTelemetryConfigMappingsUpdate(ctx, input.TelemetryConfigID.UUID, input.Body); err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return nil, nil
+ })
+
+ // Validate all uncommitted (desired) XML for this project/source.
+ huma.Register(api, huma.Operation{
+ Middlewares: h.ProjectAdmin,
+ OperationID: "goes-telemetry-validate",
+ Method: http.MethodPost,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/validate",
+ Description: "validates all uncommitted platform xml files for the project",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ }) (*Response[service.DBImportResponse], error) {
+ // Assumes a service method which validates the project desired set
+ // by calling the OpenDCS wrapper with validate-only.
+ resp, err := h.DBService.GoesValidateProjectUncommitted(ctx, service.GoesValidateProjectUncommittedParams{
+ ProjectID: input.ProjectID.UUID,
+ GoesTelemetrySourceID: input.TelemetrySourceID.UUID,
+ HTTPClient: h.HTTPClient,
+ })
+ if err != nil {
+ return nil, httperr.Message(http.StatusConflict, err.Error())
+ }
+ return NewResponse(resp), nil
+ })
+
+ // Commit desired active set for project/source.
+ huma.Register(api, huma.Operation{
+ Middlewares: h.ProjectAdmin,
+ OperationID: "goes-telemetry-commit",
+ Method: http.MethodPost,
+ Path: "/projects/{project_id}/goes/{telemetry_source_id}/commit",
+ Description: "commits the desired project configuration to the shared OpenDCS instance",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ ProjectIDParam
+ TelemetrySourceIDParam
+ }) (*Response[service.GoesTelemetryCommitResponse], error) {
+ p := ctx.Value(ctxkey.Profile).(db.VProfile)
+
+ // Constant-time emptiness check: matches == 1 means ApplicationKey == "".
+ if matches := subtle.ConstantTimeCompare([]byte(h.Config.ApplicationKey), []byte("")); matches == 1 || h.Config.OpenDCSWrapperURL == "" {
+ return nil, httperr.InternalServerError(errors.New("missing OPENDCS_WRAPPER_URL and/or APPLICATION_KEY"))
+ }
+
+ a, err := h.DBService.GoesCommitEntireSet(ctx, h.HTTPClient, service.GoesCommitEntireSetParams{
+ ProjectID: input.ProjectID.UUID,
+ SourceID: input.TelemetrySourceID.UUID,
+ CreatedBy: p.ID,
+ OpendcsBaseURL: h.Config.OpenDCSWrapperURL,
+ OpendcsAuthToken: h.Config.ApplicationKey,
+ })
+ if err != nil {
+ // Redact the auth key from any URL embedded in the transport error
+ // before it is surfaced to the caller.
+ var urlErr *url.Error
+ if errors.As(err, &urlErr) {
+ u, _ := url.Parse(urlErr.URL)
+ util.RedactQueryParams(u, "key")
+ urlErr.URL = u.String()
+ err = urlErr
+ }
+ return nil, httperr.Message(http.StatusConflict, err.Error())
+ }
+
+ return NewResponse(a), nil
+ })
+
+ // // Project-scoped rollback to previous commit (restores project mappings + xml set).
+ // huma.Register(api, huma.Operation{
+ // Middlewares: h.ProjectAdmin,
+ // OperationID: "goes-telemetry-rollback",
+ // Method: http.MethodPost,
+ // Path: "/projects/{project_id}/goes/{telemetry_source_id}/rollback",
+ // Description: "rolls back project configuration (xml + mappings) to the previous commit",
+ // Tags: goesTags,
+ // }, func(ctx context.Context, input *struct {
+ // ProjectIDParam
+ // TelemetrySourceIDParam
+ // }) (*Response[service.GoesTelemetryCommitResponse], error) {
+ // p := ctx.Value(ctxkey.Profile).(db.VProfile)
+ //
+ // if matches := subtle.ConstantTimeCompare([]byte(h.Config.ApplicationKey), []byte("")); matches == 1 || h.Config.OpenDCSWrapperURL == "" {
+ // return nil, httperr.InternalServerError(errors.New("missing OPENDCS_WRAPPER_URL and/or APPLICATION_KEY"))
+ // }
+ //
+ // a, err := h.DBService.GoesRollbackProjectToPrevious(ctx, h.HTTPClient, service.GoesCommitEntireSetParams{
+ // ProjectID: input.ProjectID.UUID,
+ // SourceID: input.TelemetrySourceID.UUID,
+ // CreatedBy: p.ID,
+ // OpendcsBaseURL: h.Config.OpenDCSWrapperURL,
+ // OpendcsAuthToken: h.Config.ApplicationKey,
+ // })
+ // if err != nil {
+ // return nil, httperr.Message(http.StatusConflict, err.Error())
+ // }
+ // return NewResponse(a), nil
+ // })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.InternalApp,
+ OperationID: "goes-telemetry-commit-callback",
+ Method: http.MethodPost,
+ Path: "/callback/goes/{telemetry_source_id}/commit",
+ Description: "callback to update API DB state after OpenDCS wrapper commit completes",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ TelemetrySourceIDParam
+ Body []dto.GoesPlatformConfigFileCommitDTO `contentType:"application/json"`
+ }) (*struct{}, error) {
+ if err := h.DBService.GoesPlatformConfigCommit(ctx, input.Body); err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return nil, nil
+ })
+
+ huma.Register(api, huma.Operation{
+ Middlewares: h.InternalApp,
+ OperationID: "goes-files-list-for-telemetry-source",
+ Method: http.MethodGet,
+ Path: "/goes/{telemetry_source_id}/configs/committed",
+ Description: "lists committed platform configuration xml content for a telemetry source (consumed by the OpenDCS wrapper)",
+ Tags: goesTags,
+ }, func(ctx context.Context, input *struct {
+ TelemetrySourceIDParam
+ }) (*Response[[]string], error) {
+ aa, err := h.DBService.GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx, input.TelemetrySourceID.UUID)
+ if err != nil {
+ return nil, httperr.InternalServerError(err)
+ }
+ return NewResponse(aa), nil
+ })
+}
diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go
index 6cf1b422..520d9d61 100644
--- a/api/internal/handler/handler.go
+++ b/api/internal/handler/handler.go
@@ -1,4 +1,4 @@
-// Package handler contains common types and utilities for HTTP handlers.
+// Package handler provides common types and utilities for HTTP handlers.
package handler
import (
diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go
new file mode 100644
index 00000000..d2d24670
--- /dev/null
+++ b/api/internal/service/goes.go
@@ -0,0 +1,342 @@
+package service
+
+import (
+ "context"
+ "encoding/json"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/USACE/instrumentation-api/api/v4/internal/db"
+ "github.com/USACE/instrumentation-api/api/v4/internal/dto"
+ "github.com/google/uuid"
+)
+
+// DBImportResponse is the generic result of a platform XML import/validation
+// request: the id of the created file (when one was persisted) plus a raw
+// JSON payload describing the validation outcome.
+type DBImportResponse struct {
+	PlatformFileID *uuid.UUID      `json:"platform_file_id,omitempty"`
+	Response       json.RawMessage `json:"response"`
+}
+
+// xmlValidationResult is the locally-produced validation payload marshaled
+// into DBImportResponse.Response. It reflects structural XML checks done in
+// this service only, not wrapper-side dbimport validation.
+type xmlValidationResult struct {
+	Valid        bool     `json:"valid"`
+	Message      string   `json:"message,omitempty"`
+	SensorCount  int      `json:"sensor_count"`
+	SensorKeys   []string `json:"sensor_keys,omitempty"`
+	ValidatedAt  string   `json:"validated_at"`
+	Warnings     []string `json:"warnings,omitempty"`
+	ParseRootTag string   `json:"root_tag,omitempty"`
+}
+
+// GoesPlatformConfigFileCreateParams wraps the DB create params with a DryRun
+// flag; when DryRun is set only validation is performed and nothing is stored.
+type GoesPlatformConfigFileCreateParams struct {
+	DryRun bool
+	db.GoesPlatformConfigFileCreateParams
+}
+
+// GoesPlatformConfigFileCreate validates the uploaded platform XML and, unless
+// arg.DryRun is set, persists the file and seeds one (unmapped) sensor-key
+// mapping row per sensor found in the document. The validation payload is
+// placed in out.Response in both the success and failure cases.
+func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (DBImportResponse, error) {
+	var out DBImportResponse
+
+	root, names, err := extractSensorNames(arg.Content)
+	if err != nil {
+		// Marshal a structured failure payload so the caller still gets a
+		// validation result alongside the error (marshal error ignored:
+		// the struct is always marshalable).
+		out.Response, _ = json.Marshal(xmlValidationResult{
+			Valid:       false,
+			Message:     err.Error(),
+			SensorCount: 0,
+			SensorKeys:  nil,
+			ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano),
+			Warnings:    []string{"xml is not structurally valid; wrapper validation not attempted"},
+		})
+		return out, err
+	}
+
+	out.Response, err = buildLocalValidationResponse(root, names)
+	if err != nil {
+		return out, err
+	}
+
+	// Dry run: report the validation result without persisting anything.
+	if arg.DryRun {
+		return out, nil
+	}
+
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return out, err
+	}
+	defer s.TxDo(ctx, tx.Rollback)
+	qtx := s.WithTx(tx)
+
+	newID, err := qtx.GoesPlatformConfigFileCreate(ctx, arg.GoesPlatformConfigFileCreateParams)
+	if err != nil {
+		return out, fmt.Errorf("GoesPlatformConfigFileCreate %w", err)
+	}
+	out.PlatformFileID = &newID
+
+	// Seed a mapping row per extracted sensor key; TimeseriesID starts nil
+	// until a user maps the sensor to a timeseries.
+	batch := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names))
+	for _, k := range names {
+		batch = append(batch, db.GoesTelemetryConfigMappingsCreateBatchParams{
+			GoesPlatformConfigFileID: newID,
+			PlatformSensorKey:        k,
+			TimeseriesID:             nil,
+		})
+	}
+
+	// batchExecErr captures the first batch error into err.
+	qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, batch).Exec(batchExecErr(&err))
+	if err != nil {
+		return out, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err)
+	}
+
+	return out, tx.Commit(ctx)
+}
+
+// GoesPlatformConfigFileUpdateParams wraps the DB update params with a DryRun
+// flag and the mapping-reconciliation strategy to apply after the update.
+type GoesPlatformConfigFileUpdateParams struct {
+	DryRun     bool
+	UpdateType dto.XMLPlatformConfigUpdateType
+	db.GoesPlatformConfigFileUpdateParams
+}
+
+// GoesPlatformConfigFileUpdate validates the replacement XML and, unless
+// arg.DryRun is set, updates the stored file and reconciles its sensor-key
+// mappings according to arg.UpdateType:
+//   - DeleteAll: drop all mappings and recreate from the new XML.
+//   - DeleteNotFound: keep existing mappings that still exist, add new keys,
+//     delete mappings whose key no longer appears in the XML.
+//   - otherwise: only add mappings for newly-appearing keys.
+func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) (DBImportResponse, error) {
+	var out DBImportResponse
+
+	root, names, err := extractSensorNames(arg.Content)
+	if err != nil {
+		// Structured failure payload mirrors GoesPlatformConfigFileCreate.
+		out.Response, _ = json.Marshal(xmlValidationResult{
+			Valid:       false,
+			Message:     err.Error(),
+			SensorCount: 0,
+			SensorKeys:  nil,
+			ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano),
+			Warnings:    []string{"xml is not structurally valid; wrapper validation not attempted"},
+		})
+		return out, err
+	}
+
+	out.Response, err = buildLocalValidationResponse(root, names)
+	if err != nil {
+		return out, err
+	}
+
+	if arg.DryRun {
+		return out, nil
+	}
+
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return out, err
+	}
+	defer s.TxDo(ctx, tx.Rollback)
+	qtx := s.WithTx(tx)
+
+	if err := qtx.GoesPlatformConfigFileUpdate(ctx, arg.GoesPlatformConfigFileUpdateParams); err != nil {
+		return out, fmt.Errorf("GoesPlatformConfigFileUpdate %w", err)
+	}
+
+	// DeleteAll: full replace — existing mappings (including any timeseries
+	// assignments) are discarded and fresh unmapped rows are created.
+	if arg.UpdateType == dto.XMLPlatformConfigUpdateTypeDeleteAll {
+		if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, arg.ID); err != nil {
+			return out, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err)
+		}
+
+		batch := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names))
+		for _, k := range names {
+			batch = append(batch, db.GoesTelemetryConfigMappingsCreateBatchParams{
+				GoesPlatformConfigFileID: arg.ID,
+				PlatformSensorKey:        k,
+				TimeseriesID:             nil,
+			})
+		}
+
+		qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, batch).Exec(batchExecErr(&err))
+		if err != nil {
+			return out, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err)
+		}
+
+		return out, tx.Commit(ctx)
+	}
+
+	existing, err := qtx.GoesTelemetryConfigMappingsList(ctx, arg.ID)
+	if err != nil {
+		return out, fmt.Errorf("GoesTelemetryConfigMappingsList %w", err)
+	}
+
+	existingKeys := make(map[string]struct{}, len(existing))
+	for _, m := range existing {
+		existingKeys[m.PlatformSensorKey] = struct{}{}
+	}
+
+	// Diff the new key set against the existing one. Keys in both sets are
+	// removed from existingKeys as they are matched, so what remains in
+	// existingKeys afterwards is the set of keys no longer present in the XML.
+	var newMappings []db.GoesTelemetryConfigMappingsCreateBatchParams
+	var removedMappings []db.GoesTelemetryConfigMappingsDeleteBatchParams
+	for _, k := range names {
+		if _, ok := existingKeys[k]; ok {
+			delete(existingKeys, k)
+			continue
+		}
+		newMappings = append(newMappings, db.GoesTelemetryConfigMappingsCreateBatchParams{
+			GoesPlatformConfigFileID: arg.ID,
+			PlatformSensorKey:        k,
+			TimeseriesID:             nil,
+		})
+	}
+	if arg.UpdateType == dto.XMLPlatformConfigUpdateTypeDeleteNotFound {
+		for name := range existingKeys {
+			removedMappings = append(removedMappings, db.GoesTelemetryConfigMappingsDeleteBatchParams{
+				GoesPlatformConfigFileID: arg.ID,
+				PlatformSensorKey:        name,
+			})
+		}
+	}
+
+	if len(newMappings) > 0 {
+		qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, newMappings).Exec(batchExecErr(&err))
+		if err != nil {
+			return out, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err)
+		}
+	}
+	if len(removedMappings) > 0 {
+		qtx.GoesTelemetryConfigMappingsDeleteBatch(ctx, removedMappings).Exec(batchExecErr(&err))
+		if err != nil {
+			return out, fmt.Errorf("GoesTelemetryConfigMappingsDeleteBatch %w", err)
+		}
+	}
+
+	return out, tx.Commit(ctx)
+}
+
+// GoesTelemetryConfigMappingsUpdate replaces every sensor-key -> timeseries
+// mapping for the given platform config file and flags the config as
+// uncommitted so the edit is picked up by the next commit.
+func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID uuid.UUID, mappings []dto.GoesTelemetryConfigMappingDTO) error {
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return err
+	}
+	defer s.TxDo(ctx, tx.Rollback)
+	q := s.WithTx(tx)
+
+	// Any mapping edit invalidates the previously committed state.
+	if err := q.GoesTelemetryConfigSetUncommitted(ctx, cfgID); err != nil {
+		return fmt.Errorf("GoesTelemetryConfigSetUncommitted %w", err)
+	}
+
+	// Full replace: drop all existing rows, then re-insert the new set.
+	if err := q.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, cfgID); err != nil {
+		return fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err)
+	}
+
+	rows := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(mappings))
+	for _, m := range mappings {
+		rows = append(rows, db.GoesTelemetryConfigMappingsCreateBatchParams{
+			GoesPlatformConfigFileID: cfgID,
+			PlatformSensorKey:        m.PlatformSensorKey,
+			TimeseriesID:             m.TimeseriesID,
+		})
+	}
+	q.GoesTelemetryConfigMappingsCreateBatch(ctx, rows).Exec(batchExecErr(&err))
+	if err != nil {
+		return fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err)
+	}
+
+	return tx.Commit(ctx)
+}
+
+// PlatformXML is the decode target for a DECODES <Platform> element.
+type PlatformXML struct {
+	XMLName        xml.Name          `xml:"Platform"`
+	PlatformConfig PlatformConfigXML `xml:"PlatformConfig"`
+}
+
+// PlatformConfigXML holds the sensor list of a <PlatformConfig> element.
+type PlatformConfigXML struct {
+	ConfigSensors []ConfigSensorXML `xml:"ConfigSensor"`
+}
+
+// ConfigSensorXML is one <ConfigSensor>: the child <SensorName> element and
+// the SensorNumber attribute, combined into "name.number" sensor keys.
+type ConfigSensorXML struct {
+	SensorName   string `xml:"SensorName"`
+	SensorNumber string `xml:"SensorNumber,attr"`
+}
+
+// extractSensorNames parses a DECODES XML document and returns the root
+// element name plus the sorted, de-duplicated "SensorName.SensorNumber" keys
+// it contains. Accepted roots are <Platform> (one platform) and <Database>
+// (many platforms); any other root is an error.
+func extractSensorNames(xmlStr string) (string, []string, error) {
+	decoder := xml.NewDecoder(strings.NewReader(xmlStr))
+
+	for {
+		token, err := decoder.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				// Never reached a start element: the document is empty.
+				return "", nil, fmt.Errorf("empty xml document")
+			}
+			return "", nil, fmt.Errorf("failed to read xml token: %w", err)
+		}
+
+		// Skip prologue, comments, and whitespace until the root element.
+		start, isStart := token.(xml.StartElement)
+		if !isStart {
+			continue
+		}
+
+		if start.Name.Local == "Platform" {
+			var p PlatformXML
+			if err := decoder.DecodeElement(&p, &start); err != nil {
+				return "Platform", nil, fmt.Errorf("failed to decode PlatformXML: %w", err)
+			}
+			return "Platform", normalizeSensorKeys(extractFromPlatforms([]PlatformXML{p})), nil
+		}
+
+		if start.Name.Local == "Database" {
+			var wrapper struct {
+				Platforms []PlatformXML `xml:"Platform"`
+			}
+			if err := decoder.DecodeElement(&wrapper, &start); err != nil {
+				return "Database", nil, fmt.Errorf("failed to decode Database: %w", err)
+			}
+			return "Database", normalizeSensorKeys(extractFromPlatforms(wrapper.Platforms)), nil
+		}
+
+		return start.Name.Local, nil, fmt.Errorf("unexpected root element <%s>", start.Name.Local)
+	}
+}
+
+// extractFromPlatforms flattens the ConfigSensor entries of the given
+// platforms into "name.number" keys, skipping sensors missing either half.
+func extractFromPlatforms(platforms []PlatformXML) []string {
+	keys := make([]string, 0)
+	for _, p := range platforms {
+		for _, cs := range p.PlatformConfig.ConfigSensors {
+			sensorName := strings.TrimSpace(cs.SensorName)
+			sensorNum := strings.TrimSpace(cs.SensorNumber)
+			if sensorName == "" || sensorNum == "" {
+				// Both parts of the key are required.
+				continue
+			}
+			keys = append(keys, sensorName+"."+sensorNum)
+		}
+	}
+	return keys
+}
+
+// normalizeSensorKeys trims whitespace, drops empty strings and duplicates,
+// and returns the surviving keys in sorted order so downstream hashing and
+// comparison are deterministic.
+func normalizeSensorKeys(keys []string) []string {
+	unique := make(map[string]struct{}, len(keys))
+	result := make([]string, 0, len(keys))
+
+	for _, raw := range keys {
+		key := strings.TrimSpace(raw)
+		if key == "" {
+			continue
+		}
+		if _, dup := unique[key]; dup {
+			continue
+		}
+		unique[key] = struct{}{}
+		result = append(result, key)
+	}
+
+	sort.Strings(result)
+	return result
+}
+
+// buildLocalValidationResponse marshals a successful local-parse validation
+// result (root tag plus extracted sensor keys) into a raw JSON payload.
+func buildLocalValidationResponse(root string, sensorKeys []string) (json.RawMessage, error) {
+	result := xmlValidationResult{
+		Valid:        true,
+		SensorCount:  len(sensorKeys),
+		SensorKeys:   sensorKeys,
+		ValidatedAt:  time.Now().UTC().Format(time.RFC3339Nano),
+		Warnings:     []string{"wrapper dbimport validation not executed for upload/update endpoints"},
+		ParseRootTag: root,
+	}
+	return json.Marshal(result)
+}
diff --git a/api/internal/service/goes_commit.go b/api/internal/service/goes_commit.go
new file mode 100644
index 00000000..078fec79
--- /dev/null
+++ b/api/internal/service/goes_commit.go
@@ -0,0 +1,737 @@
+package service
+
+import (
+ "bytes"
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "net/http"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/USACE/instrumentation-api/api/v4/internal/db"
+ "github.com/USACE/instrumentation-api/api/v4/internal/dto"
+ "github.com/google/uuid"
+)
+
+// OpendcsCommitFile is one platform config file sent to the OpenDCS wrapper
+// during a commit: its (possibly timeseries-injected) XML and SHA-256
+// checksum alongside identifying metadata.
+type OpendcsCommitFile struct {
+	FileID   uuid.UUID `json:"file_id"`
+	Name     string    `json:"name"`
+	Alias    string    `json:"alias"`
+	XML      string    `json:"xml"`
+	Checksum string    `json:"checksum"`
+}
+
+// OpendcsCommitRequest is the full payload for a wrapper commit call.
+type OpendcsCommitRequest struct {
+	CommitID  uuid.UUID           `json:"commit_id"`
+	ProjectID uuid.UUID           `json:"project_id"`
+	SourceID  uuid.UUID           `json:"goes_telemetry_source_id"`
+	Files     []OpendcsCommitFile `json:"files"`
+}
+
+// OpendcsCommitResponse mirrors the wrapper's commit response shape.
+type OpendcsCommitResponse struct {
+	Status string          `json:"status"` // "ok"|"error"
+	Log    string          `json:"log,omitempty"`
+	Data   json.RawMessage `json:"data,omitempty"`
+}
+
+// GoesCommitEntireSetParams identifies the project/source being committed and
+// carries wrapper connection settings.
+type GoesCommitEntireSetParams struct {
+	ProjectID        uuid.UUID
+	SourceID         uuid.UUID
+	CreatedBy        uuid.UUID
+	OpendcsBaseURL   string
+	OpendcsAuthToken string
+}
+
+// OpendcsCommitParams bundles wrapper connection settings with the request
+// body for a single commit HTTP call.
+type OpendcsCommitParams struct {
+	OpendcsBaseURL       string
+	OpendcsAuthToken     string
+	OpendcsCommitRequest OpendcsCommitRequest
+}
+
+// platformIdentity is the raw identity extracted from a platform XML file;
+// either field may be empty, but platformKey requires at least one.
+type platformIdentity struct {
+	PlatformID string
+	SiteName   string
+}
+
+// platformKey derives a stable lowercase registry key for a platform,
+// preferring the explicit platform id over the site name. An error is
+// returned when neither identifier is present.
+func platformKey(platformID, site string) (string, error) {
+	if id := strings.TrimSpace(platformID); id != "" {
+		return "platform:" + strings.ToLower(id), nil
+	}
+	if siteName := strings.TrimSpace(site); siteName != "" {
+		return "site:" + strings.ToLower(siteName), nil
+	}
+	return "", fmt.Errorf("missing platform_id and site")
+}
+
+// GoesTelemetryCommitResponse reports the commit row created for a
+// commit/rollback attempt plus the wrapper's raw response body (may be set
+// even when the overall operation errored).
+type GoesTelemetryCommitResponse struct {
+	CommitID    uuid.UUID       `json:"commit_id"`
+	RawResponse json.RawMessage `json:"raw_response"`
+}
+
+// GoesCommitEntireSet snapshots every uncommitted platform config file for a
+// project/source, records a pending commit and mapping set, pushes the
+// (timeseries-injected) XML to the OpenDCS wrapper, then finalizes the commit
+// row — active on wrapper success, failed otherwise. The returned response
+// carries the commit id and the wrapper's raw response when one was received.
+func (s *DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Client, arg GoesCommitEntireSetParams) (GoesTelemetryCommitResponse, error) {
+	var a GoesTelemetryCommitResponse
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return a, err
+	}
+	defer s.TxDo(ctx, tx.Rollback)
+	qtx := s.WithTx(tx)
+
+	// The current active commit (if any) becomes the parent of this one.
+	// NOTE(review): a real DB error is indistinguishable from "no active
+	// commit" here and is silently treated as the latter — confirm intended.
+	var prevCommitID *uuid.UUID
+	prev, err := qtx.GoesCommitGetActive(ctx, db.GoesCommitGetActiveParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+	})
+	if err == nil {
+		prevCommitID = &prev.ID
+	}
+
+	files, err := qtx.GoesPlatformConfigFilesListUncommitted(ctx, db.GoesPlatformConfigFilesListUncommittedParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+	})
+	if err != nil {
+		// Fixed: error label previously named a different query.
+		return a, fmt.Errorf("GoesPlatformConfigFilesListUncommitted: %w", err)
+	}
+	if len(files) == 0 {
+		return a, fmt.Errorf("no platform config files found for project/source")
+	}
+
+	// Derive a registry key per live (non-deleted) file so ownership
+	// conflicts with other projects can be rejected before anything changes.
+	desiredKeys := make(map[string]platformIdentity, len(files))
+	platformKeys := make([]string, 0, len(files))
+	for _, f := range files {
+		if f.Deleted {
+			continue
+		}
+		pid, site, err := extractPlatformIDAndSite([]byte(f.Content))
+		if err != nil {
+			return a, fmt.Errorf("extract platform id/site for file %s: %w", f.ID, err)
+		}
+		k, err := platformKey(pid, site)
+		if err != nil {
+			return a, fmt.Errorf("platform key for file %s: %w", f.ID, err)
+		}
+		if _, exists := desiredKeys[k]; !exists {
+			platformKeys = append(platformKeys, k)
+		}
+		desiredKeys[k] = platformIdentity{PlatformID: pid, SiteName: site}
+	}
+
+	conflicts, err := qtx.GoesPlatformRegistryConflicts(ctx, db.GoesPlatformRegistryConflictsParams{
+		GoesTelemetrySourceID: arg.SourceID,
+		ProjectID:             arg.ProjectID,
+		PlatformKeys:          platformKeys,
+	})
+	if err != nil {
+		return a, fmt.Errorf("GoesPlatformRegistryConflicts: %w", err)
+	}
+	if len(conflicts) > 0 {
+		c := conflicts[0]
+		return a, fmt.Errorf("platform ownership conflict for %q (owned by project %s)", c.PlatformKey, c.ProjectID)
+	}
+
+	fileIDs := make([]uuid.UUID, len(files))
+	for i, f := range files {
+		fileIDs[i] = f.ID
+	}
+
+	mappings, err := qtx.GoesTelemetryConfigMappingsListForFiles(ctx, fileIDs)
+	if err != nil {
+		return a, fmt.Errorf("GoesTelemetryConfigMappingsListForFiles: %w", err)
+	}
+
+	// Hash the (file, sensor key, timeseries) triples in a canonical order so
+	// identical mapping sets always produce identical content hashes.
+	type mapRow struct {
+		fileID uuid.UUID
+		key    string
+		ts     uuid.UUID
+	}
+	rows := make([]mapRow, 0, len(mappings))
+	for _, m := range mappings {
+		var ts uuid.UUID
+		if m.TimeseriesID != nil {
+			ts = *m.TimeseriesID
+		}
+		rows = append(rows, mapRow{fileID: m.GoesPlatformConfigFileID, key: m.PlatformSensorKey, ts: ts})
+	}
+	sort.Slice(rows, func(i, j int) bool {
+		if rows[i].fileID != rows[j].fileID {
+			return rows[i].fileID.String() < rows[j].fileID.String()
+		}
+		if rows[i].key != rows[j].key {
+			return rows[i].key < rows[j].key
+		}
+		return rows[i].ts.String() < rows[j].ts.String()
+	})
+
+	h := sha256.New()
+	for _, r := range rows {
+		// NUL separators prevent ambiguous concatenations.
+		_, _ = h.Write([]byte(r.fileID.String()))
+		_, _ = h.Write([]byte{0})
+		_, _ = h.Write([]byte(r.key))
+		_, _ = h.Write([]byte{0})
+		_, _ = h.Write([]byte(r.ts.String()))
+		_, _ = h.Write([]byte{0})
+	}
+	contentHash := hex.EncodeToString(h.Sum(nil))
+
+	idempotencyKey := uuid.NewString()
+
+	mappingSet, err := qtx.GoesMappingSetCreate(ctx, db.GoesMappingSetCreateParams{
+		ProjectID:      arg.ProjectID,
+		CreatedBy:      arg.CreatedBy,
+		ContentHash:    contentHash,
+		IdempotencyKey: &idempotencyKey,
+	})
+	if err != nil {
+		return a, fmt.Errorf("GoesMappingSetCreate: %w", err)
+	}
+
+	entryParams := make([]db.GoesMappingSetEntryCreateBatchParams, 0, len(mappings))
+	for _, m := range mappings {
+		entryParams = append(entryParams, db.GoesMappingSetEntryCreateBatchParams{
+			MappingSetID:             mappingSet.ID,
+			GoesPlatformConfigFileID: m.GoesPlatformConfigFileID,
+			PlatformSensorKey:        m.PlatformSensorKey,
+			TimeseriesID:             m.TimeseriesID,
+		})
+	}
+	if len(entryParams) > 0 {
+		if _, err := qtx.GoesMappingSetEntryCreateBatch(ctx, entryParams); err != nil {
+			return a, fmt.Errorf("GoesMappingSetEntryCreateBatch: %w", err)
+		}
+	}
+
+	pending, err := qtx.GoesCommitCreatePending(ctx, db.GoesCommitCreatePendingParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		CreatedBy:             arg.CreatedBy,
+		PreviousCommitID:      prevCommitID,
+		IdempotencyKey:        &idempotencyKey,
+		MappingSetID:          &mappingSet.ID,
+	})
+	if err != nil {
+		return a, fmt.Errorf("GoesCommitCreatePending: %w", err)
+	}
+
+	keyToTS := make(map[string]uuid.UUID, len(mappings))
+	for _, m := range mappings {
+		if m.TimeseriesID == nil {
+			continue
+		}
+		keyToTS[m.PlatformSensorKey] = *m.TimeseriesID
+	}
+
+	committedFiles := make([]OpendcsCommitFile, 0, len(files))
+	for _, f := range files {
+		if f.Deleted {
+			// Deleted files are sent to the wrapper with their raw
+			// (uninjected) content and are not updated locally.
+			sum := sha256.Sum256([]byte(f.Content))
+			committedFiles = append(committedFiles, OpendcsCommitFile{
+				FileID:   f.ID,
+				Name:     f.Name,
+				Alias:    f.Alias,
+				XML:      f.Content, // raw content to override injected
+				Checksum: hex.EncodeToString(sum[:]),
+			})
+			// BUGFIX: without this continue, deleted files fell through and
+			// were appended a second time with injected content, and their
+			// commit artifacts were updated as if they were live files.
+			continue
+		}
+
+		committedXMLBytes, err := injectTimeseriesIDIntoPlatformXML([]byte(f.Content), keyToTS)
+		if err != nil {
+			return a, fmt.Errorf("inject timeseries id for file %s: %w", f.ID, err)
+		}
+		committedXML := string(committedXMLBytes)
+
+		if err := qtx.GoesPlatformConfigFileCommitArtifactsUpdate(ctx, db.GoesPlatformConfigFileCommitArtifactsUpdateParams{
+			ID:                f.ID,
+			CommittedContent:  committedXML,
+			CommittedCommitID: &pending.ID,
+		}); err != nil {
+			return a, fmt.Errorf("GoesPlatformConfigFileCommitArtifactsUpdate: %w", err)
+		}
+
+		sum := sha256.Sum256(committedXMLBytes)
+		committedFiles = append(committedFiles, OpendcsCommitFile{
+			FileID:   f.ID,
+			Name:     f.Name,
+			Alias:    f.Alias,
+			XML:      committedXML,
+			Checksum: hex.EncodeToString(sum[:]),
+		})
+	}
+
+	// Persist the pending commit and snapshots before calling the wrapper so
+	// the attempt is recorded even if the HTTP call fails.
+	if err := tx.Commit(ctx); err != nil {
+		return a, err
+	}
+
+	req := OpendcsCommitParams{
+		OpendcsBaseURL:   arg.OpendcsBaseURL,
+		OpendcsAuthToken: arg.OpendcsAuthToken,
+		OpendcsCommitRequest: OpendcsCommitRequest{
+			CommitID:  pending.ID,
+			ProjectID: arg.ProjectID,
+			SourceID:  arg.SourceID,
+			Files:     committedFiles,
+		},
+	}
+	rawResp, callErr := s.opendcsCommit(ctx, httpClient, req)
+
+	a.CommitID = pending.ID
+	a.RawResponse = rawResp
+
+	// Second transaction: record the outcome of the wrapper call.
+	tx2, err := s.db.Begin(ctx)
+	if err != nil {
+		if callErr != nil {
+			return a, callErr
+		}
+		return a, nil
+	}
+	defer s.TxDo(ctx, tx2.Rollback)
+	qtx2 := s.WithTx(tx2)
+
+	if callErr != nil {
+		// Best-effort: mark the commit failed and keep the wrapper response.
+		if err := qtx2.GoesCommitMarkFailed(ctx, db.GoesCommitMarkFailedParams{
+			ID:              pending.ID,
+			OpendcsResponse: rawResp,
+		}); err != nil {
+			s.logger.Error(ctx, "GoesCommitMarkFailed query failed", "error", err)
+		}
+		if err := tx2.Commit(ctx); err != nil {
+			s.logger.Error(ctx, "tx2.Commit failed", "error", err)
+		}
+		return a, callErr
+	}
+
+	if err := qtx2.GoesCommitMarkActive(ctx, db.GoesCommitMarkActiveParams{
+		ID:              pending.ID,
+		OpendcsResponse: rawResp,
+	}); err != nil {
+		return a, fmt.Errorf("GoesCommitMarkActive: %w", err)
+	}
+
+	// Reconcile the platform registry with the committed set: remove stale
+	// entries, then upsert the current ones pointing at this commit.
+	if err := qtx2.GoesPlatformRegistryDeleteMissing(ctx, db.GoesPlatformRegistryDeleteMissingParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		PlatformKeys:          platformKeys,
+	}); err != nil {
+		return a, fmt.Errorf("GoesPlatformRegistryDeleteMissing: %w", err)
+	}
+
+	upParams := make([]db.GoesPlatformRegistryUpsertParams, 0, len(platformKeys))
+	for _, k := range platformKeys {
+		id := desiredKeys[k]
+
+		// Copy to locals so the pointers do not alias the map value.
+		var pidPtr, sitePtr *string
+		if id.PlatformID != "" {
+			v := id.PlatformID
+			pidPtr = &v
+		}
+		if id.SiteName != "" {
+			v := id.SiteName
+			sitePtr = &v
+		}
+
+		upParams = append(upParams, db.GoesPlatformRegistryUpsertParams{
+			PlatformKey:           k,
+			ProjectID:             arg.ProjectID,
+			GoesTelemetrySourceID: arg.SourceID,
+			PlatformID:            pidPtr,
+			SiteName:              sitePtr,
+			CommitID:              pending.ID,
+		})
+	}
+	if len(upParams) > 0 {
+		var upErr error
+		qtx2.GoesPlatformRegistryUpsert(ctx, upParams).Exec(batchExecErr(&upErr))
+		if upErr != nil {
+			return a, fmt.Errorf("GoesPlatformRegistryUpsert: %w", upErr)
+		}
+	}
+
+	if err := tx2.Commit(ctx); err != nil {
+		return a, err
+	}
+
+	return a, nil
+}
+
+// GoesRollbackProjectToPrevious restores the previous commit for a
+// project/source: it re-sends the previous commit's file contents to the
+// wrapper as a new pending commit, then (on success) restores file contents
+// and mappings from the previous mapping set, soft-deletes files not in that
+// set, marks the new commit active, and reconciles the platform registry.
+func (s *DBService) GoesRollbackProjectToPrevious(ctx context.Context, httpClient *http.Client, arg GoesCommitEntireSetParams) (GoesTelemetryCommitResponse, error) {
+	var a GoesTelemetryCommitResponse
+
+	// Read-only transaction: locate the active commit, its parent, and the
+	// parent's file snapshots.
+	tx0, err := s.db.Begin(ctx)
+	if err != nil {
+		return a, err
+	}
+	defer s.TxDo(ctx, tx0.Rollback)
+	q0 := s.WithTx(tx0)
+
+	active, err := q0.GoesCommitGetActive(ctx, db.GoesCommitGetActiveParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+	})
+	if err != nil {
+		return a, fmt.Errorf("no active commit: %w", err)
+	}
+	if active.PreviousCommitID == nil {
+		return a, fmt.Errorf("active commit has no previous_commit_id")
+	}
+
+	prev, err := q0.GoesCommitGetByID(ctx, *active.PreviousCommitID)
+	if err != nil {
+		return a, fmt.Errorf("previous commit not found: %w", err)
+	}
+	if prev.MappingSetID == nil {
+		return a, fmt.Errorf("previous commit missing mapping_set_id")
+	}
+
+	prevFiles, err := q0.GoesPlatformConfigFilesListForCommitByCommitID(ctx, db.GoesPlatformConfigFilesListForCommitByCommitIDParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		CommittedCommitID:     &prev.ID,
+	})
+	if err != nil {
+		return a, fmt.Errorf("GoesPlatformConfigFilesListForCommitByCommitID: %w", err)
+	}
+	if len(prevFiles) == 0 {
+		return a, fmt.Errorf("previous commit has no files")
+	}
+
+	if err := tx0.Commit(ctx); err != nil {
+		return a, err
+	}
+
+	// Derive registry keys for the files being restored (same scheme as
+	// GoesCommitEntireSet).
+	desiredKeys := make(map[string]platformIdentity, len(prevFiles))
+	platformKeys := make([]string, 0, len(prevFiles))
+	for _, f := range prevFiles {
+		pid, site, err := extractPlatformIDAndSite([]byte(f.Content))
+		if err != nil {
+			return a, fmt.Errorf("extract platform id/site for file %s: %w", f.ID, err)
+		}
+		k, err := platformKey(pid, site)
+		if err != nil {
+			return a, fmt.Errorf("platform key for file %s: %w", f.ID, err)
+		}
+		if _, exists := desiredKeys[k]; !exists {
+			platformKeys = append(platformKeys, k)
+		}
+		desiredKeys[k] = platformIdentity{PlatformID: pid, SiteName: site}
+	}
+
+	// Short transaction: record the pending rollback commit before calling
+	// the wrapper so the attempt is durable.
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return a, err
+	}
+	defer s.TxDo(ctx, tx.Rollback)
+	qtx := s.WithTx(tx)
+
+	idempotencyKey := uuid.NewString()
+
+	pending, err := qtx.GoesCommitCreatePending(ctx, db.GoesCommitCreatePendingParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		CreatedBy:             arg.CreatedBy,
+		PreviousCommitID:      &active.ID,
+		IdempotencyKey:        &idempotencyKey,
+		MappingSetID:          prev.MappingSetID,
+	})
+	if err != nil {
+		return a, fmt.Errorf("GoesCommitCreatePending(rollback): %w", err)
+	}
+
+	if err := tx.Commit(ctx); err != nil {
+		return a, err
+	}
+
+	// Build the wrapper payload from the previous snapshots verbatim.
+	commitFiles := make([]OpendcsCommitFile, 0, len(prevFiles))
+	fileIDs := make([]uuid.UUID, 0, len(prevFiles))
+	for _, f := range prevFiles {
+		fileIDs = append(fileIDs, f.ID)
+		xb := []byte(f.Content)
+		sum := sha256.Sum256(xb)
+		commitFiles = append(commitFiles, OpendcsCommitFile{
+			FileID:   f.ID,
+			Name:     f.Name,
+			Alias:    f.Alias,
+			XML:      string(xb),
+			Checksum: hex.EncodeToString(sum[:]),
+		})
+	}
+
+	req := OpendcsCommitParams{
+		OpendcsBaseURL:   arg.OpendcsBaseURL,
+		OpendcsAuthToken: arg.OpendcsAuthToken,
+		OpendcsCommitRequest: OpendcsCommitRequest{
+			CommitID:  pending.ID,
+			ProjectID: arg.ProjectID,
+			SourceID:  arg.SourceID,
+			Files:     commitFiles,
+		},
+	}
+	rawResp, callErr := s.opendcsCommit(ctx, httpClient, req)
+
+	a.CommitID = pending.ID
+	a.RawResponse = rawResp
+
+	// Final transaction: record the outcome and restore local state.
+	tx2, err := s.db.Begin(ctx)
+	if err != nil {
+		if callErr != nil {
+			return a, callErr
+		}
+		return a, nil
+	}
+	defer s.TxDo(ctx, tx2.Rollback)
+	qtx2 := s.WithTx(tx2)
+
+	if callErr != nil {
+		// Best-effort bookkeeping; the wrapper error takes precedence.
+		_ = qtx2.GoesCommitMarkFailed(ctx, db.GoesCommitMarkFailedParams{ID: pending.ID, OpendcsResponse: rawResp})
+		_ = tx2.Commit(ctx)
+		return a, callErr
+	}
+
+	if err := qtx2.GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx, db.GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		MappingSetID:          *prev.MappingSetID,
+	}); err != nil {
+		return a, fmt.Errorf("replace mappings from previous mapping_set: %w", err)
+	}
+
+	for _, f := range prevFiles {
+		if err := qtx2.GoesPlatformConfigFileRestoreForRollback(ctx, db.GoesPlatformConfigFileRestoreForRollbackParams{
+			ID:                f.ID,
+			Content:           f.Content,
+			CommittedCommitID: &pending.ID,
+		}); err != nil {
+			return a, fmt.Errorf("restore file %s: %w", f.ID, err)
+		}
+	}
+
+	// Files added after the previous commit are soft-deleted by the rollback.
+	if err := qtx2.GoesPlatformConfigFileSoftDeleteNotInSet(ctx, db.GoesPlatformConfigFileSoftDeleteNotInSetParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		DeletedBy:             &arg.CreatedBy,
+		FileIds:               fileIDs,
+	}); err != nil {
+		return a, fmt.Errorf("soft delete files not in previous set: %w", err)
+	}
+
+	if err := qtx2.GoesCommitMarkActive(ctx, db.GoesCommitMarkActiveParams{ID: pending.ID, OpendcsResponse: rawResp}); err != nil {
+		return a, fmt.Errorf("GoesCommitMarkActive(rollback): %w", err)
+	}
+
+	// Reconcile the platform registry against the restored set.
+	if err := qtx2.GoesPlatformRegistryDeleteMissing(ctx, db.GoesPlatformRegistryDeleteMissingParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.SourceID,
+		PlatformKeys:          platformKeys,
+	}); err != nil {
+		return a, fmt.Errorf("GoesPlatformRegistryDeleteMissing: %w", err)
+	}
+
+	upParams := make([]db.GoesPlatformRegistryUpsertParams, 0, len(platformKeys))
+	for _, k := range platformKeys {
+		id := desiredKeys[k]
+		// Copy to locals so the pointers do not alias the map value.
+		var pidPtr, sitePtr *string
+		if id.PlatformID != "" {
+			v := id.PlatformID
+			pidPtr = &v
+		}
+		if id.SiteName != "" {
+			v := id.SiteName
+			sitePtr = &v
+		}
+		upParams = append(upParams, db.GoesPlatformRegistryUpsertParams{
+			PlatformKey:           k,
+			ProjectID:             arg.ProjectID,
+			GoesTelemetrySourceID: arg.SourceID,
+			PlatformID:            pidPtr,
+			SiteName:              sitePtr,
+			CommitID:              pending.ID,
+		})
+	}
+	if len(upParams) > 0 {
+		var upErr error
+		qtx2.GoesPlatformRegistryUpsert(ctx, upParams).Exec(batchExecErr(&upErr))
+		if upErr != nil {
+			return a, fmt.Errorf("GoesPlatformRegistryUpsert: %w", upErr)
+		}
+	}
+
+	if err := tx2.Commit(ctx); err != nil {
+		return a, err
+	}
+
+	return a, nil
+}
+
+// opendcsCommit POSTs the commit payload to the wrapper's /commit endpoint as
+// a multipart form (ids as fields, one "files" part per XML document named
+// "<file_id>.xml"). It returns the raw response body — also on non-2xx, so
+// callers can persist it — together with an error for transport failures or
+// non-2xx statuses.
+func (s *DBService) opendcsCommit(ctx context.Context, httpClient *http.Client, arg OpendcsCommitParams) (json.RawMessage, error) {
+	if arg.OpendcsBaseURL == "" {
+		return nil, fmt.Errorf("opendcsBaseURL not configured")
+	}
+	if arg.OpendcsAuthToken == "" {
+		return nil, fmt.Errorf("opendcsAuthToken not configured")
+	}
+
+	var buf bytes.Buffer
+	writer := multipart.NewWriter(&buf)
+
+	// Field writes to an in-memory buffer cannot fail meaningfully; errors
+	// would surface again at writer.Close.
+	_ = writer.WriteField("commit_id", arg.OpendcsCommitRequest.CommitID.String())
+	_ = writer.WriteField("project_id", arg.OpendcsCommitRequest.ProjectID.String())
+	_ = writer.WriteField("goes_telemetry_source_id", arg.OpendcsCommitRequest.SourceID.String())
+
+	for _, f := range arg.OpendcsCommitRequest.Files {
+		part, err := writer.CreateFormFile("files", f.FileID.String()+".xml")
+		if err != nil {
+			return nil, fmt.Errorf("create form file: %w", err)
+		}
+		r := strings.NewReader(f.XML)
+		if _, err := io.Copy(part, r); err != nil {
+			return nil, fmt.Errorf("copy file: %w", err)
+		}
+	}
+
+	// Close writes the terminating multipart boundary.
+	if err := writer.Close(); err != nil {
+		return nil, fmt.Errorf("close multipart writer: %w", err)
+	}
+
+	u := strings.TrimRight(arg.OpendcsBaseURL, "/") + "/commit"
+	httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, u, &buf)
+	if err != nil {
+		return nil, err
+	}
+
+	httpReq.Header.Set("Content-Type", writer.FormDataContentType())
+	// NOTE(review): the auth token is sent as a "key" query parameter; query
+	// strings commonly end up in access logs — confirm this matches the
+	// wrapper's API and is operationally acceptable.
+	q := httpReq.URL.Query()
+	q.Set("key", arg.OpendcsAuthToken)
+	httpReq.URL.RawQuery = q.Encode()
+
+	resp, err := httpClient.Do(httpReq)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	bodyBytes, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	// Treat anything outside 2xx as failure, but still return the body so
+	// the caller can persist the wrapper's diagnostics.
+	if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices {
+		s.logger.Error(ctx, "opendcs commit failed", "status_code", resp.StatusCode, "response_body", string(bodyBytes))
+		return bodyBytes, fmt.Errorf("opendcs commit failed: http %d", resp.StatusCode)
+	}
+
+	return bodyBytes, nil
+}
+
+// GoesValidateProjectUncommittedParams identifies which project/source's
+// uncommitted files to validate.
+type GoesValidateProjectUncommittedParams struct {
+	ProjectID             uuid.UUID
+	GoesTelemetrySourceID uuid.UUID
+	HTTPClient            *http.Client // kept for future wrapper validation; unused with current wrapper API
+}
+
+// GoesPlatformValidation is the per-file validation verdict.
+type GoesPlatformValidation struct {
+	PlatformFileID uuid.UUID `json:"platform_file_id"`
+	Name           string    `json:"name"`
+	Alias          string    `json:"alias"`
+	Valid          bool      `json:"valid"`
+	Message        string    `json:"message,omitempty"`
+	SensorCount    int       `json:"sensor_count,omitempty"`
+	SensorKeys     []string  `json:"sensor_keys,omitempty"`
+}
+
+// GoesProjectValidationResult aggregates per-file results; Valid is false if
+// any file failed structural validation.
+type GoesProjectValidationResult struct {
+	Valid       bool                     `json:"valid"`
+	ValidatedAt string                   `json:"validated_at"`
+	Files       []GoesPlatformValidation `json:"files"`
+	Warnings    []string                 `json:"warnings,omitempty"`
+}
+
+// GoesValidateProjectUncommitted runs local structural XML validation over
+// every uncommitted platform config file for the project/source and returns
+// an aggregated JSON report in out.Response. The first file-level parse error
+// (if any) is also returned as the function error.
+func (s *DBService) GoesValidateProjectUncommitted(ctx context.Context, arg GoesValidateProjectUncommittedParams) (DBImportResponse, error) {
+	var out DBImportResponse
+
+	files, err := s.GoesPlatformConfigFilesListUncommitted(ctx, db.GoesPlatformConfigFilesListUncommittedParams{
+		ProjectID:             arg.ProjectID,
+		GoesTelemetrySourceID: arg.GoesTelemetrySourceID,
+	})
+	if err != nil {
+		// Fixed: error label previously named a different query
+		// ("GoesPlatformConfigFilesListForCommit").
+		return out, fmt.Errorf("GoesPlatformConfigFilesListUncommitted: %w", err)
+	}
+	if len(files) == 0 {
+		// Nothing to validate is reported as valid, with a warning.
+		result := GoesProjectValidationResult{
+			Valid:       true,
+			ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano),
+			Files:       nil,
+			Warnings:    []string{"no platform configuration files found for project/source"},
+		}
+		b, _ := json.Marshal(result)
+		out.Response = b
+		return out, nil
+	}
+
+	res := GoesProjectValidationResult{
+		Valid:       true,
+		ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano),
+		Files:       make([]GoesPlatformValidation, 0, len(files)),
+		Warnings: []string{
+			"wrapper dbimport validation is not executed by this endpoint (current wrapper requires file paths or would import)",
+		},
+	}
+
+	// Validate every file even after a failure so the report is complete;
+	// remember only the first error for the return value.
+	var firstErr error
+
+	for _, f := range files {
+		v := GoesPlatformValidation{
+			PlatformFileID: f.ID,
+			Name:           f.Name,
+			Alias:          f.Alias,
+			Valid:          true,
+		}
+
+		_, keys, e := extractSensorNames(f.Content)
+		if e != nil {
+			v.Valid = false
+			v.Message = e.Error()
+			res.Valid = false
+			if firstErr == nil {
+				firstErr = fmt.Errorf("xml validation failed for %s: %w", f.ID, e)
+			}
+		} else {
+			v.SensorKeys = keys
+			v.SensorCount = len(keys)
+		}
+
+		res.Files = append(res.Files, v)
+	}
+
+	b, err := json.Marshal(res)
+	if err != nil {
+		return out, err
+	}
+	out.Response = b
+
+	if firstErr != nil {
+		return out, firstErr
+	}
+	return out, nil
+}
+
+// GoesPlatformConfigCommit records commit metadata (timestamp and commit id)
+// for each platform config file reported by the OpenDCS wrapper callback.
+// Returns the first batch error, if any.
+func (s *DBService) GoesPlatformConfigCommit(ctx context.Context, files []dto.GoesPlatformConfigFileCommitDTO) error {
+	params := make([]db.GoesPlatformConfigFileCommitParams, len(files))
+	for i := range files {
+		// Take addresses of the slice elements, not of a range variable: on
+		// toolchains before Go 1.22 `&f.CommittedAt` would alias one reused
+		// loop variable and every row would share the last file's values.
+		f := &files[i]
+		params[i] = db.GoesPlatformConfigFileCommitParams{
+			ID:                f.ID,
+			CommittedAt:       &f.CommittedAt,
+			CommittedCommitID: &f.CommitID,
+		}
+	}
+
+	var batchErr error
+	s.Queries.GoesPlatformConfigFileCommit(ctx, params).Exec(batchExecErr(&batchErr))
+	return batchErr
+}
diff --git a/api/internal/service/goes_xml.go b/api/internal/service/goes_xml.go
new file mode 100644
index 00000000..cb2b9e19
--- /dev/null
+++ b/api/internal/service/goes_xml.go
@@ -0,0 +1,350 @@
+package service
+
+import (
+ "bytes"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "strings"
+
+ "github.com/google/uuid"
+)
+
// PlatformSensor identifies a sensor found in an OpenDCS platform
// configuration XML document.
type PlatformSensor struct {
	Key string
}

// ParsePlatformSensors scans the given platform configuration XML document.
//
// NOTE(review): the previous implementation maintained an element-name
// stack and inspected character data but never appended to the result, so
// in practice this function only verifies that the input is well-formed
// XML and always returns an empty sensor list. The dead bookkeeping has
// been removed; extend this function when sensor extraction is actually
// implemented (see extractSensorNames for the extraction used elsewhere).
//
// Returns a decoding error if the document is not well-formed.
func ParsePlatformSensors(xmlBytes []byte) ([]PlatformSensor, error) {
	dec := xml.NewDecoder(bytes.NewReader(xmlBytes))

	for {
		_, err := dec.Token()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
	}

	// No sensors are extracted yet; preserve the original nil result.
	return nil, nil
}
+
// extractPlatformIDAndSite returns the first platform identifier and site
// name found in a platform configuration XML document. Element names are
// matched case-insensitively against a few common spellings, and only the
// text directly inside the most recently opened element is considered.
// Scanning stops as soon as both values are known. A decode error aborts
// the scan and is returned with empty results.
func extractPlatformIDAndSite(xmlIn []byte) (platformID, site string, err error) {
	decoder := xml.NewDecoder(bytes.NewReader(xmlIn))

	element := ""
	for platformID == "" || site == "" {
		token, tokenErr := decoder.Token()
		if tokenErr == io.EOF {
			break
		}
		if tokenErr != nil {
			return "", "", tokenErr
		}

		switch tk := token.(type) {
		case xml.StartElement:
			element = strings.ToLower(tk.Name.Local)
		case xml.EndElement:
			// Closing any element means subsequent character data no
			// longer belongs to a candidate leaf.
			element = ""
		case xml.CharData:
			text := strings.TrimSpace(string(tk))
			if element == "" || text == "" {
				continue
			}
			switch element {
			case "platformid", "platform_id", "platform-id":
				if platformID == "" {
					platformID = text
				}
			case "site", "sitename", "site_name", "site-name":
				if site == "" {
					site = text
				}
			}
		}
	}
	return platformID, site, nil
}
+
// injectTimeseriesIDIntoPlatformXML rewrites a platform configuration XML
// document so that each PlatformSensor and ScriptSensor carries a property
// element with PropertyName="timeseries_id" whose text is a timeseries
// UUID from keyToTS.
//
// Pass 1 streams the document and builds, per Platform (keyed by its
// PlatformId attribute), a map of ConfigSensor SensorNumber -> SensorName.
// Pass 2 re-encodes the document token-by-token: the content of an
// existing timeseries_id property element is replaced, and a new property
// element is appended to any sensor that had none.
//
// Keys into keyToTS have the form "<SensorName>.<SensorNumber>".
//
// NOTE(review): if a sensor has an existing timeseries_id property but
// lookupTS finds no match, the property's original content is still
// dropped, leaving an empty property element — confirm that is intended.
func injectTimeseriesIDIntoPlatformXML(xmlIn []byte, keyToTS map[string]uuid.UUID) ([]byte, error) {
	// platform id -> (config sensor number -> sensor name), filled by pass 1.
	perPlatform := map[string]map[string]string{}

	// Pass 1: harvest ConfigSensor number/name pairs per platform.
	{
		dec := xml.NewDecoder(bytes.NewReader(xmlIn))

		var (
			platformID     string
			inConfigSensor bool
			cfgNum         string
			readingName    bool
			nameBuf        strings.Builder
		)

		for {
			tok, err := dec.Token()
			if err == io.EOF {
				break
			}
			if err != nil {
				return nil, fmt.Errorf("pass1 decode: %w", err)
			}

			switch t := tok.(type) {
			case xml.StartElement:
				switch t.Name.Local {
				case "Platform":
					platformID = strings.TrimSpace(attr(t.Attr, "PlatformId"))
					// Sentinel key so platforms lacking a PlatformId
					// attribute still group their sensors together.
					if platformID == "" {
						platformID = "__no_platform_id__"
					}
					if perPlatform[platformID] == nil {
						perPlatform[platformID] = map[string]string{}
					}
				case "ConfigSensor":
					inConfigSensor = true
					cfgNum = strings.TrimSpace(attr(t.Attr, "SensorNumber"))
					nameBuf.Reset()
					readingName = false
				case "SensorName":
					if inConfigSensor {
						readingName = true
					}
				}

			case xml.CharData:
				// Accumulate text only while inside <ConfigSensor><SensorName>.
				if inConfigSensor && readingName {
					nameBuf.Write([]byte(t))
				}

			case xml.EndElement:
				switch t.Name.Local {
				case "SensorName":
					readingName = false
				case "ConfigSensor":
					if inConfigSensor {
						n := strings.TrimSpace(nameBuf.String())
						if platformID != "" && cfgNum != "" && n != "" {
							perPlatform[platformID][cfgNum] = n
						}
						inConfigSensor = false
						cfgNum = ""
					}
				case "Platform":
					platformID = ""
				}
			}
		}
	}

	// lookupTS resolves a (platform, sensor number) pair to a timeseries
	// UUID via the pass-1 name map and the caller-supplied keyToTS.
	lookupTS := func(platformID, sensorNum string) (uuid.UUID, bool) {
		m := perPlatform[platformID]
		if m == nil {
			return uuid.Nil, false
		}
		num := strings.TrimSpace(sensorNum)
		name := strings.TrimSpace(m[num])
		if name == "" || num == "" {
			return uuid.Nil, false
		}
		ts, ok := keyToTS[name+"."+num]
		if !ok || ts == uuid.Nil {
			return uuid.Nil, false
		}
		return ts, true
	}

	// Pass 2: re-encode the document, injecting/replacing properties.
	dec := xml.NewDecoder(bytes.NewReader(xmlIn))
	var out bytes.Buffer
	enc := xml.NewEncoder(&out)

	var (
		platformID string

		// PlatformSensor tracking.
		inPS      bool
		psNum     string
		sawPSProp bool

		// ScriptSensor tracking.
		inSS      bool
		ssNum     string
		sawSSProp bool

		// While replacing is true, the original content of a
		// timeseries_id property element is being skipped; repDepth
		// tracks element nesting so we know when the property closes.
		replacing bool
		repDepth  int
		repName   string
	)

	// writeProp emits a complete <elemLocal PropertyName="timeseries_id">
	// element containing the UUID text.
	writeProp := func(elemLocal string, ts uuid.UUID) error {
		start := xml.StartElement{
			Name: xml.Name{Local: elemLocal},
			Attr: []xml.Attr{{Name: xml.Name{Local: "PropertyName"}, Value: "timeseries_id"}},
		}
		if err := enc.EncodeToken(start); err != nil {
			return err
		}
		if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil {
			return err
		}
		return enc.EncodeToken(xml.EndElement{Name: start.Name})
	}

	for {
		tok, err := dec.Token()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, fmt.Errorf("pass2 decode: %w", err)
		}

		// Skip original property content until the element we are
		// replacing closes, then emit the matching end tag ourselves.
		if replacing {
			switch tok.(type) {
			case xml.StartElement:
				repDepth++
			case xml.EndElement:
				repDepth--
				if repDepth == 0 {
					if err := enc.EncodeToken(xml.EndElement{Name: xml.Name{Local: repName}}); err != nil {
						return nil, err
					}
					replacing = false
					repName = ""
				}
			}
			continue
		}

		switch t := tok.(type) {
		case xml.StartElement:
			switch t.Name.Local {
			case "Platform":
				platformID = strings.TrimSpace(attr(t.Attr, "PlatformId"))
				if platformID == "" {
					platformID = "__no_platform_id__"
				}

			case "PlatformSensor":
				inPS = true
				psNum = strings.TrimSpace(attr(t.Attr, "SensorNumber"))
				sawPSProp = false

			case "ScriptSensor":
				inSS = true
				ssNum = strings.TrimSpace(attr(t.Attr, "SensorNumber"))
				sawSSProp = false

			case "PlatformSensorProperty":
				// Existing timeseries_id property: keep the start tag,
				// write the new UUID (if resolvable), and drop the
				// original content via the replacing state above.
				if inPS && strings.EqualFold(attr(t.Attr, "PropertyName"), "timeseries_id") {
					sawPSProp = true
					if err := enc.EncodeToken(t); err != nil {
						return nil, err
					}
					if ts, ok := lookupTS(platformID, psNum); ok {
						if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil {
							return nil, err
						}
					}
					replacing = true
					repDepth = 1
					repName = "PlatformSensorProperty"
					continue
				}

			case "SensorProperty":
				// Same replacement logic for ScriptSensor properties.
				if inSS && strings.EqualFold(attr(t.Attr, "PropertyName"), "timeseries_id") {
					sawSSProp = true
					if err := enc.EncodeToken(t); err != nil {
						return nil, err
					}
					if ts, ok := lookupTS(platformID, ssNum); ok {
						if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil {
							return nil, err
						}
					}
					replacing = true
					repDepth = 1
					repName = "SensorProperty"
					continue
				}
			}

			if err := enc.EncodeToken(t); err != nil {
				return nil, err
			}

		case xml.EndElement:
			switch t.Name.Local {
			case "PlatformSensor":
				// No existing property was seen: append one before the
				// sensor's end tag, if the lookup resolves.
				if inPS && !sawPSProp {
					if ts, ok := lookupTS(platformID, psNum); ok {
						if err := writeProp("PlatformSensorProperty", ts); err != nil {
							return nil, err
						}
					}
				}
				inPS = false
				psNum = ""
				sawPSProp = false

			case "ScriptSensor":
				if inSS && !sawSSProp {
					if ts, ok := lookupTS(platformID, ssNum); ok {
						if err := writeProp("SensorProperty", ts); err != nil {
							return nil, err
						}
					}
				}
				inSS = false
				ssNum = ""
				sawSSProp = false

			case "Platform":
				platformID = ""
			}

			if err := enc.EncodeToken(t); err != nil {
				return nil, err
			}

		default:
			// Comments, directives, processing instructions, and
			// character data pass through unchanged.
			if err := enc.EncodeToken(tok); err != nil {
				return nil, err
			}
		}
	}

	if err := enc.Flush(); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
+
+func attr(attrs []xml.Attr, local string) string {
+ for _, a := range attrs {
+ if a.Name.Local == local {
+ return a.Value
+ }
+ }
+ return ""
+}
diff --git a/api/migrations/migrations.go b/api/migrations/migrations.go
index 669b8fa8..0a1689cf 100644
--- a/api/migrations/migrations.go
+++ b/api/migrations/migrations.go
@@ -1,3 +1,4 @@
+// Package migrations provides access to the embedded database migration files.
package migrations
import "embed"
diff --git a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql
new file mode 100644
index 00000000..ca3f7908
--- /dev/null
+++ b/api/migrations/repeat/0190__views_telemetry.sql
@@ -0,0 +1,16 @@
-- v_goes_telemetry_source: each GOES telemetry source together with a JSON
-- array of its platform configuration files ('[]' when it has none).
create or replace view v_goes_telemetry_source as
select
    s.*,
    f.files
from goes_telemetry_source s
left join lateral (
    select coalesce(jsonb_agg(jsonb_build_object(
        'id', cf.id,
        'name', cf.name,
        'project_id', cf.project_id,
        'alias', cf.alias,
        'size_bytes', cf.size_bytes,
        'committed', cf.committed
    )), '[]'::jsonb) as files
    from goes_platform_config_file cf
    -- correlate files to this source row; without this predicate every
    -- source was reported as owning every file in the system
    where cf.goes_telemetry_source_id = s.id
) f on true;
diff --git a/api/migrations/schema/V1.58.00__goes.sql b/api/migrations/schema/V1.58.00__goes.sql
new file mode 100644
index 00000000..23c47ac8
--- /dev/null
+++ b/api/migrations/schema/V1.58.00__goes.sql
@@ -0,0 +1,82 @@
-- Known GOES telemetry ingest sources (e.g. an OpenDCS instance).
create table goes_telemetry_source (
    id uuid primary key default uuid_generate_v4(),
    name text unique not null
);


-- Uploaded OpenDCS platform configuration XML files, scoped to a project
-- and a telemetry source. committed_at is set once the file is committed.
create table goes_platform_config_file (
    id uuid primary key default uuid_generate_v4(),
    goes_telemetry_source_id uuid not null references goes_telemetry_source(id),
    project_id uuid not null references project(id),
    name text not null,
    alias text not null,
    size_bytes bigint not null,
    content xml not null,
    committed boolean not null default false,
    committed_at timestamptz,
    created_at timestamptz not null default now(),
    created_by uuid not null references profile(id),
    updated_at timestamptz,
    updated_by uuid references profile(id)
);


-- Maps a platform sensor key within a config file to a timeseries row.
-- The primary key enforces uniqueness of
-- (goes_platform_config_file_id, platform_sensor_key); the previous named
-- unique constraint on the same two columns duplicated the primary key
-- index and has been removed.
create table goes_telemetry_config_mappings (
    goes_platform_config_file_id uuid not null references goes_platform_config_file(id) on delete cascade,
    platform_sensor_key text not null,
    timeseries_id uuid references timeseries(id),
    primary key (goes_platform_config_file_id, platform_sensor_key)
);


-- A timeseries may be mapped at most once across all config files;
-- unmapped (null) entries are unrestricted.
create unique index if not exists unique_timeseries_id_not_null
on goes_telemetry_config_mappings(timeseries_id)
where timeseries_id is not null;


-- A commit of platform configuration to the telemetry source. The
-- idempotency key deduplicates retried commit requests per
-- project/source.
create table goes_commit (
    id uuid primary key default uuid_generate_v4(),
    project_id uuid not null references project(id),
    goes_telemetry_source_id uuid not null references goes_telemetry_source(id),
    created_at timestamptz not null default now(),
    created_by uuid not null references profile(id),
    status text not null,
    opendcs_response jsonb,
    previous_commit_id uuid references goes_commit(id),
    idempotency_key text,
    constraint unique_commit_idempotency unique (project_id, goes_telemetry_source_id, idempotency_key)
);


-- Fast lookup of the currently-active commit per project/source.
create index goes_commit_active_idx
on goes_commit (project_id, goes_telemetry_source_id)
where status = 'active';


-- A snapshot of sensor-to-timeseries mappings, addressed by content hash
-- and deduplicated per project by idempotency key.
create table goes_mapping_set (
    id uuid primary key default uuid_generate_v4(),
    project_id uuid not null references project(id),
    created_at timestamptz not null default now(),
    created_by uuid not null references profile(id),
    content_hash text not null,
    idempotency_key text,
    constraint unique_mapping_idempotency unique (project_id, idempotency_key)
);


-- Individual entries of a mapping set snapshot.
create table goes_mapping_set_entry (
    mapping_set_id uuid not null references goes_mapping_set(id) on delete cascade,
    goes_platform_config_file_id uuid not null references goes_platform_config_file(id) on delete cascade,
    platform_sensor_key text not null,
    timeseries_id uuid references timeseries(id),
    primary key (mapping_set_id, goes_platform_config_file_id, platform_sensor_key)
);


-- Added after goes_commit exists: the content as of the last commit, and
-- which commit produced it.
alter table goes_platform_config_file
add column committed_content xml,
add column committed_commit_id uuid references goes_commit(id);


-- Seed the default telemetry source.
insert into goes_telemetry_source (id, name) values ('666e60ec-2c0a-4446-9eda-6f45cbcd0a60', 'OpenDCS #1');
diff --git a/api/migrations/schema/V1.59.00__goes_project.sql b/api/migrations/schema/V1.59.00__goes_project.sql
new file mode 100644
index 00000000..855a95fe
--- /dev/null
+++ b/api/migrations/schema/V1.59.00__goes_project.sql
@@ -0,0 +1,27 @@
-- Soft-delete support for platform configuration files.
alter table goes_platform_config_file
add column if not exists deleted boolean not null default false,
add column if not exists deleted_at timestamptz,
add column if not exists deleted_by uuid references profile(id);

-- Partial index so listings of live (non-deleted) files per project/source
-- stay fast.
create index if not exists goes_platform_config_file_not_deleted_idx
on goes_platform_config_file (project_id, goes_telemetry_source_id)
where deleted = false;

-- Link each commit to the mapping-set snapshot it was built from.
alter table goes_commit
add column if not exists mapping_set_id uuid references goes_mapping_set(id);

create index if not exists goes_commit_mapping_set_id_idx
on goes_commit (mapping_set_id);

-- Registry of platforms known to the telemetry source, keyed by a
-- source-wide platform key and pointing at the commit that last touched
-- the platform.
create table if not exists goes_platform_registry (
    platform_key text primary key,
    project_id uuid not null references project(id),
    goes_telemetry_source_id uuid not null references goes_telemetry_source(id),
    platform_id text,
    site_name text,
    commit_id uuid not null references goes_commit(id),
    updated_at timestamptz not null default now()
);

create index if not exists goes_platform_registry_project_idx
on goes_platform_registry (project_id, goes_telemetry_source_id);
diff --git a/api/migrations/seed/V0.17.02__seed_data.sql b/api/migrations/seed/V0.17.02__seed_data.sql
index ba30042e..3b21d3dd 100644
--- a/api/migrations/seed/V0.17.02__seed_data.sql
+++ b/api/migrations/seed/V0.17.02__seed_data.sql
@@ -110,63 +110,63 @@ INSERT INTO instrument_constants (instrument_id, timeseries_id) VALUES
('a7540f69-c41e-43b3-b655-6e44097edb7e', '14247bc8-b264-4857-836f-182d47ebb39d');
-- Time Series Measurements
-INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/1/2020' , 13.16),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/2/2020' , 13.16),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/3/2020' , 13.17),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/4/2020' , 13.17),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/5/2020' , 13.13),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/6/2020' , 13.12),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/7/2020' , 13.10),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/8/2020' , 13.08),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/9/2020' , 13.07),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/10/2020', 13.05),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/11/2020', 13.16),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/12/2020', 13.16),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/13/2020', 13.17),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/14/2020', 13.17),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/15/2020', 13.13),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/16/2020', 13.12),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/17/2020', 13.10),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/18/2020', 13.08),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/19/2020', 13.07),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/20/2020', 13.05),
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/21/2020', 13.05),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/1/2020' , 20.16),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/2/2020' , 20.16),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/3/2020' , 20.17),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/4/2020' , 20.17),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/5/2020' , 20.13),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/6/2020' , 20.12),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/7/2020' , 20.10),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/8/2020' , 20.08),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/9/2020' , 20.07),
-('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/10/2020', 20.05),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/1/2020' , 20.16),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/2/2020' , 20.16),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/3/2020' , 20.17),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/4/2020' , 20.17),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/5/2020' , 20.13),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/6/2020' , 20.12),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/7/2020' , 20.10),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/8/2020' , 20.08),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/9/2020' , 20.07),
-('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/10/2020', 20.05),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/1/2020' , 20.16),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/2/2020' , 20.16),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/3/2020' , 20.17),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/4/2020' , 20.17),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/5/2020' , 20.13),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/6/2020' , 20.12),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/7/2020' , 20.10),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/8/2020' , 20.08),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/9/2020' , 20.07),
-('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/10/2020', 20.05),
-('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2015', 40.50),
-('d9697351-3a38-4194-9ac4-41541927e475', '6/10/2020', 40.00),
-('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2020', 39.50),
-('22a734d6-dc24-451d-a462-43a32f335ae8', '3/10/2015', 10.0),
-('479d90eb-3454-4f39-be9a-bfd23099a552', '6/21/2021', 20000.0);
+-- INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/1/2020' , 13.16),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/2/2020' , 13.16),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/3/2020' , 13.17),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/4/2020' , 13.17),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/5/2020' , 13.13),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/6/2020' , 13.12),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/7/2020' , 13.10),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/8/2020' , 13.08),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/9/2020' , 13.07),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/10/2020', 13.05),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/11/2020', 13.16),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/12/2020', 13.16),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/13/2020', 13.17),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/14/2020', 13.17),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/15/2020', 13.13),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/16/2020', 13.12),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/17/2020', 13.10),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/18/2020', 13.08),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/19/2020', 13.07),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/20/2020', 13.05),
+-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/21/2020', 13.05),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/1/2020' , 20.16),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/2/2020' , 20.16),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/3/2020' , 20.17),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/4/2020' , 20.17),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/5/2020' , 20.13),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/6/2020' , 20.12),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/7/2020' , 20.10),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/8/2020' , 20.08),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/9/2020' , 20.07),
+-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/10/2020', 20.05),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/1/2020' , 20.16),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/2/2020' , 20.16),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/3/2020' , 20.17),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/4/2020' , 20.17),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/5/2020' , 20.13),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/6/2020' , 20.12),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/7/2020' , 20.10),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/8/2020' , 20.08),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/9/2020' , 20.07),
+-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/10/2020', 20.05),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/1/2020' , 20.16),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/2/2020' , 20.16),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/3/2020' , 20.17),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/4/2020' , 20.17),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/5/2020' , 20.13),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/6/2020' , 20.12),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/7/2020' , 20.10),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/8/2020' , 20.08),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/9/2020' , 20.07),
+-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/10/2020', 20.05),
+-- ('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2015', 40.50),
+-- ('d9697351-3a38-4194-9ac4-41541927e475', '6/10/2020', 40.00),
+-- ('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2020', 39.50),
+-- ('22a734d6-dc24-451d-a462-43a32f335ae8', '3/10/2015', 10.0),
+-- ('479d90eb-3454-4f39-be9a-bfd23099a552', '6/21/2021', 20000.0);
-- inclinometers
INSERT INTO inclinometer_measurement (timeseries_id, time, creator, create_date, values) VALUES
diff --git a/api/migrations/seed/V0.17.19__seed_ts_measurements.sql b/api/migrations/seed/V0.17.19__seed_ts_measurements.sql
index cf4378d2..ffee34e8 100644
--- a/api/migrations/seed/V0.17.19__seed_ts_measurements.sql
+++ b/api/migrations/seed/V0.17.19__seed_ts_measurements.sql
@@ -1,114 +1,114 @@
--- https://www.timescale.com/blog/how-to-shape-sample-data-with-postgresql-generate_series-and-sql/
-create table if not exists seed_data_overrides (
- m_val int not null,
- p_inc float4 not null
-);
-
--- provides baseline curve for more reaslistic looking test data
-insert into seed_data_overrides(m_val, p_inc) values
- (1,1.04),
- (2,1),
- (3,1),
- (4,1),
- (5,1),
- (6,1.10),
- (7,1),
- (8,0.09),
- (9,1),
- (10,1),
- (11,1.08),
- (12,1.18);
-
-create or replace function seed_timeseries_measurements(
- timeseries_ids uuid[],
- begin_time timestamptz,
- end_time timestamptz,
- intv interval
-) returns void language sql volatile as $$
-insert into timeseries_measurement (timeseries_id, time, value)
-select
- timeseries_id,
- m.time,
- m.value
-from
-unnest(timeseries_ids) as timeseries_id,
-(
- with intv_series as (
- select ts, date(ts) as day, rownum
- from generate_series(begin_time, end_time, intv) with ordinality as t(ts, rownum)
- ),
- intv_value as (
- select ts, day, date_part('month', ts) as m_val, rownum, random() as val
- from intv_series
- order by day
- ),
- intv_wave as (
- select
- day,
- 1 + .2 * cos(rownum * 6.28/180) as p_mod
- from intv_series
- day
- )
- select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value
- from intv_value dv
- inner join intv_wave dw on dv.day=dw.day
- inner join seed_data_overrides o on dv.m_val=o.m_val
- order by ts
-) m
-on conflict do nothing;
-$$;
-
-select seed_timeseries_measurements(array[
-'869465fc-dc1e-445e-81f4-9979b5fadda9'::uuid,
-'9a3864a8-8766-4bfa-bad1-0328b166f6a8'::uuid,
-'7ee902a3-56d0-4acf-8956-67ac82c03a96'::uuid,
-'8f4ca3a3-5971-4597-bd6f-332d1cf5af7c'::uuid,
-'d9697351-3a38-4194-9ac4-41541927e475'::uuid
-], '2020-01-01'::timestamptz, now(), '1 day'::interval);
-
-with ranked as (
- select
- timeseries_id,
- time,
- row_number() over (partition by timeseries_id order by time) as rn,
- count(*) over (partition by timeseries_id) as total
- from timeseries_measurement
- where timeseries_id in (
- '869465fc-dc1e-445e-81f4-9979b5fadda9',
- '9a3864a8-8766-4bfa-bad1-0328b166f6a8',
- '7ee902a3-56d0-4acf-8956-67ac82c03a96',
- '8f4ca3a3-5971-4597-bd6f-332d1cf5af7c',
- 'd9697351-3a38-4194-9ac4-41541927e475'
- )
-),
-chunk_sizes as (
- select
- timeseries_id,
- floor(total * 0.3) as total_chunk,
- floor(total * 0.3 / 3) as chunk_size
- from ranked
- group by timeseries_id, total
-),
-chunks as (
- select
- timeseries_id,
- chunk_size,
- 1 as masked_start,
- 1 + chunk_size as validated_start,
- 1 + 2 * chunk_size as annotation_start
- from chunk_sizes
-)
-insert into timeseries_notes (masked, validated, annotation, timeseries_id, time)
-select
- case when r.rn between c.masked_start and c.masked_start + c.chunk_size - 1 then true else null end as masked,
- case when r.rn between c.validated_start and c.validated_start + c.chunk_size - 1 then true else null end as validated,
- case when r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1 then 'Test annotation' else null end as annotation,
- r.timeseries_id,
- r.time
-from ranked r
-join chunks c on r.timeseries_id = c.timeseries_id
-where
- (r.rn between c.masked_start and c.masked_start + c.chunk_size - 1)
- or (r.rn between c.validated_start and c.validated_start + c.chunk_size - 1)
- or (r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1)
-on conflict do nothing;
+-- -- https://www.timescale.com/blog/how-to-shape-sample-data-with-postgresql-generate_series-and-sql/
+-- create table if not exists seed_data_overrides (
+-- m_val int not null,
+-- p_inc float4 not null
+-- );
+--
+-- -- provides baseline curve for more realistic looking test data
+-- insert into seed_data_overrides(m_val, p_inc) values
+-- (1,1.04),
+-- (2,1),
+-- (3,1),
+-- (4,1),
+-- (5,1),
+-- (6,1.10),
+-- (7,1),
+-- (8,0.09),
+-- (9,1),
+-- (10,1),
+-- (11,1.08),
+-- (12,1.18);
+--
+-- create or replace function seed_timeseries_measurements(
+-- timeseries_ids uuid[],
+-- begin_time timestamptz,
+-- end_time timestamptz,
+-- intv interval
+-- ) returns void language sql volatile as $$
+-- insert into timeseries_measurement (timeseries_id, time, value)
+-- select
+-- timeseries_id,
+-- m.time,
+-- m.value
+-- from
+-- unnest(timeseries_ids) as timeseries_id,
+-- (
+-- with intv_series as (
+-- select ts, date(ts) as day, rownum
+-- from generate_series(begin_time, end_time, intv) with ordinality as t(ts, rownum)
+-- ),
+-- intv_value as (
+-- select ts, day, date_part('month', ts) as m_val, rownum, random() as val
+-- from intv_series
+-- order by day
+-- ),
+-- intv_wave as (
+-- select
+-- day,
+-- 1 + .2 * cos(rownum * 6.28/180) as p_mod
+-- from intv_series
+-- day
+-- )
+-- select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value
+-- from intv_value dv
+-- inner join intv_wave dw on dv.day=dw.day
+-- inner join seed_data_overrides o on dv.m_val=o.m_val
+-- order by ts
+-- ) m
+-- on conflict do nothing;
+-- $$;
+--
+-- select seed_timeseries_measurements(array[
+-- '869465fc-dc1e-445e-81f4-9979b5fadda9'::uuid,
+-- '9a3864a8-8766-4bfa-bad1-0328b166f6a8'::uuid,
+-- '7ee902a3-56d0-4acf-8956-67ac82c03a96'::uuid,
+-- '8f4ca3a3-5971-4597-bd6f-332d1cf5af7c'::uuid,
+-- 'd9697351-3a38-4194-9ac4-41541927e475'::uuid
+-- ], '2020-01-01'::timestamptz, now(), '1 day'::interval);
+--
+-- with ranked as (
+-- select
+-- timeseries_id,
+-- time,
+-- row_number() over (partition by timeseries_id order by time) as rn,
+-- count(*) over (partition by timeseries_id) as total
+-- from timeseries_measurement
+-- where timeseries_id in (
+-- '869465fc-dc1e-445e-81f4-9979b5fadda9',
+-- '9a3864a8-8766-4bfa-bad1-0328b166f6a8',
+-- '7ee902a3-56d0-4acf-8956-67ac82c03a96',
+-- '8f4ca3a3-5971-4597-bd6f-332d1cf5af7c',
+-- 'd9697351-3a38-4194-9ac4-41541927e475'
+-- )
+-- ),
+-- chunk_sizes as (
+-- select
+-- timeseries_id,
+-- floor(total * 0.3) as total_chunk,
+-- floor(total * 0.3 / 3) as chunk_size
+-- from ranked
+-- group by timeseries_id, total
+-- ),
+-- chunks as (
+-- select
+-- timeseries_id,
+-- chunk_size,
+-- 1 as masked_start,
+-- 1 + chunk_size as validated_start,
+-- 1 + 2 * chunk_size as annotation_start
+-- from chunk_sizes
+-- )
+-- insert into timeseries_notes (masked, validated, annotation, timeseries_id, time)
+-- select
+-- case when r.rn between c.masked_start and c.masked_start + c.chunk_size - 1 then true else null end as masked,
+-- case when r.rn between c.validated_start and c.validated_start + c.chunk_size - 1 then true else null end as validated,
+-- case when r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1 then 'Test annotation' else null end as annotation,
+-- r.timeseries_id,
+-- r.time
+-- from ranked r
+-- join chunks c on r.timeseries_id = c.timeseries_id
+-- where
+-- (r.rn between c.masked_start and c.masked_start + c.chunk_size - 1)
+-- or (r.rn between c.validated_start and c.validated_start + c.chunk_size - 1)
+-- or (r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1)
+-- on conflict do nothing;
diff --git a/api/migrations/seed/V0.17.22__seed_saa.sql b/api/migrations/seed/V0.17.22__seed_saa.sql
index 78003893..3f344065 100644
--- a/api/migrations/seed/V0.17.22__seed_saa.sql
+++ b/api/migrations/seed/V0.17.22__seed_saa.sql
@@ -51,16 +51,16 @@ INSERT INTO saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_i
('eca4040e-aecb-4cd3-bcde-3e308f0356a6', 8, '4affc367-ea0f-41f5-a4bc-5f387b01d7a4', NOW() - INTERVAL '1 month');
-INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES
-('4affc367-ea0f-41f5-a4bc-5f387b01d7a4', NOW() - INTERVAL '1 month', 0),
-('cf2f2304-d44e-4363-bc8d-95533222efd6', NOW() - INTERVAL '1 month', 200),
-('ff2086ae-0eae-42a8-b598-2e97be2ab3b0', NOW() - INTERVAL '1 month', 200),
-('669b63d7-87b2-4aed-9b15-e19ea39789b9', NOW() - INTERVAL '1 month', 200),
-('e404e8f4-41c6-4355-9ddb-9d8c635525fc', NOW() - INTERVAL '1 month', 200),
-('ccb80fd4-8902-450f-bb3b-cc1e6718b03c', NOW() - INTERVAL '1 month', 200),
-('7f98f239-ac1e-4651-9d69-c163b2dc06a6', NOW() - INTERVAL '1 month', 200),
-('72bd19f1-23d3-4edb-b16f-9ebb121cf921', NOW() - INTERVAL '1 month', 200),
-('df6a9cca-29fc-4ec3-9415-d497fbae1a58', NOW() - INTERVAL '1 month', 200);
+-- INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES
+-- ('4affc367-ea0f-41f5-a4bc-5f387b01d7a4', NOW() - INTERVAL '1 month', 0),
+-- ('cf2f2304-d44e-4363-bc8d-95533222efd6', NOW() - INTERVAL '1 month', 200),
+-- ('ff2086ae-0eae-42a8-b598-2e97be2ab3b0', NOW() - INTERVAL '1 month', 200),
+-- ('669b63d7-87b2-4aed-9b15-e19ea39789b9', NOW() - INTERVAL '1 month', 200),
+-- ('e404e8f4-41c6-4355-9ddb-9d8c635525fc', NOW() - INTERVAL '1 month', 200),
+-- ('ccb80fd4-8902-450f-bb3b-cc1e6718b03c', NOW() - INTERVAL '1 month', 200),
+-- ('7f98f239-ac1e-4651-9d69-c163b2dc06a6', NOW() - INTERVAL '1 month', 200),
+-- ('72bd19f1-23d3-4edb-b16f-9ebb121cf921', NOW() - INTERVAL '1 month', 200),
+-- ('df6a9cca-29fc-4ec3-9415-d497fbae1a58', NOW() - INTERVAL '1 month', 200);
INSERT INTO instrument_constants (timeseries_id, instrument_id) VALUES
@@ -86,48 +86,48 @@ INSERT INTO saa_segment (instrument_id, id, length_timeseries_id, x_timeseries_i
('eca4040e-aecb-4cd3-bcde-3e308f0356a6',8,'df6a9cca-29fc-4ec3-9415-d497fbae1a58','fb0795ba-9d80-4a41-abd7-5de140392454','32889a6d-93d0-49f9-b281-44e19e88474c','bcb95c35-08f7-4c5a-83ff-b505b8d76481','54dcd1e1-e9da-4db5-95e5-3c28fab5c03c');
-INSERT INTO timeseries_measurement (timeseries_id, time, value)
-SELECT
- timeseries_id,
- time,
- round((random() * (100-3) + 3)::NUMERIC, 4) AS value
-FROM
- unnest(ARRAY[
- '8b3762ef-a852-4edc-8e87-746a92eaac9d'::uuid,
- 'ecfa267b-339b-4bb8-b7ae-eda550257878'::uuid,
- 'a31a24c4-aa8e-4e52-9895-43cdb69fe703'::uuid,
- 'eec831d1-56a5-47ef-85eb-02c7622d6cb8'::uuid,
- 'eb25ab9f-af8b-4383-839a-7d24899e02c4'::uuid,
- '8e641473-d7bf-433c-a24b-55fa065ca0c3'::uuid,
- '21cfe121-d29d-40a2-b04f-6be71ba479fe'::uuid,
- '23bda2f6-c479-48e0-a0c2-db48c3b08c3c'::uuid,
- '2598aa5f-cb8f-4ab7-8ebf-6de0c30bce70'::uuid,
- '4759bdac-656e-47c3-b403-d3118cf57342'::uuid,
- '1f47a1b9-a2bb-4282-8618-42ba1341533e'::uuid,
- 'd2dbac06-ad03-45d9-a7ad-1e7fb9d09ce2'::uuid,
- 'c22ffd8a-eae3-41cb-a75b-faae36236465'::uuid,
- 'd11a0e91-0125-46cc-a3fc-b0252361bd9c'::uuid,
- '9fbf2061-cf73-45f3-9e6c-b745ae7f72a1'::uuid,
- '0503e693-bc58-49b5-a477-288174dc90ed'::uuid,
- '24ad9638-5c5e-48b6-9ad6-a2eb0b93f87c'::uuid,
- '8cfaffb4-80b2-411b-be81-776385fc5862'::uuid,
- 'ea0f561f-e3f4-4155-a360-17407a0884d4'::uuid,
- 'a10e8627-621c-4aa7-8301-a2142a760e0c'::uuid,
- '88e22274-021e-4e91-88bb-046b67171a36'::uuid,
- 'f684bec8-9cc3-470f-a355-21d65f2be435'::uuid,
- '1a8c9bfc-0e65-4f76-aba9-fc32d643748f'::uuid,
- '2bf6aecd-3df0-4237-b28b-95731b7e333d'::uuid,
- '00f3e1f2-e7ff-4901-abfb-e9bf695802f6'::uuid,
- '2ef9b1d9-ee8f-4f2d-a482-2e0f0dd76f80'::uuid,
- '00ae950d-5bdd-455e-a72a-56da67dafb85'::uuid,
- '3d07cbc0-4aff-4efa-a162-ec1800801665'::uuid,
- 'fb0795ba-9d80-4a41-abd7-5de140392454'::uuid,
- '32889a6d-93d0-49f9-b281-44e19e88474c'::uuid,
- 'bcb95c35-08f7-4c5a-83ff-b505b8d76481'::uuid,
- '54dcd1e1-e9da-4db5-95e5-3c28fab5c03c'::uuid
- ]) AS timeseries_id,
- generate_series(
- now() - INTERVAL '1 month',
- now(),
- INTERVAL '1 hour'
- ) AS time;
+-- INSERT INTO timeseries_measurement (timeseries_id, time, value)
+-- SELECT
+-- timeseries_id,
+-- time,
+-- round((random() * (100-3) + 3)::NUMERIC, 4) AS value
+-- FROM
+-- unnest(ARRAY[
+-- '8b3762ef-a852-4edc-8e87-746a92eaac9d'::uuid,
+-- 'ecfa267b-339b-4bb8-b7ae-eda550257878'::uuid,
+-- 'a31a24c4-aa8e-4e52-9895-43cdb69fe703'::uuid,
+-- 'eec831d1-56a5-47ef-85eb-02c7622d6cb8'::uuid,
+-- 'eb25ab9f-af8b-4383-839a-7d24899e02c4'::uuid,
+-- '8e641473-d7bf-433c-a24b-55fa065ca0c3'::uuid,
+-- '21cfe121-d29d-40a2-b04f-6be71ba479fe'::uuid,
+-- '23bda2f6-c479-48e0-a0c2-db48c3b08c3c'::uuid,
+-- '2598aa5f-cb8f-4ab7-8ebf-6de0c30bce70'::uuid,
+-- '4759bdac-656e-47c3-b403-d3118cf57342'::uuid,
+-- '1f47a1b9-a2bb-4282-8618-42ba1341533e'::uuid,
+-- 'd2dbac06-ad03-45d9-a7ad-1e7fb9d09ce2'::uuid,
+-- 'c22ffd8a-eae3-41cb-a75b-faae36236465'::uuid,
+-- 'd11a0e91-0125-46cc-a3fc-b0252361bd9c'::uuid,
+-- '9fbf2061-cf73-45f3-9e6c-b745ae7f72a1'::uuid,
+-- '0503e693-bc58-49b5-a477-288174dc90ed'::uuid,
+-- '24ad9638-5c5e-48b6-9ad6-a2eb0b93f87c'::uuid,
+-- '8cfaffb4-80b2-411b-be81-776385fc5862'::uuid,
+-- 'ea0f561f-e3f4-4155-a360-17407a0884d4'::uuid,
+-- 'a10e8627-621c-4aa7-8301-a2142a760e0c'::uuid,
+-- '88e22274-021e-4e91-88bb-046b67171a36'::uuid,
+-- 'f684bec8-9cc3-470f-a355-21d65f2be435'::uuid,
+-- '1a8c9bfc-0e65-4f76-aba9-fc32d643748f'::uuid,
+-- '2bf6aecd-3df0-4237-b28b-95731b7e333d'::uuid,
+-- '00f3e1f2-e7ff-4901-abfb-e9bf695802f6'::uuid,
+-- '2ef9b1d9-ee8f-4f2d-a482-2e0f0dd76f80'::uuid,
+-- '00ae950d-5bdd-455e-a72a-56da67dafb85'::uuid,
+-- '3d07cbc0-4aff-4efa-a162-ec1800801665'::uuid,
+-- 'fb0795ba-9d80-4a41-abd7-5de140392454'::uuid,
+-- '32889a6d-93d0-49f9-b281-44e19e88474c'::uuid,
+-- 'bcb95c35-08f7-4c5a-83ff-b505b8d76481'::uuid,
+-- '54dcd1e1-e9da-4db5-95e5-3c28fab5c03c'::uuid
+-- ]) AS timeseries_id,
+-- generate_series(
+-- now() - INTERVAL '1 month',
+-- now(),
+-- INTERVAL '1 hour'
+-- ) AS time;
diff --git a/api/migrations/seed/V0.17.23__seed_ipi.sql b/api/migrations/seed/V0.17.23__seed_ipi.sql
index 4ba0ccdf..b9430675 100644
--- a/api/migrations/seed/V0.17.23__seed_ipi.sql
+++ b/api/migrations/seed/V0.17.23__seed_ipi.sql
@@ -50,17 +50,17 @@ INSERT INTO ipi_segment (instrument_id, id, length_timeseries_id, tilt_timeserie
('01ac435f-fe3c-4af1-9979-f5e00467e7f5',4,'d28efb95-962d-4233-9002-827154bd76ad','3a297a4e-093a-4f9b-b201-1a994e2f4da7', NULL, NULL);
-INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES
-('5842c707-b4be-4d10-a89c-1064e282e555', NOW() - INTERVAL '1 month', 0),
-('7d515571-d6a2-4990-a1e2-d6d42049d864', NOW() - INTERVAL '1 month', 50),
-('bce99683-59bd-4e4b-ad79-64a03553cfdc', NOW() - INTERVAL '1 month', 012),
-('e891ca7c-59b2-41bc-9d4a-43995e35b855', NOW() - INTERVAL '1 month', 123),
-('18f17db2-4bc8-44cb-a9fa-ba84d13b8444', NOW() - INTERVAL '1 month', 234),
-('d5c236cf-dca5-4a35-bc59-a9ecac4d572b', NOW() - INTERVAL '1 month', 345),
-('88accf78-6f41-4342-86b5-026a8880cbb4', NOW() - INTERVAL '1 month', 100),
-('fc332ef5-55a8-4657-9d6d-b0abeeb985f2', NOW() - INTERVAL '1 month', 200),
-('a86c7468-09a7-4090-98e0-f7979103bbcd', NOW() - INTERVAL '1 month', 150),
-('d28efb95-962d-4233-9002-827154bd76ad', NOW() - INTERVAL '1 month', 050);
+-- INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES
+-- ('5842c707-b4be-4d10-a89c-1064e282e555', NOW() - INTERVAL '1 month', 0),
+-- ('7d515571-d6a2-4990-a1e2-d6d42049d864', NOW() - INTERVAL '1 month', 50),
+-- ('bce99683-59bd-4e4b-ad79-64a03553cfdc', NOW() - INTERVAL '1 month', 012),
+-- ('e891ca7c-59b2-41bc-9d4a-43995e35b855', NOW() - INTERVAL '1 month', 123),
+-- ('18f17db2-4bc8-44cb-a9fa-ba84d13b8444', NOW() - INTERVAL '1 month', 234),
+-- ('d5c236cf-dca5-4a35-bc59-a9ecac4d572b', NOW() - INTERVAL '1 month', 345),
+-- ('88accf78-6f41-4342-86b5-026a8880cbb4', NOW() - INTERVAL '1 month', 100),
+-- ('fc332ef5-55a8-4657-9d6d-b0abeeb985f2', NOW() - INTERVAL '1 month', 200),
+-- ('a86c7468-09a7-4090-98e0-f7979103bbcd', NOW() - INTERVAL '1 month', 150),
+-- ('d28efb95-962d-4233-9002-827154bd76ad', NOW() - INTERVAL '1 month', 050);
INSERT INTO instrument_constants (timeseries_id, instrument_id) VALUES
@@ -76,32 +76,32 @@ INSERT INTO instrument_constants (timeseries_id, instrument_id) VALUES
('d28efb95-962d-4233-9002-827154bd76ad','01ac435f-fe3c-4af1-9979-f5e00467e7f5');
-INSERT INTO timeseries_measurement (timeseries_id, time, value)
-SELECT
- timeseries_id,
- time,
- round((random() * (100-3) + 3)::NUMERIC, 4) AS value
-FROM
- unnest(ARRAY[
- 'f7fa0d85-c684-4315-a7c6-e18e60667969'::UUID,
- '1bf787e9-8363-4047-8b03-fbaf9ff03eaf'::UUID,
- '258a5834-20bf-45fc-a60c-f245b2822592'::UUID,
- '4ffcb98f-962a-46ea-8923-8f992ef07c58'::UUID,
- '3bd67db5-abd6-4b35-a649-427791f9eeb7'::UUID,
- '1db6717b-6cde-4f46-b7fb-bc82b75051d7'::UUID,
- 'a3c4254b-1448-4f70-a1b6-d7f5e5c66eb7'::UUID,
- '6d90eb76-f292-461e-a82b-0faee9999778'::UUID,
- 'b2968456-b26a-4bbb-b8d9-f1217a6147ff'::UUID,
- 'afcc8471-c91b-466e-833d-f173cc58797f'::UUID,
- '26cb2cfa-910a-46c3-b03f-9dbcf823f8d8'::UUID,
- '3a297a4e-093a-4f9b-b201-1a994e2f4da7'::UUID,
- '8d10fbd9-2669-4727-b4c1-746361691388'::UUID,
- '6044cffb-c241-4b66-9873-068c2bbac451'::UUID,
- '98385e5a-c5d8-4441-aa2e-0f6120414352'::UUID,
- 'c488fc08-18ff-4e3d-851f-46cfd1257b6c'::UUID
-]) AS timeseries_id,
- generate_series(
- now() - INTERVAL '1 month',
- now(),
- INTERVAL '1 hour'
- ) AS time;
+-- INSERT INTO timeseries_measurement (timeseries_id, time, value)
+-- SELECT
+-- timeseries_id,
+-- time,
+-- round((random() * (100-3) + 3)::NUMERIC, 4) AS value
+-- FROM
+-- unnest(ARRAY[
+-- 'f7fa0d85-c684-4315-a7c6-e18e60667969'::UUID,
+-- '1bf787e9-8363-4047-8b03-fbaf9ff03eaf'::UUID,
+-- '258a5834-20bf-45fc-a60c-f245b2822592'::UUID,
+-- '4ffcb98f-962a-46ea-8923-8f992ef07c58'::UUID,
+-- '3bd67db5-abd6-4b35-a649-427791f9eeb7'::UUID,
+-- '1db6717b-6cde-4f46-b7fb-bc82b75051d7'::UUID,
+-- 'a3c4254b-1448-4f70-a1b6-d7f5e5c66eb7'::UUID,
+-- '6d90eb76-f292-461e-a82b-0faee9999778'::UUID,
+-- 'b2968456-b26a-4bbb-b8d9-f1217a6147ff'::UUID,
+-- 'afcc8471-c91b-466e-833d-f173cc58797f'::UUID,
+-- '26cb2cfa-910a-46c3-b03f-9dbcf823f8d8'::UUID,
+-- '3a297a4e-093a-4f9b-b201-1a994e2f4da7'::UUID,
+-- '8d10fbd9-2669-4727-b4c1-746361691388'::UUID,
+-- '6044cffb-c241-4b66-9873-068c2bbac451'::UUID,
+-- '98385e5a-c5d8-4441-aa2e-0f6120414352'::UUID,
+-- 'c488fc08-18ff-4e3d-851f-46cfd1257b6c'::UUID
+-- ]) AS timeseries_id,
+-- generate_series(
+-- now() - INTERVAL '1 month',
+-- now(),
+-- INTERVAL '1 hour'
+-- ) AS time;
diff --git a/api/migrations/seed/V1.25.01__seed_uploader_config.sql b/api/migrations/seed/V1.25.01__seed_uploader_config.sql
index 53bcda78..b7c95bf2 100644
--- a/api/migrations/seed/V1.25.01__seed_uploader_config.sql
+++ b/api/migrations/seed/V1.25.01__seed_uploader_config.sql
@@ -10,7 +10,7 @@ INSERT INTO project_instrument (project_id, instrument_id) VALUES
INSERT INTO timeseries (id, slug, name, instrument_id, parameter_id, unit_id, type) VALUES
('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', 'constant-bottom-elevation', 'Bottom Elevation Constant', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', 'constant');
-INSERT INTO timeseries_measurement (timeseries_id, time, value) values ('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', now(), 100.5);
+-- INSERT INTO timeseries_measurement (timeseries_id, time, value) values ('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', now(), 100.5);
INSERT INTO timeseries (id, slug, name, instrument_id, type) VALUES
('1e3a1d3c-38e3-4f34-b65f-d2b8287ed591', 'depth-segment-1', 'Depth Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'),
@@ -72,59 +72,59 @@ INSERT INTO uploader_config_mapping (uploader_config_id, field_name, timeseries_
('32f69e6f-c4bf-616e-f044-2f86f4e7c2bb', 'UploaderTimeseries4', 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07', NULL);
-insert into timeseries_measurement (timeseries_id, time, value)
-select
- timeseries_id,
- m.time,
- m.value
-from
-unnest(array[
- '1e3a1d3c-38e3-4f34-b65f-d2b8287ed591'::uuid,
- '229d8f8d-fd60-465e-94c4-c25bc79d4f7e'::uuid,
- '74d19174-f911-4234-96e4-fae1a49969e6'::uuid,
- 'fd7c720e-7119-45dc-bf7a-44da303a9aa4'::uuid,
- 'e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1'::uuid,
- '6d623d88-b6e8-4f0d-86be-d4445b6b6789'::uuid,
- '40dfcce3-7f36-475e-969c-2b0b8633c856'::uuid,
- '20792b7d-0f87-4f4b-81c6-616a8d76613a'::uuid,
- '659d026e-3f47-4efe-899f-4129b5466228'::uuid,
- '015a07f9-4005-4d2d-96dc-7f7d611ca51a'::uuid,
- '600d164d-432c-40e8-a1f7-b4ebc112939e'::uuid,
- 'b5e12c65-acdb-4439-a436-c762096e54d3'::uuid,
- '34bfea2d-f312-4c13-bf4f-ac760236484c'::uuid,
- '3f53c9ef-0058-49f3-b62c-b8cad5f92d4d'::uuid,
- '14a4ae19-a857-44bb-a9e6-3df7e563847d'::uuid,
- '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a'::uuid,
- '3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d'::uuid,
- '4d5b281f-14b8-42d7-bb1e-9c6118da813f'::uuid,
- '5e6c3920-25b9-43e9-a58e-28d6e49516b2'::uuid,
- '6f7d4a21-36ca-44fb-99e0-59a7e60627c3'::uuid,
- '7a8e5b22-47db-45fc-aa91-8a08f71738d4'::uuid,
- '8b9f6c23-58ec-460d-ab02-3b19f82849e5'::uuid,
- '9caf7d24-69fd-471e-bc13-6a2a09395af6'::uuid,
- 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07'::uuid
-]) as timeseries_id,
-(
- with daily_series as (
- select ts, date(ts) as day, rownum
- from generate_series(now() - interval '1 year', now(), '1 day'::interval) with ordinality as t(ts, rownum)
- ),
- daily_value as (
- select ts, day, date_part('month', ts) as m_val, rownum, random() as val
- from daily_series
- order by day
- ),
- daily_wave as (
- select
- day,
- 1 + .2 * cos(rownum * 6.28/180) as p_mod
- from daily_series
- day
- )
- select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value
- from daily_value dv
- inner join daily_wave dw on dv.day=dw.day
- inner join seed_data_overrides o on dv.m_val=o.m_val
- order by ts
-) m
-on conflict do nothing;
+-- insert into timeseries_measurement (timeseries_id, time, value)
+-- select
+-- timeseries_id,
+-- m.time,
+-- m.value
+-- from
+-- unnest(array[
+-- '1e3a1d3c-38e3-4f34-b65f-d2b8287ed591'::uuid,
+-- '229d8f8d-fd60-465e-94c4-c25bc79d4f7e'::uuid,
+-- '74d19174-f911-4234-96e4-fae1a49969e6'::uuid,
+-- 'fd7c720e-7119-45dc-bf7a-44da303a9aa4'::uuid,
+-- 'e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1'::uuid,
+-- '6d623d88-b6e8-4f0d-86be-d4445b6b6789'::uuid,
+-- '40dfcce3-7f36-475e-969c-2b0b8633c856'::uuid,
+-- '20792b7d-0f87-4f4b-81c6-616a8d76613a'::uuid,
+-- '659d026e-3f47-4efe-899f-4129b5466228'::uuid,
+-- '015a07f9-4005-4d2d-96dc-7f7d611ca51a'::uuid,
+-- '600d164d-432c-40e8-a1f7-b4ebc112939e'::uuid,
+-- 'b5e12c65-acdb-4439-a436-c762096e54d3'::uuid,
+-- '34bfea2d-f312-4c13-bf4f-ac760236484c'::uuid,
+-- '3f53c9ef-0058-49f3-b62c-b8cad5f92d4d'::uuid,
+-- '14a4ae19-a857-44bb-a9e6-3df7e563847d'::uuid,
+-- '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a'::uuid,
+-- '3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d'::uuid,
+-- '4d5b281f-14b8-42d7-bb1e-9c6118da813f'::uuid,
+-- '5e6c3920-25b9-43e9-a58e-28d6e49516b2'::uuid,
+-- '6f7d4a21-36ca-44fb-99e0-59a7e60627c3'::uuid,
+-- '7a8e5b22-47db-45fc-aa91-8a08f71738d4'::uuid,
+-- '8b9f6c23-58ec-460d-ab02-3b19f82849e5'::uuid,
+-- '9caf7d24-69fd-471e-bc13-6a2a09395af6'::uuid,
+-- 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07'::uuid
+-- ]) as timeseries_id,
+-- (
+-- with daily_series as (
+-- select ts, date(ts) as day, rownum
+-- from generate_series(now() - interval '1 year', now(), '1 day'::interval) with ordinality as t(ts, rownum)
+-- ),
+-- daily_value as (
+-- select ts, day, date_part('month', ts) as m_val, rownum, random() as val
+-- from daily_series
+-- order by day
+-- ),
+-- daily_wave as (
+-- select
+-- day,
+-- 1 + .2 * cos(rownum * 6.28/180) as p_mod
+-- from daily_series
+-- day
+-- )
+-- select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value
+-- from daily_value dv
+-- inner join daily_wave dw on dv.day=dw.day
+-- inner join seed_data_overrides o on dv.m_val=o.m_val
+-- order by ts
+-- ) m
+-- on conflict do nothing;
diff --git a/api/queries/goes.sql b/api/queries/goes.sql
new file mode 100644
index 00000000..e46b1652
--- /dev/null
+++ b/api/queries/goes.sql
@@ -0,0 +1,105 @@
+-- name: GoesTelemetrySourceList :many
+select *
+from v_goes_telemetry_source;
+
+
+-- name: GoesPlatformConfigFileCreate :one
+insert into goes_platform_config_file (
+ goes_telemetry_source_id,
+ project_id,
+ name,
+ alias,
+ size_bytes,
+ content,
+ created_by
+) values (
+ sqlc.arg(goes_telemetry_source_id),
+ sqlc.arg(project_id),
+ sqlc.arg(name),
+ sqlc.arg(alias),
+ sqlc.arg(size_bytes),
+ sqlc.arg(content)::xml,
+ sqlc.arg(created_by)
+)
+returning id;
+
+
+-- name: GoesPlatformConfigFileGet :one
+select *
+from goes_platform_config_file
+where id = $1
+and not deleted;
+
+
+-- name: GoesPlatformConfigFileListUncommittedForProject :many
+select *
+from goes_platform_config_file
+where project_id = $1
+and not committed
+and not deleted;
+
+
+-- name: GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource :many
+select committed_content::xml
+from goes_platform_config_file
+where goes_telemetry_source_id = $1
+and committed
+and not deleted;
+
+
+-- name: GoesPlatformConfigFileUpdate :exec
+update goes_platform_config_file set
+ name = sqlc.arg(name),
+ alias = sqlc.arg(alias),
+ size_bytes = sqlc.arg(size_bytes),
+ content = sqlc.arg(content)::xml,
+ deleted = false,
+ deleted_at = null,
+ deleted_by = null
+where id = sqlc.arg(id);
+
+
+-- name: GoesPlatformConfigFileDelete :exec
+update goes_platform_config_file set
+ deleted = true,
+ deleted_at = now(),
+ deleted_by = sqlc.arg(deleted_by),
+ committed = false
+where id = sqlc.arg(id);
+
+
+-- name: GoesTelemetryConfigMappingsCreateBatch :batchexec
+insert into goes_telemetry_config_mappings (
+ goes_platform_config_file_id,
+ platform_sensor_key,
+ timeseries_id
+) values ($1, $2, $3)
+on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key
+do update set timeseries_id = excluded.timeseries_id;
+
+
+-- name: GoesTelemetryConfigMappingsDeleteBatch :batchexec
+delete from goes_telemetry_config_mappings
+where goes_platform_config_file_id = $1
+and platform_sensor_key = $2;
+
+
+-- name: GoesTelemetryConfigSetUncommitted :exec
+update goes_platform_config_file set
+ committed = false
+where id = sqlc.arg(id);
+
+
+-- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec
+delete from goes_telemetry_config_mappings
+where goes_platform_config_file_id = $1;
+
+
+-- name: GoesTelemetryConfigMappingsList :many
+select *
+from goes_telemetry_config_mappings
+where goes_platform_config_file_id = $1
+and goes_platform_config_file_id in (
+ select id from goes_platform_config_file where deleted = false
+)
+order by platform_sensor_key;
diff --git a/api/queries/goes_commit.sql b/api/queries/goes_commit.sql
new file mode 100644
index 00000000..ea964919
--- /dev/null
+++ b/api/queries/goes_commit.sql
@@ -0,0 +1,215 @@
+-- name: GoesCommitGetActive :one
+select *
+from goes_commit
+where
+ project_id = $1
+ and goes_telemetry_source_id = $2
+ and status = 'active'
+order by created_at desc
+limit 1;
+
+
+-- name: GoesCommitGetByID :one
+select *
+from goes_commit
+where id = $1;
+
+
+-- name: GoesCommitCreatePending :one
+insert into goes_commit (
+ project_id,
+ goes_telemetry_source_id,
+ created_by,
+ status,
+ previous_commit_id,
+ idempotency_key,
+ mapping_set_id
+) values (
+ $1, $2, $3, 'pending', $4, $5, $6
+)
+returning *;
+
+
+-- name: GoesCommitMarkActive :exec
+with target as (
+ select
+ c.id,
+ c.project_id,
+ c.goes_telemetry_source_id
+ from goes_commit c
+ where c.id = sqlc.arg(id)
+),
+cleared as (
+ update goes_commit c
+ set status = 'inactive'
+ where c.project_id = (select t.project_id from target t)
+ and c.goes_telemetry_source_id = (select t.goes_telemetry_source_id from target t)
+ and c.status = 'active'
+)
+update goes_commit c
+set status = 'active',
+ opendcs_response = sqlc.arg(opendcs_response)::jsonb
+where c.id = (select t.id from target t);
+
+
+-- name: GoesCommitMarkFailed :exec
+update goes_commit set status = 'failed', opendcs_response = sqlc.arg(opendcs_response)::jsonb
+where id = sqlc.arg(id);
+
+
+-- name: GoesPlatformConfigFilesListUncommitted :many
+select id, name, alias, content, deleted
+from goes_platform_config_file
+where project_id = $1
+and goes_telemetry_source_id = $2
+and not committed
+order by deleted desc, created_at asc;
+
+
+-- name: GoesPlatformConfigFilesListForCommitByCommitID :many
+select id, name, alias, committed_content::text as content
+from goes_platform_config_file
+where project_id = $1
+and goes_telemetry_source_id = $2
+and committed_commit_id = $3
+order by created_at asc;
+
+
+-- name: GoesPlatformConfigFileCommitArtifactsUpdate :exec
+update goes_platform_config_file set
+ committed_content = sqlc.arg(committed_content)::xml,
+ committed = true,
+ committed_at = now(),
+ committed_commit_id = sqlc.arg(committed_commit_id)
+where id = sqlc.arg(id);
+
+
+-- name: GoesTelemetryConfigMappingsListForFiles :many
+select goes_platform_config_file_id, platform_sensor_key, timeseries_id
+from goes_telemetry_config_mappings
+where goes_platform_config_file_id = any(sqlc.arg(file_ids)::uuid[])
+order by goes_platform_config_file_id, platform_sensor_key;
+
+
+-- name: GoesMappingSetCreate :one
+insert into goes_mapping_set (project_id, created_by, content_hash, idempotency_key)
+values ($1, $2, $3, $4)
+returning *;
+
+
+-- name: GoesMappingSetEntryCreateBatch :copyfrom
+insert into goes_mapping_set_entry (
+ mapping_set_id,
+ goes_platform_config_file_id,
+ platform_sensor_key,
+ timeseries_id
+) values ($1, $2, $3, $4);
+
+
+-- name: GoesMappingSetEntriesList :many
+select goes_platform_config_file_id, platform_sensor_key, timeseries_id
+from goes_mapping_set_entry
+where mapping_set_id = $1
+order by goes_platform_config_file_id, platform_sensor_key;
+
+
+-- name: GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet :exec
+with file_ids as (
+ select id
+ from goes_platform_config_file
+ where project_id = $1
+ and goes_telemetry_source_id = $2
+)
+, del as (
+ delete from goes_telemetry_config_mappings m
+ using file_ids f
+ where m.goes_platform_config_file_id = f.id
+)
+insert into goes_telemetry_config_mappings (
+ goes_platform_config_file_id,
+ platform_sensor_key,
+ timeseries_id
+)
+select
+ e.goes_platform_config_file_id,
+ e.platform_sensor_key,
+ e.timeseries_id
+from goes_mapping_set_entry e
+join file_ids f on f.id = e.goes_platform_config_file_id
+where e.mapping_set_id = $3
+on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key
+do update set timeseries_id = excluded.timeseries_id;
+
+
+-- name: GoesPlatformRegistryListByProject :many
+select platform_key, platform_id, site_name
+from goes_platform_registry
+where project_id = $1
+and goes_telemetry_source_id = $2
+order by platform_key;
+
+
+-- name: GoesPlatformRegistryConflicts :many
+select platform_key, project_id
+from goes_platform_registry
+where goes_telemetry_source_id = $1
+and platform_key = any(sqlc.arg(platform_keys)::text[])
+and project_id <> $2;
+
+
+-- name: GoesPlatformRegistryUpsert :batchexec
+insert into goes_platform_registry (
+ platform_key,
+ project_id,
+ goes_telemetry_source_id,
+ platform_id,
+ site_name,
+ commit_id,
+ updated_at
+) values ($1, $2, $3, $4, $5, $6, now())
+on conflict (platform_key) do update set
+ project_id = excluded.project_id,
+ goes_telemetry_source_id = excluded.goes_telemetry_source_id,
+ platform_id = excluded.platform_id,
+ site_name = excluded.site_name,
+ commit_id = excluded.commit_id,
+ updated_at = now();
+
+
+-- name: GoesPlatformRegistryDeleteMissing :exec
+delete from goes_platform_registry r
+where r.project_id = $1
+and r.goes_telemetry_source_id = $2
+and not (r.platform_key = any(sqlc.arg(platform_keys)::text[]));
+
+
+-- name: GoesPlatformConfigFileRestoreForRollback :exec
+update goes_platform_config_file set
+ content = sqlc.arg(content)::xml,
+ committed_content = sqlc.arg(content)::xml,
+ committed = true,
+ committed_at = now(),
+ committed_commit_id = sqlc.arg(committed_commit_id),
+ deleted = false,
+ deleted_at = null,
+ deleted_by = null
+where id = sqlc.arg(id);
+
+
+-- name: GoesPlatformConfigFileSoftDeleteNotInSet :exec
+update goes_platform_config_file f set
+ deleted = true,
+ deleted_at = now(),
+ deleted_by = $3
+where f.project_id = $1
+and f.goes_telemetry_source_id = $2
+and not (f.id = any(sqlc.arg(file_ids)::uuid[]))
+and f.deleted = false;
+
+
+-- name: GoesPlatformConfigFileCommit :batchexec
+update goes_platform_config_file set
+ committed = true,
+ committed_at = sqlc.arg(committed_at),
+ committed_commit_id = sqlc.arg(committed_commit_id)
+where id = sqlc.arg(id);
diff --git a/compose.sh b/compose.sh
index be3523ff..af99e49d 100755
--- a/compose.sh
+++ b/compose.sh
@@ -180,6 +180,12 @@ elif [ "$1" = "test" ]; then
elif [ "$1" = "mkdocs" ]; then
mkdocs
+elif [ "$1" = "opendcs-dep" ]; then
+ cid=$(docker create ghcr.io/opendcs/routingscheduler:7.0-nightly) &&
+ mkdir -p "${parent_path}/opendcs/rsgis/src/main/resources" &&
+ docker cp "$cid:/opt/opendcs/bin/opendcs.jar" "${parent_path}/opendcs/rsgis/src/main/resources/opendcs.jar" &&
+ docker rm "$cid"
+
else
echo -e "usage:\n\t./compose.sh watch\n\t./compose.sh up\n\t./compose.sh down\n\t./compose.sh clean\n\t./compose.sh test\n\t./compose.sh mkdocs"
fi
diff --git a/docker-compose.yaml b/docker-compose.yaml
index d14144d2..a4d16aca 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -191,6 +191,17 @@ services:
localstack-init:
condition: service_completed_successfully
+ opendcs:
+ build:
+ context: ./opendcs
+ image: opendcs
+ env_file:
+ - path: ./env_files/opendcs.env
+ required: true
+ - path: .env
+ required: true
+ restart: unless-stopped
+
localstack:
image: localstack/localstack:4
ports:
diff --git a/env_files/opendcs.env b/env_files/opendcs.env
new file mode 100644
index 00000000..adf4227a
--- /dev/null
+++ b/env_files/opendcs.env
@@ -0,0 +1,12 @@
+AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
+AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
+
+AWS_ENDPOINT_URL="http://localstack:4566"
+DATALOAD_S3_ROOT="s3://corpsmap-data-incoming?region=us-east-1&prefix=instrumentation/goes/&endpoint=http://localstack:4566&use_path_style=true&awssdk=v2"
+PLATFORM_IMPORT_PREFIX=appkey
+CDADATA_USERNAME=
+CDADATA_PASSWORD=
+CDABACKUP_USERNAME=
+CDABACKUP_PASSWORD=
+EDDN1_USERNAME=
+EDDN1_PASSWORD=
diff --git a/go.work b/go.work
index 9671ce28..8fbc0b78 100644
--- a/go.work
+++ b/go.work
@@ -1,3 +1,5 @@
-go 1.25
+go 1.25.5
use ./api
+
+use ./opendcs
diff --git a/opendcs/.gitignore b/opendcs/.gitignore
new file mode 100644
index 00000000..641c73d1
--- /dev/null
+++ b/opendcs/.gitignore
@@ -0,0 +1,19 @@
+target/
+pom.xml.tag
+pom.xml.releaseBackup
+pom.xml.versionsBackup
+pom.xml.next
+release.properties
+dependency-reduced-pom.xml
+buildNumber.properties
+.mvn/timing.properties
+# https://maven.apache.org/wrapper/#usage-without-binary-jar
+.mvn/wrapper/maven-wrapper.jar
+
+# Eclipse m2e generated files
+# Eclipse Core
+.project
+# JDT-specific (Eclipse Java Development Tools)
+.classpath
+
+rsgis/src/main/resources
diff --git a/opendcs/Dockerfile b/opendcs/Dockerfile
new file mode 100644
index 00000000..c183d016
--- /dev/null
+++ b/opendcs/Dockerfile
@@ -0,0 +1,79 @@
+ARG OPENDCS_BASE_IMAGE=ghcr.io/opendcs/routingscheduler:7.0-nightly
+ARG MAVEN_BUILD_IMAGE=maven:3-eclipse-temurin-17-noble
+ARG GO_BUILD_IMAGE=golang:1.25-alpine
+ARG OPENDCS_VERSION=7.0-nightly
+
+FROM ${OPENDCS_BASE_IMAGE} AS opendcs_patched
+
+USER root
+
+COPY patch_opendcs.sh /patch_opendcs.sh
+RUN /patch_opendcs.sh && rm /patch_opendcs.sh
+
+FROM opendcs_patched AS opendcs_base
+
+FROM ${MAVEN_BUILD_IMAGE} AS maven_builder
+
+ARG OPENDCS_VERSION
+
+# workaround for issues with cross-compilation
+ENV JAVA_TOOL_OPTIONS="-XX:TieredStopAtLevel=1"
+
+COPY ./rsgis /opt/rsgis
+RUN mkdir -p /opt/rsgis/src/main/resources
+COPY --from=opendcs_base /opt/opendcs/bin/opendcs.jar /opt/rsgis/src/main/resources/opendcs.jar
+
+RUN --mount=type=cache,target=/root/.m2 \
+ mvn -f /opt/rsgis/pom.xml -q install:install-file \
+ -Dfile=/opt/rsgis/src/main/resources/opendcs.jar \
+ -DgroupId=org.opendcs \
+ -DartifactId=opendcs \
+ -Dversion=${OPENDCS_VERSION} \
+ -Dpackaging=jar \
+ -DgeneratePom=true
+
+RUN --mount=type=cache,target=/root/.m2 \
+ mvn -f /opt/rsgis/pom.xml -Dopendcs.version=${OPENDCS_VERSION} clean package
+
+FROM ${GO_BUILD_IMAGE} AS go_builder
+
+WORKDIR /src
+
+COPY go.mod go.sum ./
+RUN --mount=type=cache,target=/go/pkg/mod \
+ go mod download
+
+COPY . .
+
+RUN --mount=type=cache,target=/root/.cache/go-build \
+ go build -o /opendcs-wrapper .
+
+FROM opendcs_patched
+
+USER root
+
+RUN rm -rf /opt/java/openjdk/release
+
+RUN apk add --no-cache coreutils ca-certificates
+
+ENV DCSTOOL_USERDIR=/opt/opendcs
+ENV DATABASE_URL=/opt/opendcs/edit-db
+ENV OPENDCS_IMPORT_DIR=/opt/opendcs/import
+ENV OPENDCS_HTTP_ADDR=:8080
+ENV OPENDCS_LOG_DIR=/opendcs_output
+
+RUN mkdir -p -m 775 ${DCSTOOL_HOME} /opendcs_output ${OPENDCS_IMPORT_DIR} && \
+ chown -R opendcs:opendcs ${DCSTOOL_HOME} /opendcs_output ${OPENDCS_IMPORT_DIR}
+
+RUN apk del py3-cryptography || true
+
+COPY --chown=opendcs:opendcs --from=maven_builder /opt/rsgis/target/rsgis.jar ${DCSTOOL_HOME}/dep
+
+COPY --chown=opendcs:opendcs ./logback.xml ${DCSTOOL_HOME}/logback.xml
+COPY --chown=opendcs:opendcs ./decodes.properties ${DCSTOOL_HOME}/decodes.properties
+COPY --chown=opendcs:opendcs ./midas_config /usr/local/share/midas_config
+COPY --chown=opendcs:opendcs --from=go_builder /opendcs-wrapper /usr/local/bin/opendcs-wrapper
+
+USER opendcs
+
+CMD ["/usr/local/bin/opendcs-wrapper"]
diff --git a/opendcs/app.go b/opendcs/app.go
new file mode 100644
index 00000000..18c8763e
--- /dev/null
+++ b/opendcs/app.go
@@ -0,0 +1,26 @@
+package main
+
+import (
+ "crypto/subtle"
+ "log/slog"
+ "net/http"
+ "sync"
+
+ "github.com/danielgtaylor/huma/v2"
+)
+
+type App struct {
+ cfg Config
+ logger *slog.Logger
+ dbimport *dbimport
+ mu sync.Mutex
+ httpServer *http.Server
+ httpClient *http.Client
+}
+
+func (a *App) checkKey(got string) error {
+ if subtle.ConstantTimeCompare([]byte(got), []byte(a.cfg.AuthToken)) != 1 {
+ return huma.NewError(http.StatusUnauthorized, "unauthorized")
+ }
+ return nil
+}
diff --git a/opendcs/dbimport.go b/opendcs/dbimport.go
new file mode 100644
index 00000000..ba363be8
--- /dev/null
+++ b/opendcs/dbimport.go
@@ -0,0 +1,187 @@
+package main
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "log/slog"
+ "net/http"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "slices"
+ "sort"
+ "strconv"
+ "syscall"
+)
+
+type dbimport struct {
+ cfg Config
+ logger *slog.Logger
+}
+
+func NewDbimport(ctx context.Context, cfg Config, logger *slog.Logger) (*dbimport, error) {
+ i := &dbimport{
+ cfg: cfg,
+ logger: logger,
+ }
+ files, err := i.resolveImportFiles([]string{midasConfigDir})
+ if err != nil {
+ return nil, fmt.Errorf("unable to resolve initial import files: %w", err)
+ }
+ if len(files) == 0 {
+ return nil, errors.New("no inital import files")
+ }
+ _, err = i.runDBImport(ctx, files)
+ if err != nil {
+ return nil, fmt.Errorf("validation failed: %w", err)
+ }
+ return i, nil
+}
+
+type dbimportParams struct {
+ Files []string `json:"files"`
+ ValidateOnly bool `json:"validate_only"`
+}
+
+type dbimportOutput struct {
+ Status int `json:"status"`
+ Log string `json:"log,omitempty"`
+ ParsingErrors []string `json:"parsing_errors,omitempty"`
+}
+
+func (i *dbimport) ProcessAtomic(ctx context.Context, req *dbimportParams) (*dbimportOutput, error) {
+ files, err := i.resolveImportFiles(req.Files)
+ if err != nil {
+ return nil, err
+ }
+ if len(files) == 0 {
+ return nil, errors.New("no import files")
+ }
+
+ dbimportArgs := []string{}
+ if req.ValidateOnly {
+ dbimportArgs = append(dbimportArgs, "-v")
+ }
+
+ logOut, err := i.runDBImport(ctx, files, dbimportArgs...)
+ if err != nil {
+ return nil, fmt.Errorf("import failed: %w", err)
+ }
+
+ if req.ValidateOnly {
+ return &dbimportOutput{Status: http.StatusOK, Log: logOut}, nil
+ }
+
+ return &dbimportOutput{Log: logOut}, nil
+}
+
+func (i *dbimport) resolveImportFiles(files []string) ([]string, error) {
+ var out []string
+ for _, f := range files {
+ info, err := os.Stat(f)
+ if err != nil {
+ return nil, err
+ }
+ if info.IsDir() {
+ err := filepath.WalkDir(f, func(p string, d os.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+ if d.IsDir() {
+ return nil
+ }
+ out = append(out, p)
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ continue
+ }
+ out = append(out, f)
+ }
+ sort.Strings(out)
+ return out, nil
+}
+
+func (i *dbimport) runDBImport(ctx context.Context, files []string, extraArgs ...string) (string, error) {
+ args := slices.Clone(extraArgs)
+ args = append(args, files...)
+ cmd := exec.CommandContext(ctx, "dbimport", args...)
+ cmd.Env = os.Environ()
+
+ var buf bytes.Buffer
+ mw := io.MultiWriter(os.Stdout, &buf)
+ cmd.Stdout = mw
+ cmd.Stderr = mw
+
+ err := cmd.Run()
+ return buf.String(), err
+}
+
+func (i *dbimport) startRoutingScheduler(ctx context.Context) error {
+ lockfile := filepath.Join(os.TempDir(), "rs.lock")
+
+ // Ensure lockfile exists (create empty if not)
+ if _, err := os.Stat(lockfile); os.IsNotExist(err) {
+ if err := os.WriteFile(lockfile, []byte{}, 0644); err != nil {
+ return fmt.Errorf("failed to create lockfile: %w", err)
+ }
+ }
+
+ // Kill previous rs process if lockfile contains a PID
+ if pid, err := readLockfilePID(lockfile); err == nil && pid > 0 {
+ if err := killProcess(pid); err != nil && !errors.Is(err, os.ErrNotExist) {
+ i.logger.Warn("failed to kill previous rs process", "pid", pid, "error", err)
+ }
+ _ = os.Remove(lockfile)
+ }
+
+ cmd := exec.CommandContext(ctx, "rs", i.cfg.RoutingSpec)
+ cmd.Env = os.Environ()
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+
+ if err := cmd.Start(); err != nil {
+ return fmt.Errorf("rs failed to run: %w", err)
+ }
+
+ return nil
+}
+
+// Helper to read PID from lockfile
+func readLockfilePID(lockfile string) (int, error) {
+ data, err := os.ReadFile(lockfile)
+ if err != nil {
+ return 0, err
+ }
+ pid, err := strconv.Atoi(string(data))
+ if err != nil {
+ return 0, err
+ }
+ return pid, nil
+}
+
+// Helper to kill process by PID
+func killProcess(pid int) error {
+ proc, err := os.FindProcess(pid)
+ if err != nil {
+ return err
+ }
+ return proc.Signal(syscall.SIGTERM)
+}
+
+type Response[T any] struct {
+ Body T
+}
+
+func NewResponse[T any](body T) *Response[T] {
+ return &Response[T]{Body: body}
+}
+
+type KeyQueryParam struct {
+ Key string `query:"key"`
+}
diff --git a/opendcs/decodes.properties b/opendcs/decodes.properties
new file mode 100644
index 00000000..e422df70
--- /dev/null
+++ b/opendcs/decodes.properties
@@ -0,0 +1,47 @@
+#
+# The 'EditDatabase' is the provisional working database.
+# The default installation is set up for a local XML database.
+#
+EditDatabaseType=XML
+EditDatabaseLocation=/opt/opendcs/edit-db
+
+#
+# For SQL Editable Database, change EditDatabaseType to sql
+# Then...
+# Format for EditDatabaseLocation is a JDBC Database URL:
+#
+# jdbc:protocol:[//host[:port]]/databasename
+#
+# where
+# protocol is usually the DB product name like 'postgresql'
+# host and port are optional. If not supplied, a local database is assumed.
+# databasename is the database name - required.
+#
+# example:
+# EditDatabaseLocation=jdbc:postgresql://mylrgs/decodesedit
+#
+
+# Settings for the dbedit GUI:
+EditPresentationGroup=CWMS-English
+
+# Various agency-specific preferences:
+SiteNameTypePreference=CWMS
+EditTimeZone=UTC
+#EditOutputFormat=Human-Readable
+
+jdbcDriverClass=org.postgresql.Driver
+
+SqlKeyGenerator=decodes.sql.SequenceKeyGenerator
+#sqlDateFormat=
+#sqlTimeZone=
+
+transportMediumTypePreference=goes
+
+#defaultDataSource=
+#routingStatusDir=
+dataTypeStdPreference=CWMS
+#decwizTimeZone=
+#decwizOutputFormat=
+#decwizDebugLevel=
+#decwizDecodedDataDir=
+#decwizSummaryLog=
diff --git a/opendcs/go.mod b/opendcs/go.mod
new file mode 100644
index 00000000..b7c8ac6b
--- /dev/null
+++ b/opendcs/go.mod
@@ -0,0 +1,50 @@
+module github.com/USACE/instrumentation-api/opendcs
+
+go 1.25.5
+
+require (
+ github.com/caarlos0/env/v11 v11.3.1
+ github.com/danielgtaylor/huma/v2 v2.34.1
+ github.com/google/uuid v1.6.0
+ gocloud.dev v0.44.0
+)
+
+require (
+ github.com/aws/aws-sdk-go-v2 v1.39.6 // indirect
+ github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect
+ github.com/aws/aws-sdk-go-v2/config v1.31.17 // indirect
+ github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 // indirect
+ github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 // indirect
+ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
+ github.com/aws/smithy-go v1.23.2 // indirect
+ github.com/go-logr/logr v1.4.3 // indirect
+ github.com/go-logr/stdr v1.2.2 // indirect
+ github.com/google/wire v0.7.0 // indirect
+ github.com/googleapis/gax-go/v2 v2.15.0 // indirect
+ go.opentelemetry.io/auto/sdk v1.1.0 // indirect
+ go.opentelemetry.io/otel v1.37.0 // indirect
+ go.opentelemetry.io/otel/metric v1.37.0 // indirect
+ go.opentelemetry.io/otel/sdk v1.37.0 // indirect
+ go.opentelemetry.io/otel/sdk/metric v1.37.0 // indirect
+ go.opentelemetry.io/otel/trace v1.37.0 // indirect
+ golang.org/x/net v0.43.0 // indirect
+ golang.org/x/sys v0.35.0 // indirect
+ golang.org/x/text v0.28.0 // indirect
+ golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
+ google.golang.org/api v0.247.0 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a // indirect
+ google.golang.org/grpc v1.74.2 // indirect
+ google.golang.org/protobuf v1.36.7 // indirect
+)
diff --git a/opendcs/go.sum b/opendcs/go.sum
new file mode 100644
index 00000000..390891e2
--- /dev/null
+++ b/opendcs/go.sum
@@ -0,0 +1,164 @@
+cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
+cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
+cloud.google.com/go v0.121.6 h1:waZiuajrI28iAf40cWgycWNgaXPO06dupuS+sgibK6c=
+cloud.google.com/go v0.121.6/go.mod h1:coChdst4Ea5vUpiALcYKXEpR1S9ZgXbhEzzMcMR66vI=
+cloud.google.com/go/auth v0.16.4 h1:fXOAIQmkApVvcIn7Pc2+5J8QTMVbUGLscnSVNl11su8=
+cloud.google.com/go/auth v0.16.4/go.mod h1:j10ncYwjX/g3cdX7GpEzsdM+d+ZNsXAbb6qXA7p1Y5M=
+cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
+cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
+cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA=
+cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw=
+cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=
+cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=
+cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM=
+cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U=
+cloud.google.com/go/storage v1.56.0 h1:iixmq2Fse2tqxMbWhLWC9HfBj1qdxqAmiK8/eqtsLxI=
+cloud.google.com/go/storage v1.56.0/go.mod h1:Tpuj6t4NweCLzlNbw9Z9iwxEkrSem20AetIeH/shgVU=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 h1:UQUsRi8WTzhZntp5313l+CHIAT95ojUI2lpP/ExlZa4=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0/go.mod h1:Cz6ft6Dkn3Et6l2v2a9/RpN7epQ1GtDlO6lj8bEcOvw=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ=
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
+github.com/aws/aws-sdk-go-v2 v1.39.6 h1:2JrPCVgWJm7bm83BDwY5z8ietmeJUbh3O2ACnn+Xsqk=
+github.com/aws/aws-sdk-go-v2 v1.39.6/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 h1:DHctwEM8P8iTXFxC/QK0MRjwEpWQeM9yzidCRjldUz0=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3/go.mod h1:xdCzcZEtnSTKVDOmUZs4l/j3pSV6rpo1WXl5ugNsL8Y=
+github.com/aws/aws-sdk-go-v2/config v1.31.17 h1:QFl8lL6RgakNK86vusim14P2k8BFSxjvUkcWLDjgz9Y=
+github.com/aws/aws-sdk-go-v2/config v1.31.17/go.mod h1:V8P7ILjp/Uef/aX8TjGk6OHZN6IKPM5YW6S78QnRD5c=
+github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA=
+github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk=
+github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3 h1:4GNV1lhyELGjMz5ILMRxDvxvOaeo3Ux9Z69S1EgVMMQ=
+github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3/go.mod h1:br7KA6edAAqDGUYJ+zVVPAyMrPhnN+zdt17yTUT6FPw=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 h1:a+8/MLcWlIxo1lF9xaGt3J/u3yOZx+CdSveSNwjhD40=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13/go.mod h1:oGnKwIYZ4XttyU2JWxFrwvhF6YKiK/9/wmE3v3Iu9K8=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 h1:HBSI2kDkMdWz4ZM7FjwE7e/pWDEZ+nR95x8Ztet1ooY=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13/go.mod h1:YE94ZoDArI7awZqJzBAZ3PDD2zSfuP7w6P2knOzIn8M=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 h1:eg/WYAa12vqTphzIdWMzqYRVKKnCboVPRlvaybNCqPA=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13/go.mod h1:/FDdxWhz1486obGrKKC1HONd7krpk38LBt+dutLcN9k=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 h1:NvMjwvv8hpGUILarKw7Z4Q0w1H9anXKsesMxtw++MA4=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4/go.mod h1:455WPHSwaGj2waRSpQp7TsnpOnBfw8iDfPfbwl7KPJE=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 h1:zhBJXdhWIFZ1acfDYIhu4+LCzdUS2Vbcum7D01dXlHQ=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13/go.mod h1:JaaOeCE368qn2Hzi3sEzY6FgAZVCIYcC2nwbro2QCh8=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2 h1:xgBWsgaeUESl8A8k80p6yBdexMWDVeiDmJ/pkjohJ7c=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2/go.mod h1:+wArOOrcHUevqdto9k1tKOF5++YTe9JEcPSc9Tx2ZSw=
+github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 h1:0JPwLz1J+5lEOfy/g0SURC9cxhbQ1lIMHMa+AHZSzz0=
+github.com/aws/aws-sdk-go-v2/service/sso v1.30.1/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 h1:OWs0/j2UYR5LOGi88sD5/lhN6TDLG6SfA7CqsQO9zF0=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo=
+github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs=
+github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
+github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
+github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
+github.com/caarlos0/env/v11 v11.3.1 h1:cArPWC15hWmEt+gWk7YBi7lEXTXCvpaSdCiZE2X5mCA=
+github.com/caarlos0/env/v11 v11.3.1/go.mod h1:qupehSf/Y0TUTsxKywqRt/vJjN5nz6vauiYEUUr8P4U=
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
+github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
+github.com/danielgtaylor/huma/v2 v2.34.1 h1:EmOJAbzEGfy0wAq/QMQ1YKfEMBEfE94xdBRLPBP0gwQ=
+github.com/danielgtaylor/huma/v2 v2.34.1/go.mod h1:ynwJgLk8iGVgoaipi5tgwIQ5yoFNmiu+QdhU7CEEmhk=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M=
+github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A=
+github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw=
+github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=
+github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
+github.com/go-jose/go-jose/v4 v4.1.1 h1:JYhSgy4mXXzAdF3nUx3ygx347LRXJRrpgyU3adRmkAI=
+github.com/go-jose/go-jose/v4 v4.1.1/go.mod h1:BdsZGqgdO3b6tTc6LSE56wcDbMMLuPsw5d4ZD5f94kA=
+github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
+github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
+github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
+github.com/google/go-replayers/grpcreplay v1.3.0 h1:1Keyy0m1sIpqstQmgz307zhiJ1pV4uIlFds5weTmxbo=
+github.com/google/go-replayers/grpcreplay v1.3.0/go.mod h1:v6NgKtkijC0d3e3RW8il6Sy5sqRVUwoQa4mHOGEy8DI=
+github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk=
+github.com/google/go-replayers/httpreplay v1.2.0/go.mod h1:WahEFFZZ7a1P4VM1qEeHy+tME4bwyqPcwWbNlUI1Mcg=
+github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc=
+github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0=
+github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
+github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4=
+github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18=
+github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4=
+github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
+github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo=
+github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
+github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
+github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=
+github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
+github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=
+go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
+go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
+go.opentelemetry.io/contrib/detectors/gcp v1.37.0 h1:B+WbN9RPsvobe6q4vP6KgM8/9plR/HNjgGBrfcOlweA=
+go.opentelemetry.io/contrib/detectors/gcp v1.37.0/go.mod h1:K5zQ3TT7p2ru9Qkzk0bKtCql0RGkPj9pRjpXgZJZ+rU=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 h1:rbRJ8BBoVMsQShESYZ0FkvcITu8X8QNwJogcLUmDNNw=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY=
+go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
+go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
+go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
+go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
+go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI=
+go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg=
+go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc=
+go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps=
+go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
+go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
+gocloud.dev v0.44.0 h1:iVyMAqFl2r6xUy7M4mfqwlN+21UpJoEtgHEcfiLMUXs=
+gocloud.dev v0.44.0/go.mod h1:ZmjROXGdC/eKZLF1N+RujDlFRx3D+4Av2thREKDMVxY=
+golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
+golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
+golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
+golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
+golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
+golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
+golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
+golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
+golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
+golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
+golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
+golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
+golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
+golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
+google.golang.org/api v0.247.0 h1:tSd/e0QrUlLsrwMKmkbQhYVa109qIintOls2Wh6bngc=
+google.golang.org/api v0.247.0/go.mod h1:r1qZOPmxXffXg6xS5uhx16Fa/UFY8QU/K4bfKrnvovM=
+google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 h1:Nt6z9UHqSlIdIGJdz6KhTIs2VRx/iOsA5iE8bmQNcxs=
+google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79/go.mod h1:kTmlBHMPqR5uCZPBvwa2B18mvubkjyY3CRLI0c6fj0s=
+google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c h1:AtEkQdl5b6zsybXcbz00j1LwNodDuH6hVifIaNqk7NQ=
+google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.mod h1:ea2MjsO70ssTfCjiwHgI0ZFqcw45Ksuk2ckf9G468GA=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a h1:tPE/Kp+x9dMSwUm/uM0JKK0IfdiJkwAbSMSeZBXXJXc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo=
+google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
+google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
+google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
+google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/opendcs/logback.xml b/opendcs/logback.xml
new file mode 100644
index 00000000..b8169db4
--- /dev/null
+++ b/opendcs/logback.xml
@@ -0,0 +1,32 @@
+
+
+
+
+
+
+
+ time
+ yyyy-MM-dd'T'HH:mm:ss.SSS'Z'
+
+
+
+ {
+ "level": "%level",
+ "thread": "%thread",
+ "msg": "%message"
+ }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/opendcs/main.go b/opendcs/main.go
new file mode 100644
index 00000000..36d7b461
--- /dev/null
+++ b/opendcs/main.go
@@ -0,0 +1,376 @@
+package main
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "os"
+ "os/signal"
+ "path/filepath"
+ "strings"
+ "syscall"
+ "time"
+
+ "github.com/caarlos0/env/v11"
+ "github.com/danielgtaylor/huma/v2"
+ "github.com/danielgtaylor/huma/v2/adapters/humago"
+ "github.com/google/uuid"
+
+ "gocloud.dev/blob"
+ _ "gocloud.dev/blob/s3blob"
+)
+
+const (
+	// VERSION is surfaced as the API version in the generated OpenAPI spec.
+	VERSION = "1.0.0"
+	// midasConfigDir is the baked-in location of the MIDAS OpenDCS config tree.
+	midasConfigDir = "/usr/local/share/midas_config"
+)
+
+// Config holds all environment-driven settings for the OpenDCS wrapper service.
+type Config struct {
+	// TODO: use build or default values while container mappings not set in environment
+	RoutingSpec string `env:"OPENDCS_ROUTING_SPEC" envDefault:"goes"` // OpenDCS routing spec name to schedule
+	ListenAddr string `env:"OPENDCS_HTTP_ADDR" envDefault:":8080"` // HTTP listen address for this wrapper
+	MidasAPIHost string `env:"MIDAS_API_HOST" envDefault:"http://api:80"` // base URL of the MIDAS API // TODO: make sure to remove this
+	TelemetrySourceID uuid.UUID `env:"TELEMETRY_SOURCE_ID" envDefault:"666e60ec-2c0a-4446-9eda-6f45cbcd0a60"` // GOES telemetry source used at bootstrap
+
+	// AuthToken is sent as the "key" query param on MIDAS API calls and checked on inbound requests.
+	AuthToken string `env:"PLATFORM_IMPORT_PREFIX"` // TODO: update this name instead of reusing existing env var mapping
+	// S3BucketURL is a gocloud.dev blob URL (e.g. s3://bucket) used by the "upload" CLI mode.
+	S3BucketURL string `env:"DATALOAD_S3_ROOT"` // TODO: rename
+}
+
+// main wires together the OpenDCS wrapper service:
+//
+//  1. "upload <file>" CLI mode: copy a local file into the configured blob
+//     bucket, then exit.
+//  2. Server mode: bootstrap the local OpenDCS database from the MIDAS API's
+//     committed platform configs, then serve /validate, /commit and /healthz
+//     until SIGINT/SIGTERM.
+func main() {
+	logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo}))
+	slog.SetDefault(logger)
+
+	var cfg Config
+	if err := env.Parse(&cfg); err != nil {
+		log.Fatalf("error initializing Config from env: %v", err)
+	}
+
+	ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
+	defer cancel()
+
+	// One-shot CLI mode: upload a single file to the blob bucket and exit.
+	if len(os.Args) > 2 && os.Args[1] == "upload" {
+		filePath := os.Args[2]
+		if cfg.S3BucketURL == "" {
+			// message kept in sync with the env tag on Config.S3BucketURL
+			log.Fatalf("DATALOAD_S3_ROOT env not set")
+		}
+
+		bucket, err := blob.OpenBucket(ctx, cfg.S3BucketURL)
+		if err != nil {
+			log.Fatalf("failed to open bucket: %v", err)
+		}
+		defer bucket.Close()
+
+		file, err := os.Open(filePath)
+		if err != nil {
+			log.Fatalf("failed to open file: %v", err)
+		}
+		defer file.Close()
+
+		key := filepath.Base(filePath)
+		writer, err := bucket.NewWriter(ctx, key, nil)
+		if err != nil {
+			log.Fatalf("failed to create writer: %v", err)
+		}
+		if _, err := io.Copy(writer, file); err != nil {
+			log.Fatalf("failed to upload file: %v", err)
+		}
+		if err := writer.Close(); err != nil {
+			log.Fatalf("failed to close writer: %v", err)
+		}
+		logger.InfoContext(ctx, "File uploaded", "file", filePath, "bucket", cfg.S3BucketURL, "key", key)
+		return
+	}
+
+	dbi, err := NewDbimport(ctx, cfg, logger)
+	if err != nil {
+		log.Fatalf("NewDbimport: %v", err)
+	}
+
+	// Bootstrap: fetch every previously-committed platform config from the
+	// MIDAS API and load it into the local OpenDCS database before serving.
+	{
+		u, err := url.Parse(cfg.MidasAPIHost)
+		if err != nil {
+			log.Fatalf("could not parse bad url base path: %v", err)
+		}
+		u.Path = fmt.Sprintf("/v4/goes/%s/configs/committed", cfg.TelemetrySourceID)
+		q := u.Query()
+		q.Add("key", cfg.AuthToken)
+		u.RawQuery = q.Encode()
+
+		res, err := http.Get(u.String())
+		if err != nil {
+			// *url.Error echoes the full URL, including the auth key —
+			// redact it before logging.
+			var urlErr *url.Error
+			if errors.As(err, &urlErr) {
+				if eu, parseErr := url.Parse(urlErr.URL); parseErr == nil {
+					redactQueryParams(eu, "key")
+					urlErr.URL = eu.String()
+					err = urlErr
+				} else {
+					err = errors.New("failed to redact query param when parsing error")
+				}
+			}
+			log.Fatalf("unable to reach api for existing committed platform config files: %v", err)
+		}
+		resBody, readErr := io.ReadAll(res.Body)
+		// close now: main runs for the whole process lifetime, so a defer
+		// here would pin the body until shutdown
+		res.Body.Close()
+		if readErr != nil {
+			log.Fatalf("failed to read committed platform config response: %v", readErr)
+		}
+		var platformConfigs []string
+		if err := json.Unmarshal(resBody, &platformConfigs); err != nil {
+			log.Fatalf("failed to decode committed platform config response: %v", err)
+		}
+		files := make([]string, 0, len(platformConfigs))
+		for _, pc := range platformConfigs {
+			tmpFile, err := os.CreateTemp(os.TempDir(), "platform-*.xml")
+			if err != nil {
+				log.Fatalf("failed to create temp file for platform config: %v", err)
+			}
+			if _, err := tmpFile.Write([]byte(pc)); err != nil {
+				tmpFile.Close()
+				log.Fatalf("failed to write platform config to temp file: %v", err)
+			}
+			tmpFile.Close()
+			files = append(files, tmpFile.Name())
+		}
+		if len(files) > 0 {
+			if _, err := dbi.ProcessAtomic(ctx, &dbimportParams{
+				Files:        files,
+				ValidateOnly: false,
+			}); err != nil {
+				log.Fatalf("failed to load platform configs into dbimport: %v", err)
+			}
+		}
+	}
+
+	router := http.NewServeMux()
+	api := humago.New(router, huma.DefaultConfig("OpenDCS Wrapper", VERSION))
+
+	app := &App{
+		cfg:      cfg,
+		logger:   logger,
+		dbimport: dbi,
+	}
+
+	// POST /validate: stage the uploaded platform configs and run dbimport in
+	// validate-only mode. Serialized via app.mu; concurrent calls get a 409.
+	huma.Post(api, "/validate", func(ctx context.Context, input *struct {
+		KeyQueryParam
+		RawBody huma.MultipartFormFiles[struct {
+			Files []huma.FormFile `form:"files" required:"true"`
+		}]
+	}) (*Response[dbimportOutput], error) {
+		if err := app.checkKey(input.Key); err != nil {
+			return nil, err
+		}
+		if !app.mu.TryLock() {
+			return nil, huma.NewError(http.StatusConflict, "dbimport validation already in progress")
+		}
+		defer app.mu.Unlock()
+
+		staged, parsingErrors := stageMultipartFiles(ctx, logger, input.RawBody.Data().Files)
+		files := make([]string, 0, len(staged))
+		for _, sf := range staged {
+			files = append(files, sf.path)
+		}
+
+		dbiout, err := app.dbimport.ProcessAtomic(ctx, &dbimportParams{
+			Files:        files,
+			ValidateOnly: true,
+		})
+		if err != nil {
+			return nil, huma.NewError(http.StatusConflict, err.Error())
+		}
+		if dbiout == nil {
+			return nil, huma.NewError(http.StatusInternalServerError, "response is nil; this should never happen")
+		}
+		// assign only after the checks above: the previous version set this
+		// field before checking err/nil and panicked whenever ProcessAtomic
+		// returned (nil, err)
+		dbiout.ParsingErrors = parsingErrors
+		return NewResponse(*dbiout), nil
+	})
+
+	// POST /commit: stage uploads, import them for real, then notify the
+	// MIDAS API which files were committed. File names must be "<uuid>.xml".
+	huma.Post(api, "/commit", func(ctx context.Context, input *struct {
+		KeyQueryParam
+		RawBody huma.MultipartFormFiles[struct {
+			CommitID              string          `form:"commit_id"`
+			ProjectID             string          `form:"project_id"`
+			GoesTelemetrySourceID string          `form:"goes_telemetry_source_id"`
+			Files                 []huma.FormFile `form:"files"`
+		}]
+	}) (*Response[dbimportOutput], error) {
+		if err := app.checkKey(input.Key); err != nil {
+			return nil, err
+		}
+
+		if !app.mu.TryLock() {
+			return nil, huma.NewError(http.StatusConflict, "import already in progress")
+		}
+		defer app.mu.Unlock()
+
+		formData := input.RawBody.Data()
+
+		staged, parsingErrors := stageMultipartFiles(ctx, logger, formData.Files)
+
+		// Import only files whose name parses as a UUID: the callback payload
+		// reports per-file IDs, so a file we cannot identify is skipped rather
+		// than imported with a zero-value (uuid.Nil) payload entry as before.
+		files := make([]string, 0, len(staged))
+		commitPayload := make([]GoesPlatformConfigFileCommitDTO, 0, len(staged))
+		for _, sf := range staged {
+			fileID, err := uuid.Parse(strings.TrimSuffix(sf.origName, ".xml"))
+			if err != nil {
+				msg := fmt.Sprintf("failed to parse uuid from filename %q", sf.origName)
+				parsingErrors = append(parsingErrors, msg)
+				logger.ErrorContext(ctx, msg, "filename", sf.origName, "error", err)
+				continue
+			}
+			files = append(files, sf.path)
+			commitPayload = append(commitPayload, GoesPlatformConfigFileCommitDTO{
+				ID:          fileID,
+				CommittedAt: time.Now().UTC(),
+				CommitID:    formData.CommitID,
+			})
+		}
+
+		dbiout, err := app.dbimport.ProcessAtomic(ctx, &dbimportParams{
+			Files:        files,
+			ValidateOnly: false,
+		})
+		if err != nil {
+			return nil, huma.NewError(http.StatusConflict, fmt.Errorf("dbimport.ProcessAtomic %w", err).Error())
+		}
+		if dbiout == nil {
+			return nil, huma.NewError(http.StatusInternalServerError, "response is nil; this should never happen")
+		}
+
+		u, err := url.Parse(cfg.MidasAPIHost)
+		if err != nil {
+			return nil, huma.NewError(http.StatusInternalServerError, "could not parse bad url base path")
+		}
+		u.Path = fmt.Sprintf("/v4/callback/goes/%s/commit", formData.GoesTelemetrySourceID)
+		q := u.Query()
+		q.Add("key", cfg.AuthToken)
+		u.RawQuery = q.Encode()
+
+		body, err := json.Marshal(commitPayload)
+		if err != nil {
+			return nil, huma.NewError(http.StatusInternalServerError, "failed to marshal callback payload")
+		}
+		resp, err := http.Post(u.String(), "application/json", bytes.NewReader(body))
+		if err != nil {
+			// redact the auth key from the URL before surfacing the error
+			var urlErr *url.Error
+			if errors.As(err, &urlErr) {
+				if eu, parseErr := url.Parse(urlErr.URL); parseErr == nil {
+					redactQueryParams(eu, "key")
+					urlErr.URL = eu.String()
+					err = urlErr
+				} else {
+					err = errors.New("failed to redact query param when parsing error")
+				}
+			}
+			return nil, huma.NewError(http.StatusBadGateway, err.Error())
+		}
+		defer resp.Body.Close()
+
+		// Status reflects the callback response code, not the import itself.
+		dbiout.Status = resp.StatusCode
+		dbiout.ParsingErrors = parsingErrors
+
+		return NewResponse(*dbiout), nil
+	})
+
+	router.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) {
+		w.WriteHeader(http.StatusOK)
+		_, _ = w.Write([]byte("ok"))
+	})
+
+	app.httpServer = &http.Server{
+		Addr:              cfg.ListenAddr,
+		Handler:           router,
+		ReadHeaderTimeout: 5 * time.Second,
+	}
+
+	if err := dbi.startRoutingScheduler(ctx); err != nil {
+		log.Fatalf("error starting routing scheduler: %v", err)
+	}
+
+	// Graceful shutdown on SIGINT/SIGTERM with a 10s drain window.
+	go func() {
+		<-ctx.Done()
+		shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+		defer cancel()
+		if err := app.httpServer.Shutdown(shutdownCtx); err != nil {
+			slog.Error("error shutting down httpServer", "error", err)
+		}
+	}()
+
+	logger.Info("listening", "addr", cfg.ListenAddr)
+	if err := app.httpServer.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
+		logger.Error("server failed", "error", err)
+		os.Exit(1)
+	}
+}
+
+// stagedFile pairs a staged temp-file path with the upload's original name.
+type stagedFile struct {
+	path     string // temp file on local disk containing the upload's bytes
+	origName string // client-supplied filename
+}
+
+// GoesPlatformConfigFileCommitDTO is one entry of the commit-callback payload
+// sent back to the MIDAS API after a successful import.
+type GoesPlatformConfigFileCommitDTO struct {
+	ID          uuid.UUID `json:"id"`
+	CommittedAt time.Time `json:"committed_at"`
+	CommitID    string    `json:"commit_id"`
+}
+
+// stageMultipartFiles copies each uploaded multipart file into a temp file.
+// Files that cannot be staged are skipped (never returned as empty paths, as
+// the previous indexed-assignment version did) and described in parsingErrors.
+// NOTE(review): staged temp files are never removed — same as the original
+// behavior; consider cleaning them up after ProcessAtomic completes.
+func stageMultipartFiles(ctx context.Context, logger *slog.Logger, formFiles []huma.FormFile) (staged []stagedFile, parsingErrors []string) {
+	tmpDir := os.TempDir()
+	for _, file := range formFiles {
+		content, err := io.ReadAll(file)
+		if err != nil {
+			msg := fmt.Sprintf("failed to read uploaded file %q", file.Filename)
+			parsingErrors = append(parsingErrors, msg)
+			logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err)
+			continue
+		}
+		tmpFile, err := os.CreateTemp(tmpDir, "upload-*"+filepath.Ext(file.Filename))
+		if err != nil {
+			msg := fmt.Sprintf("failed to create temp file for uploaded file %q", file.Filename)
+			parsingErrors = append(parsingErrors, msg)
+			logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err)
+			continue
+		}
+		if _, err := tmpFile.Write(content); err != nil {
+			tmpFile.Close()
+			msg := fmt.Sprintf("failed to write uploaded file %q to temp file", file.Filename)
+			parsingErrors = append(parsingErrors, msg)
+			logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err)
+			continue
+		}
+		tmpFile.Close()
+		staged = append(staged, stagedFile{path: tmpFile.Name(), origName: file.Filename})
+	}
+	return staged, parsingErrors
+}
+
+// redactQueryParams replaces the value of each named query parameter on u
+// with the placeholder "REDACTED". Parameters not present on the URL are
+// left untouched; u.RawQuery is rewritten in canonical (encoded) form.
+func redactQueryParams(u *url.URL, queryParams ...string) {
+	values := u.Query()
+	for _, name := range queryParams {
+		if _, present := values[name]; present {
+			values.Set(name, "REDACTED")
+		}
+	}
+	u.RawQuery = values.Encode()
+}
diff --git a/opendcs/midas_config/datasource/hotbackup.xml b/opendcs/midas_config/datasource/hotbackup.xml
new file mode 100644
index 00000000..2753d808
--- /dev/null
+++ b/opendcs/midas_config/datasource/hotbackup.xml
@@ -0,0 +1,25 @@
+
+
+
+
+
+
+ hostname=cdadata.wcda.noaa.gov, port=16003, password=${env.CDADATA_PASSWORD}, username=${env.CDADATA_USERNAME}
+
+
+
+
+
+
+ hostname=cdabackup.wcda.noaa.gov, port=16003, password=${env.CDABACKUP_PASSWORD}, username=${env.CDABACKUP_USERNAME}
+
+
+
+
+
+
+ hostname=lrgseddn1.cr.usgs.gov, port=16003, password=${env.EDDN1_PASSWORD}, username=${env.EDDN1_USERNAME}
+
+
+
+
diff --git a/opendcs/midas_config/reference/DataTypeEquivalenceList.xml b/opendcs/midas_config/reference/DataTypeEquivalenceList.xml
new file mode 100644
index 00000000..371ba059
--- /dev/null
+++ b/opendcs/midas_config/reference/DataTypeEquivalenceList.xml
@@ -0,0 +1,590 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/opendcs/midas_config/reference/EnumList.xml b/opendcs/midas_config/reference/EnumList.xml
new file mode 100644
index 00000000..ec9cb693
--- /dev/null
+++ b/opendcs/midas_config/reference/EnumList.xml
@@ -0,0 +1,402 @@
+
+
+
+
+ Read messages from LRGS data server
+ decodes.datasource.LrgsDataSource
+ 1
+
+
+ Read messages from one source in a group
+ decodes.datasource.HotBackupGroup
+ 2
+
+
+ Read message(s) from specified file
+ decodes.datasource.FileDataSource
+ 3
+
+
+
+ Read messages from files in a specified directory
+
+ decodes.datasource.DirectoryDataSource
+ 4
+
+
+ Read messages a socket stream
+ decodes.datasource.SocketStreamDataSource
+ 5
+
+
+ Read messages from all sources in a group
+ decodes.datasource.RoundRobinGroup
+ 6
+
+
+
+
+
+ National Data Buoy Center Context-Sensitive Parser
+
+ NDBCMessageParser
+ 1
+
+
+ Hard-coded NOS data parser
+ NOSMessageParser
+ 2
+
+
+ DECODES Format Statements and Unit Conversions
+ DecodesScript
+ 3
+
+
+
+
+ National Weather Service Handbook 5 Name
+ 1
+
+
+ Local Site Naming Convention
+ 2
+
+
+ USGS Numeric Station ID
+ 3
+
+
+ Columbia Basin TeleType
+ 4
+
+
+ CWMS Name
+
+
+ 5
+
+
+ UUID4 identifier
+
+
+ 6
+
+
+
+
+ Pipe data to standard output.
+ decodes.consumer.PipeConsumer
+ 1
+
+
+ Save data in specified file
+ decodes.consumer.FileConsumer
+ 2
+
+
+ Append data to file in a specified directory.
+ decodes.consumer.FileAppendConsumer
+ 3
+
+
+
+ Save message data in files in a directory, then optionally run a trigger
+ script.
+
+ decodes.consumer.DirectoryConsumer
+ 4
+
+
+
+
+ degrees or radians
+ 1
+
+
+ Area
+ 2
+
+
+ Volume / Time
+ 3
+
+
+ Length or distance
+ 4
+
+
+ Ratio
+ 5
+
+
+ Temperature
+ 6
+
+
+ Time
+ 7
+
+
+ Velocity
+ 8
+
+
+ voltage
+ 9
+
+
+ Volume
+ 10
+
+
+
+
+ Y = Ax + B
+ LinearConverter
+ 1
+
+
+ No Conversion (output = input)
+ NoConversion
+ 2
+
+
+ Y = Ax5 + Bx4 + Cx3 + Dx2 + Ex + F
+ Poly5Converter
+ 3
+
+
+ Y = A * (B + x)^C + D
+ UsgsStdConverter
+ 4
+
+
+
+
+ Descending
+ 1
+
+
+ Ascending
+ 2
+
+
+
+
+
+ Input must match table value to produce an output
+
+ ExactMatchLookup
+
+
+ Exponential interpolation between table values
+ ExponentialInterpLookup
+
+
+ Linear interpolation between table values
+ LinearInterpLookup
+
+
+ Logarithmic interpolation between table values
+ LogarithmicInterpLookup
+
+
+ Inputs are rounded to nearest table value
+ RoundingLookup
+
+
+ Inputs are truncated to lower table value
+ TruncatingLookup
+
+
+
+
+ Apply to all platforms
+
+
+
+ Apply to platforms sharing a given configuration
+
+
+
+ Apply to specific platform(s)
+
+
+ Apply to platforms in a network list
+
+
+ Apply to platform at a given site
+
+
+
+
+ English Measurements
+ 1
+
+
+ International Metric System
+ 2
+
+
+
+
+ Display Format
+ decodes.consumer.HumanReadableFormatter
+ 1
+
+
+ Standard Hydrometerologic Exchange Format
+ decodes.consumer.ShefFormatter
+ 2
+
+
+ USACE HEC Intermediate SHEF Format
+ decodes.consumer.ShefitFormatter
+ 3
+
+
+ USGS Standard Message Format
+ decodes.consumer.StdmsgFormatter
+ 4
+
+
+ Compatible with EMIT ASCII format
+ decodes.consumer.EmitAsciiFormatter
+ 5
+
+
+ Compatible with EMIT Oracle format
+ decodes.consumer.EmitOracleFormatter
+ 6
+
+
+ Dump Format for testing and trouble-shooting
+ decodes.consumer.DumpFormatter
+ 7
+
+
+ Transmission Monitor
+ decodes.consumer.TransmitMonitorFormatter
+ 8
+
+
+ Delimited row-column format
+ decodes.consumer.TableFormatter
+ 9
+
+
+ Hydstra Format.
+ decodes.consumer.HydstraFormatter
+ 10
+
+
+ HTML Report Format
+ decodes.consumer.HtmlFormatter
+ 11
+
+
+ CWMS Oracle with TSID format
+ rsgis.consumer.CwmsOracleFormatter
+
+ 12
+
+
+ CWMS Oracle Output Formatter
+ rsgis.consumer.CwmsOutputFormatter
+
+ 13
+
+
+ CWMS Oracle with TSID format
+ rsgis.consumer.MidasOutputFormatter
+
+ 14
+
+
+
+ shef-pe
+
+
+ Standard Hydrometeorologic Exchange Format Physical Element Code
+
+ 1
+
+
+ Environmental Protection Agency Parameter Code
+ 2
+
+
+ U.S. Bureau of Reclamations Hydrologic Database
+ 3
+
+
+ Hydstra Data Code
+ 4
+
+
+ CWMS parameters
+
+
+ 5
+
+
+ UUID4
+
+
+ 6
+
+
+
+
+ Electronic Data Logger File
+ 1
+
+
+ GOES DCP
+ 2
+
+
+ GOES DCP Random Message
+ 3
+
+
+ GOES DCP Self-Timed Message
+ 4
+
+
+ LRGS Archive File
+ 5
+
+
+ Data collected via telephone telementry
+ 6
+
+
+
+
+
+
+ Data Collection Platform
+ 1
+
+
+ Transmitter, data logger, modem, etc.
+ 2
+
+
+ Environmental Sensor
+ 3
+
+
+
+
+ Fixed Regular Interval
+ 1
+
+
+ Variable, Triggered or Random
+ 2
+
+
+
diff --git a/opendcs/midas_config/reference/MIDAS-English.xml b/opendcs/midas_config/reference/MIDAS-English.xml
new file mode 100644
index 00000000..270fb68d
--- /dev/null
+++ b/opendcs/midas_config/reference/MIDAS-English.xml
@@ -0,0 +1,268 @@
+
+
+ true
+
+
+
+ in
+ 2
+
+
+
+
+ in
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ cfs
+ 2
+
+
+
+
+ cfs
+ 2
+
+
+
+
+ cfs
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ W/m2
+ 3
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ kW
+ 3
+
+
+
+
+ in
+ 2
+
+
+
+
+ mb
+ 2
+
+
+
+
+ J/m2
+ 3
+
+
+
+
+ %
+ 3
+
+
+
+
+ rev
+ 3
+
+
+
+
+ mph
+ 3
+
+
+
+
+ mph
+ 2
+
+
+
+
+ mph
+ 2
+
+
+
+
+ rpm
+ 3
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ft
+ 2
+
+
+
+
+ ac-ft
+ 3
+
+
+
+
+ F
+ 3
+
+
+
+
+ F
+ 2
+
+
+
+
+ F
+ 2
+
+
+
+
+ in
+ 2
+
+
+
+
+ in
+ 2
+
+
+
+
+ hr
+ 3
+
+
+
+
+ ft
+ 2
+
+
+
+
+ JTU
+ 3
+
+
+
+
+ FNU
+ 3
+
+
+
+
+ JTU
+ 3
+
+
+
+
+ NTU
+ 3
+
+
+
+
+ Volts
+ 3
+
+
+
+
+ ac-ft
+ 3
+
+
+
+
+ su
+ 2
+
+
diff --git a/opendcs/midas_config/reference/MIDAS-Metric.xml b/opendcs/midas_config/reference/MIDAS-Metric.xml
new file mode 100644
index 00000000..5f548c3a
--- /dev/null
+++ b/opendcs/midas_config/reference/MIDAS-Metric.xml
@@ -0,0 +1,274 @@
+
+
+ false
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ W/m2
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ su
+ 3
+
+
+
+
+ kW
+ 3
+
+
+
+
+ mm
+ 3
+
+
+
+
+ mb
+ 3
+
+
+
+
+ J/m2
+ 3
+
+
+
+
+ %
+ 3
+
+
+
+
+ rev
+ 3
+
+
+
+
+ kph
+ 3
+
+
+
+
+ rpm
+ 3
+
+
+
+
+ m3
+ 3
+
+
+
+
+ C
+ 3
+
+
+
+
+ cm
+ 3
+
+
+
+
+ hr
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ JTU
+ 3
+
+
+
+
+ FNU
+ 3
+
+
+
+
+ JTU
+ 3
+
+
+
+
+ NTU
+ 3
+
+
+
+
+ v
+ 3
+
+
+
+
+ m3
+ 3
+
+
+
+
+ mm
+ 3
+
+
+
+
+ mm
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ cms
+ 3
+
+
+
+
+ cms
+ 3
+
+
+
+
+ cms
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ kph
+ 3
+
+
+
+
+ kph
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ m
+ 3
+
+
+
+
+ C
+ 3
+
+
+
+
+ C
+ 3
+
+
+
+
+ cm
+ 3
+
+
diff --git a/opendcs/midas_config/routing/goes.xml b/opendcs/midas_config/routing/goes.xml
new file mode 100644
index 00000000..4fe54f76
--- /dev/null
+++ b/opendcs/midas_config/routing/goes.xml
@@ -0,0 +1,55 @@
+
+
+ true
+
+
+
+
+
+ hostname=cdadata.wcda.noaa.gov, port=16003, password=${env.CDADATA_PASSWORD}, username=${env.CDADATA_USERNAME}
+
+
+
+
+
+
+ hostname=cdabackup.wcda.noaa.gov, port=16003, password=${env.CDABACKUP_PASSWORD}, username=${env.CDABACKUP_USERNAME}
+
+
+
+
+
+
+ hostname=lrgseddn1.cr.usgs.gov, port=16003, password=${env.EDDN1_PASSWORD}, username=${env.EDDN1_USERNAME}
+
+
+
+
+ false
+ false
+ midas-formatter
+ UTC
+ MIDAS-English
+ directory
+ /opendcs_output
+ now - 2 hours
+
+
+ yyyy-MM-dd'T'HH:mm:ss'Z'
+
+
+ ${java.TRANSPORTID}-$DATE(yyyyMMddHHmmss)
+
+
+ False
+
+
+ ,
+
+
+ l
+
+
+ /usr/local/bin/opendcs-wrapper upload ${java.FILENAME}
+
+
diff --git a/opendcs/midas_config/routing/monitor.xml b/opendcs/midas_config/routing/monitor.xml
new file mode 100644
index 00000000..16026a78
--- /dev/null
+++ b/opendcs/midas_config/routing/monitor.xml
@@ -0,0 +1,51 @@
+
+
+ true
+
+
+
+
+
+ hostname=cdadata.wcda.noaa.gov, port=16003, password=${env.CDADATA_PASSWORD}, username=${env.CDADATA_USERNAME}
+
+
+
+
+
+
+ hostname=cdabackup.wcda.noaa.gov, port=16003, password=${env.CDABACKUP_PASSWORD}, username=${env.CDABACKUP_USERNAME}
+
+
+
+
+
+
+ hostname=lrgseddn1.cr.usgs.gov, port=16003, password=${env.EDDN1_PASSWORD}, username=${env.EDDN1_USERNAME}
+
+
+
+
+ false
+ false
+ transmit-monitor
+ UTC
+ directory
+ /opendcs_output
+ now - 2 hours
+
+
+ ${java.TRANSPORTID}-$DATE(yyyyMMddHHmmss)
+
+
+ ,
+
+
+ False
+
+
+ l
+
+
+ /usr/local/bin/opendcs-wrapper upload ${java.FILENAME}
+
+
diff --git a/opendcs/patch_opendcs.sh b/opendcs/patch_opendcs.sh
new file mode 100755
index 00000000..aaf49e6f
--- /dev/null
+++ b/opendcs/patch_opendcs.sh
@@ -0,0 +1,31 @@
+#!/bin/sh
+
+set -eu
+
+rm -f /opt/opendcs/dep/commons-net-*.jar \
+ /opt/opendcs/dep/jackson-core*.jar \
+ /opt/opendcs/dep/jackson-dataformat-toml*.jar \
+ /opt/opendcs/dep/commons-vfs2-*.jar \
+ /opt/opendcs/dep/javax.el-*.jar \
+ /opt/opendcs/dep/jdom-*.jar \
+ /opt/opendcs/dep/poi-*.jar \
+ /opt/opendcs/dep/postgresql-*.jar \
+ /opt/opendcs/dep/jetty-*.jar || true
+
+wget -qO /opt/opendcs/dep/commons-net-3.11.1.jar \
+ "https://repo1.maven.org/maven2/commons-net/commons-net/3.11.1/commons-net-3.11.1.jar" &&
+ wget -qO /opt/opendcs/dep/jackson-dataformat-toml-2.18.2.jar \
+ "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-toml/2.18.2/jackson-dataformat-toml-2.18.2.jar" &&
+ wget -qO /opt/opendcs/dep/jackson-core-2.19.2.jar \
+ "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.19.2/jackson-core-2.19.2.jar" &&
+ wget -qO /opt/opendcs/dep/commons-vfs2-2.10.0.jar \
+ "https://repo1.maven.org/maven2/org/apache/commons/commons-vfs2/2.10.0/commons-vfs2-2.10.0.jar" &&
+ wget -qO /opt/opendcs/dep/jdom2-2.0.6.1.jar \
+ "https://repo1.maven.org/maven2/org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.jar" &&
+ wget -qO /opt/opendcs/dep/poi-5.4.1.jar \
+ "https://repo1.maven.org/maven2/org/apache/poi/poi/5.4.1/poi-5.4.1.jar" &&
+ wget -qO /opt/opendcs/dep/postgresql-42.7.7.jar \
+ "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.7.7/postgresql-42.7.7.jar" &&
+ wget -qO /opt/opendcs/dep/logstash-logback-encoder.jar \
+ "https://repo1.maven.org/maven2/net/logstash/logback/logstash-logback-encoder/8.0/logstash-logback-encoder-8.0.jar" &&
+ chown opendcs:opendcs /opt/opendcs/dep/*.jar
diff --git a/opendcs/rsgis/.mvn/jvm.config b/opendcs/rsgis/.mvn/jvm.config
new file mode 100644
index 00000000..e69de29b
diff --git a/opendcs/rsgis/.mvn/maven.config b/opendcs/rsgis/.mvn/maven.config
new file mode 100644
index 00000000..e69de29b
diff --git a/opendcs/rsgis/pom.xml b/opendcs/rsgis/pom.xml
new file mode 100644
index 00000000..7533ce09
--- /dev/null
+++ b/opendcs/rsgis/pom.xml
@@ -0,0 +1,73 @@
+
+
+ 4.0.0
+
+ rsgis.consumer
+ rsgis
+ 1.0-SNAPSHOT
+ rsgis
+ jar
+
+
+ UTF-8
+ 17
+ 7.0-nightly
+
+
+
+
+ org.opendcs
+ opendcs
+ ${opendcs.version}
+ provided
+
+
+
+ org.slf4j
+ slf4j-api
+ 2.0.17
+ provided
+
+
+
+ ch.qos.logback
+ logback-classic
+ 1.5.24
+
+
+
+
+ rsgis
+
+
+
+
+ maven-clean-plugin
+ 3.4.0
+
+
+ maven-resources-plugin
+ 3.3.1
+
+
+ maven-compiler-plugin
+ 3.13.0
+
+
+ maven-surefire-plugin
+ 3.3.0
+
+
+ maven-jar-plugin
+ 3.4.2
+
+
+ maven-install-plugin
+ 3.1.2
+
+
+
+
+
diff --git a/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java
new file mode 100644
index 00000000..ec279aa9
--- /dev/null
+++ b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java
@@ -0,0 +1,129 @@
package rsgis.consumer;

import decodes.consumer.DataConsumer;
import decodes.consumer.DataConsumerException;
import decodes.consumer.OutputFormatter;
import decodes.consumer.OutputFormatterException;
import decodes.datasource.RawMessage;
import decodes.datasource.UnknownPlatformException;
import decodes.db.Platform;
import decodes.db.PresentationGroup;
import decodes.decoder.DecodedMessage;
import decodes.decoder.Sensor;
import decodes.decoder.TimeSeries;
import decodes.util.PropertySpec;
import ilex.util.PropertiesUtil;
import ilex.var.TimedVariable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.Iterator;
import java.util.Properties;
import java.util.TimeZone;

/**
 * DECODES output formatter that emits one delimited line per decoded sample:
 *
 * <pre>{@code <timeseries_id><delim><timestamp><delim><value>}</pre>
 *
 * Only sensors carrying a {@code timeseries_id} sensor property are emitted;
 * time series without one are skipped with a warning. Configurable via the
 * routing-spec properties {@code delimiter} and {@code dateFormat}.
 */
public class MidasOutputFormatter extends OutputFormatter {
    private static final Logger log = LoggerFactory.getLogger(MidasOutputFormatter.class);

    /** Field separator between id, timestamp and value ("delimiter" property). */
    private String delimiter = " ";
    /** SimpleDateFormat pattern for sample timestamps ("dateFormat" property). */
    private String dateFormat = "yyyy-MM-dd'T'HH:mmZ";
    private final PropertySpec[] propSpecs = new PropertySpec[] {
        new PropertySpec("cwmsOfficeID", "s", ""),
        new PropertySpec("delimiter", "s", ""),
        new PropertySpec("justify", "b", ""),
        new PropertySpec("dateFormat", "s", "")
    };
    /** Built in initFormatter from dateFormat + routing-spec time zone. */
    private SimpleDateFormat sdf;

    /**
     * Reads the optional "delimiter" and "dateFormat" properties and prepares
     * the timestamp formatter in the routing spec's time zone.
     */
    @Override
    protected void initFormatter(
        String type,
        TimeZone timeZone,
        PresentationGroup presentationGroup,
        Properties properties) throws OutputFormatterException {

        String s = PropertiesUtil.getIgnoreCase(properties, "delimiter");
        if (s != null) {
            delimiter = s;
        }
        s = PropertiesUtil.getIgnoreCase(properties, "dateFormat");
        if (s != null) {
            dateFormat = s;
        }
        sdf = new SimpleDateFormat(dateFormat);
        sdf.setTimeZone(timeZone);
    }

    @Override
    public void shutdown() {
        // No resources held; nothing to release.
    }

    /**
     * Writes every time series of the decoded message that has a
     * {@code timeseries_id} sensor property to the consumer, one line per
     * unflagged sample.
     */
    @Override
    public void formatMessage(DecodedMessage decodedMessage, DataConsumer consumer)
        throws DataConsumerException, OutputFormatterException {

        consumer.startMessage(decodedMessage);
        RawMessage raw = decodedMessage.getRawMessage();

        Platform platform;
        try {
            platform = raw.getPlatform();
        } catch (UnknownPlatformException e) {
            log.debug("Unknown platform for raw message: {}", e.toString());
            // startMessage() was already called above; close the message so
            // the consumer (e.g. a directory/file consumer) is not left open.
            consumer.endMessage();
            return;
        }

        String platformName = platform.getDisplayName();

        // Cast per element so this compiles whether getAllTimeSeries()
        // returns a raw or a generic Iterator (the original raw-Iterator
        // assignment without a cast did not compile).
        Iterator<TimeSeries> it = decodedMessage.getAllTimeSeries();
        while (it.hasNext()) {
            TimeSeries ts = (TimeSeries) it.next();
            Sensor sensor = ts.getSensor();

            if (sensor == null) {
                log.warn("sensor_null platform={} timeseries={}", platformName, ts.getDisplayName());
                continue;
            }
            if (ts.size() == 0) {
                log.warn("timeseries_empty platform={} timeseries={}", platformName, ts.getDisplayName());
                continue;
            }

            String midasTsId = sensor.getProperty("timeseries_id");
            if (midasTsId == null) {
                // No mapping to a MIDAS time series; nothing is written.
                log.warn("timeseries_id_missing platform={} timeseries={}", platformName, ts.getDisplayName());
                continue;
            }

            processDataOutput(consumer, ts, platformName, midasTsId);
            // count is the raw sample count; flagged samples were skipped
            // inside processDataOutput and may make the written total lower.
            log.info(
                "measurements_written platform={} timeseries={} count={}",
                platformName,
                ts.getDisplayName(),
                ts.size());
        }

        consumer.endMessage();
    }

    /**
     * Emits one "id delimiter timestamp delimiter value" line per sample of
     * the given time series, skipping samples whose flags indicate a
     * missing/erroneous value.
     *
     * @param consumer       destination for the formatted lines
     * @param ts             decoded time series to write
     * @param platformFileId platform display name (currently unused; kept for
     *                       signature compatibility)
     * @param midasTsId      value of the sensor's "timeseries_id" property
     */
    public void processDataOutput(
        DataConsumer consumer,
        TimeSeries ts,
        String platformFileId,
        String midasTsId) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < ts.size(); i++) {
            TimedVariable tv = ts.sampleAt(i);
            // 0x60000000 — presumably the DECODES missing/error flag bits;
            // TODO confirm against ilex.var.IFlags constants.
            if ((tv.getFlags() & 0x60000000) != 0) {
                continue;
            }
            sb.setLength(0);
            sb.append(midasTsId);
            sb.append(delimiter);
            sb.append(sdf.format(tv.getTime()));
            sb.append(delimiter);
            sb.append(ts.formattedSampleAt(i));
            consumer.println(sb.toString());
        }
    }

    /** Property metadata advertised to the DECODES GUI/validation layer. */
    public PropertySpec[] getSupportedProps() {
        return propSpecs;
    }
}
diff --git a/report/src/main.ts b/report/src/main.ts
index 7c690ec3..87d90a09 100644
--- a/report/src/main.ts
+++ b/report/src/main.ts
@@ -414,9 +414,17 @@ async function upload(
key: string,
bucket: string,
): Promise {
+ const tomorrow = new Date(
+ new Date().getTime() + FILE_EXPIRY_DURATION_HOURS * 60 * 60 * 1000,
+ );
const uploader = new Upload({
client: s3Client,
- params: { Bucket: bucket, Key: key, Body: buf },
+ params: {
+ Bucket: bucket,
+ Key: key,
+ Body: buf,
+ Expires: tomorrow,
+ },
});
const s3UploaderResponse = await uploader.done();
const statusCode = s3UploaderResponse.$metadata.httpStatusCode;
@@ -470,13 +478,15 @@ async function updateJob(
throw new Error(JSON.stringify(failData ?? failErr));
}
+ const tomorrow = new Date(
+ new Date().getTime() + FILE_EXPIRY_DURATION_HOURS * 60 * 60 * 1000,
+ );
+
const body: ReportDownloadJobDTO = {
status: "SUCCESS",
progress: 100,
file_key: fileKey,
- file_expiry: new Date(
- new Date().getTime() + FILE_EXPIRY_DURATION_HOURS * 60 * 60 * 1000,
- ).toISOString(),
+ file_expiry: tomorrow.toISOString(),
};
const { data, error } = await apiClient.PUT("/report_jobs/{job_id}", {
diff --git a/sqlc.generate.yaml b/sqlc.generate.yaml
index e41574a3..0c7ec2ce 100644
--- a/sqlc.generate.yaml
+++ b/sqlc.generate.yaml
@@ -41,6 +41,10 @@ sql:
type: uuid.UUID
pointer: true
+ # xml
+ - db_type: xml
+ go_type: string
+
# timestamptz
- db_type: timestamptz
go_type: time.Time
@@ -149,6 +153,12 @@ sql:
type: InstrumentIDName
slice: true
+ # v_goes_telemetry
+ - column: v_goes_telemetry_source.files
+ go_type:
+ type: VGoesTelemetrySourceFiles
+ slice: true
+
# v_incl_measurement
- column: v_incl_measurement.measurements
go_type: