feat: add geomag gain factor and bias config application
This adds a tool to generate geomag processing constants
ozym committed Apr 3, 2024
1 parent 3b78172 commit 3f6e38b
Showing 3 changed files with 277 additions and 0 deletions.
58 changes: 58 additions & 0 deletions cmd/geomag-config/config.go
@@ -0,0 +1,58 @@
package main

import (
"encoding/json"
"io"
"time"
)

// Config describes the processing settings for a geomag stream.
type Config struct {
// Srcname can be used as a stream key.
Srcname string `json:"srcname"`
// Network is the expected network code as stored.
Network string `json:"network"`
// Station is the expected station code as stored.
Station string `json:"station"`
// Location is the expected site location code as stored.
Location string `json:"location"`
// Channel is the expected channel code as stored.
Channel string `json:"channel"`
// ScaleBias is the offset that needs to be added to each data sample.
ScaleBias float64 `json:"scale_bias"`
// ScaleFactor is the value that each data sample needs to be multiplied by.
ScaleFactor float64 `json:"scale_factor"`
// InputUnits describes the units for the input signal.
InputUnits string `json:"input_units"`
// OutputUnits describes the units for the output after scaling.
OutputUnits string `json:"output_units"`
// Start is the time when the scale factors are valid.
Start time.Time `json:"start"`
// End is the time when the scale factors are no longer valid.
End time.Time `json:"end"`
}

// Less can be used for sorting Config slices.
func (c Config) Less(config Config) bool {
switch {
case c.Srcname < config.Srcname:
return true
case c.Srcname > config.Srcname:
return false
case c.Start.Before(config.Start):
return true
default:
return false
}
}

// Encode will write JSON encoded output of a Config slice.
func Encode(wr io.Writer, d []Config) error {

// build an encoder
enc := json.NewEncoder(wr)
enc.SetIndent("", " ")

// do the encoding
return enc.Encode(d)
}
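
For context, a minimal sketch of how this helper might be exercised from the same package; the stream key, station, channel, units and scale values below are purely illustrative and are not taken from the network metadata:

package main

import (
	"os"
	"time"
)

// exampleEncode builds a single illustrative Config entry and writes it
// as indented JSON to standard output using the Encode helper.
func exampleEncode() error {
	configs := []Config{{
		Srcname:     "XX_TEST_50_LFZ", // hypothetical stream key
		Network:     "XX",
		Station:     "TEST",
		Location:    "50",
		Channel:     "LFZ",
		ScaleFactor: 0.001, // illustrative value only
		ScaleBias:   0.0,
		InputUnits:  "nT", // illustrative unit names only
		OutputUnits: "count",
		Start:       time.Date(2024, time.January, 1, 0, 0, 0, 0, time.UTC),
		End:         time.Date(9999, time.January, 1, 0, 0, 0, 0, time.UTC), // illustrative open-ended span
	}}

	// Encode writes an indented JSON array, one object per stream entry.
	return Encode(os.Stdout, configs)
}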
212 changes: 212 additions & 0 deletions cmd/geomag-config/main.go
@@ -0,0 +1,212 @@
package main

import (
"flag"
"fmt"
"log"
"os"
"path/filepath"
"sort"
"strings"

"github.com/GeoNet/delta"
"github.com/GeoNet/delta/internal/stationxml"
"github.com/GeoNet/delta/resp"
)

// Settings holds the application configuration.
type Settings struct {
base string // optional delta base directory
resp string // optional delta response directory
network string // geomag network code
output string // optional output file
}

func main() {

var settings Settings

flag.Usage = func() {
fmt.Fprintf(os.Stderr, "\n")
fmt.Fprintf(os.Stderr, "Build a geomag processing config file\n")
fmt.Fprintf(os.Stderr, "\n")
fmt.Fprintf(os.Stderr, "Usage:\n")
fmt.Fprintf(os.Stderr, "\n")
fmt.Fprintf(os.Stderr, " %s [options]\n", os.Args[0])
fmt.Fprintf(os.Stderr, "\n")
fmt.Fprintf(os.Stderr, "Options:\n")
fmt.Fprintf(os.Stderr, "\n")
flag.PrintDefaults()
fmt.Fprintf(os.Stderr, "\n")
}

flag.StringVar(&settings.base, "base", "", "delta base files")
flag.StringVar(&settings.resp, "resp", "", "delta response files")
flag.StringVar(&settings.network, "network", "GM", "geomag network code")
flag.StringVar(&settings.output, "output", "", "output geomag configuration file")

flag.Parse()

set, err := delta.NewBase(settings.base)
if err != nil {
log.Fatalf("unable to create delta set: %v", err)
}

// avoid encoding a JSON null for an empty slice
configs := make([]Config, 0)

// external network lookup
externals := make(map[string]string)
for _, n := range set.Networks() {
externals[n.Code] = n.External
}

// network codes
codes := make(map[string]string)
for _, s := range set.Stations() {
codes[s.Code] = s.Network
}

// check each site, skip any that don't match the network
for _, site := range set.Sites() {
// must have a network code
n, ok := codes[site.Station]
if !ok || n != settings.network {
continue
}

// the network code must map to an external network code
external, ok := externals[n]
if !ok {
continue
}

// examine the collection of information for each site
for _, collection := range set.Collections(site) {

// find any corrections that might be needed, e.g. gain or calibration
for _, correction := range set.Corrections(collection) {

pair := stationxml.NewResponse()

// adjust for corrections
if cal := correction.SensorCalibration; cal != nil {
pair.SetCalibration(cal.ScaleFactor, cal.ScaleBias, cal.ScaleAbsolute)
}
if gain := correction.Gain; gain != nil {
pair.SetGain(gain.Scale.Factor, gain.Scale.Bias, gain.Absolute)
}
if correction.Telemetry != nil {
pair.SetTelemetry(correction.Telemetry.ScaleFactor)
}
if correction.Preamp != nil {
pair.SetPreamp(correction.Preamp.ScaleFactor)
}

switch {
// handle instruments with a single response configuration (usually no datalogger)
case collection.Component.SamplingRate != 0:

derived, err := resp.LookupBase(settings.resp, collection.Component.Response)
if err != nil {
log.Fatal(err)
}

// generate the derived response
r, err := pair.Derived(derived)
if err != nil {
log.Fatal(err)
}
if r.InstrumentSensitivity != nil {
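// the instrument sensitivity scales the input signal into counts, so its
// inverse is stored to recover the input units from raw data samples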
configs = append(configs, Config{
Srcname: strings.Join([]string{external, site.Station, site.Location, collection.Code()}, "_"),
Network: external,
Station: site.Station,
Location: site.Location,
Channel: collection.Code(),
ScaleFactor: 1.0 / r.InstrumentSensitivity.Value,
ScaleBias: 0.0,
InputUnits: r.InstrumentSensitivity.InputUnits.Name,
OutputUnits: r.InstrumentSensitivity.OutputUnits.Name,
Start: correction.Start,
End: correction.End,
})
}
// handle instruments with a normal response configuration (e.g. sensor and datalogger)
default:
sensor, err := resp.LookupBase(settings.resp, collection.Component.Response)
if err != nil {
log.Fatal(err)
}
if err := pair.SetSensor(sensor); err != nil {
log.Fatal(err)
}

datalogger, err := resp.LookupBase(settings.resp, collection.Channel.Response)
if err != nil {
log.Fatal(err)
}
if err := pair.SetDatalogger(datalogger); err != nil {
log.Fatal(err)
}

r, err := pair.ResponseType()
if err != nil {
log.Fatal(err)
}

if r.InstrumentPolynomial != nil {
var factor, bias float64
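// for a first-order polynomial the response is value = c1 + c2*counts,
// so coefficient 1 provides the bias and coefficient 2 the scale factor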
for _, c := range r.InstrumentPolynomial.Coefficients {
switch c.Number {
case 1:
bias = c.Value
case 2:
factor = c.Value
}
}
configs = append(configs, Config{
Srcname: strings.Join([]string{external, site.Station, site.Location, collection.Code()}, "_"),
Network: external,
Station: site.Station,
Location: site.Location,
Channel: collection.Code(),
ScaleFactor: factor,
ScaleBias: bias,
InputUnits: r.InstrumentPolynomial.InputUnits.Name,
OutputUnits: r.InstrumentPolynomial.OutputUnits.Name,
Start: correction.Start,
End: correction.End,
})
}
}
}
}
}

sort.Slice(configs, func(i, j int) bool {
return configs[i].Less(configs[j])
})

switch {
case settings.output != "":
// make sure the output directory exists
if err := os.MkdirAll(filepath.Dir(settings.output), 0700); err != nil {
log.Fatalf("unable to create output directory %q: %v", filepath.Dir(settings.output), err)
}
// output file has been given
file, err := os.Create(settings.output)
if err != nil {
log.Fatalf("unable to create output file %q: %v", settings.output, err)
}
defer file.Close()

if err := Encode(file, configs); err != nil {
log.Fatalf("unable to write output to %q: %v", settings.output, err)
}
default:
if err := Encode(os.Stdout, configs); err != nil {
log.Fatalf("unable to write output: %v", err)
}
}
}
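
Downstream, the generated file can be read back and grouped by the Srcname stream key; a minimal sketch, assuming the same Config type is available to the consumer (the loadConfigs helper and its grouping are assumptions, not part of this commit):

package main

import (
	"encoding/json"
	"os"
)

// loadConfigs reads a geomag configuration file written by this tool and
// groups the entries by their Srcname stream key. A stream can have more
// than one entry as scale factors may change over time (Start / End).
func loadConfigs(path string) (map[string][]Config, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}

	var configs []Config
	if err := json.Unmarshal(data, &configs); err != nil {
		return nil, err
	}

	lookup := make(map[string][]Config)
	for _, c := range configs {
		lookup[c.Srcname] = append(lookup[c.Srcname], c)
	}

	return lookup, nil
}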
7 changes: 7 additions & 0 deletions meta/correction.go
@@ -330,6 +330,10 @@ func (s *Set) SensorCalibrationCorrections(coll Collection) []Correction {
if c.Serial != coll.InstalledSensor.Serial {
continue
}
if c.Number != coll.Component.Number {
continue
}

if !coll.Span.Overlaps(c.Span) {
continue
}
@@ -407,6 +411,9 @@ func (s *Set) DataloggerCalibrationCorrections(coll Collection) []Correction {
if c.Serial != coll.DeployedDatalogger.Serial {
continue
}
if c.Number != coll.Channel.Number {
continue
}
if !coll.Span.Overlaps(c.Span) {
continue
}