Rename "Values" to "Fields" for writes #1700

Merged 1 commit on Feb 23, 2015
24 changes: 12 additions & 12 deletions client/influxdb.go
@@ -258,12 +258,12 @@ func (t Timestamp) MarshalJSON() ([]byte, error) {
return []byte(`"` + s + `"`), nil
}

-// Point defines the values that will be written to the database
+// Point defines the fields that will be written to the database
type Point struct {
Name string `json:"name"`
Tags map[string]string `json:"tags"`
Timestamp Timestamp `json:"timestamp"`
-Values map[string]interface{} `json:"values"`
+Fields map[string]interface{} `json:"fields"`
Precision string `json:"precision"`
}

@@ -274,14 +274,14 @@ func (p *Point) UnmarshalJSON(b []byte) error {
Tags map[string]string `json:"tags"`
Timestamp time.Time `json:"timestamp"`
Precision string `json:"precision"`
-Values map[string]interface{} `json:"values"`
+Fields map[string]interface{} `json:"fields"`
}
var epoch struct {
Name string `json:"name"`
Tags map[string]string `json:"tags"`
Timestamp *int64 `json:"timestamp"`
Precision string `json:"precision"`
-Values map[string]interface{} `json:"values"`
+Fields map[string]interface{} `json:"fields"`
}

if err := func() error {
@@ -304,7 +304,7 @@ func (p *Point) UnmarshalJSON(b []byte) error {
p.Tags = epoch.Tags
p.Timestamp = Timestamp(ts)
p.Precision = epoch.Precision
-p.Values = normalizeValues(epoch.Values)
+p.Fields = normalizeFields(epoch.Fields)
return nil
}(); err == nil {
return nil
@@ -320,28 +320,28 @@ func (p *Point) UnmarshalJSON(b []byte) error {
p.Tags = normal.Tags
p.Timestamp = Timestamp(normal.Timestamp)
p.Precision = normal.Precision
-p.Values = normalizeValues(normal.Values)
+p.Fields = normalizeFields(normal.Fields)

return nil
}

// Remove any notion of json.Number
-func normalizeValues(values map[string]interface{}) map[string]interface{} {
-newValues := map[string]interface{}{}
+func normalizeFields(fields map[string]interface{}) map[string]interface{} {
+newFields := map[string]interface{}{}

-for k, v := range values {
+for k, v := range fields {
switch v := v.(type) {
case json.Number:
jv, e := v.Float64()
if e != nil {
panic(fmt.Sprintf("unable to convert json.Number to float64: %s", e))
}
-newValues[k] = jv
+newFields[k] = jv
default:
-newValues[k] = v
+newFields[k] = v
}
}
-return newValues
+return newFields
}

// utility functions
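For orientation (not part of the diff): after this change the client emits the field map under the JSON key "fields" instead of "values". A minimal, self-contained sketch of the new wire shape, using a local struct that only mirrors the JSON tags of the Point type above rather than importing the real client package; the real type also wraps the timestamp in the custom Timestamp type, elided here for brevity, and the sample data is made up:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Local stand-in mirroring the JSON tags of the renamed Point struct above.
type point struct {
	Name      string                 `json:"name"`
	Tags      map[string]string      `json:"tags"`
	Timestamp time.Time              `json:"timestamp"`
	Fields    map[string]interface{} `json:"fields"` // was "values" before this change
	Precision string                 `json:"precision"`
}

func main() {
	p := point{
		Name:      "cpu",
		Tags:      map[string]string{"host": "server01"},
		Timestamp: time.Now().UTC(),
		Fields:    map[string]interface{}{"value": 100},
		Precision: "n",
	}
	b, err := json.Marshal(p)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // ... "fields":{"value":100} ...
}

On the read side, UnmarshalJSON still routes the decoded map through normalizeFields, so field values that arrive as json.Number (for example from a decoder configured with UseNumber) are flattened to float64.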
12 changes: 6 additions & 6 deletions cmd/influxd/server_integration_test.go
@@ -56,7 +56,7 @@ func createBatch(nPoints int, database, retention, measurement string, tags map[
Tags map[string]string `json:"tags"`
Timestamp int64 `json:"timestamp"`
Precision string `json:"precision"`
-Values map[string]int `json:"values"`
+Fields map[string]int `json:"fields"`
}
type PointBatch struct {
Database string `json:"database"`
@@ -67,8 +67,8 @@ func createBatch(nPoints int, database, retention, measurement string, tags map[
rand.Seed(time.Now().UTC().UnixNano())
points := make([]Point, 0)
for i := 0; i < nPoints; i++ {
-values := map[string]int{"value": rand.Int()}
-point := Point{Name: measurement, Tags: tags, Timestamp: time.Now().UTC().UnixNano(), Precision: "n", Values: values}
+fields := map[string]int{"value": rand.Int()}
+point := Point{Name: measurement, Tags: tags, Timestamp: time.Now().UTC().UnixNano(), Precision: "n", Fields: fields}
points = append(points, point)
}
batch := PointBatch{Database: database, RetentionPolicy: retention, Points: points}
@@ -425,7 +425,7 @@ func Test_ServerSingleIntegration(t *testing.T) {
},
"timestamp": %d,
"precision": "n",
"values":{
"fields":{
"value": 100
}
}]
@@ -470,7 +470,7 @@ func Test_Server3NodeIntegration(t *testing.T) {
},
"timestamp": %d,
"precision": "n",
"values":{
"fields":{
"value": 100
}
}]
@@ -516,7 +516,7 @@ func Test_Server5NodeIntegration(t *testing.T) {
},
"timestamp": %d,
"precision": "n",
"values":{
"fields":{
"value": 100
}
}]
6 changes: 3 additions & 3 deletions collectd/collectd.go
@@ -158,9 +158,9 @@ func Unmarshal(data *gollectd.Packet) []influxdb.Point {
for i := range data.Values {
name := fmt.Sprintf("%s_%s", data.Plugin, data.Values[i].Name)
tags := make(map[string]string)
-values := make(map[string]interface{})
+fields := make(map[string]interface{})

-values[name] = data.Values[i].Value
+fields[name] = data.Values[i].Value

if data.Hostname != "" {
tags["host"] = data.Hostname
@@ -178,7 +178,7 @@ func Unmarshal(data *gollectd.Packet) []influxdb.Point {
Name: name,
Tags: tags,
Timestamp: timestamp,
-Values: values,
+Fields: fields,
}

points = append(points, p)
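A standalone sketch (not part of the diff) of the mapping Unmarshal performs for each collectd value: the point name is built from the plugin and value name, and the fields map, as it is now called, is keyed by that same name, which is why the tests below expect Fields like map[string]interface{}{"disk_read": float64(1)}. The sample values here are made up for illustration:

package main

import "fmt"

func main() {
	// Hypothetical sample data standing in for one gollectd.Packet value.
	plugin, valueName, value := "disk", "read", float64(1)

	// Same construction as in Unmarshal above.
	name := fmt.Sprintf("%s_%s", plugin, valueName) // "disk_read"
	fields := map[string]interface{}{name: value}   // field key repeats the point name

	fmt.Println(name, fields) // disk_read map[disk_read:1]
}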
10 changes: 5 additions & 5 deletions collectd/collectd_test.go
@@ -211,7 +211,7 @@ func TestUnmarshal_Points(t *testing.T) {
{
name: "single value",
points: []influxdb.Point{
{Name: "disk_read", Values: map[string]interface{}{"disk_read": float64(1)}},
{Name: "disk_read", Fields: map[string]interface{}{"disk_read": float64(1)}},
},
packet: gollectd.Packet{
Plugin: "disk",
@@ -223,8 +223,8 @@ func TestUnmarshal_Points(t *testing.T) {
{
name: "multi value",
points: []influxdb.Point{
{Name: "disk_read", Values: map[string]interface{}{"disk_read": float64(1)}},
{Name: "disk_write", Values: map[string]interface{}{"disk_write": float64(5)}},
{Name: "disk_read", Fields: map[string]interface{}{"disk_read": float64(1)}},
{Name: "disk_write", Fields: map[string]interface{}{"disk_write": float64(5)}},
},
packet: gollectd.Packet{
Plugin: "disk",
@@ -240,7 +240,7 @@ func TestUnmarshal_Points(t *testing.T) {
{
Name: "disk_read",
Tags: map[string]string{"host": "server01", "instance": "sdk", "type": "disk_octets", "type_instance": "single"},
-Values: map[string]interface{}{"disk_read": float64(1)},
+Fields: map[string]interface{}{"disk_read": float64(1)},
},
},
packet: gollectd.Packet{
@@ -269,7 +269,7 @@ func TestUnmarshal_Points(t *testing.T) {
t.Errorf("point name mismatch. expected %q, got %q", name, m.Name)
}
// test value
-mv := m.Values[m.Name].(float64)
+mv := m.Fields[m.Name].(float64)
pv := test.packet.Values[i].Value
if mv != pv {
t.Errorf("point value mismatch. expected %v, got %v", pv, mv)
8 changes: 4 additions & 4 deletions graphite/graphite.go
@@ -69,12 +69,12 @@ func (p *Parser) Parse(line string) (influxdb.Point, error) {
return influxdb.Point{}, err
}

-values := make(map[string]interface{})
+fieldValues := make(map[string]interface{})
// Determine if value is a float or an int.
if i := int64(v); float64(i) == v {
-values[name] = int64(v)
+fieldValues[name] = int64(v)
} else {
-values[name] = v
+fieldValues[name] = v
}

// Parse timestamp.
@@ -88,7 +88,7 @@ func (p *Parser) Parse(line string) (influxdb.Point, error) {
point := influxdb.Point{
Name: name,
Tags: tags,
-Values: values,
+Fields: fieldValues,
Timestamp: timestamp,
}

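The int-versus-float check in Parse above decides whether a parsed graphite value goes into the field map as int64 or float64, which is what the test below asserts via point.Fields[point.Name]. A small self-contained sketch of that check; the helper name and sample values are illustrative, not part of the package:

package main

import "fmt"

// fieldValue mirrors the check in Parse: a value with no fractional part is
// stored as int64, everything else stays float64.
func fieldValue(v float64) interface{} {
	if i := int64(v); float64(i) == v {
		return i
	}
	return v
}

func main() {
	fmt.Printf("%T %v\n", fieldValue(42), fieldValue(42))     // int64 42
	fmt.Printf("%T %v\n", fieldValue(3.14), fieldValue(3.14)) // float64 3.14
}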
8 changes: 4 additions & 4 deletions graphite/graphite_test.go
@@ -225,13 +225,13 @@ func Test_DecodeMetric(t *testing.T) {
t.Fatalf("tags len mismatch. expected %d, got %d", len(test.tags), len(point.Tags))
}
if test.isInt {
-i := point.Values[point.Name].(int64)
+i := point.Fields[point.Name].(int64)
if i != test.iv {
t.Fatalf("integerValue value mismatch. expected %v, got %v", test.iv, point.Values[point.Name])
t.Fatalf("integerValue value mismatch. expected %v, got %v", test.iv, point.Fields[point.Name])
}
} else {
-f := point.Values[point.Name].(float64)
-if point.Values[point.Name] != f {
+f := point.Fields[point.Name].(float64)
+if point.Fields[point.Name] != f {
t.Fatalf("floatValue value mismatch. expected %v, got %v", test.fv, f)
}
}