tdg_dashboard: add file connectors section
Add "TDG file connectors statistics" section to TDG dashboard templates.
It consists of panels for the following metrics (a sample of the series is shown after the list):
- tdg_connector_input_file_processed_count
- tdg_connector_input_file_processed_objects_count
- tdg_connector_input_file_failed_count
- tdg_connector_input_file_size
- tdg_connector_input_file_current_bytes_processed
- tdg_connector_input_file_current_processed_objects

Part of #134
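
For reference, a hypothetical Prometheus exposition of these series. The values and the "tdg-1" alias are invented, but the connector_name and alias labels match the panel legends introduced by this commit:

tdg_connector_input_file_processed_count{connector_name="jsonl_importer",alias="tdg-1"} 3
tdg_connector_input_file_processed_objects_count{connector_name="jsonl_importer",alias="tdg-1"} 120
tdg_connector_input_file_failed_count{connector_name="jsonl_importer",alias="tdg-1"} 0
tdg_connector_input_file_size{connector_name="jsonl_importer",alias="tdg-1"} 1048576
tdg_connector_input_file_current_bytes_processed{connector_name="jsonl_importer",alias="tdg-1"} 524288
tdg_connector_input_file_current_processed_objects{connector_name="jsonl_importer",alias="tdg-1"} 60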
DifferentialOrange committed Jun 7, 2022
1 parent c913262 commit 6f07110
Showing 9 changed files with 1,668 additions and 0 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Thread CPU panels for TDG dashboard
- expirationd panels for TDG dashboard
- Tuples panels for TDG dashboard
- File connectors panels for TDG dashboard


## [1.1.0] - 2022-05-17
6 changes: 6 additions & 0 deletions dashboard/influxdb_tdg_dashboard.libsonnet
@@ -154,4 +154,10 @@ dashboard.new(
    policy=variable.influxdb.policy,
    measurement=variable.influxdb.measurement,
  )
).addPanels(
  section.tdg_file_connectors(
    datasource=variable.datasource.influxdb,
    policy=variable.influxdb.policy,
    measurement=variable.influxdb.measurement,
  )
)
184 changes: 184 additions & 0 deletions dashboard/panels/tdg/file_connectors.libsonnet
@@ -0,0 +1,184 @@
local grafana = import 'grafonnet/grafana.libsonnet';

local common_utils = import '../common.libsonnet';

local influxdb = grafana.influxdb;
local prometheus = grafana.prometheus;

{
  row:: common_utils.row('TDG file connector statistics'),

  // Dispatch on the datasource: Prometheus targets filter series by job,
  // InfluxDB targets group by the alias and connector_name label pairs.
  local target(
    datasource,
    metric_name,
    job=null,
    policy=null,
    measurement=null,
  ) =
    if datasource == '${DS_PROMETHEUS}' then
      prometheus.target(
        expr=std.format('%s{job=~"%s"}', [metric_name, job]),
        legendFormat='{{connector_name}} — {{alias}}',
      )
    else if datasource == '${DS_INFLUXDB}' then
      influxdb.target(
        policy=policy,
        measurement=measurement,
        group_tags=[
          'label_pairs_alias',
          'label_pairs_connector_name',
        ],
        alias='$tag_label_pairs_connector_name — $tag_label_pairs_alias',
      ).where('metric_name', '=', metric_name)
      .selectField('value').addConverter('mean'),
  files_processed(
    title='Total files processed',
    description=|||
      A number of files processed.
    |||,
    datasource=null,
    policy=null,
    measurement=null,
    job=null,
    rate_time_range=null,
  ):: common_utils.default_graph(
    title=title,
    description=description,
    datasource=datasource,
    labelY1='files',
    decimals=0,
    legend_avg=false,
    legend_max=false,
  ).addTarget(target(
    datasource,
    'tdg_connector_input_file_processed_count',
    job,
    policy,
    measurement,
  )),

  objects_processed(
    title='Total objects processed',
    description=|||
      A number of objects processed over all files.
    |||,
    datasource=null,
    policy=null,
    measurement=null,
    job=null,
    rate_time_range=null,
  ):: common_utils.default_graph(
    title=title,
    description=description,
    datasource=datasource,
    labelY1='objects',
    decimals=0,
    legend_avg=false,
    legend_max=false,
  ).addTarget(target(
    datasource,
    'tdg_connector_input_file_processed_objects_count',
    job,
    policy,
    measurement,
  )),

  files_process_errors(
    title='Files failed to process',
    description=|||
      A number of files failed to process.
    |||,
    datasource=null,
    policy=null,
    measurement=null,
    job=null,
  ):: common_utils.default_graph(
    title=title,
    description=description,
    datasource=datasource,
    labelY1='files',
    decimals=0,
    legend_avg=false,
    legend_max=false,
  ).addTarget(target(
    datasource,
    'tdg_connector_input_file_failed_count',
    job,
    policy,
    measurement,
  )),

  file_size(
    title='Current file size',
    description=|||
      Current file size.
    |||,
    datasource=null,
    policy=null,
    measurement=null,
    job=null,
  ):: common_utils.default_graph(
    title=title,
    description=description,
    datasource=datasource,
    format='bytes',
    legend_avg=false,
    legend_max=false,
  ).addTarget(target(
    datasource,
    'tdg_connector_input_file_size',
    job,
    policy,
    measurement,
  )),

  current_bytes_processed(
    title='Current file bytes processed',
    description=|||
      Processed bytes count from the current file.
    |||,
    datasource=null,
    policy=null,
    measurement=null,
    job=null,
  ):: common_utils.default_graph(
    title=title,
    description=description,
    datasource=datasource,
    format='bytes',
    legend_avg=false,
    legend_max=false,
  ).addTarget(target(
    datasource,
    'tdg_connector_input_file_current_bytes_processed',
    job,
    policy,
    measurement,
  )),

  current_objects_processed(
    title='Current file objects processed',
    description=|||
      Processed objects count from the current file.
    |||,
    datasource=null,
    policy=null,
    measurement=null,
    job=null,
  ):: common_utils.default_graph(
    title=title,
    description=description,
    datasource=datasource,
    labelY1='objects',
    decimals=0,
    legend_avg=false,
    legend_max=false,
  ).addTarget(target(
    datasource,
    'tdg_connector_input_file_current_processed_objects',
    job,
    policy,
    measurement,
  )),
}
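
A minimal sketch, not part of this commit, of rendering one of the new panels on its own. The '${DS_PROMETHEUS}' value matches the dispatch in target() above; the 'tdg' job pattern is an assumed example:

local file_connectors = import 'dashboard/panels/tdg/file_connectors.libsonnet';

// Evaluates to the JSON of a single grafonnet graph panel.
file_connectors.files_processed(
  datasource='${DS_PROMETHEUS}',
  job='tdg',
)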
6 changes: 6 additions & 0 deletions dashboard/prometheus_tdg_dashboard.libsonnet
@@ -182,4 +182,10 @@ dashboard.new(
    job=variable.prometheus.job,
    rate_time_range=variable.prometheus.rate_time_range,
  )
).addPanels(
  section.tdg_file_connectors(
    datasource=variable.datasource.prometheus,
    job=variable.prometheus.job,
    rate_time_range=variable.prometheus.rate_time_range,
  )
)
47 changes: 47 additions & 0 deletions dashboard/section.libsonnet
@@ -11,6 +11,7 @@ local space = import 'panels/space.libsonnet';
local vinyl = import 'panels/vinyl.libsonnet';

local tdg_expirationd = import 'panels/tdg/expirationd.libsonnet';
local tdg_file_connectors = import 'panels/tdg/file_connectors.libsonnet';
local tdg_kafka_brokers = import 'panels/tdg/kafka/brokers.libsonnet';
local tdg_kafka_common = import 'panels/tdg/kafka/common.libsonnet';
local tdg_kafka_consumer = import 'panels/tdg/kafka/consumer.libsonnet';
@@ -1742,4 +1743,50 @@ local tdg_tuples = import 'panels/tdg/tuples.libsonnet';
      job=job,
    ),
  ],

  tdg_file_connectors(datasource, policy=null, measurement=null, job=null, rate_time_range=null):: [
    tdg_file_connectors.row,

    tdg_file_connectors.files_processed(
      datasource=datasource,
      policy=policy,
      measurement=measurement,
      job=job,
    ),

    tdg_file_connectors.objects_processed(
      datasource=datasource,
      policy=policy,
      measurement=measurement,
      job=job,
    ),

    tdg_file_connectors.files_process_errors(
      datasource=datasource,
      policy=policy,
      measurement=measurement,
      job=job,
    ),

    tdg_file_connectors.file_size(
      datasource=datasource,
      policy=policy,
      measurement=measurement,
      job=job,
    ),

    tdg_file_connectors.current_bytes_processed(
      datasource=datasource,
      policy=policy,
      measurement=measurement,
      job=job,
    ),

    tdg_file_connectors.current_objects_processed(
      datasource=datasource,
      policy=policy,
      measurement=measurement,
      job=job,
    ),
  ],
}
17 changes: 17 additions & 0 deletions example_cluster/tdg/config/config.yml
@@ -27,6 +27,16 @@ connector:
        - items
      group_id: '1'

    - name: http
      type: http

    - name: jsonl_importer
      type: file
      format: jsonl
      workdir: "/var/lib/tarantool"
      filename: data.json
      routing_key: TestObj1
input_processor:
  handlers:
    - key: input_key
@@ -58,3 +68,10 @@ expiration:
    lifetime_hours: 1
    delay_sec: 1
    strategy: permanent

services:
  test_service:
    function: test_function.call
    args:
      id: string
    return_type: TestEntity
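
The jsonl_importer connector above reads newline-delimited JSON, one object per line. A hypothetical /var/lib/tarantool/data.json could look like this (field names are invented; the actual schema is defined by the TestObj1 model):

{"id": "obj-1", "value": 1}
{"id": "obj-2", "value": 2}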
5 changes: 5 additions & 0 deletions example_cluster/tdg/config/src/test_function.lua
@@ -0,0 +1,5 @@
return {
    call = function()
        return {id="test_service", nested="test_nested"}
    end
}