@@ -175,156 +175,26 @@ def _get_aggregation(
     return instrument_class_aggregation
 
 
-def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest:
-    resource_metrics_dict = {}
-
-    for resource_metrics in data.resource_metrics:
-
-        resource = resource_metrics.resource
-
-        # It is safe to assume that each entry in data.resource_metrics is
-        # associated with an unique resource.
-        scope_metrics_dict = {}
-
-        resource_metrics_dict[resource] = scope_metrics_dict
-
-        for scope_metrics in resource_metrics.scope_metrics:
+class EncodingException(Exception):
+    """
+    Raised by encode_metrics() when an exception is caught during encoding. Contains the problematic metric so
+    the misbehaving metric name and details can be logged during exception handling.
+    """
 
-            instrumentation_scope = scope_metrics.scope
+    def __init__(self, original_exception, metric):
+        super().__init__()
+        self.original_exception = original_exception
+        self.metric = metric
 
-            # The SDK groups metrics in instrumentation scopes already so
-            # there is no need to check for existing instrumentation scopes
-            # here.
-            pb2_scope_metrics = pb2.ScopeMetrics(
-                scope=InstrumentationScope(
-                    name=instrumentation_scope.name,
-                    version=instrumentation_scope.version,
-                )
-            )
+    def __str__(self):
+        return f"{self.metric}\n{self.original_exception}"
 
-            scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics
 
-            for metric in scope_metrics.metrics:
-                pb2_metric = pb2.Metric(
-                    name=metric.name,
-                    description=metric.description,
-                    unit=metric.unit,
-                )
+def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest:
+    resource_metrics_dict = {}
 
-                if isinstance(metric.data, Gauge):
-                    for data_point in metric.data.data_points:
-                        pt = pb2.NumberDataPoint(
-                            attributes=_encode_attributes(
-                                data_point.attributes
-                            ),
-                            time_unix_nano=data_point.time_unix_nano,
-                            exemplars=encode_exemplars(data_point.exemplars),
-                        )
-                        if isinstance(data_point.value, int):
-                            pt.as_int = data_point.value
-                        else:
-                            pt.as_double = data_point.value
-                        pb2_metric.gauge.data_points.append(pt)
-
-                elif isinstance(metric.data, HistogramType):
-                    for data_point in metric.data.data_points:
-                        pt = pb2.HistogramDataPoint(
-                            attributes=_encode_attributes(
-                                data_point.attributes
-                            ),
-                            time_unix_nano=data_point.time_unix_nano,
-                            start_time_unix_nano=(
-                                data_point.start_time_unix_nano
-                            ),
-                            exemplars=encode_exemplars(data_point.exemplars),
-                            count=data_point.count,
-                            sum=data_point.sum,
-                            bucket_counts=data_point.bucket_counts,
-                            explicit_bounds=data_point.explicit_bounds,
-                            max=data_point.max,
-                            min=data_point.min,
-                        )
-                        pb2_metric.histogram.aggregation_temporality = (
-                            metric.data.aggregation_temporality
-                        )
-                        pb2_metric.histogram.data_points.append(pt)
-
-                elif isinstance(metric.data, Sum):
-                    for data_point in metric.data.data_points:
-                        pt = pb2.NumberDataPoint(
-                            attributes=_encode_attributes(
-                                data_point.attributes
-                            ),
-                            start_time_unix_nano=(
-                                data_point.start_time_unix_nano
-                            ),
-                            time_unix_nano=data_point.time_unix_nano,
-                            exemplars=encode_exemplars(data_point.exemplars),
-                        )
-                        if isinstance(data_point.value, int):
-                            pt.as_int = data_point.value
-                        else:
-                            pt.as_double = data_point.value
-                        # note that because sum is a message type, the
-                        # fields must be set individually rather than
-                        # instantiating a pb2.Sum and setting it once
-                        pb2_metric.sum.aggregation_temporality = (
-                            metric.data.aggregation_temporality
-                        )
-                        pb2_metric.sum.is_monotonic = metric.data.is_monotonic
-                        pb2_metric.sum.data_points.append(pt)
-
-                elif isinstance(metric.data, ExponentialHistogramType):
-                    for data_point in metric.data.data_points:
-
-                        if data_point.positive.bucket_counts:
-                            positive = pb2.ExponentialHistogramDataPoint.Buckets(
-                                offset=data_point.positive.offset,
-                                bucket_counts=data_point.positive.bucket_counts,
-                            )
-                        else:
-                            positive = None
-
-                        if data_point.negative.bucket_counts:
-                            negative = pb2.ExponentialHistogramDataPoint.Buckets(
-                                offset=data_point.negative.offset,
-                                bucket_counts=data_point.negative.bucket_counts,
-                            )
-                        else:
-                            negative = None
-
-                        pt = pb2.ExponentialHistogramDataPoint(
-                            attributes=_encode_attributes(
-                                data_point.attributes
-                            ),
-                            time_unix_nano=data_point.time_unix_nano,
-                            start_time_unix_nano=(
-                                data_point.start_time_unix_nano
-                            ),
-                            exemplars=encode_exemplars(data_point.exemplars),
-                            count=data_point.count,
-                            sum=data_point.sum,
-                            scale=data_point.scale,
-                            zero_count=data_point.zero_count,
-                            positive=positive,
-                            negative=negative,
-                            flags=data_point.flags,
-                            max=data_point.max,
-                            min=data_point.min,
-                        )
-                        pb2_metric.exponential_histogram.aggregation_temporality = (
-                            metric.data.aggregation_temporality
-                        )
-                        pb2_metric.exponential_histogram.data_points.append(pt)
-
-                else:
-                    _logger.warning(
-                        "unsupported data type %s",
-                        metric.data.__class__.__name__,
-                    )
-                    continue
-
-                pb2_scope_metrics.metrics.append(pb2_metric)
+    for resource_metrics in data.resource_metrics:
+        _encode_resource_metrics(resource_metrics, resource_metrics_dict)
 
     resource_data = []
     for (
@@ -340,8 +210,144 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest:
                 schema_url=sdk_resource.schema_url,
             )
         )
-    resource_metrics = resource_data
-    return ExportMetricsServiceRequest(resource_metrics=resource_metrics)
+    return ExportMetricsServiceRequest(resource_metrics=resource_data)
+
+
+def _encode_resource_metrics(resource_metrics, resource_metrics_dict):
+    resource = resource_metrics.resource
+    # It is safe to assume that each entry in data.resource_metrics is
+    # associated with an unique resource.
+    scope_metrics_dict = {}
+    resource_metrics_dict[resource] = scope_metrics_dict
+    for scope_metrics in resource_metrics.scope_metrics:
+        instrumentation_scope = scope_metrics.scope
+
+        # The SDK groups metrics in instrumentation scopes already so
+        # there is no need to check for existing instrumentation scopes
+        # here.
+        pb2_scope_metrics = pb2.ScopeMetrics(
+            scope=InstrumentationScope(
+                name=instrumentation_scope.name,
+                version=instrumentation_scope.version,
+            )
+        )
+
+        scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics
+
+        for metric in scope_metrics.metrics:
+            pb2_metric = pb2.Metric(
+                name=metric.name,
+                description=metric.description,
+                unit=metric.unit,
+            )
+
+            try:
+                _encode_metric(metric, pb2_metric)
+            except Exception as ex:
+                # `from None` so we don't get "During handling of the above exception, another exception occurred:"
+                raise EncodingException(ex, metric) from None
+
+            pb2_scope_metrics.metrics.append(pb2_metric)
+
+
+def _encode_metric(metric, pb2_metric):
+    if isinstance(metric.data, Gauge):
+        for data_point in metric.data.data_points:
+            pt = pb2.NumberDataPoint(
+                attributes=_encode_attributes(data_point.attributes),
+                time_unix_nano=data_point.time_unix_nano,
+                exemplars=encode_exemplars(data_point.exemplars),
+            )
+            if isinstance(data_point.value, int):
+                pt.as_int = data_point.value
+            else:
+                pt.as_double = data_point.value
+            pb2_metric.gauge.data_points.append(pt)
+
+    elif isinstance(metric.data, HistogramType):
+        for data_point in metric.data.data_points:
+            pt = pb2.HistogramDataPoint(
+                attributes=_encode_attributes(data_point.attributes),
+                time_unix_nano=data_point.time_unix_nano,
+                start_time_unix_nano=data_point.start_time_unix_nano,
+                exemplars=encode_exemplars(data_point.exemplars),
+                count=data_point.count,
+                sum=data_point.sum,
+                bucket_counts=data_point.bucket_counts,
+                explicit_bounds=data_point.explicit_bounds,
+                max=data_point.max,
+                min=data_point.min,
+            )
+            pb2_metric.histogram.aggregation_temporality = (
+                metric.data.aggregation_temporality
+            )
+            pb2_metric.histogram.data_points.append(pt)
+
+    elif isinstance(metric.data, Sum):
+        for data_point in metric.data.data_points:
+            pt = pb2.NumberDataPoint(
+                attributes=_encode_attributes(data_point.attributes),
+                start_time_unix_nano=data_point.start_time_unix_nano,
+                time_unix_nano=data_point.time_unix_nano,
+                exemplars=encode_exemplars(data_point.exemplars),
+            )
+            if isinstance(data_point.value, int):
+                pt.as_int = data_point.value
+            else:
+                pt.as_double = data_point.value
+            # note that because sum is a message type, the
+            # fields must be set individually rather than
+            # instantiating a pb2.Sum and setting it once
+            pb2_metric.sum.aggregation_temporality = (
+                metric.data.aggregation_temporality
+            )
+            pb2_metric.sum.is_monotonic = metric.data.is_monotonic
+            pb2_metric.sum.data_points.append(pt)
+
+    elif isinstance(metric.data, ExponentialHistogramType):
+        for data_point in metric.data.data_points:
+
+            if data_point.positive.bucket_counts:
+                positive = pb2.ExponentialHistogramDataPoint.Buckets(
+                    offset=data_point.positive.offset,
+                    bucket_counts=data_point.positive.bucket_counts,
+                )
+            else:
+                positive = None
+
+            if data_point.negative.bucket_counts:
+                negative = pb2.ExponentialHistogramDataPoint.Buckets(
+                    offset=data_point.negative.offset,
+                    bucket_counts=data_point.negative.bucket_counts,
+                )
+            else:
+                negative = None
+
+            pt = pb2.ExponentialHistogramDataPoint(
+                attributes=_encode_attributes(data_point.attributes),
+                time_unix_nano=data_point.time_unix_nano,
+                start_time_unix_nano=data_point.start_time_unix_nano,
+                exemplars=encode_exemplars(data_point.exemplars),
+                count=data_point.count,
+                sum=data_point.sum,
+                scale=data_point.scale,
+                zero_count=data_point.zero_count,
+                positive=positive,
+                negative=negative,
+                flags=data_point.flags,
+                max=data_point.max,
+                min=data_point.min,
+            )
+            pb2_metric.exponential_histogram.aggregation_temporality = (
+                metric.data.aggregation_temporality
+            )
+            pb2_metric.exponential_histogram.data_points.append(pt)
+
+    else:
+        _logger.warning(
+            "unsupported data type %s",
+            metric.data.__class__.__name__,
+        )
 
 
 def encode_exemplars(sdk_exemplars: list) -> list:
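As a usage note: the `EncodingException` docstring above says the exception carries the problematic metric so its name and details can be logged during exception handling. The sketch below shows how a caller of `encode_metrics()` might do that. It is a minimal sketch, not code from this diff: the import path and the `safe_encode()` wrapper are assumptions for illustration; only `EncodingException`, `encode_metrics`, `metric`, and `original_exception` come from the change above.

```python
# Sketch only: import path and safe_encode() are assumed for illustration.
import logging

from opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import (
    EncodingException,
    encode_metrics,
)

_logger = logging.getLogger(__name__)


def safe_encode(metrics_data):
    """Encode metrics, logging the name of the metric that failed to encode."""
    try:
        return encode_metrics(metrics_data)
    except EncodingException as exc:
        # exc.metric is the SDK metric that failed; exc.original_exception
        # holds the underlying error. __str__ renders both.
        _logger.exception("failed to encode metric %s", exc.metric.name)
        return None
```

Raising with `from None` inside `_encode_resource_metrics` keeps the traceback focused on `EncodingException` itself, while `original_exception` still preserves the root cause for logging.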