From 63ab5cd5b6fef04e90fe4ff7babc3a983f0e864c Mon Sep 17 00:00:00 2001
From: Mikyo King
Date: Tue, 24 Oct 2023 14:28:03 -0600
Subject: [PATCH] Reformat text

---
 src/phoenix/trace/evaluation_conventions.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/phoenix/trace/evaluation_conventions.py b/src/phoenix/trace/evaluation_conventions.py
index 10764899ee..aa777d9479 100644
--- a/src/phoenix/trace/evaluation_conventions.py
+++ b/src/phoenix/trace/evaluation_conventions.py
@@ -14,13 +14,13 @@
 EVAL_DOCUMENTS_PRECISION = "eval.documents_precision"
 """
 The precision of a retriever.
-This is the proportion (expressed as a value between 0 and 1) of relevant documents over the total
+This is the proportion (expressed as a value between 0 and 1) of relevant documents over the total.
 """
 
 EVAL_DOCUMENTS_PRECISION_AT_K_TEMPLATE = "eval.documents_precision_at_{k}"
 """
 The prefix given to an evaluation metric that captures the precision of a retriever up to K.
 E.x. you would have eval.documents_precision_at_1,
-eval.documents_precision_at_2, etc. This value
-would be computed on top of the document_relevancy attribute of each document.
+eval.documents_precision_at_2, etc. This value would be computed on top of the
+document_relevancy attribute of each document.
 """
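
For context on the convention the docstrings describe, precision@k over a ranked retrieval result could be computed roughly as sketched below. This is an illustrative sketch only, not code from the patch or from phoenix; it assumes the document_relevancy judgments are available as a list of booleans ordered by retrieval rank.

    # Illustrative sketch only: precision of the top-k retrieved documents,
    # assuming `relevancies` holds the boolean document_relevancy judgment
    # for each document in rank order.
    from typing import Sequence


    def precision_at_k(relevancies: Sequence[bool], k: int) -> float:
        """Fraction of the top-k retrieved documents judged relevant."""
        if k <= 0:
            raise ValueError("k must be a positive integer")
        top_k = relevancies[:k]
        # Divide by the number of documents actually considered (min(k, total));
        # dividing by k itself is the other common convention.
        return sum(top_k) / len(top_k) if top_k else 0.0


    # Example: relevancies [True, False, True] gives
    #   eval.documents_precision_at_1 == 1.0
    #   eval.documents_precision_at_2 == 0.5
    #   eval.documents_precision_at_3 == 2/3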