@@ -1007,6 +1007,13 @@ class CompletionRequest(OpenAIBaseModel):
10071007 "default: 0). Any priority other than 0 will raise an error "
10081008 "if the served model does not use priority scheduling." ),
10091009 )
1010+ request_id : str = Field (
1011+ default_factory = lambda : f"{ random_uuid ()} " ,
1012+ description = (
1013+ "The request_id related to this request. If the caller does "
1014+ "not set it, a random_uuid will be generated. This id is used "
 1015+ "throughout the inference process and returned in the response." ),
1016+ )
10101017 logits_processors : Optional [LogitsProcessors ] = Field (
10111018 default = None ,
10121019 description = (
@@ -1251,6 +1258,13 @@ class EmbeddingCompletionRequest(OpenAIBaseModel):
12511258 "default: 0). Any priority other than 0 will raise an error "
12521259 "if the served model does not use priority scheduling." ),
12531260 )
1261+ request_id : str = Field (
1262+ default_factory = lambda : f"{ random_uuid ()} " ,
1263+ description = (
1264+ "The request_id related to this request. If the caller does "
1265+ "not set it, a random_uuid will be generated. This id is used "
 1266+ "throughout the inference process and returned in the response." ),
1267+ )
12541268
12551269 # --8<-- [end:embedding-extra-params]
12561270
@@ -1302,6 +1316,13 @@ class EmbeddingChatRequest(OpenAIBaseModel):
13021316 "default: 0). Any priority other than 0 will raise an error "
13031317 "if the served model does not use priority scheduling." ),
13041318 )
1319+ request_id : str = Field (
1320+ default_factory = lambda : f"{ random_uuid ()} " ,
1321+ description = (
1322+ "The request_id related to this request. If the caller does "
1323+ "not set it, a random_uuid will be generated. This id is used "
 1324+ "throughout the inference process and returned in the response." ),
1325+ )
13051326 # --8<-- [end:chat-embedding-extra-params]
13061327
13071328 @model_validator (mode = "before" )
0 commit comments