feat(api): add tuning params (temperature, top_p, penalties, stop) to MessageRequest

MessageRequest was missing standard OpenAI-compatible generation tuning
parameters. Callers had no way to control temperature, top_p,
frequency_penalty, presence_penalty, or stop sequences.

Changes:
- Added 5 optional fields to MessageRequest (all Option, None by default)
- Wired into build_chat_completion_request: only included in payload when set
- All existing construction sites updated with ..Default::default()
- MessageRequest now derives Default for ergonomic partial construction

Tests added:
- tuning_params_included_in_payload_when_set: all 5 params flow into JSON
- tuning_params_omitted_from_payload_when_none: absent params stay absent

83 api lib tests passing, 0 failing.
cargo check --workspace: 0 warnings.
This commit is contained in:
YeonGyu-Kim
2026-04-08 07:07:33 +09:00
parent 7546c1903d
commit c667d47c70
7 changed files with 82 additions and 1 deletions

View File

@@ -721,6 +721,25 @@ fn build_chat_completion_request(request: &MessageRequest, config: OpenAiCompatC
payload["tool_choice"] = openai_tool_choice(tool_choice);
}
// OpenAI-compatible tuning parameters — only included when explicitly set.
// Each field is Option<..>; a None leaves the key out of the payload entirely
// so the provider's own defaults apply.
if let Some(temperature) = request.temperature {
payload["temperature"] = json!(temperature);
}
if let Some(top_p) = request.top_p {
payload["top_p"] = json!(top_p);
}
if let Some(frequency_penalty) = request.frequency_penalty {
payload["frequency_penalty"] = json!(frequency_penalty);
}
if let Some(presence_penalty) = request.presence_penalty {
payload["presence_penalty"] = json!(presence_penalty);
}
// `stop` is borrowed rather than copied; an empty list is deliberately
// treated the same as None and omitted from the payload.
if let Some(stop) = &request.stop {
if !stop.is_empty() {
payload["stop"] = json!(stop);
}
}
// Fully assembled JSON body for the chat-completions endpoint.
payload
}
@@ -1049,6 +1068,7 @@ mod tests {
}]),
tool_choice: Some(ToolChoice::Auto),
stream: false,
..Default::default()
},
OpenAiCompatConfig::xai(),
);
@@ -1071,6 +1091,7 @@ mod tests {
tools: None,
tool_choice: None,
stream: true,
..Default::default()
},
OpenAiCompatConfig::openai(),
);
@@ -1089,6 +1110,7 @@ mod tests {
tools: None,
tool_choice: None,
stream: true,
..Default::default()
},
OpenAiCompatConfig::xai(),
);
@@ -1159,4 +1181,45 @@ mod tests {
assert_eq!(normalize_finish_reason("stop"), "end_turn");
assert_eq!(normalize_finish_reason("tool_calls"), "tool_use");
}
#[test]
fn tuning_params_included_in_payload_when_set() {
    // Every tuning knob is set explicitly; all five must surface in the JSON body.
    let request = MessageRequest {
        model: "gpt-4o".to_string(),
        max_tokens: 1024,
        messages: vec![],
        stream: false,
        temperature: Some(0.7),
        top_p: Some(0.9),
        frequency_penalty: Some(0.5),
        presence_penalty: Some(0.3),
        stop: Some(vec!["\n".to_string()]),
        ..Default::default()
    };
    let body = build_chat_completion_request(&request, OpenAiCompatConfig::openai());
    // Scalar params compare directly against serde_json::Value via PartialEq<f64>.
    for (key, expected) in [
        ("temperature", 0.7),
        ("top_p", 0.9),
        ("frequency_penalty", 0.5),
        ("presence_penalty", 0.3),
    ] {
        assert_eq!(body[key], expected, "{key} mismatch in payload");
    }
    assert_eq!(body["stop"], json!(["\n"]));
}
#[test]
fn tuning_params_omitted_from_payload_when_none() {
    // With no tuning params set, none of the optional keys may leak into the body.
    let request = MessageRequest {
        model: "gpt-4o".to_string(),
        max_tokens: 1024,
        messages: vec![],
        stream: false,
        ..Default::default()
    };
    let body = build_chat_completion_request(&request, OpenAiCompatConfig::openai());
    for key in ["temperature", "top_p", "frequency_penalty", "presence_penalty", "stop"] {
        assert!(body.get(key).is_none(), "{key} should be absent");
    }
}
}