23 changes: 7 additions & 16 deletions codex-rs/tui/src/app.rs
@@ -372,10 +372,6 @@ impl App {
}

let enhanced_keys_supported = tui.enhanced_keys_supported();
let model_family = conversation_manager
.get_models_manager()
.construct_model_family(model.as_str(), &config)
.await;
let mut chat_widget = match resume_selection {
ResumeSelection::StartFresh | ResumeSelection::Exit => {
let init = crate::chatwidget::ChatWidgetInit {
@@ -389,7 +385,7 @@ impl App {
models_manager: conversation_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model_family: model_family.clone(),
model: model.clone(),
};
ChatWidget::new(init, conversation_manager.clone())
}
@@ -415,7 +411,7 @@ impl App {
models_manager: conversation_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model_family: model_family.clone(),
model: model.clone(),
};
ChatWidget::new_from_existing(
init,
@@ -582,7 +578,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model_family: model_family.clone(),
model: self.current_model.clone(),
};
self.chat_widget = ChatWidget::new(init, self.server.clone());
self.current_model = model_family.get_model_slug().to_string();
@@ -632,7 +628,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model_family: model_family.clone(),
model: self.current_model.clone(),
};
self.chat_widget = ChatWidget::new_from_existing(
init,
@@ -767,12 +763,7 @@ impl App {
self.on_update_reasoning_effort(effort);
}
AppEvent::UpdateModel(model) => {
let model_family = self
.server
.get_models_manager()
.construct_model_family(&model, &self.config)
.await;
self.chat_widget.set_model(&model, model_family);
self.chat_widget.set_model(&model);
self.current_model = model;
}
AppEvent::OpenReasoningPopup { model } => {
@@ -1357,7 +1348,7 @@ mod tests {
async fn make_test_app() -> App {
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let current_model = chat_widget.get_model_family().get_model_slug().to_string();
let current_model = "gpt-5.2-codex".to_string();
let server = Arc::new(ConversationManager::with_models_provider(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
@@ -1396,7 +1387,7 @@
) {
let (chat_widget, app_event_tx, rx, op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let current_model = chat_widget.get_model_family().get_model_slug().to_string();
let current_model = "gpt-5.2-codex".to_string();
let server = Arc::new(ConversationManager::with_models_provider(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
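Not part of the diff: a minimal stand-in sketch of the call shape after this change in app.rs — the app hands ChatWidget a plain model slug string and never awaits construct_model_family first. The types and fields below are simplified assumptions, not the real codex-rs structs.

// Stand-in types for illustration only; the real App, ChatWidget, and
// ChatWidgetInit in codex-rs/tui carry many more fields.
struct ChatWidgetInit {
    model: String, // previously: model_family: ModelFamily
}

struct ChatWidget {
    model: String,
}

impl ChatWidget {
    fn new(init: ChatWidgetInit) -> Self {
        Self { model: init.model }
    }

    fn set_model(&mut self, model: &str) {
        self.model = model.to_string();
    }
}

struct App {
    chat_widget: ChatWidget,
    current_model: String,
}

impl App {
    // Mirrors the AppEvent::UpdateModel arm above: no async ModelFamily
    // lookup, just hand the slug to the widget and remember it.
    fn on_update_model(&mut self, model: String) {
        self.chat_widget.set_model(&model);
        self.current_model = model;
    }
}

fn main() {
    let mut app = App {
        chat_widget: ChatWidget::new(ChatWidgetInit {
            model: "gpt-5.2-codex".to_string(),
        }),
        current_model: "gpt-5.2-codex".to_string(),
    };
    app.on_update_model("some-other-model".to_string());
    assert_eq!(app.current_model, "some-other-model");
    assert_eq!(app.chat_widget.model, "some-other-model");
}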
4 changes: 1 addition & 3 deletions codex-rs/tui/src/app_backtrack.rs
@@ -338,10 +338,9 @@ impl App {
) {
let conv = new_conv.conversation;
let session_configured = new_conv.session_configured;
let model_family = self.chat_widget.get_model_family();
let init = crate::chatwidget::ChatWidgetInit {
config: cfg,
model_family: model_family.clone(),
model: self.current_model.clone(),
frame_requester: tui.frame_requester(),
app_event_tx: self.app_event_tx.clone(),
initial_prompt: None,
@@ -354,7 +353,6 @@
};
self.chat_widget =
crate::chatwidget::ChatWidget::new_from_existing(init, conv, session_configured);
self.current_model = model_family.get_model_slug().to_string();
// Trim transcript up to the selected user message and re-render it.
self.trim_transcript_for_backtrack(nth_user_message);
self.render_transcript_once(tui);
66 changes: 27 additions & 39 deletions codex-rs/tui/src/chatwidget.rs
@@ -15,7 +15,6 @@ use codex_core::features::Feature;
use codex_core::git_info::current_branch_name;
use codex_core::git_info::local_git_branches;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::models_manager::model_family::ModelFamily;
use codex_core::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
@@ -291,7 +290,7 @@ pub(crate) struct ChatWidgetInit {
pub(crate) models_manager: Arc<ModelsManager>,
pub(crate) feedback: codex_feedback::CodexFeedback,
pub(crate) is_first_run: bool,
pub(crate) model_family: ModelFamily,
pub(crate) model: String,
}

#[derive(Default)]
@@ -316,7 +315,7 @@ pub(crate) struct ChatWidget {
bottom_pane: BottomPane,
active_cell: Option<Box<dyn HistoryCell>>,
config: Config,
model_family: ModelFamily,
model: String,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
session_header: SessionHeader,
@@ -608,12 +607,10 @@ impl ChatWidget {
}

fn context_remaining_percent(&self, info: &TokenUsageInfo) -> Option<i64> {
info.model_context_window
.or(self.model_family.context_window)
.map(|window| {
info.last_token_usage
.percent_of_context_window_remaining(window)
})
info.model_context_window.map(|window| {
info.last_token_usage
.percent_of_context_window_remaining(window)
})
}

fn context_used_tokens(&self, info: &TokenUsageInfo, percent_known: bool) -> Option<i64> {
@@ -681,7 +678,7 @@ impl ChatWidget {

if high_usage
&& !self.rate_limit_switch_prompt_hidden()
&& self.model_family.get_model_slug() != NUDGE_MODEL_SLUG
&& self.model != NUDGE_MODEL_SLUG
&& !matches!(
self.rate_limit_switch_prompt,
RateLimitSwitchPromptState::Shown
@@ -715,9 +712,6 @@ impl ChatWidget {
self.stream_controller = None;
self.maybe_show_pending_rate_limit_prompt();
}
pub(crate) fn get_model_family(&self) -> ModelFamily {
self.model_family.clone()
}

fn on_error(&mut self, message: String) {
self.finalize_turn();
@@ -1420,11 +1414,10 @@ impl ChatWidget {
models_manager,
feedback,
is_first_run,
model_family,
model,
} = common;
let model_slug = model_family.get_model_slug().to_string();
let mut config = config;
config.model = Some(model_slug.clone());
config.model = Some(model.clone());
let mut rng = rand::rng();
let placeholder = EXAMPLE_PROMPTS[rng.random_range(0..EXAMPLE_PROMPTS.len())].to_string();
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), conversation_manager);
@@ -1445,10 +1438,10 @@
}),
active_cell: None,
config,
model_family,
model: model.clone(),
auth_manager,
models_manager,
session_header: SessionHeader::new(model_slug),
session_header: SessionHeader::new(model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1506,10 +1499,9 @@ impl ChatWidget {
auth_manager,
models_manager,
feedback,
model_family,
model,
..
} = common;
let model_slug = model_family.get_model_slug().to_string();
let mut rng = rand::rng();
let placeholder = EXAMPLE_PROMPTS[rng.random_range(0..EXAMPLE_PROMPTS.len())].to_string();

@@ -1532,10 +1524,10 @@ impl ChatWidget {
}),
active_cell: None,
config,
model_family,
model: model.clone(),
auth_manager,
models_manager,
session_header: SessionHeader::new(model_slug),
session_header: SessionHeader::new(model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -2247,22 +2239,20 @@ impl ChatWidget {

pub(crate) fn add_status_output(&mut self) {
let default_usage = TokenUsage::default();
let (total_usage, context_usage) = if let Some(ti) = &self.token_info {
(&ti.total_token_usage, Some(&ti.last_token_usage))
} else {
(&default_usage, Some(&default_usage))
};
let token_info = self.token_info.as_ref();
let total_usage = token_info
.map(|ti| &ti.total_token_usage)
.unwrap_or(&default_usage);
self.add_to_history(crate::status::new_status_output(
&self.config,
self.auth_manager.as_ref(),
&self.model_family,
token_info,
total_usage,
context_usage,
&self.conversation_id,
self.rate_limit_snapshot.as_ref(),
self.plan_type,
Local::now(),
self.model_family.get_model_slug(),
&self.model,
));
}

@@ -2415,7 +2405,6 @@ impl ChatWidget {
/// Open a popup to choose a quick auto model. Selecting "All models"
/// opens the full picker with every available preset.
pub(crate) fn open_model_popup(&mut self) {
let current_model = self.model_family.get_model_slug().to_string();
let presets: Vec<ModelPreset> =
// todo(aibrahim): make this async function
match self.models_manager.try_list_models(&self.config) {
@@ -2432,9 +2421,9 @@

let current_label = presets
.iter()
.find(|preset| preset.model == current_model)
.find(|preset| preset.model == self.model)
.map(|preset| preset.display_name.to_string())
.unwrap_or_else(|| current_model.clone());
.unwrap_or_else(|| self.model.clone());

let (mut auto_presets, other_presets): (Vec<ModelPreset>, Vec<ModelPreset>) = presets
.into_iter()
@@ -2460,7 +2449,7 @@ impl ChatWidget {
SelectionItem {
name: preset.display_name.clone(),
description,
is_current: model == current_model,
is_current: model == self.model,
is_default: preset.is_default,
actions,
dismiss_on_select: true,
@@ -2523,12 +2512,11 @@ impl ChatWidget {
return;
}

let current_model = self.model_family.get_model_slug().to_string();
let mut items: Vec<SelectionItem> = Vec::new();
for preset in presets.into_iter() {
let description =
(!preset.description.is_empty()).then_some(preset.description.to_string());
let is_current = preset.model == current_model;
let is_current = preset.model == self.model;
let single_supported_effort = preset.supported_reasoning_efforts.len() == 1;
let preset_for_action = preset.clone();
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
@@ -2654,7 +2642,7 @@ impl ChatWidget {
.or(Some(default_effort));

let model_slug = preset.model.to_string();
let is_current_model = self.model_family.get_model_slug() == preset.model;
let is_current_model = self.model == preset.model;
let highlight_choice = if is_current_model {
self.config.model_reasoning_effort
} else {
@@ -3244,9 +3232,9 @@ impl ChatWidget {
}

/// Set the model in the widget's config copy.
pub(crate) fn set_model(&mut self, model: &str, model_family: ModelFamily) {
pub(crate) fn set_model(&mut self, model: &str) {
self.session_header.set_model(model);
self.model_family = model_family;
self.model = model.to_string();
}

pub(crate) fn add_info_message(&mut self, message: String, hint: Option<String>) {
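For orientation, a self-contained sketch of the new context_remaining_percent behavior shown in the hunk above: the percentage now comes only from the reported token-usage info, with no fallback to a ModelFamily context window. The TokenUsage types and the percentage formula are simplified assumptions, not the real codex-core implementations.

// Stand-in types for illustration; the real TokenUsageInfo / TokenUsage live
// in codex-core, and the percentage formula below is an assumption.
#[derive(Default)]
struct TokenUsage {
    total_tokens: i64,
}

impl TokenUsage {
    // Assumed formula: share of the context window not yet consumed.
    fn percent_of_context_window_remaining(&self, window: i64) -> i64 {
        if window <= 0 {
            return 0;
        }
        let remaining = (window - self.total_tokens).max(0);
        remaining * 100 / window
    }
}

struct TokenUsageInfo {
    model_context_window: Option<i64>,
    last_token_usage: TokenUsage,
}

// After this change the widget no longer falls back to a ModelFamily-provided
// context window: if the session did not report one, the percentage is None.
fn context_remaining_percent(info: &TokenUsageInfo) -> Option<i64> {
    info.model_context_window.map(|window| {
        info.last_token_usage
            .percent_of_context_window_remaining(window)
    })
}

fn main() {
    let reported = TokenUsageInfo {
        model_context_window: Some(200_000),
        last_token_usage: TokenUsage { total_tokens: 50_000 },
    };
    assert_eq!(context_remaining_percent(&reported), Some(75));

    let unknown = TokenUsageInfo {
        model_context_window: None,
        last_token_usage: TokenUsage::default(),
    };
    assert_eq!(context_remaining_percent(&unknown), None);
}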
9 changes: 4 additions & 5 deletions codex-rs/tui/src/chatwidget/tests.rs
@@ -313,7 +313,6 @@ async fn helpers_are_available_and_do_not_panic() {
let tx = AppEventSender::new(tx_raw);
let cfg = test_config().await;
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let model_family = ModelsManager::construct_model_family_offline(&resolved_model, &cfg);
let conversation_manager = Arc::new(ConversationManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
@@ -330,7 +329,7 @@
models_manager: conversation_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
is_first_run: true,
model_family,
model: resolved_model,
};
let mut w = ChatWidget::new(init, conversation_manager);
// Basic construction sanity.
@@ -371,11 +370,11 @@ async fn make_chatwidget_manual(
codex_op_tx: op_tx,
bottom_pane: bottom,
active_cell: None,
config: cfg.clone(),
model_family: ModelsManager::construct_model_family_offline(&resolved_model, &cfg),
config: cfg,
model: resolved_model.clone(),
auth_manager: auth_manager.clone(),
models_manager: Arc::new(ModelsManager::new(auth_manager)),
session_header: SessionHeader::new(resolved_model.clone()),
session_header: SessionHeader::new(resolved_model),
initial_user_message: None,
token_info: None,
rate_limit_snapshot: None,