From e4310be51f617f5e60382038fa9cbf53a2429ca4 Mon Sep 17 00:00:00 2001 From: Michael Bolin Date: Thu, 30 Apr 2026 08:06:34 -0700 Subject: [PATCH 1/9] ## New Features - Added persisted `/goal` workflows with app-server APIs, model tools, runtime continuation, and TUI controls for create, pause, resume, and clear. (#18073, #18074, #18075, #18076, #18077, #20082) - Added `codex update`, configurable TUI keymaps, plan-mode nudges, action-required terminal titles, and active-turn `/statusline` and `/title` edits. (#19933, #18593, #19901, #18372, #19917) - Expanded permission profiles with built-in defaults, sandbox CLI profile selection, cwd controls, and active-profile metadata for clients. (#19900, #20117, #20118, #20095) - Improved plugin workflows with marketplace installation, remote bundle caching, remote uninstall, plugin-bundled hooks, hook enablement state, and external-agent config import. (#18704, #19914, #19456, #19705, #19840, #19949) - Added external agent session import, including background imports and imported-session title handling. (#19895, #20284, #20261) - Made MultiAgentV2 configuration more explicit with thread caps, wait-time controls, root/subagent hints, and v2-specific depth handling. (#19360, #19792, #19805, #20052, #20180) ## Bug Fixes - Fixed several resume and interruption issues, including stale interrupt hangs, persisted provider restoration, large remote resume responses, and slow filtered resume lists. (#18392, #19287, #19920, #19591) - Improved TUI reliability around terminal resize reflow, markdown list spacing, slash-command popup layout, keyboard cleanup, shell-mode escape, and working status updates. (#18575, #19706, #19511, #19625, #19986, #19939) - Hardened managed network behavior for deferred denials, proxy bypass defaults, resolved target checks, IPv6 host matching, and `git -C` approval handling. 
(#19184, #20002, #19999, #19995, #20085) - Fixed Windows sandbox and PTY edge cases, including pseudoconsole startup, elevated runner process handling, core shell environment inheritance, and named-pipe validation. (#20042, #19211, #20089, #19283) - Fixed Bedrock model support for `apply_patch`, GPT-5.4 reasoning levels, and updated Bedrock GPT-5.4 endpoint/model metadata. (#19416, #19461, #20109) - Fixed MCP/plugin edge cases around stdio server cleanup, plugin MCP approval persistence, and custom MCP metadata isolation. (#19753, #19537, #19836, #19875) ## Documentation - Updated the bundled OpenAI Docs skill for GPT-5.5, `gpt-image-2`, and clearer upgrade guidance. (#19407, #19443, #19422) - Clarified contributor-facing docs, including the PR template, Rust async trait guidance, and README wording. (#19912, #20242, #19514) - Added a checked-in `codex-core` public API listing and a ThreadManager sample crate. (#20243, #20141) ## Chores - Published `codex-app-server` release artifacts, stopped publishing GNU Linux binaries, and increased release workflow timeouts. (#19447, #19445, #20271, #20343) - Added Codex-pinned versioning for the Python app-server SDK package. (#18996) - Deprecated `--full-auto` while steering users toward explicit permission profiles and trust flows. (#20133) - Stabilized CI and release plumbing with Bazel setup migration, release smoke-test pinning, and updated workflow pins/timeouts. 
(#19851, #19854, #19472, #19609) ## Changelog Full Changelog: https://github.com/openai/codex/compare/rust-v0.125.0...rust-v0.128.0 - #19124 Make MultiAgentV2 interruption markers assistant-authored @jif-oai - #19354 chore: alias max_concurrent_threads_per_session @jif-oai - #19360 feat: surface multi-agent thread limit in spawn description @jif-oai - #19351 Add agents.interrupt_message for interruption markers @jif-oai - #18392 Fix hang on turn/interrupt @danwang-oai - #19380 chore: drop MCP Plugins and App from Morpheus @jif-oai - #18907 respect workspace option for disabling plugins @zamoshchin-openai - #19283 check PID of named pipe consumer @iceweasel-oai - #19407 Update bundled OpenAI Docs skill for GPT-5.5 @kkahadze-oai - #19163 Harden package-manager install policy @mcgrew-oai - #19416 Fix: use function apply_patch tool for Bedrock model @celia-oai - #19093 [codex] Omit fork turns from thread started notifications @euroelessar - #19244 Update unix socket transport to use WebSocket upgrade @willwang-openai - #19170 Skip disabled rows in selection menu numbering and default focus @canvrno-oai - #19414 permissions: make legacy profile conversion cwd-free @bolinfest - #18900 Migrate fork and resume reads to thread store @wiltzius-openai - #19445 ci: stop publishing GNU Linux release artifacts @bolinfest - #19443 Add gpt-image-2 to bundled OpenAI Docs skill @kkahadze-oai - #18584 [4/4] Honor Streamable HTTP MCP placement @aibrahim-oai - #19447 ci: publish codex-app-server release artifacts @bolinfest - #19422 Clarify bundled OpenAI Docs upgrade guide wording @kkahadze-oai - #19266 [codex] add non-local thread store regression harness @wiltzius-openai - #19098 feat: Compress skill paths with root aliases @xl-openai - #19207 [codex] Forward Codex Apps tool call IDs to backend metadata @rreichel3-oai - #19453 Serialize legacy Windows PowerShell sandbox tests @dylan-hurd-oai - #19234 Refactor log DB into LogWriter interface @rasmusrygaard - #19461 fix: Bedrock 
GPT-5.4 reasoning levels @celia-oai - #19449 permissions: remove legacy read-only access modes @bolinfest - #19472 ci: pin codex-action v1.7 @viyatb-oai - #19468 Fix Bazel cargo_bin runfiles paths @fjord-oai - #19410 Remove js_repl feature @fjord-oai - #18073 Add goal persistence foundation (1 / 5) @etraut-openai - #18074 Add goal app-server API (2 / 5) @etraut-openai - #18075 Add goal model tools (3 / 5) @etraut-openai - #18076 Add goal core runtime (4 / 5) @etraut-openai - #18077 Add goal TUI UX (5 / 5) @etraut-openai - #19454 Split approval matrix test groups @dylan-hurd-oai - #19514 Fix codex-rs README grammar @etraut-openai - #19459 Enable unavailable dummy tools by default @mzeng-openai - #19524 [codex] Prune unused codex-mcp API and duplicate helpers @aibrahim-oai - #19526 [codex] Order codex-mcp items by visibility @aibrahim-oai - #19578 fix: increase Bazel timeout to 45 minutes @bolinfest - #19287 Restore persisted model provider on thread resume @etraut-openai - #19593 test: isolate remote thread store regression from plugin warmups @bolinfest - #19511 Keep slash command popup columns stable while scrolling @etraut-openai - #19595 [codex] Bypass managed network for escalated exec @viyatb-oai - #19604 test: stabilize app-server path assertions on Windows @bolinfest - #19609 fix: restore 30-minute timeout for Bazel builds @bolinfest - #19389 Guard npm update readiness @shijie-oai - #18575 fix(tui): reflow scrollback on terminal resize @fcoury-oai - #19610 Support end_turn in response.completed @andmis - #19640 [codex] remove responses command @tibo-openai - #19683 test: harden app-server integration tests @bolinfest - #18904 feat: load AgentIdentity from JWT login/env @efrazer-oai - #19606 permissions: make runtime config profile-backed @bolinfest - #19392 permissions: derive compatibility policies from profiles @bolinfest - #19484 Lift app-server JSON-RPC error handling to request boundary @pakrym-oai - #19487 [codex] Move config loading into codex-config 
@pakrym-oai - #19393 permissions: migrate approval and sandbox consumers to profiles @bolinfest - #19726 Fix codex-core config test type paths @pakrym-oai - #19727 test: increase core-all-test shard count to 16 @bolinfest - #19725 Split MCP connection modules @aibrahim-oai - #19605 Delete unused ResponseItem::Message.end_turn @andmis - #19394 permissions: remove core legacy policy round trips @bolinfest - #19733 Allow agents.max_threads to work with multi_agent_v2 @andmis - #19395 permissions: finish profile-backed app surfaces @bolinfest - #19739 inline hostname resolution for remote sandbox config @abhinav-oai - #19734 permissions: centralize legacy sandbox projection @bolinfest - #19058 Add /auto-review-denials retry approval flow @won-openai - #19735 permissions: store only constrained permission profiles @bolinfest - #19736 permissions: constrain requirements as profiles @bolinfest - #19737 permissions: derive legacy exec policies at boundaries @bolinfest - #19779 Add Codex issue digest skill @etraut-openai - #19792 multi_agent_v2: move thread cap into feature config @jif-oai - #18982 feat: use git-backed workspace diffs for memory consolidation @jif-oai - #19809 Allow Phase 2 memory claims after retry exhaustion @jif-oai - #19812 Avoid rewriting Phase 2 selection on clean workspace @jif-oai - #19813 nit: one more fix @jif-oai - #19818 chore: split memories part 1 @jif-oai - #19510 Hide rewind preview when no user message exists @etraut-openai - #19618 Persist shell mode commands in prompt history @etraut-openai - #19709 Render delegated patch approval details @etraut-openai - #19490 Streamline plugin, apps, and skills handlers @pakrym-oai - #19762 refactor: make auth loading async @efrazer-oai - #19854 ci: pin npm staging smoke test to a recent rust-release run @bolinfest - #19851 ci: migrate Bazel setup away from archived setup-bazelisk @bolinfest - #19491 Streamline account and command handlers @pakrym-oai - #19771 fix: filter dynamic deferred tools from 
model_visible_specs @sayan-oai - #19863 [codex-analytics] remove ga flag @rhan-oai - #19865 Cap original-detail image token estimates @fjord-oai - #19591 Fix filtered thread-list resume regression in TUI @etraut-openai - #19513 Delay approval prompts while typing @etraut-openai - #19706 Preserve TUI markdown list spacing after code blocks @etraut-openai - #19841 permissions: remove cwd special path @bolinfest - #19492 Streamline thread start handler @pakrym-oai - #19874 [codex-backend] Prefer state git metadata in filtered thread lists @joeytrasatti-openai - #19493 Streamline thread mutation handlers @pakrym-oai - #19862 [codex] Shard exec Bazel integration test @starr-openai - #18996 Publish Python SDK with Codex-pinned versioning @sdcoffey - #19494 Streamline thread read handlers @pakrym-oai - #19839 [codex] Trace cancelled inference streams @cassirer-openai - #19495 Streamline thread resume and fork handlers @pakrym-oai - #19497 Streamline turn and realtime handlers @pakrym-oai - #18372 Show action required in terminal title @canvrno-oai - #19884 Add MCP app feature flag @mzeng-openai - #19498 Streamline review and feedback handlers @pakrym-oai - #19772 permissions: derive config defaults as profiles @bolinfest - #19836 disallow fileparams metadata for custom mcps @colby-oai - #19892 Refactor exec-server filesystem API into codex-file-system @miz-openai - #19452 Stabilize plugin MCP fixture tests @dylan-hurd-oai - #19481 Remove ghost snapshots @pakrym-oai - #19773 permissions: require profiles in TUI thread state @bolinfest - #19917 Allow /statusline and /title slash commands during active turns @canvrno-oai - #19763 refactor: load agent identity runtime eagerly @efrazer-oai - #17689 [codex-analytics] include user agent in default headers @marksteinbrick-oai - #19912 Clarify PR template invitation requirement @etraut-openai - #19630 Avoid persisting ShutdownComplete after thread shutdown @etraut-openai - #19774 permissions: make SessionConfigured profile-only 
@bolinfest - #19775 permissions: derive snapshot sandbox projections @bolinfest - #19920 Allow large remote app-server resume responses @etraut-openai - #19776 permissions: store thread sessions as profiles @bolinfest - #19899 app-server-protocol: mark permission profiles experimental @bolinfest - #19933 Add `codex update` command @etraut-openai - #19914 feat: Cache remote plugin bundles on install @xl-openai - #19456 Add remote plugin uninstall API @xli-oai - #19805 Add MultiAgentV2 root and subagent context hints @jif-oai - #19860 feat: split memories part 2 @jif-oai - #19961 feat: fix hinting 2 @jif-oai - #19963 feat: fix hinting 3 @jif-oai - #19967 Stabilize memory Phase 2 input ordering @jif-oai - #19970 feat: trigger memories from user turns with cooldown @jif-oai - #19904 fix: configure AgentIdentity AuthAPI base URL @efrazer-oai - #19990 feat: skip memory startup when Codex rate limits are low @jif-oai - #19998 feat: house-keeping memories 1 @jif-oai - #20000 feat: house-keeping memories 2 @jif-oai - #19832 Preserve assistant phase for replayed messages @friel-openai - #19625 Reset TUI keyboard reporting on exit @etraut-openai - #18593 feat(tui): add configurable keymap support @fcoury-oai - #19846 [sandbox] Enforce protected workspace metadata paths @evawong-oai - #20005 feat: house-keeping memories 3 @jif-oai - #19929 TUI: use cumulative turn duration for worked-for separator @etraut-openai - #19753 Terminate stdio MCP servers on shutdown to avoid process leaks @etraut-openai - #19473 Add turn start timestamp to turn metadata @mchen-oai - #19875 Strip connector provenance metadata from custom MCP tools @colby-oai - #19764 feat: verify agent identity JWTs with JWKS @efrazer-oai - #19847 Enforce workspace metadata protections in Seatbelt @evawong-oai - #19509 Record MCP result telemetry on mcp.tools.call spans @mchen-oai - #19907 Clarify network approval auto-review prompts @maja-openai - #19901 feat(tui): suggest plan mode from composer drafts @fcoury-oai 
- #19931 Move local /resume cwd filtering into thread/list @canvrno-oai - #19986 fix(tui): let esc exit empty shell mode @fcoury-oai - #19895 External agent session support @stefanstokic-oai - #20002 fix(network-proxy): tighten network proxy bypass defaults @viyatb-oai - #19900 permissions: add built-in default profiles @bolinfest - #20045 Fix plan mode nudge test after task completion signature change @canvrno-oai - #19432 [codex] Add token usage to turn tracing spans @charley-openai - #20001 fix(network-proxy): harden linux proxy bridge helpers @viyatb-oai - #19959 Fix log db batch flush flake @dylan-hurd-oai - #17373 app-server: run initialized rpcs with keyed serialization @euroelessar - #19708 Load cloud requirements for agent identity @shijie-oai - #19999 fix(network-proxy): recheck network proxy connect targets @viyatb-oai - #20047 app-server: allow remote_control runtime feature override @euroelessar - #20052 Make MultiAgentV2 wait minimum configurable @jif-oai - #20008 tui: use permission profiles for sandbox state @bolinfest - #20068 app-server: disable remote control without sqlite @euroelessar - #20066 [rollout-trace] Include x-request-id in rollout trace. @cassirer-openai - #19705 Discover hooks bundled with plugins @abhinav-oai - #18704 /plugins: add marketplace install flow @canvrno-oai - #20085 fix: don't auto approve git -C ... 
@owenlin0 - #20088 Fix flaky plugin hook env test @abhinav-oai - #19995 fix(network-proxy): normalize network proxy host matching @viyatb-oai - #20010 core tests: submit turns with permission profiles @bolinfest - #20092 Return None when auth refresh fails @gpeal - #19919 app-server: notify clients of remote-control status changes @euroelessar - #20097 Refine Codex issue digest summaries @etraut-openai - #20011 core tests: build user turns from permission profiles @bolinfest - #20013 core tests: migrate more turns to permission profiles @bolinfest - #20015 core tests: configure profiles directly @bolinfest - #20016 core tests: send model turns with permission profiles @bolinfest - #20100 Increase plugin hook env test timeout @abhinav-oai - #20018 core tests: migrate model/personality turns to profiles @bolinfest - #20021 core tests: migrate view image turns to profiles @bolinfest - #20024 core tests: migrate safety check turns to profiles @bolinfest - #20026 core tests: migrate plan item turns to profiles @bolinfest - #20027 core tests: migrate tools tests to permission profiles @bolinfest - #20028 core tests: migrate permissions message tests to profiles @bolinfest - #20030 core tests: migrate exec policy turns to profiles @bolinfest - #20032 core tests: migrate prompt caching turns to profiles @bolinfest - #20033 core tests: migrate request permissions tool turns to profiles @bolinfest - #20034 core tests: migrate zsh-fork permissions to profiles @bolinfest - #20035 core tests: migrate compact turns to profiles @bolinfest - #20037 core tests: migrate rmcp turns to profiles @bolinfest - #20040 core tests: migrate apply patch turns to profiles @bolinfest - #20041 core tests: migrate hook turns to profiles @bolinfest - #20072 Support disabling tool suggest for specific tools. 
@mzeng-openai - #19949 Support detect and import MCP, Subagents, hooks, commands from external @alexsong-oai - #19442 feat: disable capabilities by model provider @celia-oai - #20108 fix: restore live event submit path for apply patch tests @bolinfest - #19939 Restore TUI working status after steer message is set @canvrno-oai - #20086 Fix plugin list workspace settings test isolation @canvrno-oai - #20049 feat: expose provider capability bounds to app server clients @celia-oai - #20109 feat: update Bedrock Mantle endpoint and GPT-5.4 model ID @celia-oai - #20106 linux-sandbox: switch helper plumbing to PermissionProfile @bolinfest - #20112 Soften skill description budget warnings @xl-openai - #20058 Add environment provider snapshot @starr-openai - #20133 chore(cli) deprecate --full-auto @dylan-hurd-oai - #20117 feat(cli): add explicit sandbox permission profiles @viyatb-oai - #20139 Delete multi_agent_v2 followup_task interrupt parameter @andmis - #20118 feat(cli): add sandbox profile config controls @viyatb-oai - #20144 Fix migrated hook path rewriting @alexsong-oai - #20042 Fix Windows pseudoconsole attribute handling for sandboxed PTY sessions @iceweasel-oai - #20186 nit: drop old memories things @jif-oai - #20180 Make multi-agent v2 ignore agents.max_depth @jif-oai - #20082 Use /goal resume for paused goals @etraut-openai - #20172 TUI: Remove core protocol dependency [1/7] @etraut-openai - #19211 Improve Windows process management edge cases @iceweasel-oai - #20123 [rollout-tracer] Match analysis messages on encrypted id. 
@cassirer-openai - #20173 TUI: Remove core protocol dependency [2/7] @etraut-openai - #20174 TUI: Remove core protocol dependency [3/7] @etraut-openai - #20228 [codex-backend] Prefer sqlite git info for rollout-path reads @joeytrasatti-openai - #20141 Add ThreadManager sample crate @pakrym-oai - #20046 test protocol: lock inter-agent commentary phase @friel-openai - #20064 Include auto-review rollout in feedback uploads @won-openai - #20096 feat: Use remote installed plugin cache for skills and MCP @xl-openai - #19184 fix: handle deferred network proxy denials @viyatb-oai - #20089 expand the set of core shell env vars for Windows. @iceweasel-oai - #17088 [codex-analytics] ingest server requests and responses @rhan-oai - #20091 [tool_suggest] Improve tool_suggest triggering conditions. @mzeng-openai - #20258 app-server: fix outgoing sender test setup @sayan-oai - #20050 [app-server] type client response payloads @rhan-oai - #19966 Require remote plugin detail before uninstall @xli-oai - #20059 [app-server] centralize client response analytics @rhan-oai - #19334 Fallback login callback port when default is busy @xli-oai - #20231 [apps] Add apps MCP path override @adaley-openai - #20242 docs: discourage `#[async_trait]` and `#[allow(async_fn_in_trait)]` @bolinfest - #19620 Escape turn metadata headers as ASCII JSON @etraut-openai - #19537 [mcp] Fix plugin MCP approval policy. 
@mzeng-openai - #19229 Add agent graph store interface @rasmusrygaard - #20243 Add codex-core public API listing @pakrym-oai - #19435 stop blocking unified_exec on Windows @iceweasel-oai - #19852 Enforce workspace metadata protections in Linux sandbox @evawong-oai - #20136 Update Codex login success page UX @rafael-jac - #20271 chore: increase release build timeout from 60 min to 90 @bolinfest - #19778 Add hooks/list app-server RPC @abhinav-oai - #20261 Consume ai-title from external sessions and add end marker @alexsong-oai - #20284 Import external agent sessions in background @stefanstokic-oai - #20149 Reduce the surface of collaboration modes @pakrym-oai - #20282 tui: return from side chat on Ctrl-D @etraut-openai - #20250 update codex_plugins_beta_setting (from workspace settings) @zamoshchin-openai - #20080 [codex-analytics] prevent stale guardian events from satisfying reused reviews @rhan-oai - #20291 app-server: remove dead api version handling from bespoke events @pakrym-oai - #20304 [plugins] Allow MSFT curated plugins in tool_suggest @mzeng-openai - #20095 permissions: expose active profile metadata @bolinfest - #19840 Add persisted hook enablement state @abhinav-oai - #20343 ci: increase Windows release workflow timeouts @bolinfest --- codex-rs/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codex-rs/Cargo.toml b/codex-rs/Cargo.toml index 79d932c8be4a..abe3e0385453 100644 --- a/codex-rs/Cargo.toml +++ b/codex-rs/Cargo.toml @@ -109,7 +109,7 @@ members = [ resolver = "2" [workspace.package] -version = "0.0.0" +version = "0.128.0" # Track the edition for all workspace crates in one place. 
Individual # crates can still override this value, but keeping it here means new # crates created with `cargo new -w ...` automatically inherit the 2024 From adcd003afd2e5ac118c06b03bb19c68bc0f076a6 Mon Sep 17 00:00:00 2001 From: Loongphy Date: Fri, 1 May 2026 01:55:41 +0800 Subject: [PATCH 2/9] chore: copy reapply carry-over files from feat/rust-v0.125.0 --- .../skills/codex-upstream-reapply/SKILL.md | 227 +++++ .../references/advanced.md | 74 ++ .../references/npm-release.md | 88 ++ .../prepare_reimplementation_bundle.sh | 331 +++++++ .../scripts/start_from_tag.sh | 596 +++++++++++ .agents/skills/status-header/SKILL.md | 110 +++ .github/scripts/rusty_v8_bazel.py | 71 -- .github/workflows/Dockerfile.bazel | 20 - .github/workflows/README.md | 33 - .github/workflows/bazel.yml | 298 ------ .github/workflows/blob-size-policy.yml | 32 - .github/workflows/cargo-deny.yml | 26 - .github/workflows/ci.yml | 75 -- .github/workflows/cla.yml | 49 - .../workflows/close-stale-contributor-prs.yml | 107 -- .github/workflows/codespell.yml | 27 - .github/workflows/issue-deduplicator.yml | 402 -------- .github/workflows/issue-labeler.yml | 143 --- .github/workflows/rust-ci-full.yml | 770 --------------- .github/workflows/rust-ci.yml | 222 ----- .../rust-release-argument-comment-lint.yml | 106 -- .github/workflows/rust-release-prepare.yml | 53 - .github/workflows/rust-release-windows.yml | 288 ------ .github/workflows/rust-release-zsh.yml | 95 -- .github/workflows/rust-release.yml | 922 ++++++------------ .github/workflows/rusty-v8-release.yml | 190 ---- .github/workflows/sdk.yml | 122 --- .github/workflows/v8-canary.yml | 136 --- .github/workflows/zstd | 46 - AGENTS.md | 75 +- CHANGED.md | 68 ++ README.md | 105 +- codex-cli/bin/codex.js | 49 +- codex-cli/package.json | 8 +- codex-cli/scripts/build_npm_package.py | 42 +- codex-cli/scripts/install_native_deps.py | 34 +- 36 files changed, 1952 insertions(+), 4088 deletions(-) create mode 100644 
.agents/skills/codex-upstream-reapply/SKILL.md create mode 100644 .agents/skills/codex-upstream-reapply/references/advanced.md create mode 100644 .agents/skills/codex-upstream-reapply/references/npm-release.md create mode 100755 .agents/skills/codex-upstream-reapply/scripts/prepare_reimplementation_bundle.sh create mode 100755 .agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh create mode 100644 .agents/skills/status-header/SKILL.md delete mode 100644 .github/workflows/Dockerfile.bazel delete mode 100644 .github/workflows/README.md delete mode 100644 .github/workflows/bazel.yml delete mode 100644 .github/workflows/blob-size-policy.yml delete mode 100644 .github/workflows/cargo-deny.yml delete mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/cla.yml delete mode 100644 .github/workflows/close-stale-contributor-prs.yml delete mode 100644 .github/workflows/codespell.yml delete mode 100644 .github/workflows/issue-deduplicator.yml delete mode 100644 .github/workflows/issue-labeler.yml delete mode 100644 .github/workflows/rust-ci-full.yml delete mode 100644 .github/workflows/rust-ci.yml delete mode 100644 .github/workflows/rust-release-argument-comment-lint.yml delete mode 100644 .github/workflows/rust-release-prepare.yml delete mode 100644 .github/workflows/rust-release-windows.yml delete mode 100644 .github/workflows/rust-release-zsh.yml delete mode 100644 .github/workflows/rusty-v8-release.yml delete mode 100644 .github/workflows/sdk.yml delete mode 100644 .github/workflows/v8-canary.yml delete mode 100755 .github/workflows/zstd create mode 100644 CHANGED.md diff --git a/.agents/skills/codex-upstream-reapply/SKILL.md b/.agents/skills/codex-upstream-reapply/SKILL.md new file mode 100644 index 000000000000..c071b8d1fd8c --- /dev/null +++ b/.agents/skills/codex-upstream-reapply/SKILL.md @@ -0,0 +1,227 @@ +--- +name: codex-upstream-reapply +description: 'Reapply a fork or secondary-development branch onto the latest stable rust-vX.Y.Z 
tag by creating a fresh branch from that tag and re-implementing the old branch intent without merge or rebase.' +--- + +# Codex Upstream Reapply + +## Overview + +用于“二开/魔改”场景的 tag 同步:默认按 `rust-*` 过滤拉取/查看 upstream tags,自动选择最新的稳定正式版 Rust tag(只接受 `rust-vX.Y.Z`,忽略 `-alpha`/`-beta`/`-rc`),并使用当前分支作为 `OLD_BRANCH`;然后从该 tag 创建新分支作为开发起点,再读取旧二开分支的 git changes 与意图 Markdown,在新分支上“重实现”需求(不 merge/rebase 旧分支历史)。 + +核心原则:`OLD_BRANCH` 的代码与提交历史只是参考材料,不是要直接照搬到 `NEW_BRANCH`。每次新的 upstream tag 都可能已经重构了相关模块,所以应当以 `CHANGED.md`、意图文档和旧分支行为为需求来源,基于当前 `TAG` 对应的代码结构重新实现。 + +## Default Mode(用户没指定参数时) + +如果用户只是说类似 `$codex-upstream-reapply do it`,默认直接这样做,不再追问 tag / branch: + +1. `REMOTE=upstream` +2. `TAG_PATTERN=rust-*` +3. `TAG=最新稳定正式版 Rust tag` +说明:只接受精确匹配 `rust-vX.Y.Z` 的 tag,例如 `rust-v0.117.0`;忽略 `rust-v0.117.0-alpha.1` +4. `OLD_BRANCH=当前分支` +说明:用 `git branch --show-current` 或等价命令获取 +5. `NEW_BRANCH=feat/` +6. 如果当前分支已经等于 `TAG` 或 `feat/`,说明已经对齐到最新正式版,直接停止,不再继续重实现流程 +7. 如果不一致,再执行后续 reapply 逻辑 + +## Inputs (每次明确这些东西) + +- `REMOTE`:拉取 tags 的 remote(默认 `upstream`) +- `TAG_PATTERN`:tag 过滤规则(默认 `rust-*`) +- `TAG`:你选择的 tag 版本(作为新分支起点;默认取最新稳定正式版 `rust-vX.Y.Z`) +- `OLD_BRANCH`:原本二开的分支(包含改动 + 意图 Markdown;默认取“当前分支”) +- `NEW_BRANCH`:从 tag 新建的分支名(脚本默认 `feat/`) +- 可选:`OLD_BASE_TAG`(仅当基线推断不可靠时显式指定) + +## Workflow (推荐:完全不 merge / 不 rebase 旧分支) + +### 0) Acceptance criteria (必读) + +- 禁止运行 `cargo test`(不需要写/跑测试)。 +- 不得生成测试代码或快照文件:确保本次变更里没有新增/修改测试代码或 `*.snap`/`*.snap.new`。 +- 禁止运行任何会检查/编译测试目标,或可能因此倒逼你修改测试代码的命令;包括但不限于 `cargo clippy --tests`、`cargo clippy --fix --tests`、`just fix`、`cargo insta ...`。 +- 除非用户明确要求,不运行 `cargo fmt` / `just fmt` / `cargo clippy` / `cargo clippy --fix` / `just fix` 这类格式化、lint、自动修正命令;本 skill 默认只做必要的代码实现与 build 验证。 +- 如果仓库通用 AGENTS/README/开发规范要求在大改后额外跑 `fmt` / `clippy` / `fix` / `test`,在本 skill 下默认跳过这些步骤,优先遵守“不改测试代码、只做 build 验证”的约束;如有例外必须先得到用户明确许可。 +- 在 `NEW_BRANCH` 上保留并更新根目录 `AGENTS.md`:明确说明当前正在进行的是一次 upstream reapply 工作,禁止编写/修改测试代码,禁止执行任何 lint / format / auto-fix 命令,并注明本次验收标准以本 skill 的 
Acceptance criteria 为准。使用 `start_from_tag.sh` 时,这段临时 guardrails 应由脚本自动刷新;若你没走脚本,则必须手动补上。 +- 对于用户可见的 TUI 功能,如果 `codex-rs/tui` 与 `codex-rs/tui_app_server` 都存在对应的平行实现,则必须同步落地两边;不能只改其中一边就判定该需求已完成,除非 upstream 已明确删除其一,或你能在当前 tag 的代码里给出清晰的“不需要同步”的理由。 +- 如果 `CHANGED.md` 记录的是这类共享 TUI 行为,文案应写成“用户可见行为要求”,并在需要时明确适用于 `tui` 与 `tui_app_server`,避免写成只对应某一个实现细节的说明。 +- 在 `codex-rs` 目录下执行 `cargo build -p codex-cli`,确认能正常启动运行。 +- 更新根目录 `README.md` 的 `Codex build` 徽章版本:使用选定 `TAG` 的版本号,并附加该 tag 指向的短 commit(例如 `rust-v0.94.0-dce99bc`)。推荐使用 `https://img.shields.io/static/v1?label=codex%20build&message=-&color=2ea043`。 + +### 0) One-time setup(如果还没有) + +确认是否已有 `origin`(fork)和 `upstream`(openai/codex),如没有再添加;已有就跳过 `remote add`: + +```bash +git remote -v +git remote add origin +git remote add upstream https://github.com/openai/codex.git +``` + +### 1) Freeze OLD_BRANCH (把现有改动“固化”为可回看的参考) + +- 把工作区改动都提交到 `OLD_BRANCH`(包括你写的意图 Markdown)。 +- 建议把 `OLD_BRANCH` 推到你的 fork 远端(例如 `origin`),避免本地丢失。 +- 可选:打一个 snapshot tag/branch,方便以后回溯。 + +### 2) Fetch tags & resolve TAG + +```bash +git fetch upstream 'refs/tags/rust-*:refs/tags/rust-*' --prune +git for-each-ref --sort=-v:refname --format='%(refname:short)' 'refs/tags/rust-*' +``` + +如只想先查看远端候选而不先写入本地 tags,也可以: + +```bash +git ls-remote --tags --refs upstream 'rust-*' +``` + +默认取最新稳定正式版 `TAG`: + +```bash +git for-each-ref --sort=-v:refname --format='%(refname:short)' 'refs/tags/rust-*' \ + | grep -E '^rust-v[0-9]+\.[0-9]+\.[0-9]+$' \ + | head -n 1 +``` + +如果用户明确指定了 tag,再按用户指定值覆盖默认值。 + +### 3) Generate a re-implementation bundle & create NEW_BRANCH + +用脚本生成“重实现材料包”(默认输出到 `/tmp/codex-upstream-reapply/...`),并从 `TAG` 创建 `NEW_BRANCH`: + +```bash +# 默认模式:自动选择最新稳定 Rust tag + 当前分支作为 OLD_BRANCH +bash .agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh \ + --remote upstream +``` + +如需覆盖默认值,再显式传参: + +```bash +bash .agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh \ + --remote upstream \ + --tag TAG \ + --old-branch OLD_BRANCH +``` + +脚本默认只 
fetch `rust-*` tags,并自动选择最新稳定正式版;如确需放宽范围,再显式传 `--tag-pattern `。 + +它会记录: + +- `OLD_BRANCH` 相对 `TAG` 的 `merge-base`(作为改动基线) +- 变更文件清单、diff patch、commit 列表 +- `coverage-checklist.md`:把旧分支里每个变更路径都列成 checklist,并标注它是“脚本自动带过去”还是“必须手动重实现” +-(默认)复制所有“变更过的 Markdown 意图文档”的旧版内容到 bundle 里 +-(可选)用 `--copy-all` 复制所有变更文件的旧版内容(用于离线阅读) +并且会固定复制 `OLD_BRANCH` 的 `AGENTS.md`、`README.md`、`CHANGED.md`、`.agents/skills/` 到 `NEW_BRANCH`;复制后脚本还会刷新 `AGENTS.md` 里的临时 reapply guardrails,并自动把 `README.md` 的 `Codex build` 徽章改成 `TAG` 对应的 `-`。对于 npm / release / CI 相关改动,则会按 `OLD_BRANCH` 相对基线 tag 的 git changes 自动搬运,包括删除。只要 `OLD_BRANCH` 带有 `references/npm-release.md` 对应的 skill 规则,就必须执行 npm release 文档里定义的强制动作,而不是只把它当成“默认原则”。 + +如果分支上包含 codext npm / release 相关改动,必须先看 `references/npm-release.md`。这份文档明确要求:在 `NEW_BRANCH` 上用 `OLD_BRANCH` 的 `rust-release.yml` 覆盖当前 tag 分支内容,删除其他 workflow,并直接复制 `.github/scripts/install-musl-build-tools.sh`、`.github/scripts/rusty_v8_bazel.py`、`codex-cli/package.json`、`codex-cli/bin/codex.js`、`codex-cli/bin/rg`、`codex-cli/scripts/build_npm_package.py`、`codex-cli/scripts/install_native_deps.py`;这些是必做项,不是建议。只有这些动作完成后,才允许评估上游 / 新 tag 额外新增或改动的 CI 是否要合并或忽略。 + +如果这套 codext npm / release 规则生效,所有用户可见文案、提示、tooltips、README/技能文档里凡是引用安装后命令名的地方,也必须同步使用 `codext`。例如恢复会话提示应写成 `codext resume `,不要继续保留 `codex resume ...` 这类上游命令名。 + +如果你没有使用 `start_from_tag.sh`,而是手动创建了 `NEW_BRANCH`,则紧接着必须更新 `NEW_BRANCH` 根目录 `AGENTS.md`,补充一段当前任务说明,至少包含这些信息: + +- 当前正在进行 `TAG` 对应的 upstream reapply / re-implementation 工作。 +- 本次只允许修改实现代码与必要文档,不写、不改任何测试代码或 snapshot。 +- 本次不执行任何 lint / format / auto-fix 命令(例如 `cargo fmt`、`just fmt`、`cargo clippy`、`just fix`)。 +- 本次是否完成,以本 skill 的 “Acceptance criteria” 为唯一验收标准。 + +推荐把这段说明写成显式的临时工作约束,方便后续同线程/同分支继续协作时不偏离边界。 + +如果基线推断可疑(脚本会提示),请显式指定旧分支基线 tag: + +```bash +bash .agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh \ + --remote upstream --tag TAG \ + --old-base-tag rust-vX.Y.Z +``` + +### 4) Read OLD_BRANCH as reference (理解需求与意图,而不是直接套 patch) + +从 bundle 里先读清楚“要实现什么”,再开始在 
`NEW_BRANCH` 上写代码。 + +重点: + +- `OLD_BRANCH` 的实现、diff、提交记录只用于帮助理解需求,不应直接 `cherry-pick`、照搬旧提交历史,或把旧分支当成目标代码树覆盖到新分支上。 +- `CHANGED.md` 应视为需求清单的第一参考来源;旧分支代码只是帮助你理解这些需求当时是如何落地的。 +- 对 TUI 相关需求,不要默认只看 `codex-rs/tui`。先确认当前 tag 下 `codex-rs/tui` 与 `codex-rs/tui_app_server` 是否都存在对应 surface,以及 `codex` 默认 interactive 入口实际会分发到哪一条链路,再决定需要同步重实现的范围。 +- 若 upstream 在新 `TAG` 中已经重构相关模块,应优先适配当前 codebase 的结构,在当前实现方式下重新落地相同需求,而不是强行维持旧文件组织或旧接口。 +- 最终目标是“在当前 codebase 上实现同样的需求”,不是“让新分支长得像旧分支的提交历史”。 +- `coverage-checklist.md` 是“当前分支有哪些变更必须被处理”的总清单;不要只凭记忆挑几处改。对每个路径,都要在 `NEW_BRANCH` 上做到“已自动带过 / 已手动重实现 / 明确决定不需要并记录原因”三选一。 + +常用命令(在 `NEW_BRANCH` 上也能直接读取旧分支文件): + +```bash +git show OLD_BRANCH:path/to/file +git diff OLD_BRANCH -- path/to/file +``` + +如果你需要“旧分支相对当时基线的真实改动”,用 bundle 里的 `BASE_COMMIT`(在 `META.md` 里): + +```bash +git diff BASE_COMMIT..OLD_BRANCH -- path/to/file +``` + +### 5) Re-implement on NEW_BRANCH + +- 按“需求点/模块”拆分小 commit 逐步实现。 +- 以 `coverage-checklist.md` 为 per-file 兜底清单,避免遗漏当前分支的任何改动。 +- 以 `CHANGED.md` 中记录的变动为主线逐项核对,确认每项需求都在当前 codebase 上重新实现。 +- 让意图文档与实现保持一致(必要时更新 Markdown)。 +- 不跑测试;不要生成或更新任何测试文件/快照文件。 + +### 5.1) Status header 规范(改动 TUI 状态栏时) + +- 状态栏是共享 TUI surface:如果 `codex-rs/tui` 与 `codex-rs/tui_app_server` 都渲染了这一层,默认两边都要同步修改,不能只改经典 `tui`。 +- 具体图标、颜色、segment 顺序、rate-limit summary 格式与刷新语义,统一遵循 `status-header` skill;这里不要再维护第二份会漂移的细节规范。 +- 如果当前仓库的 TUI 样式规范、lint 或现有封装与状态栏 skill 的示例写法冲突,优先遵循仓库本身的规则,但要保持相同的用户可见效果;不要为了强行对齐示例而引入 `clippy` 警告/报错,或去修改测试代码。 + +### 6) Build (codex-rs) + +在 `codex-rs` 目录下执行: + +```bash +cargo build -p codex-cli +``` + +### 7) Sanity checks + +比较“你最终在新分支做了哪些改动”(相对 `TAG`): + +```bash +git diff --stat TAG..NEW_BRANCH +git diff TAG..NEW_BRANCH +``` + +对照旧分支材料包,确认需求点都覆盖到即可(不要求 diff 完全一致)。 + +更多对照方式(worktree、merge-base 对照等)见 `references/advanced.md`。 + +### 8) Final release build (codex-rs) + +所有重实现修改完成并确认后,再执行最后收尾构建: + +```bash +cd codex-rs +cargo build -p codex-cli --release +``` + +## How changes are computed from OLD_BRANCH + +默认用以下方式推断旧分支的“改动基线”: + +```bash 
+BASE_COMMIT="$(git merge-base TAG OLD_BRANCH)" +git diff "${BASE_COMMIT}..OLD_BRANCH" +``` + +如果推断结果可疑(例如 `OLD_BRANCH` 的历史标记与 `TAG` 不一致),脚本会停止并要求你明确指定: + +```bash +--old-base-tag rust-vX.Y.Z +``` + +这样可以准确得到 “从指定 Rust tag 到 OLD_BRANCH 的全部二开变更”。 diff --git a/.agents/skills/codex-upstream-reapply/references/advanced.md b/.agents/skills/codex-upstream-reapply/references/advanced.md new file mode 100644 index 000000000000..0f01c6118a85 --- /dev/null +++ b/.agents/skills/codex-upstream-reapply/references/advanced.md @@ -0,0 +1,74 @@ +# Advanced Recipes + +## Two worktrees for side-by-side porting (recommended) + +Use when: +- You want to read old code/docs while implementing on the new tag-based branch. +- You want to avoid constantly switching branches. + +Example: + +```bash +# In repo root (adjust paths as needed) +git fetch upstream 'refs/tags/rust-*:refs/tags/rust-*' --prune + +# Old branch worktree (reference) +git worktree add /tmp/wt-old OLD_BRANCH + +# New branch worktree (fresh branch from selected tag) +git worktree add -b NEW_BRANCH /tmp/wt-new TAG +``` + +Cleanup: + +```bash +git worktree remove /tmp/wt-old +git worktree remove /tmp/wt-new +``` + +## Find the real delta of OLD_BRANCH (merge-base vs tag) + +Use when: +- Your old branch was based on an older tag/commit and you want the exact “custom delta”. + +Example: + +```bash +BASE_COMMIT="$(git merge-base TAG OLD_BRANCH)" +git diff "${BASE_COMMIT}..OLD_BRANCH" > /tmp/old-delta.patch +git diff --name-status "${BASE_COMMIT}..OLD_BRANCH" +``` + +## Read old files without switching branches + +Use when: +- You are on NEW_BRANCH but want to view old docs/code quickly. + +```bash +git show OLD_BRANCH:path/to/file +``` + +For diffs: + +```bash +git diff OLD_BRANCH -- path/to/file +``` + +## Compare “custom delta” old vs new + +Use when: +- NEW_BRANCH is based on a selected tag and you want to verify your re-implementation covers the old intent. 
+ +```bash +OLD_BASE="$(git merge-base TAG OLD_BRANCH)" + +# Old delta (against its original base) +git diff "${OLD_BASE}..OLD_BRANCH" > /tmp/old.patch + +# New delta (against selected tag) +git diff TAG..NEW_BRANCH > /tmp/new.patch + +# Optional quick check (names only) +git diff --name-status "${OLD_BASE}..OLD_BRANCH" +git diff --name-status TAG..NEW_BRANCH +``` diff --git a/.agents/skills/codex-upstream-reapply/references/npm-release.md b/.agents/skills/codex-upstream-reapply/references/npm-release.md new file mode 100644 index 000000000000..1d8661f1ef4f --- /dev/null +++ b/.agents/skills/codex-upstream-reapply/references/npm-release.md @@ -0,0 +1,88 @@ +# NPM release + +这个文档用于指导 `codex-upstream-reapply` 在处理 npm / release / CI 相关改动时,哪些内容必须直接沿用旧分支,哪些删除要同步保留,以及只有在这些必做项执行完之后,才去评估上游 / 新 tag 自己新增或改动的 CI。 + +上游 reapply 时,本文本身就是 npm release 的唯一规则来源。 + +## Source of truth + +- 行为目标:本文 +- 改动来源:`git diff BASE_COMMIT..OLD_BRANCH` +- 默认基线:`BASE_COMMIT="$(git merge-base TAG OLD_BRANCH)"` +- 如果 merge-base 不可靠,必须显式传 `--old-base-tag` + +## Package identity + +这些命名默认直接沿用 `OLD_BRANCH` 当前已经确认过的实现,不重新发明: + +- 顶层发布包名:`@loongphy/codext` +- 平台包名: + - `@loongphy/codext-linux-x64` + - `@loongphy/codext-darwin-x64` + - `@loongphy/codext-darwin-arm64` + - `@loongphy/codext-win32-x64` +- 用户安装后的命令名:`codext` +- NPM 入口脚本:`codex-cli/bin/codex.js` +- 入口脚本最终拉起的原生二进制名: + - Unix: `codex` + - Windows: `codex.exe` + +这里要明确区分: + +- npm registry 上的顶层包名是 `@loongphy/codext` +- shell 里用户执行的命令名是 `codext` +- `codext` 对应的是 `@loongphy/codext` 包里的 JS launcher,不要求 vendor 内原生二进制也改名 +- 当前允许 launcher 最终去执行 vendor 内的 `codex` / `codex.exe` +- 只要 launcher 解析的是 `@loongphy/codext-*` 这些 scoped 平台包,而不是 `@openai/codex-*`,就不会和 `@openai/codex` 混用 +- 因此所有用户可见文案、CLI 提示、tooltips、README/技能文档里涉及安装后命令名时,也应统一写成 `codext`;例如应显示 `codext resume `,而不是 `codex resume ` + +如果 upstream / 新 tag 没有明确要求变更这些名称,就不要在 reapply 时改动它们。 + +## Must execute on the new tag branch + +只要 `OLD_BRANCH` 存在本文对应的 skill 规则,就视为启用这套 codext npm release reapply 
规则。此时在 `NEW_BRANCH` 上必须执行这些动作: + +1. 用 `OLD_BRANCH` 的 `.github/workflows/rust-release.yml` 覆盖 `NEW_BRANCH` 当前内容。 +2. 删除 `NEW_BRANCH` 下其他所有 `.github/workflows/*`,只保留 `rust-release.yml`。 +3. 直接复制这些路径的 `OLD_BRANCH` 版本: + - `.github/scripts/install-musl-build-tools.sh` + - `.github/scripts/rusty_v8_bazel.py` + - `codex-cli/package.json` + - `codex-cli/bin/codex.js` + - `codex-cli/bin/rg` + - `codex-cli/scripts/build_npm_package.py` + - `codex-cli/scripts/install_native_deps.py` + +上面这些都是必做项,不是建议,也不是“默认情况下尽量这样做”。执行完之前,不要去讨论新 tag 的结构要不要沿用。 + +这些路径里的内容按“整份文件/目录直接复制”处理,不单独重推导其中的细节。也就是说,下列内容都以 `OLD_BRANCH` 文件内容为准: + +- 当前分支已经改过的 workflow / job / step 配置 +- release workflow 名称、release tag/asset 命名、发包入口名称 +- NPM package name、platform package name、bin 名称、安装命令、dist-tag +- 为了发包链路落地而改过的脚本参数、环境变量名、文案 + +## Only review upstream/new-tag deltas after the mandatory steps + +先执行完上面的必做项。只有在这些动作都完成后,如果 `TAG` / upstream 相对旧基线额外带来了新的或变动的 CI / release 文件,才评估这些 upstream 变化要不要合并进当前分支方案。 + +默认处理顺序: + +- 先保留 `OLD_BRANCH` 已验证过的 CI / release / npm 配置,不因为 `TAG` 文件结构不同就回退成上游写法 +- 再单独看 `TAG` / upstream 新增或改动的 CI,决定是继续忽略、局部吸收,还是手动合并 +- 如果 upstream 变化没有影响当前分支既有发包链路,就保持当前分支方案不动 +- 如果必须吸收 upstream 变化,也只做最小合并;不要顺手改掉当前分支已经确认的包名、命令名、release 命名 + +可用的核对方式: + +- 旧分支已有改动:`git diff --name-status BASE_COMMIT..OLD_BRANCH -- .github/workflows/rust-release.yml .github/scripts/install-musl-build-tools.sh .github/scripts/rusty_v8_bazel.py codex-cli/package.json codex-cli/bin/codex.js codex-cli/bin/rg codex-cli/scripts/build_npm_package.py codex-cli/scripts/install_native_deps.py` +- 新 tag / upstream 额外变化:`git diff --name-status BASE_COMMIT..TAG -- .github/workflows/rust-release.yml .github/scripts/install-musl-build-tools.sh .github/scripts/rusty_v8_bazel.py codex-cli/package.json codex-cli/bin/codex.js codex-cli/bin/rg codex-cli/scripts/build_npm_package.py codex-cli/scripts/install_native_deps.py` + +## Ask the user only for real conflicts + +只有遇到这些“上游新增/变动 CI 后仍然无法安全决定”的情况,再向用户确认: + +- 想删掉的是不是用户仍在使用的发布入口 +- 
upstream 新 tag 新增了新的 release / CI 入口,但你无法判断它是必须接入还是历史残留 +- upstream 新 tag 明确要求当前分支现有发包命名、安装命令、dist-tag、release 产物矩阵发生变化 +- 旧分支与当前 tag 在发布平台范围上明显冲突 diff --git a/.agents/skills/codex-upstream-reapply/scripts/prepare_reimplementation_bundle.sh b/.agents/skills/codex-upstream-reapply/scripts/prepare_reimplementation_bundle.sh new file mode 100755 index 000000000000..4fbf149ae265 --- /dev/null +++ b/.agents/skills/codex-upstream-reapply/scripts/prepare_reimplementation_bundle.sh @@ -0,0 +1,331 @@ +#!/usr/bin/env bash +set -euo pipefail + +print_usage() { + cat <<'EOF' +prepare_reimplementation_bundle.sh + +Create a "re-implementation bundle" from an old customization branch: +- compute BASE_COMMIT vs a selected tag (or explicit old base tag) +- export changed file list + diff patch + commit list +- copy changed Markdown intent docs (and optionally all changed files) for offline reading + +Usage: + prepare_reimplementation_bundle.sh [options] + +Options: + --old-branch Old customization branch (default: current branch) + --base-ref Selected tag (or commit ref) used to infer merge-base (required) + --old-base-tag Explicit base tag for OLD_BRANCH (overrides merge-base inference) + --remote Remote for optional tag fetch (default: upstream) + --tag-pattern Only fetch tags matching this glob (default: rust-*) + --out Output directory (default: /tmp/codex-upstream-reapply///) + --copy-all Copy ALL changed files (ACMR) from old branch into bundle/old/ + --no-copy-docs Do not copy changed Markdown docs into bundle/old/ (docs are copied by default) + --no-fetch Do not run git fetch (default: fetch tags best-effort) + -h, --help Show help + +Outputs: + META.md, changed-files.txt, diff.patch, diffstat.txt, commits.txt, old/... 
+EOF +} + +die() { + echo "[ERROR] $*" >&2 + exit 1 +} + +timestamp_utc() { + date -u +"%Y%m%dT%H%M%SZ" +} + +is_markdown_path() { + local path="$1" + local lower="${path,,}" + case "${lower}" in + *.md|*.mdx|*.markdown) + return 0 + ;; + *) + return 1 + ;; + esac +} + +require_git_repo() { + git rev-parse --is-inside-work-tree >/dev/null 2>&1 || die "Not inside a git repository." +} + +ensure_no_in_progress_ops() { + git rev-parse -q --verify REBASE_HEAD >/dev/null 2>&1 && die "Rebase in progress. Finish it first (git rebase --continue/--abort)." + git rev-parse -q --verify CHERRY_PICK_HEAD >/dev/null 2>&1 && die "Cherry-pick in progress. Finish it first." + git rev-parse -q --verify MERGE_HEAD >/dev/null 2>&1 && die "Merge in progress. Finish it first." + return 0 +} + +require_ref() { + local ref="$1" + git rev-parse --verify "${ref}^{commit}" >/dev/null 2>&1 || die "Ref not found: ${ref}" +} + +ref_commit() { + git rev-parse "${1}^{commit}" +} + +tag_refspec() { + printf 'refs/tags/%s:refs/tags/%s\n' "${TAG_PATTERN}" "${TAG_PATTERN}" +} + +hint_tag_from_history() { + git describe --tags --abbrev=0 "${1}" 2>/dev/null || true +} + +default_reapply_action_for_path() { + local path="$1" + + case "${path}" in + AGENTS.md|README.md|CHANGED.md|.agents/skills|.agents/skills/*) + printf '%s\n' "auto carry-over by start_from_tag.sh" + ;; + .github/workflows/rust-release.yml|.github/scripts/install-musl-build-tools.sh|.github/scripts/rusty_v8_bazel.py|codex-cli/package.json|codex-cli/bin/codex.js|codex-cli/bin/rg|codex-cli/scripts/build_npm_package.py|codex-cli/scripts/install_native_deps.py) + printf '%s\n' "auto carry-over when npm/release reapply rules are enabled" + ;; + *) + printf '%s\n' "manual re-implementation required" + ;; + esac +} + +OLD_BRANCH="" +BASE_REF="" +OLD_BASE_TAG="" +REMOTE="upstream" +TAG_PATTERN="rust-*" +OUT_DIR="" +COPY_ALL=0 +COPY_DOCS=1 +NO_FETCH=0 + +while [[ $# -gt 0 ]]; do + case "$1" in + --old-branch) + OLD_BRANCH="${2:-}" + shift 2 + ;; 
+ --base-ref) + BASE_REF="${2:-}" + shift 2 + ;; + --old-base-tag) + OLD_BASE_TAG="${2:-}" + shift 2 + ;; + --remote) + REMOTE="${2:-}" + shift 2 + ;; + --tag-pattern) + TAG_PATTERN="${2:-}" + shift 2 + ;; + --out) + OUT_DIR="${2:-}" + shift 2 + ;; + --copy-all) + COPY_ALL=1 + shift + ;; + --no-copy-docs) + COPY_DOCS=0 + shift + ;; + --no-fetch) + NO_FETCH=1 + shift + ;; + -h|--help) + print_usage + exit 0 + ;; + *) + die "Unknown argument: $1 (use --help)" + ;; + esac +done + +require_git_repo +ensure_no_in_progress_ops + +if [[ -z "${OLD_BRANCH}" ]]; then + OLD_BRANCH="$(git rev-parse --abbrev-ref HEAD)" +fi + +[[ -n "${OLD_BRANCH}" ]] || die "--old-branch resolved to empty" +[[ "${OLD_BRANCH}" != "HEAD" ]] || die "Detached HEAD; pass --old-branch ." +[[ -n "${BASE_REF}" ]] || die "--base-ref is required (selected tag or commit ref)" +[[ -n "${REMOTE}" ]] || die "--remote must not be empty" + +if [[ "${NO_FETCH}" != "1" ]]; then + echo "[INFO] Fetching tags matching ${TAG_PATTERN} from ${REMOTE} (best-effort)..." + if ! git fetch "${REMOTE}" "$(tag_refspec)" --prune; then + echo "[WARN] git fetch failed; continuing with local refs." + fi +fi + +require_ref "${OLD_BRANCH}" +require_ref "${BASE_REF}" +[[ -z "${OLD_BASE_TAG}" ]] || require_ref "${OLD_BASE_TAG}" + +repo_root="$(git rev-parse --show-toplevel)" +repo_name="$(basename "${repo_root}")" +ts="$(timestamp_utc)" + +if [[ -z "${OUT_DIR}" ]]; then + OUT_DIR="/tmp/codex-upstream-reapply/${repo_name}/${OLD_BRANCH}/${ts}" +fi + +mkdir -p "${OUT_DIR}" + +old_commit="$(git rev-parse "${OLD_BRANCH}")" +base_ref_commit="$(git rev-parse "${BASE_REF}")" +merge_base="$(git merge-base "${BASE_REF}" "${OLD_BRANCH}" 2>/dev/null || true)" + +if [[ -z "${merge_base}" ]]; then + if [[ -n "${OLD_BASE_TAG}" ]]; then + echo "[WARN] Unable to compute merge-base between ${BASE_REF} and ${OLD_BRANCH}; will use --old-base-tag." + else + die "Unable to compute merge-base between ${BASE_REF} and ${OLD_BRANCH}. Provide --old-base-tag." 
+ fi +fi + +base_commit="${merge_base}" +old_base_tag_commit="" +hint_tag="$(hint_tag_from_history "${OLD_BRANCH}")" +hint_tag_commit="" + +if [[ -n "${hint_tag}" ]]; then + hint_tag_commit="$(ref_commit "${hint_tag}")" +fi + +if [[ -n "${OLD_BASE_TAG}" ]]; then + old_base_tag_commit="$(ref_commit "${OLD_BASE_TAG}")" + if ! git merge-base --is-ancestor "${old_base_tag_commit}" "${OLD_BRANCH}"; then + die "--old-base-tag ${OLD_BASE_TAG} is not an ancestor of ${OLD_BRANCH}" + fi + base_commit="${old_base_tag_commit}" +else + if [[ -n "${hint_tag_commit}" ]]; then + if ! git merge-base --is-ancestor "${hint_tag_commit}" "${base_commit}"; then + die "Inferred base (${base_commit}) conflicts with hint tag (${hint_tag}). Re-run with --old-base-tag ." + fi + fi +fi + +echo "[INFO] Repo: ${repo_root}" +echo "[INFO] Remote: ${REMOTE}" +echo "[INFO] Tag/Base: ${BASE_REF}" +echo "[INFO] OLD: ${OLD_BRANCH}" +echo "[INFO] OUT: ${OUT_DIR}" +echo "[INFO] merge-base ${merge_base}" + +cat > "${OUT_DIR}/META.md" < ${BASE_REF} +\`\`\` +EOF + +git diff --name-status "${base_commit}..${OLD_BRANCH}" > "${OUT_DIR}/changed-files.txt" +git diff --stat "${base_commit}..${OLD_BRANCH}" > "${OUT_DIR}/diffstat.txt" +git diff "${base_commit}..${OLD_BRANCH}" > "${OUT_DIR}/diff.patch" +git log --reverse --oneline "${base_commit}..${OLD_BRANCH}" > "${OUT_DIR}/commits.txt" + +{ + cat <<'EOF' +# Coverage Checklist + +Every path from `changed-files.txt` must be accounted for on `NEW_BRANCH`. + +- `auto carry-over by start_from_tag.sh`: the branch bootstrap script copies or refreshes it for you. +- `auto carry-over when npm/release reapply rules are enabled`: the path is copied or deleted automatically only when the npm/release rules apply. +- `manual re-implementation required`: you must port the behavior onto the new tag manually, or explicitly decide to drop it with a recorded reason. 
+ +Checklist: +EOF + echo + + while IFS=$'\t' read -r status path extra; do + [[ -n "${status}" ]] || continue + + if [[ "${status}" == R* || "${status}" == C* ]]; then + action="$(default_reapply_action_for_path "${extra}")" + printf -- '- [ ] %s %s -> %s — %s\n' "${status}" "${path}" "${extra}" "${action}" + else + action="$(default_reapply_action_for_path "${path}")" + printf -- '- [ ] %s %s — %s\n' "${status}" "${path}" "${action}" + fi + done < <(git diff --name-status --find-renames "${base_commit}..${OLD_BRANCH}") +} > "${OUT_DIR}/coverage-checklist.md" + +mkdir -p "${OUT_DIR}/old" + +changed_paths_cmd=(git diff --name-only -z --diff-filter=ACMR "${base_commit}..${OLD_BRANCH}") + +copied_count=0 +docs_count=0 +while IFS= read -r -d '' path; do + if [[ "${COPY_ALL}" == "1" ]]; then + : + else + if [[ "${COPY_DOCS}" != "1" ]]; then + continue + fi + if ! is_markdown_path "${path}"; then + continue + fi + docs_count=$((docs_count + 1)) + fi + + dest="${OUT_DIR}/old/${path}" + mkdir -p "$(dirname "${dest}")" + + if git show "${OLD_BRANCH}:${path}" > "${dest}"; then + copied_count=$((copied_count + 1)) + else + echo "[WARN] Failed to copy ${path} from ${OLD_BRANCH} (skipping)." 
+ rm -f "${dest}" + fi +done < <("${changed_paths_cmd[@]}") + +if [[ "${COPY_ALL}" == "1" ]]; then + echo "[OK] Copied ${copied_count} changed files into: ${OUT_DIR}/old/" +else + if [[ "${COPY_DOCS}" == "1" ]]; then + echo "[OK] Copied ${copied_count}/${docs_count} changed Markdown docs into: ${OUT_DIR}/old/" + else + echo "[OK] Bundle created (no docs copied): ${OUT_DIR}" + fi +fi + +echo "[OK] Bundle ready: ${OUT_DIR}" diff --git a/.agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh b/.agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh new file mode 100755 index 000000000000..4c24b928ac0f --- /dev/null +++ b/.agents/skills/codex-upstream-reapply/scripts/start_from_tag.sh @@ -0,0 +1,596 @@ +#!/usr/bin/env bash +set -euo pipefail + +print_usage() { + cat <<'EOF' +start_from_tag.sh + +Fetch tags, auto-select the latest stable Rust tag when --tag is omitted, generate a +re-implementation bundle from OLD_BRANCH, then create NEW_BRANCH from the selected tag. + +Usage: + start_from_tag.sh [options] + +Options: + --remote Remote to fetch tags from (default: upstream) + --tag-pattern Only fetch/list tags matching this glob (default: rust-*) + --tag Selected tag (optional; default: latest stable rust-vX.Y.Z) + --old-branch Old customization branch (default: current branch) + --new-branch New branch to create from tag (default: feat/) + --old-base-tag Explicit base tag for OLD_BRANCH (override base inference) + --out Bundle output directory (optional) + --copy-all Copy ALL changed files into bundle/old/ + --no-copy-docs Do not copy changed Markdown docs into bundle/old/ + --no-fetch Do not run git fetch (default: fetch tags best-effort) + -h, --help Show help +EOF +} + +die() { + echo "[ERROR] $*" >&2 + exit 1 +} + +timestamp_utc() { + date -u +"%Y%m%dT%H%M%SZ" +} + +require_git_repo() { + git rev-parse --is-inside-work-tree >/dev/null 2>&1 || die "Not inside a git repository." 
+} + +ensure_no_in_progress_ops() { + git rev-parse -q --verify REBASE_HEAD >/dev/null 2>&1 && die "Rebase in progress. Finish it first (git rebase --continue/--abort)." + git rev-parse -q --verify CHERRY_PICK_HEAD >/dev/null 2>&1 && die "Cherry-pick in progress. Finish it first." + git rev-parse -q --verify MERGE_HEAD >/dev/null 2>&1 && die "Merge in progress. Finish it first." + return 0 +} + +list_tags() { + git for-each-ref --sort=-creatordate \ + --format='%(creatordate:iso8601) %(refname:short) %(objectname:short)' \ + "refs/tags/${TAG_PATTERN}" +} + +is_stable_rust_tag() { + local tag_name="$1" + [[ "${tag_name}" =~ ^rust-v[0-9]+\.[0-9]+\.[0-9]+$ ]] +} + +latest_stable_tag() { + local tag_name="" + + while IFS= read -r tag_name; do + if is_stable_rust_tag "${tag_name}"; then + printf '%s\n' "${tag_name}" + return 0 + fi + done < <(git for-each-ref --sort=-v:refname --format='%(refname:short)' "refs/tags/${TAG_PATTERN}") + + return 1 +} + +tag_refspec() { + printf 'refs/tags/%s:refs/tags/%s\n' "${TAG_PATTERN}" "${TAG_PATTERN}" +} + +tag_matches_pattern() { + local tag_name="$1" + + case "${tag_name}" in + ${TAG_PATTERN}) + return 0 + ;; + *) + return 1 + ;; + esac +} + +copy_file_from_old_branch() { + local old_branch="$1" + local path="$2" + + if git cat-file -e "${old_branch}:${path}" 2>/dev/null; then + mkdir -p "$(dirname "${path}")" + git show "${old_branch}:${path}" > "${path}" + git add "${path}" + echo "[INFO] Copied ${path} from ${old_branch}" + else + echo "[WARN] ${path} not found in ${old_branch}; skipping." + fi +} + +copy_path_from_old_branch() { + local old_branch="$1" + local path="$2" + + if git cat-file -e "${old_branch}:${path}" 2>/dev/null; then + git checkout "${old_branch}" -- "${path}" + echo "[INFO] Copied ${path} from ${old_branch}" + else + echo "[WARN] ${path} not found in ${old_branch}; skipping." + fi +} + +copy_entry_from_old_branch() { + local old_branch="$1" + local path="$2" + local object_type="" + + if ! 
object_type="$(git cat-file -t "${old_branch}:${path}" 2>/dev/null)"; then + echo "[WARN] ${path} not found in ${old_branch}; skipping." + return 0 + fi + + case "${object_type}" in + blob) + copy_file_from_old_branch "${old_branch}" "${path}" + ;; + tree) + copy_path_from_old_branch "${old_branch}" "${path}" + ;; + *) + echo "[WARN] Unsupported git object type for ${path}: ${object_type}; skipping." + ;; + esac +} + +strip_existing_reapply_guardrails() { + local path="$1" + + awk ' + $0 == "" { + skip_marked = 1 + next + } + $0 == "" { + skip_marked = 0 + next + } + skip_marked { + next + } + /^## Temporary Reapply Guardrails \(`/ { + skip_legacy = 1 + next + } + skip_legacy && /^## / { + skip_legacy = 0 + } + skip_legacy { + next + } + { + print + } + ' "${path}" +} + +refresh_reapply_guardrails() { + local tag_name="$1" + local agents_path="AGENTS.md" + local tmp_file="" + local block="" + local first_line="" + + block="$(cat < +## Temporary Reapply Guardrails (\`${tag_name}\`) + +- Current work on this branch is an upstream reapply / re-implementation for \`${tag_name}\`. +- Only implementation code and necessary docs may change for this task. Do not add or modify tests or snapshot files. +- Do not run lint / format / auto-fix commands for this reapply, including \`cargo fmt\`, \`just fmt\`, \`cargo clippy\`, \`cargo clippy --fix\`, and \`just fix\`. +- Acceptance for this reapply is limited to the \`codex-upstream-reapply\` skill criteria, including \`cd codex-rs && cargo build -p codex-cli\` and \`cd codex-rs && cargo build -p codex-cli --release\`. 
+ +EOF +)" + + tmp_file="$(mktemp)" + if [[ -f "${agents_path}" ]]; then + strip_existing_reapply_guardrails "${agents_path}" > "${tmp_file}" + else + : > "${tmp_file}" + fi + + if [[ -s "${tmp_file}" ]]; then + IFS= read -r first_line < "${tmp_file}" || true + if [[ "${first_line}" == \#* ]]; then + { + printf '%s\n\n%s\n\n' "${first_line}" "${block}" + tail -n +2 "${tmp_file}" + } > "${agents_path}" + else + { + printf '%s\n\n' "${block}" + cat "${tmp_file}" + } > "${agents_path}" + fi + else + { + printf '# Rust/codex-rs\n\n%s\n' "${block}" + } > "${agents_path}" + fi + + rm -f "${tmp_file}" + git add "${agents_path}" + echo "[INFO] Refreshed AGENTS.md reapply guardrails for ${tag_name}" +} + +update_readme_build_badge() { + local tag_name="$1" + local readme_path="README.md" + local tmp_file="" + local short_commit="" + local badge_value="" + + [[ -f "${readme_path}" ]] || return 0 + short_commit="$(git rev-parse --short "${tag_name}^{commit}")" + badge_value="${tag_name}-${short_commit}" + tmp_file="$(mktemp)" + + perl -0pe 's|!\[Codex build\]\(https://img\.shields\.io/static/v1\?label=codex%20build&message=[^&)]*&color=2ea043\)|![Codex build](https://img.shields.io/static/v1?label=codex%20build&message='"${badge_value}"'&color=2ea043)|g' \ + "${readme_path}" > "${tmp_file}" + + if cmp -s "${readme_path}" "${tmp_file}"; then + rm -f "${tmp_file}" + return 0 + fi + + mv "${tmp_file}" "${readme_path}" + git add "${readme_path}" + echo "[INFO] Updated README.md build badge to ${badge_value}" +} + +path_exists_in_ref() { + local ref="$1" + local path="$2" + git cat-file -e "${ref}:${path}" 2>/dev/null +} + +matches_release_carry_over_path() { + local path="$1" + + case "${path}" in + .github/workflows/rust-release.yml) + return 0 + ;; + .github/scripts/install-musl-build-tools.sh) + return 0 + ;; + .github/scripts/rusty_v8_bazel.py) + return 0 + ;; + codex-cli/package.json) + return 0 + ;; + codex-cli/bin/codex.js) + return 0 + ;; + codex-cli/bin/rg) + return 0 + 
;; + codex-cli/scripts/build_npm_package.py) + return 0 + ;; + codex-cli/scripts/install_native_deps.py) + return 0 + ;; + *) + return 1 + ;; + esac +} + +remove_path_from_new_branch() { + local path="$1" + + if git ls-files --error-unmatch -- "${path}" >/dev/null 2>&1; then + git rm -r -f -- "${path}" >/dev/null + echo "[INFO] Removed ${path} to match OLD_BRANCH deletion" + elif [[ -e "${path}" || -L "${path}" ]]; then + rm -rf -- "${path}" + echo "[INFO] Removed untracked ${path} to match OLD_BRANCH deletion" + else + echo "[INFO] ${path} already absent; deletion already matches OLD_BRANCH" + fi +} + +apply_release_carry_over_changes() { + local base_commit="$1" + local old_branch="$2" + local status="" + local path="" + local old_path="" + local new_path="" + local matched=0 + + while IFS= read -r -d '' status; do + case "${status}" in + R*|C*) + IFS= read -r -d '' old_path || die "Malformed diff stream for ${status}" + IFS= read -r -d '' new_path || die "Malformed diff stream for ${status}" + + if ! matches_release_carry_over_path "${old_path}" && ! matches_release_carry_over_path "${new_path}"; then + continue + fi + + matched=1 + if [[ "${status}" == R* && "${old_path}" != "${new_path}" ]]; then + remove_path_from_new_branch "${old_path}" + fi + + if path_exists_in_ref "${old_branch}" "${new_path}"; then + copy_entry_from_old_branch "${old_branch}" "${new_path}" + else + remove_path_from_new_branch "${new_path}" + fi + ;; + *) + IFS= read -r -d '' path || die "Malformed diff stream for ${status}" + + if ! 
matches_release_carry_over_path "${path}"; then + continue + fi + + matched=1 + if path_exists_in_ref "${old_branch}" "${path}"; then + copy_entry_from_old_branch "${old_branch}" "${path}" + else + remove_path_from_new_branch "${path}" + fi + ;; + esac + done < <(git diff --name-status -z --find-renames "${base_commit}..${old_branch}") + + if [[ "${matched}" == "0" ]]; then + echo "[INFO] No npm/release/CI carry-over changes detected from ${old_branch}" + fi +} + +carry_over_commit_message() { + local old_branch="$1" + printf 'chore: copy reapply carry-over files from %s\n' "${old_branch}" +} + +resolve_carry_over_base_commit() { + if [[ -n "${OLD_BASE_TAG}" ]]; then + git rev-parse "${OLD_BASE_TAG}^{commit}" + return 0 + fi + + git merge-base "${TAG}" "${OLD_BRANCH}" 2>/dev/null || die "Unable to compute merge-base between ${TAG} and ${OLD_BRANCH}. Pass --old-base-tag." +} + +readonly REAPPLY_COPY_PATHS=( + "AGENTS.md" + "README.md" + "CHANGED.md" + ".agents/skills" +) + +readonly REQUIRED_NPM_RELEASE_COPY_PATHS=( + ".github/scripts/install-musl-build-tools.sh" + ".github/scripts/rusty_v8_bazel.py" + "codex-cli/package.json" + "codex-cli/bin/codex.js" + "codex-cli/bin/rg" + "codex-cli/scripts/build_npm_package.py" + "codex-cli/scripts/install_native_deps.py" +) + +readonly NPM_RELEASE_SKILL_REF=".agents/skills/codex-upstream-reapply/references/npm-release.md" + +has_npm_release_reapply() { + local old_branch="$1" + path_exists_in_ref "${old_branch}" "${NPM_RELEASE_SKILL_REF}" +} + +apply_required_npm_release_carry_over() { + local old_branch="$1" + local required_workflow=".github/workflows/rust-release.yml" + local workflow_path="" + local path="" + + path_exists_in_ref "${old_branch}" "${required_workflow}" \ + || die "OLD_BRANCH has ${NPM_RELEASE_SKILL_REF} but is missing ${required_workflow}" + + echo "[INFO] Applying mandatory npm-release carry-over from ${old_branch}..." 
+ copy_entry_from_old_branch "${old_branch}" "${required_workflow}" + + while IFS= read -r workflow_path; do + [[ -n "${workflow_path}" ]] || continue + [[ "${workflow_path}" == "${required_workflow}" ]] && continue + remove_path_from_new_branch "${workflow_path}" + done < <(git ls-files '.github/workflows/*') + + for path in "${REQUIRED_NPM_RELEASE_COPY_PATHS[@]}"; do + copy_entry_from_old_branch "${old_branch}" "${path}" + done +} + +REMOTE="upstream" +TAG_PATTERN="rust-*" +TAG="" +OLD_BRANCH="" +NEW_BRANCH="" +OLD_BASE_TAG="" +OUT_DIR="" +COPY_ALL=0 +COPY_DOCS=1 +NO_FETCH=0 +AUTO_NEW_BRANCH=0 + +while [[ $# -gt 0 ]]; do + case "$1" in + --remote) + REMOTE="${2:-}" + shift 2 + ;; + --tag) + TAG="${2:-}" + shift 2 + ;; + --tag-pattern) + TAG_PATTERN="${2:-}" + shift 2 + ;; + --old-branch) + OLD_BRANCH="${2:-}" + shift 2 + ;; + --new-branch) + NEW_BRANCH="${2:-}" + shift 2 + ;; + --old-base-tag) + OLD_BASE_TAG="${2:-}" + shift 2 + ;; + --out) + OUT_DIR="${2:-}" + shift 2 + ;; + --copy-all) + COPY_ALL=1 + shift + ;; + --no-copy-docs) + COPY_DOCS=0 + shift + ;; + --no-fetch) + NO_FETCH=1 + shift + ;; + -h|--help) + print_usage + exit 0 + ;; + *) + die "Unknown argument: $1 (use --help)" + ;; + esac +done + +require_git_repo +ensure_no_in_progress_ops + +if [[ "${NO_FETCH}" != "1" ]]; then + echo "[INFO] Fetching tags matching ${TAG_PATTERN} from ${REMOTE} (best-effort)..." + if ! git fetch "${REMOTE}" "$(tag_refspec)" --prune; then + echo "[WARN] git fetch failed; continuing with local refs." + fi +fi + +if [[ -z "${OLD_BRANCH}" ]]; then + OLD_BRANCH="$(git rev-parse --abbrev-ref HEAD)" +fi + +[[ -n "${OLD_BRANCH}" ]] || die "--old-branch resolved to empty" +[[ "${OLD_BRANCH}" != "HEAD" ]] || die "Detached HEAD; pass --old-branch ." + +if [[ -z "${TAG}" ]]; then + if ! TAG="$(latest_stable_tag)"; then + echo "[INFO] Available tags matching ${TAG_PATTERN} (newest first):" + list_tags | head -n 50 + die "No stable Rust release tag found under ${TAG_PATTERN}. 
Pass --tag explicitly." + fi + echo "[INFO] Auto-selected latest stable Rust tag: ${TAG}" +fi + +tag_name="${TAG#refs/tags/}" +tag_matches_pattern "${tag_name}" || die "Selected tag ${TAG} does not match --tag-pattern ${TAG_PATTERN}" +git show-ref --verify --quiet "refs/tags/${tag_name}" || die "Tag not found: ${TAG}. If it exists upstream but was filtered out, retry with --tag-pattern ." + +if [[ -z "${NEW_BRANCH}" ]]; then + NEW_BRANCH="feat/${tag_name}" + AUTO_NEW_BRANCH=1 +fi + +if [[ "${AUTO_NEW_BRANCH}" == "1" ]]; then + if [[ "${OLD_BRANCH}" == "${tag_name}" || "${OLD_BRANCH}" == "${NEW_BRANCH}" ]]; then + echo "[OK] Current branch ${OLD_BRANCH} already matches the latest stable tag ${tag_name}; nothing to do." + exit 0 + fi +fi + +if [[ "${NEW_BRANCH}" == "${OLD_BRANCH}" ]]; then + die "--new-branch must differ from --old-branch" +fi + +if git show-ref --verify --quiet "refs/heads/${NEW_BRANCH}"; then + die "Branch already exists: ${NEW_BRANCH}" +fi + +if [[ "$(git rev-parse --abbrev-ref HEAD)" == "${OLD_BRANCH}" ]]; then + if [[ -n "$(git status --porcelain)" ]]; then + die "Working tree is dirty on ${OLD_BRANCH}. Commit or stash first." 
+ fi +fi + +if [[ -z "${OUT_DIR}" ]]; then + repo_root="$(git rev-parse --show-toplevel)" + repo_name="$(basename "${repo_root}")" + ts="$(timestamp_utc)" + tag_dir="${TAG//\//-}" + OUT_DIR="/tmp/codex-upstream-reapply/${repo_name}/${OLD_BRANCH}/${tag_dir}/${ts}" +fi + +script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +bundle_script="${script_dir}/prepare_reimplementation_bundle.sh" + +bundle_args=(--old-branch "${OLD_BRANCH}" --base-ref "${TAG}" --remote "${REMOTE}" --out "${OUT_DIR}") +bundle_args+=(--tag-pattern "${TAG_PATTERN}") +if [[ -n "${OLD_BASE_TAG}" ]]; then + bundle_args+=(--old-base-tag "${OLD_BASE_TAG}") +fi +if [[ "${COPY_ALL}" == "1" ]]; then + bundle_args+=(--copy-all) +fi +if [[ "${COPY_DOCS}" != "1" ]]; then + bundle_args+=(--no-copy-docs) +fi +if [[ "${NO_FETCH}" == "1" ]]; then + bundle_args+=(--no-fetch) +fi + +echo "[INFO] Creating re-implementation bundle..." +"${bundle_script}" "${bundle_args[@]}" + +carry_over_base_commit="$(resolve_carry_over_base_commit)" + +echo "[INFO] Creating new branch ${NEW_BRANCH} from tag ${TAG}..." +git switch -c "${NEW_BRANCH}" "${TAG}" + +echo "[INFO] Copying fixed carry-over files from ${OLD_BRANCH}..." +for path in "${REAPPLY_COPY_PATHS[@]}"; do + copy_entry_from_old_branch "${OLD_BRANCH}" "${path}" +done +refresh_reapply_guardrails "${tag_name}" +update_readme_build_badge "${tag_name}" + +if has_npm_release_reapply "${OLD_BRANCH}"; then + apply_required_npm_release_carry_over "${OLD_BRANCH}" +fi + +echo "[INFO] Replaying npm/release/CI carry-over changes from git diff..." +apply_release_carry_over_changes "${carry_over_base_commit}" "${OLD_BRANCH}" + +if ! git diff --cached --quiet; then + carry_over_commit_msg="$(carry_over_commit_message "${OLD_BRANCH}")" + if git commit -m "${carry_over_commit_msg}"; then + echo "[OK] Committed reapply carry-over file copy" + else + echo "[WARN] Unable to commit copied carry-over files (git user.name/user.email?)." 
+ echo "[WARN] Commit manually with: git commit -m \"${carry_over_commit_msg}\"" + fi +fi + +echo "[OK] New branch created: ${NEW_BRANCH}" +echo "[OK] Bundle: ${OUT_DIR}" +echo +echo "Next:" +echo " - Read intent docs in ${OUT_DIR}/old/" +echo " - Use: git show ${OLD_BRANCH}:path/to/file" +echo " - Re-implement changes on ${NEW_BRANCH}" diff --git a/.agents/skills/status-header/SKILL.md b/.agents/skills/status-header/SKILL.md new file mode 100644 index 000000000000..4988adb3a6a1 --- /dev/null +++ b/.agents/skills/status-header/SKILL.md @@ -0,0 +1,110 @@ +--- +name: status-header +description: 'Enforce the standard TUI status header layout, icons, colors, and rate-limit summary format, and keep equivalent tui and tui_app_server surfaces aligned.' +--- + +# Status Header + +Apply these conventions every time the status header bar is implemented or modified. Treat this skill as defining user-visible behavior, not as permission to update only one code path. Use Stylize helpers and keep the segment order/formatting consistent. + +## Scope and synchronization + +- Before editing the header, identify every implementation that renders the same user-visible surface. In this repo that usually means both `codex-rs/tui` and `codex-rs/tui_app_server`. +- If both implementations expose the same header, keep them aligned. Do not mark the task complete after changing only one side unless the other side has been intentionally removed upstream or there is a documented reason not to sync it. +- Do not assume the classic `tui` is the runtime path users see. Check the current dispatch path for the target tag/config before deciding which implementation to edit. +- Match behavior first, not plumbing. The classic `tui` may use local polling, while `tui_app_server` may use bootstrap data or app-server events; either is acceptable as long as the rendered header stays behaviorally aligned and fresh. + +## Required color mapping + +- Model segment: icon + label in cyan. 
+- Directory segment: icon + path in yellow. +- Git segment: + - icon + branch in blue + - ahead count in green + - behind count in red + - changed count in yellow + - untracked count in red +- Rate limit segment: icon + summary in cyan. + - Summary format: `95% 23:19` +- Segment separator: " │ " in dim. + +## Reference snippet (behavioral template, adapt to local architecture) + +```rust +let mut spans: Vec> = Vec::new(); +let mut push_segment = |segment: Vec>| { + if !spans.is_empty() { + spans.push(" │ ".dim()); + } + spans.extend(segment); +}; + +if let Some(model_name) = self.model_name.as_ref() { + let label = format_model_label(model_name); + push_segment(vec!["\u{ee9c} ".cyan(), Span::from(label).cyan()]); +} + +if let Some(directory) = self.directory.as_ref() { + push_segment(vec![ + "\u{f07c} ".yellow(), + Span::from(directory.clone()).yellow(), + ]); +} + +if let Some(git_status) = self.git_status.as_ref() { + let mut segment = vec![ + "\u{f418} ".blue(), + Span::from(git_status.branch.clone()).blue(), + ]; + let ahead = git_status.ahead; + if ahead > 0 { + segment.push(Span::from(format!(" ↑{ahead}")).green()); + } + let behind = git_status.behind; + if behind > 0 { + segment.push(Span::from(format!(" ↓{behind}")).red()); + } + let changed = git_status.changed; + if changed > 0 { + segment.push(Span::from(format!(" +{changed}")).yellow()); + } + let untracked = git_status.untracked; + if untracked > 0 { + segment.push(Span::from(format!(" ?{untracked}")).red()); + } + push_segment(segment); +} + +if let Some(summary) = self.rate_limit_summary.as_ref() { + push_segment(vec!["\u{f464} ".cyan(), Span::from(summary.clone()).cyan()]); +} +``` + +Use the snippet as a template for segment order, icon usage, and color intent. Adapt field names, +ownership, helper selection, and refresh wiring to the local module instead of cargo-culting the +exact code. + +## Usage notes + +- Only change colors if this skill explicitly instructs it; do not introduce new colors. 
+- Keep the separator as dim to avoid competing with the segments. +- Prefer the exact icon codes shown above unless the feature removes a segment entirely. +- If a repo-level lint, style rule, or existing helper abstraction rejects the exact method calls in + the snippet, keep the same visual result using the repo-approved mechanism instead of forcing the + snippet verbatim. +- If a status-header segment depends on background-polled or async state (for example rate-limit + data fetched from `/usage`), the update path must explicitly request a redraw/frame after the + cached state changes so the header updates while the UI is otherwise idle. +- The redraw requirement applies to every implementation that renders the header. If `tui` and + `tui_app_server` both show the header, each side needs its own refresh path and redraw trigger. +- For `tui_app_server`, do not assume the rate-limit source is local `/usage` polling; event-driven + or bootstrap-fed data is acceptable if it keeps the header equivalently fresh. +- In this fork's app-server-backed `codex-rs/tui`, keep ChatGPT rate-limit snapshots fresh with a + 15-second background refresh cadence and redraw the UI after each successful snapshot update. +- Treat the directory segment as the session/thread `cwd`, not the transient `workdir` of an + individual tool call. Creating or using another git worktree does not change the header by + itself; the header switches only when the session `cwd` changes. +- When header git state is refreshed asynchronously, key it by the same `cwd` as the directory + segment. If the session `cwd` changes, retarget polling/refresh to the new `cwd`, clear stale git + state, and ignore late results from the previous `cwd` so an old worktree cannot overwrite the + new header context. 
diff --git a/.github/scripts/rusty_v8_bazel.py b/.github/scripts/rusty_v8_bazel.py index ec73e0e5a7f0..c11e67263e90 100644 --- a/.github/scripts/rusty_v8_bazel.py +++ b/.github/scripts/rusty_v8_bazel.py @@ -4,7 +4,6 @@ import argparse import gzip -import hashlib import re import shutil import subprocess @@ -13,16 +12,8 @@ import tomllib from pathlib import Path -from rusty_v8_module_bazel import ( - RustyV8ChecksumError, - check_module_bazel, - update_module_bazel, -) - ROOT = Path(__file__).resolve().parents[2] -MODULE_BAZEL = ROOT / "MODULE.bazel" -RUSTY_V8_CHECKSUMS_DIR = ROOT / "third_party" / "v8" MUSL_RUNTIME_ARCHIVE_LABELS = [ "@llvm//runtimes/libcxx:libcxx.static", "@llvm//runtimes/libcxx:libcxxabi.static", @@ -155,24 +146,6 @@ def resolved_v8_crate_version() -> str: return matches[0] -def rusty_v8_checksum_manifest_path(version: str) -> Path: - return RUSTY_V8_CHECKSUMS_DIR / f"rusty_v8_{version.replace('.', '_')}.sha256" - - -def command_version(version: str | None) -> str: - if version is not None: - return version - return resolved_v8_crate_version() - - -def command_manifest_path(manifest: Path | None, version: str) -> Path: - if manifest is None: - return rusty_v8_checksum_manifest_path(version) - if manifest.is_absolute(): - return manifest - return ROOT / manifest - - def staged_archive_name(target: str, source_path: Path) -> str: if source_path.suffix == ".lib": return f"rusty_v8_release_{target}.lib.gz" @@ -271,18 +244,8 @@ def stage_release_pair( shutil.copyfile(binding_path, staged_binding) - staged_checksums = output_dir / f"rusty_v8_release_{target}.sha256" - with staged_checksums.open("w", encoding="utf-8") as checksums: - for path in [staged_library, staged_binding]: - digest = hashlib.sha256() - with path.open("rb") as artifact: - for chunk in iter(lambda: artifact.read(1024 * 1024), b""): - digest.update(chunk) - checksums.write(f"{digest.hexdigest()} {path.name}\n") - print(staged_library) print(staged_binding) - print(staged_checksums) 
def parse_args() -> argparse.Namespace: @@ -301,24 +264,6 @@ def parse_args() -> argparse.Namespace: subparsers.add_parser("resolved-v8-crate-version") - check_module_bazel_parser = subparsers.add_parser("check-module-bazel") - check_module_bazel_parser.add_argument("--version") - check_module_bazel_parser.add_argument("--manifest", type=Path) - check_module_bazel_parser.add_argument( - "--module-bazel", - type=Path, - default=MODULE_BAZEL, - ) - - update_module_bazel_parser = subparsers.add_parser("update-module-bazel") - update_module_bazel_parser.add_argument("--version") - update_module_bazel_parser.add_argument("--manifest", type=Path) - update_module_bazel_parser.add_argument( - "--module-bazel", - type=Path, - default=MODULE_BAZEL, - ) - return parser.parse_args() @@ -335,22 +280,6 @@ def main() -> int: if args.command == "resolved-v8-crate-version": print(resolved_v8_crate_version()) return 0 - if args.command == "check-module-bazel": - version = command_version(args.version) - manifest_path = command_manifest_path(args.manifest, version) - try: - check_module_bazel(args.module_bazel, manifest_path, version) - except RustyV8ChecksumError as exc: - raise SystemExit(str(exc)) from exc - return 0 - if args.command == "update-module-bazel": - version = command_version(args.version) - manifest_path = command_manifest_path(args.manifest, version) - try: - update_module_bazel(args.module_bazel, manifest_path, version) - except RustyV8ChecksumError as exc: - raise SystemExit(str(exc)) from exc - return 0 raise SystemExit(f"unsupported command: {args.command}") diff --git a/.github/workflows/Dockerfile.bazel b/.github/workflows/Dockerfile.bazel deleted file mode 100644 index 51c199dcc3d8..000000000000 --- a/.github/workflows/Dockerfile.bazel +++ /dev/null @@ -1,20 +0,0 @@ -FROM ubuntu:24.04 - -# TODO(mbolin): Published to docker.io/mbolin491/codex-bazel:latest for -# initial debugging, but we should publish to a more proper location. 
-# -# docker buildx create --use -# docker buildx build --platform linux/amd64,linux/arm64 -f .github/workflows/Dockerfile.bazel -t mbolin491/codex-bazel:latest --push . - -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - curl git python3 ca-certificates && \ - rm -rf /var/lib/apt/lists/* - -# Install dotslash. -RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin - -# Ubuntu 24.04 ships with user 'ubuntu' already created with UID 1000. -USER ubuntu - -WORKDIR /workspace diff --git a/.github/workflows/README.md b/.github/workflows/README.md deleted file mode 100644 index d14817f002bc..000000000000 --- a/.github/workflows/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# Workflow Strategy - -The workflows in this directory are split so that pull requests get fast, review-friendly signal while `main` still gets the full cross-platform verification pass. - -## Pull Requests - -- `bazel.yml` is the main pre-merge verification path for Rust code. - It runs Bazel `test` and Bazel `clippy` on the supported Bazel targets, - including the generated Rust test binaries needed to lint inline `#[cfg(test)]` - code. -- `rust-ci.yml` keeps the Cargo-native PR checks intentionally small: - - `cargo fmt --check` - - `cargo shear` - - `argument-comment-lint` on Linux, macOS, and Windows - - `tools/argument-comment-lint` package tests when the lint or its workflow wiring changes - -## Post-Merge On `main` - -- `bazel.yml` also runs on pushes to `main`. - This re-verifies the merged Bazel path and helps keep the BuildBuddy caches warm. -- `rust-ci-full.yml` is the full Cargo-native verification workflow. 
- It keeps the heavier checks off the PR path while still validating them after merge: - - the full Cargo `clippy` matrix - - the full Cargo `nextest` matrix - - release-profile Cargo builds - - cross-platform `argument-comment-lint` - - Linux remote-env tests - -## Rule Of Thumb - -- If a build/test/clippy check can be expressed in Bazel, prefer putting the PR-time version in `bazel.yml`. -- Keep `rust-ci.yml` fast enough that it usually does not dominate PR latency. -- Reserve `rust-ci-full.yml` for heavyweight Cargo-native coverage that Bazel does not replace yet. diff --git a/.github/workflows/bazel.yml b/.github/workflows/bazel.yml deleted file mode 100644 index ef41330c4680..000000000000 --- a/.github/workflows/bazel.yml +++ /dev/null @@ -1,298 +0,0 @@ -name: Bazel - -# Note this workflow was originally derived from: -# https://github.com/cerisier/toolchains_llvm_bootstrapped/blob/main/.github/workflows/ci.yaml - -on: - pull_request: {} - push: - branches: - - main - workflow_dispatch: - -concurrency: - # Cancel previous actions from the same PR or branch except 'main' branch. - # See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info. - group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}} - cancel-in-progress: ${{ github.ref_name != 'main' }} -jobs: - test: - # Even though a no-cache-hit Windows build seems to exceed the 30-minute - # limit on occasion, the more common reason for exceeding the limit is a - # true test failure in a rust_test() marked "flaky" that gets run 3x. - # In that case, extra time generally does not give us more signal. - # - # Ultimately we need true distributed builds (e.g., - # https://www.buildbuddy.io/docs/rbe-setup/) to speed things up. 
- timeout-minutes: 30 - strategy: - fail-fast: false - matrix: - include: - # macOS - - os: macos-15-xlarge - target: aarch64-apple-darwin - - os: macos-15-xlarge - target: x86_64-apple-darwin - - # Linux - - os: ubuntu-24.04 - target: x86_64-unknown-linux-gnu - - os: ubuntu-24.04 - target: x86_64-unknown-linux-musl - # 2026-02-27 Bazel tests have been flaky on arm in CI. - # Disable until we can investigate and stabilize them. - # - os: ubuntu-24.04-arm - # target: aarch64-unknown-linux-musl - # - os: ubuntu-24.04-arm - # target: aarch64-unknown-linux-gnu - - # Windows - - os: windows-latest - target: x86_64-pc-windows-gnullvm - runs-on: ${{ matrix.os }} - - # Configure a human readable name for each job - name: Local Bazel build on ${{ matrix.os }} for ${{ matrix.target }} - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Check rusty_v8 MODULE.bazel checksums - if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu' - shell: bash - run: | - python3 .github/scripts/rusty_v8_bazel.py check-module-bazel - python3 -m unittest discover -s .github/scripts -p test_rusty_v8_bazel.py - - - name: Prepare Bazel CI - id: prepare_bazel - uses: ./.github/actions/prepare-bazel-ci - with: - target: ${{ matrix.target }} - cache-scope: bazel-${{ github.job }} - install-test-prereqs: "true" - - name: Check MODULE.bazel.lock is up to date - if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu' - shell: bash - run: ./scripts/check-module-bazel-lock.sh - - - name: bazel test //... - env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - shell: bash - run: | - bazel_targets=( - //... - # Keep standalone V8 library targets out of the ordinary Bazel CI - # path. V8 consumers under `//codex-rs/...` still participate - # transitively through `//...`. 
- -//third_party/v8:all - ) - - bazel_wrapper_args=( - --print-failed-test-logs - ) - bazel_test_args=( - test - --test_tag_filters=-argument-comment-lint - --test_verbose_timeout_warnings - --build_metadata=COMMIT_SHA=${GITHUB_SHA} - ) - if [[ "${RUNNER_OS}" == "Windows" ]]; then - bazel_wrapper_args+=(--windows-msvc-host-platform) - bazel_test_args+=(--jobs=8) - fi - - ./.github/scripts/run-bazel-ci.sh \ - "${bazel_wrapper_args[@]}" \ - -- \ - "${bazel_test_args[@]}" \ - -- \ - "${bazel_targets[@]}" - - - name: Upload Bazel execution logs - if: always() && !cancelled() - continue-on-error: true - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: bazel-execution-logs-test-${{ matrix.target }} - path: ${{ runner.temp }}/bazel-execution-logs - if-no-files-found: ignore - - # Save the job-scoped Bazel repository cache after cache misses. Keep the - # upload non-fatal so cache service issues never fail the job itself. - - name: Save bazel repository cache - if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ steps.prepare_bazel.outputs.repository-cache-path }} - key: ${{ steps.prepare_bazel.outputs.repository-cache-key }} - - clippy: - timeout-minutes: 30 - strategy: - fail-fast: false - matrix: - include: - # Keep Linux lint coverage on x64 and add the arm64 macOS path that - # the Bazel test job already exercises. Add Windows gnullvm as well - # so PRs get Bazel-native lint signal on the same Windows toolchain - # that the Bazel test job uses. 
- - os: ubuntu-24.04 - target: x86_64-unknown-linux-gnu - - os: macos-15-xlarge - target: aarch64-apple-darwin - - os: windows-latest - target: x86_64-pc-windows-gnullvm - runs-on: ${{ matrix.os }} - name: Bazel clippy on ${{ matrix.os }} for ${{ matrix.target }} - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Prepare Bazel CI - id: prepare_bazel - uses: ./.github/actions/prepare-bazel-ci - with: - target: ${{ matrix.target }} - cache-scope: bazel-${{ github.job }} - - - name: bazel build --config=clippy lint targets - env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - shell: bash - run: | - bazel_clippy_args=( - --config=clippy - --build_metadata=COMMIT_SHA=${GITHUB_SHA} - --build_metadata=TAG_job=clippy - ) - bazel_wrapper_args=() - if [[ "${RUNNER_OS}" == "Windows" ]]; then - # Keep this aligned with the Windows Bazel test job. With the - # default `//:local_windows` host platform, Windows `rust_test` - # targets such as `//codex-rs/core:core-all-test` can be skipped - # by `--skip_incompatible_explicit_targets`, which hides clippy - # diagnostics from integration-test modules. 
- bazel_wrapper_args+=(--windows-msvc-host-platform) - bazel_clippy_args+=(--skip_incompatible_explicit_targets) - fi - - bazel_target_lines="$(./scripts/list-bazel-clippy-targets.sh)" - bazel_targets=() - while IFS= read -r target; do - bazel_targets+=("${target}") - done <<< "${bazel_target_lines}" - - ./.github/scripts/run-bazel-ci.sh \ - --print-failed-action-summary \ - "${bazel_wrapper_args[@]}" \ - -- \ - build \ - "${bazel_clippy_args[@]}" \ - -- \ - "${bazel_targets[@]}" - - - name: Upload Bazel execution logs - if: always() && !cancelled() - continue-on-error: true - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: bazel-execution-logs-clippy-${{ matrix.target }} - path: ${{ runner.temp }}/bazel-execution-logs - if-no-files-found: ignore - - # Save the job-scoped Bazel repository cache after cache misses. Keep the - # upload non-fatal so cache service issues never fail the job itself. - - name: Save bazel repository cache - if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ steps.prepare_bazel.outputs.repository-cache-path }} - key: ${{ steps.prepare_bazel.outputs.repository-cache-key }} - - verify-release-build: - timeout-minutes: 30 - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu-24.04 - target: x86_64-unknown-linux-gnu - - os: macos-15-xlarge - target: aarch64-apple-darwin - - os: windows-latest - target: x86_64-pc-windows-gnullvm - runs-on: ${{ matrix.os }} - name: Verify release build on ${{ matrix.os }} for ${{ matrix.target }} - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Prepare Bazel CI - id: prepare_bazel - uses: ./.github/actions/prepare-bazel-ci - with: - target: ${{ matrix.target }} - cache-scope: bazel-${{ github.job }} - - - name: bazel build verify-release-build targets - 
env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - shell: bash - run: | - # This job exists to compile Rust code behind - # `cfg(not(debug_assertions))` so PR CI catches failures that would - # otherwise show up only in a release build. We do not need the full - # optimizer and debug-info work that normally comes with a release - # build to get that signal, so keep Bazel in `fastbuild` and disable - # Rust debug assertions explicitly. - bazel_wrapper_args=() - if [[ "${RUNNER_OS}" == "Windows" ]]; then - bazel_wrapper_args+=(--windows-msvc-host-platform) - fi - - bazel_build_args=( - --compilation_mode=fastbuild - --@rules_rust//rust/settings:extra_rustc_flag=-Cdebug-assertions=no - --@rules_rust//rust/settings:extra_exec_rustc_flag=-Cdebug-assertions=no - --build_metadata=COMMIT_SHA=${GITHUB_SHA} - --build_metadata=TAG_job=verify-release-build - --build_metadata=TAG_rust_debug_assertions=off - ) - - bazel_target_lines="$(bash ./scripts/list-bazel-release-targets.sh)" - bazel_targets=() - while IFS= read -r target; do - bazel_targets+=("${target}") - done <<< "${bazel_target_lines}" - - ./.github/scripts/run-bazel-ci.sh \ - "${bazel_wrapper_args[@]}" \ - -- \ - build \ - "${bazel_build_args[@]}" \ - -- \ - "${bazel_targets[@]}" - - - name: Upload Bazel execution logs - if: always() && !cancelled() - continue-on-error: true - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: bazel-execution-logs-verify-release-build-${{ matrix.target }} - path: ${{ runner.temp }}/bazel-execution-logs - if-no-files-found: ignore - - # Save the job-scoped Bazel repository cache after cache misses. Keep the - # upload non-fatal so cache service issues never fail the job itself. 
- - name: Save bazel repository cache - if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ steps.prepare_bazel.outputs.repository-cache-path }} - key: ${{ steps.prepare_bazel.outputs.repository-cache-key }} diff --git a/.github/workflows/blob-size-policy.yml b/.github/workflows/blob-size-policy.yml deleted file mode 100644 index b96cb98c30ab..000000000000 --- a/.github/workflows/blob-size-policy.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: blob-size-policy - -on: - pull_request: {} - -jobs: - check: - name: Blob size policy - runs-on: ubuntu-24.04 - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - with: - fetch-depth: 0 - - - name: Determine PR comparison range - id: range - shell: bash - run: | - set -euo pipefail - echo "base=$(git rev-parse HEAD^1)" >> "$GITHUB_OUTPUT" - echo "head=$(git rev-parse HEAD^2)" >> "$GITHUB_OUTPUT" - - - name: Check changed blob sizes - env: - BASE_SHA: ${{ steps.range.outputs.base }} - HEAD_SHA: ${{ steps.range.outputs.head }} - run: | - python3 scripts/check_blob_size.py \ - --base "$BASE_SHA" \ - --head "$HEAD_SHA" \ - --max-bytes 512000 \ - --allowlist .github/blob-size-allowlist.txt diff --git a/.github/workflows/cargo-deny.yml b/.github/workflows/cargo-deny.yml deleted file mode 100644 index 5294d0c7c59e..000000000000 --- a/.github/workflows/cargo-deny.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: cargo-deny - -on: - pull_request: - push: - branches: - - main - -jobs: - cargo-deny: - runs-on: ubuntu-latest - defaults: - run: - working-directory: ./codex-rs - steps: - - name: Checkout - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - - - name: Run cargo-deny - uses: 
EmbarkStudios/cargo-deny-action@82eb9f621fbc699dd0918f3ea06864c14cc84246 # v2 - with: - rust-version: stable - manifest-path: ./codex-rs/Cargo.toml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index bb5ed5b5bf2b..000000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: ci - -on: - pull_request: {} - push: { branches: [main] } - -jobs: - build-test: - runs-on: ubuntu-latest - timeout-minutes: 10 - env: - NODE_OPTIONS: --max-old-space-size=4096 - steps: - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Verify codex-rs Cargo manifests inherit workspace settings - run: python3 .github/scripts/verify_cargo_workspace_manifests.py - - - name: Verify codex-tui does not import codex-core directly - run: python3 .github/scripts/verify_tui_core_boundary.py - - - name: Verify Bazel clippy flags match Cargo workspace lints - run: python3 .github/scripts/verify_bazel_clippy_lints.py - - - name: Setup pnpm - uses: pnpm/action-setup@a8198c4bff370c8506180b035930dea56dbd5288 # v5 - with: - run_install: false - - - name: Setup Node.js - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 - with: - node-version: 22 - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - # stage_npm_packages.py requires DotSlash when staging releases. - - uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2 - - - name: Stage npm package - id: stage_npm_package - env: - GH_TOKEN: ${{ github.token }} - run: | - set -euo pipefail - # Use a recent successful rust-release run that published the full - # cross-platform native payload required by the npm package layout. - # Passing the workflow URL directly avoids relying on old rust-v* - # branches remaining discoverable via `gh run list --branch ...`. 
- CODEX_VERSION=0.125.0 - WORKFLOW_URL="https://github.com/openai/codex/actions/runs/24901475298" - OUTPUT_DIR="${RUNNER_TEMP}" - python3 ./scripts/stage_npm_packages.py \ - --release-version "$CODEX_VERSION" \ - --workflow-url "$WORKFLOW_URL" \ - --package codex \ - --output-dir "$OUTPUT_DIR" - PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz" - echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT" - - - name: Upload staged npm package artifact - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: codex-npm-staging - path: ${{ steps.stage_npm_package.outputs.pack_output }} - - - name: Ensure root README.md contains only ASCII and certain Unicode code points - run: ./scripts/asciicheck.py README.md - - name: Check root README ToC - run: python3 scripts/readme_toc.py README.md - - - name: Prettier (run `pnpm run format:fix` to fix) - run: pnpm run format diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml deleted file mode 100644 index b48fd36fea0f..000000000000 --- a/.github/workflows/cla.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: CLA Assistant -on: - issue_comment: - types: [created] - pull_request_target: - types: [opened, closed, synchronize] - -permissions: - actions: write - contents: write - pull-requests: write - statuses: write - -jobs: - cla: - # Only run the CLA assistant for the canonical openai repo so forks are not blocked - # and contributors who signed previously do not receive duplicate CLA notifications. - if: ${{ github.repository_owner == 'openai' }} - runs-on: ubuntu-latest - steps: - - uses: contributor-assistant/github-action@ca4a40a7d1004f18d9960b404b97e5f30a505a08 # v2.6.1 - # Run on close only if the PR was merged. This will lock the PR to preserve - # the CLA agreement. We don't want to lock PRs that have been closed without - # merging because the contributor may want to respond with additional comments. 
- # This action has a "lock-pullrequest-aftermerge" option that can be set to false, - # but that would unconditionally skip locking even in cases where the PR was merged. - if: | - ( - github.event_name == 'pull_request_target' && - ( - github.event.action == 'opened' || - github.event.action == 'synchronize' || - (github.event.action == 'closed' && github.event.pull_request.merged == true) - ) - ) || - ( - github.event_name == 'issue_comment' && - ( - github.event.comment.body == 'recheck' || - github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA' - ) - ) - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md - path-to-signatures: signatures/cla.json - branch: cla-signatures - allowlist: codex,dependabot,dependabot[bot],github-actions[bot] diff --git a/.github/workflows/close-stale-contributor-prs.yml b/.github/workflows/close-stale-contributor-prs.yml deleted file mode 100644 index 8fb51327720d..000000000000 --- a/.github/workflows/close-stale-contributor-prs.yml +++ /dev/null @@ -1,107 +0,0 @@ -name: Close stale contributor PRs - -on: - workflow_dispatch: - schedule: - - cron: "0 6 * * *" - -permissions: - contents: read - issues: write - pull-requests: write - -jobs: - close-stale-contributor-prs: - # Prevent scheduled runs on forks - if: github.repository == 'openai/codex' - runs-on: ubuntu-latest - steps: - - name: Close inactive PRs from contributors - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const DAYS_INACTIVE = 14; - const cutoff = new Date(Date.now() - DAYS_INACTIVE * 24 * 60 * 60 * 1000); - const { owner, repo } = context.repo; - const dryRun = false; - const stalePrs = []; - - core.info(`Dry run mode: ${dryRun}`); - - const prs = await github.paginate(github.rest.pulls.list, { - owner, - repo, - state: "open", - per_page: 100, - sort: "updated", 
- direction: "asc", - }); - - for (const pr of prs) { - const lastUpdated = new Date(pr.updated_at); - if (lastUpdated > cutoff) { - core.info(`PR ${pr.number} is fresh`); - continue; - } - - if (!pr.user || pr.user.type !== "User") { - core.info(`PR ${pr.number} wasn't created by a user`); - continue; - } - - let permission; - try { - const permissionResponse = await github.rest.repos.getCollaboratorPermissionLevel({ - owner, - repo, - username: pr.user.login, - }); - permission = permissionResponse.data.permission; - } catch (error) { - if (error.status === 404) { - core.info(`Author ${pr.user.login} is not a collaborator; skipping #${pr.number}`); - continue; - } - throw error; - } - - const hasContributorAccess = ["admin", "maintain", "write"].includes(permission); - if (!hasContributorAccess) { - core.info(`Author ${pr.user.login} has ${permission} access; skipping #${pr.number}`); - continue; - } - - stalePrs.push(pr); - } - - if (!stalePrs.length) { - core.info("No stale contributor pull requests found."); - return; - } - - for (const pr of stalePrs) { - const issue_number = pr.number; - const closeComment = `Closing this pull request because it has had no updates for more than ${DAYS_INACTIVE} days. 
If you plan to continue working on it, feel free to reopen or open a new PR.`; - - if (dryRun) { - core.info(`[dry-run] Would close contributor PR #${issue_number} from ${pr.user.login}`); - continue; - } - - await github.rest.issues.createComment({ - owner, - repo, - issue_number, - body: closeComment, - }); - - await github.rest.pulls.update({ - owner, - repo, - pull_number: issue_number, - state: "closed", - }); - - core.info(`Closed contributor PR #${issue_number} from ${pr.user.login}`); - } diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml deleted file mode 100644 index 8e9f701eec81..000000000000 --- a/.github/workflows/codespell.yml +++ /dev/null @@ -1,27 +0,0 @@ -# Codespell configuration is within .codespellrc ---- -name: Codespell - -on: - push: - branches: [main] - pull_request: - branches: [main] - -permissions: - contents: read - -jobs: - codespell: - name: Check for spelling errors - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - name: Annotate locations with typos - uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1 - - name: Codespell - uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2 - with: - ignore_words_file: .codespellignore diff --git a/.github/workflows/issue-deduplicator.yml b/.github/workflows/issue-deduplicator.yml deleted file mode 100644 index 17306d72e128..000000000000 --- a/.github/workflows/issue-deduplicator.yml +++ /dev/null @@ -1,402 +0,0 @@ -name: Issue Deduplicator - -on: - issues: - types: - - opened - - labeled - -jobs: - gather-duplicates-all: - name: Identify potential duplicates (all issues) - # Prevent runs on forks (requires OpenAI API key, wastes Actions minutes) - if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate')) - 
runs-on: ubuntu-latest - permissions: - contents: read - outputs: - issues_json: ${{ steps.normalize-all.outputs.issues_json }} - reason: ${{ steps.normalize-all.outputs.reason }} - has_matches: ${{ steps.normalize-all.outputs.has_matches }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Prepare Codex inputs - env: - GH_TOKEN: ${{ github.token }} - REPO: ${{ github.repository }} - ISSUE_NUMBER: ${{ github.event.issue.number }} - run: | - set -eo pipefail - - CURRENT_ISSUE_FILE=codex-current-issue.json - EXISTING_ALL_FILE=codex-existing-issues-all.json - - gh issue list --repo "$REPO" \ - --json number,title,body,createdAt,updatedAt,state,labels \ - --limit 1000 \ - --state all \ - --search "sort:created-desc" \ - | jq '[.[] | { - number, - title, - body: ((.body // "")[0:4000]), - createdAt, - updatedAt, - state, - labels: ((.labels // []) | map(.name)) - }]' \ - > "$EXISTING_ALL_FILE" - - gh issue view "$ISSUE_NUMBER" \ - --repo "$REPO" \ - --json number,title,body \ - | jq '{number, title, body: ((.body // "")[0:4000])}' \ - > "$CURRENT_ISSUE_FILE" - - echo "Prepared duplicate detection input files." - echo "all_issue_count=$(jq 'length' "$EXISTING_ALL_FILE")" - - # Prompt instructions are intentionally inline in this workflow. The old - # .github/prompts/issue-deduplicator.txt file is obsolete and removed. - - id: codex-all - name: Find duplicates (pass 1, all issues) - uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7 - with: - openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }} - allow-users: "*" - prompt: | - You are an assistant that triages new GitHub issues by identifying potential duplicates. - - You will receive the following JSON files located in the current working directory: - - `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body). 
- - `codex-existing-issues-all.json`: JSON array of recent issues with states, timestamps, and labels. - - Instructions: - - Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request. - - Prioritize concrete overlap in symptoms, reproduction details, error signatures, and user intent. - - Prefer active unresolved issues when confidence is similar. - - Closed issues can still be valid duplicates if they clearly match. - - Return fewer matches rather than speculative ones. - - If confidence is low, return an empty list. - - Include at most five issue numbers. - - After analysis, provide a short reason for your decision. - - output-schema: | - { - "type": "object", - "properties": { - "issues": { - "type": "array", - "items": { - "type": "string" - } - }, - "reason": { "type": "string" } - }, - "required": ["issues", "reason"], - "additionalProperties": false - } - - - id: normalize-all - name: Normalize pass 1 output - env: - CODEX_OUTPUT: ${{ steps.codex-all.outputs.final-message }} - CURRENT_ISSUE_NUMBER: ${{ github.event.issue.number }} - run: | - set -eo pipefail - - raw=${CODEX_OUTPUT//$'\r'/} - parsed=false - issues='[]' - reason='' - - if [ -n "$raw" ] && printf '%s' "$raw" | jq -e 'type == "object" and (.issues | type == "array")' >/dev/null 2>&1; then - parsed=true - issues=$(printf '%s' "$raw" | jq -c '[.issues[] | tostring]') - reason=$(printf '%s' "$raw" | jq -r '.reason // ""') - else - reason='Pass 1 output was empty or invalid JSON.' - fi - - filtered=$(jq -cn --argjson issues "$issues" --arg current "$CURRENT_ISSUE_NUMBER" '[ - $issues[] - | tostring - | select(. != $current) - ] | reduce .[] as $issue ([]; if index($issue) then . else . 
+ [$issue] end) | .[:5]') - - has_matches=false - if [ "$(jq 'length' <<< "$filtered")" -gt 0 ]; then - has_matches=true - fi - - echo "Pass 1 parsed: $parsed" - echo "Pass 1 matches after filtering: $(jq 'length' <<< "$filtered")" - echo "Pass 1 reason: $reason" - - { - echo "issues_json=$filtered" - echo "reason<> "$GITHUB_OUTPUT" - - gather-duplicates-open: - name: Identify potential duplicates (open issues fallback) - # Pass 1 may drop sudo on the runner, so run the fallback in a fresh job. - needs: gather-duplicates-all - if: ${{ needs.gather-duplicates-all.result == 'success' && needs.gather-duplicates-all.outputs.has_matches != 'true' }} - runs-on: ubuntu-latest - permissions: - contents: read - outputs: - issues_json: ${{ steps.normalize-open.outputs.issues_json }} - reason: ${{ steps.normalize-open.outputs.reason }} - has_matches: ${{ steps.normalize-open.outputs.has_matches }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Prepare Codex inputs - env: - GH_TOKEN: ${{ github.token }} - REPO: ${{ github.repository }} - ISSUE_NUMBER: ${{ github.event.issue.number }} - run: | - set -eo pipefail - - CURRENT_ISSUE_FILE=codex-current-issue.json - EXISTING_OPEN_FILE=codex-existing-issues-open.json - - gh issue list --repo "$REPO" \ - --json number,title,body,createdAt,updatedAt,state,labels \ - --limit 1000 \ - --state open \ - --search "sort:created-desc" \ - | jq '[.[] | { - number, - title, - body: ((.body // "")[0:4000]), - createdAt, - updatedAt, - state, - labels: ((.labels // []) | map(.name)) - }]' \ - > "$EXISTING_OPEN_FILE" - - gh issue view "$ISSUE_NUMBER" \ - --repo "$REPO" \ - --json number,title,body \ - | jq '{number, title, body: ((.body // "")[0:4000])}' \ - > "$CURRENT_ISSUE_FILE" - - echo "Prepared fallback duplicate detection input files." 
- echo "open_issue_count=$(jq 'length' "$EXISTING_OPEN_FILE")" - - - id: codex-open - name: Find duplicates (pass 2, open issues) - uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7 - with: - openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }} - allow-users: "*" - prompt: | - You are an assistant that triages new GitHub issues by identifying potential duplicates. - - This is a fallback pass because a broad search did not find convincing matches. - - You will receive the following JSON files located in the current working directory: - - `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body). - - `codex-existing-issues-open.json`: JSON array of open issues only. - - Instructions: - - Search only these active unresolved issues for duplicates of the current issue. - - Prioritize concrete overlap in symptoms, reproduction details, error signatures, and user intent. - - Prefer fewer, higher-confidence matches. - - If confidence is low, return an empty list. - - Include at most five issue numbers. - - After analysis, provide a short reason for your decision. - - output-schema: | - { - "type": "object", - "properties": { - "issues": { - "type": "array", - "items": { - "type": "string" - } - }, - "reason": { "type": "string" } - }, - "required": ["issues", "reason"], - "additionalProperties": false - } - - - id: normalize-open - name: Normalize pass 2 output - env: - CODEX_OUTPUT: ${{ steps.codex-open.outputs.final-message }} - CURRENT_ISSUE_NUMBER: ${{ github.event.issue.number }} - run: | - set -eo pipefail - - raw=${CODEX_OUTPUT//$'\r'/} - parsed=false - issues='[]' - reason='' - - if [ -n "$raw" ] && printf '%s' "$raw" | jq -e 'type == "object" and (.issues | type == "array")' >/dev/null 2>&1; then - parsed=true - issues=$(printf '%s' "$raw" | jq -c '[.issues[] | tostring]') - reason=$(printf '%s' "$raw" | jq -r '.reason // ""') - else - reason='Pass 2 output was empty or invalid JSON.' 
- fi - - filtered=$(jq -cn --argjson issues "$issues" --arg current "$CURRENT_ISSUE_NUMBER" '[ - $issues[] - | tostring - | select(. != $current) - ] | reduce .[] as $issue ([]; if index($issue) then . else . + [$issue] end) | .[:5]') - - has_matches=false - if [ "$(jq 'length' <<< "$filtered")" -gt 0 ]; then - has_matches=true - fi - - echo "Pass 2 parsed: $parsed" - echo "Pass 2 matches after filtering: $(jq 'length' <<< "$filtered")" - echo "Pass 2 reason: $reason" - - { - echo "issues_json=$filtered" - echo "reason<> "$GITHUB_OUTPUT" - - select-final: - name: Select final duplicate set - needs: - - gather-duplicates-all - - gather-duplicates-open - if: ${{ always() && needs.gather-duplicates-all.result == 'success' && (needs.gather-duplicates-open.result == 'success' || needs.gather-duplicates-open.result == 'skipped') }} - runs-on: ubuntu-latest - permissions: - contents: read - outputs: - codex_output: ${{ steps.select-final.outputs.codex_output }} - steps: - - id: select-final - name: Select final duplicate set - env: - PASS1_ISSUES: ${{ needs.gather-duplicates-all.outputs.issues_json }} - PASS1_REASON: ${{ needs.gather-duplicates-all.outputs.reason }} - PASS2_ISSUES: ${{ needs.gather-duplicates-open.outputs.issues_json }} - PASS2_REASON: ${{ needs.gather-duplicates-open.outputs.reason }} - PASS1_HAS_MATCHES: ${{ needs.gather-duplicates-all.outputs.has_matches }} - PASS2_HAS_MATCHES: ${{ needs.gather-duplicates-open.outputs.has_matches }} - run: | - set -eo pipefail - - selected_issues='[]' - selected_reason='No plausible duplicates found.' 
- selected_pass='none' - - if [ "$PASS1_HAS_MATCHES" = "true" ]; then - selected_issues=${PASS1_ISSUES:-'[]'} - selected_reason=${PASS1_REASON:-'Pass 1 found duplicates.'} - selected_pass='all' - fi - - if [ "$PASS2_HAS_MATCHES" = "true" ]; then - selected_issues=${PASS2_ISSUES:-'[]'} - selected_reason=${PASS2_REASON:-'Pass 2 found duplicates.'} - selected_pass='open-fallback' - fi - - final_json=$(jq -cn \ - --argjson issues "$selected_issues" \ - --arg reason "$selected_reason" \ - --arg pass "$selected_pass" \ - '{issues: $issues, reason: $reason, pass: $pass}') - - echo "Final pass used: $selected_pass" - echo "Final duplicate count: $(jq '.issues | length' <<< "$final_json")" - echo "Final reason: $(jq -r '.reason' <<< "$final_json")" - - { - echo "codex_output<> "$GITHUB_OUTPUT" - - comment-on-issue: - name: Comment with potential duplicates - needs: select-final - if: ${{ always() && needs.select-final.result == 'success' }} - runs-on: ubuntu-latest - permissions: - contents: read - issues: write - steps: - - name: Comment on issue - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - CODEX_OUTPUT: ${{ needs.select-final.outputs.codex_output }} - with: - github-token: ${{ github.token }} - script: | - const raw = process.env.CODEX_OUTPUT ?? ''; - let parsed; - try { - parsed = JSON.parse(raw); - } catch (error) { - core.info(`Codex output was not valid JSON. Raw output: ${raw}`); - core.info(`Parse error: ${error.message}`); - return; - } - - const issues = Array.isArray(parsed?.issues) ? parsed.issues : []; - const currentIssueNumber = String(context.payload.issue.number); - const passUsed = typeof parsed?.pass === 'string' ? parsed.pass : 'unknown'; - const reason = typeof parsed?.reason === 'string' ? 
parsed.reason : ''; - - console.log(`Current issue number: ${currentIssueNumber}`); - console.log(`Pass used: ${passUsed}`); - if (reason) { - console.log(`Reason: ${reason}`); - } - console.log(issues); - - const filteredIssues = [...new Set(issues.map((value) => String(value)))].filter((value) => value !== currentIssueNumber).slice(0, 5); - - if (filteredIssues.length === 0) { - core.info('Codex reported no potential duplicates.'); - return; - } - - const lines = [ - 'Potential duplicates detected. Please review them and close your issue if it is a duplicate.', - '', - ...filteredIssues.map((value) => `- #${String(value)}`), - '', - '*Powered by [Codex Action](https://github.com/openai/codex-action)*']; - - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.issue.number, - body: lines.join("\n"), - }); - - - name: Remove codex-deduplicate label - if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate' }} - env: - GH_TOKEN: ${{ github.token }} - GH_REPO: ${{ github.repository }} - ISSUE_NUMBER: ${{ github.event.issue.number }} - run: | - gh issue edit "$ISSUE_NUMBER" --remove-label codex-deduplicate || true - echo "Attempted to remove label: codex-deduplicate" diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/issue-labeler.yml deleted file mode 100644 index 8fbaed5636e6..000000000000 --- a/.github/workflows/issue-labeler.yml +++ /dev/null @@ -1,143 +0,0 @@ -name: Issue Labeler - -on: - issues: - types: - - opened - - labeled - -jobs: - gather-labels: - name: Generate label suggestions - # Prevent runs on forks (requires OpenAI API key, wastes Actions minutes) - if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label')) - runs-on: ubuntu-latest - permissions: - contents: read - outputs: - codex_output: ${{ 
steps.codex.outputs.final-message }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - id: codex - uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7 - with: - openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }} - allow-users: "*" - prompt: | - You are an assistant that reviews GitHub issues for the repository. - - Your job is to choose the most appropriate labels for the issue described later in this prompt. - Follow these rules: - - - Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure. - 1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth). - 2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks. - 3. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests). - - - If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to. - 1. CLI — the Codex command line interface. - 2. extension — VS Code (or other IDE) extension-specific issues. - 3. app - Issues related to the Codex desktop application. - 4. codex-web — Issues targeting the Codex web UI/Cloud experience. - 5. github-action — Issues with the Codex GitHub action. - 6. iOS — Issues with the Codex iOS app. - - - Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones. - - For agent-area issues, prefer the most specific applicable label. Use "agent" only as a fallback for agent-related issues that do not fit a more specific agent-area label. Prefer "app-server" over "session" or "config" when the issue is about app-server protocol, API, RPC, schema, launch, or bridge behavior. - 1. 
windows-os — Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures). - 2. mcp — Topics involving Model Context Protocol servers/clients. - 3. mcp-server — Problems related to the codex mcp-server command, where codex runs as an MCP server. - 4. azure — Problems or requests tied to Azure OpenAI deployments. - 5. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies. - 6. code-review — Issues related to the code review feature or functionality. - 7. safety-check - Issues related to cyber risk detection or trusted access verification. - 8. auth - Problems related to authentication, login, or access tokens. - 9. exec - Problems related to the "codex exec" command or functionality. - 10. hooks - Problems related to event hooks - 11. context - Problems related to compaction, context windows, or available context reporting. - 12. skills - Problems related to skills or plugins - 13. custom-model - Problems that involve using custom model providers, local models, or OSS models. - 14. rate-limits - Problems related to token limits, rate limits, or token usage reporting. - 15. sandbox - Issues related to local sandbox environments or tool call approvals to override sandbox restrictions. - 16. tool-calls - Problems related to specific tool call invocations including unexpected errors, failures, or hangs. - 17. TUI - Problems with the terminal user interface (TUI) including keyboard shortcuts, copy & pasting, menus, or screen update issues. - 18. app-server - Issues involving the app-server protocol or interfaces, including SDK/API payloads, thread/* and turn/* RPCs, app-server launch behavior, external app/controller bridges, and app-server protocol/schema behavior. - 19. 
connectivity - Network connectivity or endpoint issues, including reconnecting messages, stream dropped/disconnected errors, websocket/SSE/transport failures, timeout/network/VPN/proxy/API endpoint failures, and related retry behavior. - 20. subagent - Issues involving subagents, sub-agents, or multi-agent behavior, including spawn_agent, wait_agent, close_agent, worker/explorer roles, delegation, agent teams, lifecycle, model/config inheritance, quotas, and orchestration. - 21. session - Issues involving session or thread management, including resume, fork, archive, rename/title, thread history, rollout persistence, compaction, checkpoints, retention, and cross-session state. - 22. config - Issues involving config.toml, config keys, config key merging, config updates, profiles, hooks config, project config, agent role TOMLs, instruction/personality config, and config schema behavior. - 23. plan - Issues involving plan mode, planning workflows, or plan-specific tools/behavior. - 24. agent - Fallback only for core agent loop or agent-related issues that do not fit app-server, connectivity, subagent, session, config, or plan. 
- - Issue number: ${{ github.event.issue.number }} - - Issue title: - ${{ github.event.issue.title }} - - Issue body: - ${{ github.event.issue.body }} - - Repository full name: - ${{ github.repository }} - - output-schema: | - { - "type": "object", - "properties": { - "labels": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": ["labels"], - "additionalProperties": false - } - - apply-labels: - name: Apply labels from Codex output - needs: gather-labels - if: ${{ needs.gather-labels.result != 'skipped' }} - runs-on: ubuntu-latest - permissions: - contents: read - issues: write - env: - GH_TOKEN: ${{ github.token }} - GH_REPO: ${{ github.repository }} - ISSUE_NUMBER: ${{ github.event.issue.number }} - CODEX_OUTPUT: ${{ needs.gather-labels.outputs.codex_output }} - steps: - - name: Apply labels - run: | - json=${CODEX_OUTPUT//$'\r'/} - if [ -z "$json" ]; then - echo "Codex produced no output. Skipping label application." - exit 0 - fi - - if ! printf '%s' "$json" | jq -e 'type == "object" and (.labels | type == "array")' >/dev/null 2>&1; then - echo "Codex output did not include a labels array. Raw output: $json" - exit 0 - fi - - labels=$(printf '%s' "$json" | jq -r '.labels[] | tostring') - if [ -z "$labels" ]; then - echo "Codex returned an empty array. Nothing to do." 
- exit 0 - fi - - cmd=(gh issue edit "$ISSUE_NUMBER") - while IFS= read -r label; do - cmd+=(--add-label "$label") - done <<< "$labels" - - "${cmd[@]}" || true - - - name: Remove codex-label trigger - if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-label' }} - run: | - gh issue edit "$ISSUE_NUMBER" --remove-label codex-label || true - echo "Attempted to remove label: codex-label" diff --git a/.github/workflows/rust-ci-full.yml b/.github/workflows/rust-ci-full.yml deleted file mode 100644 index 7e4d3a8949d1..000000000000 --- a/.github/workflows/rust-ci-full.yml +++ /dev/null @@ -1,770 +0,0 @@ -name: rust-ci-full -on: - push: - branches: - - main - - "**full-ci**" - workflow_dispatch: - -# CI builds in debug (dev) for faster signal. - -jobs: - # --- CI that doesn't need specific targets --------------------------------- - general: - name: Format / etc - runs-on: ubuntu-24.04 - defaults: - run: - working-directory: codex-rs - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - components: rustfmt - - name: cargo fmt - run: cargo fmt -- --config imports_granularity=Item --check - - cargo_shear: - name: cargo shear - runs-on: ubuntu-24.04 - defaults: - run: - working-directory: codex-rs - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2 - with: - tool: cargo-shear - version: 1.5.1 - - name: cargo shear - run: cargo shear - - argument_comment_lint_package: - name: Argument comment lint package - runs-on: ubuntu-24.04 - env: - CARGO_DYLINT_VERSION: 5.0.0 - DYLINT_LINK_VERSION: 5.0.0 - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: 
dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - toolchain: nightly-2025-09-18 - components: llvm-tools-preview, rustc-dev, rust-src - - name: Cache cargo-dylint tooling - id: cargo_dylint_cache - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cargo/bin/cargo-dylint - ~/.cargo/bin/dylint-link - ~/.cargo/registry/index - ~/.cargo/registry/cache - ~/.cargo/git/db - key: argument-comment-lint-${{ runner.os }}-${{ env.CARGO_DYLINT_VERSION }}-${{ env.DYLINT_LINK_VERSION }}-${{ hashFiles('tools/argument-comment-lint/Cargo.lock', 'tools/argument-comment-lint/rust-toolchain', '.github/workflows/rust-ci.yml', '.github/workflows/rust-ci-full.yml') }} - - name: Install cargo-dylint tooling - if: ${{ steps.cargo_dylint_cache.outputs.cache-hit != 'true' }} - shell: bash - run: | - cargo install --locked cargo-dylint --version "$CARGO_DYLINT_VERSION" - cargo install --locked dylint-link --version "$DYLINT_LINK_VERSION" - - name: Check Python wrapper syntax - run: python3 -m py_compile tools/argument-comment-lint/wrapper_common.py tools/argument-comment-lint/run.py tools/argument-comment-lint/run-prebuilt-linter.py tools/argument-comment-lint/test_wrapper_common.py - - name: Test Python wrapper helpers - run: python3 -m unittest discover -s tools/argument-comment-lint -p 'test_*.py' - - name: Test argument comment lint package - working-directory: tools/argument-comment-lint - run: cargo test - env: - RUST_MIN_STACK: "8388608" # 8 MiB - - argument_comment_lint_prebuilt: - name: Argument comment lint - ${{ matrix.name }} - runs-on: ${{ matrix.runs_on || matrix.runner }} - timeout-minutes: 30 - strategy: - fail-fast: false - matrix: - include: - - name: Linux - runner: ubuntu-24.04 - - name: macOS - runner: macos-15-xlarge - - name: Windows - runner: windows-x64 - runs_on: - group: codex-runners - labels: codex-windows-x64 - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - 
uses: ./.github/actions/setup-bazel-ci - with: - target: ${{ runner.os }} - install-test-prereqs: true - - name: Install Linux sandbox build dependencies - if: ${{ runner.os == 'Linux' }} - shell: bash - run: | - sudo DEBIAN_FRONTEND=noninteractive apt-get update - sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev - - name: Run argument comment lint on codex-rs via Bazel - if: ${{ runner.os != 'Windows' }} - env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - shell: bash - run: | - bazel_targets="$(./tools/argument-comment-lint/list-bazel-targets.sh)" - ./.github/scripts/run-bazel-ci.sh \ - -- \ - build \ - --config=argument-comment-lint \ - --keep_going \ - --build_metadata=COMMIT_SHA=${GITHUB_SHA} \ - -- \ - ${bazel_targets} - - name: Run argument comment lint on codex-rs via Bazel - if: ${{ runner.os == 'Windows' }} - env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - shell: bash - run: | - ./.github/scripts/run-argument-comment-lint-bazel.sh \ - --config=argument-comment-lint \ - --platforms=//:local_windows \ - --keep_going \ - --build_metadata=COMMIT_SHA=${GITHUB_SHA} - - # --- CI to validate on different os/targets -------------------------------- - lint_build: - name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }} - runs-on: ${{ matrix.runs_on || matrix.runner }} - timeout-minutes: 30 - defaults: - run: - working-directory: codex-rs - env: - # Speed up repeated builds across CI runs by caching compiled objects, except on - # arm64 macOS runners cross-targeting x86_64 where ring/cc-rs can produce - # mixed-architecture archives under sccache. 
- USE_SCCACHE: ${{ (startsWith(matrix.runner, 'windows') || (matrix.runner == 'macos-15-xlarge' && matrix.target == 'x86_64-apple-darwin')) && 'false' || 'true' }} - CARGO_INCREMENTAL: "0" - SCCACHE_CACHE_SIZE: 10G - # In rust-ci, representative release-profile checks use thin LTO for faster feedback. - CARGO_PROFILE_RELEASE_LTO: ${{ matrix.profile == 'release' && 'thin' || 'fat' }} - - strategy: - fail-fast: false - matrix: - include: - - runner: macos-15-xlarge - target: aarch64-apple-darwin - profile: dev - - runner: macos-15-xlarge - target: x86_64-apple-darwin - profile: dev - - runner: ubuntu-24.04 - target: x86_64-unknown-linux-musl - profile: dev - runs_on: - group: codex-runners - labels: codex-linux-x64 - - runner: ubuntu-24.04 - target: x86_64-unknown-linux-gnu - profile: dev - runs_on: - group: codex-runners - labels: codex-linux-x64 - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-musl - profile: dev - runs_on: - group: codex-runners - labels: codex-linux-arm64 - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-gnu - profile: dev - runs_on: - group: codex-runners - labels: codex-linux-arm64 - - runner: windows-x64 - target: x86_64-pc-windows-msvc - profile: dev - runs_on: - group: codex-runners - labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - profile: dev - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - # Also run representative release builds on Mac and Linux because - # there could be release-only build errors we want to catch. - # Hopefully this also pre-populates the build cache to speed up - # releases. 
- - runner: macos-15-xlarge - target: aarch64-apple-darwin - profile: release - - runner: ubuntu-24.04 - target: x86_64-unknown-linux-musl - profile: release - runs_on: - group: codex-runners - labels: codex-linux-x64 - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-musl - profile: release - runs_on: - group: codex-runners - labels: codex-linux-arm64 - - runner: windows-x64 - target: x86_64-pc-windows-msvc - profile: release - runs_on: - group: codex-runners - labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - profile: release - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - name: Install Linux build dependencies - if: ${{ runner.os == 'Linux' }} - shell: bash - run: | - set -euo pipefail - if command -v apt-get >/dev/null 2>&1; then - sudo apt-get update -y - packages=(pkg-config libcap-dev) - if [[ "${{ matrix.target }}" == 'x86_64-unknown-linux-musl' || "${{ matrix.target }}" == 'aarch64-unknown-linux-musl' ]]; then - packages+=(libubsan1) - fi - sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends "${packages[@]}" - fi - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - targets: ${{ matrix.target }} - components: clippy - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Use hermetic Cargo home (musl) - shell: bash - run: | - set -euo pipefail - cargo_home="${GITHUB_WORKSPACE}/.cargo-home" - mkdir -p "${cargo_home}/bin" - echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV" - echo "${cargo_home}/bin" >> "$GITHUB_PATH" - : > "${cargo_home}/config.toml" - - - name: Compute lockfile hash - id: lockhash - working-directory: codex-rs - shell: bash - run: | - set -euo pipefail - echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT" - echo "toolchain_hash=$(sha256sum 
rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT" - - # Explicit cache restore: split cargo home vs target, so we can - # avoid caching the large target dir on the gnu-dev job. - - name: Restore cargo home cache - id: cache_cargo_home_restore - uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - ${{ github.workspace }}/.cargo-home/bin/ - ${{ github.workspace }}/.cargo-home/registry/index/ - ${{ github.workspace }}/.cargo-home/registry/cache/ - ${{ github.workspace }}/.cargo-home/git/db/ - key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }} - restore-keys: | - cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}- - - # Install and restore sccache cache - - name: Install sccache - if: ${{ env.USE_SCCACHE == 'true' }} - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2 - with: - tool: sccache - version: 0.7.5 - - - name: Configure sccache backend - if: ${{ env.USE_SCCACHE == 'true' }} - shell: bash - run: | - set -euo pipefail - if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then - echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV" - echo "Using sccache GitHub backend" - else - echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV" - echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV" - echo "Using sccache local disk + actions/cache fallback" - fi - - - name: Enable sccache wrapper - if: ${{ env.USE_SCCACHE == 'true' }} - shell: bash - run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV" - - - name: Restore sccache cache (fallback) - if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }} - id: cache_sccache_restore - uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ github.workspace 
}}/.sccache/ - key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }} - restore-keys: | - sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}- - sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}- - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Disable sccache wrapper (musl) - shell: bash - run: | - set -euo pipefail - echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV" - echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV" - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Prepare APT cache directories (musl) - shell: bash - run: | - set -euo pipefail - sudo mkdir -p /var/cache/apt/archives /var/lib/apt/lists - sudo chown -R "$USER:$USER" /var/cache/apt /var/lib/apt/lists - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Restore APT cache (musl) - id: cache_apt_restore - uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - /var/cache/apt - key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1 - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Install Zig - uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2 - with: - version: 0.14.0 - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Install musl build tools - env: - DEBIAN_FRONTEND: noninteractive - TARGET: ${{ matrix.target }} - APT_UPDATE_ARGS: -o Acquire::Retries=3 - APT_INSTALL_ARGS: --no-install-recommends - shell: bash - run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh" - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 
'aarch64-unknown-linux-musl'}} - name: Configure rustc UBSan wrapper (musl host) - shell: bash - run: | - set -euo pipefail - ubsan="" - if command -v ldconfig >/dev/null 2>&1; then - ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')" - fi - wrapper_root="${RUNNER_TEMP:-/tmp}" - wrapper="${wrapper_root}/rustc-ubsan-wrapper" - cat > "${wrapper}" <> "$GITHUB_ENV" - echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV" - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Clear sanitizer flags (musl) - shell: bash - run: | - set -euo pipefail - # Clear global Rust flags so host/proc-macro builds don't pull in UBSan. - echo "RUSTFLAGS=" >> "$GITHUB_ENV" - echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV" - echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV" - # Override any runner-level Cargo config rustflags as well. - echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV" - echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV" - echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV" - echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV" - echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV" - - sanitize_flags() { - local input="$1" - input="${input//-fsanitize=undefined/}" - input="${input//-fno-sanitize-recover=undefined/}" - input="${input//-fno-sanitize-trap=undefined/}" - echo "$input" - } - - cflags="$(sanitize_flags "${CFLAGS-}")" - cxxflags="$(sanitize_flags "${CXXFLAGS-}")" - echo "CFLAGS=${cflags}" >> "$GITHUB_ENV" - echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV" - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }} - name: Configure musl rusty_v8 artifact overrides and verify checksums - uses: ./.github/actions/setup-rusty-v8-musl - with: - target: ${{ matrix.target }} - - - name: Install cargo-chef - if: ${{ matrix.profile == 'release' }} - uses: 
taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2 - with: - tool: cargo-chef - version: 0.1.71 - - - name: Pre-warm dependency cache (cargo-chef) - if: ${{ matrix.profile == 'release' }} - shell: bash - run: | - set -euo pipefail - RECIPE="${RUNNER_TEMP}/chef-recipe.json" - cargo chef prepare --recipe-path "$RECIPE" - cargo chef cook --recipe-path "$RECIPE" --target ${{ matrix.target }} --release - - - name: cargo clippy - run: cargo clippy --target ${{ matrix.target }} --tests --profile ${{ matrix.profile }} --timings -- -D warnings - - - name: Upload Cargo timings (clippy) - if: always() - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: cargo-timings-rust-ci-clippy-${{ matrix.target }}-${{ matrix.profile }} - path: codex-rs/target/**/cargo-timings/cargo-timing.html - if-no-files-found: warn - - # Save caches explicitly; make non-fatal so cache packaging - # never fails the overall job. Only save when key wasn't hit. - - name: Save cargo home cache - if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - ${{ github.workspace }}/.cargo-home/bin/ - ${{ github.workspace }}/.cargo-home/registry/index/ - ${{ github.workspace }}/.cargo-home/registry/cache/ - ${{ github.workspace }}/.cargo-home/git/db/ - key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }} - - - name: Save sccache cache (fallback) - if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ github.workspace }}/.sccache/ - key: sccache-${{ 
matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }} - - - name: sccache stats - if: always() && env.USE_SCCACHE == 'true' - continue-on-error: true - run: sccache --show-stats || true - - - name: sccache summary - if: always() && env.USE_SCCACHE == 'true' - shell: bash - run: | - { - echo "### sccache stats — ${{ matrix.target }} (${{ matrix.profile }})"; - echo; - echo '```'; - sccache --show-stats || true; - echo '```'; - } >> "$GITHUB_STEP_SUMMARY" - - - name: Save APT cache (musl) - if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - /var/cache/apt - key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1 - - tests: - name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.remote_env == 'true' && ' (remote)' || '' }} - runs-on: ${{ matrix.runs_on || matrix.runner }} - # Perhaps we can bring this back down to 30m once we finish the cutover - # from tui_app_server/ to tui/. Incidentally, windows-arm64 was the main - # offender for exceeding the timeout. - timeout-minutes: 45 - defaults: - run: - working-directory: codex-rs - env: - # Speed up repeated builds across CI runs by caching compiled objects, except on - # arm64 macOS runners cross-targeting x86_64 where ring/cc-rs can produce - # mixed-architecture archives under sccache. 
- USE_SCCACHE: ${{ (startsWith(matrix.runner, 'windows') || (matrix.runner == 'macos-15-xlarge' && matrix.target == 'x86_64-apple-darwin')) && 'false' || 'true' }} - CARGO_INCREMENTAL: "0" - SCCACHE_CACHE_SIZE: 10G - - strategy: - fail-fast: false - matrix: - include: - - runner: macos-15-xlarge - target: aarch64-apple-darwin - profile: dev - - runner: ubuntu-24.04 - target: x86_64-unknown-linux-gnu - profile: dev - remote_env: "true" - runs_on: - group: codex-runners - labels: codex-linux-x64 - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-gnu - profile: dev - runs_on: - group: codex-runners - labels: codex-linux-arm64 - - runner: windows-x64 - target: x86_64-pc-windows-msvc - profile: dev - runs_on: - group: codex-runners - labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - profile: dev - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - name: Install Linux build dependencies - if: ${{ runner.os == 'Linux' }} - shell: bash - run: | - set -euo pipefail - if command -v apt-get >/dev/null 2>&1; then - sudo apt-get update -y - sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev - fi - - # Some integration tests rely on DotSlash being installed. - # See https://github.com/openai/codex/pull/7617. 
- - name: Install DotSlash - uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2 - - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - targets: ${{ matrix.target }} - - - name: Compute lockfile hash - id: lockhash - working-directory: codex-rs - shell: bash - run: | - set -euo pipefail - echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT" - echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT" - - - name: Restore cargo home cache - id: cache_cargo_home_restore - uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }} - restore-keys: | - cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}- - - - name: Install sccache - if: ${{ env.USE_SCCACHE == 'true' }} - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2 - with: - tool: sccache - version: 0.7.5 - - - name: Configure sccache backend - if: ${{ env.USE_SCCACHE == 'true' }} - shell: bash - run: | - set -euo pipefail - if [[ -n "${ACTIONS_CACHE_URL:-}" && -n "${ACTIONS_RUNTIME_TOKEN:-}" ]]; then - echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV" - echo "Using sccache GitHub backend" - else - echo "SCCACHE_GHA_ENABLED=false" >> "$GITHUB_ENV" - echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> "$GITHUB_ENV" - echo "Using sccache local disk + actions/cache fallback" - fi - - - name: Enable sccache wrapper - if: ${{ env.USE_SCCACHE == 'true' }} - shell: bash - run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV" - - - name: Restore sccache cache (fallback) - if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }} - id: cache_sccache_restore - uses: 
actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ github.workspace }}/.sccache/ - key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }} - restore-keys: | - sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}- - sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}- - - - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2 - with: - tool: nextest - version: 0.9.103 - - - name: Enable unprivileged user namespaces (Linux) - if: runner.os == 'Linux' - run: | - # Required for bubblewrap to work on Linux CI runners. - sudo sysctl -w kernel.unprivileged_userns_clone=1 - # Ubuntu 24.04+ can additionally gate unprivileged user namespaces - # behind AppArmor. - if sudo sysctl -a 2>/dev/null | grep -q '^kernel.apparmor_restrict_unprivileged_userns'; then - sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 - fi - - - name: Set up remote test env (Docker) - if: ${{ runner.os == 'Linux' && matrix.remote_env == 'true' }} - shell: bash - run: | - set -euo pipefail - export CODEX_TEST_REMOTE_ENV_CONTAINER_NAME=codex-remote-test-env - source "${GITHUB_WORKSPACE}/scripts/test-remote-env.sh" - echo "CODEX_TEST_REMOTE_ENV=${CODEX_TEST_REMOTE_ENV}" >> "$GITHUB_ENV" - echo "CODEX_TEST_REMOTE_EXEC_SERVER_URL=${CODEX_TEST_REMOTE_EXEC_SERVER_URL}" >> "$GITHUB_ENV" - - - name: tests - id: test - run: cargo nextest run --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test --timings - env: - RUST_BACKTRACE: 1 - RUST_MIN_STACK: "8388608" # 8 MiB - NEXTEST_STATUS_LEVEL: leak - - - name: Upload Cargo timings (nextest) - if: always() - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: cargo-timings-rust-ci-nextest-${{ matrix.target }}-${{ matrix.profile }} - path: codex-rs/target/**/cargo-timings/cargo-timing.html - 
if-no-files-found: warn - - - name: Save cargo home cache - if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }} - - - name: Save sccache cache (fallback) - if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: ${{ github.workspace }}/.sccache/ - key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }} - - - name: sccache stats - if: always() && env.USE_SCCACHE == 'true' - continue-on-error: true - run: sccache --show-stats || true - - - name: sccache summary - if: always() && env.USE_SCCACHE == 'true' - shell: bash - run: | - { - echo "### sccache stats — ${{ matrix.target }} (tests)"; - echo; - echo '```'; - sccache --show-stats || true; - echo '```'; - } >> "$GITHUB_STEP_SUMMARY" - - - name: Tear down remote test env - if: ${{ always() && runner.os == 'Linux' && matrix.remote_env == 'true' }} - shell: bash - run: | - set +e - if [[ "${{ steps.test.outcome }}" != "success" ]]; then - docker logs codex-remote-test-env || true - fi - docker rm -f codex-remote-test-env >/dev/null 2>&1 || true - - - name: verify tests passed - if: steps.test.outcome == 'failure' - run: | - echo "Tests failed. See logs for details." 
- exit 1 - - # --- Gatherer job for the full post-merge workflow -------------------------- - results: - name: Full CI results - needs: - [ - general, - cargo_shear, - argument_comment_lint_package, - argument_comment_lint_prebuilt, - lint_build, - tests, - ] - if: always() - runs-on: ubuntu-24.04 - steps: - - name: Summarize - shell: bash - run: | - echo "argpkg : ${{ needs.argument_comment_lint_package.result }}" - echo "arglint: ${{ needs.argument_comment_lint_prebuilt.result }}" - echo "general: ${{ needs.general.result }}" - echo "shear : ${{ needs.cargo_shear.result }}" - echo "lint : ${{ needs.lint_build.result }}" - echo "tests : ${{ needs.tests.result }}" - [[ '${{ needs.argument_comment_lint_package.result }}' == 'success' ]] || { echo 'argument_comment_lint_package failed'; exit 1; } - [[ '${{ needs.argument_comment_lint_prebuilt.result }}' == 'success' ]] || { echo 'argument_comment_lint_prebuilt failed'; exit 1; } - [[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; } - [[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; } - [[ '${{ needs.lint_build.result }}' == 'success' ]] || { echo 'lint_build failed'; exit 1; } - [[ '${{ needs.tests.result }}' == 'success' ]] || { echo 'tests failed'; exit 1; } - - - name: sccache summary note - if: always() - run: | - echo "Per-job sccache stats are attached to each matrix job's Step Summary." 
diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml deleted file mode 100644 index 42a3ca876412..000000000000 --- a/.github/workflows/rust-ci.yml +++ /dev/null @@ -1,222 +0,0 @@ -name: rust-ci -on: - pull_request: {} - workflow_dispatch: - -jobs: - # --- Detect what changed so the fast PR workflow only runs relevant jobs ---- - changed: - name: Detect changed areas - runs-on: ubuntu-24.04 - outputs: - argument_comment_lint: ${{ steps.detect.outputs.argument_comment_lint }} - argument_comment_lint_package: ${{ steps.detect.outputs.argument_comment_lint_package }} - codex: ${{ steps.detect.outputs.codex }} - workflows: ${{ steps.detect.outputs.workflows }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - with: - fetch-depth: 0 - - name: Detect changed paths (no external action) - id: detect - shell: bash - run: | - set -euo pipefail - - if [[ "${{ github.event_name }}" == "pull_request" ]]; then - BASE_SHA='${{ github.event.pull_request.base.sha }}' - HEAD_SHA='${{ github.event.pull_request.head.sha }}' - echo "Base SHA: $BASE_SHA" - echo "Head SHA: $HEAD_SHA" - mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA") - else - # On manual runs, default to the full fast-PR bundle. 
- files=("codex-rs/force" "tools/argument-comment-lint/force" ".github/force") - fi - - codex=false - argument_comment_lint=false - argument_comment_lint_package=false - workflows=false - for f in "${files[@]}"; do - [[ $f == codex-rs/* ]] && codex=true - [[ $f == codex-rs/* || $f == tools/argument-comment-lint/* || $f == justfile ]] && argument_comment_lint=true - [[ $f == defs.bzl || $f == workspace_root_test_launcher.sh.tpl || $f == workspace_root_test_launcher.bat.tpl ]] && argument_comment_lint=true - [[ $f == tools/argument-comment-lint/* || $f == .github/workflows/rust-ci.yml || $f == .github/workflows/rust-ci-full.yml ]] && argument_comment_lint_package=true - [[ $f == .github/* ]] && workflows=true - done - - echo "argument_comment_lint=$argument_comment_lint" >> "$GITHUB_OUTPUT" - echo "argument_comment_lint_package=$argument_comment_lint_package" >> "$GITHUB_OUTPUT" - echo "codex=$codex" >> "$GITHUB_OUTPUT" - echo "workflows=$workflows" >> "$GITHUB_OUTPUT" - - # --- Fast Cargo-native PR checks ------------------------------------------- - general: - name: Format / etc - runs-on: ubuntu-24.04 - needs: changed - if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' }} - defaults: - run: - working-directory: codex-rs - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - components: rustfmt - - name: cargo fmt - run: cargo fmt -- --config imports_granularity=Item --check - - cargo_shear: - name: cargo shear - runs-on: ubuntu-24.04 - needs: changed - if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' }} - defaults: - run: - working-directory: codex-rs - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - - uses: 
taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2 - with: - tool: cargo-shear - version: 1.5.1 - - name: cargo shear - run: cargo shear - - argument_comment_lint_package: - name: Argument comment lint package - runs-on: ubuntu-24.04 - needs: changed - if: ${{ needs.changed.outputs.argument_comment_lint_package == 'true' }} - env: - CARGO_DYLINT_VERSION: 5.0.0 - DYLINT_LINK_VERSION: 5.0.0 - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - - name: Install nightly argument-comment-lint toolchain - shell: bash - run: | - rustup toolchain install nightly-2025-09-18 \ - --profile minimal \ - --component llvm-tools-preview \ - --component rustc-dev \ - --component rust-src \ - --no-self-update - rustup default nightly-2025-09-18 - - name: Cache cargo-dylint tooling - id: cargo_dylint_cache - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cargo/bin/cargo-dylint - ~/.cargo/bin/dylint-link - ~/.cargo/registry/index - ~/.cargo/registry/cache - ~/.cargo/git/db - key: argument-comment-lint-${{ runner.os }}-${{ env.CARGO_DYLINT_VERSION }}-${{ env.DYLINT_LINK_VERSION }}-${{ hashFiles('tools/argument-comment-lint/Cargo.lock', 'tools/argument-comment-lint/rust-toolchain', '.github/workflows/rust-ci.yml', '.github/workflows/rust-ci-full.yml') }} - - name: Install cargo-dylint tooling - if: ${{ steps.cargo_dylint_cache.outputs.cache-hit != 'true' }} - shell: bash - run: | - cargo install --locked cargo-dylint --version "$CARGO_DYLINT_VERSION" - cargo install --locked dylint-link --version "$DYLINT_LINK_VERSION" - - name: Check Python wrapper syntax - run: python3 -m py_compile tools/argument-comment-lint/wrapper_common.py tools/argument-comment-lint/run.py tools/argument-comment-lint/run-prebuilt-linter.py tools/argument-comment-lint/test_wrapper_common.py - - name: Test Python wrapper helpers - run: python3 
-m unittest discover -s tools/argument-comment-lint -p 'test_*.py' - - name: Test argument comment lint package - working-directory: tools/argument-comment-lint - run: cargo test - env: - RUST_MIN_STACK: "8388608" # 8 MiB - - argument_comment_lint_prebuilt: - name: Argument comment lint - ${{ matrix.name }} - runs-on: ${{ matrix.runs_on || matrix.runner }} - timeout-minutes: ${{ matrix.timeout_minutes }} - needs: changed - strategy: - fail-fast: false - matrix: - include: - - name: Linux - runner: ubuntu-24.04 - timeout_minutes: 30 - - name: macOS - runner: macos-15-xlarge - timeout_minutes: 30 - - name: Windows - runner: windows-x64 - timeout_minutes: 30 - runs_on: - group: codex-runners - labels: codex-windows-x64 - steps: - - name: Check whether argument comment lint should run - id: argument_comment_lint_gate - shell: bash - env: - ARGUMENT_COMMENT_LINT: ${{ needs.changed.outputs.argument_comment_lint }} - WORKFLOWS: ${{ needs.changed.outputs.workflows }} - run: | - if [[ "$ARGUMENT_COMMENT_LINT" == "true" || "$WORKFLOWS" == "true" ]]; then - echo "run=true" >> "$GITHUB_OUTPUT" - exit 0 - fi - - echo "No argument-comment-lint relevant changes." 
- echo "run=false" >> "$GITHUB_OUTPUT" - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }} - - name: Run argument comment lint on codex-rs via Bazel - if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }} - uses: ./.github/actions/run-argument-comment-lint - with: - target: ${{ runner.os }} - buildbuddy-api-key: ${{ secrets.BUILDBUDDY_API_KEY }} - - # --- Gatherer job that you mark as the ONLY required status ----------------- - results: - name: CI results (required) - needs: - [ - changed, - general, - cargo_shear, - argument_comment_lint_package, - argument_comment_lint_prebuilt, - ] - if: always() - runs-on: ubuntu-24.04 - steps: - - name: Summarize - shell: bash - run: | - echo "argpkg : ${{ needs.argument_comment_lint_package.result }}" - echo "arglint: ${{ needs.argument_comment_lint_prebuilt.result }}" - echo "general: ${{ needs.general.result }}" - echo "shear : ${{ needs.cargo_shear.result }}" - - # If nothing relevant changed (PR touching only root README, etc.), - # declare success regardless of other jobs. - if [[ '${{ needs.changed.outputs.argument_comment_lint }}' != 'true' && '${{ needs.changed.outputs.codex }}' != 'true' && '${{ needs.changed.outputs.workflows }}' != 'true' ]]; then - echo 'No relevant changes -> CI not required.' 
- exit 0 - fi - - if [[ '${{ needs.changed.outputs.argument_comment_lint_package }}' == 'true' ]]; then - [[ '${{ needs.argument_comment_lint_package.result }}' == 'success' ]] || { echo 'argument_comment_lint_package failed'; exit 1; } - fi - - if [[ '${{ needs.changed.outputs.argument_comment_lint }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then - [[ '${{ needs.argument_comment_lint_prebuilt.result }}' == 'success' ]] || { echo 'argument_comment_lint_prebuilt failed'; exit 1; } - fi - - if [[ '${{ needs.changed.outputs.codex }}' == 'true' || '${{ needs.changed.outputs.workflows }}' == 'true' ]]; then - [[ '${{ needs.general.result }}' == 'success' ]] || { echo 'general failed'; exit 1; } - [[ '${{ needs.cargo_shear.result }}' == 'success' ]] || { echo 'cargo_shear failed'; exit 1; } - fi diff --git a/.github/workflows/rust-release-argument-comment-lint.yml b/.github/workflows/rust-release-argument-comment-lint.yml deleted file mode 100644 index ba0d147d4f6a..000000000000 --- a/.github/workflows/rust-release-argument-comment-lint.yml +++ /dev/null @@ -1,106 +0,0 @@ -name: rust-release-argument-comment-lint - -on: - workflow_call: - inputs: - publish: - required: true - type: boolean - -jobs: - skip: - if: ${{ !inputs.publish }} - runs-on: ubuntu-latest - steps: - - run: echo "Skipping argument-comment-lint release assets for prerelease tag" - - build: - if: ${{ inputs.publish }} - name: Build - ${{ matrix.runner }} - ${{ matrix.target }} - runs-on: ${{ matrix.runs_on || matrix.runner }} - timeout-minutes: 60 - env: - CARGO_DYLINT_VERSION: 5.0.0 - DYLINT_LINK_VERSION: 5.0.0 - - strategy: - fail-fast: false - matrix: - include: - - runner: macos-15-xlarge - target: aarch64-apple-darwin - archive_name: argument-comment-lint-aarch64-apple-darwin.tar.gz - lib_name: libargument_comment_lint@nightly-2025-09-18-aarch64-apple-darwin.dylib - runner_binary: argument-comment-lint - cargo_dylint_binary: cargo-dylint - - runner: ubuntu-24.04 - 
target: x86_64-unknown-linux-gnu - archive_name: argument-comment-lint-x86_64-unknown-linux-gnu.tar.gz - lib_name: libargument_comment_lint@nightly-2025-09-18-x86_64-unknown-linux-gnu.so - runner_binary: argument-comment-lint - cargo_dylint_binary: cargo-dylint - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-gnu - archive_name: argument-comment-lint-aarch64-unknown-linux-gnu.tar.gz - lib_name: libargument_comment_lint@nightly-2025-09-18-aarch64-unknown-linux-gnu.so - runner_binary: argument-comment-lint - cargo_dylint_binary: cargo-dylint - - runner: windows-x64 - target: x86_64-pc-windows-msvc - archive_name: argument-comment-lint-x86_64-pc-windows-msvc.zip - lib_name: argument_comment_lint@nightly-2025-09-18-x86_64-pc-windows-msvc.dll - runner_binary: argument-comment-lint.exe - cargo_dylint_binary: cargo-dylint.exe - runs_on: - group: codex-runners - labels: codex-windows-x64 - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - toolchain: nightly-2025-09-18 - targets: ${{ matrix.target }} - components: llvm-tools-preview, rustc-dev, rust-src - - - name: Install tooling - shell: bash - run: | - install_root="${RUNNER_TEMP}/argument-comment-lint-tools" - cargo install --locked cargo-dylint --version "$CARGO_DYLINT_VERSION" --root "$install_root" - cargo install --locked dylint-link --version "$DYLINT_LINK_VERSION" - echo "INSTALL_ROOT=$install_root" >> "$GITHUB_ENV" - - - name: Cargo build - working-directory: tools/argument-comment-lint - shell: bash - run: cargo build --release --target ${{ matrix.target }} - - - name: Stage artifact - shell: bash - run: | - dest="dist/argument-comment-lint/${{ matrix.target }}" - mkdir -p "$dest" - package_root="${RUNNER_TEMP}/argument-comment-lint" - rm -rf "$package_root" - mkdir -p "$package_root/bin" "$package_root/lib" - - cp "tools/argument-comment-lint/target/${{ matrix.target 
}}/release/${{ matrix.runner_binary }}" \ - "$package_root/bin/${{ matrix.runner_binary }}" - cp "${INSTALL_ROOT}/bin/${{ matrix.cargo_dylint_binary }}" \ - "$package_root/bin/${{ matrix.cargo_dylint_binary }}" - cp "tools/argument-comment-lint/target/${{ matrix.target }}/release/${{ matrix.lib_name }}" \ - "$package_root/lib/${{ matrix.lib_name }}" - - archive_path="$dest/${{ matrix.archive_name }}" - if [[ "${{ runner.os }}" == "Windows" ]]; then - (cd "${RUNNER_TEMP}" && 7z a "$GITHUB_WORKSPACE/$archive_path" argument-comment-lint >/dev/null) - else - (cd "${RUNNER_TEMP}" && tar -czf "$GITHUB_WORKSPACE/$archive_path" argument-comment-lint) - fi - - - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: argument-comment-lint-${{ matrix.target }} - path: dist/argument-comment-lint/${{ matrix.target }}/* diff --git a/.github/workflows/rust-release-prepare.yml b/.github/workflows/rust-release-prepare.yml deleted file mode 100644 index 528c329d582a..000000000000 --- a/.github/workflows/rust-release-prepare.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: rust-release-prepare -on: - workflow_dispatch: - schedule: - - cron: "0 */4 * * *" - -concurrency: - group: ${{ github.workflow }} - cancel-in-progress: false - -permissions: - contents: write - pull-requests: write - -jobs: - prepare: - # Prevent scheduled runs on forks (no secrets, wastes Actions minutes) - if: github.repository == 'openai/codex' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - with: - ref: main - fetch-depth: 0 - - - name: Update models.json - env: - OPENAI_API_KEY: ${{ secrets.CODEX_OPENAI_API_KEY }} - run: | - set -euo pipefail - - client_version="99.99.99" - terminal_info="github-actions" - user_agent="codex_cli_rs/99.99.99 (Linux $(uname -r); $(uname -m)) ${terminal_info}" - base_url="${OPENAI_BASE_URL:-https://chatgpt.com/backend-api/codex}" - - headers=( - -H "Authorization: Bearer ${OPENAI_API_KEY}" 
- -H "User-Agent: ${user_agent}" - ) - - url="${base_url%/}/models?client_version=${client_version}" - curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/models-manager/models.json - - - name: Open pull request (if changed) - uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8 - with: - commit-message: "Update models.json" - title: "Update models.json" - body: "Automated update of models.json." - branch: "bot/update-models-json" - reviewers: "pakrym-oai,aibrahim-oai" - delete-branch: true diff --git a/.github/workflows/rust-release-windows.yml b/.github/workflows/rust-release-windows.yml deleted file mode 100644 index 5e1edae17381..000000000000 --- a/.github/workflows/rust-release-windows.yml +++ /dev/null @@ -1,288 +0,0 @@ -name: rust-release-windows - -on: - workflow_call: - inputs: - release-lto: - required: true - type: string - secrets: - AZURE_TRUSTED_SIGNING_CLIENT_ID: - required: true - AZURE_TRUSTED_SIGNING_TENANT_ID: - required: true - AZURE_TRUSTED_SIGNING_SUBSCRIPTION_ID: - required: true - AZURE_TRUSTED_SIGNING_ENDPOINT: - required: true - AZURE_TRUSTED_SIGNING_ACCOUNT_NAME: - required: true - AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE_NAME: - required: true - -jobs: - build-windows-binaries: - name: Build Windows binaries - ${{ matrix.runner }} - ${{ matrix.target }} - ${{ matrix.bundle }} - runs-on: ${{ matrix.runs_on }} - # Windows release builds can exceed an hour on fat-LTO mainline releases, - # so keep the timeout aligned with the top-level release build headroom. 
- timeout-minutes: 90 - permissions: - contents: read - defaults: - run: - working-directory: codex-rs - env: - CARGO_PROFILE_RELEASE_LTO: ${{ inputs.release-lto }} - - strategy: - fail-fast: false - matrix: - include: - - runner: windows-x64 - target: x86_64-pc-windows-msvc - bundle: primary - binaries: "codex codex-responses-api-proxy" - runs_on: - group: codex-runners - labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - bundle: primary - binaries: "codex codex-responses-api-proxy" - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - runner: windows-x64 - target: x86_64-pc-windows-msvc - bundle: helpers - binaries: "codex-windows-sandbox-setup codex-command-runner" - runs_on: - group: codex-runners - labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - bundle: helpers - binaries: "codex-windows-sandbox-setup codex-command-runner" - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - runner: windows-x64 - target: x86_64-pc-windows-msvc - bundle: app-server - binaries: "codex-app-server" - runs_on: - group: codex-runners - labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - bundle: app-server - binaries: "codex-app-server" - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - name: Print runner specs (Windows) - shell: powershell - run: | - $computer = Get-CimInstance Win32_ComputerSystem - $cpu = Get-CimInstance Win32_Processor | Select-Object -First 1 - $ramGiB = [math]::Round($computer.TotalPhysicalMemory / 1GB, 1) - Write-Host "Runner: $env:RUNNER_NAME" - Write-Host "OS: $([System.Environment]::OSVersion.VersionString)" - Write-Host "CPU: $($cpu.Name)" - Write-Host "Logical CPUs: $($computer.NumberOfLogicalProcessors)" - Write-Host "Physical CPUs: $($computer.NumberOfProcessors)" - Write-Host "Total RAM: $ramGiB GiB" - Write-Host 
"Disk usage:" - Get-PSDrive -PSProvider FileSystem | Format-Table -AutoSize Name, @{Name='Size(GB)';Expression={[math]::Round(($_.Used + $_.Free) / 1GB, 1)}}, @{Name='Free(GB)';Expression={[math]::Round($_.Free / 1GB, 1)}} - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 - with: - targets: ${{ matrix.target }} - - - name: Cargo build (Windows binaries) - shell: bash - run: | - build_args=() - for binary in ${{ matrix.binaries }}; do - build_args+=(--bin "$binary") - done - cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}" - - - name: Upload Cargo timings - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: cargo-timings-rust-release-windows-${{ matrix.target }}-${{ matrix.bundle }} - path: codex-rs/target/**/cargo-timings/cargo-timing.html - if-no-files-found: warn - - - name: Stage Windows binaries - shell: bash - run: | - output_dir="target/${{ matrix.target }}/release/staged-${{ matrix.bundle }}" - mkdir -p "$output_dir" - for binary in ${{ matrix.binaries }}; do - cp "target/${{ matrix.target }}/release/${binary}.exe" "$output_dir/${binary}.exe" - done - - - name: Upload Windows binaries - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: windows-binaries-${{ matrix.target }}-${{ matrix.bundle }} - path: | - codex-rs/target/${{ matrix.target }}/release/staged-${{ matrix.bundle }}/* - - build-windows: - needs: - - build-windows-binaries - name: Build - ${{ matrix.runner }} - ${{ matrix.target }} - runs-on: ${{ matrix.runs_on }} - timeout-minutes: 90 - permissions: - contents: read - id-token: write - defaults: - run: - working-directory: codex-rs - env: - WINDOWS_BINARIES: "codex codex-responses-api-proxy codex-windows-sandbox-setup codex-command-runner codex-app-server" - - strategy: - fail-fast: false - matrix: - include: - - runner: windows-x64 - target: x86_64-pc-windows-msvc - runs_on: - group: codex-runners - 
labels: codex-windows-x64 - - runner: windows-arm64 - target: aarch64-pc-windows-msvc - runs_on: - group: codex-runners - labels: codex-windows-arm64 - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Download prebuilt Windows primary binaries - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8 - with: - name: windows-binaries-${{ matrix.target }}-primary - path: codex-rs/target/${{ matrix.target }}/release - - - name: Download prebuilt Windows helper binaries - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8 - with: - name: windows-binaries-${{ matrix.target }}-helpers - path: codex-rs/target/${{ matrix.target }}/release - - - name: Download prebuilt Windows app-server binary - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8 - with: - name: windows-binaries-${{ matrix.target }}-app-server - path: codex-rs/target/${{ matrix.target }}/release - - - name: Verify binaries - shell: bash - run: | - set -euo pipefail - for binary in ${WINDOWS_BINARIES}; do - ls -lh "target/${{ matrix.target }}/release/${binary}.exe" - done - - - name: Sign Windows binaries with Azure Trusted Signing - uses: ./.github/actions/windows-code-sign - with: - target: ${{ matrix.target }} - binaries: ${{ env.WINDOWS_BINARIES }} - client-id: ${{ secrets.AZURE_TRUSTED_SIGNING_CLIENT_ID }} - tenant-id: ${{ secrets.AZURE_TRUSTED_SIGNING_TENANT_ID }} - subscription-id: ${{ secrets.AZURE_TRUSTED_SIGNING_SUBSCRIPTION_ID }} - endpoint: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }} - account-name: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }} - certificate-profile-name: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE_NAME }} - - - name: Stage artifacts - shell: bash - run: | - dest="dist/${{ matrix.target }}" - mkdir -p "$dest" - - for binary in ${WINDOWS_BINARIES}; do - cp "target/${{ matrix.target }}/release/${binary}.exe" \ - "$dest/${binary}-${{ matrix.target 
}}.exe" - done - - - name: Install DotSlash - uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2 - - - name: Compress artifacts - shell: bash - run: | - # Path that contains the uncompressed binaries for the current - # ${{ matrix.target }} - dest="dist/${{ matrix.target }}" - repo_root=$PWD - - # For compatibility with environments that lack the `zstd` tool we - # additionally create a `.tar.gz` and `.zip` for every Windows binary. - # The end result is: - # codex-.zst - # codex-.tar.gz - # codex-.zip - for f in "$dest"/*; do - base="$(basename "$f")" - # Skip files that are already archives (shouldn't happen, but be - # safe). - if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then - continue - fi - - # Don't try to compress signature bundles. - if [[ "$base" == *.sigstore ]]; then - continue - fi - - # Create per-binary tar.gz - tar -C "$dest" -czf "$dest/${base}.tar.gz" "$base" - - # Create zip archive for Windows binaries. - # Must run from inside the dest dir so 7z won't embed the - # directory path inside the zip. - if [[ "$base" == "codex-${{ matrix.target }}.exe" ]]; then - # Bundle the sandbox helper binaries into the main codex zip so - # WinGet installs include the required helpers next to codex.exe. - # Fall back to the single-binary zip if the helpers are missing - # to avoid breaking releases. - bundle_dir="$(mktemp -d)" - runner_src="$dest/codex-command-runner-${{ matrix.target }}.exe" - setup_src="$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe" - if [[ -f "$runner_src" && -f "$setup_src" ]]; then - cp "$dest/$base" "$bundle_dir/$base" - cp "$runner_src" "$bundle_dir/codex-command-runner.exe" - cp "$setup_src" "$bundle_dir/codex-windows-sandbox-setup.exe" - # Use an absolute path so bundle zips land in the real dist - # dir even when 7z runs from a temp directory. - (cd "$bundle_dir" && 7z a "$repo_root/$dest/${base}.zip" .) 
- else - echo "warning: missing sandbox binaries; falling back to single-binary zip" - echo "warning: expected $runner_src and $setup_src" - (cd "$dest" && 7z a "${base}.zip" "$base") - fi - rm -rf "$bundle_dir" - else - (cd "$dest" && 7z a "${base}.zip" "$base") - fi - - # Keep raw executables and produce .zst alongside them. - "${GITHUB_WORKSPACE}/.github/workflows/zstd" -T0 -19 "$dest/$base" - done - - - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: ${{ matrix.target }} - path: | - codex-rs/dist/${{ matrix.target }}/* diff --git a/.github/workflows/rust-release-zsh.yml b/.github/workflows/rust-release-zsh.yml deleted file mode 100644 index 7ec49f9863f9..000000000000 --- a/.github/workflows/rust-release-zsh.yml +++ /dev/null @@ -1,95 +0,0 @@ -name: rust-release-zsh - -on: - workflow_call: - -env: - ZSH_COMMIT: 77045ef899e53b9598bebc5a41db93a548a40ca6 - ZSH_PATCH: codex-rs/shell-escalation/patches/zsh-exec-wrapper.patch - -jobs: - linux: - name: Build zsh (Linux) - ${{ matrix.variant }} - ${{ matrix.target }} - runs-on: ${{ matrix.runner }} - timeout-minutes: 30 - container: - image: ${{ matrix.image }} - - strategy: - fail-fast: false - matrix: - include: - - runner: ubuntu-24.04 - target: x86_64-unknown-linux-musl - variant: ubuntu-24.04 - image: ubuntu:24.04 - archive_name: codex-zsh-x86_64-unknown-linux-musl.tar.gz - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-musl - variant: ubuntu-24.04 - image: arm64v8/ubuntu:24.04 - archive_name: codex-zsh-aarch64-unknown-linux-musl.tar.gz - - steps: - - name: Install build prerequisites - shell: bash - run: | - set -euo pipefail - apt-get update - DEBIAN_FRONTEND=noninteractive apt-get install -y \ - autoconf \ - bison \ - build-essential \ - ca-certificates \ - gettext \ - git \ - libncursesw5-dev - - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Build, smoke-test, and stage zsh artifact - shell: bash - run: | - 
"${GITHUB_WORKSPACE}/.github/scripts/build-zsh-release-artifact.sh" \ - "dist/zsh/${{ matrix.target }}/${{ matrix.archive_name }}" - - - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: codex-zsh-${{ matrix.target }} - path: dist/zsh/${{ matrix.target }}/* - - darwin: - name: Build zsh (macOS) - ${{ matrix.variant }} - ${{ matrix.target }} - runs-on: ${{ matrix.runner }} - timeout-minutes: 30 - - strategy: - fail-fast: false - matrix: - include: - - runner: macos-15-xlarge - target: aarch64-apple-darwin - variant: macos-15 - archive_name: codex-zsh-aarch64-apple-darwin.tar.gz - - steps: - - name: Install build prerequisites - shell: bash - run: | - set -euo pipefail - if ! command -v autoconf >/dev/null 2>&1; then - brew install autoconf - fi - - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Build, smoke-test, and stage zsh artifact - shell: bash - run: | - "${GITHUB_WORKSPACE}/.github/scripts/build-zsh-release-artifact.sh" \ - "dist/zsh/${{ matrix.target }}/${{ matrix.archive_name }}" - - - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: codex-zsh-${{ matrix.target }} - path: dist/zsh/${{ matrix.target }}/* diff --git a/.github/workflows/rust-release.yml b/.github/workflows/rust-release.yml index 073ba5821065..771327f608ef 100644 --- a/.github/workflows/rust-release.yml +++ b/.github/workflows/rust-release.yml @@ -1,175 +1,53 @@ -# Release workflow for codex-rs. 
-# To release, follow a workflow like: -# ``` -# git tag -a rust-v0.1.0 -m "Release 0.1.0" -# git push origin rust-v0.1.0 -# ``` - name: rust-release + on: push: - tags: - - "rust-v*.*.*" + branches: + - main + workflow_dispatch: concurrency: group: ${{ github.workflow }} cancel-in-progress: true jobs: - tag-check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: dtolnay/rust-toolchain@c2b55edffaf41a251c410bb32bed22afefa800f1 # 1.92 - - name: Validate tag matches Cargo.toml version - shell: bash - run: | - set -euo pipefail - echo "::group::Tag validation" - - # 1. Must be a tag and match the regex - [[ "${GITHUB_REF_TYPE}" == "tag" ]] \ - || { echo "❌ Not a tag push"; exit 1; } - [[ "${GITHUB_REF_NAME}" =~ ^rust-v[0-9]+\.[0-9]+\.[0-9]+(-(alpha|beta)(\.[0-9]+)?)?$ ]] \ - || { echo "❌ Tag '${GITHUB_REF_NAME}' doesn't match expected format"; exit 1; } - - # 2. Extract versions - tag_ver="${GITHUB_REF_NAME#rust-v}" - cargo_ver="$(grep -m1 '^version' codex-rs/Cargo.toml \ - | sed -E 's/version *= *"([^"]+)".*/\1/')" - - # 3. Compare - [[ "${tag_ver}" == "${cargo_ver}" ]] \ - || { echo "❌ Tag ${tag_ver} ≠ Cargo.toml ${cargo_ver}"; exit 1; } - - echo "✅ Tag and Cargo.toml agree (${tag_ver})" - echo "::endgroup::" - - build: - needs: tag-check - name: Build - ${{ matrix.runner }} - ${{ matrix.target }} - ${{ matrix.bundle }} - runs-on: ${{ matrix.runs_on || matrix.runner }} - # Release builds can take a long time, so leave some headroom to avoid - # having to restart the full workflow due to a timeout. - timeout-minutes: 90 + build-unix: + name: Build - ${{ matrix.runner }} - ${{ matrix.target }} + runs-on: ${{ matrix.runner }} + timeout-minutes: 240 permissions: contents: read - id-token: write defaults: run: working-directory: codex-rs - env: - # 2026-03-04: temporarily change releases to use thin LTO because - # Ubuntu ARM is timing out at 60 minutes. 
- CARGO_PROFILE_RELEASE_LTO: ${{ contains(github.ref_name, '-alpha') && 'thin' || 'thin' }} - strategy: fail-fast: false matrix: include: - - runner: macos-15-xlarge - target: aarch64-apple-darwin - bundle: primary - artifact_name: aarch64-apple-darwin - binaries: "codex codex-responses-api-proxy" - build_dmg: "true" - - runner: macos-15-xlarge - target: aarch64-apple-darwin - bundle: app-server - artifact_name: aarch64-apple-darwin-app-server - binaries: "codex-app-server" - build_dmg: "false" - - runner: macos-15-xlarge - target: x86_64-apple-darwin - bundle: primary - artifact_name: x86_64-apple-darwin - binaries: "codex codex-responses-api-proxy" - build_dmg: "true" - - runner: macos-15-xlarge - target: x86_64-apple-darwin - bundle: app-server - artifact_name: x86_64-apple-darwin-app-server - binaries: "codex-app-server" - build_dmg: "false" - # Release artifacts intentionally ship MUSL-linked Linux binaries. - - runner: ubuntu-24.04 - target: x86_64-unknown-linux-musl - bundle: primary - artifact_name: x86_64-unknown-linux-musl - binaries: "codex codex-responses-api-proxy" - build_dmg: "false" - runner: ubuntu-24.04 target: x86_64-unknown-linux-musl - bundle: app-server - artifact_name: x86_64-unknown-linux-musl-app-server - binaries: "codex-app-server" - build_dmg: "false" - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-musl - bundle: primary - artifact_name: aarch64-unknown-linux-musl - binaries: "codex codex-responses-api-proxy" - build_dmg: "false" - - runner: ubuntu-24.04-arm - target: aarch64-unknown-linux-musl - bundle: app-server - artifact_name: aarch64-unknown-linux-musl-app-server - binaries: "codex-app-server" - build_dmg: "false" + - runner: macos-15-intel + target: x86_64-apple-darwin + - runner: macos-15 + target: aarch64-apple-darwin steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - name: Print runner specs (Linux) - if: ${{ runner.os == 'Linux' }} - shell: bash - run: | - set -euo pipefail - 
cpu_model="$(lscpu | awk -F: '/Model name/ {gsub(/^[ \t]+/, "", $2); print $2; exit}')" - total_ram="$(awk '/MemTotal/ {printf "%.1f GiB\n", $2 / 1024 / 1024}' /proc/meminfo)" - echo "Runner: ${RUNNER_NAME:-unknown}" - echo "OS: $(uname -a)" - echo "CPU model: ${cpu_model}" - echo "Logical CPUs: $(nproc)" - echo "Total RAM: ${total_ram}" - echo "Disk usage:" - df -h . - - name: Print runner specs (macOS) - if: ${{ runner.os == 'macOS' }} - shell: bash - run: | - set -euo pipefail - total_ram="$(sysctl -n hw.memsize | awk '{printf "%.1f GiB\n", $1 / 1024 / 1024 / 1024}')" - echo "Runner: ${RUNNER_NAME:-unknown}" - echo "OS: $(sw_vers -productName) $(sw_vers -productVersion)" - echo "Hardware model: $(sysctl -n hw.model)" - echo "CPU architecture: $(uname -m)" - echo "Logical CPUs: $(sysctl -n hw.logicalcpu)" - echo "Physical CPUs: $(sysctl -n hw.physicalcpu)" - echo "Total RAM: ${total_ram}" - echo "Disk usage:" - df -h . - - name: Install Linux bwrap build dependencies + - uses: actions/checkout@v6 + + - name: Install Linux build dependencies if: ${{ runner.os == 'Linux' }} shell: bash run: | set -euo pipefail sudo apt-get update -y - sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev - - name: Install UBSan runtime (musl) - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }} - shell: bash - run: | - set -euo pipefail - if command -v apt-get >/dev/null 2>&1; then - sudo apt-get update -y - sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1 - fi - - uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0 + sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev libubsan1 + + - uses: dtolnay/rust-toolchain@1.93.0 with: targets: ${{ matrix.target }} - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Use hermetic Cargo home 
(musl) + - name: Use hermetic Cargo home + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} shell: bash run: | set -euo pipefail @@ -179,20 +57,45 @@ jobs: echo "${cargo_home}/bin" >> "$GITHUB_PATH" : > "${cargo_home}/config.toml" - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Install Zig - uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2 + - name: Cache Cargo home and target dir + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/.cargo-home/registry + ${{ github.workspace }}/.cargo-home/git + codex-rs/target + key: ${{ runner.os }}-${{ matrix.target }}-cargo-${{ hashFiles('codex-rs/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.target }}-cargo- + + - name: Cache Cargo home and target dir + if: ${{ matrix.target != 'x86_64-unknown-linux-musl' }} + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + codex-rs/target + key: ${{ runner.os }}-${{ matrix.target }}-cargo-${{ hashFiles('codex-rs/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.target }}-cargo- + + - name: Install Zig + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} + uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 with: version: 0.14.0 - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Install musl build tools + - name: Install musl build tools + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} env: TARGET: ${{ matrix.target }} + shell: bash run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh" - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Configure rustc UBSan wrapper (musl host) + - name: Configure rustc UBSan wrapper + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} shell: bash run: | 
set -euo pipefail @@ -214,512 +117,350 @@ jobs: echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV" echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV" - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}} - name: Clear sanitizer flags (musl) + - name: Clear sanitizer flags + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} shell: bash run: | set -euo pipefail - # Avoid problematic aws-lc jitter entropy code path on musl builders. echo "AWS_LC_SYS_NO_JITTER_ENTROPY=1" >> "$GITHUB_ENV" target_no_jitter="AWS_LC_SYS_NO_JITTER_ENTROPY_${{ matrix.target }}" target_no_jitter="${target_no_jitter//-/_}" echo "${target_no_jitter}=1" >> "$GITHUB_ENV" - - # Clear global Rust flags so host/proc-macro builds don't pull in UBSan. echo "RUSTFLAGS=" >> "$GITHUB_ENV" echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV" echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV" - # Override any runner-level Cargo config rustflags as well. echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV" echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV" echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV" echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV" echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV" - sanitize_flags() { - local input="$1" - input="${input//-fsanitize=undefined/}" - input="${input//-fno-sanitize-recover=undefined/}" - input="${input//-fno-sanitize-trap=undefined/}" - echo "$input" - } - - cflags="$(sanitize_flags "${CFLAGS-}")" - cxxflags="$(sanitize_flags "${CXXFLAGS-}")" - echo "CFLAGS=${cflags}" >> "$GITHUB_ENV" - echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV" - - - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }} - name: Configure musl rusty_v8 artifact overrides and verify checksums - uses: ./.github/actions/setup-rusty-v8-musl - with: - target: ${{ matrix.target }} - - - name: Cargo build + - name: 
Configure musl rusty_v8 artifact overrides + if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} + env: + TARGET: ${{ matrix.target }} shell: bash run: | - build_args=() - for binary in ${{ matrix.binaries }}; do - build_args+=(--bin "$binary") - done - echo "CARGO_PROFILE_RELEASE_LTO: ${CARGO_PROFILE_RELEASE_LTO}" - cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}" - - - name: Upload Cargo timings - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: cargo-timings-rust-release-${{ matrix.target }}-${{ matrix.bundle }} - path: codex-rs/target/**/cargo-timings/cargo-timing.html - if-no-files-found: warn - - - if: ${{ contains(matrix.target, 'linux') }} - name: Cosign Linux artifacts - uses: ./.github/actions/linux-code-sign - with: - target: ${{ matrix.target }} - artifacts-dir: ${{ github.workspace }}/codex-rs/target/${{ matrix.target }}/release - binaries: ${{ matrix.binaries }} + set -euo pipefail + version="$(python3 "${GITHUB_WORKSPACE}/.github/scripts/rusty_v8_bazel.py" resolved-v8-crate-version)" + release_tag="rusty-v8-v${version}" + base_url="https://github.com/openai/codex/releases/download/${release_tag}" + archive="${base_url}/librusty_v8_release_${TARGET}.a.gz" + binding_dir="${RUNNER_TEMP}/rusty_v8" + binding_path="${binding_dir}/src_binding_release_${TARGET}.rs" + mkdir -p "${binding_dir}" + curl -fsSL "${base_url}/src_binding_release_${TARGET}.rs" -o "${binding_path}" + echo "RUSTY_V8_ARCHIVE=${archive}" >> "$GITHUB_ENV" + echo "RUSTY_V8_SRC_BINDING_PATH=${binding_path}" >> "$GITHUB_ENV" - - if: ${{ runner.os == 'macOS' }} - name: MacOS code signing (binaries) - uses: ./.github/actions/macos-code-sign - with: - target: ${{ matrix.target }} - binaries: ${{ matrix.binaries }} - sign-binaries: "true" - sign-dmg: "false" - apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }} - apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} - apple-notarization-key-p8: ${{ 
secrets.APPLE_NOTARIZATION_KEY_P8 }} - apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} - apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} - - - if: ${{ runner.os == 'macOS' && matrix.build_dmg == 'true' }} - name: Build macOS dmg + - name: Cargo build shell: bash - run: | - set -euo pipefail + run: cargo build --locked --target ${{ matrix.target }} --release -p codex-cli --bin codex - target="${{ matrix.target }}" - release_dir="target/${target}/release" - dmg_root="${RUNNER_TEMP}/codex-dmg-root" - volname="Codex (${target})" - dmg_path="${release_dir}/codex-${target}.dmg" - - # The previous "MacOS code signing (binaries)" step signs + notarizes the - # built artifacts in `${release_dir}`. This step packages *those same* - # signed binaries into a dmg. - rm -rf "$dmg_root" - mkdir -p "$dmg_root" - - for binary in ${{ matrix.binaries }}; do - binary_path="${release_dir}/${binary}" - if [[ ! -f "${binary_path}" ]]; then - echo "Binary ${binary_path} not found" - exit 1 - fi - ditto "${binary_path}" "${dmg_root}/${binary}" - done + - uses: actions/upload-artifact@v7 + with: + name: codex-bin-${{ matrix.target }} + path: codex-rs/target/${{ matrix.target }}/release/codex + if-no-files-found: error - rm -f "$dmg_path" - hdiutil create \ - -volname "$volname" \ - -srcfolder "$dmg_root" \ - -format UDZO \ - -ov \ - "$dmg_path" + build-windows: + name: Build - windows-2025 - ${{ matrix.target }} + runs-on: windows-2025 + timeout-minutes: 90 + permissions: + contents: read + defaults: + run: + working-directory: codex-rs + strategy: + fail-fast: false + matrix: + target: + - x86_64-pc-windows-msvc - if [[ ! 
-f "$dmg_path" ]]; then - echo "dmg $dmg_path not found after build" - exit 1 - fi + steps: + - uses: actions/checkout@v6 - - if: ${{ runner.os == 'macOS' && matrix.build_dmg == 'true' }} - name: MacOS code signing (dmg) - uses: ./.github/actions/macos-code-sign + - uses: dtolnay/rust-toolchain@1.93.0 with: - target: ${{ matrix.target }} - sign-binaries: "false" - sign-dmg: "true" - apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }} - apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} - apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }} - apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} - apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} - - - name: Stage artifacts - shell: bash - run: | - dest="dist/${{ matrix.target }}" - mkdir -p "$dest" - - for binary in ${{ matrix.binaries }}; do - cp "target/${{ matrix.target }}/release/${binary}" "$dest/${binary}-${{ matrix.target }}" - if [[ "${{ matrix.target }}" == *linux* ]]; then - cp "target/${{ matrix.target }}/release/${binary}.sigstore" \ - "$dest/${binary}-${{ matrix.target }}.sigstore" - fi - done + targets: ${{ matrix.target }} - if [[ "${{ matrix.build_dmg }}" == "true" ]]; then - cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg" - fi + - name: Cache Cargo home and target dir + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + codex-rs/target + key: ${{ runner.os }}-${{ matrix.target }}-cargo-${{ hashFiles('codex-rs/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.target }}-cargo- - - name: Compress artifacts + - name: Cargo build shell: bash run: | - # Path that contains the uncompressed binaries for the current - # ${{ matrix.target }} - dest="dist/${{ matrix.target }}" - - # For compatibility with environments that lack the `zstd` tool we - # additionally create a `.tar.gz` alongside every binary we publish. 
- # The end result is: - # codex-.zst (existing) - # codex-.tar.gz (new) - - # 1. Produce a .tar.gz for every file in the directory *before* we - # run `zstd --rm`, because that flag deletes the original files. - for f in "$dest"/*; do - base="$(basename "$f")" - # Skip files that are already archives (shouldn't happen, but be - # safe). - if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then - continue - fi - - # Don't try to compress signature bundles. - if [[ "$base" == *.sigstore ]]; then - continue - fi - - # Create per-binary tar.gz - tar -C "$dest" -czf "$dest/${base}.tar.gz" "$base" - - # Also create .zst and remove the uncompressed binaries to keep - # non-Windows artifact directories small. - zstd -T0 -19 --rm "$dest/$base" - done - - - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + cargo build --locked --target ${{ matrix.target }} --release \ + -p codex-cli \ + -p codex-windows-sandbox \ + --bin codex \ + --bin codex-windows-sandbox-setup \ + --bin codex-command-runner + + - uses: actions/upload-artifact@v7 with: - name: ${{ matrix.artifact_name }} - # Upload the per-binary .zst files as well as the new .tar.gz - # equivalents we generated in the previous step. 
+ name: codex-bin-${{ matrix.target }} path: | - codex-rs/dist/${{ matrix.target }}/* + codex-rs/target/${{ matrix.target }}/release/codex.exe + codex-rs/target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe + codex-rs/target/${{ matrix.target }}/release/codex-command-runner.exe + if-no-files-found: error - build-windows: - needs: tag-check - uses: ./.github/workflows/rust-release-windows.yml - with: - release-lto: ${{ contains(github.ref_name, '-alpha') && 'thin' || 'fat' }} - secrets: inherit - - argument-comment-lint-release-assets: - name: argument-comment-lint release assets - needs: tag-check - uses: ./.github/workflows/rust-release-argument-comment-lint.yml - with: - publish: true - - zsh-release-assets: - name: zsh release assets - needs: tag-check - uses: ./.github/workflows/rust-release-zsh.yml - - release: + publish-release: needs: - - build + - build-unix - build-windows - - argument-comment-lint-release-assets - - zsh-release-assets - name: release runs-on: ubuntu-latest permissions: contents: write - actions: read - outputs: - version: ${{ steps.release_name.outputs.name }} - tag: ${{ github.ref_name }} - should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }} - npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }} steps: - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + - uses: actions/checkout@v6 - - name: Generate release notes from tag commit message - id: release_notes + - name: Compute release version + id: compute shell: bash run: | set -euo pipefail + base_version="$(grep -m1 '^version' codex-rs/Cargo.toml | sed -E 's/version *= *"([^"]+)".*/\1/')" + short_sha="${GITHUB_SHA::7}" + echo "base_version=${base_version}" >> "$GITHUB_OUTPUT" + echo "release_version=${base_version}-${short_sha}" >> "$GITHUB_OUTPUT" + echo "release_tag=codext-v${base_version}-${short_sha}" >> "$GITHUB_OUTPUT" - # On tag pushes, GITHUB_SHA may be a tag object for annotated 
tags; - # peel it to the underlying commit. - commit="$(git rev-parse "${GITHUB_SHA}^{commit}")" - notes_path="${RUNNER_TEMP}/release-notes.md" - - # Use the commit message for the commit the tag points at (not the - # annotated tag message). - git log -1 --format=%B "${commit}" > "${notes_path}" - # Ensure trailing newline so GitHub's markdown renderer doesn't - # occasionally run the last line into subsequent content. - echo >> "${notes_path}" - - echo "path=${notes_path}" >> "${GITHUB_OUTPUT}" - - - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8 - with: - path: dist - - - name: List - run: ls -R dist/ - - - name: Delete entries from dist/ that should not go in the release - run: | - rm -rf dist/windows-binaries* - # cargo-timing.html appears under multiple target-specific directories. - # If included in files: dist/**, release upload races on duplicate - # asset names and can fail with 404s. - find dist -type f -name 'cargo-timing.html' -delete - find dist -type d -empty -delete - - ls -R dist/ - - - name: Add config schema release asset - run: | - cp codex-rs/core/config.schema.json dist/config-schema.json - - - name: Define release name - id: release_name - run: | - # Extract the version from the tag name, which is in the format - # "rust-v0.1.0". 
- version="${GITHUB_REF_NAME#rust-v}" - echo "name=${version}" >> $GITHUB_OUTPUT - - - name: Determine npm publish settings - id: npm_publish_settings + - name: Generate release notes + id: release_notes env: - VERSION: ${{ steps.release_name.outputs.name }} + BASE_VERSION: ${{ steps.compute.outputs.base_version }} + shell: bash run: | set -euo pipefail - version="${VERSION}" - - if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - echo "should_publish=true" >> "$GITHUB_OUTPUT" - echo "npm_tag=" >> "$GITHUB_OUTPUT" - elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then - echo "should_publish=true" >> "$GITHUB_OUTPUT" - echo "npm_tag=alpha" >> "$GITHUB_OUTPUT" - else - echo "should_publish=false" >> "$GITHUB_OUTPUT" - echo "npm_tag=" >> "$GITHUB_OUTPUT" - fi - - - name: Setup pnpm - uses: pnpm/action-setup@a8198c4bff370c8506180b035930dea56dbd5288 # v5 - with: - run_install: false - - - name: Setup Node.js for npm packaging - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 + notes_path="${RUNNER_TEMP}/release-notes.md" + upstream_tag="rust-v${BASE_VERSION}" + upstream_label="codex-v${BASE_VERSION}" + upstream_url="https://github.com/openai/codex/releases/tag/${upstream_tag}" + { + printf 'For upstream changes, see [%s](%s).\n\n' "${upstream_label}" "${upstream_url}" + git log -1 --format=%B "${GITHUB_SHA}" + echo + } > "${notes_path}" + echo "path=${notes_path}" >> "$GITHUB_OUTPUT" + + - name: Download build artifacts + uses: actions/download-artifact@v8 with: - node-version: 22 - - - name: Install dependencies - run: pnpm install --frozen-lockfile + pattern: codex-bin-* + path: ${{ runner.temp }}/artifacts - # stage_npm_packages.py requires DotSlash when staging releases. 
- - uses: facebook/install-dotslash@1e4e7b3e07eaca387acb98f1d4720e0bee8dbb6a # v2 - - name: Stage npm packages + - name: Stage release assets env: - GH_TOKEN: ${{ github.token }} - RELEASE_VERSION: ${{ steps.release_name.outputs.name }} - run: | - ./scripts/stage_npm_packages.py \ - --release-version "$RELEASE_VERSION" \ - --package codex \ - --package codex-responses-api-proxy \ - --package codex-sdk - - - name: Stage installer scripts + VERSION: ${{ steps.compute.outputs.release_version }} + shell: bash run: | - cp scripts/install/install.sh dist/install.sh - cp scripts/install/install.ps1 dist/install.ps1 - - - name: Create GitHub Release - uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2 - with: - name: ${{ steps.release_name.outputs.name }} - tag_name: ${{ github.ref_name }} - body_path: ${{ steps.release_notes.outputs.path }} - files: dist/** - # Mark as prerelease only when the version has a suffix after x.y.z - # (e.g. -alpha, -beta). Otherwise publish a normal release. 
- prerelease: ${{ contains(steps.release_name.outputs.name, '-') }} - - - uses: facebook/dotslash-publish-release@9c9ec027515c34db9282a09a25a9cab5880b2c52 # v2 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag: ${{ github.ref_name }} - config: .github/dotslash-config.json - - - uses: facebook/dotslash-publish-release@9c9ec027515c34db9282a09a25a9cab5880b2c52 # v2 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag: ${{ github.ref_name }} - config: .github/dotslash-zsh-config.json + set -euo pipefail + artifacts_root="${RUNNER_TEMP}/artifacts" + out_dir="${GITHUB_WORKSPACE}/dist/release" + mkdir -p "${out_dir}" + + linux_stage="$(mktemp -d "${RUNNER_TEMP}/release-linux-x64-XXXXXX")" + cp "${artifacts_root}/codex-bin-x86_64-unknown-linux-musl/codex" "${linux_stage}/codext" + chmod +x "${linux_stage}/codext" + tar -C "${linux_stage}" -czf "${out_dir}/codext-linux-x64-${VERSION}.tar.gz" codext + + darwin_x64_stage="$(mktemp -d "${RUNNER_TEMP}/release-darwin-x64-XXXXXX")" + cp "${artifacts_root}/codex-bin-x86_64-apple-darwin/codex" "${darwin_x64_stage}/codext" + chmod +x "${darwin_x64_stage}/codext" + tar -C "${darwin_x64_stage}" -czf "${out_dir}/codext-darwin-x64-${VERSION}.tar.gz" codext + + darwin_arm64_stage="$(mktemp -d "${RUNNER_TEMP}/release-darwin-arm64-XXXXXX")" + cp "${artifacts_root}/codex-bin-aarch64-apple-darwin/codex" "${darwin_arm64_stage}/codext" + chmod +x "${darwin_arm64_stage}/codext" + tar -C "${darwin_arm64_stage}" -czf "${out_dir}/codext-darwin-arm64-${VERSION}.tar.gz" codext + + windows_stage="$(mktemp -d "${RUNNER_TEMP}/release-win32-x64-XXXXXX")" + cp "${artifacts_root}/codex-bin-x86_64-pc-windows-msvc/codex.exe" "${windows_stage}/codext.exe" + cp "${artifacts_root}/codex-bin-x86_64-pc-windows-msvc/codex-windows-sandbox-setup.exe" "${windows_stage}/codex-windows-sandbox-setup.exe" + cp "${artifacts_root}/codex-bin-x86_64-pc-windows-msvc/codex-command-runner.exe" "${windows_stage}/codex-command-runner.exe" + ( + cd 
"${windows_stage}" + zip -q -r "${out_dir}/codext-win32-x64-${VERSION}.zip" . + ) - - uses: facebook/dotslash-publish-release@9c9ec027515c34db9282a09a25a9cab5880b2c52 # v2 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag: ${{ github.ref_name }} - config: .github/dotslash-argument-comment-lint-config.json - - - name: Trigger developers.openai.com deploy - # Only trigger the deploy if the release is not a pre-release. - # The deploy is used to update the developers.openai.com website with the new config schema json file. - if: ${{ !contains(steps.release_name.outputs.name, '-') }} - continue-on-error: true + - name: Create or update GitHub Release env: - DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL: ${{ secrets.DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL }} + GH_TOKEN: ${{ github.token }} + RELEASE_NAME: ${{ steps.compute.outputs.release_version }} + RELEASE_TAG: ${{ steps.compute.outputs.release_tag }} + RELEASE_NOTES: ${{ steps.release_notes.outputs.path }} + shell: bash run: | - if ! curl -sS -f -o /dev/null -X POST "$DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL"; then - echo "::warning title=developers.openai.com deploy hook failed::Vercel deploy hook POST failed for ${GITHUB_REF_NAME}" - exit 1 + set -euo pipefail + if gh release view "${RELEASE_TAG}" --repo "${GITHUB_REPOSITORY}" >/dev/null 2>&1; then + gh release edit "${RELEASE_TAG}" \ + --repo "${GITHUB_REPOSITORY}" \ + --title "codext ${RELEASE_NAME}" \ + --notes-file "${RELEASE_NOTES}" \ + --latest + else + gh release create "${RELEASE_TAG}" \ + --repo "${GITHUB_REPOSITORY}" \ + --title "codext ${RELEASE_NAME}" \ + --notes-file "${RELEASE_NOTES}" \ + --target "${GITHUB_SHA}" \ + --latest fi - # Publish to npm using OIDC authentication. 
- # July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/ - # npm docs: https://docs.npmjs.com/trusted-publishers + gh release upload "${RELEASE_TAG}" dist/release/* \ + --repo "${GITHUB_REPOSITORY}" \ + --clobber + publish-npm: - # Publish to npm for stable releases and alpha pre-releases with numeric suffixes. - if: ${{ needs.release.outputs.should_publish_npm == 'true' }} - name: publish-npm - needs: release + needs: + - build-unix + - build-windows runs-on: ubuntu-latest permissions: - id-token: write # Required for OIDC + id-token: write contents: read steps: + - uses: actions/checkout@v6 + + - name: Compute release version + id: compute + shell: bash + run: | + set -euo pipefail + base_version="$(grep -m1 '^version' codex-rs/Cargo.toml | sed -E 's/version *= *"([^"]+)".*/\1/')" + short_sha="${GITHUB_SHA::7}" + echo "release_version=${base_version}-${short_sha}" >> "$GITHUB_OUTPUT" + - name: Setup Node.js - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 + uses: actions/setup-node@v6 with: # Node 24 bundles npm >= 11.5.1, which trusted publishing requires. 
node-version: 24 - registry-url: "https://registry.npmjs.org" - scope: "@openai" + registry-url: https://registry.npmjs.org - - name: Download npm tarballs from release - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - RELEASE_TAG: ${{ needs.release.outputs.tag }} - RELEASE_VERSION: ${{ needs.release.outputs.version }} + - name: Download build artifacts + uses: actions/download-artifact@v8 + with: + pattern: codex-bin-* + path: ${{ runner.temp }}/artifacts + + - name: Assemble vendor tree + shell: bash run: | set -euo pipefail - version="$RELEASE_VERSION" - tag="$RELEASE_TAG" - mkdir -p dist/npm - patterns=( - "codex-npm-${version}.tgz" - "codex-npm-linux-*-${version}.tgz" - "codex-npm-darwin-*-${version}.tgz" - "codex-npm-win32-*-${version}.tgz" - "codex-responses-api-proxy-npm-${version}.tgz" - "codex-sdk-npm-${version}.tgz" - ) - for pattern in "${patterns[@]}"; do - gh release download "$tag" \ - --repo "${GITHUB_REPOSITORY}" \ - --pattern "$pattern" \ - --dir dist/npm - done - - # No NODE_AUTH_TOKEN needed because we use OIDC. 
- - name: Publish to npm + artifacts_root="${RUNNER_TEMP}/artifacts" + vendor_root="${RUNNER_TEMP}/npm-root/vendor" + + mkdir -p \ + "${vendor_root}/x86_64-unknown-linux-musl/codex" \ + "${vendor_root}/x86_64-apple-darwin/codex" \ + "${vendor_root}/aarch64-apple-darwin/codex" \ + "${vendor_root}/x86_64-pc-windows-msvc/codex" + + cp "${artifacts_root}/codex-bin-x86_64-unknown-linux-musl/codex" \ + "${vendor_root}/x86_64-unknown-linux-musl/codex/codex" + cp "${artifacts_root}/codex-bin-x86_64-apple-darwin/codex" \ + "${vendor_root}/x86_64-apple-darwin/codex/codex" + cp "${artifacts_root}/codex-bin-aarch64-apple-darwin/codex" \ + "${vendor_root}/aarch64-apple-darwin/codex/codex" + + cp "${artifacts_root}/codex-bin-x86_64-pc-windows-msvc/codex.exe" \ + "${vendor_root}/x86_64-pc-windows-msvc/codex/codex.exe" + cp "${artifacts_root}/codex-bin-x86_64-pc-windows-msvc/codex-windows-sandbox-setup.exe" \ + "${vendor_root}/x86_64-pc-windows-msvc/codex/codex-windows-sandbox-setup.exe" + cp "${artifacts_root}/codex-bin-x86_64-pc-windows-msvc/codex-command-runner.exe" \ + "${vendor_root}/x86_64-pc-windows-msvc/codex/codex-command-runner.exe" + + - uses: facebook/install-dotslash@v2 + + - name: Install ripgrep payloads + run: python3 codex-cli/scripts/install_native_deps.py --component rg "${RUNNER_TEMP}/npm-root" + + - name: Stage npm tarballs env: - VERSION: ${{ needs.release.outputs.version }} - NPM_TAG: ${{ needs.release.outputs.npm_tag }} + VERSION: ${{ steps.compute.outputs.release_version }} + shell: bash run: | set -euo pipefail - prefix="" - if [[ -n "${NPM_TAG}" ]]; then - prefix="${NPM_TAG}-" - fi - - root_tarball="dist/npm/codex-npm-${VERSION}.tgz" - sdk_tarball="dist/npm/codex-sdk-npm-${VERSION}.tgz" - # Keep this list in sync with CODEX_PLATFORM_PACKAGES in - # codex-cli/scripts/build_npm_package.py. The root wrapper advances - # @openai/codex@latest as soon as it publishes, so every platform - # package it aliases must already exist in the registry first. 
- platform_tarballs=( - "dist/npm/codex-npm-linux-x64-${VERSION}.tgz" - "dist/npm/codex-npm-linux-arm64-${VERSION}.tgz" - "dist/npm/codex-npm-darwin-x64-${VERSION}.tgz" - "dist/npm/codex-npm-darwin-arm64-${VERSION}.tgz" - "dist/npm/codex-npm-win32-x64-${VERSION}.tgz" - "dist/npm/codex-npm-win32-arm64-${VERSION}.tgz" + out_dir="${GITHUB_WORKSPACE}/dist/npm" + mkdir -p "${out_dir}" + vendor_src="${RUNNER_TEMP}/npm-root/vendor" + packages=( + codex + codex-linux-x64 + codex-darwin-x64 + codex-darwin-arm64 + codex-win32-x64 ) - for required_tarball in "${platform_tarballs[@]}" "${root_tarball}"; do - if [[ ! -f "${required_tarball}" ]]; then - echo "Missing npm tarball: ${required_tarball}" - exit 1 + for package in "${packages[@]}"; do + stage_dir="$(mktemp -d "${RUNNER_TEMP}/npm-stage-${package}-XXXXXX")" + if [[ "${package}" == "codex" ]]; then + pack_output="${out_dir}/codex-npm-${VERSION}.tgz" + python3 codex-cli/scripts/build_npm_package.py \ + --package "${package}" \ + --release-version "${VERSION}" \ + --staging-dir "${stage_dir}" \ + --pack-output "${pack_output}" + else + platform="${package#codex-}" + pack_output="${out_dir}/codex-npm-${platform}-${VERSION}.tgz" + python3 codex-cli/scripts/build_npm_package.py \ + --package "${package}" \ + --release-version "${VERSION}" \ + --staging-dir "${stage_dir}" \ + --pack-output "${pack_output}" \ + --vendor-src "${vendor_src}" fi + rm -rf "${stage_dir}" done + - name: Publish to npm + env: + VERSION: ${{ steps.compute.outputs.release_version }} + shell: bash + run: | + set -euo pipefail shopt -s nullglob - other_tarballs=() - for tarball in dist/npm/*-"${VERSION}".tgz; do - if [[ "${tarball}" == "${root_tarball}" || "${tarball}" == "${sdk_tarball}" ]]; then - continue - fi - - is_platform_tarball=false - for platform_tarball in "${platform_tarballs[@]}"; do - if [[ "${tarball}" == "${platform_tarball}" ]]; then - is_platform_tarball=true - break - fi - done - if [[ "${is_platform_tarball}" == true ]]; then - 
continue - fi - - other_tarballs+=("${tarball}") - done - - # Publish the platform packages before the root CLI wrapper. The root - # wrapper advances @openai/codex@latest, so it should only publish - # after the optional dependency versions it references exist. - tarballs=( - "${platform_tarballs[@]}" - "${other_tarballs[@]}" - "${root_tarball}" - ) - if [[ -f "${sdk_tarball}" ]]; then - tarballs+=("${sdk_tarball}") + tarballs=(dist/npm/*-"${VERSION}".tgz) + if [[ ${#tarballs[@]} -eq 0 ]]; then + echo "No npm tarballs found in dist/npm for version ${VERSION}" + exit 1 fi for tarball in "${tarballs[@]}"; do filename="$(basename "${tarball}")" + publish_cmd=(npm publish "${GITHUB_WORKSPACE}/${tarball}" --access public) tag="" case "${filename}" in - codex-npm-linux-*-"${VERSION}".tgz|codex-npm-darwin-*-"${VERSION}".tgz|codex-npm-win32-*-"${VERSION}".tgz) - platform="${filename#codex-npm-}" - platform="${platform%-${VERSION}.tgz}" - tag="${prefix}${platform}" + codex-npm-linux-*-"${VERSION}".tgz) + tag="${filename#codex-npm-}" + tag="${tag%-${VERSION}.tgz}" ;; - codex-npm-"${VERSION}".tgz|codex-responses-api-proxy-npm-"${VERSION}".tgz|codex-sdk-npm-"${VERSION}".tgz) - tag="${NPM_TAG}" + codex-npm-darwin-*-"${VERSION}".tgz) + tag="${filename#codex-npm-}" + tag="${tag%-${VERSION}.tgz}" + ;; + codex-npm-win32-*-"${VERSION}".tgz) + tag="${filename#codex-npm-}" + tag="${tag%-${VERSION}.tgz}" + ;; + codex-npm-"${VERSION}".tgz) + tag="latest" ;; *) echo "Unexpected npm tarball: ${filename}" @@ -727,7 +468,6 @@ jobs: ;; esac - publish_cmd=(npm publish "${GITHUB_WORKSPACE}/${tarball}") if [[ -n "${tag}" ]]; then publish_cmd+=(--tag "${tag}") fi @@ -750,45 +490,3 @@ jobs: exit "${publish_status}" done - - winget: - name: winget - needs: release - # Only publish stable/mainline releases to WinGet; pre-releases include a - # '-' in the semver string (e.g., 1.2.3-alpha.1).
- if: ${{ !contains(needs.release.outputs.version, '-') }} - # This job only invokes a GitHub Action to open/update the winget-pkgs PR; - # it does not execute Windows-only tooling, so Linux is sufficient. - runs-on: ubuntu-latest - permissions: - contents: read - - steps: - - name: Publish to WinGet - uses: vedantmgoyal9/winget-releaser@7bd472be23763def6e16bd06cc8b1cdfab0e2fd5 - with: - identifier: OpenAI.Codex - version: ${{ needs.release.outputs.version }} - release-tag: ${{ needs.release.outputs.tag }} - fork-user: openai-oss-forks - installers-regex: '^codex-(?:x86_64|aarch64)-pc-windows-msvc\.exe\.zip$' - token: ${{ secrets.WINGET_PUBLISH_PAT }} - - update-branch: - name: Update latest-alpha-cli branch - permissions: - contents: write - needs: release - runs-on: ubuntu-latest - - steps: - - name: Update latest-alpha-cli branch - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - set -euo pipefail - gh api \ - repos/${GITHUB_REPOSITORY}/git/refs/heads/latest-alpha-cli \ - -X PATCH \ - -f sha="${GITHUB_SHA}" \ - -F force=true diff --git a/.github/workflows/rusty-v8-release.yml b/.github/workflows/rusty-v8-release.yml deleted file mode 100644 index ee92eff4fa14..000000000000 --- a/.github/workflows/rusty-v8-release.yml +++ /dev/null @@ -1,190 +0,0 @@ -name: rusty-v8-release - -on: - workflow_dispatch: - inputs: - release_tag: - description: Optional release tag. Defaults to rusty-v8-v. - required: false - type: string - publish: - description: Publish the staged musl artifacts to a GitHub release. 
- required: false - default: true - type: boolean - -concurrency: - group: ${{ github.workflow }}::${{ inputs.release_tag || github.run_id }} - cancel-in-progress: false - -jobs: - metadata: - runs-on: ubuntu-latest - outputs: - release_tag: ${{ steps.release_tag.outputs.release_tag }} - v8_version: ${{ steps.v8_version.outputs.version }} - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Set up Python - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 - with: - python-version: "3.12" - - - name: Resolve exact v8 crate version - id: v8_version - shell: bash - run: | - set -euo pipefail - version="$(python3 .github/scripts/rusty_v8_bazel.py resolved-v8-crate-version)" - echo "version=${version}" >> "$GITHUB_OUTPUT" - - - name: Resolve release tag - id: release_tag - env: - RELEASE_TAG_INPUT: ${{ inputs.release_tag }} - V8_VERSION: ${{ steps.v8_version.outputs.version }} - shell: bash - run: | - set -euo pipefail - - release_tag="${RELEASE_TAG_INPUT}" - if [[ -z "${release_tag}" ]]; then - release_tag="rusty-v8-v${V8_VERSION}" - fi - - echo "release_tag=${release_tag}" >> "$GITHUB_OUTPUT" - - build: - name: Build ${{ matrix.target }} - needs: metadata - runs-on: ${{ matrix.runner }} - permissions: - contents: read - actions: read - strategy: - fail-fast: false - matrix: - include: - - runner: ubuntu-24.04 - platform: linux_amd64_musl - target: x86_64-unknown-linux-musl - - runner: ubuntu-24.04-arm - platform: linux_arm64_musl - target: aarch64-unknown-linux-musl - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Set up Bazel - uses: ./.github/actions/setup-bazel-ci - with: - target: ${{ matrix.target }} - - - name: Set up Python - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 - with: - python-version: "3.12" - - - name: Build Bazel V8 release pair - env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - PLATFORM: ${{ 
matrix.platform }} - TARGET: ${{ matrix.target }} - shell: bash - run: | - set -euo pipefail - - target_suffix="${TARGET//-/_}" - pair_target="//third_party/v8:rusty_v8_release_pair_${target_suffix}" - extra_targets=() - if [[ "${TARGET}" == *-unknown-linux-musl ]]; then - extra_targets=( - "@llvm//runtimes/libcxx:libcxx.static" - "@llvm//runtimes/libcxx:libcxxabi.static" - ) - fi - - bazel_args=( - build - -c - opt - "--platforms=@llvm//platforms:${PLATFORM}" - "${pair_target}" - "${extra_targets[@]}" - --build_metadata=COMMIT_SHA=$(git rev-parse HEAD) - ) - - bazel \ - --noexperimental_remote_repo_contents_cache \ - "${bazel_args[@]}" \ - --config=ci-v8 \ - "--remote_header=x-buildbuddy-api-key=${BUILDBUDDY_API_KEY}" - - - name: Stage release pair - env: - PLATFORM: ${{ matrix.platform }} - TARGET: ${{ matrix.target }} - shell: bash - run: | - set -euo pipefail - - python3 .github/scripts/rusty_v8_bazel.py stage-release-pair \ - --platform "${PLATFORM}" \ - --target "${TARGET}" \ - --compilation-mode opt \ - --output-dir "dist/${TARGET}" - - - name: Upload staged musl artifacts - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: rusty-v8-${{ needs.metadata.outputs.v8_version }}-${{ matrix.target }} - path: dist/${{ matrix.target }}/* - - publish-release: - if: ${{ inputs.publish }} - needs: - - metadata - - build - runs-on: ubuntu-latest - permissions: - contents: write - actions: read - - steps: - - name: Ensure publishing from default branch - if: ${{ github.ref_name != github.event.repository.default_branch }} - env: - DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} - shell: bash - run: | - set -euo pipefail - echo "Publishing is only allowed from ${DEFAULT_BRANCH}; current ref is ${GITHUB_REF_NAME}." 
>&2 - exit 1 - - - name: Ensure release tag is new - env: - GH_TOKEN: ${{ github.token }} - RELEASE_TAG: ${{ needs.metadata.outputs.release_tag }} - shell: bash - run: | - set -euo pipefail - - if gh release view "${RELEASE_TAG}" --repo "${GITHUB_REPOSITORY}" > /dev/null 2>&1; then - echo "Release tag ${RELEASE_TAG} already exists; musl artifact tags are immutable." >&2 - exit 1 - fi - - - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8 - with: - path: dist - - - name: Create GitHub Release - uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2 - with: - tag_name: ${{ needs.metadata.outputs.release_tag }} - name: ${{ needs.metadata.outputs.release_tag }} - files: dist/** - # Keep V8 artifact releases out of Codex's normal "latest release" channel. - prerelease: true diff --git a/.github/workflows/sdk.yml b/.github/workflows/sdk.yml deleted file mode 100644 index 45c983ac1ee8..000000000000 --- a/.github/workflows/sdk.yml +++ /dev/null @@ -1,122 +0,0 @@ -name: sdk - -on: - push: - branches: [main] - pull_request: {} - -jobs: - sdks: - runs-on: - group: codex-runners - labels: codex-linux-x64 - timeout-minutes: 10 - steps: - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Install Linux bwrap build dependencies - shell: bash - run: | - set -euo pipefail - sudo apt-get update -y - sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev - - - name: Setup pnpm - uses: pnpm/action-setup@a8198c4bff370c8506180b035930dea56dbd5288 # v5 - with: - run_install: false - - - name: Setup Node.js - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 - with: - node-version: 22 - cache: pnpm - - - name: Set up Bazel CI - id: setup_bazel - uses: ./.github/actions/setup-bazel-ci - with: - target: x86_64-unknown-linux-gnu - - - name: Build codex with Bazel - env: - BUILDBUDDY_API_KEY: ${{ 
secrets.BUILDBUDDY_API_KEY }} - shell: bash - run: | - set -euo pipefail - # Use the shared CI wrapper so fork PRs fall back cleanly when - # BuildBuddy credentials are unavailable. This workflow needs the - # built `codex` binary on disk afterwards, so ask the wrapper to - # override CI's default remote_download_minimal behavior. - ./.github/scripts/run-bazel-ci.sh \ - --remote-download-toplevel \ - -- \ - build \ - --build_metadata=COMMIT_SHA=${GITHUB_SHA} \ - --build_metadata=TAG_job=sdk \ - -- \ - //codex-rs/cli:codex - - # Resolve the exact output file using the same wrapper/config path as - # the build instead of guessing which Bazel convenience symlink is - # available on the runner. - cquery_output="$( - ./.github/scripts/run-bazel-ci.sh \ - -- \ - cquery \ - --output=files \ - -- \ - //codex-rs/cli:codex \ - | grep -E '^(/|bazel-out/)' \ - | tail -n 1 - )" - if [[ "${cquery_output}" = /* ]]; then - codex_bazel_output_path="${cquery_output}" - else - codex_bazel_output_path="${GITHUB_WORKSPACE}/${cquery_output}" - fi - if [[ -z "${codex_bazel_output_path}" ]]; then - echo "Bazel did not report an output path for //codex-rs/cli:codex." >&2 - exit 1 - fi - if [[ ! -e "${codex_bazel_output_path}" ]]; then - echo "Unable to locate the Bazel-built codex binary at ${codex_bazel_output_path}." >&2 - exit 1 - fi - - # Stage the binary into the workspace and point the SDK tests at that - # stable path. The tests spawn `codex` directly many times, so using a - # normal executable path is more reliable than invoking Bazel for each - # test process. 
- install_dir="${GITHUB_WORKSPACE}/.tmp/sdk-ci" - mkdir -p "${install_dir}" - install -m 755 "${codex_bazel_output_path}" "${install_dir}/codex" - echo "CODEX_EXEC_PATH=${install_dir}/codex" >> "$GITHUB_ENV" - - - name: Warm up Bazel-built codex - shell: bash - run: | - set -euo pipefail - "${CODEX_EXEC_PATH}" --version - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Build SDK packages - run: pnpm -r --filter ./sdk/typescript run build - - - name: Lint SDK packages - run: pnpm -r --filter ./sdk/typescript run lint - - - name: Test SDK packages - run: pnpm -r --filter ./sdk/typescript run test - - - name: Save bazel repository cache - if: always() && !cancelled() && steps.setup_bazel.outputs.cache-hit != 'true' - continue-on-error: true - uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5 - with: - path: | - ~/.cache/bazel-repo-cache - key: bazel-cache-x86_64-unknown-linux-gnu-${{ hashFiles('MODULE.bazel', 'codex-rs/Cargo.lock', 'codex-rs/Cargo.toml') }} diff --git a/.github/workflows/v8-canary.yml b/.github/workflows/v8-canary.yml deleted file mode 100644 index 119d0422759e..000000000000 --- a/.github/workflows/v8-canary.yml +++ /dev/null @@ -1,136 +0,0 @@ -name: v8-canary - -on: - pull_request: - paths: - - ".github/actions/setup-bazel-ci/**" - - ".github/scripts/rusty_v8_bazel.py" - - ".github/workflows/rusty-v8-release.yml" - - ".github/workflows/v8-canary.yml" - - "MODULE.bazel" - - "MODULE.bazel.lock" - - "codex-rs/Cargo.toml" - - "patches/BUILD.bazel" - - "patches/v8_*.patch" - - "third_party/v8/**" - push: - branches: - - main - paths: - - ".github/actions/setup-bazel-ci/**" - - ".github/scripts/rusty_v8_bazel.py" - - ".github/workflows/rusty-v8-release.yml" - - ".github/workflows/v8-canary.yml" - - "MODULE.bazel" - - "MODULE.bazel.lock" - - "codex-rs/Cargo.toml" - - "patches/BUILD.bazel" - - "patches/v8_*.patch" - - "third_party/v8/**" - workflow_dispatch: - -concurrency: - group: ${{ github.workflow 
}}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }} - cancel-in-progress: ${{ github.ref_name != 'main' }} - -jobs: - metadata: - runs-on: ubuntu-latest - outputs: - v8_version: ${{ steps.v8_version.outputs.version }} - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Set up Python - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 - with: - python-version: "3.12" - - - name: Resolve exact v8 crate version - id: v8_version - shell: bash - run: | - set -euo pipefail - version="$(python3 .github/scripts/rusty_v8_bazel.py resolved-v8-crate-version)" - echo "version=${version}" >> "$GITHUB_OUTPUT" - - build: - name: Build ${{ matrix.target }} - needs: metadata - runs-on: ${{ matrix.runner }} - permissions: - contents: read - actions: read - strategy: - fail-fast: false - matrix: - include: - - runner: ubuntu-24.04 - platform: linux_amd64_musl - target: x86_64-unknown-linux-musl - - runner: ubuntu-24.04-arm - platform: linux_arm64_musl - target: aarch64-unknown-linux-musl - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - - name: Set up Bazel - uses: ./.github/actions/setup-bazel-ci - with: - target: ${{ matrix.target }} - - - name: Set up Python - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 - with: - python-version: "3.12" - - - name: Build Bazel V8 release pair - env: - BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }} - PLATFORM: ${{ matrix.platform }} - TARGET: ${{ matrix.target }} - shell: bash - run: | - set -euo pipefail - - target_suffix="${TARGET//-/_}" - pair_target="//third_party/v8:rusty_v8_release_pair_${target_suffix}" - extra_targets=( - "@llvm//runtimes/libcxx:libcxx.static" - "@llvm//runtimes/libcxx:libcxxabi.static" - ) - - bazel_args=( - build - "--platforms=@llvm//platforms:${PLATFORM}" - "${pair_target}" - "${extra_targets[@]}" - 
--build_metadata=COMMIT_SHA=$(git rev-parse HEAD) - ) - - bazel \ - --noexperimental_remote_repo_contents_cache \ - "${bazel_args[@]}" \ - --config=ci-v8 \ - "--remote_header=x-buildbuddy-api-key=${BUILDBUDDY_API_KEY}" - - - name: Stage release pair - env: - PLATFORM: ${{ matrix.platform }} - TARGET: ${{ matrix.target }} - shell: bash - run: | - set -euo pipefail - - python3 .github/scripts/rusty_v8_bazel.py stage-release-pair \ - --platform "${PLATFORM}" \ - --target "${TARGET}" \ - --output-dir "dist/${TARGET}" - - - name: Upload staged musl artifacts - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - with: - name: v8-canary-${{ needs.metadata.outputs.v8_version }}-${{ matrix.target }} - path: dist/${{ matrix.target }}/* diff --git a/.github/workflows/zstd b/.github/workflows/zstd deleted file mode 100755 index 7c601a5a99a3..000000000000 --- a/.github/workflows/zstd +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env dotslash - -// This DotSlash file wraps zstd for Windows runners. -// The upstream release provides win32/win64 binaries; for windows-aarch64 we -// use the win64 artifact via Windows x64 emulation. 
-{ - "name": "zstd", - "platforms": { - "windows-x86_64": { - "size": 1747181, - "hash": "sha256", - "digest": "acb4e8111511749dc7a3ebedca9b04190e37a17afeb73f55d4425dbf0b90fad9", - "format": "zip", - "path": "zstd-v1.5.7-win64/zstd.exe", - "providers": [ - { - "url": "https://github.com/facebook/zstd/releases/download/v1.5.7/zstd-v1.5.7-win64.zip" - }, - { - "type": "github-release", - "repo": "facebook/zstd", - "tag": "v1.5.7", - "name": "zstd-v1.5.7-win64.zip" - } - ] - }, - "windows-aarch64": { - "size": 1747181, - "hash": "sha256", - "digest": "acb4e8111511749dc7a3ebedca9b04190e37a17afeb73f55d4425dbf0b90fad9", - "format": "zip", - "path": "zstd-v1.5.7-win64/zstd.exe", - "providers": [ - { - "url": "https://github.com/facebook/zstd/releases/download/v1.5.7/zstd-v1.5.7-win64.zip" - }, - { - "type": "github-release", - "repo": "facebook/zstd", - "tag": "v1.5.7", - "name": "zstd-v1.5.7-win64.zip" - } - ] - } - } -} diff --git a/AGENTS.md b/AGENTS.md index 6939d146b00e..bb8f6d86e6a0 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,65 +1,20 @@ # Rust/codex-rs -In the codex-rs folder where the rust code lives: - -- Crate names are prefixed with `codex-`. For example, the `core` folder's crate is named `codex-core` -- When using format! and you can inline variables into {}, always do that. -- Install any commands the repo relies on (for example `just`, `rg`, or `cargo-insta`) if they aren't already available before running instructions here. -- Never add or modify any code related to `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` or `CODEX_SANDBOX_ENV_VAR`. - - You operate in a sandbox where `CODEX_SANDBOX_NETWORK_DISABLED=1` will be set whenever you use the `shell` tool. Any existing code that uses `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` was authored with this fact in mind. It is often used to early exit out of tests that the author knew you would not be able to run given your sandbox limitations. 
- - Similarly, when you spawn a process using Seatbelt (`/usr/bin/sandbox-exec`), `CODEX_SANDBOX=seatbelt` will be set on the child process. Integration tests that want to run Seatbelt themselves cannot be run under Seatbelt, so checks for `CODEX_SANDBOX=seatbelt` are also often used to early exit out of tests, as appropriate. -- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if -- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args -- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls -- Avoid bool or ambiguous `Option` parameters that force callers to write hard-to-read code such as `foo(false)` or `bar(None)`. Prefer enums, named methods, newtypes, or other idiomatic Rust API shapes when they keep the callsite self-documenting. -- When you cannot make that API change and still need a small positional-literal callsite in Rust, follow the `argument_comment_lint` convention: - - Use an exact `/*param_name*/` comment before opaque literal arguments such as `None`, booleans, and numeric literals when passing them by position. - - Do not add these comments for string or char literals unless the comment adds real clarity; those literals are intentionally exempt from the lint. - - The parameter name in the comment must exactly match the callee signature. - - You can run `just argument-comment-lint` to run the lint check locally. This is powered by Bazel, so running it the first time can be slow if Bazel is not warmed up, though incremental invocations should take <15s. Most of the time, it is best to update the PR and let CI take responsibility for checking this (or run it asynchronously in the background after submitting the PR). Note CI checks all three platforms, which the local run does not. 
-- When possible, make `match` statements exhaustive and avoid wildcard arms. -- Newly added traits should include doc comments that explain their role and how implementations are expected to use them. -- Discourage both `#[async_trait]` and `#[allow(async_fn_in_trait)]` in Rust traits. - - Prefer native RPITIT trait methods with explicit `Send` bounds on the returned future, as in `3c7f013f9735` / `#16630`. - - Preferred trait shape: - `fn foo(&self, ...) -> impl std::future::Future + Send;` - - Implementations may still use `async fn foo(&self, ...) -> T` when they satisfy that contract. - - Do not use `#[allow(async_fn_in_trait)]` as a shortcut around spelling the future contract explicitly. -- When writing tests, prefer comparing the equality of entire objects over fields one by one. -- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable. -- Prefer private modules and explicitly exported public crate API. -- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`. -- When working with MCP tool calls, prefer using `codex-rs/codex-mcp/src/mcp_connection_manager.rs` to handle mutation of tools and tool calls. Aim to minimize the footprint of changes and leverage existing abstractions rather than plumbing code through multiple levels of function calls. -- If you change Rust dependencies (`Cargo.toml` or `Cargo.lock`), run `just bazel-lock-update` from the - repo root to refresh `MODULE.bazel.lock`, and include that lockfile update in the same change. -- After dependency changes, run `just bazel-lock-check` from the repo root so lockfile drift is caught - locally before CI. -- Bazel does not automatically make source-tree files available to compile-time Rust file access. 
If - you add `include_str!`, `include_bytes!`, `sqlx::migrate!`, or similar build-time file or - directory reads, update the crate's `BUILD.bazel` (`compile_data`, `build_script_data`, or test - data) or Bazel may fail even when Cargo passes. -- Do not create small helper methods that are referenced only once. -- Avoid large modules: - - Prefer adding new modules instead of growing existing ones. - - Target Rust modules under 500 LoC, excluding tests. - - If a file exceeds roughly 800 LoC, add new functionality in a new module instead of extending - the existing file unless there is a strong documented reason not to. - - This rule applies especially to high-touch files that already attract unrelated changes, such - as `codex-rs/tui/src/app.rs`, `codex-rs/tui/src/bottom_pane/chat_composer.rs`, - `codex-rs/tui/src/bottom_pane/footer.rs`, `codex-rs/tui/src/chatwidget.rs`, - `codex-rs/tui/src/bottom_pane/mod.rs`, and similarly central orchestration modules. - - When extracting code from a large module, move the related tests and module/type docs toward - the new implementation so the invariants stay close to the code that owns them. - - Avoid adding new standalone methods to `codex-rs/tui/src/chatwidget.rs` unless the change is - trivial; prefer new modules/files and keep `chatwidget.rs` focused on orchestration. -- When running Rust commands (e.g. `just fix` or `cargo test`) be patient with the command and never try to kill them using the PID. Rust lock can make the execution slow, this is expected. - -Run `just fmt` (in `codex-rs` directory) automatically after you have finished making Rust code changes; do not ask for approval to run it. Additionally, run the tests: - -1. Run the test for the specific project that was changed. For example, if changes were made in `codex-rs/tui`, run `cargo test -p codex-tui`. -2. 
Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test` (or `just test` if `cargo-nextest` is installed). Avoid `--all-features` for routine local runs because it expands the build matrix and can significantly increase `target/` disk usage; use it only when you specifically need full feature coverage. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite. - -Before finalizing a large change to `codex-rs`, run `just fix -p ` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Do not re-run tests after running `fix` or `fmt`. + +## Temporary Reapply Guardrails (`rust-v0.128.0`) + +- Current work on this branch is an upstream reapply / re-implementation for `rust-v0.128.0`. +- Only implementation code and necessary docs may change for this task. Do not add or modify tests or snapshot files. +- Do not run lint / format / auto-fix commands for this reapply, including `cargo fmt`, `just fmt`, `cargo clippy`, `cargo clippy --fix`, and `just fix`. +- Acceptance for this reapply is limited to the `codex-upstream-reapply` skill criteria, including `cd codex-rs && cargo build -p codex-cli` and `cd codex-rs && cargo build -p codex-cli --release`. + + + + + + + + ## The `codex-core` crate diff --git a/CHANGED.md b/CHANGED.md new file mode 100644 index 000000000000..ac0bdea460f0 --- /dev/null +++ b/CHANGED.md @@ -0,0 +1,68 @@ +# Changes in This Fork + +This file captures the full set of changes currently in the working tree. + +## TUI composer draft clipboard shortcut + +- Added `Ctrl+Shift+C` in the TUI composer to copy the current draft to the system clipboard when the input contains text. +- Existing `Ctrl+C` behavior stays unchanged. 
+- When the composer has no copyable text, `Ctrl+Shift+C` falls back to the existing `Ctrl+C` clear/interrupt/quit path. +- On WSL2, composer draft copy reuses the existing Windows clipboard fallback so copies still land in the Windows system clipboard. +- `Ctrl+Shift+C` now takes its own composer-copy path instead of falling through to the existing `Ctrl+C` clear/interrupt/quit behavior when draft text is present. +- Added footer shortcut help text for the new draft-copy binding. +- `rust-v0.118.0` removed the old `tui_app_server` crate upstream, so this behavior now lives in the app-server-backed `codex-rs/tui` surface only. + +## TUI status header and polling + +- Added a status header above the composer in the app-server-backed `codex-rs/tui` surface. It shows model + reasoning effort, current directory, git branch/ahead/behind/changes, and rate-limit remaining/reset time. +- Git status is collected in the background (15s interval, 2s timeout) and rendered when available. +- The directory segment represents the session/thread `cwd`, not a one-off tool `workdir`. +- When the session `cwd` changes (for example after switching into a new worktree), the git-status poller now rebinds to that new `cwd`, clears stale git state, and ignores late results from the previous `cwd`. +- ChatGPT `5h` / weekly usage-limit snapshots in the TUI now refresh in the background every 15 seconds, so the header and any `/statusline` limit items keep moving while the UI is otherwise idle. +- `rust-v0.118.0` removed the old `tui_app_server` crate upstream, so the reapply keeps only the surviving TUI path aligned with the status-header skill. + +## TUI auth.json watcher + +- The running TUI now watches `CODEX_HOME/auth.json` and reloads auth when the file changes. +- Watch notifications are now trailing-debounced so reload happens after writes settle, reducing partial-file reads. 
+- If `auth.json` changes while the TUI still has an active task/turn running, auth reload is deferred until that work fully finishes; Codex does not hot-swap auth in the middle of the running task. +- Auth reload failures no longer clear cached auth (so transient parse/read errors do not appear as a logout). +- On auth reload failure, the TUI retries every 5 seconds for up to 3 attempts before surfacing a final warning. +- When the account identity changes, the TUI surfaces a warning in the transcript (including old/new emails when available). +- Auth change warnings now show the account plan type (e.g., Plus/Team/Free/Pro) instead of the generic ChatGPT label. +- Rate-limit state and polling are refreshed after auth changes so the header reflects the new account. +- That post-task auth refresh also resets cached rate-limit warning/prompt state for the new auth snapshot, so stale usage-limit/UI state from the previous auth context does not keep re-triggering after the reload. +- The TUI now supports `[tui].usage_limit_resume_prompt` for the synthetic recovery user turn sent after `UsageLimitExceeded`. If the field is unset, Codext uses the built-in default recovery prompt; if the field is set to an empty string, Codext disables the automatic recovery turn. +- When a turn hits `UsageLimitExceeded`, the TUI now queues that synthetic recovery turn ahead of other queued user input. If an `auth.json` reload is also pending, the reload still runs first, and only then does Codext submit the recovery turn before draining later queued inputs. +- After a turn stops on `UsageLimitExceeded`, Codext now keeps that synthetic recovery turn parked until the next `auth.json` reload that actually changes account identity, so switching accounts can continue the interrupted task without a manual resend. 
+- If the user manually submits a new message before that auth reload arrives, Codext clears the parked usage-limit recovery turn instead of replaying the stale synthetic prompt later. + +## Approval fallback when auto-review is unavailable + +- When automatic approval review times out or fails internally (for example, the reviewer hits a usage limit), sandbox approval requests now fall back to an explicit user approval prompt instead of stopping at a hard auto-review denial. +- The TUI no longer renders a misleading `Request denied ...` history line for those reviewer-failure cases; the warning remains visible and the manual approval prompt follows. + +## Collaboration modes and config overrides + +- Added `collaboration_modes` config overrides with per-mode `model` and `reasoning_effort` fields (plan/code). +- Collaboration mode presets now derive defaults from `/model` + reasoning effort and apply the optional overrides. +- The app-server collaboration-mode list uses these overrides and the resolved base model so UI and API stay aligned. +- Built-in Plan preset keeps `medium` reasoning effort by default, while allowing per-mode override via config. + +## AGENTS.md reload semantics + +- On each new user turn, Codex now checks whether project docs (`AGENTS.md` hierarchy) changed. +- If changed, it reloads instructions before creating the turn, so updates made during a running turn take effect on the next turn. +- When a reload happens, Codex emits an explicit warning in the transcript: + `AGENTS.md instructions changed. Reloaded and applied starting this turn.` + +## TUI exit resume command + +- Added a fork requirement that user-facing resume hints use `codext resume <session-id>` / `codext resume --last` instead of `codex resume ...`. +- This includes the final resume hint shown after exiting the TUI and other resume guidance surfaced inside the TUI. 
+ +## WSL bubblewrap `.codex` artifact + +- Fixed the Linux bubblewrap sandbox path that previously protected a missing top-level `.codex` by bind-mounting `/dev/null` onto the first missing component. +- Missing project-local `.codex` read-only carveouts now use a sandbox-local read-only blocker and the Linux sandbox helper removes any empty host-side blocker file after bubblewrap exits. +- Existing project-local `.codex` paths remain protected by the normal read-only remount path. diff --git a/README.md b/README.md index 5cc7fd4953cb..691e641c2d12 100644 --- a/README.md +++ b/README.md @@ -1,60 +1,89 @@ -

npm i -g @openai/codex
or brew install --cask codex

-

Codex CLI is a coding agent from OpenAI that runs locally on your computer. -

- Codex CLI splash -

-
-If you want Codex in your code editor (VS Code, Cursor, Windsurf), install in your IDE. -
If you want the desktop app experience, run codex app or visit the Codex App page. -
If you are looking for the cloud-based agent from OpenAI, Codex Web, go to chatgpt.com/codex.

+# Codext ---- +An opinionated Codex CLI. This is strictly a personal hobby project, forked from openai/codex. -## Quickstart +![Codex build](https://img.shields.io/static/v1?label=codex%20build&message=rust-v0.128.0-e4310be51f&color=2ea043) -### Installing and running Codex CLI +![TUI]( +https://github.com/user-attachments/assets/127abbc2-cb30-4d6e-8a81-ce707260c045) -Install globally with your preferred package manager: +## Quick Start + +Choose one of these two ways: + +* Install from npm: ```shell -# Install using npm -npm install -g @openai/codex +npm i -g @loongphy/codext ``` +* Build from source: + ```shell -# Install using Homebrew -brew install --cask codex +cd codex-rs +cargo run --bin codex ``` -Then simply run `codex` to get started. +## Features + +> Full change log: see [CHANGED.md](./CHANGED.md). -
-You can also go to the latest GitHub Release and download the appropriate binary for your platform. +* `Ctrl+Shift+C` in the TUI composer copies the current draft to the system clipboard; `Ctrl+C` keeps its existing behavior, and empty drafts still fall back to the old `Ctrl+C` path. +* TUI status header with model/effort, cwd, git summary, and rate-limit status. +* Collaboration mode presets accept per-mode overrides and default to the active `/model` settings. Example: -Each GitHub Release contains many executables, but in practice, you likely want one of these: + ```toml + # config.toml + [collaboration_modes.plan] + model = "gpt-5.4" + reasoning_effort = "xhigh" -- macOS - - Apple Silicon/arm64: `codex-aarch64-apple-darwin.tar.gz` - - x86_64 (older Mac hardware): `codex-x86_64-apple-darwin.tar.gz` -- Linux - - x86_64: `codex-x86_64-unknown-linux-musl.tar.gz` - - arm64: `codex-aarch64-unknown-linux-musl.tar.gz` + [collaboration_modes.code] + model = "gpt-5.4" + ``` -Each archive contains a single entry with the platform baked into the name (e.g., `codex-x86_64-unknown-linux-musl`), so you likely want to rename it to `codex` after extracting it. +* TUI watches `auth.json` for external login changes and reloads auth automatically after writes settle. If a task is still running, the reload waits until the turn is idle, then refreshes rate limits and warns on account switch. When a turn stops on a usage limit, Codext queues a synthetic user turn ahead of other queued follow-ups and auto-dispatches it after the next auth reload that changes account identity; if a reload is already pending, that reload is applied first. This works well with [codex-auth](https://github.com/Loongphy/codex-auth) when you refresh or switch login state outside the TUI. +* The synthetic recovery turn text is configurable with `[tui].usage_limit_resume_prompt`. Leave it unset to use the built-in default, or set it to `""` to disable the automatic recovery turn entirely. 
The built-in default is: -
+ ```text + The previous turn stopped because the active account hit a usage limit. Any pending auth reload has already been applied. Please continue the previous coding task from where it stopped, and use apply_patch for any required file edits. + ``` -### Using Codex with your ChatGPT plan + Example: -Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Business, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt). + ```toml + [tui] + usage_limit_resume_prompt = "" + ``` +* AGENTS.md and project-doc instructions are refreshed on each new user turn, and Codex shows an explicit warning when a refresh is applied. -You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key). +## Project Goals -## Docs +We will never merge code from the upstream repo; instead, we re-implement our changes on top of the latest upstream code. + +Iteration flow (aligned with `.agents/skills/codex-upstream-reapply`): + +```mermaid +flowchart TD + A[Freeze old branch: commit changes + intent docs] --> B[Fetch upstream tags] + B --> C[Pick tag + create new branch from tag] + C --> D[Generate reimplementation bundle] + D --> E[Read old branch + bundle for intent] + E --> F[Re-implement changes on new branch] + F --> G[Sanity check diffs vs tag] + G --> H[Force-push to fork main] +``` + +## Skills + +When syncing to the latest upstream codex version, use `.agents/skills/codex-upstream-reapply` to re-implement our custom requirements on top of the newest code, avoiding merge conflicts from the old branch history. 
+ +Example: + +``` +$codex-upstream-reapply old_branch feat/rust-v0.94.0, new origin tag: rust-v0.98.0 +``` -- [**Codex Documentation**](https://developers.openai.com/codex) -- [**Contributing**](./docs/contributing.md) -- [**Installing & building**](./docs/install.md) -- [**Open source fund**](./docs/open-source-fund.md) +## Credits -This repository is licensed under the [Apache-2.0 License](LICENSE). +Status bar design reference: diff --git a/codex-cli/bin/codex.js b/codex-cli/bin/codex.js index 67ab3e2d95df..53f97a53cfdf 100755 --- a/codex-cli/bin/codex.js +++ b/codex-cli/bin/codex.js @@ -1,8 +1,8 @@ #!/usr/bin/env node -// Unified entry point for the Codex CLI. +// Unified entry point for the Codext CLI. import { spawn } from "node:child_process"; -import { existsSync } from "fs"; +import { chmodSync, existsSync, statSync } from "fs"; import { createRequire } from "node:module"; import path from "path"; import { fileURLToPath } from "url"; @@ -13,12 +13,10 @@ const __dirname = path.dirname(__filename); const require = createRequire(import.meta.url); const PLATFORM_PACKAGE_BY_TARGET = { - "x86_64-unknown-linux-musl": "@openai/codex-linux-x64", - "aarch64-unknown-linux-musl": "@openai/codex-linux-arm64", - "x86_64-apple-darwin": "@openai/codex-darwin-x64", - "aarch64-apple-darwin": "@openai/codex-darwin-arm64", - "x86_64-pc-windows-msvc": "@openai/codex-win32-x64", - "aarch64-pc-windows-msvc": "@openai/codex-win32-arm64", + "x86_64-unknown-linux-musl": "@loongphy/codext-linux-x64", + "x86_64-apple-darwin": "@loongphy/codext-darwin-x64", + "aarch64-apple-darwin": "@loongphy/codext-darwin-arm64", + "x86_64-pc-windows-msvc": "@loongphy/codext-win32-x64", }; const { platform, arch } = process; @@ -31,9 +29,6 @@ switch (platform) { case "x64": targetTriple = "x86_64-unknown-linux-musl"; break; - case "arm64": - targetTriple = "aarch64-unknown-linux-musl"; - break; default: break; } @@ -55,9 +50,6 @@ switch (platform) { case "x64": targetTriple = 
"x86_64-pc-windows-msvc"; break; - case "arm64": - targetTriple = "aarch64-pc-windows-msvc"; - break; default: break; } @@ -95,10 +87,10 @@ try { const packageManager = detectPackageManager(); const updateCommand = packageManager === "bun" - ? "bun install -g @openai/codex@latest" - : "npm install -g @openai/codex@latest"; + ? "bun install -g @loongphy/codext@latest" + : "npm install -g @loongphy/codext@latest"; throw new Error( - `Missing optional dependency ${platformPackage}. Reinstall Codex: ${updateCommand}`, + `Missing optional dependency ${platformPackage}. Reinstall Codext: ${updateCommand}`, ); } } @@ -107,16 +99,29 @@ if (!vendorRoot) { const packageManager = detectPackageManager(); const updateCommand = packageManager === "bun" - ? "bun install -g @openai/codex@latest" - : "npm install -g @openai/codex@latest"; + ? "bun install -g @loongphy/codext@latest" + : "npm install -g @loongphy/codext@latest"; throw new Error( - `Missing optional dependency ${platformPackage}. Reinstall Codex: ${updateCommand}`, + `Missing optional dependency ${platformPackage}. Reinstall Codext: ${updateCommand}`, ); } const archRoot = path.join(vendorRoot, targetTriple); const binaryPath = path.join(archRoot, "codex", codexBinaryName); +function ensureExecutable(filePath) { + if (process.platform === "win32" || !existsSync(filePath)) { + return; + } + + const currentMode = statSync(filePath).mode; + if ((currentMode & 0o111) !== 0) { + return; + } + + chmodSync(filePath, currentMode | 0o111); +} + // Use an asynchronous spawn instead of spawnSync so that Node is able to // respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is // executing. 
This allows us to forward those signals to the child process @@ -134,7 +139,7 @@ function getUpdatedPath(newDirs) { } /** - * Use heuristics to detect the package manager that was used to install Codex + * Use heuristics to detect the package manager that was used to install Codext * in order to give the user a hint about how to update it. */ function detectPackageManager() { @@ -172,6 +177,8 @@ const packageManagerEnvVar = : "CODEX_MANAGED_BY_NPM"; env[packageManagerEnvVar] = "1"; +ensureExecutable(binaryPath); + const child = spawn(binaryPath, process.argv.slice(2), { stdio: "inherit", env, diff --git a/codex-cli/package.json b/codex-cli/package.json index 5fbac8300b38..c679948527e2 100644 --- a/codex-cli/package.json +++ b/codex-cli/package.json @@ -1,9 +1,9 @@ { - "name": "@openai/codex", + "name": "@loongphy/codext", "version": "0.0.0-dev", "license": "Apache-2.0", "bin": { - "codex": "bin/codex.js" + "codext": "bin/codex.js" }, "type": "module", "engines": { @@ -15,8 +15,8 @@ ], "repository": { "type": "git", - "url": "git+https://github.com/openai/codex.git", + "url": "git+https://github.com/Loongphy/codext.git", "directory": "codex-cli" }, - "packageManager": "pnpm@10.33.0+sha512.10568bb4a6afb58c9eb3630da90cc9516417abebd3fabbe6739f0ae795728da1491e9db5a544c76ad8eb7570f5c4bb3d6c637b2cb41bfdcdb47fa823c8649319" + "packageManager": "pnpm@10.29.3+sha512.498e1fb4cca5aa06c1dcf2611e6fafc50972ffe7189998c409e90de74566444298ffe43e6cd2acdc775ba1aa7cc5e092a8b7054c811ba8c5770f84693d33d2dc" } diff --git a/codex-cli/scripts/build_npm_package.py b/codex-cli/scripts/build_npm_package.py index eda6c2615282..c27bd67968fe 100755 --- a/codex-cli/scripts/build_npm_package.py +++ b/codex-cli/scripts/build_npm_package.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -"""Stage and optionally package the @openai/codex npm module.""" +"""Stage and optionally package the @loongphy/codext npm module.""" import argparse import json @@ -14,53 +14,39 @@ REPO_ROOT = CODEX_CLI_ROOT.parent 
RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm" CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript" -CODEX_NPM_NAME = "@openai/codex" +CODEX_NPM_NAME = "@loongphy/codext" # `npm_name` is the local optional-dependency alias consumed by `bin/codex.js`. -# The underlying package published to npm is always `@openai/codex`. +# The underlying package published to npm is always `@loongphy/codext`. CODEX_PLATFORM_PACKAGES: dict[str, dict[str, str]] = { "codex-linux-x64": { - "npm_name": "@openai/codex-linux-x64", + "npm_name": "@loongphy/codext-linux-x64", "npm_tag": "linux-x64", "target_triple": "x86_64-unknown-linux-musl", "os": "linux", "cpu": "x64", }, - "codex-linux-arm64": { - "npm_name": "@openai/codex-linux-arm64", - "npm_tag": "linux-arm64", - "target_triple": "aarch64-unknown-linux-musl", - "os": "linux", - "cpu": "arm64", - }, "codex-darwin-x64": { - "npm_name": "@openai/codex-darwin-x64", + "npm_name": "@loongphy/codext-darwin-x64", "npm_tag": "darwin-x64", "target_triple": "x86_64-apple-darwin", "os": "darwin", "cpu": "x64", }, "codex-darwin-arm64": { - "npm_name": "@openai/codex-darwin-arm64", + "npm_name": "@loongphy/codext-darwin-arm64", "npm_tag": "darwin-arm64", "target_triple": "aarch64-apple-darwin", "os": "darwin", "cpu": "arm64", }, "codex-win32-x64": { - "npm_name": "@openai/codex-win32-x64", + "npm_name": "@loongphy/codext-win32-x64", "npm_tag": "win32-x64", "target_triple": "x86_64-pc-windows-msvc", "os": "win32", "cpu": "x64", }, - "codex-win32-arm64": { - "npm_name": "@openai/codex-win32-arm64", - "npm_tag": "win32-arm64", - "target_triple": "aarch64-pc-windows-msvc", - "os": "win32", - "cpu": "arm64", - }, } PACKAGE_EXPANSIONS: dict[str, list[str]] = { @@ -70,11 +56,9 @@ PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = { "codex": [], "codex-linux-x64": ["codex", "rg"], - "codex-linux-arm64": ["codex", "rg"], "codex-darwin-x64": ["codex", "rg"], "codex-darwin-arm64": ["codex", "rg"], "codex-win32-x64": 
["codex", "rg", "codex-windows-sandbox-setup", "codex-command-runner"], - "codex-win32-arm64": ["codex", "rg", "codex-windows-sandbox-setup", "codex-command-runner"], "codex-responses-api-proxy": ["codex-responses-api-proxy"], "codex-sdk": [], } @@ -96,7 +80,7 @@ def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.") + parser = argparse.ArgumentParser(description="Build or stage the Codext CLI npm package.") parser.add_argument( "--package", choices=PACKAGE_CHOICES, @@ -407,6 +391,7 @@ def copy_native_binaries( if dest_component_dir.exists(): shutil.rmtree(dest_component_dir) shutil.copytree(src_component_dir, dest_component_dir) + ensure_executable_files(dest_component_dir) if target_filter is not None: missing_targets = sorted(target_filter - copied_targets) @@ -415,6 +400,15 @@ def copy_native_binaries( raise RuntimeError(f"Missing target directories in vendor source: {missing_list}") +def ensure_executable_files(root: Path) -> None: + for path in root.rglob("*"): + if not path.is_file(): + continue + + current_mode = path.stat().st_mode + path.chmod(current_mode | 0o111) + + def run_npm_pack(staging_dir: Path, output_path: Path) -> Path: output_path = output_path.resolve() output_path.parent.mkdir(parents=True, exist_ok=True) diff --git a/codex-cli/scripts/install_native_deps.py b/codex-cli/scripts/install_native_deps.py index 58fbd370fc15..e5cd02d996e6 100755 --- a/codex-cli/scripts/install_native_deps.py +++ b/codex-cli/scripts/install_native_deps.py @@ -25,11 +25,9 @@ RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg" BINARY_TARGETS = ( "x86_64-unknown-linux-musl", - "aarch64-unknown-linux-musl", "x86_64-apple-darwin", "aarch64-apple-darwin", "x86_64-pc-windows-msvc", - "aarch64-pc-windows-msvc", ) @@ -70,11 +68,9 @@ class BinaryComponent: RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [ ("x86_64-unknown-linux-musl", "linux-x86_64"), - ("aarch64-unknown-linux-musl", "linux-aarch64"), 
("x86_64-apple-darwin", "macos-x86_64"), ("aarch64-apple-darwin", "macos-aarch64"), ("x86_64-pc-windows-msvc", "windows-x86_64"), - ("aarch64-pc-windows-msvc", "windows-aarch64"), ] RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS} DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS] @@ -165,22 +161,24 @@ def main() -> int: "rg", ] - workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip() - if not workflow_url: - workflow_url = DEFAULT_WORKFLOW_URL + binary_components = [BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS] + if binary_components: + workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip() + if not workflow_url: + workflow_url = DEFAULT_WORKFLOW_URL - workflow_id = workflow_url.rstrip("/").split("/")[-1] - print(f"Downloading native artifacts from workflow {workflow_id}...") + workflow_id = workflow_url.rstrip("/").split("/")[-1] + print(f"Downloading native artifacts from workflow {workflow_id}...") - with _gha_group(f"Download native artifacts from workflow {workflow_id}"): - with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str: - artifacts_dir = Path(artifacts_dir_str) - _download_artifacts(workflow_id, artifacts_dir) - install_binary_components( - artifacts_dir, - vendor_dir, - [BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS], - ) + with _gha_group(f"Download native artifacts from workflow {workflow_id}"): + with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str: + artifacts_dir = Path(artifacts_dir_str) + _download_artifacts(workflow_id, artifacts_dir) + install_binary_components( + artifacts_dir, + vendor_dir, + binary_components, + ) if "rg" in components: with _gha_group("Fetch ripgrep binaries"): From 536c722ec048a70a23372ec229e9fb66ebece63c Mon Sep 17 00:00:00 2001 From: Loongphy Date: Fri, 1 May 2026 02:41:58 +0800 Subject: [PATCH 3/9] 
reapply fork changes for rust-v0.128.0 --- codex-rs/Cargo.lock | 222 +++++----- .../schema/json/ClientRequest.json | 7 +- .../codex_app_server_protocol.schemas.json | 7 +- .../codex_app_server_protocol.v2.schemas.json | 7 +- .../schema/json/v2/GetAccountParams.json | 7 +- .../schema/typescript/v2/GetAccountParams.ts | 9 +- .../app-server-protocol/src/protocol/v2.rs | 7 + codex-rs/app-server/README.md | 5 +- .../app-server/src/codex_message_processor.rs | 58 ++- codex-rs/config/src/config_toml.rs | 4 + codex-rs/config/src/types.rs | 26 ++ codex-rs/core/src/client.rs | 21 + codex-rs/core/src/config/mod.rs | 20 + codex-rs/core/src/session/handlers.rs | 4 + codex-rs/core/src/session/mod.rs | 46 +++ codex-rs/core/src/test_support.rs | 4 +- codex-rs/core/src/tools/orchestrator.rs | 117 +++++- codex-rs/core/src/util.rs | 4 +- codex-rs/linux-sandbox/README.md | 12 +- codex-rs/login/src/auth/manager.rs | 52 ++- codex-rs/login/src/lib.rs | 1 + codex-rs/models-manager/Cargo.toml | 1 + .../src/collaboration_mode_presets.rs | 137 ++++++- codex-rs/models-manager/src/manager.rs | 5 +- codex-rs/tui/Cargo.toml | 1 + codex-rs/tui/src/app.rs | 126 ++++++ codex-rs/tui/src/app/event_dispatch.rs | 12 +- codex-rs/tui/src/app_event.rs | 17 + codex-rs/tui/src/app_server_session.rs | 37 ++ codex-rs/tui/src/auth_watch.rs | 74 ++++ codex-rs/tui/src/bottom_pane/footer.rs | 14 + codex-rs/tui/src/chatwidget.rs | 383 +++++++++++++++--- codex-rs/tui/src/chatwidget/slash_dispatch.rs | 6 +- codex-rs/tui/src/chatwidget/status_header.rs | 312 ++++++++++++++ .../tui/src/chatwidget/status_surfaces.rs | 2 +- codex-rs/tui/src/collaboration_modes.rs | 97 ++++- codex-rs/tui/src/git_status.rs | 117 ++++++ codex-rs/tui/src/key_hint.rs | 4 + codex-rs/tui/src/lib.rs | 2 + codex-rs/tui/tooltips.txt | 2 +- fallback.md | 7 + 41 files changed, 1778 insertions(+), 218 deletions(-) create mode 100644 codex-rs/tui/src/auth_watch.rs create mode 100644 codex-rs/tui/src/chatwidget/status_header.rs create mode 100644 
codex-rs/tui/src/git_status.rs create mode 100644 fallback.md diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock index d8744c437380..90482aa7080b 100644 --- a/codex-rs/Cargo.lock +++ b/codex-rs/Cargo.lock @@ -402,7 +402,7 @@ checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" [[package]] name = "app_test_support" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "base64 0.22.1", @@ -1750,7 +1750,7 @@ dependencies = [ [[package]] name = "codex-agent-graph-store" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "codex-protocol", @@ -1765,7 +1765,7 @@ dependencies = [ [[package]] name = "codex-agent-identity" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "base64 0.22.1", @@ -1784,7 +1784,7 @@ dependencies = [ [[package]] name = "codex-analytics" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-app-server-protocol", "codex-git-utils", @@ -1804,7 +1804,7 @@ dependencies = [ [[package]] name = "codex-ansi-escape" -version = "0.0.0" +version = "0.128.0" dependencies = [ "ansi-to-tui", "ratatui", @@ -1813,7 +1813,7 @@ dependencies = [ [[package]] name = "codex-api" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "assert_matches", @@ -1847,7 +1847,7 @@ dependencies = [ [[package]] name = "codex-app-server" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "app_test_support", @@ -1934,7 +1934,7 @@ dependencies = [ [[package]] name = "codex-app-server-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-app-server", "codex-app-server-protocol", @@ -1958,7 +1958,7 @@ dependencies = [ [[package]] name = "codex-app-server-protocol" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -1985,7 +1985,7 @@ dependencies = [ [[package]] name = "codex-app-server-test-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -2006,7 +2006,7 @@ dependencies = [ [[package]] name 
= "codex-apply-patch" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "assert_cmd", @@ -2025,7 +2025,7 @@ dependencies = [ [[package]] name = "codex-arg0" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-apply-patch", @@ -2042,7 +2042,7 @@ dependencies = [ [[package]] name = "codex-async-utils" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "pretty_assertions", @@ -2052,7 +2052,7 @@ dependencies = [ [[package]] name = "codex-aws-auth" -version = "0.0.0" +version = "0.128.0" dependencies = [ "aws-config", "aws-credential-types", @@ -2067,7 +2067,7 @@ dependencies = [ [[package]] name = "codex-backend-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-api", @@ -2084,7 +2084,7 @@ dependencies = [ [[package]] name = "codex-backend-openapi-models" -version = "0.0.0" +version = "0.128.0" dependencies = [ "serde", "serde_json", @@ -2093,7 +2093,7 @@ dependencies = [ [[package]] name = "codex-chatgpt" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -2114,7 +2114,7 @@ dependencies = [ [[package]] name = "codex-cli" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "assert_cmd", @@ -2171,7 +2171,7 @@ dependencies = [ [[package]] name = "codex-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "bytes", @@ -2201,7 +2201,7 @@ dependencies = [ [[package]] name = "codex-cloud-requirements" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "base64 0.22.1", @@ -2226,7 +2226,7 @@ dependencies = [ [[package]] name = "codex-cloud-tasks" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -2258,7 +2258,7 @@ dependencies = [ [[package]] name = "codex-cloud-tasks-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -2273,7 +2273,7 @@ dependencies = [ [[package]] name = "codex-cloud-tasks-mock-client" -version = "0.0.0" 
+version = "0.128.0" dependencies = [ "async-trait", "chrono", @@ -2283,7 +2283,7 @@ dependencies = [ [[package]] name = "codex-code-mode" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-channel", "async-trait", @@ -2300,11 +2300,11 @@ dependencies = [ [[package]] name = "codex-collaboration-mode-templates" -version = "0.0.0" +version = "0.128.0" [[package]] name = "codex-config" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -2350,7 +2350,7 @@ dependencies = [ [[package]] name = "codex-connectors" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-app-server-protocol", @@ -2362,7 +2362,7 @@ dependencies = [ [[package]] name = "codex-core" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "arc-swap", @@ -2482,7 +2482,7 @@ dependencies = [ [[package]] name = "codex-core-api" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-analytics", "codex-arg0", @@ -2499,7 +2499,7 @@ dependencies = [ [[package]] name = "codex-core-plugins" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "chrono", @@ -2535,7 +2535,7 @@ dependencies = [ [[package]] name = "codex-core-skills" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-analytics", @@ -2566,7 +2566,7 @@ dependencies = [ [[package]] name = "codex-debug-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -2578,7 +2578,7 @@ dependencies = [ [[package]] name = "codex-device-key" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "base64 0.22.1", @@ -2594,7 +2594,7 @@ dependencies = [ [[package]] name = "codex-exec" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "assert_cmd", @@ -2639,7 +2639,7 @@ dependencies = [ [[package]] name = "codex-exec-server" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "arc-swap", @@ -2673,7 +2673,7 @@ dependencies = [ [[package]] name = "codex-execpolicy" 
-version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -2690,7 +2690,7 @@ dependencies = [ [[package]] name = "codex-execpolicy-legacy" -version = "0.0.0" +version = "0.128.0" dependencies = [ "allocative", "anyhow", @@ -2710,7 +2710,7 @@ dependencies = [ [[package]] name = "codex-experimental-api-macros" -version = "0.0.0" +version = "0.128.0" dependencies = [ "proc-macro2", "quote", @@ -2719,7 +2719,7 @@ dependencies = [ [[package]] name = "codex-external-agent-migration" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-hooks", "pretty_assertions", @@ -2731,7 +2731,7 @@ dependencies = [ [[package]] name = "codex-external-agent-sessions" -version = "0.0.0" +version = "0.128.0" dependencies = [ "chrono", "codex-app-server-protocol", @@ -2745,7 +2745,7 @@ dependencies = [ [[package]] name = "codex-features" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-otel", "codex-protocol", @@ -2758,7 +2758,7 @@ dependencies = [ [[package]] name = "codex-feedback" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-login", @@ -2771,7 +2771,7 @@ dependencies = [ [[package]] name = "codex-file-search" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -2787,7 +2787,7 @@ dependencies = [ [[package]] name = "codex-file-system" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "codex-protocol", @@ -2797,7 +2797,7 @@ dependencies = [ [[package]] name = "codex-git-utils" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "chrono", @@ -2821,7 +2821,7 @@ dependencies = [ [[package]] name = "codex-hooks" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "chrono", @@ -2841,7 +2841,7 @@ dependencies = [ [[package]] name = "codex-install-context" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-utils-home-dir", "pretty_assertions", @@ -2850,7 +2850,7 @@ dependencies = [ [[package]] name = "codex-keyring-store" -version = 
"0.0.0" +version = "0.128.0" dependencies = [ "keyring", "tracing", @@ -2858,7 +2858,7 @@ dependencies = [ [[package]] name = "codex-linux-sandbox" -version = "0.0.0" +version = "0.128.0" dependencies = [ "cc", "clap", @@ -2882,7 +2882,7 @@ dependencies = [ [[package]] name = "codex-lmstudio" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-core", "codex-model-provider-info", @@ -2896,7 +2896,7 @@ dependencies = [ [[package]] name = "codex-login" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -2938,7 +2938,7 @@ dependencies = [ [[package]] name = "codex-mcp" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-channel", @@ -2970,7 +2970,7 @@ dependencies = [ [[package]] name = "codex-mcp-server" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-arg0", @@ -3001,7 +3001,7 @@ dependencies = [ [[package]] name = "codex-memories-read" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-protocol", "codex-shell-command", @@ -3015,7 +3015,7 @@ dependencies = [ [[package]] name = "codex-memories-write" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "chrono", @@ -3050,7 +3050,7 @@ dependencies = [ [[package]] name = "codex-model-provider" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "codex-agent-identity", @@ -3074,7 +3074,7 @@ dependencies = [ [[package]] name = "codex-model-provider-info" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-api", "codex-app-server-protocol", @@ -3091,12 +3091,13 @@ dependencies = [ [[package]] name = "codex-models-manager" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "chrono", "codex-app-server-protocol", "codex-collaboration-mode-templates", + "codex-config", "codex-login", "codex-otel", "codex-protocol", @@ -3112,7 +3113,7 @@ dependencies = [ [[package]] name = "codex-network-proxy" -version = "0.0.0" +version = "0.128.0" dependencies = [ 
"anyhow", "async-trait", @@ -3143,7 +3144,7 @@ dependencies = [ [[package]] name = "codex-ollama" -version = "0.0.0" +version = "0.128.0" dependencies = [ "assert_matches", "async-stream", @@ -3162,7 +3163,7 @@ dependencies = [ [[package]] name = "codex-otel" -version = "0.0.0" +version = "0.128.0" dependencies = [ "chrono", "codex-api", @@ -3194,7 +3195,7 @@ dependencies = [ [[package]] name = "codex-plugin" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-config", "codex-utils-absolute-path", @@ -3204,7 +3205,7 @@ dependencies = [ [[package]] name = "codex-process-hardening" -version = "0.0.0" +version = "0.128.0" dependencies = [ "libc", "pretty_assertions", @@ -3212,7 +3213,7 @@ dependencies = [ [[package]] name = "codex-protocol" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "chardetng", @@ -3253,7 +3254,7 @@ dependencies = [ [[package]] name = "codex-realtime-webrtc" -version = "0.0.0" +version = "0.128.0" dependencies = [ "libwebrtc", "thiserror 2.0.18", @@ -3262,7 +3263,7 @@ dependencies = [ [[package]] name = "codex-response-debug-context" -version = "0.0.0" +version = "0.128.0" dependencies = [ "base64 0.22.1", "codex-api", @@ -3273,7 +3274,7 @@ dependencies = [ [[package]] name = "codex-responses-api-proxy" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -3290,7 +3291,7 @@ dependencies = [ [[package]] name = "codex-rmcp-client" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "axum", @@ -3327,7 +3328,7 @@ dependencies = [ [[package]] name = "codex-rollout" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -3352,7 +3353,7 @@ dependencies = [ [[package]] name = "codex-rollout-trace" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-code-mode", @@ -3367,7 +3368,7 @@ dependencies = [ [[package]] name = "codex-sandboxing" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -3388,7 
+3389,7 @@ dependencies = [ [[package]] name = "codex-secrets" -version = "0.0.0" +version = "0.128.0" dependencies = [ "age", "anyhow", @@ -3409,7 +3410,7 @@ dependencies = [ [[package]] name = "codex-shell-command" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "base64 0.22.1", @@ -3429,7 +3430,7 @@ dependencies = [ [[package]] name = "codex-shell-escalation" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "async-trait", @@ -3450,7 +3451,7 @@ dependencies = [ [[package]] name = "codex-skills" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-utils-absolute-path", "include_dir", @@ -3459,7 +3460,7 @@ dependencies = [ [[package]] name = "codex-state" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "chrono", @@ -3482,7 +3483,7 @@ dependencies = [ [[package]] name = "codex-stdio-to-uds" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-uds", @@ -3494,7 +3495,7 @@ dependencies = [ [[package]] name = "codex-terminal-detection" -version = "0.0.0" +version = "0.128.0" dependencies = [ "pretty_assertions", "tracing", @@ -3502,7 +3503,7 @@ dependencies = [ [[package]] name = "codex-test-binary-support" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-arg0", "tempfile", @@ -3510,7 +3511,7 @@ dependencies = [ [[package]] name = "codex-thread-manager-sample" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "clap", @@ -3520,7 +3521,7 @@ dependencies = [ [[package]] name = "codex-thread-store" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-trait", "chrono", @@ -3545,7 +3546,7 @@ dependencies = [ [[package]] name = "codex-tools" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-app-server-protocol", "codex-code-mode", @@ -3562,7 +3563,7 @@ dependencies = [ [[package]] name = "codex-tui" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "arboard", @@ -3625,6 +3626,7 @@ dependencies = [ "itertools 0.14.0", 
"lazy_static", "libc", + "notify", "pathdiff", "pretty_assertions", "pulldown-cmark", @@ -3667,7 +3669,7 @@ dependencies = [ [[package]] name = "codex-uds" -version = "0.0.0" +version = "0.128.0" dependencies = [ "async-io", "pretty_assertions", @@ -3679,7 +3681,7 @@ dependencies = [ [[package]] name = "codex-utils-absolute-path" -version = "0.0.0" +version = "0.128.0" dependencies = [ "dirs", "dunce", @@ -3693,14 +3695,14 @@ dependencies = [ [[package]] name = "codex-utils-approval-presets" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-protocol", ] [[package]] name = "codex-utils-cache" -version = "0.0.0" +version = "0.128.0" dependencies = [ "lru 0.16.3", "sha1", @@ -3709,7 +3711,7 @@ dependencies = [ [[package]] name = "codex-utils-cargo-bin" -version = "0.0.0" +version = "0.128.0" dependencies = [ "assert_cmd", "runfiles", @@ -3718,7 +3720,7 @@ dependencies = [ [[package]] name = "codex-utils-cli" -version = "0.0.0" +version = "0.128.0" dependencies = [ "clap", "codex-protocol", @@ -3729,15 +3731,15 @@ dependencies = [ [[package]] name = "codex-utils-elapsed" -version = "0.0.0" +version = "0.128.0" [[package]] name = "codex-utils-fuzzy-match" -version = "0.0.0" +version = "0.128.0" [[package]] name = "codex-utils-home-dir" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-utils-absolute-path", "dirs", @@ -3747,7 +3749,7 @@ dependencies = [ [[package]] name = "codex-utils-image" -version = "0.0.0" +version = "0.128.0" dependencies = [ "base64 0.22.1", "codex-utils-cache", @@ -3759,7 +3761,7 @@ dependencies = [ [[package]] name = "codex-utils-json-to-toml" -version = "0.0.0" +version = "0.128.0" dependencies = [ "pretty_assertions", "serde_json", @@ -3768,7 +3770,7 @@ dependencies = [ [[package]] name = "codex-utils-oss" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-core", "codex-lmstudio", @@ -3778,7 +3780,7 @@ dependencies = [ [[package]] name = "codex-utils-output-truncation" -version = "0.0.0" +version = 
"0.128.0" dependencies = [ "codex-protocol", "codex-utils-string", @@ -3787,7 +3789,7 @@ dependencies = [ [[package]] name = "codex-utils-path" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-utils-absolute-path", "dunce", @@ -3797,7 +3799,7 @@ dependencies = [ [[package]] name = "codex-utils-plugins" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-exec-server", "codex-login", @@ -3810,7 +3812,7 @@ dependencies = [ [[package]] name = "codex-utils-pty" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "filedescriptor", @@ -3826,7 +3828,7 @@ dependencies = [ [[package]] name = "codex-utils-readiness" -version = "0.0.0" +version = "0.128.0" dependencies = [ "assert_matches", "async-trait", @@ -3837,14 +3839,14 @@ dependencies = [ [[package]] name = "codex-utils-rustls-provider" -version = "0.0.0" +version = "0.128.0" dependencies = [ "rustls", ] [[package]] name = "codex-utils-sandbox-summary" -version = "0.0.0" +version = "0.128.0" dependencies = [ "codex-core", "codex-model-provider-info", @@ -3855,7 +3857,7 @@ dependencies = [ [[package]] name = "codex-utils-sleep-inhibitor" -version = "0.0.0" +version = "0.128.0" dependencies = [ "core-foundation 0.9.4", "libc", @@ -3865,14 +3867,14 @@ dependencies = [ [[package]] name = "codex-utils-stream-parser" -version = "0.0.0" +version = "0.128.0" dependencies = [ "pretty_assertions", ] [[package]] name = "codex-utils-string" -version = "0.0.0" +version = "0.128.0" dependencies = [ "pretty_assertions", "regex-lite", @@ -3882,14 +3884,14 @@ dependencies = [ [[package]] name = "codex-utils-template" -version = "0.0.0" +version = "0.128.0" dependencies = [ "pretty_assertions", ] [[package]] name = "codex-v8-poc" -version = "0.0.0" +version = "0.128.0" dependencies = [ "pretty_assertions", "v8", @@ -3897,7 +3899,7 @@ dependencies = [ [[package]] name = "codex-windows-sandbox" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "base64 0.22.1", @@ -4147,7 +4149,7 @@ 
dependencies = [ [[package]] name = "core_test_support" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "assert_cmd", @@ -8327,7 +8329,7 @@ dependencies = [ [[package]] name = "mcp_test_support" -version = "0.0.0" +version = "0.128.0" dependencies = [ "anyhow", "codex-login", diff --git a/codex-rs/app-server-protocol/schema/json/ClientRequest.json b/codex-rs/app-server-protocol/schema/json/ClientRequest.json index 06eaf9c8d358..eadb1886da0f 100644 --- a/codex-rs/app-server-protocol/schema/json/ClientRequest.json +++ b/codex-rs/app-server-protocol/schema/json/ClientRequest.json @@ -1400,6 +1400,11 @@ "default": false, "description": "When `true`, requests a proactive token refresh before returning.\n\nIn managed auth mode this triggers the normal refresh-token flow. In external auth mode this flag is ignored. Clients should refresh tokens themselves and call `account/login/start` with `chatgptAuthTokens`.", "type": "boolean" + }, + "reloadAuthFromStorage": { + "default": false, + "description": "When `true`, reloads the auth snapshot from storage before returning.\n\nThis keeps long-lived clients in sync with `auth.json` updates without requiring a full app-server restart.", + "type": "boolean" } }, "type": "object" @@ -6229,4 +6234,4 @@ } ], "title": "ClientRequest" -} \ No newline at end of file +} diff --git a/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.schemas.json b/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.schemas.json index 59c653096210..eda1ed4c7450 100644 --- a/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.schemas.json +++ b/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.schemas.json @@ -9252,6 +9252,11 @@ "default": false, "description": "When `true`, requests a proactive token refresh before returning.\n\nIn managed auth mode this triggers the normal refresh-token flow. In external auth mode this flag is ignored. 
Clients should refresh tokens themselves and call `account/login/start` with `chatgptAuthTokens`.", "type": "boolean" + }, + "reloadAuthFromStorage": { + "default": false, + "description": "When `true`, reloads the auth snapshot from storage before returning.\n\nThis keeps long-lived clients in sync with `auth.json` updates without requiring a full app-server restart.", + "type": "boolean" } }, "title": "GetAccountParams", @@ -18083,4 +18088,4 @@ }, "title": "CodexAppServerProtocol", "type": "object" -} \ No newline at end of file +} diff --git a/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.v2.schemas.json b/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.v2.schemas.json index 45a6147bfd13..a5d4b5e472c7 100644 --- a/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.v2.schemas.json +++ b/codex-rs/app-server-protocol/schema/json/codex_app_server_protocol.v2.schemas.json @@ -5862,6 +5862,11 @@ "default": false, "description": "When `true`, requests a proactive token refresh before returning.\n\nIn managed auth mode this triggers the normal refresh-token flow. In external auth mode this flag is ignored. 
Clients should refresh tokens themselves and call `account/login/start` with `chatgptAuthTokens`.", "type": "boolean" + }, + "reloadAuthFromStorage": { + "default": false, + "description": "When `true`, reloads the auth snapshot from storage before returning.\n\nThis keeps long-lived clients in sync with `auth.json` updates without requiring a full app-server restart.", + "type": "boolean" } }, "title": "GetAccountParams", @@ -15968,4 +15973,4 @@ }, "title": "CodexAppServerProtocolV2", "type": "object" -} \ No newline at end of file +} diff --git a/codex-rs/app-server-protocol/schema/json/v2/GetAccountParams.json b/codex-rs/app-server-protocol/schema/json/v2/GetAccountParams.json index ca18a451e948..94c2050a4863 100644 --- a/codex-rs/app-server-protocol/schema/json/v2/GetAccountParams.json +++ b/codex-rs/app-server-protocol/schema/json/v2/GetAccountParams.json @@ -5,8 +5,13 @@ "default": false, "description": "When `true`, requests a proactive token refresh before returning.\n\nIn managed auth mode this triggers the normal refresh-token flow. In external auth mode this flag is ignored. 
Clients should refresh tokens themselves and call `account/login/start` with `chatgptAuthTokens`.", "type": "boolean" + }, + "reloadAuthFromStorage": { + "default": false, + "description": "When `true`, reloads the auth snapshot from storage before returning.\n\nThis keeps long-lived clients in sync with `auth.json` updates without requiring a full app-server restart.", + "type": "boolean" } }, "title": "GetAccountParams", "type": "object" -} \ No newline at end of file +} diff --git a/codex-rs/app-server-protocol/schema/typescript/v2/GetAccountParams.ts b/codex-rs/app-server-protocol/schema/typescript/v2/GetAccountParams.ts index a5c5c25f6647..75d52d3738e2 100644 --- a/codex-rs/app-server-protocol/schema/typescript/v2/GetAccountParams.ts +++ b/codex-rs/app-server-protocol/schema/typescript/v2/GetAccountParams.ts @@ -10,4 +10,11 @@ export type GetAccountParams = { * external auth mode this flag is ignored. Clients should refresh tokens * themselves and call `account/login/start` with `chatgptAuthTokens`. */ -refreshToken: boolean, }; +refreshToken: boolean, +/** + * When `true`, reloads the auth snapshot from storage before returning. + * + * This keeps long-lived clients in sync with `auth.json` updates without + * requiring a full app-server restart. + */ +reloadAuthFromStorage: boolean, }; diff --git a/codex-rs/app-server-protocol/src/protocol/v2.rs b/codex-rs/app-server-protocol/src/protocol/v2.rs index 2c1f2abc3080..5b7d6cb5dc8a 100644 --- a/codex-rs/app-server-protocol/src/protocol/v2.rs +++ b/codex-rs/app-server-protocol/src/protocol/v2.rs @@ -2454,6 +2454,13 @@ pub struct GetAccountParams { /// themselves and call `account/login/start` with `chatgptAuthTokens`. #[serde(default)] pub refresh_token: bool, + + /// When `true`, reloads the auth snapshot from storage before returning. + /// + /// This keeps long-lived clients in sync with `auth.json` updates without + /// requiring a full app-server restart. 
+ #[serde(default)] + pub reload_auth_from_storage: bool, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)] diff --git a/codex-rs/app-server/README.md b/codex-rs/app-server/README.md index 3aae4ae610d6..cf390536a704 100644 --- a/codex-rs/app-server/README.md +++ b/codex-rs/app-server/README.md @@ -1618,7 +1618,7 @@ Codex supports these authentication modes. The current mode is surfaced in `acco ### API Overview -- `account/read` — fetch current account info; optionally refresh tokens. +- `account/read` — fetch current account info; optionally refresh tokens or reload auth from storage. - `account/login/start` — begin login (`apiKey`, `chatgpt`, `chatgptDeviceCode`). - `account/login/completed` (notify) — emitted when a login attempt finishes (success or error). - `account/login/cancel` — cancel a pending managed ChatGPT login by `loginId`. @@ -1635,7 +1635,7 @@ Codex supports these authentication modes. The current mode is surfaced in `acco Request: ```json -{ "method": "account/read", "id": 1, "params": { "refreshToken": false } } +{ "method": "account/read", "id": 1, "params": { "refreshToken": false, "reloadAuthFromStorage": false } } ``` Response examples: @@ -1650,6 +1650,7 @@ Response examples: Field notes: - `refreshToken` (bool): set `true` to force a token refresh. +- `reloadAuthFromStorage` (bool): set `true` to reload the current auth snapshot from storage before returning. - `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials. 
### 2) Log in with an API key diff --git a/codex-rs/app-server/src/codex_message_processor.rs b/codex-rs/app-server/src/codex_message_processor.rs index ea82c306bfe7..beba4a4e393b 100644 --- a/codex-rs/app-server/src/codex_message_processor.rs +++ b/codex-rs/app-server/src/codex_message_processor.rs @@ -304,6 +304,7 @@ use codex_feedback::FeedbackUploadOptions; use codex_git_utils::git_diff_to_remote; use codex_git_utils::resolve_root_git_project_for_trust; use codex_login::AuthManager; +use codex_login::AuthReloadStatus; use codex_login::CLIENT_ID; use codex_login::CodexAuth; use codex_login::ServerOptions as LoginServerOptions; @@ -325,6 +326,9 @@ use codex_memories_write::clear_memory_roots_contents; use codex_model_provider::ProviderAccountError; use codex_model_provider::create_model_provider; use codex_models_manager::collaboration_mode_presets::builtin_collaboration_mode_presets; +use codex_models_manager::collaboration_mode_presets::collaboration_mode_presets_with_overrides_and_config; +use codex_models_manager::collaboration_mode_presets::CollaborationModesConfig; +use codex_models_manager::manager::RefreshStrategy; use codex_protocol::ThreadId; use codex_protocol::config_types::CollaborationMode; use codex_protocol::config_types::ForcedLoginMethod; @@ -895,9 +899,10 @@ impl CodexMessageProcessor { fn normalize_turn_start_collaboration_mode( &self, mut collaboration_mode: CollaborationMode, + collaboration_modes_config: CollaborationModesConfig, ) -> CollaborationMode { if collaboration_mode.settings.developer_instructions.is_none() - && let Some(instructions) = builtin_collaboration_mode_presets() + && let Some(instructions) = builtin_collaboration_mode_presets(collaboration_modes_config) .into_iter() .find(|preset| preset.mode == Some(collaboration_mode.mode)) .and_then(|preset| preset.developer_instructions.flatten()) @@ -1203,11 +1208,18 @@ impl CodexMessageProcessor { ClientRequest::CollaborationModeList { request_id, params } => { let outgoing = 
self.outgoing.clone(); let thread_manager = self.thread_manager.clone(); + let config = self.config.clone(); let request_id = to_connection_request_id(request_id); tokio::spawn(async move { - Self::list_collaboration_modes(outgoing, thread_manager, request_id, params) - .await; + Self::list_collaboration_modes( + outgoing, + thread_manager, + config, + request_id, + params, + ) + .await; }); } ClientRequest::MockExperimentalMethod { request_id, params } => { @@ -1968,6 +1980,19 @@ impl CodexMessageProcessor { ) -> Result { let do_refresh = params.refresh_token; + if params.reload_auth_from_storage { + match self.auth_manager.reload_with_status().await { + AuthReloadStatus::Reloaded { .. } => {} + AuthReloadStatus::Failed => { + return Err(JSONRPCErrorError { + code: INTERNAL_ERROR_CODE, + message: "failed to reload auth from storage".to_string(), + data: None, + }); + } + } + } + self.refresh_token_if_requested(do_refresh).await; let provider = create_model_provider( @@ -5285,12 +5310,28 @@ impl CodexMessageProcessor { async fn list_collaboration_modes( outgoing: Arc, thread_manager: Arc, + config: Arc, request_id: ConnectionRequestId, params: CollaborationModeListParams, ) { let CollaborationModeListParams {} = params; - let items = thread_manager - .list_collaboration_modes() + let config = (*config).clone(); + let collaboration_modes_config = CollaborationModesConfig { + default_mode_request_user_input: config + .features + .enabled(Feature::DefaultModeRequestUserInput), + }; + let collaboration_mode_overrides = config.collaboration_mode_overrides(); + let base_model = thread_manager + .get_models_manager() + .get_default_model(&config.model, RefreshStrategy::Offline) + .await; + let items = collaboration_mode_presets_with_overrides_and_config( + &base_model, + config.model_reasoning_effort, + collaboration_mode_overrides.as_ref(), + collaboration_modes_config, + ) .into_iter() .map(Into::into) .collect(); @@ -6558,9 +6599,14 @@ impl CodexMessageProcessor { 
self.track_error_response(&request_id, error, /*error_type*/ None); })?; + let collaboration_modes_config = CollaborationModesConfig { + default_mode_request_user_input: self.config + .features + .enabled(Feature::DefaultModeRequestUserInput), + }; let collaboration_mode = params .collaboration_mode - .map(|mode| self.normalize_turn_start_collaboration_mode(mode)); + .map(|mode| self.normalize_turn_start_collaboration_mode(mode, collaboration_modes_config)); let environments: Option> = params.environments.map(|environments| { environments diff --git a/codex-rs/config/src/config_toml.rs b/codex-rs/config/src/config_toml.rs index 2e89550dd1a2..47b76e860fcb 100644 --- a/codex-rs/config/src/config_toml.rs +++ b/codex-rs/config/src/config_toml.rs @@ -11,6 +11,7 @@ use crate::types::AnalyticsConfigToml; use crate::types::ApprovalsReviewer; use crate::types::AppsConfigToml; use crate::types::AuthCredentialsStoreMode; +use crate::types::CollaborationModeOverrides; use crate::types::FeedbackConfigToml; use crate::types::History; use crate::types::MarketplaceConfig; @@ -259,6 +260,9 @@ pub struct ConfigToml { /// Defaults to `false`. pub show_raw_agent_reasoning: Option, + /// Optional overrides for collaboration mode presets. 
+ pub collaboration_modes: Option, + pub model_reasoning_effort: Option, pub plan_mode_reasoning_effort: Option, pub model_reasoning_summary: Option, diff --git a/codex-rs/config/src/types.rs b/codex-rs/config/src/types.rs index c9ec732a021a..ab19232fbfa3 100644 --- a/codex-rs/config/src/types.rs +++ b/codex-rs/config/src/types.rs @@ -19,6 +19,7 @@ pub use codex_protocol::config_types::ServiceTier; use codex_protocol::config_types::ShellEnvironmentPolicy; use codex_protocol::config_types::ShellEnvironmentPolicyInherit; pub use codex_protocol::config_types::WebSearchMode; +use codex_protocol::openai_models::ReasoningEffort; use codex_utils_absolute_path::AbsolutePathBuf; use std::collections::BTreeMap; use std::collections::HashMap; @@ -650,6 +651,14 @@ pub struct Tui { #[serde(default)] pub keymap: TuiKeymap, + /// Optional synthetic user-turn prompt injected after a turn fails with + /// `UsageLimitExceeded`. + /// + /// When unset, Codex uses the built-in default recovery prompt. + /// When set to an empty string, Codex disables this automatic recovery turn. + #[serde(default)] + pub usage_limit_resume_prompt: Option, + /// Startup tooltip availability NUX state persisted by the TUI. #[serde(default)] pub model_availability_nux: ModelAvailabilityNuxConfig, @@ -662,6 +671,23 @@ pub struct Tui { pub terminal_resize_reflow_max_rows: Option, } +/// Optional overrides for collaboration mode presets. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)] +#[schemars(deny_unknown_fields)] +pub struct CollaborationModeOverrides { + pub plan: Option, + /// Legacy alias for the default/code mode overrides. + pub code: Option, +} + +/// Overrides for a single collaboration mode preset. 
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default, JsonSchema)] +#[schemars(deny_unknown_fields)] +pub struct CollaborationModeOverride { + pub model: Option, + pub reasoning_effort: Option, +} + const fn default_true() -> bool { true } diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs index ba81b451a748..5d285995fac8 100644 --- a/codex-rs/core/src/client.rs +++ b/codex-rs/core/src/client.rs @@ -242,6 +242,7 @@ struct WebsocketSession { connection: Option, last_request: Option, last_response_rx: Option>, + auth_fingerprint: Option)>>, connection_reused: StdMutex, } @@ -288,6 +289,20 @@ fn sideband_websocket_auth_headers(api_auth: &dyn AuthProvider) -> ApiHeaderMap headers } +fn websocket_auth_fingerprint(api_auth: &dyn AuthProvider) -> Vec<(String, Vec)> { + let headers = sideband_websocket_auth_headers(api_auth); + let mut fingerprint: Vec<(String, Vec)> = headers + .iter() + .map(|(name, value)| (name.as_str().to_string(), value.as_bytes().to_vec())) + .collect(); + fingerprint.sort_by(|(left_name, left_value), (right_name, right_value)| { + left_name + .cmp(right_name) + .then_with(|| left_value.cmp(right_value)) + }); + fingerprint +} + impl ModelClient { #[allow(clippy::too_many_arguments)] /// Creates a new session-scoped `ModelClient`. 
@@ -824,6 +839,7 @@ impl ModelClientSession { self.websocket_session.connection = None; self.websocket_session.last_request = None; self.websocket_session.last_response_rx = None; + self.websocket_session.auth_fingerprint = None; self.websocket_session .set_connection_reused(/*connection_reused*/ false); } @@ -1078,6 +1094,10 @@ impl ModelClientSession { auth_context, request_route_telemetry, } = params; + let auth_fingerprint = websocket_auth_fingerprint(api_auth.as_ref()); + if self.websocket_session.auth_fingerprint.as_ref() != Some(&auth_fingerprint) { + self.reset_websocket_session(); + } let needs_new = match self.websocket_session.connection.as_ref() { Some(conn) => conn.is_closed().await, None => true, @@ -1112,6 +1132,7 @@ impl ModelClientSession { } }; self.websocket_session.connection = Some(new_conn); + self.websocket_session.auth_fingerprint = Some(auth_fingerprint); self.websocket_session .set_connection_reused(/*connection_reused*/ false); } else { diff --git a/codex-rs/core/src/config/mod.rs b/codex-rs/core/src/config/mod.rs index c5965d389e81..395f7a5a6d99 100644 --- a/codex-rs/core/src/config/mod.rs +++ b/codex-rs/core/src/config/mod.rs @@ -35,6 +35,7 @@ use codex_config::profile_toml::ConfigProfile; use codex_config::sandbox_mode_requirement_for_permission_profile; use codex_config::types::ApprovalsReviewer; use codex_config::types::AuthCredentialsStoreMode; +use codex_config::types::CollaborationModeOverrides; use codex_config::types::DEFAULT_OTEL_ENVIRONMENT; use codex_config::types::History; use codex_config::types::McpServerConfig; @@ -543,6 +544,13 @@ pub struct Config { /// 3. built-in defaults pub tui_keymap: TuiKeymap, + /// Synthetic user-turn prompt injected after a `UsageLimitExceeded` turn + /// failure. + /// + /// `None` uses the built-in default prompt. `Some("")` disables the + /// automatic recovery turn. 
+ pub tui_usage_limit_resume_prompt: Option, + /// The absolute directory that should be treated as the current working /// directory for the session. All relative paths inside the business-logic /// layer are resolved against this path. @@ -996,6 +1004,14 @@ impl Config { } } + pub fn collaboration_mode_overrides(&self) -> Option { + self.config_layer_stack + .effective_config() + .try_into() + .ok() + .and_then(|config: ConfigToml| config.collaboration_modes) + } + pub async fn to_mcp_config( &self, plugins_manager: &crate::plugins::PluginsManager, @@ -2997,6 +3013,10 @@ impl Config { .as_ref() .map(|t| t.keymap.clone()) .unwrap_or_default(), + tui_usage_limit_resume_prompt: cfg + .tui + .as_ref() + .and_then(|t| t.usage_limit_resume_prompt.clone()), otel: { let t: OtelConfigToml = cfg.otel.unwrap_or_default(); let log_user_prompt = t.log_user_prompt.unwrap_or(false); diff --git a/codex-rs/core/src/session/handlers.rs b/codex-rs/core/src/session/handlers.rs index 624331ea62a7..450be2f3d0f0 100644 --- a/codex-rs/core/src/session/handlers.rs +++ b/codex-rs/core/src/session/handlers.rs @@ -244,6 +244,10 @@ pub(super) async fn user_input_or_turn_inner( _ => unreachable!(), }; + let requested_cwd = updates.cwd.clone(); + sess.maybe_refresh_project_docs_for_user_turn(&sub_id, requested_cwd.as_deref()) + .await; + let Ok(current_context) = sess.new_turn_with_sub_id(sub_id.clone(), updates).await else { // new_turn_with_sub_id already emits the error event. 
return; diff --git a/codex-rs/core/src/session/mod.rs b/codex-rs/core/src/session/mod.rs index e45db2084809..cc1499367bcc 100644 --- a/codex-rs/core/src/session/mod.rs +++ b/codex-rs/core/src/session/mod.rs @@ -1121,6 +1121,52 @@ impl Session { } } + pub(crate) async fn maybe_refresh_project_docs_for_user_turn( + &self, + sub_id: &str, + requested_cwd: Option<&Path>, + ) { + let environment = self.services.environment_manager.default_environment(); + let (mut config, previous_user_instructions, session_cwd) = { + let state = self.state.lock().await; + ( + (*state.session_configuration.original_config_do_not_use).clone(), + state.session_configuration.user_instructions.clone(), + state.session_configuration.cwd.clone(), + ) + }; + config.cwd = requested_cwd + .map(|cwd| { + AbsolutePathBuf::relative_to_current_dir(normalize_for_native_workdir(cwd)) + .unwrap_or_else(|e| { + warn!("failed to normalize update cwd: {cwd:?}: {e}"); + session_cwd.clone() + }) + }) + .unwrap_or(session_cwd); + + let next_user_instructions = AgentsMdManager::new(&config) + .user_instructions(environment.as_deref()) + .await; + if next_user_instructions == previous_user_instructions { + return; + } + + { + let mut state = self.state.lock().await; + state.session_configuration.user_instructions = next_user_instructions; + } + self.send_event_raw(Event { + id: sub_id.to_string(), + msg: EventMsg::Warning(WarningEvent { + message: + "AGENTS.md instructions changed. Reloaded and applied starting this turn." + .to_string(), + }), + }) + .await; + } + // Merges connector IDs into the session-level explicit connector selection. 
pub(crate) async fn merge_connector_selection( &self, diff --git a/codex-rs/core/src/test_support.rs b/codex-rs/core/src/test_support.rs index 34ed487fc5e8..fb3d1dfb7375 100644 --- a/codex-rs/core/src/test_support.rs +++ b/codex-rs/core/src/test_support.rs @@ -126,5 +126,7 @@ pub fn all_model_presets() -> &'static Vec { } pub fn builtin_collaboration_mode_presets() -> Vec { - collaboration_mode_presets::builtin_collaboration_mode_presets() + collaboration_mode_presets::builtin_collaboration_mode_presets( + collaboration_mode_presets::CollaborationModesConfig::default(), + ) } diff --git a/codex-rs/core/src/tools/orchestrator.rs b/codex-rs/core/src/tools/orchestrator.rs index dcb42c36c6e5..62ff4146974f 100644 --- a/codex-rs/core/src/tools/orchestrator.rs +++ b/codex-rs/core/src/tools/orchestrator.rs @@ -46,6 +46,13 @@ pub(crate) struct OrchestratorRunResult { pub deferred_network_approval: Option, } +const GUARDIAN_REVIEW_FAILURE_PREFIX: &str = "Automatic approval review failed:"; + +struct ApprovalOutcome { + decision: ReviewDecision, + rejection_review_id: Option, +} + impl ToolOrchestrator { pub fn new() -> Self { Self { @@ -160,7 +167,7 @@ impl ToolOrchestrator { retry_reason: None, network_approval_context: None, }; - let decision = Self::request_approval( + let approval = Self::request_approval( tool, req, tool_ctx.call_id.as_str(), @@ -170,8 +177,12 @@ impl ToolOrchestrator { &otel, ) .await?; - Self::reject_if_not_approved(tool_ctx, guardian_review_id.as_deref(), decision) - .await?; + Self::reject_if_not_approved( + tool_ctx, + approval.rejection_review_id.as_deref(), + approval.decision, + ) + .await?; already_approved = true; } else { otel.tool_decision( @@ -195,7 +206,7 @@ impl ToolOrchestrator { retry_reason: reason, network_approval_context: None, }; - let decision = Self::request_approval( + let approval = Self::request_approval( tool, req, tool_ctx.call_id.as_str(), @@ -206,8 +217,12 @@ impl ToolOrchestrator { ) .await?; - 
Self::reject_if_not_approved(tool_ctx, guardian_review_id.as_deref(), decision) - .await?; + Self::reject_if_not_approved( + tool_ctx, + approval.rejection_review_id.as_deref(), + approval.decision, + ) + .await?; already_approved = true; } } @@ -330,7 +345,7 @@ impl ToolOrchestrator { }; let permission_request_run_id = format!("{}:retry", tool_ctx.call_id); - let decision = Self::request_approval( + let approval = Self::request_approval( tool, req, &permission_request_run_id, @@ -341,8 +356,12 @@ impl ToolOrchestrator { ) .await?; - Self::reject_if_not_approved(tool_ctx, guardian_review_id.as_deref(), decision) - .await?; + Self::reject_if_not_approved( + tool_ctx, + approval.rejection_review_id.as_deref(), + approval.decision, + ) + .await?; } let escalated_attempt = SandboxAttempt { @@ -390,7 +409,7 @@ impl ToolOrchestrator { tool_ctx: &ToolCtx, evaluate_permission_request_hooks: bool, otel: &codex_otel::SessionTelemetry, - ) -> Result + ) -> Result where T: ToolRuntime, { @@ -413,7 +432,10 @@ impl ToolOrchestrator { &decision, ToolDecisionSource::Config, ); - return Ok(decision); + return Ok(ApprovalOutcome { + decision, + rejection_review_id: None, + }); } Some(PermissionRequestDecision::Deny { message }) => { let decision = ReviewDecision::Denied; @@ -429,7 +451,13 @@ impl ToolOrchestrator { } } - let otel_source = if approval_ctx.guardian_review_id.is_some() { + let session = approval_ctx.session; + let turn = approval_ctx.turn; + let call_id = approval_ctx.call_id; + let guardian_review_id = approval_ctx.guardian_review_id.clone(); + let retry_reason = approval_ctx.retry_reason.clone(); + let network_approval_context = approval_ctx.network_approval_context.clone(); + let otel_source = if guardian_review_id.is_some() { ToolDecisionSource::AutomatedReviewer } else { ToolDecisionSource::User @@ -441,7 +469,61 @@ impl ToolOrchestrator { &decision, otel_source, ); - Ok(decision) + if Self::should_retry_approval_without_guardian( + session, + 
guardian_review_id.as_deref(), + &decision, + ) + .await + { + let approval_ctx = ApprovalCtx { + session, + turn, + call_id, + guardian_review_id: None, + retry_reason, + network_approval_context, + }; + let decision = tool.start_approval_async(req, approval_ctx).await; + otel.tool_decision( + &tool_ctx.tool_name, + &tool_ctx.call_id, + &decision, + ToolDecisionSource::User, + ); + return Ok(ApprovalOutcome { + decision, + rejection_review_id: None, + }); + } + + Ok(ApprovalOutcome { + decision, + rejection_review_id: guardian_review_id, + }) + } + + async fn should_retry_approval_without_guardian( + session: &std::sync::Arc, + guardian_review_id: Option<&str>, + decision: &ReviewDecision, + ) -> bool { + let Some(review_id) = guardian_review_id else { + return false; + }; + match decision { + ReviewDecision::TimedOut => true, + ReviewDecision::Denied | ReviewDecision::Abort => { + guardian_rejection_message(session.as_ref(), review_id) + .await + .trim_start() + .starts_with(GUARDIAN_REVIEW_FAILURE_PREFIX) + } + ReviewDecision::Approved + | ReviewDecision::ApprovedExecpolicyAmendment { .. } + | ReviewDecision::ApprovedForSession + | ReviewDecision::NetworkPolicyAmendment { .. } => false, + } } async fn reject_if_not_approved( @@ -458,7 +540,14 @@ impl ToolOrchestrator { }; Err(ToolError::Rejected(reason)) } - ReviewDecision::TimedOut => Err(ToolError::Rejected(guardian_timeout_message())), + ReviewDecision::TimedOut => { + let reason = if guardian_review_id.is_some() { + guardian_timeout_message() + } else { + "approval request timed out".to_string() + }; + Err(ToolError::Rejected(reason)) + } ReviewDecision::Approved | ReviewDecision::ApprovedExecpolicyAmendment { .. 
} | ReviewDecision::ApprovedForSession => Ok(()), diff --git a/codex-rs/core/src/util.rs b/codex-rs/core/src/util.rs index 97fb44ba11d6..7a615fcc4e97 100644 --- a/codex-rs/core/src/util.rs +++ b/codex-rs/core/src/util.rs @@ -127,9 +127,9 @@ pub fn resume_command(thread_name: Option<&str>, thread_id: Option) -> let needs_double_dash = target.starts_with('-'); let escaped = shlex_join(&[target]); if needs_double_dash { - format!("codex resume -- {escaped}") + format!("codex resume -- {escaped}") } else { - format!("codex resume {escaped}") + format!("codex resume {escaped}") } }) } diff --git a/codex-rs/linux-sandbox/README.md b/codex-rs/linux-sandbox/README.md index 5745f4816ca3..bfc934ee3a9a 100644 --- a/codex-rs/linux-sandbox/README.md +++ b/codex-rs/linux-sandbox/README.md @@ -51,8 +51,8 @@ commands that would enter the bubblewrap path. - When bubblewrap is active, the filesystem is read-only by default via `--ro-bind / /`. - When bubblewrap is active, writable roots are layered with `--bind `. - When bubblewrap is active, protected subpaths under writable roots (for - example `.git`, - resolved `gitdir:`, and `.codex`) are re-applied as read-only via `--ro-bind`. + example `.git`, resolved `gitdir:`, and `.codex`) are re-applied as + read-only via `--ro-bind`. - When bubblewrap is active, overlapping split-policy entries are applied in path-specificity order so narrower writable children can reopen broader read-only or denied parents while narrower denied subpaths @@ -78,9 +78,11 @@ commands that would enter the bubblewrap path. "**/*.env" = "none" ``` -- When bubblewrap is active, symlink-in-path and non-existent protected paths inside - writable roots are blocked by mounting `/dev/null` on the symlink or first - missing component. +- When bubblewrap is active, symlink-in-path protected paths fail closed, and + non-existent protected paths inside writable roots are blocked with a + sandbox-local read-only file blocker at the first missing component.
If + bubblewrap materializes an empty host-side mount target for that blocker, + the helper removes it after bubblewrap exits. - When bubblewrap is active, the helper explicitly isolates the user namespace via `--unshare-user` and the PID namespace via `--unshare-pid`. - When bubblewrap is active and network is restricted without proxy routing, the helper also diff --git a/codex-rs/login/src/auth/manager.rs b/codex-rs/login/src/auth/manager.rs index 29897db7bea3..49580a8d7cdc 100644 --- a/codex-rs/login/src/auth/manager.rs +++ b/codex-rs/login/src/auth/manager.rs @@ -1046,6 +1046,12 @@ enum UnauthorizedRecoveryMode { External, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AuthReloadStatus { + Reloaded { changed: bool }, + Failed, +} + // UnauthorizedRecovery is a state machine that handles an attempt to refresh the authentication when requests // to API fail with 401 status code. // The client calls next() every time it encounters a 401 error, one time per retry. @@ -1238,9 +1244,9 @@ impl UnauthorizedRecovery { /// hands out cloned `CodexAuth` values so the rest of the program has a /// consistent snapshot. /// -/// External modifications to `auth.json` will NOT be observed until -/// `reload()` is called explicitly. This matches the design goal of avoiding -/// different parts of the program seeing inconsistent auth data mid‑run. +/// External modifications to `auth.json` will not be observed until `reload()` +/// or `reload_with_status()` is called explicitly. This matches the design goal +/// of avoiding different parts of the program seeing inconsistent auth data mid-run. pub struct AuthManager { codex_home: PathBuf, inner: RwLock, @@ -1418,9 +1424,30 @@ impl AuthManager { /// Force a reload of the auth information from auth.json. Returns /// whether the auth value changed. 
pub async fn reload(&self) -> bool { + match self.reload_with_status().await { + AuthReloadStatus::Reloaded { changed } => changed, + AuthReloadStatus::Failed => false, + } + } + + /// Force a reload of auth information from storage. + /// + /// Returns whether the cached auth changed, or `Failed` when storage could + /// not be read while the file was being updated. + pub async fn reload_with_status(&self) -> AuthReloadStatus { tracing::info!("Reloading auth"); - let new_auth = self.load_auth_from_storage().await; - self.set_cached_auth(new_auth) + let new_auth = match self.load_auth_from_storage().await { + Ok(new_auth) => new_auth, + Err(err) => { + tracing::warn!( + %err, + "Failed to reload auth from storage; keeping current auth state" + ); + return AuthReloadStatus::Failed; + } + }; + let changed = self.set_cached_auth(new_auth); + AuthReloadStatus::Reloaded { changed } } async fn reload_if_account_id_matches( @@ -1435,7 +1462,16 @@ impl AuthManager { } }; - let new_auth = self.load_auth_from_storage().await; + let new_auth = match self.load_auth_from_storage().await { + Ok(new_auth) => new_auth, + Err(err) => { + tracing::warn!( + %err, + "Skipping auth reload because auth storage could not be read" + ); + return ReloadOutcome::Skipped; + } + }; let new_account_id = new_auth.as_ref().and_then(CodexAuth::get_account_id); if new_account_id.as_deref() != Some(expected_account_id) { @@ -1506,7 +1542,7 @@ impl AuthManager { } } - async fn load_auth_from_storage(&self) -> Option { + async fn load_auth_from_storage(&self) -> std::io::Result> { load_auth( &self.codex_home, self.enable_codex_api_key_env, @@ -1514,8 +1550,6 @@ impl AuthManager { self.chatgpt_base_url.as_deref(), ) .await - .ok() - .flatten() } fn set_cached_auth(&self, new_auth: Option) -> bool { diff --git a/codex-rs/login/src/lib.rs b/codex-rs/login/src/lib.rs index 3049b6f6bc31..858ff29ff179 100644 --- a/codex-rs/login/src/lib.rs +++ b/codex-rs/login/src/lib.rs @@ -21,6 +21,7 @@ pub use 
auth::AuthConfig; pub use auth::AuthDotJson; pub use auth::AuthManager; pub use auth::AuthManagerConfig; +pub use auth::AuthReloadStatus; pub use auth::CLIENT_ID; pub use auth::CODEX_AGENT_IDENTITY_ENV_VAR; pub use auth::CODEX_API_KEY_ENV_VAR; diff --git a/codex-rs/models-manager/Cargo.toml b/codex-rs/models-manager/Cargo.toml index f46bf2b285a4..06023eaa0044 100644 --- a/codex-rs/models-manager/Cargo.toml +++ b/codex-rs/models-manager/Cargo.toml @@ -17,6 +17,7 @@ async-trait = { workspace = true } chrono = { workspace = true, features = ["serde"] } codex-app-server-protocol = { workspace = true } codex-collaboration-mode-templates = { workspace = true } +codex-config = { workspace = true } codex-login = { workspace = true } codex-otel = { workspace = true } codex-protocol = { workspace = true } diff --git a/codex-rs/models-manager/src/collaboration_mode_presets.rs b/codex-rs/models-manager/src/collaboration_mode_presets.rs index 72731c52d34a..0b26bca8cfd5 100644 --- a/codex-rs/models-manager/src/collaboration_mode_presets.rs +++ b/codex-rs/models-manager/src/collaboration_mode_presets.rs @@ -1,3 +1,5 @@ +use codex_config::types::CollaborationModeOverride; +use codex_config::types::CollaborationModeOverrides; use codex_collaboration_mode_templates::DEFAULT as COLLABORATION_MODE_DEFAULT; use codex_collaboration_mode_templates::PLAN as COLLABORATION_MODE_PLAN; use codex_protocol::config_types::CollaborationModeMask; @@ -8,13 +10,59 @@ use codex_utils_template::Template; use std::sync::LazyLock; const KNOWN_MODE_NAMES_TEMPLATE_KEY: &str = "KNOWN_MODE_NAMES"; +const REQUEST_USER_INPUT_AVAILABILITY_TEMPLATE_KEY: &str = "REQUEST_USER_INPUT_AVAILABILITY"; +const ASKING_QUESTIONS_GUIDANCE_TEMPLATE_KEY: &str = "ASKING_QUESTIONS_GUIDANCE"; static COLLABORATION_MODE_DEFAULT_TEMPLATE: LazyLock