diff options
| author | Arun Isaac | 2026-04-17 02:54:56 +0100 |
|---|---|---|
| committer | Arun Isaac | 2026-04-17 02:55:39 +0100 |
| commit | 22e44c6c3b37dec51e6d0c7edd7996b316fbb2f7 (patch) | |
| tree | 7630dc13b1ef29d8a23b2f988a420f7bf5add9e0 | |
| parent | 48638af12e7da5edd79dff088f5d6b227f7415ae (diff) | |
| download | kaagum-22e44c6c3b37dec51e6d0c7edd7996b316fbb2f7.tar.gz kaagum-22e44c6c3b37dec51e6d0c7edd7996b316fbb2f7.tar.lz kaagum-22e44c6c3b37dec51e6d0c7edd7996b316fbb2f7.zip | |
Explicitly enable automatic prompt caching.
| -rw-r--r-- | kaagum/openai.scm | 11 |
1 files changed, 11 insertions, 0 deletions
diff --git a/kaagum/openai.scm b/kaagum/openai.scm
index 5e4cab5..f57cc25 100644
--- a/kaagum/openai.scm
+++ b/kaagum/openai.scm
@@ -95,6 +95,17 @@ compatible with the OpenAI API specification."
             #:headers `((authorization . ,(string-append "Bearer " api-key)))
             #:json `(("model" . ,model)
+                     ;; Some providers (like Anthropic) require you to
+                     ;; explicitly enable automatic prompt caching. See
+                     ;; https://openrouter.ai/docs/guides/best-practices/prompt-caching
+                     ("cache_control"
+                      ("type" . "ephemeral")
+                      ;; Anthropic offers cheaper cache writes for a ttl of 5
+                      ;; minutes. But, it doesn't make a difference for an
+                      ;; extended session and is not worth it for our use case.
+                      ;; If we change our mind about this, we can make this a
+                      ;; configurable parameter.
+                      ("ttl" . "1h"))
                      ("messages" . ,(list->vector messages))
                      ("tools" . ,(list->vector tools)))))
