From 94f6ac99959ddabfd37125994cd12af7cda94cc3 Mon Sep 17 00:00:00 2001
From: Jen Agarwal <109815410+Jenverse@users.noreply.github.com>
Date: Mon, 16 Jun 2025 12:43:32 -0400
Subject: [PATCH 1/2] Update use-langcache.md

- Removed Scope from the request as we have made that update
- Made some other minor changes to the language
---
 content/operate/rc/langcache/use-langcache.md | 19 ++-----------------
 1 file changed, 2 insertions(+), 17 deletions(-)

diff --git a/content/operate/rc/langcache/use-langcache.md b/content/operate/rc/langcache/use-langcache.md
index e531da3fa7..1c156fcc14 100644
--- a/content/operate/rc/langcache/use-langcache.md
+++ b/content/operate/rc/langcache/use-langcache.md
@@ -11,7 +11,7 @@ title: Use the LangCache API with your GenAI app
 weight: 10
 ---
 
-You can use the LangCache API from your client app to store and retrieve LLM responses.
+You can use the LangCache API from your client app to store and retrieve LLM, RAG or agent responses.
 
 To access the LangCache API, you need:
 
@@ -64,7 +64,7 @@ Place this call in your client app right before you call your LLM's REST API. If
 
 If LangCache does not return a response, you should call your LLM's REST API to generate a new response. After you get a response from the LLM, you can [store it in LangCache](#store-a-new-response-in-langcache) for future use.
 
-You can also limit the responses returned from LangCache by adding an `attributes` object or `scope` object to the request. LangCache will only return responses that match the attributes you specify.
+You can also scope the responses returned from LangCache by adding an `attributes` object or `scope` object to the request. LangCache will only return responses that match the attributes you specify.
 
 ```sh
 POST https://[host]/v1/caches/{cacheId}/search
 {
     "prompt": "User prompt text",
     "attributes": {
         "customAttributeName": "customAttributeValue"
-    },
-    "scope": {
-        "applicationId": "applicationId",
-        "userId": "userId",
-        "sessionId": "sessionId"
     }
 }
 ```
@@ -104,11 +99,6 @@ POST https://[host]/v1/caches/{cacheId}/entries
     "response": "LLM response text",
     "attributes": {
         "customAttributeName": "customAttributeValue"
-    },
-    "scope": {
-        "applicationId": "applicationId",
-        "userId": "userId",
-        "sessionId": "sessionId"
     }
 }
 ```
@@ -124,11 +114,6 @@ DELETE https://[host]/v1/caches/{cacheId}/entries
 {
     "attributes": {
         "customAttributeName": "customAttributeValue"
-    },
-    "scope": {
-        "applicationId": "applicationId",
-        "userId": "userId",
-        "sessionId": "sessionId"
     }
 }
 ```

From eef63da7f996b26f0ea06375e70c7295662d1fd6 Mon Sep 17 00:00:00 2001
From: Cameron Bates <102550101+cmilesb@users.noreply.github.com>
Date: Mon, 16 Jun 2025 13:10:58 -0400
Subject: [PATCH 2/2] Apply suggestions from code review

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
 content/operate/rc/langcache/use-langcache.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/content/operate/rc/langcache/use-langcache.md b/content/operate/rc/langcache/use-langcache.md
index 1c156fcc14..54c2260596 100644
--- a/content/operate/rc/langcache/use-langcache.md
+++ b/content/operate/rc/langcache/use-langcache.md
@@ -11,7 +11,7 @@ title: Use the LangCache API with your GenAI app
 weight: 10
 ---
 
-You can use the LangCache API from your client app to store and retrieve LLM, RAG or agent responses.
+You can use the LangCache API from your client app to store and retrieve LLM, RAG, or agent responses.
 
 To access the LangCache API, you need:
 
@@ -64,7 +64,7 @@ Place this call in your client app right before you call your LLM's REST API. If
 
 If LangCache does not return a response, you should call your LLM's REST API to generate a new response. After you get a response from the LLM, you can [store it in LangCache](#store-a-new-response-in-langcache) for future use.
 
-You can also scope the responses returned from LangCache by adding an `attributes` object or `scope` object to the request. LangCache will only return responses that match the attributes you specify.
+You can also scope the responses returned from LangCache by adding an `attributes` object to the request. LangCache will only return responses that match the attributes you specify.
 
 ```sh
 POST https://[host]/v1/caches/{cacheId}/search
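# For reference, a minimal sketch of how a client might call the search endpoint
# documented in the patches above. The endpoint path and request body fields come
# from the docs being edited; LANGCACHE_HOST, CACHE_ID, LANGCACHE_API_KEY, and the
# Bearer authorization header are placeholder assumptions, since authentication
# details are not shown in this excerpt.
curl -s -X POST "https://$LANGCACHE_HOST/v1/caches/$CACHE_ID/search" \
  -H "Authorization: Bearer $LANGCACHE_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
        "prompt": "User prompt text",
        "attributes": {
          "customAttributeName": "customAttributeValue"
        }
      }'
# If the search returns no match, the app would call its LLM and then store the
# new response with POST /v1/caches/{cacheId}/entries, as described above.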