feat: AI read-file tool, configurable system prompts, and loading many LLMs

This commit is contained in:
2026-03-04 15:48:25 +00:00
parent bbaebf1f70
commit 0d0e747682
10 changed files with 184 additions and 47 deletions
+25
View File
@@ -0,0 +1,25 @@
"""Smoke test: send one prompt to a local LM Studio server through dspy.

Configures a single ``dspy.LM`` pointed at an LM Studio instance and prints
the model's answer to one hard-coded question.  The trailing ``/no_think``
token in the prompt asks the Qwen model to skip its "thinking" output —
earlier attempts to disable thinking via API kwargs (``extra_body`` /
``enable_thinking``) did not stick and were removed; the in-prompt token is
the approach that is actually used here.

Requires a reachable LM Studio server at BASE_URL; this script performs
network I/O and has no return value beyond the printed response.
"""
import dspy

# Local LM Studio endpoint and the model it is serving.
base_url = "http://framework.tawny-bellatrix.ts.net:1234"
model_name = "lm_studio/qwen-0"

# dspy wraps litellm; api_base must point at the OpenAI-compatible /v1/ path.
lm = dspy.LM(
    model=model_name,
    api_base=f"{base_url}/v1/",
)
dspy.configure(lm=lm)

question = "how long would it take light to travel from the sun to the earth? /no_think"

# One chat-style completion call; prints the raw response object.
response = lm(
    messages=[{"role": "user", "content": question}],
)
print(response)