diff --git a/model/renderers/intellect3.go b/model/renderers/intellect3.go
index 7894bb7a1..149e70821 100644
--- a/model/renderers/intellect3.go
+++ b/model/renderers/intellect3.go
@@ -6,6 +6,8 @@ import (
"github.com/ollama/ollama/api"
)
+const intellect3DefaultSystemMessage = "You are INTELLECT-3, a helpful assistant developed by Prime Intellect, that can interact with a computer to solve tasks."
+
type Intellect3Renderer struct{}
func (r *Intellect3Renderer) Render(messages []api.Message, tools []api.Tool, think *api.ThinkValue) (string, error) {
@@ -28,6 +30,11 @@ func (r *Intellect3Renderer) Render(messages []api.Message, tools []api.Tool, th
if systemMessage != "" || len(tools) > 0 {
sb.WriteString(imStartTag + "system\n")
+ // Use default system message when tools present but no user system message
+ if systemMessage == "" && len(tools) > 0 {
+ systemMessage = intellect3DefaultSystemMessage
+ }
+
sb.WriteString(systemMessage)
if len(tools) > 0 {
@@ -87,15 +94,15 @@ func (r *Intellect3Renderer) Render(messages []api.Message, tools []api.Tool, th
switch message.Role {
case "assistant":
if len(message.ToolCalls) > 0 {
- sb.WriteString(imStartTag + "assistant")
+ sb.WriteString(imStartTag + "assistant\n")
// Add thinking tags if present
if message.Thinking != "" {
- sb.WriteString("\n" + strings.TrimSpace(message.Thinking) + "")
+ sb.WriteString("" + strings.TrimSpace(message.Thinking) + "\n")
}
if message.Content != "" {
- sb.WriteString("\n" + strings.TrimSpace(message.Content) + "\n")
+ sb.WriteString(strings.TrimSpace(message.Content) + "\n")
}
for _, toolCall := range message.ToolCalls {
@@ -108,20 +115,16 @@ func (r *Intellect3Renderer) Render(messages []api.Message, tools []api.Tool, th
}
sb.WriteString("<|im_end|>\n")
} else {
- sb.WriteString(imStartTag + "assistant")
+ sb.WriteString(imStartTag + "assistant\n")
// Add thinking tags if present
if message.Thinking != "" {
- sb.WriteString("\n" + strings.TrimSpace(message.Thinking) + "")
+ sb.WriteString("" + strings.TrimSpace(message.Thinking) + "\n")
}
// Add content if present
if message.Content != "" {
- if message.Thinking != "" {
- sb.WriteString("\n" + strings.TrimSpace(message.Content))
- } else {
- sb.WriteString("\n" + message.Content)
- }
+ sb.WriteString(message.Content)
}
if !prefill {
@@ -129,10 +132,6 @@ func (r *Intellect3Renderer) Render(messages []api.Message, tools []api.Tool, th
}
}
case "tool":
- // consecutive tool responses should share a single `user`, but
- // have their own tags
-
- // only start a new user block if this is the first tool response
if i == 0 || filteredMessages[i-1].Role != "tool" {
sb.WriteString(imStartTag + "user\n")
}
@@ -141,7 +140,6 @@ func (r *Intellect3Renderer) Render(messages []api.Message, tools []api.Tool, th
sb.WriteString(message.Content)
sb.WriteString("\n\n")
- // close the user block only if this is the last tool response
if i == len(filteredMessages)-1 || filteredMessages[i+1].Role != "tool" {
sb.WriteString(imEndTag + "\n")
}
diff --git a/model/renderers/intellect3_test.go b/model/renderers/intellect3_test.go
new file mode 100644
index 000000000..f0e1a0529
--- /dev/null
+++ b/model/renderers/intellect3_test.go
@@ -0,0 +1,217 @@
+package renderers
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/ollama/ollama/api"
+)
+
+func TestIntellect3Renderer(t *testing.T) {
+ tests := []struct {
+ name string
+ msgs []api.Message
+ tools []api.Tool
+ expected string
+ }{
+ {
+ name: "basic user message",
+ msgs: []api.Message{
+ {Role: "user", Content: "Hello!"},
+ },
+ expected: "<|im_start|>user\n" +
+ "Hello!<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "",
+ },
+ {
+ name: "with system message",
+ msgs: []api.Message{
+ {Role: "system", Content: "You are helpful."},
+ {Role: "user", Content: "Hi"},
+ },
+ expected: "<|im_start|>system\n" +
+ "You are helpful.<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Hi<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "",
+ },
+ {
+ name: "multi-turn conversation",
+ msgs: []api.Message{
+ {Role: "user", Content: "Hello"},
+ {Role: "assistant", Content: "Hi!"},
+ {Role: "user", Content: "Bye"},
+ },
+ expected: "<|im_start|>user\n" +
+ "Hello<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "Hi!<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Bye<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "",
+ },
+ {
+ name: "with tools no system message",
+ msgs: []api.Message{
+ {Role: "user", Content: "Weather?"},
+ },
+ tools: []api.Tool{
+ {
+ Type: "function",
+ Function: api.ToolFunction{
+ Name: "get_weather",
+ Description: "Get weather",
+ Parameters: api.ToolFunctionParameters{
+ Type: "object",
+ Properties: map[string]api.ToolProperty{
+ "location": {Type: api.PropertyType{"string"}},
+ },
+ },
+ },
+ },
+ },
+ expected: "<|im_start|>system\n" +
+ "You are INTELLECT-3, a helpful assistant developed by Prime Intellect, that can interact with a computer to solve tasks.\n\n" +
+ "# Tools\n\n" +
+ "You have access to the following functions:\n\n" +
+ "\n" +
+ "\n" +
+ "get_weather\n" +
+ "Get weather\n" +
+ "\n" +
+ "\n" +
+ "location\n" +
+ "string\n" +
+ "\n" +
+ "\n" +
+ "\n" +
+ "\n\n" +
+ "If you choose to call a function ONLY reply in the following format with NO suffix:\n\n" +
+ "\n" +
+ "\n" +
+ "\n" +
+ "value_1\n" +
+ "\n" +
+ "\n" +
+ "This is the value for the second parameter\n" +
+ "that can span\n" +
+ "multiple lines\n" +
+ "\n" +
+ "\n" +
+ "\n\n" +
+ "\n" +
+ "Reminder:\n" +
+ "- Function calls MUST follow the specified format: an inner block must be nested within XML tags\n" +
+ "- Required parameters MUST be specified\n" +
+ "- You may provide optional reasoning for your function call in natural language BEFORE the function call, but NOT after\n" +
+ "- If there is no function call available, answer the question like normal with your current knowledge and do not tell the user about function calls\n" +
+ "<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Weather?<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "",
+ },
+ {
+ name: "tool call and response",
+ msgs: []api.Message{
+ {Role: "user", Content: "Weather?"},
+ {
+ Role: "assistant",
+ Content: "Checking.",
+ ToolCalls: []api.ToolCall{
+ {
+ ID: "1",
+ Function: api.ToolCallFunction{
+ Name: "get_weather",
+ Arguments: map[string]any{"location": "SF"},
+ },
+ },
+ },
+ },
+ {Role: "tool", Content: `{"temp": 68}`, ToolCallID: "1"},
+ },
+ tools: []api.Tool{
+ {
+ Type: "function",
+ Function: api.ToolFunction{
+ Name: "get_weather",
+ Parameters: api.ToolFunctionParameters{
+ Type: "object",
+ Properties: map[string]api.ToolProperty{
+ "location": {Type: api.PropertyType{"string"}},
+ },
+ },
+ },
+ },
+ },
+ expected: "<|im_start|>system\n" +
+ "You are INTELLECT-3, a helpful assistant developed by Prime Intellect, that can interact with a computer to solve tasks.\n\n" +
+ "# Tools\n\n" +
+ "You have access to the following functions:\n\n" +
+ "\n" +
+ "\n" +
+ "get_weather\n" +
+ "\n" +
+ "\n" +
+ "location\n" +
+ "string\n" +
+ "\n" +
+ "\n" +
+ "\n" +
+ "\n\n" +
+ "If you choose to call a function ONLY reply in the following format with NO suffix:\n\n" +
+ "\n" +
+ "\n" +
+ "\n" +
+ "value_1\n" +
+ "\n" +
+ "\n" +
+ "This is the value for the second parameter\n" +
+ "that can span\n" +
+ "multiple lines\n" +
+ "\n" +
+ "\n" +
+ "\n\n" +
+ "\n" +
+ "Reminder:\n" +
+ "- Function calls MUST follow the specified format: an inner block must be nested within XML tags\n" +
+ "- Required parameters MUST be specified\n" +
+ "- You may provide optional reasoning for your function call in natural language BEFORE the function call, but NOT after\n" +
+ "- If there is no function call available, answer the question like normal with your current knowledge and do not tell the user about function calls\n" +
+ "<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Weather?<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "Checking.\n\n" +
+ "\n" +
+ "\n" +
+ "\n" +
+ "SF\n" +
+ "\n" +
+ "\n" +
+ "<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "\n" +
+ `{"temp": 68}` + "\n" +
+ "\n" +
+ "<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ rendered, err := (&Intellect3Renderer{}).Render(tt.msgs, tt.tools, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if diff := cmp.Diff(rendered, tt.expected); diff != "" {
+ t.Errorf("mismatch (-got +want):\n%s", diff)
+ }
+ })
+ }
+}