perfsprint
parent efd9f5e67e
commit 974ae8ef84
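
This change applies the fixes suggested by the perfsprint linter: error values built with fmt.Errorf but containing no format verbs become errors.New, and simple fmt.Sprintf conversions are replaced with direct strconv and encoding/hex calls, avoiding fmt's generic formatting path. Calls that actually format or wrap an error (fmt.Errorf with %w, fmt.Sprintf with a real template) are left untouched. A minimal before/after sketch of the pattern follows; it is illustrative only and not code from this repository.

package example

import (
	"encoding/hex"
	"errors"
	"fmt"
	"strconv"
)

// before: fmt does the work through its generic formatting machinery.
func before(id int, ok bool, sum []byte) error {
	_ = fmt.Sprintf("%d", id)  // integer to string via Sprintf
	_ = fmt.Sprintf("%t", ok)  // bool to string via Sprintf
	_ = fmt.Sprintf("%x", sum) // hex encoding via Sprintf
	return fmt.Errorf("model is required") // constant message, no verbs
}

// after: the equivalent direct calls used throughout this commit
// (strconv.FormatInt is the analogue for int64 values).
func after(id int, ok bool, sum []byte) error {
	_ = strconv.Itoa(id)
	_ = strconv.FormatBool(ok)
	_ = hex.EncodeToString(sum)
	return errors.New("model is required")
}
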
@@ -2,6 +2,7 @@ package api
 
 import (
 	"encoding/json"
+	"errors"
 	"fmt"
 	"log/slog"
 	"math"
@@ -998,7 +999,7 @@ func (t *ThinkValue) UnmarshalJSON(data []byte) error {
 		return nil
 	}

-	return fmt.Errorf("think must be a boolean or string (\"high\", \"medium\", \"low\", true, or false)")
+	return errors.New("think must be a boolean or string (\"high\", \"medium\", \"low\", true, or false)")
 }

 // MarshalJSON implements json.Marshaler

@@ -319,7 +319,7 @@ func GetInferenceComputer(ctx context.Context) ([]InferenceCompute, error) {
 	for {
 		select {
 		case <-ctx.Done():
-			return nil, fmt.Errorf("timeout scanning server log for inference compute details")
+			return nil, errors.New("timeout scanning server log for inference compute details")
 		default:
 		}
 		file, err := os.Open(serverLogPath)

@@ -5,6 +5,7 @@ package store
 import (
 	"database/sql"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"strings"
 	"time"
@@ -587,7 +588,7 @@ func (db *database) getChatWithOptions(id string, loadAttachmentData bool) (*Cha
 	)
 	if err != nil {
 		if err == sql.ErrNoRows {
-			return nil, fmt.Errorf("chat not found")
+			return nil, errors.New("chat not found")
 		}
 		return nil, fmt.Errorf("query chat: %w", err)
 	}
@@ -752,7 +753,7 @@ func (db *database) updateLastMessage(chatID string, msg Message) error {
 		return fmt.Errorf("get rows affected: %w", err)
 	}
 	if rowsAffected == 0 {
-		return fmt.Errorf("no message found to update")
+		return errors.New("no message found to update")
 	}

 	_, err = tx.Exec("DELETE FROM attachments WHERE message_id = ?", messageID)

@@ -5,6 +5,7 @@ package store
 import (
 	"crypto/sha256"
 	"encoding/hex"
+	"errors"
 	"fmt"
 	"os"
 	"path/filepath"
@@ -26,7 +27,7 @@ func (i *Image) Bytes() ([]byte, error) {
 // ImgBytes reads image data from the specified file path
 func ImgBytes(path string) ([]byte, error) {
 	if path == "" {
-		return nil, fmt.Errorf("empty image path")
+		return nil, errors.New("empty image path")
 	}

 	data, err := os.ReadFile(path)

@@ -4,6 +4,7 @@ package tools
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"net/url"
 	"regexp"
@@ -130,7 +131,7 @@ func (b *BrowserSearch) Schema() map[string]any {
 func (b *BrowserSearch) Execute(ctx context.Context, args map[string]any) (any, string, error) {
 	query, ok := args["query"].(string)
 	if !ok {
-		return nil, "", fmt.Errorf("query parameter is required")
+		return nil, "", errors.New("query parameter is required")
 	}

 	topn, ok := args["topn"].(int)
@@ -150,7 +151,7 @@ func (b *BrowserSearch) Execute(ctx context.Context, args map[string]any) (any,

 	searchResponse, ok := result.(*WebSearchResponse)
 	if !ok {
-		return nil, "", fmt.Errorf("invalid search results format")
+		return nil, "", errors.New("invalid search results format")
 	}

 	// Build main search results page that contains all search results
@@ -594,7 +595,7 @@ func (b *BrowserOpen) Execute(ctx context.Context, args map[string]any) (any, st
 	// Try to get id as integer (link ID from current page)
 	if id, ok := args["id"].(float64); ok {
 		if page == nil {
-			return nil, "", fmt.Errorf("no current page to resolve link from")
+			return nil, "", errors.New("no current page to resolve link from")
 		}
 		idInt := int(id)
 		pageURL, ok := page.Links[idInt]
@@ -637,7 +638,7 @@ func (b *BrowserOpen) Execute(ctx context.Context, args map[string]any) (any, st

 	// If no id provided, just display current page
 	if page == nil {
-		return nil, "", fmt.Errorf("no current page to display")
+		return nil, "", errors.New("no current page to display")
 	}
 	// Only add to PageStack without updating URLToPage
 	b.state.Data.PageStack = append(b.state.Data.PageStack, page.URL)
@@ -742,7 +743,7 @@ func (b *BrowserFind) Schema() map[string]any {
 func (b *BrowserFind) Execute(ctx context.Context, args map[string]any) (any, string, error) {
 	pattern, ok := args["pattern"].(string)
 	if !ok {
-		return nil, "", fmt.Errorf("pattern parameter is required")
+		return nil, "", errors.New("pattern parameter is required")
 	}

 	// Get cursor parameter if provided, default to current page
@@ -756,7 +757,7 @@ func (b *BrowserFind) Execute(ctx context.Context, args map[string]any) (any, st
 	if cursor == -1 {
 		// Use current page
 		if len(b.state.Data.PageStack) == 0 {
-			return nil, "", fmt.Errorf("no pages to search in")
+			return nil, "", errors.New("no pages to search in")
 		}
 		var err error
 		page, err = b.getPageFromStack(b.state.Data.PageStack[len(b.state.Data.PageStack)-1])
@@ -776,7 +777,7 @@ func (b *BrowserFind) Execute(ctx context.Context, args map[string]any) (any, st
 	}

 	if page == nil {
-		return nil, "", fmt.Errorf("page not found")
+		return nil, "", errors.New("page not found")
 	}

 	// Create find results page

@@ -5,6 +5,7 @@ package tools
 import (
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 )
@@ -87,7 +88,7 @@ func (g *BrowserCrawler) Schema() map[string]any {
 func (g *BrowserCrawler) Execute(ctx context.Context, args map[string]any) (*CrawlResponse, error) {
 	urlsRaw, ok := args["urls"].([]any)
 	if !ok {
-		return nil, fmt.Errorf("urls parameter is required and must be an array of strings")
+		return nil, errors.New("urls parameter is required and must be an array of strings")
 	}

 	urls := make([]string, 0, len(urlsRaw))
@@ -98,7 +99,7 @@ func (g *BrowserCrawler) Execute(ctx context.Context, args map[string]any) (*Cra
 	}

 	if len(urls) == 0 {
-		return nil, fmt.Errorf("at least one URL is required")
+		return nil, errors.New("at least one URL is required")
 	}

 	return g.performWebCrawl(ctx, urls)

@@ -5,6 +5,7 @@ package tools
 import (
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"strconv"
 	"time"
@@ -84,7 +85,7 @@ func (w *BrowserWebSearch) Schema() map[string]any {
 func (w *BrowserWebSearch) Execute(ctx context.Context, args map[string]any) (any, error) {
 	queriesRaw, ok := args["queries"].([]any)
 	if !ok {
-		return nil, fmt.Errorf("queries parameter is required and must be an array of strings")
+		return nil, errors.New("queries parameter is required and must be an array of strings")
 	}

 	queries := make([]string, 0, len(queriesRaw))
@@ -95,7 +96,7 @@ func (w *BrowserWebSearch) Execute(ctx context.Context, args map[string]any) (an
 	}

 	if len(queries) == 0 {
-		return nil, fmt.Errorf("at least one query is required")
+		return nil, errors.New("at least one query is required")
 	}

 	maxResults := 5

@@ -6,6 +6,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"net/http"
 	"net/url"
@@ -61,11 +62,11 @@ func (w *WebFetch) Prompt() string {
 func (w *WebFetch) Execute(ctx context.Context, args map[string]any) (any, string, error) {
 	urlRaw, ok := args["url"]
 	if !ok {
-		return nil, "", fmt.Errorf("url parameter is required")
+		return nil, "", errors.New("url parameter is required")
 	}
 	urlStr, ok := urlRaw.(string)
 	if !ok || strings.TrimSpace(urlStr) == "" {
-		return nil, "", fmt.Errorf("url must be a non-empty string")
+		return nil, "", errors.New("url must be a non-empty string")
 	}

 	result, err := performWebFetch(ctx, urlStr)

@@ -6,6 +6,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"net/http"
 	"net/url"
@@ -71,12 +72,12 @@ func (g *WebSearch) Schema() map[string]any {
 func (w *WebSearch) Execute(ctx context.Context, args map[string]any) (any, string, error) {
 	rawQuery, ok := args["query"]
 	if !ok {
-		return nil, "", fmt.Errorf("query parameter is required")
+		return nil, "", errors.New("query parameter is required")
 	}

 	queryStr, ok := rawQuery.(string)
 	if !ok || strings.TrimSpace(queryStr) == "" {
-		return nil, "", fmt.Errorf("query must be a non-empty string")
+		return nil, "", errors.New("query must be a non-empty string")
 	}

 	maxResults := 5

@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"path/filepath"
 	"slices"
+	"strconv"
 	"strings"
 	"unicode/utf8"
@@ -73,7 +74,7 @@ func extractPDFText(data []byte) (string, error) {
 		if strings.TrimSpace(text) != "" {
 			if textBuilder.Len() > 0 {
 				textBuilder.WriteString("\n\n--- Page ")
-				textBuilder.WriteString(fmt.Sprintf("%d", i))
+				textBuilder.WriteString(strconv.Itoa(i))
 				textBuilder.WriteString(" ---\n")
 			}
 			textBuilder.WriteString(text)

app/ui/ui.go (24 changed lines)
@@ -194,7 +194,7 @@ func (s *Server) Handler() http.Handler {
 		log := s.log()
 		level := slog.LevelInfo
 		start := time.Now()
-		requestID := fmt.Sprintf("%d", time.Now().UnixNano())
+		requestID := strconv.FormatInt(time.Now().UnixNano(), 10)

 		defer func() {
 			p := recover()
@@ -204,7 +204,7 @@ func (s *Server) Handler() http.Handler {

 			// Handle panic with user-friendly error
 			if !sw.Written() {
-				s.handleError(sw, fmt.Errorf("internal server error"))
+				s.handleError(sw, errors.New("internal server error"))
 			}
 		}
@@ -382,7 +382,7 @@ func waitForServer(ctx context.Context) error {
 			break
 		}
 		if time.Now().After(timeout) {
-			return fmt.Errorf("timeout waiting for Ollama server to be ready")
+			return errors.New("timeout waiting for Ollama server to be ready")
 		}
 		time.Sleep(10 * time.Millisecond)
 	}
@@ -455,7 +455,7 @@ func (s *Server) checkModelUpstream(ctx context.Context, modelName string, timeo

 	digest := resp.Header.Get("ollama-content-digest")
 	if digest == "" {
-		return "", 0, fmt.Errorf("no digest header found")
+		return "", 0, errors.New("no digest header found")
 	}

 	var pushTime int64
@@ -598,12 +598,12 @@ func (s *Server) chat(w http.ResponseWriter, r *http.Request) error {
 	}

 	if req.Model == "" {
-		return fmt.Errorf("empty model")
+		return errors.New("empty model")
 	}

 	// Don't allow empty messages unless forceUpdate is true
 	if req.Prompt == "" && !req.ForceUpdate {
-		return fmt.Errorf("empty message")
+		return errors.New("empty message")
 	}

 	if createdChat {
@@ -1194,7 +1194,7 @@ func (s *Server) getChat(w http.ResponseWriter, r *http.Request) error {
 	cid := r.PathValue("id")

 	if cid == "" {
-		return fmt.Errorf("chat ID is required")
+		return errors.New("chat ID is required")
 	}

 	chat, err := s.Store.Chat(cid)
@@ -1252,7 +1252,7 @@ func (s *Server) getChat(w http.ResponseWriter, r *http.Request) error {
 func (s *Server) renameChat(w http.ResponseWriter, r *http.Request) error {
 	cid := r.PathValue("id")
 	if cid == "" {
-		return fmt.Errorf("chat ID is required")
+		return errors.New("chat ID is required")
 	}

 	var req struct {
@@ -1283,7 +1283,7 @@ func (s *Server) renameChat(w http.ResponseWriter, r *http.Request) error {
 func (s *Server) deleteChat(w http.ResponseWriter, r *http.Request) error {
 	cid := r.PathValue("id")
 	if cid == "" {
-		return fmt.Errorf("chat ID is required")
+		return errors.New("chat ID is required")
 	}

 	// Check if the chat exists (no need to load attachments)
@@ -1291,7 +1291,7 @@ func (s *Server) deleteChat(w http.ResponseWriter, r *http.Request) error {
 	if err != nil {
 		if errors.Is(err, not.Found) {
 			w.WriteHeader(http.StatusNotFound)
-			return fmt.Errorf("chat not found")
+			return errors.New("chat not found")
 		}
 		return fmt.Errorf("failed to get chat: %w", err)
 	}
@@ -1592,7 +1592,7 @@ func (s *Server) getInferenceCompute(w http.ResponseWriter, r *http.Request) err

 func (s *Server) modelUpstream(w http.ResponseWriter, r *http.Request) error {
 	if r.Method != "POST" {
-		return fmt.Errorf("method not allowed")
+		return errors.New("method not allowed")
 	}

 	var req struct {
@@ -1603,7 +1603,7 @@ func (s *Server) modelUpstream(w http.ResponseWriter, r *http.Request) error {
 	}

 	if req.Model == "" {
-		return fmt.Errorf("model is required")
+		return errors.New("model is required")
 	}

 	digest, pushTime, err := s.checkModelUpstream(r.Context(), req.Model, 5*time.Second)

@@ -92,7 +92,7 @@ func DoUpgrade(interactive bool) error {

 	bundle := getStagedUpdate()
 	if bundle == "" {
-		return fmt.Errorf("failed to lookup downloads")
+		return errors.New("failed to lookup downloads")
 	}

 	slog.Info("starting upgrade", "app", BundlePath, "update", bundle, "pid", os.Getpid(), "log", UpgradeLogFile)
@@ -107,7 +107,7 @@ func DoUpgrade(interactive bool) error {
 	// Verify old doesn't exist yet
 	if _, err := os.Stat(contentsOldName); err == nil {
 		slog.Error("prior upgrade failed", "backup", contentsOldName)
-		return fmt.Errorf("prior upgrade failed - please upgrade manually by installing the bundle")
+		return errors.New("prior upgrade failed - please upgrade manually by installing the bundle")
 	}
 	if err := os.MkdirAll(appBackupDir, 0o755); err != nil {
 		return fmt.Errorf("unable to create backup dir %s: %w", appBackupDir, err)
@@ -133,7 +133,7 @@ func DoUpgrade(interactive bool) error {
 		return err
 	}
 	if !chownWithAuthorization(u.Username) {
-		return fmt.Errorf("unable to change permissions to complete upgrade")
+		return errors.New("unable to change permissions to complete upgrade")
 	}
 	if err := os.Rename(BundlePath, appBackup); err != nil {
 		return fmt.Errorf("unable to perform upgrade - failed to stage old version: %w", err)
@@ -264,7 +264,7 @@ func DoPostUpgradeCleanup() error {
 func verifyDownload() error {
 	bundle := getStagedUpdate()
 	if bundle == "" {
-		return fmt.Errorf("failed to lookup downloads")
+		return errors.New("failed to lookup downloads")
 	}
 	slog.Debug("verifying update", "bundle", bundle)
@@ -347,11 +347,11 @@ func verifyDownload() error {
 func DoUpgradeAtStartup() error {
 	bundle := getStagedUpdate()
 	if bundle == "" {
-		return fmt.Errorf("failed to lookup downloads")
+		return errors.New("failed to lookup downloads")
 	}

 	if BundlePath == "" {
-		return fmt.Errorf("unable to upgrade at startup, app in development mode")
+		return errors.New("unable to upgrade at startup, app in development mode")
 	}

 	// [Re]verify before proceeding

@@ -204,7 +204,7 @@ func CreateHandler(cmd *cobra.Command, args []string) error {

 	if err := client.Create(cmd.Context(), req, fn); err != nil {
 		if strings.Contains(err.Error(), "path or Modelfile are required") {
-			return fmt.Errorf("the ollama server must be updated to use `ollama create` with this client")
+			return errors.New("the ollama server must be updated to use `ollama create` with this client")
 		}
 		return err
 	}
@@ -991,7 +991,7 @@ func showInfo(resp *api.ShowResponse, verbose bool, w io.Writer) error {
 		var v string
 		switch vData := resp.ModelInfo[k].(type) {
 		case bool:
-			v = fmt.Sprintf("%t", vData)
+			v = strconv.FormatBool(vData)
 		case string:
 			v = vData
 		case float64:

@@ -5,6 +5,7 @@ import (
 	"cmp"
 	"encoding/binary"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"log/slog"
@@ -511,7 +512,7 @@ func writeGGUFArray[S ~[]E, E any](w io.Writer, t uint32, s S) error {
 func WriteGGUF(f *os.File, kv KV, ts []*Tensor) error {
 	arch := kv.String("general.architecture")
 	if arch == "" {
-		return fmt.Errorf("architecture not set")
+		return errors.New("architecture not set")
 	}

 	if err := binary.Write(f, binary.LittleEndian, []byte("GGUF")); err != nil {

@@ -1474,7 +1474,7 @@ func (s *llmServer) Completion(ctx context.Context, req CompletionRequest, fn fu
 		// User provided a JSON schema
 		g := llama.SchemaToGrammar(req.Format)
 		if g == nil {
-			return fmt.Errorf("invalid JSON schema in format")
+			return errors.New("invalid JSON schema in format")
 		}
 		req.Grammar = string(g)
 	}
@@ -1686,7 +1686,7 @@ func (s *llamaServer) Tokenize(ctx context.Context, content string) ([]int, erro
 	defer s.llamaModelLock.Unlock()

 	if s.llamaModel == nil {
-		return nil, fmt.Errorf("no tokenizer configured")
+		return nil, errors.New("no tokenizer configured")
 	}

 	return s.llamaModel.Tokenize(content, false, true)
@@ -1711,7 +1711,7 @@ func (s *llamaServer) Detokenize(ctx context.Context, tokens []int) (string, err
 	defer s.llamaModelLock.Unlock()

 	if s.llamaModel == nil {
-		return "", fmt.Errorf("no tokenizer configured")
+		return "", errors.New("no tokenizer configured")
 	}

 	var sb strings.Builder

@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/binary"
+	"errors"
 	"fmt"
 	"math"
 	"slices"
@@ -92,7 +93,7 @@ func NewBackend(modelPath string, params BackendParams) (Backend, error) {
 		return backend(modelPath, params)
 	}

-	return nil, fmt.Errorf("unsupported backend")
+	return nil, errors.New("unsupported backend")
 }

 type Context interface {

@@ -4,6 +4,7 @@ import (
 	"context"
 	"encoding/binary"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"hash/maphash"
 	"io"
@@ -587,7 +588,7 @@ func GetDevicesFromRunner(ctx context.Context, runner BaseRunner) ([]DeviceInfo,
 	for {
 		select {
 		case <-ctx.Done():
-			return nil, fmt.Errorf("failed to finish discovery before timeout")
+			return nil, errors.New("failed to finish discovery before timeout")
 		case <-tick:
 			r, err := http.NewRequestWithContext(ctx, http.MethodGet, fmt.Sprintf("http://127.0.0.1:%d/info", port), nil)
 			if err != nil {
@@ -599,7 +600,7 @@ func GetDevicesFromRunner(ctx context.Context, runner BaseRunner) ([]DeviceInfo,
 			if err != nil {
 				// slog.Warn("failed to send request", "error", err)
 				if runner.HasExited() {
-					return nil, fmt.Errorf("runner crashed")
+					return nil, errors.New("runner crashed")
 				}
 				continue
 			}
@@ -607,7 +608,7 @@ func GetDevicesFromRunner(ctx context.Context, runner BaseRunner) ([]DeviceInfo,

 			if resp.StatusCode == http.StatusNotFound {
 				// old runner, fall back to bootstrapping model
-				return nil, fmt.Errorf("llamarunner free vram reporting not supported")
+				return nil, errors.New("llamarunner free vram reporting not supported")
 			}

 			body, err := io.ReadAll(resp.Body)

@@ -2,6 +2,7 @@ package qwen2
 
 import (
 	"cmp"
+	"errors"
 	"fmt"
 	"math"
 	"strings"
@@ -130,7 +131,7 @@ func (m Model) Shift(ctx ml.Context, layer int, key, shift ml.Tensor) (ml.Tensor
 func New(c fs.Config) (model.Model, error) {
 	// This model currently only supports the gpt2 tokenizer
 	if c.String("tokenizer.ggml.model") == "llama" {
-		return nil, fmt.Errorf("unsupported tokenizer: llama")
+		return nil, errors.New("unsupported tokenizer: llama")
 	}
 	// detect library/qwen model(s) which are incompatible
 	if strings.HasPrefix(c.String("general.name"), "Qwen2-beta") {

@@ -2,7 +2,6 @@ package ollamarunner
 
 import (
 	"errors"
-	"fmt"
 	"slices"
 	"testing"
 	"time"
@@ -511,7 +510,7 @@ type mockCache struct {
 // Implement only the methods needed for the test
 func (m *mockCache) Remove(seq int, beginIndex, endIndex int32) error {
 	if m.shouldFail {
-		return fmt.Errorf("mock cache removal error")
+		return errors.New("mock cache removal error")
 	}
 	return nil
 }

@@ -456,7 +456,7 @@ func kvFromLayers(baseLayers []*layerGGML) (ggml.KV, error) {
 			return l.KV(), nil
 		}
 	}
-	return ggml.KV{}, fmt.Errorf("no base model was found")
+	return ggml.KV{}, errors.New("no base model was found")
 }

 func createModel(r api.CreateRequest, name model.Name, baseLayers []*layerGGML, config *ConfigV2, fn func(resp api.ProgressResponse)) (err error) {

@@ -438,7 +438,7 @@ func (w *checkWriter) Write(p []byte) (int, error) {
 		// last write. check hash.
 		sum := w.h.Sum(nil)
 		if !bytes.Equal(sum, w.d.sum[:]) {
-			return 0, w.seterr(fmt.Errorf("file content changed underfoot"))
+			return 0, w.seterr(errors.New("file content changed underfoot"))
 		}
 		if w.testHookBeforeFinalWrite != nil {
 			w.testHookBeforeFinalWrite(w.f)

@@ -60,7 +60,7 @@ func (d Digest) String() string {
 }

 func (d Digest) Short() string {
-	return fmt.Sprintf("%x", d.sum[:4])
+	return hex.EncodeToString(d.sum[:4])
 }

 func (d Digest) Sum() [32]byte {

@@ -130,7 +130,7 @@ func (s *Server) scheduleRunner(ctx context.Context, name string, caps []model.C
 	}

 	if slices.Contains(model.Config.ModelFamilies, "mllama") && len(model.ProjectorPaths) > 0 {
-		return nil, nil, nil, fmt.Errorf("'llama3.2-vision' is no longer compatible with your version of Ollama and has been replaced by a newer version. To re-download, run 'ollama pull llama3.2-vision'")
+		return nil, nil, nil, errors.New("'llama3.2-vision' is no longer compatible with your version of Ollama and has been replaced by a newer version. To re-download, run 'ollama pull llama3.2-vision'")
 	}

 	if err := model.CheckCapabilities(caps...); err != nil {