use cmd context cancel
BruceMacD committed Aug 30, 2023
1 parent 6d7dd76 commit acb4d2a
Showing 1 changed file with 8 additions and 6 deletions.
14 changes: 8 additions & 6 deletions llm/ggml_llama.go
@@ -221,8 +221,9 @@ func (ft llamaFileType) String() string {
 }
 
 type Running struct {
-	Port int
-	Cmd  *exec.Cmd
+	Port   int
+	Cmd    *exec.Cmd
+	Cancel context.CancelFunc
 }
 
 type llama struct {
@@ -279,14 +280,16 @@ func newLlama(model string, adapters []string, runner ModelRunner, opts api.Opti
 	// start the llama.cpp server with a retry in case the port is already in use
 	for try := 0; try < 3; try++ {
 		port := rand.Intn(65535-49152) + 49152 // get a random port in the ephemeral range
-		cmd := exec.Command(
+		ctx, cancel := context.WithCancel(context.Background())
+		cmd := exec.CommandContext(
+			ctx,
 			runner.Path,
 			append(params, "--port", strconv.Itoa(port))...,
 		)
 		var stderr bytes.Buffer
 		cmd.Stderr = &stderr
 
-		llm := &llama{Options: opts, Running: Running{Port: port, Cmd: cmd}}
+		llm := &llama{Options: opts, Running: Running{Port: port, Cmd: cmd, Cancel: cancel}}
 
 		if err := waitForServer(llm); err != nil {
 			log.Printf("error starting llama.cpp server: %v", err)
@@ -343,8 +346,7 @@ func waitForServer(llm *llama) error {
 }
 
 func (llm *llama) Close() {
-	llm.Running.Cmd.Process.Kill()
-	llm.Running.Cmd.Wait()
+	llm.Running.Cmd.Cancel()
 }
 
 func (llm *llama) SetOptions(opts api.Options) {
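For readers unfamiliar with the pattern, the sketch below shows context-based process shutdown in isolation: exec.CommandContext ties a child process to a context, and cancelling that context (or invoking the Cancel hook that CommandContext installs on the Cmd, as the new Close does) kills the process. This is an illustrative sketch, not code from the repository; the runner type and the sleep command stand in for the Running struct and the llama.cpp server binary, and it assumes Go 1.20+ for the Cmd.Cancel hook.

// Minimal sketch of context-based process shutdown, assuming Go 1.20+
// (for the exec.Cmd Cancel hook). The "sleep" binary and the runner type
// are stand-ins; nothing here is copied from the repository.
package main

import (
	"context"
	"log"
	"os/exec"
	"time"
)

type runner struct {
	cmd    *exec.Cmd
	cancel context.CancelFunc
}

func start() (*runner, error) {
	ctx, cancel := context.WithCancel(context.Background())
	// CommandContext installs a Cancel hook on cmd that kills the
	// process once ctx is done.
	cmd := exec.CommandContext(ctx, "sleep", "60")
	if err := cmd.Start(); err != nil {
		cancel()
		return nil, err
	}
	return &runner{cmd: cmd, cancel: cancel}, nil
}

func (r *runner) Close() {
	// Cancelling the context makes exec run the Cancel hook, which sends
	// the child a kill signal; Wait then reaps the exited process.
	r.cancel()
	_ = r.cmd.Wait()
}

func main() {
	r, err := start()
	if err != nil {
		log.Fatal(err)
	}
	time.Sleep(time.Second)
	r.Close()
	log.Println("child process stopped")
}

Note that the commit stores the context.CancelFunc in Running while the new Close calls Cmd.Cancel directly; either path ends with the child being killed, since CommandContext's default Cancel hook calls Kill on the process.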
