From fe9ab424bbc43269a26b5341637f6a5b011a5bf4 Mon Sep 17 00:00:00 2001
From: Bruce MacDonald <brucewmacdonald@gmail.com>
Date: Tue, 16 Jul 2024 13:53:35 -0700
Subject: [PATCH] add insert support to generate endpoint

---
 README.md                       |  1 +
 examples/fill-in-middle/fill.ts | 18 +++---------------
 src/interfaces.ts               |  1 +
 3 files changed, 5 insertions(+), 15 deletions(-)

diff --git a/README.md b/README.md
index abc7467..95317de 100644
--- a/README.md
+++ b/README.md
@@ -85,6 +85,7 @@ ollama.generate(request)
 - `request` `<Object>`: The request object containing generate parameters.
   - `model` `<string>` The name of the model to use for the chat.
   - `prompt` `<string>`: The prompt to send to the model.
+  - `suffix` `<string>`: (Optional) Suffix is the text that comes after the inserted text.
   - `system` `<string>`: (Optional) Override the model system prompt.
   - `template` `<string>`: (Optional) Override the model template.
   - `raw` `<boolean>`: (Optional) Bypass the prompt template and pass the prompt directly to the model.
diff --git a/examples/fill-in-middle/fill.ts b/examples/fill-in-middle/fill.ts
index d38b62b..149c4c8 100644
--- a/examples/fill-in-middle/fill.ts
+++ b/examples/fill-in-middle/fill.ts
@@ -1,20 +1,8 @@
 import ollama from 'ollama'
 
-const prefix = `def remove_non_ascii(s: str) -> str:
-"""
-`
-const suffix = `
-return result
-`
 const response = await ollama.generate({
-  model: 'codellama:7b-code',
-  prompt: `<PRE> ${prefix} <SUF>${suffix} <MID>`,
-  options: {
-    num_predict: 128,
-    temperature: 0,
-    top_p: 0.9,
-    presence_penalty: 0,
-    stop: ['<EOT>'],
-  },
+  model: 'deepseek-coder-v2',
+  prompt: `def add(`,
+  suffix: `return c`,
 })
 console.log(response.response)
diff --git a/src/interfaces.ts b/src/interfaces.ts
index c05763c..6af2ec9 100644
--- a/src/interfaces.ts
+++ b/src/interfaces.ts
@@ -46,6 +46,7 @@ export interface Options {
 export interface GenerateRequest {
   model: string
   prompt: string
+  suffix?: string
   system?: string
   template?: string
   context?: number[]