jsDocs #88

Merged
merged 8 commits on May 10, 2024
Changes from all commits
6 changes: 6 additions & 0 deletions .gitignore
@@ -128,3 +128,9 @@ dist
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# IDEs
.idea

# MacOS
.DS_Store
119 changes: 93 additions & 26 deletions src/browser.ts
@@ -2,25 +2,25 @@
import 'whatwg-fetch'

import type {
Fetch,
ChatRequest,
ChatResponse,
Config,
GenerateRequest,
PullRequest,
PushRequest,
CopyRequest,
CreateRequest,
DeleteRequest,
EmbeddingsRequest,
GenerateResponse,
EmbeddingsResponse,
ErrorResponse,
Fetch,
GenerateRequest,
GenerateResponse,
ListResponse,
ProgressResponse,
ErrorResponse,
StatusResponse,
DeleteRequest,
CopyRequest,
ShowResponse,
PullRequest,
PushRequest,
ShowRequest,
ChatRequest,
ChatResponse,
CreateRequest,
ShowResponse,
StatusResponse,
} from './interfaces.js'

export class Ollama {
@@ -50,9 +50,20 @@
this.abortController = new AbortController()
}

/**
* Processes a request to the Ollama server. If the request is streamable, it will return an
* AsyncGenerator that yields the response messages. Otherwise, it will return the response
* object.
* @param endpoint {string} - The endpoint to send the request to.
* @param request {object} - The request object to send to the endpoint.
* @protected {T | AsyncGenerator<T>} - The response object or an AsyncGenerator that yields
* response messages.
* @throws {Error} - If the response body is missing or if the response is an error.
* @returns {Promise<T | AsyncGenerator<T>>} - The response object or an AsyncGenerator that yields the streamed response.
*/
protected async processStreamableRequest<T extends object>(
endpoint: string,
request: { stream?: boolean } & Record<string, any>,

Check warning (GitHub Actions / test 16, 18, 20) on line 66 in src/browser.ts: Unexpected any. Specify a different type
): Promise<T | AsyncGenerator<T>> {
request.stream = request.stream ?? false
const response = await utils.post(
@@ -79,7 +90,7 @@
yield message
// message will be done in the case of chat and generate
// message will be success in the case of a progress response (pull, push, create)
if ((message as any).done || (message as any).status === 'success') {

Check warning (GitHub Actions / test 16, 18, 20; two occurrences each) on line 93 in src/browser.ts: Unexpected any. Specify a different type
return
}
}
@@ -87,20 +98,24 @@
})()
} else {
const message = await itr.next()
if (!message.value.done && (message.value as any).status !== 'success') {

Check warning (GitHub Actions / test 16, 18, 20) on line 101 in src/browser.ts: Unexpected any. Specify a different type
throw new Error('Expected a completed response.')
}
return message.value
}
}
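
A minimal sketch of how this protected helper is meant to be called, using a made-up endpoint on a hypothetical subclass. The 'ping' endpoint, its request shape, and the PingResponse type are all invented for illustration; only the calling convention mirrors the helper above, and the 'ollama/browser' entry point is taken from the package README.

import { Ollama } from 'ollama/browser'

interface PingResponse {
  done: boolean
  echo: string
}

class ExtendedOllama extends Ollama {
  // A made-up endpoint, shown only to illustrate the helper's contract:
  // stream: false resolves to one PingResponse, stream: true to an AsyncGenerator<PingResponse>.
  async ping(request: { payload: string; stream?: boolean }) {
    return this.processStreamableRequest<PingResponse>('ping', request)
  }
}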

/**
* Encodes an image to base64 if it is a Uint8Array.
* @param image {Uint8Array | string} - The image to encode.
* @returns {Promise<string>} - The base64 encoded image.
*/
async encodeImage(image: Uint8Array | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array convert it to base64
const uint8Array = new Uint8Array(image)
const numberArray = Array.from(uint8Array)
const base64String = btoa(String.fromCharCode.apply(null, numberArray))
return base64String
return btoa(String.fromCharCode.apply(null, numberArray))
}
// the string may be base64 encoded
return image
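
For reference, a short illustrative call. This is not part of the diff; it assumes the default client exported by the package's browser entry point.

import ollama from 'ollama/browser'

const raw = new Uint8Array([0x89, 0x50, 0x4e, 0x47])     // e.g. the first bytes of a PNG
const encoded = await ollama.encodeImage(raw)             // Uint8Array -> base64 string
const passthrough = await ollama.encodeImage('aGVsbG8=')  // strings are returned unchanged
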
Expand All @@ -110,7 +125,12 @@
request: GenerateRequest & { stream: true },
): Promise<AsyncGenerator<GenerateResponse>>
generate(request: GenerateRequest & { stream?: false }): Promise<GenerateResponse>

/**
* Generates a response from a text prompt.
* @param request {GenerateRequest} - The request object.
* @returns {Promise<GenerateResponse | AsyncGenerator<GenerateResponse>>} - The response object or
* an AsyncGenerator that yields response messages.
*/
async generate(
request: GenerateRequest,
): Promise<GenerateResponse | AsyncGenerator<GenerateResponse>> {
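
Usage sketch for the two overloads. Illustrative only; the model tag is a placeholder and the default browser client is assumed.

import ollama from 'ollama/browser'

// stream omitted or false: resolves to a single, completed GenerateResponse
const answer = await ollama.generate({ model: 'llama3', prompt: 'Why is the sky blue?' })
console.log(answer.response)

// stream: true: resolves to an AsyncGenerator that yields partial GenerateResponse chunks
const chunks = await ollama.generate({ model: 'llama3', prompt: 'Why is the sky blue?', stream: true })
let text = ''
for await (const chunk of chunks) {
  text += chunk.response  // each chunk carries the next fragment of the completion
}
console.log(text)
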
Expand All @@ -122,7 +142,14 @@

chat(request: ChatRequest & { stream: true }): Promise<AsyncGenerator<ChatResponse>>
chat(request: ChatRequest & { stream?: false }): Promise<ChatResponse>

/**
* Chats with the model. The request object can contain messages with images that are either
* Uint8Arrays or base64 encoded strings. The images will be base64 encoded before sending the
* request.
* @param request {ChatRequest} - The request object.
* @returns {Promise<ChatResponse | AsyncGenerator<ChatResponse>>} - The response object or an
* AsyncGenerator that yields response messages.
*/
async chat(request: ChatRequest): Promise<ChatResponse | AsyncGenerator<ChatResponse>> {
if (request.messages) {
for (const message of request.messages) {
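
A hedged usage sketch of the image handling described above; the model tag and image bytes are placeholders, not taken from the diff.

import ollama from 'ollama/browser'

const reply = await ollama.chat({
  model: 'llava',  // placeholder tag for a multimodal model
  messages: [
    {
      role: 'user',
      content: 'What is in this image?',
      images: [new Uint8Array([0x89, 0x50, 0x4e, 0x47])],  // raw bytes; chat() base64-encodes them before sending
    },
  ],
})
console.log(reply.message.content)
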
Expand All @@ -140,7 +167,11 @@
request: CreateRequest & { stream: true },
): Promise<AsyncGenerator<ProgressResponse>>
create(request: CreateRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Creates a new model from a stream of data.
* @param request {CreateRequest} - The request object.
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or a stream of progress responses.
*/
async create(
request: CreateRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
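
An illustrative call, assuming the CreateRequest of this version accepts a model name plus an inline modelfile string; those field names are assumptions, not shown in this diff.

import ollama from 'ollama/browser'

const progress = await ollama.create({
  model: 'my-assistant',
  modelfile: 'FROM llama3\nSYSTEM "You are a terse assistant."',
  stream: true,
})
for await (const step of progress) {
  console.log(step.status)  // the server reports each create step as a ProgressResponse
}
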
Expand All @@ -153,7 +184,13 @@

pull(request: PullRequest & { stream: true }): Promise<AsyncGenerator<ProgressResponse>>
pull(request: PullRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Pulls a model from the Ollama registry. The request object can contain a stream flag to indicate if the
* response should be streamed.
* @param request {PullRequest} - The request object.
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or
* an AsyncGenerator that yields response messages.
*/
async pull(
request: PullRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
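
Usage sketch with streamed progress. The model tag is a placeholder, and the ProgressResponse fields used below are assumed from the interfaces this file imports.

import ollama from 'ollama/browser'

const pulling = await ollama.pull({ model: 'llama3', stream: true })
for await (const part of pulling) {
  if (part.total) {
    console.log(`${part.status}: ${part.completed ?? 0}/${part.total} bytes`)  // per-layer download progress
  } else {
    console.log(part.status)
  }
}
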
Expand All @@ -166,7 +203,13 @@

push(request: PushRequest & { stream: true }): Promise<AsyncGenerator<ProgressResponse>>
push(request: PushRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Pushes a model to the Ollama registry. The request object can contain a stream flag to indicate if the
* response should be streamed.
* @param request {PushRequest} - The request object.
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or
* an AsyncGenerator that yields response messages.
*/
async push(
request: PushRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
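
Pushing mirrors pulling; the registry-qualified model name below is assumed purely for illustration.

import ollama from 'ollama/browser'

const pushing = await ollama.push({ model: 'my-user/my-assistant:latest', stream: true })
for await (const part of pushing) {
  console.log(part.status)  // e.g. preparing, uploading, success
}
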
@@ -177,38 +220,62 @@
})
}

/**
* Deletes a model from the server. The request object should contain the name of the model to
* delete.
* @param request {DeleteRequest} - The request object.
* @returns {Promise<StatusResponse>} - The response object.
*/
async delete(request: DeleteRequest): Promise<StatusResponse> {
await utils.del(this.fetch, `${this.config.host}/api/delete`, {
name: request.model,
})
return { status: 'success' }
}

/**
* Copies a model from one name to another. The request object should contain the name of the
* model to copy and the new name.
* @param request {CopyRequest} - The request object.
* @returns {Promise<StatusResponse>} - The response object.
*/
async copy(request: CopyRequest): Promise<StatusResponse> {
await utils.post(this.fetch, `${this.config.host}/api/copy`, { ...request })
return { status: 'success' }
}

/**
* Lists the models on the server.
* @returns {Promise<ListResponse>} - The response object.
* @throws {Error} - If the response body is missing.
*/
async list(): Promise<ListResponse> {
const response = await utils.get(this.fetch, `${this.config.host}/api/tags`)
const listResponse = (await response.json()) as ListResponse
return listResponse
return (await response.json()) as ListResponse
}

/**
* Shows the metadata of a model. The request object should contain the name of the model.
* @param request {ShowRequest} - The request object.
* @returns {Promise<ShowResponse>} - The response object.
*/
async show(request: ShowRequest): Promise<ShowResponse> {
const response = await utils.post(this.fetch, `${this.config.host}/api/show`, {
...request,
})
const showResponse = (await response.json()) as ShowResponse
return showResponse
return (await response.json()) as ShowResponse
}

/**
* Embeds a text prompt into a vector.
* @param request {EmbeddingsRequest} - The request object.
* @returns {Promise<EmbeddingsResponse>} - The response object.
*/
async embeddings(request: EmbeddingsRequest): Promise<EmbeddingsResponse> {
const response = await utils.post(this.fetch, `${this.config.host}/api/embeddings`, {
...request,
})
const embeddingsResponse = (await response.json()) as EmbeddingsResponse
return embeddingsResponse
return (await response.json()) as EmbeddingsResponse
}
}
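
For orientation, a combined sketch of the non-streaming management calls documented above. Model names are placeholders, and the request field names follow the interfaces this file imports, so treat them as assumptions.

import ollama from 'ollama/browser'

const { models } = await ollama.list()                                  // installed models
const info = await ollama.show({ model: 'llama3' })                     // modelfile, parameters, template, ...
await ollama.copy({ source: 'llama3', destination: 'llama3-backup' })   // duplicate under a new name
await ollama.delete({ model: 'llama3-backup' })                         // remove the copy again
const { embedding } = await ollama.embeddings({ model: 'llama3', prompt: 'hello world' })
console.log(models.length, info.template, embedding.length)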

25 changes: 21 additions & 4 deletions src/index.ts
@@ -1,6 +1,6 @@
import * as utils from './utils.js'
import fs, { promises, createReadStream } from 'fs'
import { join, resolve, dirname } from 'path'
import fs, { createReadStream, promises } from 'fs'
import { dirname, join, resolve } from 'path'
import { createHash } from 'crypto'
import { homedir } from 'os'
import { Ollama as OllamaBrowser } from './browser.js'
@@ -11,8 +11,7 @@ export class Ollama extends OllamaBrowser {
async encodeImage(image: Uint8Array | Buffer | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array or Buffer, convert it to base64
const result = Buffer.from(image).toString('base64')
return result
return Buffer.from(image).toString('base64')
}
try {
if (fs.existsSync(image)) {
@@ -27,6 +26,12 @@ export class Ollama extends OllamaBrowser {
return image
}

/**
* Parse the modelfile and replace the FROM and ADAPTER commands with the corresponding blob hashes.
* @param modelfile {string} - The modelfile content
* @param mfDir {string} - The directory of the modelfile
* @private @internal
*/
private async parseModelfile(
modelfile: string,
mfDir: string = process.cwd(),
@@ -49,13 +54,25 @@
return out.join('\n')
}
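
To make the rewriting concrete, a rough before/after sketch. The exact blob-reference syntax is an assumption based on this helper's JSDoc, not something visible in the diff; digests are placeholders.

// Modelfile as written by the user, referencing local files:
const userModelfile = [
  'FROM ./mistral-7b.gguf',
  'ADAPTER ~/adapters/my-lora.bin',
  'PARAMETER temperature 0.7',
].join('\n')

// After parseModelfile, each local path has been uploaded as a blob and the command
// points at the blob's digest instead (placeholder digests; reference syntax assumed):
const rewrittenModelfile = [
  'FROM @sha256:3a2c...e91b',
  'ADAPTER @sha256:77f0...0c4d',
  'PARAMETER temperature 0.7',
].join('\n')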

/**
* Resolve the path to an absolute path.
* @param inputPath {string} - The input path
* @param mfDir {string} - The directory of the modelfile
* @private @internal
*/
private resolvePath(inputPath, mfDir) {
if (inputPath.startsWith('~')) {
return join(homedir(), inputPath.slice(1))
}
return resolve(mfDir, inputPath)
}
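
A self-contained restatement of the same two branches, so they can be tried directly; the paths are illustrative.

import { homedir } from 'os'
import { join, resolve } from 'path'

function resolvePathLike(inputPath: string, mfDir: string): string {
  if (inputPath.startsWith('~')) {
    return join(homedir(), inputPath.slice(1))  // '~/adapters/lora.bin' -> '<home>/adapters/lora.bin'
  }
  return resolve(mfDir, inputPath)              // './weights.gguf' -> '<mfDir>/weights.gguf'
}

console.log(resolvePathLike('~/adapters/lora.bin', '/srv/modelfiles'))
console.log(resolvePathLike('./weights.gguf', '/srv/modelfiles'))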

/**
* checks if a file exists
* @param path {string} - The path to the file
* @private @internal
* @returns {Promise<boolean>} - Whether the file exists or not
*/
private async fileExists(path: string): Promise<boolean> {
try {
await promises.access(path)