Skip to content

Commit

Permalink
Add llms.vector module for vector search using grafana-llm-app
Browse files Browse the repository at this point in the history
This adds convenience functions to access the endpoints added in
grafana/grafana-llm-app#33.
  • Loading branch information
sd2k committed Sep 8, 2023
1 parent 73fd3d7 commit f46fd1f
Show file tree
Hide file tree
Showing 4 changed files with 96 additions and 2 deletions.
2 changes: 2 additions & 0 deletions src/llms/constants.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
/** The plugin ID of the Grafana LLM app plugin (grafana/grafana-llm-app). */
export const LLM_PLUGIN_ID = 'grafana-llm-app';
/** Base route for the LLM plugin's HTTP API, used to reach its resource endpoints. */
export const LLM_PLUGIN_ROUTE = `/api/plugins/${LLM_PLUGIN_ID}`;
1 change: 1 addition & 0 deletions src/llms/index.ts
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
// Namespaced re-exports so consumers can write `llms.openai.…` / `llms.vector.…`.
export * as openai from './openai';
export * as vector from './vector';
4 changes: 2 additions & 2 deletions src/llms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@ import { getBackendSrv, getGrafanaLiveSrv, logDebug } from "@grafana/runtime";
import { pipe, Observable, UnaryFunction } from "rxjs";
import { filter, map, scan, takeWhile } from "rxjs/operators";

const LLM_PLUGIN_ID = 'grafana-llm-app';
const LLM_PLUGIN_ROUTE = `/api/plugins/${LLM_PLUGIN_ID}`;
import { LLM_PLUGIN_ID, LLM_PLUGIN_ROUTE } from "./constants";

const OPENAI_CHAT_COMPLETIONS_PATH = 'openai/v1/chat/completions';

/** The role of a message's author. */
Expand Down
91 changes: 91 additions & 0 deletions src/llms/vector.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
/**
* Vector search API.
*
* This module can be used to interact with the vector database configured
* in the Grafana LLM app plugin. That plugin must be installed, enabled and configured
* in order for these functions to work.
*
* The {@link enabled} function can be used to check if the plugin is enabled and configured.
*/

import { getBackendSrv, logDebug } from "@grafana/runtime";
import { LLM_PLUGIN_ROUTE } from "./constants";

interface SearchResultPayload extends Record<string, any> { }

/**
 * A request to search for resources in the vector database.
 */
export interface SearchRequest {
  /**
   * The name of the collection to search in.
   *
   * Available collections can be obtained using the `collections` function.
   */
  collection: string;

  /** The query to search for. */
  query: string;

  /**
   * The maximum number of results to return.
   *
   * Defaults to 10.
   */
  limit?: number;
}

/**
 * A single result of a vector search.
 *
 * Results will be ordered by score, descending.
 */
export interface SearchResult<T extends SearchResultPayload> {
  /**
   * The payload of the result.
   *
   * The type of this payload depends on the collection that was searched in.
   * Grafana core types can be found in the same module as this type.
   */
  payload: T;

  /**
   * The score of the result.
   *
   * This is a number between 0 and 1, where 1 is the best possible match.
   */
  score: number;
}

interface SearchResultResponse<T extends SearchResultPayload> {
results: SearchResult<T>[];

Check failure on line 61 in src/llms/vector.ts

View workflow job for this annotation

GitHub Actions / build

Array type using 'T[]' is forbidden for non-simple types. Use 'Array<T>' instead
}

/**
* Search for resources in the configured vector database.
*/
export async function search<T extends SearchResultPayload>(request: SearchRequest): Promise<SearchResult<T>[]> {

Check failure on line 67 in src/llms/vector.ts

View workflow job for this annotation

GitHub Actions / build

Array type using 'T[]' is forbidden for non-simple types. Use 'Array<T>' instead
const response = await getBackendSrv().post<SearchResultResponse<T>>('/api/plugins/grafana-llm-app/resources/vector/search', request, {
headers: { 'Content-Type': 'application/json' }
});
return response.results;
}

// Only emit the "plugin missing" debug messages once per session.
let loggedWarning = false;

/** Check if the vector API is enabled and configured via the LLM plugin. */
export const enabled = async (): Promise<boolean> => {
  try {
    const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
      showSuccessAlert: false, showErrorAlert: false,
    });
    // NOTE(review): this mirrors the openai module's check — it requires the
    // OpenAI key to be configured. Confirm the vector service has no settings
    // or secure fields of its own that should be checked here instead.
    return settings.enabled && (settings?.secureJsonFields?.openAIKey ?? false);
  } catch (e) {
    if (!loggedWarning) {
      logDebug(String(e));
      // Fixed copy-paste from the openai module: this is the vector API check.
      logDebug('Failed to check if the vector API is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
      loggedWarning = true;
    }
    return false;
  }
};

0 comments on commit f46fd1f

Please sign in to comment.