Skip to content

Commit

Permalink
Merge pull request #90 from grafana/new-health-check
Browse files Browse the repository at this point in the history
LLM: use new, more detailed health check results
  • Loading branch information
sd2k authored Oct 17, 2023
2 parents 3e9390c + 181572f commit d10f729
Show file tree
Hide file tree
Showing 3 changed files with 99 additions and 23 deletions.
35 changes: 27 additions & 8 deletions src/llms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import { pipe, Observable, UnaryFunction } from "rxjs";
import { filter, map, scan, takeWhile, tap } from "rxjs/operators";

import { LLM_PLUGIN_ID, LLM_PLUGIN_ROUTE, setLLMPluginVersion } from "./constants";
import { LLMAppHealthCheck } from "./types";
import { HealthCheckResponse, OpenAIHealthDetails } from "./types";

const OPENAI_CHAT_COMPLETIONS_PATH = 'openai/v1/chat/completions';

Expand Down Expand Up @@ -345,9 +345,24 @@ export function streamChatCompletions(request: ChatCompletionsRequest): Observab
let loggedWarning = false;

/** Check if the OpenAI API is enabled via the LLM plugin. */
export const enabled = async () => {
// Run a health check to see if the plugin is installed.
let response: LLMAppHealthCheck;
export const enabled = async (): Promise<OpenAIHealthDetails> => {
// First check if the plugin is enabled.
try {
const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
showSuccessAlert: false, showErrorAlert: false,
});
if (!settings.enabled) {
return { configured: false, ok: false, error: 'The Grafana LLM plugin is not enabled.' }
}
} catch (e) {
logDebug(String(e));
logDebug('Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
loggedWarning = true;
return { configured: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
}

// Run a health check to see if OpenAI is configured on the plugin.
let response: HealthCheckResponse;
try {
response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
showSuccessAlert: false, showErrorAlert: false,
Expand All @@ -358,14 +373,18 @@ export const enabled = async () => {
logDebug('Failed to check if OpenAI is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
loggedWarning = true;
}
return false;
return { configured: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
}

const { details } = response;
// Update the version if it's present on the response.
if (details.version !== undefined) {
if (details?.version !== undefined) {
setLLMPluginVersion(details.version);
}
// If the plugin is installed then check if it is configured.
return details?.openAI ?? false;
if (details?.openAI === undefined) {
return { configured: false, ok: false, error: 'The Grafana LLM plugin is outdated; please update it.' }
}
return typeof details.openAI === 'boolean' ?
{ configured: details.openAI, ok: details.openAI } :
details.openAI;
}
49 changes: 42 additions & 7 deletions src/llms/types.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,42 @@
/**
 * Legacy shape of the LLM plugin's health-check response.
 * Superseded by the richer `HealthCheckResponse` / `HealthCheckDetails` types.
 */
export type LLMAppHealthCheck = {
  details: {
    // Whether OpenAI is enabled/configured on the plugin (boolean-only legacy form).
    openAI?: boolean;
    // Whether the vector service is enabled/configured (boolean-only legacy form).
    vector?: boolean;
    // The LLM plugin's version, if reported.
    version?: string;
  };
};
/** Response returned by the LLM plugin's health endpoint. */
export interface HealthCheckResponse {
  /** Overall status of the health check. */
  status: 'ok' | 'error';
  /** Per-service details; absent on outdated plugin versions (callers handle `undefined`). */
  details?: HealthCheckDetails;
}

/** Per-service health details reported by the LLM plugin. */
export interface HealthCheckDetails {
  /** OpenAI health: rich details, or a bare boolean from older plugin versions. */
  openAI: OpenAIHealthDetails | boolean;
  /** Vector service health: rich details, or a bare boolean from older plugin versions. */
  vector: VectorHealthDetails | boolean;
  /** Version of the LLM plugin that produced this response. */
  version: string;
}

/** Health details for the OpenAI service, as reported by the LLM plugin. */
export interface OpenAIHealthDetails {
  /** Whether the minimum required OpenAI settings have been provided. */
  configured: boolean;
  /** Whether we can call the OpenAI API with the provided settings. */
  ok: boolean;
  /**
   * If set, the error returned when trying to call the OpenAI API.
   * Will be undefined if ok is true.
   */
  error?: string;
  /**
   * A map of model names to their health details.
   * The health check attempts to call the OpenAI API with each
   * of a few models and records the result of each call here.
   */
  models?: Record<string, OpenAIModelHealthDetails>;
}

/** Health details for a single OpenAI model, keyed by model name in `OpenAIHealthDetails.models`. */
export interface OpenAIModelHealthDetails {
  /** Whether we can use this model in calls to OpenAI. */
  ok: boolean;
  /**
   * If set, the error returned when trying to call the OpenAI API.
   * Will be undefined if ok is true.
   */
  error?: string;
}

/** Health details for the vector service, as reported by the LLM plugin. */
export interface VectorHealthDetails {
  /** Whether the vector service has been enabled. */
  enabled: boolean;
  /** Whether we can use the vector service with the provided settings. */
  ok: boolean;
  /**
   * If set, the error returned when trying to call the vector service.
   * Will be undefined if ok is true.
   */
  error?: string;
}
38 changes: 30 additions & 8 deletions src/llms/vector.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

import { getBackendSrv, logDebug } from "@grafana/runtime";
import { LLM_PLUGIN_ROUTE, setLLMPluginVersion } from "./constants";
import { LLMAppHealthCheck } from "./types";
import { HealthCheckResponse, VectorHealthDetails } from "./types";

interface SearchResultPayload extends Record<string, any> { }

Expand Down Expand Up @@ -78,26 +78,48 @@ export async function search<T extends SearchResultPayload>(request: SearchReque
let loggedWarning = false;

/** Check if the vector API is enabled and configured via the LLM plugin. */
export const enabled = async () => {
// Run a health check to see if the plugin is installed.
let response: LLMAppHealthCheck;
export const enabled = async (): Promise<VectorHealthDetails> => {
// First check if the plugin is enabled.
try {
const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
showSuccessAlert: false, showErrorAlert: false,
});
if (!settings.enabled) {
return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not enabled.' }
}
} catch (e) {
logDebug(String(e));
logDebug('Failed to check if the vector service is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
loggedWarning = true;
return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
}

// Run a health check to see if the vector service is configured on the plugin.
let response: HealthCheckResponse;
try {
response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
showSuccessAlert: false, showErrorAlert: false,
});
} catch (e) {
// We shouldn't really get here if we managed to get the plugin's settings above,
// but catch this just in case.
if (!loggedWarning) {
logDebug(String(e));
logDebug('Failed to check if vector service is enabled. This is expected if the Grafana LLM plugin is not installed, and the above error can be ignored.');
loggedWarning = true;
}
return false;
return { enabled: false, ok: false, error: 'The Grafana LLM plugin is not installed.' }
}

const { details } = response;
// Update the version if it's present on the response.
if (details.version !== undefined) {
if (details?.version !== undefined) {
setLLMPluginVersion(details.version);
}
// If the plugin is installed then check if it is configured.
return details.vector ?? false;
if (details?.vector === undefined) {
return { enabled: false, ok: false, error: 'The Grafana LLM plugin is outdated; please update it.' }
}
return typeof details.vector === 'boolean' ?
{ enabled: details.vector, ok: details.vector } :
details.vector;
};

0 comments on commit d10f729

Please sign in to comment.