Merge pull request #3 from Ibtesam-Mahmood/chat
Added Chat Functionality
Ibtesam-Mahmood committed Apr 8, 2023
2 parents bf39e0f + f96bbc9 commit d306482
Showing 17 changed files with 406 additions and 61 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,12 @@
## [0.1.3] v0.1.3

* Created `chat` program.
* Added `gptcli config serpapi` program.
* Added `gptcli config valueserp` program.
* Added a `-c` flag to the `gptcli config <key>` subcommands to clear the current key.
* Factored out the chat logic shared by the `understand` and `chat` programs.
* Added search functionality to the `chat` program through `serpapi` and `valueserp`.

## [0.1.2] v0.1.2

* Created `understand` program.
4 changes: 3 additions & 1 deletion README.md
@@ -1,4 +1,4 @@
# gpt-npm-cli v0.1.2
# gpt-npm-cli v0.1.3

An npm package that uses OpenAI + Langchain to perform convenient commands in the terminal.

@@ -29,5 +29,7 @@ The following is a list of commands currently configured within the `gptcli`:
- `gptcli summary`: Summarizes text and webpage contents, using map-reduce so that token limits are not exceeded on long inputs
- `gptcli translate`: Translates the input text to a desired language.
- `gptcli understand`: Parses a webpage and allows the user to ask questions about its contents in chat format.
- `gptcli chat`: Runs a chat interface that can be extended with the following functionality:
  - Search: provided through `SerpAPI` or `ValueSerp`

For more information run the `gptcli help` command.
4 changes: 2 additions & 2 deletions package-lock.json


2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "gpt-npm-cli",
"version": "0.1.2",
"version": "0.1.3",
"description": "\"# gpt-npm-cli\"",
"exports": "./index.js",
"type": "module",
4 changes: 3 additions & 1 deletion src/index.ts
@@ -6,8 +6,9 @@ import figlet from "figlet";
import ConfigureProgram from "./programs/configure/configure-program.js";
import TranslateProgram from "./programs/translate-program.js";
import UnderstandProgram from "./programs/understand-program.js";
import ChatProgram from "./programs/chat-program.js";

const version = "0.1.2";
const version = "0.1.3";
const description =
"A super charged CLI for interfacing with GPT-3 and other AI services";

@@ -33,6 +34,7 @@ async function main(): Promise<void> {
new ConfigureProgram().configure(cliApp);
new TranslateProgram().configure(cliApp);
new UnderstandProgram().configure(cliApp);
new ChatProgram().configure(cliApp);

// Parse the args for the program
await cliApp.parseAsync(process.argv);
68 changes: 68 additions & 0 deletions src/langchain/helpers/cli-chat-helper.ts
@@ -0,0 +1,68 @@
import * as readline from "readline";

interface ChatOptions {
runner: (input: string, history: string[]) => Promise<string>;
historyUpdate?: (
input: string,
output: string,
history: string[]
) => string[];
inputTitle?: string;
}

function defaultHistoryUpdate(
input: string,
output: string,
history: string[]
): string[] {
return [...history, `User: ${input}`, `Chat: ${output}`];
}

async function cliChatHelper(options: ChatOptions): Promise<string[]> {
const userInputString = `----------\n${
options.inputTitle ?? "Input"
}:\n----------`;
const chatInputString = `----------\nResponse:\n----------`;
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});

// State
let chatHistory: string[] = [];

// start the chat
console.log();
console.log('Type ".." to exit');
console.log();
console.log(userInputString);
rl.on("line", async (input) => {
// Exit the chat
if (input === "..") {
console.log();
console.log("Exiting chat");
rl.close();
return;
}

// Run the query
console.log();
const result = await options.runner(input, chatHistory);

// Print response and next question prompt
console.log();
console.log(chatInputString);
console.log(result);
console.log();
console.log(userInputString);

// Update the chat history
chatHistory =
options.historyUpdate?.(input, result, chatHistory) ??
defaultHistoryUpdate(input, result, chatHistory);
});

return chatHistory;
}

export { cliChatHelper };
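
For orientation, here is a minimal sketch of how `cliChatHelper` might be driven on its own; the echo runner and the import path are illustrative assumptions, not part of this commit. Note that the helper resolves its returned history promise as soon as the readline listener is registered, so the resolved value reflects the state at registration time rather than at exit.

```ts
// Illustrative only: a trivial echo runner wired into cliChatHelper.
// The relative import path is an assumption about where this snippet lives.
import { cliChatHelper } from "./langchain/helpers/cli-chat-helper.js";

await cliChatHelper({
  // Echo the input back, along with how many history lines exist so far.
  runner: async (input, history) =>
    `You said "${input}" (${history.length} prior history lines)`,
  inputTitle: "Question", // the same title understand() passes further down
});
```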
@@ -1,12 +1,15 @@
import { initializeAgentExecutor, Tool } from "langchain/agents";
import { LLMChain, ChatVectorDBQAChain } from "langchain/chains";
import { LLM, OpenAI } from "langchain/llms";
import { LLM } from "langchain/llms";
import { BufferMemory } from "langchain/memory";
import {
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
} from "langchain/prompts";
import * as readline from "readline";
import { Calculator, SerpAPI } from "langchain/tools";
import { VectorStore } from "langchain/vectorstores";
import { cliChatHelper } from "./helpers/cli-chat-helper.js";
const { Document: LangDocument } = await import("langchain/document");
const { loadSummarizationChain } = await import("langchain/chains");
const { OpenAIChat } = await import("langchain/llms");
@@ -162,8 +165,6 @@ class OpenAiChatHelper {

// Runs a chat on the vector store
public async understand(info: VectorStore): Promise<void> {
const userInputString = `-----\nQuestion:\n-----`;
const chatInputString = `-----\nResponse:\n-----`;
const qaTemplate = `Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.
{context}
@@ -174,48 +175,72 @@ class OpenAiChatHelper {
Helpful Answer:`;

// define chat vars
const chatHistory: string[] = [];
const chain = ChatVectorDBQAChain.fromLLM(this.model, info, {
k: 2,
qaTemplate: qaTemplate,
});
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});

// start the chat
console.log();
console.log('Type ".." to exit');
console.log();
console.log(userInputString);
rl.on("line", async (input) => {
// Exit the chat
if (input === "..") {
console.log();
console.log("Exiting chat");
rl.close();
return;
}

// Run the query
console.log();
// Options for the chat
const runner = async (
input: string,
history: string[]
): Promise<string> => {
const result = await chain.call({
question: input,
chat_history: chatHistory,
chat_history: history,
});

// Print response and next question prompt
console.log();
console.log(chatInputString);
console.log(result.text);
console.log();
console.log(userInputString);
return result.text;
};

// Run the chat
await cliChatHelper({ runner, inputTitle: "Question" });
}

/*
____ _ _
/ ___| |__ __ _| |_
| | | '_ \ / _` | __|
| |___| | | | (_| | |_
\____|_| |_|\__,_|\__|
*/

// Update the chat history
chatHistory.push(`User: ${input}`);
chatHistory.push(`Chat: ${result.text}`);
public async chat(input?: { tools?: Tool[] }): Promise<void> {
// Create chat tools
const inputTools: Tool[] = [...(input?.tools ?? []), new Calculator()];

// Create the chat agent
const executor = await initializeAgentExecutor(
inputTools,
this.model,
"chat-conversational-react-description",
this.model.verbose
);

// Add memory to the agent
executor.memory = new BufferMemory({
returnMessages: true,
memoryKey: "chat_history",
inputKey: "input",
});

// Options for the chat helper
const runner = async (input: string, _: string[]): Promise<string> => {
const result = await executor.call({ input });

return result.output;
};
const historyUpdate = (
_: string,
__: string,
history: string[]
): string[] => history;

// Run the chat
await cliChatHelper({ runner, historyUpdate });
}
}

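A rough sketch of how the new `chat()` entry point could be invoked with a search tool follows; the `helper` declaration and the `SERPAPI_API_KEY` variable name are assumptions, since the actual `ChatProgram` wiring is not visible in this diff.

```ts
// Illustrative only — not the ChatProgram implementation from this commit.
import { SerpAPI, Tool } from "langchain/tools";

// Assume `helper` is an already-constructed OpenAiChatHelper; its
// constructor and module path sit outside the hunks shown here.
declare const helper: { chat(input?: { tools?: Tool[] }): Promise<void> };

// SERPAPI_API_KEY is an assumed name for wherever the key is stored;
// the CLI itself manages keys via `gptcli config serpapi`.
const serpApiKey = process.env.SERPAPI_API_KEY;

// chat() appends a Calculator on its own, so only the search tool is passed.
await helper.chat({
  tools: serpApiKey ? [new SerpAPI(serpApiKey)] : [],
});
```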
File renamed without changes.
102 changes: 102 additions & 0 deletions src/langchain/tools/value-serp-tool.ts
@@ -0,0 +1,102 @@
import axios from "axios";
import { Tool } from "langchain/tools";

const value_serp_url = "https://api.valueserp.com/search";

class ValueSerpAPI extends Tool {
name: string;
description: string;
apiKey: string;

constructor(apiKey: string | undefined) {
super();

if (!apiKey) {
throw new Error("No apiKey provided");
}

this.apiKey = apiKey as string;
this.name = "search";
this.description =
"a search engine. useful for when you need to answer questions about current events. input should be a search query.";
}

protected async _call(input: string): Promise<string> {
const params = this.getParams(input);

try {
const response = await axios.get(value_serp_url, { params });

const data = response.data;
return this.extractData(data);
} catch (e) {
throw new Error(`Got error from valueSerpAPI: ${e}`);
}
}

private getParams(input: string): any {
return {
api_key: this.apiKey,
q: input,
hl: "en",
google_domain: "google.com",
gl: "us",
};
}

private extractData(data: any): string {
let response: string = "";

// answer box
if (data?.answer_box?.answers) {
const answers = (data.answer_box.answers as any[])
.map((e) => `- ${e.answer}`)
.join("\n");

response += "Possible Answers: \n" + answers + "\n\n";
}

// knowledge graph
if (data?.knowledge_graph?.description) {
response +=
"Additional Information: \n- " +
data.knowledge_graph.description +
"\n\n";
}

// QA results
if (data?.related_questions) {
const questions = data.related_questions.slice(
0,
Math.min(3, data.related_questions.length)
);

const results = questions
.map((e: any) => `[Q]: ${e.question}\n[A]: ${e.answer}`)
.join("\n---\n");

response += "Related Question Responses: \n" + results + "\n\n";
}

// organic web results
if (data?.organic_results) {
const organicResults = data.organic_results.slice(
0,
Math.min(3, data.organic_results.length)
);
const results = organicResults
.map((e: any) => `- [${e.title}]: ${e.snippet}`)
.join("\n");
response += "Top Web Results: \n" + results + "\n\n";
}

// default response
if (response.length === 0) {
return "No good search result found";
}

return response;
}
}

export default ValueSerpAPI;
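
Finally, a small sketch of how the new tool might be constructed and handed to the chat agent; the import path and the `VALUESERP_API_KEY` variable name are illustrative assumptions (the CLI stores the key via `gptcli config valueserp`).

```ts
// Illustrative only — constructing the ValueSerp search tool.
import ValueSerpAPI from "./langchain/tools/value-serp-tool.js";

// VALUESERP_API_KEY is an assumed name; the constructor throws if no key is given.
const searchTool = new ValueSerpAPI(process.env.VALUESERP_API_KEY);

// The tool can then be passed along to OpenAiChatHelper.chat({ tools: [searchTool] }),
// where a Calculator is appended automatically.
```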