Commit

Merge pull request #1 from Ibtesam-Mahmood/v0.1.1
Translation Program
Ibtesam-Mahmood committed Mar 28, 2023
2 parents 4685f53 + ce3fbaf commit 9d19791
Showing 11 changed files with 341 additions and 74 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,10 @@
## [0.1.1] v0.1.1

* Created `translate` program.
* Updated `summary` program with `--mode` and `--split` flags.
* Factored out OpenAI functionality.
* Updated to top-level await with module exports.

## [0.1.0] v0.1.0

* Added `langchain` package for LLM functionality
5 changes: 3 additions & 2 deletions README.md
@@ -1,4 +1,4 @@
# gpt-npm-cli v0.1.0
# gpt-npm-cli v0.1.1

An npm package that uses OpenAI and LangChain to perform convenient commands in the terminal.

@@ -8,7 +8,7 @@ Ensure that TypeScript is installed globally: `npm install -g typescript`.

To get started with the project, clone the repository and run `npm i && npm run build && npm install -g .` within the project root.

(Mac or Linux users may have to run `sudo` before some of these commands.
Mac or Linux users may have to run `sudo` before some of these commands.

Calling the `gptcli` command from the command-line will run the CLI.

@@ -21,5 +21,6 @@ It is important to set your keys to enable full functionality. Run the `gptcli c
The following is a list of commands currently configured within `gptcli`:
- `gptcli config`: Configures environment variables required to run programs within the CLI; it is recommended that you set any required variables through this command before using the CLI.
- `gptcli summary`: Summarizes text and webpage contents; uses a map-reduce chain so long inputs do not run into model limits.
- `gptcli translate`: Translates the input text to a desired language.

For more information, run the `gptcli help` command.
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

8 changes: 6 additions & 2 deletions package.json
@@ -1,8 +1,12 @@
{
"name": "gpt-npm-cli",
"version": "0.1.0",
"version": "0.1.1",
"description": "\"# gpt-npm-cli\"",
"main": "index.js",
"exports": "./index.js",
"type": "module",
"engines": {
"node": ">=16.0.0"
},
"scripts": {
"build": "npx tsc",
"start": "npx tsc & node ./dist/index.js",
154 changes: 154 additions & 0 deletions src/helpers/langchain/open-ai-chat-helper.ts
@@ -0,0 +1,154 @@
import { LLMChain } from "langchain/chains";
import { LLM } from "langchain/llms";
import {
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
} from "langchain/prompts";

const { OpenAIChat } = await import("langchain/llms");
const { CallbackManager } = await import("langchain/callbacks");
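// The two dynamic imports above resolve with top-level await, which requires an ES module context ("type": "module" in package.json).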

interface OpenAiChatHelperInput {
model?: string;
temperature?: number;
verbose?: boolean;
}

interface SummarizationOptions {
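  // type: summarization chain strategy; split: chunk size (in characters) for the map_reduce text splitter.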
type: "map_reduce" | "stuff";
split: number;
}

interface TranslationOptions {
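  // source: input language, or "auto" to let the model detect it; output: target language.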
source: string;
output: string;
}

class OpenAiChatHelper {
public model: LLM;

constructor(input: OpenAiChatHelperInput) {
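    // Defaults: gpt-3.5-turbo at temperature 0.7; verbose mode attaches the logging callback manager below.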
let params = {
temperature: input.temperature ?? 0.7,
modelName: input.model ?? "gpt-3.5-turbo",
verbose: input.verbose ?? false,
callbackManager: null as any,
};

if (params.verbose) {
params.callbackManager = OpenAiChatHelper.defaultCallBackManager;
}

this.model = new OpenAIChat(params);
}

public static get defaultCallBackManager() {
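    // Logs the model config, prompts, raw outputs, and any errors when verbose mode is enabled.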
return CallbackManager.fromHandlers({
handleLLMStart: async (llm: { name: string }, prompts: string[]) => {
console.log(JSON.stringify(llm, null, 2));
console.log(JSON.stringify(prompts, null, 2));
},
handleLLMEnd: async (output: any) => {
console.log(JSON.stringify(output, null, 2));
},
handleLLMError: async (err: Error) => {
console.error(err);
},
});
}

/*
____
/ ___| _ _ _ __ ___ _ __ ___ __ _ _ __ _ _
\___ \| | | | '_ ` _ \| '_ ` _ \ / _` | '__| | | |
___) | |_| | | | | | | | | | | | (_| | | | |_| |
|____/ \__,_|_| |_| |_|_| |_| |_|\__,_|_| \__, |
|___/
*/

public async summarize(
text: string,
options: SummarizationOptions = {
type: "map_reduce",
split: 3000,
}
): Promise<string> {
// Load in dependencies
const { Document: LangDocument } = await import("langchain/document");
const { loadSummarizationChain } = await import("langchain/chains");

// Loads in the chain
const chain = loadSummarizationChain(this.model, { type: options.type });

// Create the documents
let docs = [];
if (options.type === "map_reduce") {
const { RecursiveCharacterTextSplitter } = await import(
"langchain/text_splitter"
);
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize: options.split,
});
docs = await textSplitter.createDocuments([text]);
} else {
docs = [new LangDocument({ pageContent: text })];
}

// Summarize
const res = await chain.call({
input_documents: docs,
});

// Output the result
return res.text;
}

/*
_____ _ _
|_ _| __ __ _ _ __ ___| | __ _| |_ ___
| || '__/ _` | '_ \/ __| |/ _` | __/ _ \
| || | | (_| | | | \__ \ | (_| | || __/
|_||_| \__,_|_| |_|___/_|\__,_|\__\___|
*/

public async translate(
text: string,
options: TranslationOptions = {
source: "auto",
output: "english",
}
): Promise<string> {
const template =
"You are a helpful assistant that takes text in {input_language} and only responds with its translation in {output_language}.";
const autoTemplate =
"You are a helpful assistant that detects the language of the input and only responds with its translation in {output_language}.";

let promptTemplate = template;
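    // With no known source language, switch to the prompt that asks the model to detect it.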
if (options.source === "auto") {
promptTemplate = autoTemplate;
}

const chatPrompt = ChatPromptTemplate.fromPromptMessages([
SystemMessagePromptTemplate.fromTemplate(promptTemplate),
HumanMessagePromptTemplate.fromTemplate("{text}"),
]);

const chain = new LLMChain({ llm: this.model, prompt: chatPrompt });

const response = await chain.call({
input_language: options.source,
output_language: options.output,
text: text,
});

return response.text;
}
}

export default OpenAiChatHelper;
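As a rough sketch of how the helper above could be driven from other code in this ESM project (not part of the commit; the import path and sample values are illustrative, and an OpenAI API key is assumed to be configured in the environment):

```ts
import OpenAiChatHelper from "./helpers/langchain/open-ai-chat-helper.js";

// Construct with the same defaults the class itself falls back to.
const helper = new OpenAiChatHelper({
  model: "gpt-3.5-turbo",
  temperature: 0.7,
  verbose: false,
});

// Map-reduce summarization over ~3000-character chunks (placeholder input text).
const longText = "<paste a long article or webpage body here>";
const summary = await helper.summarize(longText, {
  type: "map_reduce",
  split: 3000,
});

// Auto-detect the source language and translate to English.
const translated = await helper.translate("Bonjour tout le monde", {
  source: "auto",
  output: "english",
});

console.log(summary);
console.log(translated);
```

Both calls mirror the default option shapes declared in the class; passing `verbose: true` would attach the logging callback manager instead.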
6 changes: 4 additions & 2 deletions src/index.ts
@@ -4,8 +4,9 @@ import { Command } from "commander";
import SummaryProgram from "./programs/summary-program.js";
import figlet from "figlet";
import ConfigureProgram from "./programs/configure/configure-program.js";
import TranslateProgram from "./programs/translate-program.js";

const version = "0.1.0";
const version = "0.1.1";
const description =
"A super charged CLI for interfacing with GPT-3 and other AI services";

@@ -16,7 +17,7 @@ async function main(): Promise<void> {
const cliApp = new Command()
.version(version)
.description(description)
.option("-d, --debug", "toggles verbose logging");
.option("-d, --debug", "toggles verbose logging", false);
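    // The third argument to .option() supplies the default value used when --debug is not passed.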

// Configure the help command
cliApp.configureHelp({
@@ -29,6 +30,7 @@ async function main(): Promise<void> {
// Configure the programs
new SummaryProgram().configure(cliApp);
new ConfigureProgram().configure(cliApp);
new TranslateProgram().configure(cliApp);

// Parse the args for the program
await cliApp.parseAsync(process.argv);
12 changes: 11 additions & 1 deletion src/programs/program-interface.ts
@@ -34,7 +34,7 @@ abstract class ProgramInterface {
public configure(root: Command): Command {
let command: Command = root
.command(this.name)
.description(this.description);
.description(this.formatDescription());

// Add any arguments
this.arguments.forEach((argument) => {
@@ -58,6 +58,16 @@

protected abstract run(input: ProgramInput): Promise<void>;

// Formats the description, adding the required environment variables
private formatDescription(): string {
let description = this.description;
if (this.requiredEnvironmentVariables.length > 0) {
const envList = this.requiredEnvironmentVariables.join(", ");
description += `\n<Required: [${envList}]>`;
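      // e.g. appends "<Required: [SOME_API_KEY]>" on a new line (variable name purely illustrative).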
}
return description;
}

// Formats the input for the runner
private async runWrapper(
run: (input: ProgramInput) => Promise<void>,
