From 5db04d6ee5eed19279677f42d049a131414ee2d5 Mon Sep 17 00:00:00 2001
From: Antonis Makropoulos
Date: Thu, 18 Jan 2024 11:16:19 +0200
Subject: [PATCH 1/5] remove diff changelog
---
CHANGELOG.md.diff | 13 -------------
1 file changed, 13 deletions(-)
delete mode 100644 CHANGELOG.md.diff
diff --git a/CHANGELOG.md.diff b/CHANGELOG.md.diff
deleted file mode 100644
index 2ac57e52..00000000
--- a/CHANGELOG.md.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-### 🚀 Features
-
-- Code auto-formatting (PR: #26)
-- Setup auto-formatting precommit (PR: #31)
-- Start server on Awake instead of OnEnable (PR: #28)
-- AMD support, switch to llamafile 0.6 (PR: #33)
-- Release workflows (PR: #35)
-
-### 🐛 Fixes
-
-- Support Unity 2021 LTS (PR: #32)
-- Fix macOS command (PR: #34)
-
From cc2650107b6a459e1841a2964091cdf1b8d2ca0c Mon Sep 17 00:00:00 2001
From: Antonis Makropoulos
Date: Thu, 18 Jan 2024 11:17:11 +0200
Subject: [PATCH 2/5] git ignore CHANGELOG.release.md.meta
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index b537935f..d23874c6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,4 @@ hooks/pre-commit.meta
setup.sh.meta
*.api
*.api.meta
+CHANGELOG.release.md.meta
From 79bd97b34b4e7eb97c0c405532cf1df7fa0b38a6 Mon Sep 17 00:00:00 2001
From: Antonis Makropoulos
Date: Thu, 18 Jan 2024 11:46:22 +0200
Subject: [PATCH 3/5] escape spaces according to OS
---
Runtime/LLM.cs | 23 +++++++++++++++--------
1 file changed, 15 insertions(+), 8 deletions(-)
diff --git a/Runtime/LLM.cs b/Runtime/LLM.cs
index 283ad970..a5fdcef1 100644
--- a/Runtime/LLM.cs
+++ b/Runtime/LLM.cs
@@ -171,7 +171,7 @@ private void CheckIfListening(string message)
serverBlock.Set();
}
}
- catch {}
+ catch { }
}
private void ProcessExited(object sender, EventArgs e)
@@ -179,6 +179,14 @@ private void ProcessExited(object sender, EventArgs e)
serverBlock.Set();
}
+ private string EscapeSpaces(string input)
+ {
+ if (Application.platform == RuntimePlatform.WindowsEditor || Application.platform == RuntimePlatform.WindowsPlayer)
+ return input.Replace(" ", "\" \"");
+ else
+ return input.Replace(" ", "' '");
+ }
+
private void RunServerCommand(string exe, string args)
{
string binary = exe;
@@ -194,11 +202,12 @@ private void RunServerCommand(string exe, string args)
if (Application.platform == RuntimePlatform.LinuxEditor || Application.platform == RuntimePlatform.LinuxPlayer)
{
// use APE binary directly if on Linux
- arguments = $"{binary.Replace(" ", "' '")} {arguments}";
+ arguments = $"{EscapeSpaces(binary)} {arguments}";
binary = SelectApeBinary();
- } else if (Application.platform == RuntimePlatform.OSXEditor || Application.platform == RuntimePlatform.OSXPlayer)
+ }
+ else if (Application.platform == RuntimePlatform.OSXEditor || Application.platform == RuntimePlatform.OSXPlayer)
{
- arguments = $"-c \"{binary.Replace(" ", "' '")} {arguments}\"";
+ arguments = $"-c \"{EscapeSpaces(binary)} {arguments}\"";
binary = "sh";
}
Debug.Log($"Server command: {binary} {arguments}");
@@ -218,13 +227,11 @@ private void StartLLMServer()
loraPath = GetAssetPath(lora);
if (!File.Exists(loraPath)) throw new System.Exception($"File {loraPath} not found!");
}
- modelPath = modelPath.Replace(" ", "' '");
- loraPath = loraPath.Replace(" ", "' '");
int slots = parallelPrompts == -1 ? FindObjectsOfType().Length : parallelPrompts;
- string arguments = $" --port {port} -m {modelPath} -c {contextSize} -b {batchSize} --log-disable --nobrowser -np {slots}";
+ string arguments = $" --port {port} -m {EscapeSpaces(modelPath)} -c {contextSize} -b {batchSize} --log-disable --nobrowser -np {slots}";
if (numThreads > 0) arguments += $" -t {numThreads}";
- if (loraPath != "") arguments += $" --lora {loraPath}";
+ if (loraPath != "") arguments += $" --lora {EscapeSpaces(loraPath)}";
string GPUArgument = numGPULayers <= 0 ? "" : $" -ngl {numGPULayers}";
RunServerCommand(server, arguments + GPUArgument);
From fe35acd73344b41d7e5eae96e0e9d34e799f43ca Mon Sep 17 00:00:00 2001
From: Antonios Makropoulos
Date: Thu, 18 Jan 2024 13:54:58 +0200
Subject: [PATCH 4/5] add to Readme: badges, how to help, history
---
README.md | 45 +++++++++++++++++++++++++++++++++++++++++----
1 file changed, 41 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index 86f093b6..08730420 100644
--- a/README.md
+++ b/README.md
@@ -4,11 +4,27 @@
Integrate LLM models in Unity!
-
-LLMUnity allows to integrate, run and deploy LLMs (Large Language Models) in the Unity engine.
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+
+[![Reddit](https://img.shields.io/badge/Reddit-%23FF4500.svg?style=flat&logo=Reddit&logoColor=white)](https://www.reddit.com/user/UndreamAI)
+
+LLMUnity allows you to integrate, run and deploy LLMs (Large Language Models) in the Unity engine.
LLMUnity is built on top of the awesome [llama.cpp](https://github.com/ggerganov/llama.cpp) and [llamafile](https://github.com/Mozilla-Ocho/llamafile) libraries.
+
+
+At a glance •
+Setup •
+How to use •
+Examples •
+Use your own model •
+Multiple client / Remote server setup •
+Options •
+License
+
+
+
## At a glance
- :computer: Cross-platform! Supports Windows, Linux and macOS ([supported versions](https://github.com/Mozilla-Ocho/llamafile?tab=readme-ov-file#supported-oses-and-cpus))
- :house: Runs locally without internet access but also supports remote servers
@@ -17,7 +33,13 @@ LLMUnity is built on top of the awesome [llama.cpp](https://github.com/ggerganov
- :wrench: Easy to setup, call with a single line code
- :moneybag: Free to use for both personal and commercial purposes
-[:vertical_traffic_light: Upcoming Releases](https://github.com/orgs/undreamai/projects/2/views/10)
+🧪 Tested on Unity: 2021 LTS, 2022 LTS, 2023
+:vertical_traffic_light: [Upcoming Releases](https://github.com/orgs/undreamai/projects/2/views/10)
+
+## How to help
+- Join us at [Discord](https://discord.gg/tZRGntma) and say hi!
+- Star the repo and spread the word about the project ❤️!
+- Submit feature requests or bugs as [issues](https://github.com/undreamai/LLMUnity/issues) or even submit a PR and become a collaborator!
## Setup
To install the package you can follow the typical asset / package process in Unity:
@@ -45,7 +67,7 @@ For a step-by-step tutorial you can have a look at our guide:
Create a GameObject for the LLM :chess_pawn::
- Create an empty GameObject. In the GameObject Inspector click `Add Component` and select the LLM script (`Scripts>LLM`).
- Download the default model with the `Download Model` button (this will take a while as it is ~4GB).
You can also load your own model in .gguf format with the `Load model` button (see [Use your own model](#use-your-own-model)).
-- Define the role of your AI in the `Prompt`. You can also define the name of the AI (`AI Mame`) and the player (`Player Name`).
+- Define the role of your AI in the `Prompt`. You can also define the name of the AI (`AI Name`) and the player (`Player Name`).
- (Optional) By default the LLM script is set up to receive the reply from the model as is it is produced in real-time (recommended). If you prefer to receive the full reply in one go, you can deselect the `Stream` option.
- (Optional) Adjust the server or model settings to your preference (see [Options](#options)).
@@ -95,6 +117,21 @@ That's all :sparkles:!
You can also:
+
+Add or not the message to the chat/prompt history
+
+ The last argument of the `Chat` function is a boolean that specifies whether to add the message to the history (default: true):
+``` c#
+ void Game(){
+ // your game function
+ ...
+    string message = "Hello bot!";
+ await llm.Chat(message, HandleReply, ReplyCompleted, false);
+ ...
+ }
+```
+
+
Wait for the reply before proceeding to the next lines of code
From 700c0f1f003ea4e5c9dad935d4e032fe26289b66 Mon Sep 17 00:00:00 2001
From: Antonis Makropoulos
Date: Thu, 18 Jan 2024 13:56:10 +0200
Subject: [PATCH 5/5] bump version
---
VERSION | 2 +-
package.json | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/VERSION b/VERSION
index 7dea76ed..6d7de6e6 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.0.1
+1.0.2
diff --git a/package.json b/package.json
index c2b51889..35a4c2ca 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "ai.undream.llmunity",
- "version": "1.0.1",
+ "version": "1.0.2",
"displayName": "LLMUnity",
"description": "LLMUnity allows to run and distribute LLM models in the Unity engine.",
"unity": "2022.3",