Skip to content

Commit 7aaa106

Browse files
authored
feat: LocalAI functions (#726)
2 parents a6839fd + dcf35dd commit 7aaa106

18 files changed

+781
-37
lines changed

.dockerignore

-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
.git
21
.idea
32
models
43
examples/chatbot-ui/models

.env

+2-2
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@ MODELS_PATH=/models
2626
## Specify a build type. Available: cublas, openblas, clblas.
2727
# BUILD_TYPE=openblas
2828

29-
## Uncomment and set to false to disable rebuilding from source
30-
# REBUILD=false
29+
## Uncomment and set to true to enable rebuilding from source
30+
# REBUILD=true
3131

3232
## Enable go tags, available: stablediffusion, tts
3333
## stablediffusion: image generation with stablediffusion

Dockerfile

+3-1
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,8 @@ RUN make get-sources
8383
COPY go.mod .
8484
RUN make prepare
8585
COPY . .
86+
COPY .git .
87+
8688
RUN ESPEAK_DATA=/build/lib/Linux-$(uname -m)/piper_phonemize/lib/espeak-ng-data make build
8789

8890
###################################
@@ -92,7 +94,7 @@ FROM requirements
9294

9395
ARG FFMPEG
9496

95-
ENV REBUILD=true
97+
ENV REBUILD=false
9698
ENV HEALTHCHECK_ENDPOINT=http://localhost:8080/readyz
9799

98100
# Add FFmpeg

Makefile

+35-3
Original file line numberDiff line numberDiff line change
@@ -3,24 +3,51 @@ GOTEST=$(GOCMD) test
33
GOVET=$(GOCMD) vet
44
BINARY_NAME=local-ai
55

6-
GOLLAMA_VERSION?=ecd358d2f144b4282a73df443d60474fca5db9ec
6+
# llama.cpp versions
7+
# Temporarily pinned to https://github.com/go-skynet/go-llama.cpp/pull/124
8+
GOLLAMA_VERSION?=cb8d7cd4cb95725a04504a9e3a26dd72a12b69ac
9+
10+
# Temporarily set a specific version of llama.cpp
11+
# containing: https://github.com/ggerganov/llama.cpp/pull/1773 and
12+
# rebased on top of master.
13+
# This pin can be dropped when the PR above is merged, and go-llama has merged changes as well
14+
# Set empty to use the version pinned by go-llama
15+
LLAMA_CPP_REPO?=https://github.com/mudler/llama.cpp
16+
LLAMA_CPP_VERSION?=48ce8722a05a018681634af801fd0fd45b3a87cc
17+
18+
# gpt4all version
719
GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
820
GPT4ALL_VERSION?=70cbff70cc2a9ad26d492d44ab582d32e6219956
21+
22+
# go-ggml-transformers version
923
GOGGMLTRANSFORMERS_VERSION?=8e31841dcddca16468c11b2e7809f279fa76a832
24+
25+
# go-rwkv version
1026
RWKV_REPO?=https://github.com/donomii/go-rwkv.cpp
1127
RWKV_VERSION?=f5a8c45396741470583f59b916a2a7641e63bcd0
28+
29+
# whisper.cpp version
1230
WHISPER_CPP_VERSION?=85ed71aaec8e0612a84c0b67804bde75aa75a273
31+
32+
# bert.cpp version
1333
BERT_VERSION?=6069103f54b9969c02e789d0fb12a23bd614285f
34+
35+
# go-piper version
1436
PIPER_VERSION?=56b8a81b4760a6fbee1a82e62f007ae7e8f010a7
37+
38+
# go-bloomz version
1539
BLOOMZ_VERSION?=1834e77b83faafe912ad4092ccf7f77937349e2f
40+
41+
# stablediffusion version
42+
STABLEDIFFUSION_VERSION?=d89260f598afb809279bc72aa0107b4292587632
43+
1644
export BUILD_TYPE?=
1745
CGO_LDFLAGS?=
1846
CUDA_LIBPATH?=/usr/local/cuda/lib64/
19-
STABLEDIFFUSION_VERSION?=d89260f598afb809279bc72aa0107b4292587632
2047
GO_TAGS?=
2148
BUILD_ID?=git
2249

23-
VERSION?=$(shell git describe --always --tags --dirty || echo "dev" )
50+
VERSION?=$(shell git describe --always --tags || echo "dev" )
2451
# go tool nm ./local-ai | grep Commit
2552
LD_FLAGS?=
2653
override LD_FLAGS += -X "github.com/go-skynet/LocalAI/internal.Version=$(VERSION)"
@@ -201,6 +228,9 @@ whisper.cpp/libwhisper.a: whisper.cpp
201228
go-llama:
202229
git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama
203230
cd go-llama && git checkout -b build $(GOLLAMA_VERSION) && git submodule update --init --recursive --depth 1
231+
ifneq ($(LLAMA_CPP_REPO),)
232+
cd go-llama && rm -rf llama.cpp && git clone $(LLAMA_CPP_REPO) llama.cpp && cd llama.cpp && git checkout -b build $(LLAMA_CPP_VERSION) && git submodule update --init --recursive --depth 1
233+
endif
204234

205235
go-llama/libbinding.a: go-llama
206236
$(MAKE) -C go-llama BUILD_TYPE=$(BUILD_TYPE) libbinding.a
@@ -227,6 +257,7 @@ prepare-sources: get-sources replace
227257

228258
## GENERIC
229259
rebuild: ## Rebuilds the project
260+
$(GOCMD) clean -cache
230261
$(MAKE) -C go-llama clean
231262
$(MAKE) -C gpt4all/gpt4all-bindings/golang/ clean
232263
$(MAKE) -C go-ggml-transformers clean
@@ -242,6 +273,7 @@ prepare: prepare-sources backend-assets/gpt4all $(OPTIONAL_TARGETS) go-llama/lib
242273
touch $@
243274

244275
clean: ## Remove build related file
276+
$(GOCMD) clean -cache
245277
rm -fr ./go-llama
246278
rm -rf ./gpt4all
247279
rm -rf ./go-gpt2

api/api.go

+3
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,9 @@ func App(opts ...AppOption) (*fiber.App, error) {
5151
}))
5252
}
5353

54+
log.Info().Msgf("Starting LocalAI using %d threads, with models path: %s", options.threads, options.loader.ModelPath)
55+
log.Info().Msgf("LocalAI version: %s", internal.PrintableVersion())
56+
5457
cm := NewConfigMerger()
5558
if err := cm.LoadConfigs(options.loader.ModelPath); err != nil {
5659
log.Error().Msgf("error loading config files: %s", err.Error())

api/config.go

+35-2
Original file line numberDiff line numberDiff line change
@@ -46,12 +46,24 @@ type Config struct {
4646
PromptCacheAll bool `yaml:"prompt_cache_all"`
4747
PromptCacheRO bool `yaml:"prompt_cache_ro"`
4848

49-
PromptStrings, InputStrings []string
50-
InputToken [][]int
49+
Grammar string `yaml:"grammar"`
50+
51+
FunctionsConfig Functions `yaml:"function"`
52+
53+
PromptStrings, InputStrings []string
54+
InputToken [][]int
55+
functionCallString, functionCallNameString string
56+
}
57+
58+
type Functions struct {
59+
DisableNoAction bool `yaml:"disable_no_action"`
60+
NoActionFunctionName string `yaml:"no_action_function_name"`
61+
NoActionDescriptionName string `yaml:"no_action_description_name"`
5162
}
5263

5364
type TemplateConfig struct {
5465
Completion string `yaml:"completion"`
66+
Functions string `yaml:"function"`
5567
Chat string `yaml:"chat"`
5668
Edit string `yaml:"edit"`
5769
}
@@ -181,6 +193,10 @@ func updateConfig(config *Config, input *OpenAIRequest) {
181193
config.TopP = input.TopP
182194
}
183195

196+
if input.Grammar != "" {
197+
config.Grammar = input.Grammar
198+
}
199+
184200
if input.Temperature != 0 {
185201
config.Temperature = input.Temperature
186202
}
@@ -261,6 +277,23 @@ func updateConfig(config *Config, input *OpenAIRequest) {
261277
}
262278
}
263279
}
280+
// Can be either a string or an object
281+
switch fnc := input.FunctionCall.(type) {
282+
case string:
283+
if fnc != "" {
284+
config.functionCallString = fnc
285+
}
286+
case map[string]interface{}:
287+
var name string
288+
n, exists := fnc["name"]
289+
if exists {
290+
nn, e := n.(string)
291+
if e {
292+
name = nn
293+
}
294+
}
295+
config.functionCallNameString = name
296+
}
264297

265298
switch p := input.Prompt.(type) {
266299
case string:

0 commit comments

Comments
 (0)