@@ -3,24 +3,51 @@ GOTEST=$(GOCMD) test
 GOVET=$(GOCMD) vet
 BINARY_NAME=local-ai
 
-GOLLAMA_VERSION?=ecd358d2f144b4282a73df443d60474fca5db9ec
+# llama.cpp versions
+# Temporarly pinned to https://github.com/go-skynet/go-llama.cpp/pull/124
+GOLLAMA_VERSION?=cb8d7cd4cb95725a04504a9e3a26dd72a12b69ac
+
+# Temporary set a specific version of llama.cpp
+# containing: https://github.com/ggerganov/llama.cpp/pull/1773 and
+# rebased on top of master.
+# This pin can be dropped when the PR above is merged, and go-llama has merged changes as well
+# Set empty to use the version pinned by go-llama
+LLAMA_CPP_REPO?=https://github.com/mudler/llama.cpp
+LLAMA_CPP_VERSION?=48ce8722a05a018681634af801fd0fd45b3a87cc
+
+# gpt4all version
 GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
 GPT4ALL_VERSION?=70cbff70cc2a9ad26d492d44ab582d32e6219956
+
+# go-ggml-transformers version
 GOGGMLTRANSFORMERS_VERSION?=8e31841dcddca16468c11b2e7809f279fa76a832
+
+# go-rwkv version
 RWKV_REPO?=https://github.com/donomii/go-rwkv.cpp
 RWKV_VERSION?=f5a8c45396741470583f59b916a2a7641e63bcd0
+
+# whisper.cpp version
 WHISPER_CPP_VERSION?=85ed71aaec8e0612a84c0b67804bde75aa75a273
+
+# bert.cpp version
 BERT_VERSION?=6069103f54b9969c02e789d0fb12a23bd614285f
+
+# go-piper version
 PIPER_VERSION?=56b8a81b4760a6fbee1a82e62f007ae7e8f010a7
+
+# go-bloomz version
 BLOOMZ_VERSION?=1834e77b83faafe912ad4092ccf7f77937349e2f
+
+# stablediffusion version
+STABLEDIFFUSION_VERSION?=d89260f598afb809279bc72aa0107b4292587632
+
 export BUILD_TYPE?=
 CGO_LDFLAGS?=
 CUDA_LIBPATH?=/usr/local/cuda/lib64/
-STABLEDIFFUSION_VERSION?=d89260f598afb809279bc72aa0107b4292587632
 GO_TAGS?=
 BUILD_ID?=git
 
-VERSION?=$(shell git describe --always --tags --dirty || echo "dev")
+VERSION?=$(shell git describe --always --tags || echo "dev")
 
 # go tool nm ./local-ai | grep Commit
 LD_FLAGS?=
 override LD_FLAGS += -X "github.com/go-skynet/LocalAI/internal.Version=$(VERSION)"
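Every pin in this hunk is assigned with ?=, so a revision can be overridden per invocation without editing the Makefile (command-line and environment settings take precedence over ?= defaults). A minimal sketch, assuming the usual build target defined elsewhere in this Makefile, which is not part of this hunk:

    # Build once against a different llama.cpp revision (<sha> is a placeholder):
    make build LLAMA_CPP_VERSION=<sha>

    # Disable the fork pin and fall back to whatever llama.cpp revision go-llama pins itself:
    make build LLAMA_CPP_REPO= LLAMA_CPP_VERSION=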
@@ -201,6 +228,9 @@ whisper.cpp/libwhisper.a: whisper.cpp
 go-llama:
 	git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama
 	cd go-llama && git checkout -b build $(GOLLAMA_VERSION) && git submodule update --init --recursive --depth 1
+ifneq ($(LLAMA_CPP_REPO),)
+	cd go-llama && rm -rf llama.cpp && git clone $(LLAMA_CPP_REPO) llama.cpp && cd llama.cpp && git checkout -b build $(LLAMA_CPP_VERSION) && git submodule update --init --recursive --depth 1
+endif
 
 go-llama/libbinding.a: go-llama
 	$(MAKE) -C go-llama BUILD_TYPE=$(BUILD_TYPE) libbinding.a
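The ifneq/endif pair starts at column 0, so it is a make conditional evaluated while the Makefile is parsed rather than a shell construct inside the recipe: when LLAMA_CPP_REPO is non-empty, the extra recipe line replaces go-llama's vendored llama.cpp with a clone of the fork checked out at LLAMA_CPP_VERSION. One way to verify which tree actually ended up in place, assuming the clone lands in go-llama/llama.cpp as the recipe suggests:

    # Inspect the llama.cpp checkout that the go-llama target produced:
    cd go-llama/llama.cpp && git remote -v && git rev-parse HEAD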
@@ -227,6 +257,7 @@ prepare-sources: get-sources replace
 
 ## GENERIC
 rebuild: ## Rebuilds the project
+	$(GOCMD) clean -cache
 	$(MAKE) -C go-llama clean
 	$(MAKE) -C gpt4all/gpt4all-bindings/golang/ clean
 	$(MAKE) -C go-ggml-transformers clean
@@ -242,6 +273,7 @@ prepare: prepare-sources backend-assets/gpt4all $(OPTIONAL_TARGETS) go-llama/lib
 	touch $@
 
 clean: ## Remove build related file
+	$(GOCMD) clean -cache
 	rm -fr ./go-llama
 	rm -rf ./gpt4all
 	rm -rf ./go-gpt2
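Both rebuild and clean now begin with $(GOCMD) clean -cache; assuming GOCMD resolves to the go binary as is usual for this Makefile, that wipes Go's build cache so cgo artifacts compiled against a previously pinned C/C++ revision are not reused after the pins change. The same step can be run by hand:

    # What the new recipe line does, run manually:
    go clean -cache
    # Location of the cache, if you want to inspect it first:
    go env GOCACHE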