// check if all lora adapters have the same tensors
// TODO: remove this when we can support merging subset of adapters. Ref: https://github.com/ggerganov/llama.cpp/pull/8607#discussion_r1686027777
if (adapters.size() > 1) {
    auto & base_adapter = adapters[0];
    for (size_t i = 1; i < adapters.size(); ++i) {
        for (auto & it : base_adapter->tensors) {
            // every tensor present in the base adapter must also exist in adapter i
            if (adapters[i]->get_tensor(it.first) == nullptr) {
                throw std::runtime_error("Input adapters do not have the same list of tensors. This is not yet supported. Please merge the adapter one-by-one instead of merging all at once.");
            }
        }
    }
}

// if true, this tensor can be lora-merged. if false, we skip merging and just copy data to outfile
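For illustration, the invariant enforced by the check above can be exercised in isolation. Below is a minimal, self-contained sketch, assuming each adapter is reduced to a plain map from tensor name to (dummy) tensor data; adapter_tensors and check_same_tensor_list are hypothetical names for this sketch, not part of the PR:

#include <cstdio>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

// hypothetical stand-in for an adapter: tensor name -> tensor data (dummy int here)
using adapter_tensors = std::map<std::string, int>;

// throws if any adapter is missing a tensor that the base (first) adapter has
static void check_same_tensor_list(const std::vector<adapter_tensors> & adapters) {
    if (adapters.size() < 2) {
        return; // nothing to compare
    }
    const adapter_tensors & base = adapters[0];
    for (size_t i = 1; i < adapters.size(); ++i) {
        for (const auto & it : base) {
            if (adapters[i].count(it.first) == 0) {
                throw std::runtime_error("adapter " + std::to_string(i) + " is missing tensor: " + it.first);
            }
        }
    }
}

int main() {
    std::vector<adapter_tensors> ok = {
        {{"blk.0.attn_q.lora_a", 0}, {"blk.0.attn_q.lora_b", 0}},
        {{"blk.0.attn_q.lora_a", 0}, {"blk.0.attn_q.lora_b", 0}},
    };
    check_same_tensor_list(ok); // same tensor names: passes

    std::vector<adapter_tensors> bad = {
        {{"blk.0.attn_q.lora_a", 0}, {"blk.0.attn_q.lora_b", 0}},
        {{"blk.0.attn_q.lora_a", 0}}, // missing lora_b
    };
    try {
        check_same_tensor_list(bad);
    } catch (const std::exception & e) {
        printf("error: %s\n", e.what());
    }
    return 0;
}

Like the snippet above, this check is one-directional: an adapter that carries extra tensors absent from the base adapter would still pass, which is consistent with treating the first adapter as the reference set.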