examples : iOS example with swift ui (#4159)

* copy to llama.cpp as subdir
* attempt enabling metal, fails
* ggml metal compiles!
* Update README.md
* initial conversion to new format, utf8 errors?
* bug fixes, but now has an invalid memory access :(
* added O3, now has insufficient memory access
* begin sync with master
* update to match latest code, new errors
* fixed it!
* fix for loop conditionals, increase result size
* fix current workflow errors
* attempt a llama.swiftui workflow
* Update .github/workflows/build.yml

Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>

---------

Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
This commit is contained in:
parent f3b269813f
commit bb03290c17

11 .github/workflows/build.yml vendored
@@ -498,6 +498,17 @@ jobs:
           path: |
             cudart-llama-bin-win-cu${{ matrix.cuda }}-x64.zip
 
+  ios-xcode-build:
+    runs-on: macos-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Build Xcode project
+        run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build
+
+
 #  freeBSD-latest:
 #    runs-on: macos-12
 #    steps:
1 examples/llama.swiftui/.gitignore vendored Normal file
@@ -0,0 +1 @@
xcuserdata
7 examples/llama.swiftui/README.md Normal file
@@ -0,0 +1,7 @@
# llama.swiftui

Local inference of llama.cpp on an iPhone.
So far I have only tested with the starcoder 1B model, but it can most likely handle 7B models as well.

https://github.com/bachittle/llama.cpp/assets/39804642/e290827a-4edb-4093-9642-2a5e399ec545
176 examples/llama.swiftui/llama.cpp.swift/LibLlama.swift Normal file
@@ -0,0 +1,176 @@
import Foundation

// import llama

enum LlamaError: Error {
    case couldNotInitializeContext
}

actor LlamaContext {
    private var model: OpaquePointer
    private var context: OpaquePointer
    private var batch: llama_batch
    private var tokens_list: [llama_token]

    var n_len: Int32 = 512
    var n_cur: Int32 = 0
    var n_decode: Int32 = 0

    init(model: OpaquePointer, context: OpaquePointer) {
        self.model = model
        self.context = context
        self.tokens_list = []
        self.batch = llama_batch_init(512, 0, 1)
    }

    deinit {
        llama_free(context)
        llama_free_model(model)
        llama_backend_free()
    }

    static func createContext(path: String) throws -> LlamaContext {
        llama_backend_init(false)
        let model_params = llama_model_default_params()

        let model = llama_load_model_from_file(path, model_params)
        guard let model else {
            print("Could not load model at \(path)")
            throw LlamaError.couldNotInitializeContext
        }
        var ctx_params = llama_context_default_params()
        ctx_params.seed = 1234
        ctx_params.n_ctx = 2048
        ctx_params.n_threads = 8
        ctx_params.n_threads_batch = 8

        let context = llama_new_context_with_model(model, ctx_params)
        guard let context else {
            print("Could not load context!")
            throw LlamaError.couldNotInitializeContext
        }

        return LlamaContext(model: model, context: context)
    }

    func get_n_tokens() -> Int32 {
        return batch.n_tokens;
    }

    func completion_init(text: String) {
        print("attempting to complete \"\(text)\"")

        tokens_list = tokenize(text: text, add_bos: true)

        let n_ctx = llama_n_ctx(context)
        let n_kv_req = tokens_list.count + (Int(n_len) - tokens_list.count)

        print("\n n_len = \(n_len), n_ctx = \(n_ctx), n_kv_req = \(n_kv_req)")

        if n_kv_req > n_ctx {
            print("error: n_kv_req > n_ctx, the required KV cache size is not big enough")
        }

        for id in tokens_list {
            print(token_to_piece(token: id))
        }

        // batch = llama_batch_init(512, 0) // done in init()
        batch.n_tokens = Int32(tokens_list.count)

        for i1 in 0..<batch.n_tokens {
            let i = Int(i1)
            batch.token[i] = tokens_list[i]
            batch.pos[i] = i1
            batch.n_seq_id[Int(i)] = 1
            batch.seq_id[Int(i)]![0] = 0
            batch.logits[i] = 0
        }
        batch.logits[Int(batch.n_tokens) - 1] = 1 // true

        if llama_decode(context, batch) != 0 {
            print("llama_decode() failed")
        }

        n_cur = batch.n_tokens
    }

    func completion_loop() -> String {
        var new_token_id: llama_token = 0

        let n_vocab = llama_n_vocab(model)
        let logits = llama_get_logits_ith(context, batch.n_tokens - 1)

        var candidates = Array<llama_token_data>()
        candidates.reserveCapacity(Int(n_vocab))

        for token_id in 0..<n_vocab {
            candidates.append(llama_token_data(id: token_id, logit: logits![Int(token_id)], p: 0.0))
        }
        candidates.withUnsafeMutableBufferPointer() { buffer in
            var candidates_p = llama_token_data_array(data: buffer.baseAddress, size: buffer.count, sorted: false)

            new_token_id = llama_sample_token_greedy(context, &candidates_p)
        }

        if new_token_id == llama_token_eos(context) || n_cur == n_len {
            print("\n")
            return ""
        }

        let new_token_str = token_to_piece(token: new_token_id)
        print(new_token_str)
        // tokens_list.append(new_token_id)

        batch.n_tokens = 0

        batch.token[Int(batch.n_tokens)] = new_token_id
        batch.pos[Int(batch.n_tokens)] = n_cur
        batch.n_seq_id[Int(batch.n_tokens)] = 1
        batch.seq_id[Int(batch.n_tokens)]![0] = 0
        batch.logits[Int(batch.n_tokens)] = 1 // true
        batch.n_tokens += 1

        n_decode += 1

        n_cur += 1

        if llama_decode(context, batch) != 0 {
            print("failed to evaluate llama!")
        }

        return new_token_str
    }

    func clear() {
        tokens_list.removeAll()
    }

    private func tokenize(text: String, add_bos: Bool) -> [llama_token] {
        let n_tokens = text.count + (add_bos ? 1 : 0)
        let tokens = UnsafeMutablePointer<llama_token>.allocate(capacity: n_tokens)
        let tokenCount = llama_tokenize(model, text, Int32(text.count), tokens, Int32(n_tokens), add_bos, false)

        var swiftTokens: [llama_token] = []
        for i in 0..<tokenCount {
            swiftTokens.append(tokens[Int(i)])
        }

        tokens.deallocate()

        return swiftTokens
    }

    private func token_to_piece(token: llama_token) -> String {
        let result = UnsafeMutablePointer<Int8>.allocate(capacity: 8)
        result.initialize(repeating: Int8(0), count: 8)

        let _ = llama_token_to_piece(model, token, result, 8)

        let resultStr = String(cString: result)

        result.deallocate()

        return resultStr
    }
}
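For orientation, here is a minimal sketch (not part of this commit) of how the LlamaContext actor above can be driven end to end from any async context; the model path is a placeholder, and the flow simply mirrors what LlamaState.swift does further down in this diff:

    // Illustrative only: prompt -> greedy completion using LlamaContext.
    // modelPath is a placeholder for a real GGUF file on disk.
    func runCompletion(modelPath: String, prompt: String) async throws -> String {
        let llama = try LlamaContext.createContext(path: modelPath)

        var output = ""
        await llama.completion_init(text: prompt)        // tokenize + decode the prompt
        while await llama.n_cur <= llama.n_len {          // one greedy-sampled token per call
            output += await llama.completion_loop()       // returns "" on EOS
        }
        await llama.clear()
        return output
    }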
5 examples/llama.swiftui/llama.cpp.swift/bridging-header.h Normal file
@@ -0,0 +1,5 @@
//
// Use this file to import your target's public headers that you would like to expose to Swift.
//

#import "llama.h"
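Because the target sets SWIFT_OBJC_BRIDGING_HEADER to this file (see the project file below), the C declarations in llama.h are imported as plain global functions in Swift. A hedged sketch, not part of the commit, of what that direct access looks like:

    // Illustrative only: call the llama.cpp C API directly through the
    // bridging header. Signatures follow llama.h as of this commit.
    func printSystemInfo() {
        llama_backend_init(false)                     // NUMA disabled
        if let info = llama_print_system_info() {     // const char * imported as a C-string pointer
            print(String(cString: info))
        }
        llama_backend_free()
    }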
481 examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj Normal file
@@ -0,0 +1,481 @@
// !$*UTF8*$!
{
  archiveVersion = 1;
  classes = {
  };
  objectVersion = 56;
  objects = {

/* Begin PBXBuildFile section */
    542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; };
    5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; };
    542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; };
    542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; };
    542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; };
    542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; };
    549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; };
    549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; };
    8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; };
    8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; };
    8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; };
    8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; };
    8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; };
    8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; };
    8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; };
    8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
    542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = "<group>"; };
    542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = "<group>"; };
    542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = "<group>"; };
    5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = "<group>"; };
    542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = "<group>"; };
    542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = "<group>"; };
    542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = "<group>"; };
    542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = "<group>"; };
    542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = "<group>"; };
    542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = "<group>"; };
    549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = "<group>"; };
    549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = "<group>"; };
    549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = "<group>"; };
    549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; };
    8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = "<group>"; };
    8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; };
    8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = "<group>"; };
    8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
    8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
    8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
    8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
    8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "llama-2-7b-chat.Q2_K.gguf"; sourceTree = "<group>"; };
    8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = "<group>"; };
    8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = "<group>"; };
    8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
    8A1C83702AC328BD0096AF73 /* Frameworks */ = {
      isa = PBXFrameworksBuildPhase;
      buildActionMask = 2147483647;
      files = (
        549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */,
        8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */,
      );
      runOnlyForDeploymentPostprocessing = 0;
    };
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
    8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = {
      isa = PBXGroup;
      children = (
        5423760A2B0D9C4B008E6A1C /* ggml-backend.c */,
        542376092B0D9C40008E6A1C /* ggml-backend.h */,
        542376062B0D9BEA008E6A1C /* ggml-quants.h */,
        542376072B0D9BFB008E6A1C /* ggml-quants.c */,
        549479C82AC9E10B00E0F78B /* ggml-metal.metal */,
        549479C62AC9E0F200E0F78B /* ggml-metal.h */,
        549479C52AC9E0F200E0F78B /* ggml-metal.m */,
        542EA09B2AC8723900A8AEE9 /* ggml.c */,
        542EA09C2AC8723900A8AEE9 /* ggml.h */,
        542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */,
        542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */,
        542EA0A12AC8729100A8AEE9 /* llama.cpp */,
        542EA0A22AC8729100A8AEE9 /* llama.h */,
      );
      name = llama.cpp;
      sourceTree = "<group>";
    };
    8A1C836A2AC328BD0096AF73 = {
      isa = PBXGroup;
      children = (
        8A08D1F62AC7383900FE6CD4 /* llama.cpp */,
        8A907F312AC7134E006146EA /* llama.cpp.swift */,
        8A3F84232AC4C891005E2EE8 /* models */,
        8A1C83752AC328BD0096AF73 /* llama.swiftui */,
        8A1C83742AC328BD0096AF73 /* Products */,
        8A39BE082AC7601000BFEB40 /* Frameworks */,
      );
      sourceTree = "<group>";
    };
    8A1C83742AC328BD0096AF73 /* Products */ = {
      isa = PBXGroup;
      children = (
        8A1C83732AC328BD0096AF73 /* llama.swiftui.app */,
      );
      name = Products;
      sourceTree = "<group>";
    };
    8A1C83752AC328BD0096AF73 /* llama.swiftui */ = {
      isa = PBXGroup;
      children = (
        8A3F84102AC4BD85005E2EE8 /* Resources */,
        8A9F7C4B2AC332DC008AE1EA /* Models */,
        8A9F7C4A2AC332BF008AE1EA /* UI */,
        8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */,
        8A1C837A2AC328BE0096AF73 /* Assets.xcassets */,
        8A1C837C2AC328BE0096AF73 /* Preview Content */,
      );
      path = llama.swiftui;
      sourceTree = "<group>";
    };
    8A1C837C2AC328BE0096AF73 /* Preview Content */ = {
      isa = PBXGroup;
      children = (
        8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */,
      );
      path = "Preview Content";
      sourceTree = "<group>";
    };
    8A39BE082AC7601000BFEB40 /* Frameworks */ = {
      isa = PBXGroup;
      children = (
        549479CA2AC9E16000E0F78B /* Metal.framework */,
        8A39BE092AC7601000BFEB40 /* Accelerate.framework */,
      );
      name = Frameworks;
      sourceTree = "<group>";
    };
    8A3F84102AC4BD85005E2EE8 /* Resources */ = {
      isa = PBXGroup;
      children = (
        8A3F84112AC4BD8C005E2EE8 /* models */,
      );
      path = Resources;
      sourceTree = "<group>";
    };
    8A3F84112AC4BD8C005E2EE8 /* models */ = {
      isa = PBXGroup;
      children = (
        8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */,
      );
      path = models;
      sourceTree = "<group>";
    };
    8A907F312AC7134E006146EA /* llama.cpp.swift */ = {
      isa = PBXGroup;
      children = (
        8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */,
        8A907F322AC7134E006146EA /* LibLlama.swift */,
      );
      path = llama.cpp.swift;
      sourceTree = "<group>";
    };
    8A9F7C4A2AC332BF008AE1EA /* UI */ = {
      isa = PBXGroup;
      children = (
        8A1C83782AC328BD0096AF73 /* ContentView.swift */,
      );
      path = UI;
      sourceTree = "<group>";
    };
    8A9F7C4B2AC332DC008AE1EA /* Models */ = {
      isa = PBXGroup;
      children = (
        8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */,
      );
      path = Models;
      sourceTree = "<group>";
    };
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
    8A1C83722AC328BD0096AF73 /* llama.swiftui */ = {
      isa = PBXNativeTarget;
      buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */;
      buildPhases = (
        8A1C836F2AC328BD0096AF73 /* Sources */,
        8A1C83702AC328BD0096AF73 /* Frameworks */,
        8A1C83712AC328BD0096AF73 /* Resources */,
      );
      buildRules = (
      );
      dependencies = (
      );
      name = llama.swiftui;
      packageProductDependencies = (
      );
      productName = llama.swiftui;
      productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */;
      productType = "com.apple.product-type.application";
    };
/* End PBXNativeTarget section */

/* Begin PBXProject section */
    8A1C836B2AC328BD0096AF73 /* Project object */ = {
      isa = PBXProject;
      attributes = {
        BuildIndependentTargetsInParallel = 1;
        LastSwiftUpdateCheck = 1500;
        LastUpgradeCheck = 1500;
        TargetAttributes = {
          8A1C83722AC328BD0096AF73 = {
            CreatedOnToolsVersion = 15.0;
            LastSwiftMigration = 1500;
          };
        };
      };
      buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */;
      compatibilityVersion = "Xcode 14.0";
      developmentRegion = en;
      hasScannedForEncodings = 0;
      knownRegions = (
        en,
        Base,
      );
      mainGroup = 8A1C836A2AC328BD0096AF73;
      packageReferences = (
      );
      productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */;
      projectDirPath = "";
      projectRoot = "";
      targets = (
        8A1C83722AC328BD0096AF73 /* llama.swiftui */,
      );
    };
/* End PBXProject section */

/* Begin PBXResourcesBuildPhase section */
    8A1C83712AC328BD0096AF73 /* Resources */ = {
      isa = PBXResourcesBuildPhase;
      buildActionMask = 2147483647;
      files = (
        542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */,
        8A3F84242AC4C891005E2EE8 /* models in Resources */,
        8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */,
        8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */,
      );
      runOnlyForDeploymentPostprocessing = 0;
    };
/* End PBXResourcesBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
    8A1C836F2AC328BD0096AF73 /* Sources */ = {
      isa = PBXSourcesBuildPhase;
      buildActionMask = 2147483647;
      files = (
        542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */,
        549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */,
        542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */,
        8A907F332AC7138A006146EA /* LibLlama.swift in Sources */,
        542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */,
        8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */,
        8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */,
        8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */,
        542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */,
        5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */,
      );
      runOnlyForDeploymentPostprocessing = 0;
    };
/* End PBXSourcesBuildPhase section */

/* Begin XCBuildConfiguration section */
    8A1C837F2AC328BE0096AF73 /* Debug */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        ALWAYS_SEARCH_USER_PATHS = NO;
        ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
        CLANG_ANALYZER_NONNULL = YES;
        CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
        CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
        CLANG_ENABLE_MODULES = YES;
        CLANG_ENABLE_OBJC_ARC = YES;
        CLANG_ENABLE_OBJC_WEAK = YES;
        CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
        CLANG_WARN_BOOL_CONVERSION = YES;
        CLANG_WARN_COMMA = YES;
        CLANG_WARN_CONSTANT_CONVERSION = YES;
        CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
        CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
        CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
        CLANG_WARN_EMPTY_BODY = YES;
        CLANG_WARN_ENUM_CONVERSION = YES;
        CLANG_WARN_INFINITE_RECURSION = YES;
        CLANG_WARN_INT_CONVERSION = YES;
        CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
        CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
        CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
        CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
        CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
        CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
        CLANG_WARN_STRICT_PROTOTYPES = YES;
        CLANG_WARN_SUSPICIOUS_MOVE = YES;
        CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
        CLANG_WARN_UNREACHABLE_CODE = YES;
        CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
        COPY_PHASE_STRIP = NO;
        DEBUG_INFORMATION_FORMAT = dwarf;
        ENABLE_STRICT_OBJC_MSGSEND = YES;
        ENABLE_TESTABILITY = YES;
        ENABLE_USER_SCRIPT_SANDBOXING = YES;
        GCC_C_LANGUAGE_STANDARD = gnu17;
        GCC_DYNAMIC_NO_PIC = NO;
        GCC_NO_COMMON_BLOCKS = YES;
        GCC_OPTIMIZATION_LEVEL = 0;
        GCC_PREPROCESSOR_DEFINITIONS = (
          "DEBUG=1",
          "$(inherited)",
        );
        GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
        GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
        GCC_WARN_UNDECLARED_SELECTOR = YES;
        GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
        GCC_WARN_UNUSED_FUNCTION = YES;
        GCC_WARN_UNUSED_VARIABLE = YES;
        IPHONEOS_DEPLOYMENT_TARGET = 17.0;
        LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
        MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
        MTL_FAST_MATH = YES;
        ONLY_ACTIVE_ARCH = YES;
        SDKROOT = iphoneos;
        SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
        SWIFT_OPTIMIZATION_LEVEL = "-Onone";
      };
      name = Debug;
    };
    8A1C83802AC328BE0096AF73 /* Release */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        ALWAYS_SEARCH_USER_PATHS = NO;
        ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
        CLANG_ANALYZER_NONNULL = YES;
        CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
        CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
        CLANG_ENABLE_MODULES = YES;
        CLANG_ENABLE_OBJC_ARC = YES;
        CLANG_ENABLE_OBJC_WEAK = YES;
        CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
        CLANG_WARN_BOOL_CONVERSION = YES;
        CLANG_WARN_COMMA = YES;
        CLANG_WARN_CONSTANT_CONVERSION = YES;
        CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
        CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
        CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
        CLANG_WARN_EMPTY_BODY = YES;
        CLANG_WARN_ENUM_CONVERSION = YES;
        CLANG_WARN_INFINITE_RECURSION = YES;
        CLANG_WARN_INT_CONVERSION = YES;
        CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
        CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
        CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
        CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
        CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
        CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
        CLANG_WARN_STRICT_PROTOTYPES = YES;
        CLANG_WARN_SUSPICIOUS_MOVE = YES;
        CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
        CLANG_WARN_UNREACHABLE_CODE = YES;
        CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
        COPY_PHASE_STRIP = NO;
        DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
        ENABLE_NS_ASSERTIONS = NO;
        ENABLE_STRICT_OBJC_MSGSEND = YES;
        ENABLE_USER_SCRIPT_SANDBOXING = YES;
        GCC_C_LANGUAGE_STANDARD = gnu17;
        GCC_NO_COMMON_BLOCKS = YES;
        GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
        GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
        GCC_WARN_UNDECLARED_SELECTOR = YES;
        GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
        GCC_WARN_UNUSED_FUNCTION = YES;
        GCC_WARN_UNUSED_VARIABLE = YES;
        IPHONEOS_DEPLOYMENT_TARGET = 17.0;
        LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
        MTL_ENABLE_DEBUG_INFO = NO;
        MTL_FAST_MATH = YES;
        SDKROOT = iphoneos;
        SWIFT_COMPILATION_MODE = wholemodule;
        VALIDATE_PRODUCT = YES;
      };
      name = Release;
    };
    8A1C83822AC328BE0096AF73 /* Debug */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
        ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
        CLANG_ENABLE_MODULES = YES;
        CODE_SIGN_STYLE = Automatic;
        CURRENT_PROJECT_VERSION = 1;
        DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\"";
        DEVELOPMENT_TEAM = STLSG3FG8Q;
        ENABLE_PREVIEWS = YES;
        GENERATE_INFOPLIST_FILE = YES;
        INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
        INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
        INFOPLIST_KEY_UILaunchScreen_Generation = YES;
        INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
        INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
        IPHONEOS_DEPLOYMENT_TARGET = 16.0;
        LD_RUNPATH_SEARCH_PATHS = (
          "$(inherited)",
          "@executable_path/Frameworks",
        );
        MARKETING_VERSION = 1.0;
        PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift";
        PRODUCT_NAME = "$(TARGET_NAME)";
        SWIFT_EMIT_LOC_STRINGS = YES;
        SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h";
        SWIFT_OPTIMIZATION_LEVEL = "-Onone";
        SWIFT_VERSION = 5.0;
        TARGETED_DEVICE_FAMILY = "1,2";
      };
      name = Debug;
    };
    8A1C83832AC328BE0096AF73 /* Release */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
        ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
        CLANG_ENABLE_MODULES = YES;
        CODE_SIGN_STYLE = Automatic;
        CURRENT_PROJECT_VERSION = 1;
        DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\"";
        DEVELOPMENT_TEAM = STLSG3FG8Q;
        ENABLE_PREVIEWS = YES;
        GENERATE_INFOPLIST_FILE = YES;
        INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
        INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
        INFOPLIST_KEY_UILaunchScreen_Generation = YES;
        INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
        INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
        IPHONEOS_DEPLOYMENT_TARGET = 16.0;
        LD_RUNPATH_SEARCH_PATHS = (
          "$(inherited)",
          "@executable_path/Frameworks",
        );
        MARKETING_VERSION = 1.0;
        PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift";
        PRODUCT_NAME = "$(TARGET_NAME)";
        SWIFT_EMIT_LOC_STRINGS = YES;
        SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h";
        SWIFT_VERSION = 5.0;
        TARGETED_DEVICE_FAMILY = "1,2";
      };
      name = Release;
    };
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
    8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = {
      isa = XCConfigurationList;
      buildConfigurations = (
        8A1C837F2AC328BE0096AF73 /* Debug */,
        8A1C83802AC328BE0096AF73 /* Release */,
      );
      defaultConfigurationIsVisible = 0;
      defaultConfigurationName = Release;
    };
    8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = {
      isa = XCConfigurationList;
      buildConfigurations = (
        8A1C83822AC328BE0096AF73 /* Debug */,
        8A1C83832AC328BE0096AF73 /* Release */,
      );
      defaultConfigurationIsVisible = 0;
      defaultConfigurationName = Release;
    };
/* End XCConfigurationList section */
  };
  rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */;
}
7 examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/contents.xcworkspacedata generated Normal file
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>IDEDidComputeMac32BitWarning</key>
  <true/>
</dict>
</plist>
@@ -0,0 +1,11 @@
{
  "colors" : [
    {
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,13 @@
{
  "images" : [
    {
      "idiom" : "universal",
      "platform" : "ios",
      "size" : "1024x1024"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,6 @@
{
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
45 examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift Normal file
@@ -0,0 +1,45 @@
import Foundation

@MainActor
class LlamaState: ObservableObject {
    @Published var messageLog = ""

    private var llamaContext: LlamaContext?
    private var modelUrl: URL? {
        Bundle.main.url(forResource: "q8_0", withExtension: "gguf", subdirectory: "models")
        // Bundle.main.url(forResource: "llama-2-7b-chat", withExtension: "Q2_K.gguf", subdirectory: "models")
    }
    init() {
        do {
            try loadModel()
        } catch {
            messageLog += "Error!\n"
        }
    }

    private func loadModel() throws {
        messageLog += "Loading model...\n"
        if let modelUrl {
            llamaContext = try LlamaContext.createContext(path: modelUrl.path())
            messageLog += "Loaded model \(modelUrl.lastPathComponent)\n"
        } else {
            messageLog += "Could not locate model\n"
        }
    }

    func complete(text: String) async {
        guard let llamaContext else {
            return
        }
        messageLog += "Attempting to complete text...\n"
        await llamaContext.completion_init(text: text)
        messageLog += "\(text)"

        while await llamaContext.n_cur <= llamaContext.n_len {
            let result = await llamaContext.completion_loop()
            messageLog += "\(result)"
        }
        await llamaContext.clear()
        messageLog += "\n\ndone\n"
    }
}
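The modelUrl above expects a GGUF file named q8_0.gguf to be bundled under Resources/models. As an illustrative variant (not part of this commit; the file name is a placeholder), the model could instead be resolved from the app's Documents directory after being downloaded at runtime:

    // Illustrative only: look for a previously downloaded model in Documents
    // instead of the app bundle. "q8_0.gguf" is a placeholder file name.
    func downloadedModelUrl(named name: String = "q8_0.gguf") -> URL? {
        guard let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
            return nil
        }
        let url = docs.appendingPathComponent(name)
        return FileManager.default.fileExists(atPath: url.path) ? url : nil
    }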
@@ -0,0 +1,6 @@
{
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
0 examples/llama.swiftui/llama.swiftui/Resources/models/.gitignore vendored Normal file
42 examples/llama.swiftui/llama.swiftui/UI/ContentView.swift Normal file
@@ -0,0 +1,42 @@
import SwiftUI

struct ContentView: View {
    @StateObject var llamaState = LlamaState()

    @State private var multiLineText = ""

    var body: some View {
        VStack {
            ScrollView(.vertical) {
                Text(llamaState.messageLog)
            }

            TextEditor(text: $multiLineText)
                .frame(height: 200)
                .padding()
                .border(Color.gray, width: 0.5)
            Button(action: {
                sendText()
            }) {
                Text("Send")
                    .padding()
                    .background(Color.blue)
                    .foregroundColor(.white)
                    .cornerRadius(8)
            }
        }
        .padding()
    }

    func sendText() {
        Task {
            await llamaState.complete(text: multiLineText)
            multiLineText = ""
        }
    }
}

/*
#Preview {
    ContentView()
}
*/
10 examples/llama.swiftui/llama.swiftui/llama_swiftuiApp.swift Normal file
@@ -0,0 +1,10 @@
import SwiftUI

@main
struct llama_swiftuiApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}