Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@ SwiftiDate/GlobalVariables.swift
firebase_chat_sorter.py
swiftidate-cdff0-firebase-adminsdk-e2p44-7c5e06ebfc.json
SwiftiDate.xcodeproj/project.pbxproj
mlc-llm
Binary file added MLCSwift/.DS_Store
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>MLCSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
</dict>
</dict>
</plist>
32 changes: 32 additions & 0 deletions MLCSwift/Package.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
// swift-tools-version:5.5
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

// MLCSwift: Swift bindings for the MLC LLM engine.
// The package ships two targets: an Objective-C++ shim (MLCEngineObjC) that
// talks to the TVM/MLC runtime, and a pure-Swift layer (MLCSwift) on top of it.
let package = Package(
name: "MLCSwift",
products: [
// Single library product exposing both the ObjC++ shim and the Swift layer.
.library(
name: "MLCSwift",
targets: ["MLCEngineObjC", "MLCSwift"]
)
],
dependencies: [],
targets: [
// Objective-C++ target bridging to the TVM runtime. Header search paths
// are relative to this target's sources and point at a `tvm_home`
// checkout two levels up — NOTE(review): assumes TVM sources are vendored
// at that location; confirm the expected checkout layout.
.target(
name: "MLCEngineObjC",
path: "Sources/ObjC",
cxxSettings: [
.headerSearchPath("../../tvm_home/include"),
.headerSearchPath("../../tvm_home/3rdparty/dmlc-core/include"),
.headerSearchPath("../../tvm_home/3rdparty/dlpack/include")
]
),
// Swift API layered on the ObjC++ shim.
.target(
name: "MLCSwift",
dependencies: ["MLCEngineObjC"],
path: "Sources/Swift"
)
],
// The TVM/DMLC headers require C++17.
cxxLanguageStandard: .cxx17
)
4 changes: 4 additions & 0 deletions MLCSwift/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# MLCSwift

This is a simple Swift package that exposes the chat module to Swift.
Check out our [documentation](https://llm.mlc.ai/docs/) for more examples.
110 changes: 110 additions & 0 deletions MLCSwift/Sources/ObjC/LLMEngine.mm
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
//
// LLMEngine.mm
// LLMEngine
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#include <os/proc.h>

#include "LLMEngine.h"

#define TVM_USE_LIBBACKTRACE 0
#define DMLC_USE_LOGGING_LIBRARY <tvm/runtime/logging.h>

#include <tvm/runtime/packed_func.h>
#include <tvm/runtime/registry.h>

using namespace tvm::runtime;

// Objective-C adapter over the C++ JSON FFI engine registered with the TVM
// global registry under "mlc.json_ffi.CreateJSONFFIEngine". Every method is
// a thin forwarder to a PackedFunc resolved once in -init.
@implementation JSONFFIEngine {
// Internal C++ state, owned for the lifetime of this instance.
// TVM module backing this engine; source of all PackedFuncs below.
Module json_ffi_engine_;
// Member functions resolved from json_ffi_engine_ in -init.
PackedFunc init_background_engine_func_;
PackedFunc unload_func_;
PackedFunc reload_func_;
PackedFunc reset_func_;
PackedFunc chat_completion_func_;
PackedFunc abort_func_;
PackedFunc run_background_loop_func_;
PackedFunc run_background_stream_back_loop_func_;
PackedFunc exit_background_loop_func_;
}

// Creates the underlying C++ engine via the TVM global registry and caches
// every member function it exposes. ICHECK aborts on failure (programmer
// error / misbuilt runtime), so a non-nil return implies a fully wired engine.
- (instancetype)init {
if (self = [super init]) {
// Look up the engine factory; absent if the runtime was built without
// the JSON FFI engine.
const PackedFunc* f_json_ffi_create = Registry::Get("mlc.json_ffi.CreateJSONFFIEngine");
ICHECK(f_json_ffi_create) << "Cannot find mlc.json_ffi.CreateJSONFFIEngine";
json_ffi_engine_ = (*f_json_ffi_create)();
init_background_engine_func_ = json_ffi_engine_->GetFunction("init_background_engine");
reload_func_ = json_ffi_engine_->GetFunction("reload");
unload_func_ = json_ffi_engine_->GetFunction("unload");
reset_func_ = json_ffi_engine_->GetFunction("reset");
chat_completion_func_ = json_ffi_engine_->GetFunction("chat_completion");
abort_func_ = json_ffi_engine_->GetFunction("abort");
run_background_loop_func_ = json_ffi_engine_->GetFunction("run_background_loop");
run_background_stream_back_loop_func_ =
json_ffi_engine_->GetFunction("run_background_stream_back_loop");
exit_background_loop_func_ = json_ffi_engine_->GetFunction("exit_background_loop");

// Fail fast if the module does not expose the full expected interface.
ICHECK(init_background_engine_func_ != nullptr);
ICHECK(reload_func_ != nullptr);
ICHECK(unload_func_ != nullptr);
ICHECK(reset_func_ != nullptr);
ICHECK(chat_completion_func_ != nullptr);
ICHECK(abort_func_ != nullptr);
ICHECK(run_background_loop_func_ != nullptr);
ICHECK(run_background_stream_back_loop_func_ != nullptr);
ICHECK(exit_background_loop_func_ != nullptr);
}
return self;
}

// Initializes the background engine on the Metal device (device id 0) and
// installs `streamCallback` to receive streamed engine output as NSStrings.
// NOTE(review): stringWithUTF8String: returns nil for invalid UTF-8, so the
// callback could be invoked with nil — confirm the engine only emits valid
// UTF-8, or that consumers tolerate nil.
- (void)initBackgroundEngine:(void (^)(NSString*))streamCallback {
// Wrap the ObjC block as a TVM TypedPackedFunc; the block is copied into
// the lambda capture, so it outlives this method's scope.
TypedPackedFunc<void(String)> internal_stream_callback([streamCallback](String value) {
streamCallback([NSString stringWithUTF8String:value.c_str()]);
});
int device_type = kDLMetal;
int device_id = 0;
init_background_engine_func_(device_type, device_id, internal_stream_callback);
}

// Reloads the engine with the given engine configuration, passed through as
// a JSON string (e.g. model location and engine settings).
- (void)reload:(NSString*)engineConfigJson {
std::string engine_config = engineConfigJson.UTF8String;
reload_func_(engine_config);
}

// Unloads the currently loaded engine/model state.
- (void)unload {
unload_func_();
}

// Resets engine state; forwarded directly to the C++ "reset" function.
- (void)reset {
reset_func_();
}

// Submits a chat-completion request encoded as JSON, tagged with `requestID`
// so it can later be cancelled via -abort:. Results are delivered
// asynchronously through the stream callback installed at init time.
- (void)chatCompletion:(NSString*)requestJSON requestID:(NSString*)requestID {
std::string request_json = requestJSON.UTF8String;
std::string request_id = requestID.UTF8String;
chat_completion_func_(request_json, request_id);
}

// Aborts the request previously submitted with the given request ID.
- (void)abort:(NSString*)requestID {
std::string request_id = requestID.UTF8String;
abort_func_(request_id);
}

// Runs the engine's main background loop. Presumably blocking (the name and
// the paired -exitBackgroundLoop suggest it) — run off the main thread.
- (void)runBackgroundLoop {
run_background_loop_func_();
}

// Runs the loop that streams results back to the installed callback;
// presumably blocking, same caveat as -runBackgroundLoop.
- (void)runBackgroundStreamBackLoop {
run_background_stream_back_loop_func_();
}

// Signals the background loops to exit.
- (void)exitBackgroundLoop {
exit_background_loop_func_();
}

@end
32 changes: 32 additions & 0 deletions MLCSwift/Sources/ObjC/include/LLMEngine.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
//
// Use this file to import your target's public headers that you would like to expose to Swift.
// LLM Chat Module
//
// Exposed Objective-C interface that enables the Swift binding.
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

/**
 * Internal raw JSON FFI engine that forwards each request to the
 * corresponding JSON FFI engine implemented in C++.
 *
 * Typical lifecycle: -initBackgroundEngine: to install the stream callback,
 * -reload: with an engine-config JSON string, then one
 * -chatCompletion:requestID: per request. The two run-loop methods appear to
 * block (they pair with -exitBackgroundLoop) — drive them on background
 * threads.
 */
@interface JSONFFIEngine : NSObject

/// Initializes the background engine and registers `streamCallback`, which
/// receives streamed engine output as strings.
- (void)initBackgroundEngine:(void (^)(NSString*))streamCallback;

/// Reloads the engine with the given engine configuration (a JSON string).
- (void)reload:(NSString*)engineConfig;

/// Unloads the currently loaded engine/model state.
- (void)unload;

/// Resets engine state.
- (void)reset;

/// Submits a chat-completion request encoded as JSON, tagged with
/// `requestID` so it can later be cancelled via -abort:.
- (void)chatCompletion:(NSString*)requestJSON requestID:(NSString*)requestID;

/// Aborts the in-flight request with the given request ID.
- (void)abort:(NSString*)requestID;

/// Runs the engine's main background loop (presumably blocking; run off the
/// main thread).
- (void)runBackgroundLoop;

/// Runs the loop that streams results back to the callback (presumably
/// blocking; run off the main thread).
- (void)runBackgroundStreamBackLoop;

/// Signals the background loops to exit.
- (void)exitBackgroundLoop;

@end
Loading