diff --git a/.github/workflows/compile.yml b/.github/workflows/compile.yml
index 5ade12316..c7c0283a7 100644
--- a/.github/workflows/compile.yml
+++ b/.github/workflows/compile.yml
@@ -28,13 +28,25 @@ jobs:
include:
- build: 'noavx'
defines: '-DGGML_AVX=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF'
+ os: ubuntu-24.04
+ arch: x64
- build: 'avx2'
defines: ''
+ os: ubuntu-24.04
+ arch: x64
- build: 'avx'
defines: '-DGGML_AVX2=OFF'
+ os: ubuntu-24.04
+ arch: x64
- build: 'avx512'
defines: '-DGGML_AVX512=ON'
- runs-on: ubuntu-24.04
+ os: ubuntu-24.04
+ arch: x64
+ - build: 'aarch64'
+ defines: '-DGGML_NATIVE=OFF -DGGML_CPU_AARCH64=ON -DGGML_CPU_ARM_ARCH=armv8-a'
+ os: ubuntu-24.04-arm
+ arch: arm64
+ runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
with:
@@ -52,28 +64,28 @@ jobs:
- uses: actions/upload-artifact@v4
with:
path: ./build/bin/libllama.so
- name: llama-bin-linux-${{ matrix.build }}-x64.so
+ name: llama-bin-linux-${{ matrix.build }}-${{ matrix.arch }}.so
if-no-files-found: error
- uses: actions/upload-artifact@v4
with:
path: ./build/bin/libggml.so
- name: ggml-bin-linux-${{ matrix.build }}-x64.so
+ name: ggml-bin-linux-${{ matrix.build }}-${{ matrix.arch }}.so
if-no-files-found: error
- uses: actions/upload-artifact@v4
with:
path: ./build/bin/libggml-base.so
- name: ggml-base-bin-linux-${{ matrix.build }}-x64.so
+ name: ggml-base-bin-linux-${{ matrix.build }}-${{ matrix.arch }}.so
if-no-files-found: error
- uses: actions/upload-artifact@v4
with:
path: ./build/bin/libggml-cpu.so
- name: ggml-cpu-bin-linux-${{ matrix.build }}-x64.so
+ name: ggml-cpu-bin-linux-${{ matrix.build }}-${{ matrix.arch }}.so
if-no-files-found: error
- name: Upload Llava
uses: actions/upload-artifact@v4
with:
path: ./build/bin/libllava_shared.so
- name: llava-bin-linux-${{ matrix.build }}-x64.so
+ name: llava-bin-linux-${{ matrix.build }}-${{ matrix.arch }}.so
if-no-files-found: error
compile-musl:
@@ -527,19 +539,15 @@ jobs:
if-no-files-found: error
compile-android:
- # Disable android build
- if: false
-
+ name: Compile (Android)
strategy:
fail-fast: true
matrix:
include:
- - build: 'x86'
- defines: '-DANDROID_ABI=x86'
- build: 'x86_64'
- defines: '-DANDROID_ABI=x86_64'
+ defines: '-DANDROID_ABI=x86_64 -DCMAKE_C_FLAGS=-march=x86-64 -DCMAKE_CXX_FLAGS=-march=x86-64'
- build: 'arm64-v8a'
- defines: '-DANDROID_ABI=arm64-v8a'
+ defines: '-DANDROID_ABI=arm64-v8a -DCMAKE_C_FLAGS=-march=armv8.7a -DCMAKE_CXX_FLAGS=-march=armv8.7a'
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
@@ -555,28 +563,39 @@ jobs:
- name: Build
id: cmake_build
env:
- CMAKE_FLAGS: '-DCMAKE_TOOLCHAIN_FILE=${{ steps.setup-ndk.outputs.ndk-path }}/build/cmake/android.toolchain.cmake -DANDROID_PLATFORM=android-23'
+ CMAKE_FLAGS: '-DCMAKE_TOOLCHAIN_FILE=${{ steps.setup-ndk.outputs.ndk-path }}/build/cmake/android.toolchain.cmake -DANDROID_PLATFORM=android-23 -DGGML_OPENMP=OFF -DGGML_LLAMAFILE=OFF'
run: |
- mkdir build
- cd build
- cmake .. ${{ env.COMMON_DEFINE }} ${{ env.CMAKE_FLAGS }} ${{ matrix.defines }}
- cmake --build . --config Release -j ${env:NUMBER_OF_PROCESSORS}
- cd ..
- ls -R
+ # export-lora not supported on 32 bit machines hence breaks x86 build
+ sed -i '/add_subdirectory(export-lora)/d' examples/CMakeLists.txt # remove export-lora from examples
+ cmake ${{ env.COMMON_DEFINE }} ${{ env.CMAKE_FLAGS }} ${{ matrix.defines }} -B build
+ cmake --build build --config Release -j $(nproc)
- name: Upload Llama
uses: actions/upload-artifact@v4
with:
- path: ./build/src/libllama.so
+ path: ./build/bin/libllama.so
name: llama-bin-android-${{ matrix.build }}.so
- - uses: actions/upload-artifact@v4
+ - name: Upload GGML
+ uses: actions/upload-artifact@v4
with:
- path: ./build/ggml/src/libggml.so
+ path: ./build/bin/libggml.so
name: ggml-bin-android-${{ matrix.build }}.so
if-no-files-found: error
+ - name: Upload GGML Base
+ uses: actions/upload-artifact@v4
+ with:
+ path: ./build/bin/libggml-base.so
+ name: ggml-base-bin-android-${{ matrix.build }}.so
+ if-no-files-found: error
+ - name: Upload GGML CPU
+ uses: actions/upload-artifact@v4
+ with:
+ path: ./build/bin/libggml-cpu.so
+ name: ggml-cpu-bin-android-${{ matrix.build }}.so
+ if-no-files-found: error
- name: Upload Llava
uses: actions/upload-artifact@v4
with:
- path: ./build/examples/llava/libllava_shared.so
+ path: ./build/bin/libllava_shared.so
name: llava-bin-android-${{ matrix.build }}.so
build-deps:
@@ -601,7 +620,7 @@ jobs:
- name: Rearrange Files
run: |
# Make all directories at once
- mkdir --parents deps/{noavx,avx,avx2,avx512,musl-noavx,musl-avx,musl-avx2,musl-avx512,osx-arm64,osx-x64,osx-x64-rosetta2,cu11.7.1,cu12.2.0,vulkan,android-arm64-v8a,android-x86,android-x86_64}
+ mkdir --parents deps/{noavx,avx,avx2,avx512,linux-arm64,musl-noavx,musl-avx,musl-avx2,musl-avx512,osx-arm64,osx-x64,osx-x64-rosetta2,cu11.7.1,cu12.2.0,vulkan,android-arm64-v8a,android-x86,android-x86_64}
# Linux
cp artifacts/ggml-bin-linux-noavx-x64.so/libggml.so deps/noavx/libggml.so
@@ -628,6 +647,13 @@ jobs:
cp artifacts/llama-bin-linux-avx512-x64.so/libllama.so deps/avx512/libllama.so
cp artifacts/llava-bin-linux-avx512-x64.so/libllava_shared.so deps/avx512/libllava_shared.so
+ # Arm64
+ cp artifacts/ggml-bin-linux-aarch64-arm64.so/libggml.so deps/linux-arm64/libggml.so
+ cp artifacts/ggml-base-bin-linux-aarch64-arm64.so/libggml-base.so deps/linux-arm64/libggml-base.so
+ cp artifacts/ggml-cpu-bin-linux-aarch64-arm64.so/libggml-cpu.so deps/linux-arm64/libggml-cpu.so
+ cp artifacts/llama-bin-linux-aarch64-arm64.so/libllama.so deps/linux-arm64/libllama.so
+ cp artifacts/llava-bin-linux-aarch64-arm64.so/libllava_shared.so deps/linux-arm64/libllava_shared.so
+
# Musl
cp artifacts/ggml-bin-musl-noavx-x64.so/libggml.so deps/musl-noavx/libggml.so
cp artifacts/ggml-base-bin-musl-noavx-x64.so/libggml-base.so deps/musl-noavx/libggml-base.so
@@ -703,17 +729,17 @@ jobs:
cp artifacts/llava-bin-osx-x64-rosetta2.dylib/libllava_shared.dylib deps/osx-x64-rosetta2/libllava_shared.dylib
# Android
- #cp artifacts/ggml-bin-android-arm64-v8a.so/libggml.so deps/android-arm64-v8a/libggml.so
- #cp artifacts/llama-bin-android-arm64-v8a.so/libllama.so deps/android-arm64-v8a/libllama.so
- #cp artifacts/llava-bin-android-arm64-v8a.so/libllava_shared.so deps/android-arm64-v8a/libllava_shared.so
-
- #cp artifacts/ggml-bin-android-x86.so/libggml.so deps/android-x86/libggml.so
- #cp artifacts/llama-bin-android-x86.so/libllama.so deps/android-x86/libllama.so
- #cp artifacts/llava-bin-android-x86.so/libllava_shared.so deps/android-x86/libllava_shared.so
-
- #cp artifacts/ggml-bin-android-x86_64.so/libggml.so deps/android-x86_64/libggml.so
- #cp artifacts/llama-bin-android-x86_64.so/libllama.so deps/android-x86_64/libllama.so
- #cp artifacts/llava-bin-android-x86_64.so/libllava_shared.so deps/android-x86_64/libllava_shared.so
+ cp artifacts/ggml-bin-android-arm64-v8a.so/libggml.so deps/android-arm64-v8a/libggml.so
+ cp artifacts/ggml-base-bin-android-arm64-v8a.so/libggml-base.so deps/android-arm64-v8a/libggml-base.so
+ cp artifacts/ggml-cpu-bin-android-arm64-v8a.so/libggml-cpu.so deps/android-arm64-v8a/libggml-cpu.so
+ cp artifacts/llama-bin-android-arm64-v8a.so/libllama.so deps/android-arm64-v8a/libllama.so
+ cp artifacts/llava-bin-android-arm64-v8a.so/libllava_shared.so deps/android-arm64-v8a/libllava_shared.so
+
+ cp artifacts/ggml-bin-android-x86_64.so/libggml.so deps/android-x86_64/libggml.so
+ cp artifacts/ggml-base-bin-android-x86_64.so/libggml-base.so deps/android-x86_64/libggml-base.so
+ cp artifacts/ggml-cpu-bin-android-x86_64.so/libggml-cpu.so deps/android-x86_64/libggml-cpu.so
+ cp artifacts/llama-bin-android-x86_64.so/libllama.so deps/android-x86_64/libllama.so
+ cp artifacts/llava-bin-android-x86_64.so/libllava_shared.so deps/android-x86_64/libllava_shared.so
# Windows CUDA
cp artifacts/ggml-bin-win-cublas-cu11.7.1-x64.dll/ggml.dll deps/cu11.7.1/ggml.dll
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 8aa198549..a5e6eb0d4 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -38,6 +38,15 @@ jobs:
with:
dotnet-version: |
8.0.x
+ - name: Install Mobile Workloads
+ if: ${{ contains(runner.os, 'windows') }}
+ run: |
+ dotnet workload install android --ignore-failed-sources
+ dotnet workload install maui --ignore-failed-sources
+ - name: Remove Mobile Project
+ if: ${{ !contains(runner.os, 'windows') }}
+ run: |
+ dotnet sln LLamaSharp.sln remove Llama.Mobile/Llama.Mobile.csproj
- name: Cache Packages
uses: actions/cache@v4
with:
diff --git a/.gitignore b/.gitignore
index 056ba6163..206b0dac1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -337,7 +337,6 @@ test/TensorFlowNET.Examples/mnist
# training model resources
.resources
/redist
-*.xml
*.xsd
# docs
diff --git a/LLama/LLamaSharp.Runtime.targets b/LLama/LLamaSharp.Runtime.targets
index 22a3e04e1..0f67303dc 100644
--- a/LLama/LLamaSharp.Runtime.targets
+++ b/LLama/LLamaSharp.Runtime.targets
@@ -202,6 +202,28 @@
+
+ PreserveNewest
+ runtimes/linux-arm64/native/libllama.so
+
+
+ PreserveNewest
+ runtimes/linux-arm64/native/libggml.so
+
+
+ PreserveNewest
+ runtimes/linux-arm64/native/libggml-base.so
+
+
+ PreserveNewest
+ runtimes/linux-arm64/native/libggml-cpu.so
+
+
+ PreserveNewest
+ runtimes/linux-arm64/native/libllava_shared.so
+
+
+
PreserveNewest
runtimes/linux-x64/native/cuda11/libllama.so
@@ -466,4 +488,94 @@
runtimes/linux-x64/native/vulkan/libllava_shared.so
+
+
+
+
+ runtimes/android-x86/native/libllama.so
+ x86
+
+
+ runtimes/android-x86/native/libggml.so
+ x86
+
+
+ runtimes/android-x86/native/libggml-base.so
+ x86
+
+
+ runtimes/android-x86/native/libggml-cpu.so
+ x86
+
+
+ runtimes/android-x86/native/libllava_shared.so
+ x86
+
+
+
+
+
+ lib/x86_64/libllama.so
+ x86_64
+
+
+ lib/x86_64/libggml.so
+ x86_64
+
+
+ lib/x86_64/libggml-base.so
+ x86_64
+
+
+ lib/x86_64/libggml-cpu.so
+ x86_64
+
+
+ lib/x86_64/libllava_shared.so
+ x86_64
+
+
+
+
+
+ lib/arm64-v8a/libllama.so
+ arm64-v8a
+
+
+ lib/arm64-v8a/libggml.so
+ arm64-v8a
+
+
+ lib/arm64-v8a/libggml-base.so
+ arm64-v8a
+
+
+ lib/arm64-v8a/libggml-cpu.so
+ arm64-v8a
+
+
+ lib/arm64-v8a/libllava_shared.so
+ arm64-v8a
+
+
+
+
\ No newline at end of file
diff --git a/LLama/LLamaSharp.csproj b/LLama/LLamaSharp.csproj
index 0ffecd15c..b6bde83fd 100644
--- a/LLama/LLamaSharp.csproj
+++ b/LLama/LLamaSharp.csproj
@@ -57,7 +57,7 @@
- be7c3034108473be
+ ceda28ef8e310a8de
diff --git a/LLama/Native/LLamaModelParams.cs b/LLama/Native/LLamaModelParams.cs
index 5159226fd..562896d7b 100644
--- a/LLama/Native/LLamaModelParams.cs
+++ b/LLama/Native/LLamaModelParams.cs
@@ -13,6 +13,11 @@ public unsafe struct LLamaModelParams
/// todo: add support for llama_model_params.devices
///
private IntPtr devices;
+
+ // NULL-terminated list of buffer types to use for tensors that match a pattern
+ // actual type: llama_model_tensor_buft_override*
+ // todo: add support for tensor_buft_overrides
+ private IntPtr tensor_buft_overrides;
///
/// // number of layers to store in VRAM
diff --git a/LLama/Native/LLamaModelQuantizeParams.cs b/LLama/Native/LLamaModelQuantizeParams.cs
index d11f4882e..d31b1bbc8 100644
--- a/LLama/Native/LLamaModelQuantizeParams.cs
+++ b/LLama/Native/LLamaModelQuantizeParams.cs
@@ -89,6 +89,11 @@ public bool keep_split
///
public IntPtr kv_overrides;
+ ///
+ /// pointer to vector containing tensor types
+ ///
+ public IntPtr tensor_types;
+
///
/// Create a LLamaModelQuantizeParams with default values
///
diff --git a/LLama/Native/LLamaVocabPreType.cs b/LLama/Native/LLamaVocabPreType.cs
index 384ba0391..48ab5585b 100644
--- a/LLama/Native/LLamaVocabPreType.cs
+++ b/LLama/Native/LLamaVocabPreType.cs
@@ -38,5 +38,10 @@ internal enum LLamaVocabPreType
MINERVA = 27,
DEEPSEEK3_LLM = 28,
GPT4O = 29,
+ SUPERBPE = 30,
+ TRILLION = 31,
+ BAILINGMOE = 32,
+ LLAMA4 = 33,
+ PIXTRAL = 34,
}
// ReSharper restore InconsistentNaming
\ No newline at end of file
diff --git a/LLama/Native/Load/NativeLibraryUtils.cs b/LLama/Native/Load/NativeLibraryUtils.cs
index b0e8a792a..9f6457cd1 100644
--- a/LLama/Native/Load/NativeLibraryUtils.cs
+++ b/LLama/Native/Load/NativeLibraryUtils.cs
@@ -88,19 +88,28 @@ internal static IntPtr TryLoadLibrary(NativeLibraryConfig config, out INativeLib
// On other platforms (Windows, Linux), we need to load the CPU backend from the specified AVX level directory
// We are using the AVX level supplied by NativeLibraryConfig, which automatically detects the highest supported AVX level for us
- // ggml-cpu
- dependencyPaths.Add(Path.Combine(
- $"runtimes/{os}/native/{NativeLibraryConfig.AvxLevelToString(library.Metadata.AvxLevel)}",
- $"{libPrefix}ggml-cpu{ext}"
- ));
-
- // ggml-cuda
- if (library.Metadata.UseCuda)
- dependencyPaths.Add(Path.Combine(currentRuntimeDirectory, $"{libPrefix}ggml-cuda{ext}"));
-
- // ggml-vulkan
- if (library.Metadata.UseVulkan)
- dependencyPaths.Add(Path.Combine(currentRuntimeDirectory, $"{libPrefix}ggml-vulkan{ext}"));
+ if (os == "linux-arm64"){
+ dependencyPaths.Add(Path.Combine(
+ $"runtimes/{os}/native",
+ $"{libPrefix}ggml-cpu{ext}"
+ ));
+ }
+ else{
+ // ggml-cpu
+ dependencyPaths.Add(Path.Combine(
+ $"runtimes/{os}/native/{NativeLibraryConfig.AvxLevelToString(library.Metadata.AvxLevel)}",
+ $"{libPrefix}ggml-cpu{ext}"
+ ));
+
+ // ggml-cuda
+ if (library.Metadata.UseCuda)
+ dependencyPaths.Add(Path.Combine(currentRuntimeDirectory, $"{libPrefix}ggml-cuda{ext}"));
+
+ // ggml-vulkan
+ if (library.Metadata.UseVulkan)
+ dependencyPaths.Add(Path.Combine(currentRuntimeDirectory, $"{libPrefix}ggml-vulkan{ext}"));
+ }
+
}
}
@@ -218,6 +227,13 @@ public static void GetPlatformPathParts(OSPlatform platform, out string os, out
if (platform == OSPlatform.Linux)
{
+ if(System.Runtime.Intrinsics.Arm.ArmBase.Arm64.IsSupported){ // NOTE(review): this runs before the Alpine/musl check below, so musl-based arm64 distros will resolve glibc linux-arm64 binaries — confirm intended
+ // linux arm64
+ os = "linux-arm64";
+ fileExtension = ".so";
+ libPrefix = "lib";
+ return;
+ }
if(RuntimeInformation.RuntimeIdentifier.ToLower().StartsWith("alpine"))
{
// alpine linux distro
diff --git a/LLama/Native/Load/NativeLibraryWithAvx.cs b/LLama/Native/Load/NativeLibraryWithAvx.cs
index 932c49866..e6cbd86f3 100644
--- a/LLama/Native/Load/NativeLibraryWithAvx.cs
+++ b/LLama/Native/Load/NativeLibraryWithAvx.cs
@@ -50,11 +50,17 @@ public IEnumerable Prepare(SystemInfo systemInfo, NativeLogConfig.LLamaL
private string? GetAvxPath(SystemInfo systemInfo, AvxLevel avxLevel, NativeLogConfig.LLamaLogCallback? logCallback)
{
NativeLibraryUtils.GetPlatformPathParts(systemInfo.OSPlatform, out var os, out var fileExtension, out var libPrefix);
- var avxStr = NativeLibraryConfig.AvxLevelToString(avxLevel);
- if (!string.IsNullOrEmpty(avxStr))
- avxStr += "/";
- var relativePath = $"runtimes/{os}/native/{avxStr}{libPrefix}{_libraryName.GetLibraryName()}{fileExtension}";
- return relativePath;
+ if (os != "linux-arm64"){
+ var avxStr = NativeLibraryConfig.AvxLevelToString(avxLevel);
+ if (!string.IsNullOrEmpty(avxStr))
+ avxStr += "/";
+ var relativePath = $"runtimes/{os}/native/{avxStr}{libPrefix}{_libraryName.GetLibraryName()}{fileExtension}";
+ return relativePath;
+ } else {
+ var relativePath = $"runtimes/{os}/native/{libPrefix}{_libraryName.GetLibraryName()}{fileExtension}";
+ return relativePath;
+ }
+
}
}
#endif
diff --git a/LLama/Native/NativeApi.Load.cs b/LLama/Native/NativeApi.Load.cs
index 5ad30d032..2d5be063f 100644
--- a/LLama/Native/NativeApi.Load.cs
+++ b/LLama/Native/NativeApi.Load.cs
@@ -53,6 +53,12 @@ private static void SetDllImportResolver()
// NativeLibrary is not available on older runtimes. We'll have to depend on
// the normal runtime dll resolution there.
#if NET5_0_OR_GREATER
+ if (OperatingSystem.IsAndroid())
+ {
+ // Android doesn't support DllImportResolver, so we have to rely on the default search path
+ return;
+ }
+
NativeLibrary.SetDllImportResolver(typeof(NativeApi).Assembly, (name, _, _) =>
{
if (name == "llama")
diff --git a/LLama/Native/SafeLLamaContextHandle.cs b/LLama/Native/SafeLLamaContextHandle.cs
index faa390f76..467dd98e7 100644
--- a/LLama/Native/SafeLLamaContextHandle.cs
+++ b/LLama/Native/SafeLLamaContextHandle.cs
@@ -389,6 +389,15 @@ static SafeLLamaContextHandle()
[DllImport(NativeApi.libraryName, CallingConvention = CallingConvention.Cdecl)]
private static extern LLamaKvCacheNative llama_get_kv_self(SafeLLamaContextHandle ctx);
+
+ ///
+ /// Set whether the model is in warmup mode or not
+ /// If true, all model tensors are activated during llama_decode() to load and cache their weights.
+ ///
+ ///
+ ///
+ [DllImport(NativeApi.libraryName, CallingConvention = CallingConvention.Cdecl)]
+ private static extern void llama_set_warmup(SafeLLamaContextHandle ctx, [MarshalAs(UnmanagedType.U1)] bool warmup);
#endregion
#region LoRA
diff --git a/LLama/Native/SafeLLamaSamplerHandle.cs b/LLama/Native/SafeLLamaSamplerHandle.cs
index 8d6cd3015..bad1a1974 100644
--- a/LLama/Native/SafeLLamaSamplerHandle.cs
+++ b/LLama/Native/SafeLLamaSamplerHandle.cs
@@ -270,6 +270,7 @@ public void AddMirostat2Sampler(uint seed, float tau, float eta)
///
/// Top-K sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
///
+ /// Setting k <= 0 makes this a noop
///
public void AddTopK(int k)
{
@@ -408,20 +409,36 @@ public void AddFillInMiddleInfill(SafeLlamaModelHandle model)
}
///
- /// Create a sampler which makes tokens impossible unless they match the grammar
+ /// Create a sampler which makes tokens impossible unless they match the grammar.
///
- ///
+ /// The model that this grammar will be used with
///
/// Root rule of the grammar
///
public void AddGrammar(SafeLlamaModelHandle model, string grammar, string root)
+ {
+ AddGrammar(model.Vocab, grammar, root);
+ }
+
+ ///
+ /// Create a sampler which makes tokens impossible unless they match the grammar.
+ ///
+ /// The vocabulary that this grammar will be used with
+ ///
+ /// Root rule of the grammar
+ ///
+ public void AddGrammar(SafeLlamaModelHandle.Vocabulary vocab, string grammar, string root)
{
unsafe
{
- llama_sampler_chain_add(this, llama_sampler_init_grammar(model.Vocab.VocabNative, grammar, root));
+ llama_sampler_chain_add(this, llama_sampler_init_grammar(vocab.VocabNative, grammar, root));
}
// ReSharper disable InconsistentNaming
+ // @details Initializes a GBNF grammar, see grammars/README.md for details.
+ // @param vocab The vocabulary that this grammar will be used with.
+ // @param grammar_str The production rules for the grammar, encoded as a string. Returns an empty grammar if empty. Returns NULL if parsing of grammar_str fails.
+ // @param grammar_root The name of the start symbol for the grammar.
[DllImport(NativeApi.libraryName, CallingConvention = CallingConvention.Cdecl)]
static extern unsafe IntPtr llama_sampler_init_grammar(LLamaVocabNative* model, string grammar_str, string grammar_root);
// ReSharper restore InconsistentNaming
diff --git a/LLama/runtimes/build/LLamaSharp.Backend.Cpu.Android.nuspec b/LLama/runtimes/build/LLamaSharp.Backend.Cpu.Android.nuspec
new file mode 100644
index 000000000..0d45b1492
--- /dev/null
+++ b/LLama/runtimes/build/LLamaSharp.Backend.Cpu.Android.nuspec
@@ -0,0 +1,41 @@
+
+
+
+ LLamaSharp.Backend.Cpu.Android
+ $version$
+ LLamaSharp.Backend.Cpu.Android, the backend for LLamaSharp
+ llama.cpp Authors
+ false
+ MIT
+ icon512.png
+ https://github.com/SciSharp/LLamaSharp
+ LLamaSharp.Backend.Cpu.Android is a backend for LLamaSharp to use with Android Cpu only.
+
+ Copyright 2023 The llama.cpp Authors. All rights reserved.
+ LLamaSharp LLama LLM GPT AI ChatBot SciSharp
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/LLama/runtimes/build/LLamaSharp.Backend.Cpu.nuspec b/LLama/runtimes/build/LLamaSharp.Backend.Cpu.nuspec
index 7c69534da..aeef403eb 100644
--- a/LLama/runtimes/build/LLamaSharp.Backend.Cpu.nuspec
+++ b/LLama/runtimes/build/LLamaSharp.Backend.Cpu.nuspec
@@ -1,46 +1,46 @@
-
- LLamaSharp.Backend.Cpu
- $version$
- LLamaSharp.Backend.Cpu, the backend for LLamaSharp
- llama.cpp Authors
- false
- MIT
- icon512.png
- https://github.com/SciSharp/LLamaSharp
- LLamaSharp.Backend.Cpu is a backend for LLamaSharp to use with Cpu only.
-
- Copyright 2023 The llama.cpp Authors. All rights reserved.
- LLamaSharp LLama LLM GPT AI ChatBot SciSharp
-
+
+ LLamaSharp.Backend.Cpu
+ $version$
+ LLamaSharp.Backend.Cpu, the backend for LLamaSharp
+ llama.cpp Authors
+ false
+ MIT
+ icon512.png
+ https://github.com/SciSharp/LLamaSharp
+ LLamaSharp.Backend.Cpu is a backend for LLamaSharp to use with Cpu only.
+
+ Copyright 2023 The llama.cpp Authors. All rights reserved.
+ LLamaSharp LLama LLM GPT AI ChatBot SciSharp
+
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -66,7 +66,13 @@
-
+
+
+
+
+
+
+
@@ -97,22 +103,22 @@
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/LLama/runtimes/build/LLamaSharpBackend.props b/LLama/runtimes/build/LLamaSharpBackend.props
index 422969d88..006b4dabc 100644
--- a/LLama/runtimes/build/LLamaSharpBackend.props
+++ b/LLama/runtimes/build/LLamaSharpBackend.props
@@ -14,4 +14,93 @@
+
+
+
+ runtimes\android-x86\native\libllama.so
+ x86
+
+
+ runtimes\android-x86\native\libggml.so
+ x86
+
+
+ runtimes\android-x86\native\libggml-base.so
+ x86
+
+
+ runtimes\android-x86\native\libggml-cpu.so
+ x86
+
+
+ runtimes\android-x86\native\libllava_shared.so
+ x86
+
+
+
+
+
+ lib\x86_64\libllama.so
+ x86_64
+
+
+ lib\x86_64\libggml.so
+ x86_64
+
+
+ lib\x86_64\libggml-base.so
+ x86_64
+
+
+ lib\x86_64\libggml-cpu.so
+ x86_64
+
+
+ lib\x86_64\libllava_shared.so
+ x86_64
+
+
+
+
+
+ lib\arm64-v8a\libllama.so
+ arm64-v8a
+
+
+ lib\arm64-v8a\libggml.so
+ arm64-v8a
+
+
+ lib\arm64-v8a\libggml-base.so
+ arm64-v8a
+
+
+ lib\arm64-v8a\libggml-cpu.so
+ arm64-v8a
+
+
+ lib\arm64-v8a\libllava_shared.so
+ arm64-v8a
+
+
+
+
diff --git a/Llama.Mobile/App.xaml b/Llama.Mobile/App.xaml
new file mode 100644
index 000000000..e5b403011
--- /dev/null
+++ b/Llama.Mobile/App.xaml
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Llama.Mobile/App.xaml.cs b/Llama.Mobile/App.xaml.cs
new file mode 100644
index 000000000..c2db0b0b9
--- /dev/null
+++ b/Llama.Mobile/App.xaml.cs
@@ -0,0 +1,12 @@
+namespace Llama.Mobile
+{
+ public partial class App : Application
+ {
+ public App()
+ {
+ InitializeComponent();
+
+ MainPage = new AppShell();
+ }
+ }
+}
diff --git a/Llama.Mobile/AppShell.xaml b/Llama.Mobile/AppShell.xaml
new file mode 100644
index 000000000..65ae2f591
--- /dev/null
+++ b/Llama.Mobile/AppShell.xaml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
diff --git a/Llama.Mobile/AppShell.xaml.cs b/Llama.Mobile/AppShell.xaml.cs
new file mode 100644
index 000000000..33f40ba5c
--- /dev/null
+++ b/Llama.Mobile/AppShell.xaml.cs
@@ -0,0 +1,10 @@
+namespace Llama.Mobile
+{
+ public partial class AppShell : Shell
+ {
+ public AppShell()
+ {
+ InitializeComponent();
+ }
+ }
+}
diff --git a/Llama.Mobile/Llama.Mobile.csproj b/Llama.Mobile/Llama.Mobile.csproj
new file mode 100644
index 000000000..a51a3eb0f
--- /dev/null
+++ b/Llama.Mobile/Llama.Mobile.csproj
@@ -0,0 +1,82 @@
+
+
+
+
+ true
+ false
+
+
+
+
+
+
+
+
+ net8.0-android
+
+
+
+
+
+
+
+
+
+ Exe
+ Llama.Mobile
+ true
+ true
+ enable
+ enable
+
+
+ Llama.Mobile
+
+
+ com.llama.mobile
+
+
+ 1.0
+ 1
+
+ 11.0
+ 13.1
+ 21.0
+ 10.0.17763.0
+ 10.0.17763.0
+ 6.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Llama.Mobile/MainPage.xaml b/Llama.Mobile/MainPage.xaml
new file mode 100644
index 000000000..5bd8e7e94
--- /dev/null
+++ b/Llama.Mobile/MainPage.xaml
@@ -0,0 +1,14 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/MainPage.xaml.cs b/Llama.Mobile/MainPage.xaml.cs
new file mode 100644
index 000000000..8c9cb310c
--- /dev/null
+++ b/Llama.Mobile/MainPage.xaml.cs
@@ -0,0 +1,16 @@
+namespace Llama.Mobile;
+
+using LLama.Native;
+
+public partial class MainPage : ContentPage
+{
+ public MainPage()
+ {
+ InitializeComponent();
+
+ //Load the native library
+ NativeApi.llama_empty_call();
+
+ label1.Text = "llama.cpp loaded successfully";
+ }
+}
diff --git a/Llama.Mobile/MauiProgram.cs b/Llama.Mobile/MauiProgram.cs
new file mode 100644
index 000000000..fe17dcd27
--- /dev/null
+++ b/Llama.Mobile/MauiProgram.cs
@@ -0,0 +1,25 @@
+using Microsoft.Extensions.Logging;
+
+namespace Llama.Mobile
+{
+ public static class MauiProgram
+ {
+ public static MauiApp CreateMauiApp()
+ {
+ var builder = MauiApp.CreateBuilder();
+ builder
+ .UseMauiApp()
+ .ConfigureFonts(fonts =>
+ {
+ fonts.AddFont("OpenSans-Regular.ttf", "OpenSansRegular");
+ fonts.AddFont("OpenSans-Semibold.ttf", "OpenSansSemibold");
+ });
+
+#if DEBUG
+ builder.Logging.AddDebug();
+#endif
+
+ return builder.Build();
+ }
+ }
+}
diff --git a/Llama.Mobile/Platforms/Android/AndroidManifest.xml b/Llama.Mobile/Platforms/Android/AndroidManifest.xml
new file mode 100644
index 000000000..e9937ad77
--- /dev/null
+++ b/Llama.Mobile/Platforms/Android/AndroidManifest.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/Platforms/Android/MainActivity.cs b/Llama.Mobile/Platforms/Android/MainActivity.cs
new file mode 100644
index 000000000..8a0d5c68a
--- /dev/null
+++ b/Llama.Mobile/Platforms/Android/MainActivity.cs
@@ -0,0 +1,11 @@
+using Android.App;
+using Android.Content.PM;
+using Android.OS;
+
+namespace Llama.Mobile
+{
+ [Activity(Theme = "@style/Maui.SplashTheme", MainLauncher = true, LaunchMode = LaunchMode.SingleTop, ConfigurationChanges = ConfigChanges.ScreenSize | ConfigChanges.Orientation | ConfigChanges.UiMode | ConfigChanges.ScreenLayout | ConfigChanges.SmallestScreenSize | ConfigChanges.Density)]
+ public class MainActivity : MauiAppCompatActivity
+ {
+ }
+}
diff --git a/Llama.Mobile/Platforms/Android/MainApplication.cs b/Llama.Mobile/Platforms/Android/MainApplication.cs
new file mode 100644
index 000000000..9bf5331af
--- /dev/null
+++ b/Llama.Mobile/Platforms/Android/MainApplication.cs
@@ -0,0 +1,16 @@
+using Android.App;
+using Android.Runtime;
+
+namespace Llama.Mobile
+{
+ [Application]
+ public class MainApplication : MauiApplication
+ {
+ public MainApplication(IntPtr handle, JniHandleOwnership ownership)
+ : base(handle, ownership)
+ {
+ }
+
+ protected override MauiApp CreateMauiApp() => MauiProgram.CreateMauiApp();
+ }
+}
diff --git a/Llama.Mobile/Platforms/Android/Resources/values/colors.xml b/Llama.Mobile/Platforms/Android/Resources/values/colors.xml
new file mode 100644
index 000000000..c04d7492a
--- /dev/null
+++ b/Llama.Mobile/Platforms/Android/Resources/values/colors.xml
@@ -0,0 +1,6 @@
+
+
+ #512BD4
+ #2B0B98
+ #2B0B98
+
\ No newline at end of file
diff --git a/Llama.Mobile/Platforms/MacCatalyst/AppDelegate.cs b/Llama.Mobile/Platforms/MacCatalyst/AppDelegate.cs
new file mode 100644
index 000000000..5af0d2d6f
--- /dev/null
+++ b/Llama.Mobile/Platforms/MacCatalyst/AppDelegate.cs
@@ -0,0 +1,10 @@
+using Foundation;
+
+namespace Llama.Mobile
+{
+ [Register("AppDelegate")]
+ public class AppDelegate : MauiUIApplicationDelegate
+ {
+ protected override MauiApp CreateMauiApp() => MauiProgram.CreateMauiApp();
+ }
+}
diff --git a/Llama.Mobile/Platforms/MacCatalyst/Entitlements.plist b/Llama.Mobile/Platforms/MacCatalyst/Entitlements.plist
new file mode 100644
index 000000000..de4adc94a
--- /dev/null
+++ b/Llama.Mobile/Platforms/MacCatalyst/Entitlements.plist
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+ com.apple.security.app-sandbox
+
+
+ com.apple.security.network.client
+
+
+
+
diff --git a/Llama.Mobile/Platforms/MacCatalyst/Info.plist b/Llama.Mobile/Platforms/MacCatalyst/Info.plist
new file mode 100644
index 000000000..726897715
--- /dev/null
+++ b/Llama.Mobile/Platforms/MacCatalyst/Info.plist
@@ -0,0 +1,38 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+ UIDeviceFamily
+
+ 2
+
+ UIRequiredDeviceCapabilities
+
+ arm64
+
+ UISupportedInterfaceOrientations
+
+ UIInterfaceOrientationPortrait
+ UIInterfaceOrientationLandscapeLeft
+ UIInterfaceOrientationLandscapeRight
+
+ UISupportedInterfaceOrientations~ipad
+
+ UIInterfaceOrientationPortrait
+ UIInterfaceOrientationPortraitUpsideDown
+ UIInterfaceOrientationLandscapeLeft
+ UIInterfaceOrientationLandscapeRight
+
+ XSAppIconAssets
+ Assets.xcassets/appicon.appiconset
+
+
diff --git a/Llama.Mobile/Platforms/MacCatalyst/Program.cs b/Llama.Mobile/Platforms/MacCatalyst/Program.cs
new file mode 100644
index 000000000..d3bd693a9
--- /dev/null
+++ b/Llama.Mobile/Platforms/MacCatalyst/Program.cs
@@ -0,0 +1,16 @@
+using ObjCRuntime;
+using UIKit;
+
+namespace Llama.Mobile
+{
+ public class Program
+ {
+ // This is the main entry point of the application.
+ static void Main(string[] args)
+ {
+ // if you want to use a different Application Delegate class from "AppDelegate"
+ // you can specify it here.
+ UIApplication.Main(args, null, typeof(AppDelegate));
+ }
+ }
+}
diff --git a/Llama.Mobile/Platforms/Tizen/Main.cs b/Llama.Mobile/Platforms/Tizen/Main.cs
new file mode 100644
index 000000000..030e40e44
--- /dev/null
+++ b/Llama.Mobile/Platforms/Tizen/Main.cs
@@ -0,0 +1,17 @@
+using Microsoft.Maui;
+using Microsoft.Maui.Hosting;
+using System;
+
+namespace Llama.Mobile
+{
+ internal class Program : MauiApplication
+ {
+ protected override MauiApp CreateMauiApp() => MauiProgram.CreateMauiApp();
+
+ static void Main(string[] args)
+ {
+ var app = new Program();
+ app.Run(args);
+ }
+ }
+}
diff --git a/Llama.Mobile/Platforms/Tizen/tizen-manifest.xml b/Llama.Mobile/Platforms/Tizen/tizen-manifest.xml
new file mode 100644
index 000000000..58d0846a5
--- /dev/null
+++ b/Llama.Mobile/Platforms/Tizen/tizen-manifest.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+ maui-appicon-placeholder
+
+
+
+
+ http://tizen.org/privilege/internet
+
+
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/Platforms/Windows/App.xaml b/Llama.Mobile/Platforms/Windows/App.xaml
new file mode 100644
index 000000000..51d994306
--- /dev/null
+++ b/Llama.Mobile/Platforms/Windows/App.xaml
@@ -0,0 +1,8 @@
+
+
+
diff --git a/Llama.Mobile/Platforms/Windows/App.xaml.cs b/Llama.Mobile/Platforms/Windows/App.xaml.cs
new file mode 100644
index 000000000..17804342a
--- /dev/null
+++ b/Llama.Mobile/Platforms/Windows/App.xaml.cs
@@ -0,0 +1,25 @@
+using Microsoft.UI.Xaml;
+
+// To learn more about WinUI, the WinUI project structure,
+// and more about our project templates, see: http://aka.ms/winui-project-info.
+
+namespace Llama.Mobile.WinUI
+{
+ ///
+ /// Provides application-specific behavior to supplement the default Application class.
+ ///
+ public partial class App : MauiWinUIApplication
+ {
+ ///
+ /// Initializes the singleton application object. This is the first line of authored code
+ /// executed, and as such is the logical equivalent of main() or WinMain().
+ ///
+ public App()
+ {
+ this.InitializeComponent();
+ }
+
+ protected override MauiApp CreateMauiApp() => MauiProgram.CreateMauiApp();
+ }
+
+}
diff --git a/Llama.Mobile/Platforms/Windows/Package.appxmanifest b/Llama.Mobile/Platforms/Windows/Package.appxmanifest
new file mode 100644
index 000000000..eb72027fd
--- /dev/null
+++ b/Llama.Mobile/Platforms/Windows/Package.appxmanifest
@@ -0,0 +1,46 @@
+
+
+
+
+
+
+
+
+ $placeholder$
+ User Name
+ $placeholder$.png
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Llama.Mobile/Platforms/Windows/app.manifest b/Llama.Mobile/Platforms/Windows/app.manifest
new file mode 100644
index 000000000..9991c324d
--- /dev/null
+++ b/Llama.Mobile/Platforms/Windows/app.manifest
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+
+ true/PM
+ PerMonitorV2, PerMonitor
+
+
+
diff --git a/Llama.Mobile/Platforms/iOS/AppDelegate.cs b/Llama.Mobile/Platforms/iOS/AppDelegate.cs
new file mode 100644
index 000000000..5af0d2d6f
--- /dev/null
+++ b/Llama.Mobile/Platforms/iOS/AppDelegate.cs
@@ -0,0 +1,10 @@
+using Foundation;
+
+namespace Llama.Mobile
+{
+ [Register("AppDelegate")]
+ public class AppDelegate : MauiUIApplicationDelegate
+ {
+ protected override MauiApp CreateMauiApp() => MauiProgram.CreateMauiApp();
+ }
+}
diff --git a/Llama.Mobile/Platforms/iOS/Info.plist b/Llama.Mobile/Platforms/iOS/Info.plist
new file mode 100644
index 000000000..0004a4fde
--- /dev/null
+++ b/Llama.Mobile/Platforms/iOS/Info.plist
@@ -0,0 +1,32 @@
+
+
+
+
+ LSRequiresIPhoneOS
+
+ UIDeviceFamily
+
+ 1
+ 2
+
+ UIRequiredDeviceCapabilities
+
+ arm64
+
+ UISupportedInterfaceOrientations
+
+ UIInterfaceOrientationPortrait
+ UIInterfaceOrientationLandscapeLeft
+ UIInterfaceOrientationLandscapeRight
+
+ UISupportedInterfaceOrientations~ipad
+
+ UIInterfaceOrientationPortrait
+ UIInterfaceOrientationPortraitUpsideDown
+ UIInterfaceOrientationLandscapeLeft
+ UIInterfaceOrientationLandscapeRight
+
+ XSAppIconAssets
+ Assets.xcassets/appicon.appiconset
+
+
diff --git a/Llama.Mobile/Platforms/iOS/Program.cs b/Llama.Mobile/Platforms/iOS/Program.cs
new file mode 100644
index 000000000..d3bd693a9
--- /dev/null
+++ b/Llama.Mobile/Platforms/iOS/Program.cs
@@ -0,0 +1,16 @@
+using ObjCRuntime;
+using UIKit;
+
+namespace Llama.Mobile
+{
+ public class Program
+ {
+ // This is the main entry point of the application.
+ static void Main(string[] args)
+ {
+ // if you want to use a different Application Delegate class from "AppDelegate"
+ // you can specify it here.
+ UIApplication.Main(args, null, typeof(AppDelegate));
+ }
+ }
+}
diff --git a/Llama.Mobile/Platforms/iOS/Resources/PrivacyInfo.xcprivacy b/Llama.Mobile/Platforms/iOS/Resources/PrivacyInfo.xcprivacy
new file mode 100644
index 000000000..24ab3b433
--- /dev/null
+++ b/Llama.Mobile/Platforms/iOS/Resources/PrivacyInfo.xcprivacy
@@ -0,0 +1,51 @@
+
+
+
+
+
+ NSPrivacyAccessedAPITypes
+
+
+ NSPrivacyAccessedAPIType
+ NSPrivacyAccessedAPICategoryFileTimestamp
+ NSPrivacyAccessedAPITypeReasons
+
+ C617.1
+
+
+
+ NSPrivacyAccessedAPIType
+ NSPrivacyAccessedAPICategorySystemBootTime
+ NSPrivacyAccessedAPITypeReasons
+
+ 35F9.1
+
+
+
+ NSPrivacyAccessedAPIType
+ NSPrivacyAccessedAPICategoryDiskSpace
+ NSPrivacyAccessedAPITypeReasons
+
+ E174.1
+
+
+
+
+
+
diff --git a/Llama.Mobile/Resources/AppIcon/appicon.svg b/Llama.Mobile/Resources/AppIcon/appicon.svg
new file mode 100644
index 000000000..9d63b6513
--- /dev/null
+++ b/Llama.Mobile/Resources/AppIcon/appicon.svg
@@ -0,0 +1,4 @@
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/Resources/AppIcon/appiconfg.svg b/Llama.Mobile/Resources/AppIcon/appiconfg.svg
new file mode 100644
index 000000000..21dfb25f1
--- /dev/null
+++ b/Llama.Mobile/Resources/AppIcon/appiconfg.svg
@@ -0,0 +1,8 @@
+
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/Resources/Fonts/OpenSans-Regular.ttf b/Llama.Mobile/Resources/Fonts/OpenSans-Regular.ttf
new file mode 100644
index 000000000..ee3f28f4a
Binary files /dev/null and b/Llama.Mobile/Resources/Fonts/OpenSans-Regular.ttf differ
diff --git a/Llama.Mobile/Resources/Fonts/OpenSans-Semibold.ttf b/Llama.Mobile/Resources/Fonts/OpenSans-Semibold.ttf
new file mode 100644
index 000000000..bc81019ae
Binary files /dev/null and b/Llama.Mobile/Resources/Fonts/OpenSans-Semibold.ttf differ
diff --git a/Llama.Mobile/Resources/Images/dotnet_bot.png b/Llama.Mobile/Resources/Images/dotnet_bot.png
new file mode 100644
index 000000000..f93ce025a
Binary files /dev/null and b/Llama.Mobile/Resources/Images/dotnet_bot.png differ
diff --git a/Llama.Mobile/Resources/Raw/AboutAssets.txt b/Llama.Mobile/Resources/Raw/AboutAssets.txt
new file mode 100644
index 000000000..89dc758d6
--- /dev/null
+++ b/Llama.Mobile/Resources/Raw/AboutAssets.txt
@@ -0,0 +1,15 @@
+Any raw assets you want to be deployed with your application can be placed in
+this directory (and child directories). Deployment of the asset to your application
+is automatically handled by the following `MauiAsset` Build Action within your `.csproj`.
+
+
+
+These files will be deployed with your package and will be accessible using Essentials:
+
+ async Task LoadMauiAsset()
+ {
+ using var stream = await FileSystem.OpenAppPackageFileAsync("AboutAssets.txt");
+ using var reader = new StreamReader(stream);
+
+ var contents = reader.ReadToEnd();
+ }
diff --git a/Llama.Mobile/Resources/Splash/splash.svg b/Llama.Mobile/Resources/Splash/splash.svg
new file mode 100644
index 000000000..21dfb25f1
--- /dev/null
+++ b/Llama.Mobile/Resources/Splash/splash.svg
@@ -0,0 +1,8 @@
+
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/Resources/Styles/Colors.xaml b/Llama.Mobile/Resources/Styles/Colors.xaml
new file mode 100644
index 000000000..30307a5dd
--- /dev/null
+++ b/Llama.Mobile/Resources/Styles/Colors.xaml
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+ #512BD4
+ #ac99ea
+ #242424
+ #DFD8F7
+ #9880e5
+ #2B0B98
+
+ White
+ Black
+ #D600AA
+ #190649
+ #1f1f1f
+
+ #E1E1E1
+ #C8C8C8
+ #ACACAC
+ #919191
+ #6E6E6E
+ #404040
+ #212121
+ #141414
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Llama.Mobile/Resources/Styles/Styles.xaml b/Llama.Mobile/Resources/Styles/Styles.xaml
new file mode 100644
index 000000000..6641e3aed
--- /dev/null
+++ b/Llama.Mobile/Resources/Styles/Styles.xaml
@@ -0,0 +1,427 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+