Skip to content

Commit e1e2e0a

Browse files
authored
Minor fix on the script (#106)
1 parent 911ea9b commit e1e2e0a

File tree

5 files changed

+7
-8
lines changed

5 files changed

+7
-8
lines changed

scripts/gh_deploy_site.sh

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
#!/bin/bash
22
set -euxo pipefail
33

4-
scripts/build_site.sh web/global_config.json
4+
scripts/build_site.sh web/gh-page-config.json
55

66
git fetch
77
git checkout -B gh-pages origin/gh-pages

scripts/local_deploy_site.sh

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -6,27 +6,26 @@ MLC_LLM_HOME_SET="${MLC_LLM_HOME:-}"
66
if [ -z ${MLC_LLM_HOME_SET} ]; then
77
export MLC_LLM_HOME="${MLC_LLM_HOME:-mlc-llm}"
88
fi
9-
109
scripts/build_site.sh web/local-config.json
1110

1211
echo "symlink parameter location to site.."
1312

1413
if [ -d "$MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/params" ]; then
1514
rm -rf site/_site/dist/vicuna-v1-7b-q4f32_0
1615
mkdir -p site/_site/dist/vicuna-v1-7b-q4f32_0
17-
ln -s $MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/params site/_site/dist/vicuna-v1-7b-q4f32_0/params
16+
ln -s "$(cd $MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/params && pwd)" site/_site/dist/vicuna-v1-7b-q4f32_0/params
1817
cp -rf $MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/vicuna-v1-7b-q4f32_0-webgpu.wasm site/_site/dist/vicuna-v1-7b-q4f32_0/
1918
fi
2019
if [ -d "$MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params" ]; then
2120
rm -rf site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0
2221
mkdir -p site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0
23-
ln -s $MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params
22+
ln -s "$(cd $MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params && pwd)" site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params
2423
cp -rf $MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/RedPajama-INCITE-Chat-3B-v1-q4f32_0-webgpu.wasm site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/
2524
fi
2625
if [ -d "$MLC_LLM_HOME/dist/wizardlm-7b/params" ]; then
2726
rm -rf site/_site/dist/wizardlm-7b
2827
mkdir -p site/_site/dist/wizardlm-7b
29-
ln -s $MLC_LLM_HOME/dist/wizardlm-7b/params site/_site/dist/wizardlm-7b/params
28+
ln -s "$(cd $MLC_LLM_HOME/dist/wizardlm-7b/params && pwd)" site/_site/dist/wizardlm-7b/params
3029
cp -rf $MLC_LLM_HOME/dist/wizardlm-7b/wizardlm-7b-webgpu.wasm site/_site/dist/wizardlm-7b/
3130
fi
3231

site/index.md

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -37,7 +37,7 @@ If you have a Mac computer with Apple silicon, here are the instructions for you
3737

3838
## Chat Demo
3939

40-
The chat demo is based on [vicuna-7b-v1.1](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1) model. More model support are on the way.
40+
The chat demo is based on the [vicuna-7b-v1.1](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1) model and the [RedPajama-INCITE-Chat-3B-v1](https://huggingface.co/togethercomputer/RedPajama-INCITE-Chat-3B-v1) model. More model support is on the way.
4141

4242
{% include llm_chat.html %}
4343

web/llm_chat.js

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -187,7 +187,7 @@ class LLMChatPipeline {
187187
this.vm.getFunction("prefill")
188188
);
189189
this.decoding = this.tvm.detachFromCurrentScope(
190-
this.vm.getFunction("decoding")
190+
this.vm.getFunction("decode")
191191
);
192192
this.params = this.tvm.detachFromCurrentScope(
193193
this.tvm.getParamsFromCache("param", cacheMetadata.ParamSize)

0 commit comments

Comments
 (0)