This is a test of the llama3.2-vision model.
I used Ollama for this, running the following commands:
ollama pull llama3.2-vision
python -m venv .venv
source .venv/bin/activate
pip install streamlit ollama
streamlit run app.py
flowchart LR
%% =========================
%% Direction: left-to-right
%% =========================
%% Groups (by color via classDefs at bottom)
%% browser = blue, mcp = gray, app = green, service = yellow, storage = red
%% Nodes
%% [..] = process, [(..)] = database/cylinder, [[..]] = subroutine
enduser[End User]
browser[Client Browser]
claude[Claude Desktop]
streamlit[Streamlit App]
data[(Marine Cadastre)]
cache[(Local Cache)]
ollama[Ollama Daemon]
model[[llama3.2-vision]]
ext[[Optional External APIs*]]
%% Edges
enduser -->|Gathers historical intel from| browser
enduser -->|Gather imagery information from| streamlit
browser -->|MCP| claude
claude -->|Requests data from| data
streamlit -->|read vessel metadata| data
streamlit -->|write results| cache
streamlit -->|vision inference| ollama
ollama -->|Load/Infer| model
ollama -->|predictions / text| streamlit
streamlit -->|optional queries*| ext
streamlit -->|render dashboards| browser
%% Class assignments (syntax: class <node[,node...]> <className>;)
class enduser,browser browser;
class claude mcp;
class streamlit app;
class ollama,model,ext service;
class data,cache storage;
%% Styles
classDef browser fill:#CFE2FF,stroke:#004085,color:#001933,stroke-width:1px;
classDef mcp fill:#E2E3E5,stroke:#383D41,color:#1d1f22,stroke-width:1px;
classDef app fill:#D4EDDA,stroke:#155724,color:#0d2b18,stroke-width:1px;
classDef service fill:#FFF3CD,stroke:#856404,color:#3d2d00,stroke-width:1px;
classDef storage fill:#F8D7DA,stroke:#721C24,color:#3a0c10,stroke-width:1px;
