Skip to content

Commit f86251f

Browse files
committed
Add LM Studio
1 parent e04d648 commit f86251f

File tree

6 files changed

+50
-4
lines changed

6 files changed

+50
-4
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ zsh-syntax-highlighting
1818
*.ab
1919
*.apk
2020
*.AppImage
21+
*.appimage
2122
*.bak
2223
*.bin
2324
*.BIN

backup/rsync_exclude_home.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
.gems
2828
.googleearth/Cache
2929
.java/.userPrefs/jetbrains
30+
.lmstudio/models
3031
.local/share/fish/generated_completions/
3132
.local/share/klipper
3233
.local/share/JetBrains

llm/install.sh

Lines changed: 27 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,12 @@
1-
#!/usr/bin/env sh
1+
#!/usr/bin/env bash
2+
set -eu
3+
4+
if [ "${EUID}" -eq 0 ]; then
5+
echo "This script should not be run as root."
6+
exit 1
7+
fi
8+
9+
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
210

311
# Nix is required for llama.cpp
412
if ! command -v nix &> /dev/null; then
@@ -11,13 +19,28 @@ if ! command -v ollama &> /dev/null; then
1119
curl -fsSL https://ollama.com/install.sh | sh
1220
fi
1321

14-
nix profile install nixpkgs#llama-cpp --extra-experimental-features nix-command --extra-experimental-features flakes
22+
if ! command -v llama-server &> /dev/null; then
23+
echo "Installing llama.cpp."
24+
nix profile install nixpkgs#llama-cpp --extra-experimental-features nix-command --extra-experimental-features flakes
25+
fi
26+
27+
# if [ ! -f "${SCRIPT_DIR}/LM-Studio-"* ]; then
28+
# echo "Downloading LM Studio."
29+
# # This does not pick up the name properly.
30+
# curl -JLO "https://lmstudio.ai/download/latest/linux/x64" --output-dir "${SCRIPT_DIR}"
31+
# chmod +x "${SCRIPT_DIR}/LM-Studio-"*
32+
# fi
33+
34+
echo "Creating symlink."
35+
ln -f -s "${SCRIPT_DIR}/lm-studio.desktop" "${HOME}/.local/share/applications/lm-studio.desktop"
36+
37+
ollama --version
1538

1639
which llama-server
1740
llama-server --version
1841

1942
which llama-cli
20-
llama-cli -hf Qwen/Qwen2.5-7B-Instruct-GGUF
43+
# llama-cli -hf Qwen/Qwen2.5-7B-Instruct-GGUF
2144

2245
llama-bench --list-devices
23-
llama-bench --model ${HOME}/.cache/llama.cpp/Qwen_Qwen2.5-7B-Instruct-GGUF_qwen2.5-7b-instruct-q2_k.gguf
46+
# llama-bench --model ${HOME}/.cache/llama.cpp/Qwen_Qwen2.5-7B-Instruct-GGUF_qwen2.5-7b-instruct-q2_k.gguf

llm/lm-studio.desktop

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
[Desktop Entry]
Type=Application
Name=LM Studio
# Desktop Entry launchers do NOT expand environment variables in Exec
# (freedesktop.org spec: only %-field codes are substituted), so a bare
# $HOME path would be executed literally and fail. Run through sh so the
# shell expands $HOME at launch time.
Exec=sh -c "$HOME/Git/linux-scripts/llm/lm-studio.sh"
Comment=Local AI on your computer
Categories=Development;IDE;
Terminal=false
StartupNotify=true

llm/lm-studio.sh

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Launch the LM Studio AppImage that sits next to this script.
set -eu

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" > /dev/null 2>&1 && pwd )"

# Resolve the AppImage via glob. With nullglob an unmatched pattern yields
# an empty array instead of the literal string, so we can fail with a clear
# message rather than "No such file or directory" on the raw pattern.
shopt -s nullglob
appimages=( "${SCRIPT_DIR}"/LM-Studio-*.appimage )
shopt -u nullglob

if [ "${#appimages[@]}" -eq 0 ]; then
  echo "No LM-Studio-*.appimage found in ${SCRIPT_DIR}." >&2
  exit 1
fi

# Use only the first match: the original bare-glob invocation would have
# passed any additional AppImages as arguments to the first one.
# NOTE(review): sandbox flags presumably work around the AppImage's
# unprivileged chrome-sandbox — confirm they are still required.
exec "${appimages[0]}" --disable-setuid-sandbox --no-sandbox

zsh/.zshrc

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -186,6 +186,7 @@ if [[ "${P10K_DELAYED_SETUP}" = true && -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k
186186
. "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
187187
fi
188188

189+
189190
# -----
190191
# Completion
191192
# -----
@@ -432,6 +433,12 @@ fi
432433
export NVM_DIR="$HOME/.nvm"
433434
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
434435

436+
# LM Studio
# Expose LM Studio's bundled CLI tools when the app is installed.
# The ":$PATH:" membership check keeps this idempotent, so re-sourcing
# .zshrc does not append duplicate PATH entries.
if [ -d "${HOME}/.lmstudio" ] && [[ ":${PATH}:" != *":${HOME}/.lmstudio/bin:"* ]]; then
  export PATH="${PATH}:${HOME}/.lmstudio/bin"
fi
440+
441+
435442
# -----
436443
# Additional repositories
437444
# -----

0 commit comments

Comments (0)