
Add llm-vscode-inference-server

main
root 2023-10-20 19:45:21 -06:00
parent 4e2c862357
commit 4c7ecfd5b9
1 changed file with 18 additions and 0 deletions

@@ -540,3 +540,21 @@ pip install torch==1.13.1+cu116 --extra-index-url https://download.pytorch.org/w
pip install -r requirements.txt
python main.py --port 8080 --host 0.0.0.0 --pretrained "Phind/Phind-CodeLlama-34B-v2"
\end{minted}
\subsection{llm-vscode Inference Server}
Perhaps worth a try. Uses vLLM (vllm-project) as the inference backend.
\begin{minted}{sh}
mkdir -p ~/devel/wangcx18
cd ~/devel/wangcx18/
git clone --recursive https://github.com/wangcx18/llm-vscode-inference-server
cd llm-vscode-inference-server/
virtualenv venv
source venv/bin/activate
pip install -U pip setuptools wheel
# For CUDA 11.8 wheels, use the cu118 index instead of the default:
#pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118
pip install torch torchvision torchaudio
pip install -r requirements.txt
python api_server.py --host 0.0.0.0 --port 8080
\end{minted}
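Once the server is up, a quick smoke test from another shell confirms it answers completion requests. A minimal sketch, assuming the server exposes an HF Inference API-style endpoint at \texttt{/api/generate/}; the actual route and payload shape should be verified in \texttt{api\_server.py}:
\begin{minted}{sh}
# Hypothetical smoke test; the endpoint path and JSON fields are assumptions,
# check api_server.py for the real route before relying on them.
curl -s http://localhost:8080/api/generate/ \
  -H 'Content-Type: application/json' \
  -d '{"inputs": "def fibonacci(n):", "parameters": {"max_new_tokens": 32}}'
\end{minted}
Then point the llm-vscode extension at \texttt{http://<host>:8080} in its settings so completions come from this local server; the exact setting name depends on the extension version.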