FROM intel/oneapi-runtime:latest

# Tools needed to download and unpack the server binary and model weights
RUN apt-get update && apt-get install -y --no-install-recommends \
    aria2 \
    unzip \
    && rm -rf /var/lib/apt/lists/*

# Run as a non-root user
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user
WORKDIR $HOME/app

# Fetch the prebuilt llama.cpp server (b2690, AVX2 + Intel MKL build) and keep only the binary
RUN aria2c -c -x16 https://github.com/MZWNET/actions/releases/download/llama_cpp-b2690/llama-b2690-bin-linux-avx2-intel-mkl-x64.zip \
    && unzip llama-b2690-bin-linux-avx2-intel-mkl-x64.zip -d llama.cpp \
    && mv llama.cpp/server . \
    && chmod +x ./server \
    && rm -rf llama-b2690-bin-linux-avx2-intel-mkl-x64.zip llama.cpp

# Download the AquilaChat2-7B-16K Q8_0 GGUF model weights (URL quoted because of the query string)
RUN aria2c -c -x16 "https://huggingface.co/mzwing/AquilaChat2-7B-16K-GGUF/resolve/main/AquilaChat2-7B-16K.Q8_0.gguf?download=true" \
    -o AquilaChat2-7B-16K.Q8_0.gguf

ENV PATH=$HOME/app:$PATH

EXPOSE 8080

# Serve the model on 0.0.0.0:8080 with a 16K context, 2 threads, model alias, and an API key
CMD ["server", "-m", "./AquilaChat2-7B-16K.Q8_0.gguf", "-t", "2", "-c", "16384", "-a", "AquilaChat2-7B-16K", "--port", "8080", "--host", "0.0.0.0", "--api-key", "sk-1145141919810"]
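
# Usage sketch (not part of the build): one way to build and query this image.
# The image tag "aquilachat2-server" and the host port mapping are illustrative assumptions.
#
#   docker build -t aquilachat2-server .
#   docker run --rm -p 8080:8080 aquilachat2-server
#
# The llama.cpp server exposes an OpenAI-compatible chat endpoint; requests must carry the
# API key configured above via --api-key:
#
#   curl http://localhost:8080/v1/chat/completions \
#     -H "Authorization: Bearer sk-1145141919810" \
#     -H "Content-Type: application/json" \
#     -d '{"model": "AquilaChat2-7B-16K", "messages": [{"role": "user", "content": "Hello"}]}'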