Spaces:
Runtime error
Runtime error
Added mixtral, gpu support
Browse files
Dockerfile
CHANGED
@@ -1,31 +1,41 @@
|
|
1 |
-
ARG UBUNTU_VERSION=
|
|
|
|
|
|
|
2 |
|
3 |
-
FROM
|
4 |
|
5 |
-
|
6 |
-
|
|
|
|
|
|
|
|
|
7 |
|
8 |
-
WORKDIR /app
|
9 |
RUN git clone https://github.com/ggerganov/llama.cpp.git
|
10 |
|
11 |
-
WORKDIR /
|
12 |
-
|
|
|
|
|
13 |
|
14 |
-
|
15 |
-
|
|
|
|
|
16 |
|
17 |
-
FROM
|
18 |
|
19 |
WORKDIR /app
|
20 |
|
21 |
-
|
22 |
-
COPY --from=build /
|
23 |
COPY ./run.sh /app/run.sh
|
|
|
|
|
24 |
|
|
|
25 |
RUN chmod +x run.sh
|
26 |
|
27 |
-
|
28 |
-
|
29 |
-
EXPOSE 7860
|
30 |
-
|
31 |
CMD ./run.sh
|
|
|
1 |
+
ARG UBUNTU_VERSION=20.04
|
2 |
+
ARG CUDA_VERSION=12.3.1
|
3 |
+
ARG BASE_CUDA_DEV_CONTAINER=nvidia/cuda:${CUDA_VERSION}-devel-ubuntu${UBUNTU_VERSION}
|
4 |
+
ARG BASE_CUDA_RUN_CONTAINER=nvidia/cuda:${CUDA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}
|
5 |
|
6 |
+
FROM ${BASE_CUDA_DEV_CONTAINER} as build
|
7 |
|
8 |
+
ARG CUDA_DOCKER_ARCH=all
|
9 |
+
|
10 |
+
RUN apt-get update && apt-get upgrade -y && \
|
11 |
+
apt-get install -y git build-essential gcc wget
|
12 |
+
|
13 |
+
WORKDIR /build
|
14 |
|
|
|
15 |
RUN git clone https://github.com/ggerganov/llama.cpp.git
|
16 |
|
17 |
+
WORKDIR /build/llama.cpp
|
18 |
+
|
19 |
+
ENV CUDA_DOCKER_ARCH=${CUDA_DOCKER_ARCH}
|
20 |
+
ENV LLAMA_CUBLAS=1
|
21 |
|
22 |
+
RUN mkdir build && \
|
23 |
+
cd build && \
|
24 |
+
cmake .. -DLLAMA_CUBLAS=ON && \
|
25 |
+
cmake --build . --config Release
|
26 |
|
27 |
+
FROM ${BASE_CUDA_RUN_CONTAINER} as runtime
|
28 |
|
29 |
WORKDIR /app
|
30 |
|
31 |
+
# Copy the executable from the build stage
|
32 |
+
COPY --from=build /build/llama.cpp/build/bin/server /app
|
33 |
COPY ./run.sh /app/run.sh
|
34 |
+
WORKDIR /app
|
35 |
+
EXPOSE 7867
|
36 |
|
37 |
+
# Make the script executable
|
38 |
RUN chmod +x run.sh
|
39 |
|
40 |
+
# CMD to run your script
|
|
|
|
|
|
|
41 |
CMD ./run.sh
|
build.bat
CHANGED
@@ -1 +1 @@
|
|
1 |
-
docker build -t
|
|
|
1 |
+
REM Build the Docker image from the current directory, tagged "mixstral-api"
REM (tag name must match the image referenced in run.bat)
docker build -t mixstral-api .
|
run.bat
CHANGED
@@ -1 +1 @@
|
|
1 |
-
docker run --name
|
|
|
1 |
+
REM Run the container with GPU access (--gpus all is required: the image is a
REM CUDA runtime image and the server is built with cuBLAS, so without it the
REM container sees no GPU), mounting the local model cache at /models and
REM publishing the server port 7867 on the host. Requires the NVIDIA
REM Container Toolkit on the Docker host.
docker run --gpus all --name mixstral-api -v "C:\Work\ai-models\mixtral-cpp:/models" -p 7867:7867 mixstral-api
|
run.sh
CHANGED
@@ -1,2 +1,14 @@
|
|
1 |
#!/bin/bash
|
2 |
-
/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash
# Container entrypoint: downloads the Mixtral GGUF model into the mounted
# /models volume on first start (if absent), then launches the llama.cpp
# server on port 7867.
#
# Fail fast on any error: without this, a failed download previously left a
# partial file that the existence check mistook for a complete model on the
# next start.
set -euo pipefail

file_path="/models/mixtral-8x7b-instruct-v0.1.Q2_K.gguf"
url="https://huggingface.co/TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF/resolve/main/mixtral-8x7b-instruct-v0.1.Q2_K.gguf"

# Check if the file exists
if [ ! -e "$file_path" ]; then
    echo "Downloading model."
    # Download to a temporary name and move into place only on success, so an
    # interrupted download can never be confused with a complete model file.
    wget "$url" -O "${file_path}.part"
    mv "${file_path}.part" "$file_path"
    echo "File downloaded successfully."
else
    echo "File already exists."
fi

# exec so the server replaces this shell as the container's main process and
# receives SIGTERM from `docker stop` directly.
exec /app/server -m "$file_path" -c 100000 --port 7867 --host 0.0.0.0
|