From 207592ff57938536eafa99f2632d670d2bb9457e Mon Sep 17 00:00:00 2001
From: ben
Date: Sat, 4 Mar 2023 22:22:22 +0100
Subject: Isolating containers from internet access to enhance security.

---
 src/aichat/Dockerfile               | 11 +++++++++--
 src/aichat/entrypoint.sh            |  6 ++++--
 src/llm_provision/Dockerfile        | 12 ------------
 src/llm_provision/entrypoint.sh     |  4 ----
 src/llm_provision/init_models.sh    | 18 ------------------
 src/nginx/nginx.conf                | 10 +++-------
 src/ollama_provision/Dockerfile     | 12 ++++++++++++
 src/ollama_provision/entrypoint.sh  |  5 +++++
 src/ollama_provision/init_models.sh | 17 +++++++++++++++++
 9 files changed, 50 insertions(+), 45 deletions(-)
 delete mode 100644 src/llm_provision/Dockerfile
 delete mode 100644 src/llm_provision/entrypoint.sh
 delete mode 100755 src/llm_provision/init_models.sh
 create mode 100644 src/ollama_provision/Dockerfile
 create mode 100644 src/ollama_provision/entrypoint.sh
 create mode 100755 src/ollama_provision/init_models.sh
(limited to 'src')

diff --git a/src/aichat/Dockerfile b/src/aichat/Dockerfile
index 406dde2..a4d33bd 100644
--- a/src/aichat/Dockerfile
+++ b/src/aichat/Dockerfile
@@ -7,8 +7,15 @@ RUN update-ca-certificates
 RUN cargo install --target x86_64-unknown-linux-musl aichat
 
 ADD src/aichat/entrypoint.sh /entrypoint.sh
-ADD src/aichat/config.yaml /aichat_config_tpl.yaml
-
 RUN chmod 755 entrypoint.sh
 
+RUN useradd -ms /bin/bash aichat
+USER aichat
+WORKDIR /home/aichat
+
+RUN mkdir -p /home/aichat/.config/aichat
+
+ADD src/aichat/config.yaml /home/aichat/.config/aichat/config.yaml
+ADD src/aichat/roles /home/aichat/.config/aichat/roles
+
 ENTRYPOINT ["/entrypoint.sh"]
diff --git a/src/aichat/entrypoint.sh b/src/aichat/entrypoint.sh
index ec4f040..77d9285 100644
--- a/src/aichat/entrypoint.sh
+++ b/src/aichat/entrypoint.sh
@@ -1,4 +1,6 @@
 #!/bin/sh
-mkdir -p ~/.config/aichat
-cat /aichat_config_tpl.yaml | sed "s/__LLM_API_KEY__/${LLM_API_KEY}/" | sed "s/localhost/ollama/" >~/.config/aichat/config.yaml
+
+cat ~/.config/aichat/config.yaml | grep -v 'api_key' | sed "s/localhost/ollama/" | tee ~/.config/aichat/config.yaml.tmp
+mv ~/.config/aichat/config.yaml.tmp ~/.config/aichat/config.yaml
+
 aichat --serve 0.0.0.0
diff --git a/src/llm_provision/Dockerfile b/src/llm_provision/Dockerfile
deleted file mode 100644
index 77701fe..0000000
--- a/src/llm_provision/Dockerfile
+++ /dev/null
@@ -1,12 +0,0 @@
-FROM debian:bookworm-slim
-
-ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get update
-RUN apt-get --yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confnew" install bash curl jq
-
-ADD ./src/llm_provision/init_models.sh /init_models.sh
-ADD ./src/llm_provision/entrypoint.sh /entrypoint.sh
-RUN chmod 755 /entrypoint.sh
-
-ENTRYPOINT ["/entrypoint.sh"]
-#ENTRYPOINT ["tail", "-f", "/dev/null"] # to debug
diff --git a/src/llm_provision/entrypoint.sh b/src/llm_provision/entrypoint.sh
deleted file mode 100644
index d0b6e85..0000000
--- a/src/llm_provision/entrypoint.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-echo "pull models into ollama volumes"
-bash /init_models.sh
diff --git a/src/llm_provision/init_models.sh b/src/llm_provision/init_models.sh
deleted file mode 100755
index 960eb98..0000000
--- a/src/llm_provision/init_models.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-
-OLLAMA_HOST="http://ollama:11434"
-
-IFS=',' read -r -a models_arr <<< "${MODELS}"
-
-## now loop through the above array
-for m in "${models_arr[@]}"
-do
-  curl -s "${OLLAMA_HOST}/api/tags" | jq '.models[].name' | grep ${m} > /dev/null
-  if [[ $? -ne 0 ]]
-  then
-    echo "download {m}"
-    curl -s "${OLLAMA_HOST}/api/pull" -d "{\"model\": \"${m}\"}"
-  else
-    echo "${m} already installed"
-  fi
-done
diff --git a/src/nginx/nginx.conf b/src/nginx/nginx.conf
index fa4cb13..f07765a 100644
--- a/src/nginx/nginx.conf
+++ b/src/nginx/nginx.conf
@@ -39,14 +39,10 @@ http {
     }
     server {
         listen 8001;
-        set $deny 1;
-        if ($http_authorization = "Bearer $API_KEY") {
-            set $deny 0;
-        }
-        if ($deny) {
-            return 403;
-        }
         location / {
+            auth_basic "Private Area";
+            auth_basic_user_file /etc/nginx/.htpasswd;
+
             proxy_pass http://aichat:8000;
             proxy_set_header Host $host;
             proxy_set_header X-Real-IP $remote_addr;
diff --git a/src/ollama_provision/Dockerfile b/src/ollama_provision/Dockerfile
new file mode 100644
index 0000000..4aa439b
--- /dev/null
+++ b/src/ollama_provision/Dockerfile
@@ -0,0 +1,12 @@
+FROM ollama/ollama:latest
+
+ENV DEBIAN_FRONTEND=noninteractive
+RUN apt-get update
+RUN apt-get --yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confnew" install bash curl jq
+
+ADD ./src/ollama_provision/init_models.sh /init_models.sh
+ADD ./src/ollama_provision/entrypoint.sh /entrypoint.sh
+RUN chmod 755 /entrypoint.sh
+
+ENTRYPOINT ["/entrypoint.sh"]
+#ENTRYPOINT ["tail", "-f", "/dev/null"] # to debug
diff --git a/src/ollama_provision/entrypoint.sh b/src/ollama_provision/entrypoint.sh
new file mode 100644
index 0000000..1952755
--- /dev/null
+++ b/src/ollama_provision/entrypoint.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+ollama start &
+echo "pull models into ollama volumes"
+bash /init_models.sh
diff --git a/src/ollama_provision/init_models.sh b/src/ollama_provision/init_models.sh
new file mode 100755
index 0000000..1eae979
--- /dev/null
+++ b/src/ollama_provision/init_models.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+
+IFS=',' read -r -a models_arr <<< "${MODELS}"
+
+## now loop through the above array
+for m in "${models_arr[@]}"
+do
+  ollama list | tail -n +2 | cut -d' ' -f1 | grep ${m} > /dev/null
+  if [[ $? -ne 0 ]]
+  then
+    echo "download {m}"
+    ollama pull "${m}"
+  else
+    echo "${m} already installed"
+  fi
+done
-- 
cgit v1.2.3
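
Editor's note: two pieces of setup are implied by this patch but not contained in it. The new nginx.conf expects a basic-auth credentials file at /etc/nginx/.htpasswd, and the internet isolation itself has to come from the container runtime configuration (for example an internal Docker network), which is outside the scope of this diff. The shell sketch below illustrates both under stated assumptions: it assumes a Docker Compose setup with a long-running service named "ollama", that curl is available inside that container, and it invents the file path and username; none of these names come from the commit.

# Create the basic-auth user file referenced by the new nginx.conf
# (path and username are illustrative assumptions; htpasswd is from apache2-utils).
htpasswd -cB src/nginx/.htpasswd aichat-user

# With the stack running, check that a backend container has no route to the
# internet (assumes a compose service named "ollama" with curl installed).
docker compose exec ollama curl -sSf --max-time 5 https://example.com \
  && echo "WARNING: container can still reach the internet" \
  || echo "OK: no outbound internet access"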