author      ben                        2023-03-04 22:22:22 +0100
committer   ben                        2025-03-04 21:47:15 +0100
commit      207592ff57938536eafa99f2632d670d2bb9457e (patch)
tree        25e89078fad54f86d2691b21e8390b36e44e1aa5 /src
parent      f3eae794ace20d10edc4e970ce6258a47fb3b4d9 (diff)
download    ai_env-207592ff57938536eafa99f2632d670d2bb9457e.tar.gz
            ai_env-207592ff57938536eafa99f2632d670d2bb9457e.tar.bz2
            ai_env-207592ff57938536eafa99f2632d670d2bb9457e.tar.xz
Isolating containers from internet access to enhance security.
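The diff below is limited to 'src', so the network setup that actually cuts the containers off from the internet is not part of this changeset. As a rough sketch of the idea, assuming a plain docker CLI setup (container and image names other than ollama/ollama are illustrative), the backend services would sit on an --internal network with no route to the outside world, and only the reverse proxy would also join a routable network:

# Hypothetical commands, not taken from this commit: put backend services on an
# internal-only Docker network; such a network has no gateway to the internet.
docker network create --internal backend
docker run -d --name ollama --network backend ollama/ollama:latest
docker run -d --name aichat --network backend aichat              # image name assumed
docker run -d --name proxy  --network backend -p 8001:8001 nginx:alpine
# the proxy additionally joins the default bridge so its published port stays reachable
docker network connect bridge proxy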
Diffstat (limited to 'src')
-rw-r--r--   src/aichat/Dockerfile                                                               11
-rw-r--r--   src/aichat/entrypoint.sh                                                             6
-rw-r--r--   src/nginx/nginx.conf                                                                10
-rw-r--r--   src/ollama_provision/Dockerfile     (renamed from src/llm_provision/Dockerfile)      6
-rw-r--r--   src/ollama_provision/entrypoint.sh  (renamed from src/llm_provision/entrypoint.sh)   1
-rwxr-xr-x   src/ollama_provision/init_models.sh (renamed from src/llm_provision/init_models.sh)  5
6 files changed, 22 insertions, 17 deletions
diff --git a/src/aichat/Dockerfile b/src/aichat/Dockerfile
index 406dde2..a4d33bd 100644
--- a/src/aichat/Dockerfile
+++ b/src/aichat/Dockerfile
@@ -7,8 +7,15 @@ RUN update-ca-certificates
 RUN cargo install --target x86_64-unknown-linux-musl aichat
 
 ADD src/aichat/entrypoint.sh /entrypoint.sh
-ADD src/aichat/config.yaml /aichat_config_tpl.yaml
-
 RUN chmod 755 entrypoint.sh
 
+RUN useradd -ms /bin/bash aichat
+USER aichat
+WORKDIR /home/aichat
+
+RUN mkdir -p /home/aichat/.config/aichat
+
+ADD src/aichat/config.yaml /home/aichat/.config/aichat/config.yaml
+ADD src/aichat/roles /home/aichat/.config/aichat/roles
+
 ENTRYPOINT ["/entrypoint.sh"]
diff --git a/src/aichat/entrypoint.sh b/src/aichat/entrypoint.sh
index ec4f040..77d9285 100644
--- a/src/aichat/entrypoint.sh
+++ b/src/aichat/entrypoint.sh
@@ -1,4 +1,6 @@
 #!/bin/sh
-mkdir -p ~/.config/aichat
-cat /aichat_config_tpl.yaml | sed "s/__LLM_API_KEY__/${LLM_API_KEY}/" | sed "s/localhost/ollama/" >~/.config/aichat/config.yaml
+
+cat ~/.config/aichat/config.yaml | grep -v 'api_key' | sed "s/localhost/ollama/" | tee ~/.config/aichat/config.yaml.tmp
+mv ~/.config/aichat/config.yaml.tmp ~/.config/aichat/config.yaml
+
 aichat --serve 0.0.0.0
diff --git a/src/nginx/nginx.conf b/src/nginx/nginx.conf
index fa4cb13..f07765a 100644
--- a/src/nginx/nginx.conf
+++ b/src/nginx/nginx.conf
@@ -39,14 +39,10 @@ http {
     }
     server {
         listen 8001;
-        set $deny 1;
-        if ($http_authorization = "Bearer $API_KEY") {
-            set $deny 0;
-        }
-        if ($deny) {
-            return 403;
-        }
         location / {
+            auth_basic "Private Area";
+            auth_basic_user_file /etc/nginx/.htpasswd;
+
             proxy_pass http://aichat:8000;
             proxy_set_header Host $host;
             proxy_set_header X-Real-IP $remote_addr;
diff --git a/src/llm_provision/Dockerfile b/src/ollama_provision/Dockerfile
index 77701fe..4aa439b 100644
--- a/src/llm_provision/Dockerfile
+++ b/src/ollama_provision/Dockerfile
@@ -1,11 +1,11 @@
-FROM debian:bookworm-slim
+FROM ollama/ollama:latest
 
 ENV DEBIAN_FRONTEND=noninteractive
 RUN apt-get update
 RUN apt-get --yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confnew" install bash curl jq
 
-ADD ./src/llm_provision/init_models.sh /init_models.sh
-ADD ./src/llm_provision/entrypoint.sh /entrypoint.sh
+ADD ./src/ollama_provision/init_models.sh /init_models.sh
+ADD ./src/ollama_provision/entrypoint.sh /entrypoint.sh
 
 RUN chmod 755 /entrypoint.sh
 ENTRYPOINT ["/entrypoint.sh"]
diff --git a/src/llm_provision/entrypoint.sh b/src/ollama_provision/entrypoint.sh
index d0b6e85..1952755 100644
--- a/src/llm_provision/entrypoint.sh
+++ b/src/ollama_provision/entrypoint.sh
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+ollama start &
 
 echo "pull models into ollama volumes"
 bash /init_models.sh
diff --git a/src/llm_provision/init_models.sh b/src/ollama_provision/init_models.sh
index 960eb98..1eae979 100755
--- a/src/llm_provision/init_models.sh
+++ b/src/ollama_provision/init_models.sh
@@ -1,17 +1,16 @@
 #!/usr/bin/env bash
-OLLAMA_HOST="http://ollama:11434"
 
 IFS=',' read -r -a models_arr <<< "${MODELS}"
 
 ## now loop through the above array
 for m in "${models_arr[@]}"
 do
-    curl -s "${OLLAMA_HOST}/api/tags" | jq '.models[].name' | grep ${m} > /dev/null
+    ollama list | tail -n +2 | cut -d' ' -f1 | grep ${m} > /dev/null
     if [[ $? -ne 0 ]]
     then
         echo "download {m}"
-        curl -s "${OLLAMA_HOST}/api/pull" -d "{\"model\": \"${m}\"}"
+        ollama pull "${m}"
     else
         echo "${m} already installed"
     fi
 done
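Two operational notes on the nginx change: the rewritten server block protects the aichat upstream with auth_basic against /etc/nginx/.htpasswd, but this diff does not create that file, and clients that previously sent "Authorization: Bearer $API_KEY" now need HTTP Basic credentials instead. A minimal sketch of producing the credentials file with the standard htpasswd tool from apache2-utils (user name, password, and mount path are illustrative, and none of this is part of the commit):

# Assumption: the credentials file is generated on the host and bind-mounted
# read-only into the proxy container at the path nginx expects.
htpasswd -Bbc ./src/nginx/.htpasswd aichat 'choose-a-strong-password'
# e.g.: docker run ... -v "$PWD/src/nginx/.htpasswd:/etc/nginx/.htpasswd:ro" ...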