Initial commit of Docker files

This commit is contained in:
2025-08-18 00:26:57 +12:00
commit 05297cf246
29 changed files with 2517 additions and 0 deletions

150
compose/document-management.yml Executable file
View File

@@ -0,0 +1,150 @@
# Document Management Stack - Paperless NGX
services:
  # ===== PAPERLESS INFRASTRUCTURE =====
  # Redis broker backing the Paperless task queue.
  paperless-broker:
    container_name: paperless-broker
    image: redis:8-alpine
    profiles: ["documents", "all"]
    restart: unless-stopped
    volumes:
      - redisdata:/data
    networks:
      - database_network
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3
    security_opt:
      - no-new-privileges:true

  # MariaDB database for Paperless.
  # FIX: this service was previously declared *after* the top-level
  # `networks:`/`volumes:` sections rather than under `services:`,
  # which is not valid Compose structure and would fail validation.
  paperless-db:
    container_name: paperless-db
    image: mariadb:11
    profiles: ["documents", "all"]
    restart: unless-stopped
    volumes:
      - dbdata:/var/lib/mysql
    environment:
      # NOTE(review): MARIADB_HOST is not a variable the official mariadb
      # image consumes -- harmless but inert; confirm it is intentional.
      - MARIADB_HOST=paperless
      - MARIADB_DATABASE=${PAPERLESS_DB_NAME}
      - MARIADB_USER=${PAPERLESS_DB_USER}
      - MARIADB_PASSWORD=${PAPERLESS_DB_PASSWORD}
      - MARIADB_ROOT_PASSWORD=${PAPERLESS_DB_ROOT_PASSWORD}
    networks:
      - database_network
    healthcheck:
      # `$$` escapes the dollar sign so the variable expands inside the
      # container at runtime, not at Compose interpolation time.
      test: ["CMD", "mariadb-admin", "ping", "-h", "localhost", "-u", "root", "-p$$MARIADB_ROOT_PASSWORD"]
      interval: 10s
      timeout: 5s
      retries: 10
      start_period: 30s
    security_opt:
      - no-new-privileges:true

  # ===== DOCUMENT PROCESSING SERVICES =====
  # Gotenberg converts office documents / HTML to PDF for Paperless.
  paperless-gotenberg:
    container_name: paperless-gotenberg
    image: gotenberg/gotenberg:8.20
    profiles: ["documents", "all"]
    restart: unless-stopped
    # Harden embedded Chromium: disable JavaScript and restrict file
    # loading to the conversion scratch directory.
    command:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"
    networks:
      - database_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Apache Tika extracts text and metadata from documents for Paperless.
  paperless-tika:
    container_name: paperless-tika
    image: apache/tika:latest
    profiles: ["documents", "all"]
    restart: unless-stopped
    networks:
      - database_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9998/version"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== MAIN PAPERLESS APPLICATION =====
  paperless-webserver:
    container_name: paperless-webserver
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    profiles: ["documents", "all"]
    restart: unless-stopped
    depends_on:
      paperless-db:
        condition: service_healthy
      # The broker defines a healthcheck, so wait for it to be healthy
      # rather than merely started.
      paperless-broker:
        condition: service_healthy
      paperless-gotenberg:
        condition: service_started
      paperless-tika:
        condition: service_started
    ports:
      - "8100:8000"
    volumes:
      - ./paperless/data:/usr/src/paperless/data
      - ./paperless/media:/usr/src/paperless/media
      - ./paperless/export:/usr/src/paperless/export
      - ./paperless/consume:/usr/src/paperless/consume
    env_file: ./paperless/docker-compose.env
    environment:
      - PAPERLESS_REDIS=redis://paperless-broker:6379
      - PAPERLESS_DBENGINE=mariadb
      - PAPERLESS_DBHOST=paperless-db
      - PAPERLESS_DBUSER=${PAPERLESS_DB_USER}
      - PAPERLESS_DBPASS=${PAPERLESS_DB_PASSWORD}
      - PAPERLESS_DBPORT=3306
      - PAPERLESS_URL=${PAPERLESS_URL}
      - PAPERLESS_TIKA_ENABLED=1
      - PAPERLESS_TIKA_GOTENBERG_ENDPOINT=http://paperless-gotenberg:3000
      - PAPERLESS_TIKA_ENDPOINT=http://paperless-tika:9998
    networks:
      - database_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== AI ENHANCEMENT =====
  paperless-ai:
    image: clusterzx/paperless-ai:latest
    container_name: paperless-ai
    profiles: ["documents", "all"]
    restart: unless-stopped
    depends_on:
      - paperless-webserver
    cap_drop:
      - ALL
    security_opt:
      - no-new-privileges:true
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      # NOTE(review): localhost here resolves to the paperless-ai container
      # itself, not the webserver -- confirm this is the intended endpoint.
      - RAG_SERVICE_URL=http://localhost:8000
      - RAG_SERVICE_ENABLED=true
    ports:
      - "3040:3000"
    volumes:
      - aidata:/app/data

networks:
  database_network:
    external: true

volumes:
  redisdata:
    external: true
  dbdata:
    external: true
  aidata:
    external: true

253
compose/media-stack.yml Executable file
View File

@@ -0,0 +1,253 @@
# Media Stack - *arr services, Jellyfin, torrent client
services:
  # ===== MEDIA MANAGEMENT =====
  sonarr:
    image: lscr.io/linuxserver/sonarr:latest
    container_name: sonarr
    profiles: ["media", "all"]
    environment:
      - TZ=${TZ}
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Sonarr/config:/config
      - ${DATA_ROOT}:/data
      - ${TORRENTS_PATH}:/downloads
    ports:
      - "8989:8989"
    networks:
      arr_network:
        ipv4_address: 172.20.0.3
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8989/ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  radarr:
    image: lscr.io/linuxserver/radarr:latest
    container_name: radarr
    profiles: ["media", "all"]
    environment:
      - TZ=${TZ}
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Radarr/config:/config
      - ${DATA_ROOT}:/data
      - ${TORRENTS_PATH}:/downloads
    ports:
      - "7878:7878"
    networks:
      arr_network:
        ipv4_address: 172.20.0.5
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:7878/ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  lidarr:
    image: lscr.io/linuxserver/lidarr:latest
    container_name: lidarr
    profiles: ["media", "all"]
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Lidarr/config:/config
      - ${DATA_ROOT}:/data
      - ${TORRENTS_PATH}:/downloads
    ports:
      - "8686:8686"
    restart: unless-stopped
    networks:
      arr_network:
        ipv4_address: 172.20.0.7
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8686/ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  bazarr:
    image: lscr.io/linuxserver/bazarr:latest
    container_name: bazarr
    profiles: ["media", "all"]
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Bazarr/config:/config
      - ${DATA_ROOT}:/data
    networks:
      arr_network:
        ipv4_address: 172.20.0.6
    ports:
      - "6767:6767"
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:6767/ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  prowlarr:
    image: lscr.io/linuxserver/prowlarr:latest
    container_name: prowlarr
    profiles: ["media", "all"]
    environment:
      - TZ=${TZ}
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Prowlarr/config:/config
    ports:
      - "9696:9696"
    networks:
      arr_network:
        ipv4_address: 172.20.0.4
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9696/ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  flaresolverr:
    image: ghcr.io/flaresolverr/flaresolverr:latest
    container_name: flaresolverr
    profiles: ["media", "all"]
    environment:
      - TZ=${TZ}
    ports:
      - "8191:8191"
    networks:
      arr_network:
        ipv4_address: 172.20.0.8
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8191/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== MEDIA STREAMING =====
  jellyfin:
    image: jellyfin/jellyfin:latest
    container_name: jellyfin
    profiles: ["media", "all"]
    ports:
      - "8096:8096"
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Jellyfin/config:/config
      - ${DOCKER_CONFIG_ROOT}/Arrs/Jellyfin/cache:/cache
      - ${DATA_ROOT}:/data
    # FIX: the GPU render node was bind-mounted via `volumes:`; device
    # nodes must be passed with `devices:` so the container is granted
    # cgroup device access for hardware transcoding (VA-API).
    devices:
      - /dev/dri/renderD128:/dev/dri/renderD128
    # Add the container user to the host `render` group so it may open
    # the render node -- presumably GID 104 on this host; TODO confirm.
    group_add:
      - "104"
    restart: unless-stopped
    environment:
      - TZ=${TZ}
      - PUID=${PUID}
      - PGID=${PGID}
      - JELLYFIN_PublishedServerUrl=${JELLYFIN_URL}
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8096/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  jellyseerr:
    image: fallenbagel/jellyseerr:latest
    container_name: jellyseerr
    profiles: ["media", "all"]
    environment:
      - LOG_LEVEL=info
      - TZ=${TZ}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/Arrs/Jellyseerr/config:/app/config
      - ${TORRENTS_PATH}:/downloads
    ports:
      - "5055:5055"
    networks:
      arr_network:
        ipv4_address: 172.20.0.14
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5055/api/v1/status"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== DOWNLOAD CLIENT =====
  qbittorrent:
    container_name: qbittorrent
    image: lscr.io/linuxserver/qbittorrent:latest
    profiles: ["media", "all"]
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
      - WEBUI_PORT=8080
    ports:
      - "7070:8080"
      - "56881:6881"
      - "56881:6881/udp"
    volumes:
      - ./qBittorrent/config:/config
      - ${TORRENTS_PATH}:/downloads
    networks:
      arr_network:
        ipv4_address: 172.20.0.2
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== MUSIC SCROBBLING =====
  multi-scrobbler:
    image: foxxmd/multi-scrobbler:latest
    container_name: multi-scrobbler
    profiles: ["media", "all"]
    environment:
      - TZ=${TZ}
      - PUID=${PUID}
      - PGID=${PGID}
      - BASE_URL=${SCROBBLE_URL}
      - SPOTIFY_CLIENT_ID=${SPOTIFY_CLIENT_ID}
      - SPOTIFY_CLIENT_SECRET=${SPOTIFY_CLIENT_SECRET}
      - SPOTIFY_REDIRECT_URI=${SPOTIFY_REDIRECT_URI}
      - MALOJA_URL=${MALOJA_URL}
      - MALOJA_API_KEY=${MALOJA_API_KEY}
    volumes:
      - "./scrobble/config:/config"
    ports:
      - "9078:9078"
    restart: unless-stopped
    depends_on:
      - maloja

  maloja:
    image: krateng/maloja:latest
    container_name: maloja
    profiles: ["media", "all"]
    ports:
      - "42010:42010"
    volumes:
      - "./maloja/config:/etc/maloja"
      - "./maloja/data:/var/lib/maloja"
      - "./maloja/logs:/var/log/maloja"
    environment:
      - MALOJA_FORCE_PASSWORD=${MALOJA_FORCE_PASSWORD}
    restart: unless-stopped

networks:
  arr_network:
    external: true

84
compose/monitoring.yml Executable file
View File

@@ -0,0 +1,84 @@
# Monitoring & Maintenance Services
services:
  # ===== NETWORK MONITORING =====
  speedtest-tracker:
    image: lscr.io/linuxserver/speedtest-tracker:latest
    restart: unless-stopped
    container_name: speedtest-tracker
    profiles: ["monitoring", "all"]
    ports:
      - "8180:80"
      - "8143:443"
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
      - APP_KEY=${SPEEDTEST_APP_KEY}
      - DB_CONNECTION=mariadb
      - DB_HOST=speedtest-db
      - DB_PORT=3306
      - DB_DATABASE=${SPEEDTEST_DB_NAME}
      - DB_USERNAME=${SPEEDTEST_DB_USER}
      - DB_PASSWORD=${SPEEDTEST_DB_PASSWORD}
      - APP_DEBUG=false
      - SPEEDTEST_SCHEDULE=${SPEEDTEST_SCHEDULE}
      - SPEEDTEST_SERVERS=${SPEEDTEST_SERVERS}
      - PUBLIC_DASHBOARD=true
    volumes:
      - ${DOCKER_CONFIG_ROOT}/speedtest-tracker/data:/config
    depends_on:
      speedtest-db:
        condition: service_healthy
    networks:
      - database_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost/api/healthcheck"]
      interval: 30s
      timeout: 10s
      retries: 3

  # MariaDB database for speedtest-tracker.
  speedtest-db:
    image: mariadb:11
    container_name: speedtest-db
    profiles: ["monitoring", "all"]
    restart: unless-stopped
    environment:
      # Use the canonical MARIADB_* variable names (the mariadb image's
      # preferred forms, with identical semantics to the legacy MYSQL_*
      # names) -- consistent with the paperless-db service in this stack.
      - MARIADB_DATABASE=${SPEEDTEST_DB_NAME}
      - MARIADB_USER=${SPEEDTEST_DB_USER}
      - MARIADB_PASSWORD=${SPEEDTEST_DB_PASSWORD}
      - MARIADB_RANDOM_ROOT_PASSWORD=true
    volumes:
      - ${DOCKER_CONFIG_ROOT}/speedtest-tracker/db:/var/lib/mysql
    networks:
      - database_network
    healthcheck:
      # healthcheck.sh ships with the official mariadb image.
      test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
      interval: 10s
      timeout: 5s
      retries: 10
      start_period: 30s
    security_opt:
      - no-new-privileges:true

  # ===== CONTAINER MAINTENANCE =====
  # Watchtower auto-updates running containers; needs the Docker socket.
  watchtower:
    image: containrrr/watchtower:latest
    container_name: watchtower
    profiles: ["monitoring", "all"]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - TZ=${TZ}
      - WATCHTOWER_CLEANUP=${WATCHTOWER_CLEANUP}
      - WATCHTOWER_POLL_INTERVAL=${WATCHTOWER_POLL_INTERVAL}
      - WATCHTOWER_INCLUDE_RESTARTING=true
      - WATCHTOWER_INCLUDE_STOPPED=false
      - WATCHTOWER_REVIVE_STOPPED=false
      - WATCHTOWER_NO_STARTUP_MESSAGE=true
    restart: unless-stopped
    security_opt:
      - no-new-privileges:true

networks:
  database_network:
    external: true

271
compose/utility-stack.yml Executable file
View File

@@ -0,0 +1,271 @@
# Utility Services - Dashboard, file management, sync, etc.

# Shared configuration for the Gramps Web app server and its Celery
# worker (they previously duplicated ~15 identical lines each).
x-grampsweb-common: &grampsweb_common
  image: ghcr.io/gramps-project/grampsweb:latest
  profiles: ["genealogy", "all"]
  restart: unless-stopped
  environment:
    - GRAMPSWEB_TREE=Gramps Web
    - GRAMPSWEB_CELERY_CONFIG__broker_url=redis://grampsweb_redis:6379/0
    - GRAMPSWEB_CELERY_CONFIG__result_backend=redis://grampsweb_redis:6379/0
    - GRAMPSWEB_RATELIMIT_STORAGE_URI=redis://grampsweb_redis:6379/1
  depends_on:
    - grampsweb_redis
  volumes:
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_users:/app/users
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_index:/app/indexdir
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_thumb_cache:/app/thumbnail_cache
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_cache:/app/cache
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_secret:/app/secret
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_db:/root/.gramps/grampsdb
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_media:/app/media
    - ${DOCKER_CONFIG_ROOT}/gramps/gramps_tmp:/tmp

services:
  # ===== DASHBOARD & MONITORING =====
  homepage:
    image: ghcr.io/gethomepage/homepage:latest
    container_name: homepage
    profiles: ["utilities", "all"]
    volumes:
      - ./Homepage/config/images:/app/public/images
      - ./Homepage/config/icons:/app/public/icons
      - ./Homepage/config:/app/config
    ports:
      - "7575:3000"
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
      - HOMEPAGE_ALLOWED_HOSTS=${HOMEPAGE_ALLOWED_HOSTS}
    restart: unless-stopped
    depends_on:
      - dockerproxy

  # Read-only Docker socket proxy so Homepage never touches the socket
  # directly; the proxy port is bound to loopback only.
  dockerproxy:
    image: ghcr.io/tecnativa/docker-socket-proxy:latest
    container_name: dockerproxy
    profiles: ["utilities", "all"]
    environment:
      - CONTAINERS=${DOCKER_PROXY_CONTAINERS}
      - SERVICES=${DOCKER_PROXY_SERVICES}
      - TASKS=${DOCKER_PROXY_TASKS}
      - POST=${DOCKER_PROXY_POST}
    ports:
      - "127.0.0.1:2375:2375"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    restart: unless-stopped
    security_opt:
      - no-new-privileges:true

  # ===== FILE MANAGEMENT =====
  filebrowser:
    image: hurlenko/filebrowser:latest
    container_name: filebrowser
    profiles: ["utilities", "all"]
    user: "${PUID}:${PGID}"
    ports:
      - "6633:8080"
    volumes:
      - ./filebrowser/data:/data
      - ./filebrowser/config:/config
      # NOTE(review): exposes the whole home directory to the web UI --
      # confirm this scope is intended.
      - /home/jamie:/data/home
      - ${DATA_ROOT}:/data/media
    environment:
      - FB_BASEURL=/filebrowser
    restart: unless-stopped
    healthcheck:
      # NOTE(review): with FB_BASEURL=/filebrowser the health endpoint may
      # live under /filebrowser/health -- verify this path succeeds.
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== SYNCHRONIZATION =====
  syncthing:
    image: syncthing/syncthing:latest
    container_name: syncthing
    hostname: syncthing
    profiles: ["utilities", "all"]
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/syncthing:/var/syncthing
      - ${DOCKER_CONFIG_ROOT}/obsidian/vaults:/var/syncthing/obsidian
    ports:
      - "8384:8384" # Web UI
      - "22000:22000/tcp" # TCP file transfers
      - "22000:22000/udp" # QUIC file transfers
      - "21027:21027/udp" # Receive local discovery broadcasts
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8384/rest/system/ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== PRODUCTIVITY =====
  obsidian:
    image: ghcr.io/sytone/obsidian-remote:latest
    container_name: obsidian-remote
    profiles: ["utilities", "all"]
    restart: unless-stopped
    ports:
      - "8181:8080"
      - "8443:8443"
    volumes:
      - ${DOCKER_CONFIG_ROOT}/obsidian/vaults:/vaults
      - ${DOCKER_CONFIG_ROOT}/obsidian/config:/config
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
      - DOCKER_MODS=linuxserver/mods:universal-git
    depends_on:
      - syncthing

  stirling-pdf:
    container_name: stirling-pdf
    image: docker.stirlingpdf.com/stirlingtools/stirling-pdf:latest
    profiles: ["utilities", "all"]
    deploy:
      resources:
        limits:
          memory: 4G
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:8080/api/v1/info/status | grep -q 'UP' && curl -fL http://localhost:8080/ | grep -qv 'Please sign in'"]
      interval: 30s
      timeout: 10s
      retries: 5
    ports:
      - "8090:8080"
    volumes:
      - ./stirling/latest/data:/usr/share/tessdata:rw
      - ./stirling/latest/config:/configs:rw
      - ./stirling/latest/logs:/logs:rw
    environment:
      - DISABLE_ADDITIONAL_FEATURES=true
      - SECURITY_ENABLELOGIN=false
      - LANGS=en_GB,en_US,ar_AR,de_DE,fr_FR,es_ES,zh_CN,zh_TW,ca_CA,it_IT,sv_SE,pl_PL,ro_RO,ko_KR,pt_BR,ru_RU,el_GR,hi_IN,hu_HU,tr_TR,id_ID
      - SYSTEM_DEFAULTLOCALE=en-US
      - UI_APPNAME=Stirling-PDF
      - UI_HOMEDESCRIPTION=Stirling-PDF
      - UI_APPNAMENAVBAR=Stirling-PDF
      - SYSTEM_MAXFILESIZE=100
      - METRICS_ENABLED=true
      - SYSTEM_GOOGLEVISIBILITY=true
      - SHOW_SURVEY=true
    restart: unless-stopped

  # ===== REMOTE ACCESS =====
  hbbr:
    container_name: hbbr
    image: rustdesk/rustdesk-server:latest
    profiles: ["remote", "all"]
    command: hbbr
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/rustdesk/data:/root
    network_mode: "host"
    restart: unless-stopped

  hbbs:
    container_name: hbbs
    image: rustdesk/rustdesk-server:latest
    profiles: ["remote", "all"]
    command: hbbs
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ${DOCKER_CONFIG_ROOT}/rustdesk/data:/root
    network_mode: "host"
    depends_on:
      - hbbr
    restart: unless-stopped

  # ===== GENEALOGY =====
  # Web app server; shares env/volumes/depends_on with the worker below.
  grampsweb:
    <<: *grampsweb_common
    container_name: gramps
    ports:
      - "5511:5000"

  # Celery background worker for Gramps Web.
  grampsweb_celery:
    <<: *grampsweb_common
    container_name: grampsweb_celery
    command: celery -A gramps_webapi.celery worker --loglevel=INFO --concurrency=2

  grampsweb_redis:
    image: redis:7.2.4-alpine
    container_name: grampsweb_redis
    profiles: ["genealogy", "all"]
    restart: unless-stopped
    volumes:
      - grampsweb_redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ===== API SERVICES =====
  racknerd-api-converter:
    build: ./racknerd-converter
    container_name: racknerd-api-converter
    profiles: ["utilities", "all"]
    ports:
      - "5000:5000"
    environment:
      - RACKNERD_API_KEY=${RACKNERD_API_KEY}
      - RACKNERD_API_HASH=${RACKNERD_API_HASH}
      - RACKNERD_VSERVER_ID=${RACKNERD_VSERVER_ID}
      - RACKNERD_BASE_URL=${RACKNERD_BASE_URL}
      - UPDATE_INTERVAL=${RACKNERD_UPDATE_INTERVAL}
      - HOST=0.0.0.0
      - PORT=5000
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  # ===== MISC SERVICES =====
  newt:
    image: fosrl/newt:latest
    container_name: newt
    profiles: ["utilities", "all"]
    restart: unless-stopped
    environment:
      - PANGOLIN_ENDPOINT=${PANGOLIN_URL}
      - NEWT_ID=${NEWT_ID}
      - NEWT_SECRET=${NEWT_SECRET}

volumes:
  grampsweb_redis_data:
    driver: local