# Homelab compose stack: reverse proxy (NPM), Immich (+ML, Redis/Valkey, Postgres),
# Home Assistant + voice add-ons (ESPHome, Piper TTS, Whisper STT), and Gitea.
#
# Usage:
#   sudo docker compose down --volumes
#   sudo docker compose up -d --remove-orphans
name: server

services:
  # --- NGINX Proxy Manager ---
  nginx-proxy-manager:
    image: "docker.io/jc21/nginx-proxy-manager:latest"
    restart: unless-stopped
    container_name: nginx-proxy-manager
    ports:
      - "80:80"   # HTTP
      - "81:81"   # admin UI
      - "443:443" # HTTPS
    environment:
      # Remove this if IPv6 IS enabled on your host
      - DISABLE_IPV6=true
    volumes:
      - ${NGINX_DATA}:/data
      - ${NGINX_LETSENCRYPT}:/etc/letsencrypt

  # --- Immich Server ---
  immich-server:
    container_name: immich_server
    image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    volumes:
      # Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
      - ${IMMICH_UPLOAD_LOCATION}:/usr/src/app/upload
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
    ports:
      - "2283:2283"
    depends_on:
      - redis
      - database
    restart: always
    healthcheck:
      disable: false

  immich-machine-learning:
    container_name: immich_machine_learning
    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
    # Example tag: ${IMMICH_VERSION:-release}-cuda
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
    # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
    #   file: hwaccel.ml.yml
    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    volumes:
      - model-cache:/cache
    env_file:
      - .env
    restart: always
    healthcheck:
      disable: false

  redis:
    container_name: immich_redis
    # Valkey (Redis-compatible) pinned by digest for reproducibility
    image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always

  database:
    container_name: immich_postgres
    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
    environment:
      POSTGRES_PASSWORD: ${IMMICH_DB_PASSWORD}
      POSTGRES_USER: ${IMMICH_DB_USERNAME}
      POSTGRES_DB: ${IMMICH_DB_DATABASE_NAME}
      POSTGRES_INITDB_ARGS: "--data-checksums"
      # Comment out the DB_STORAGE_TYPE: 'HDD' var if your database is stored on SSDs
      DB_STORAGE_TYPE: "HDD" # fixed: was misspelled as B_STORAGE_TYPE and thus ignored
    volumes:
      # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
      - ${IMMICH_DB_DATA_LOCATION}:/var/lib/postgresql/data
    restart: always

  # --- Home Assistant ---
  homeassistant:
    container_name: homeassistant
    image: "ghcr.io/home-assistant/home-assistant:stable"
    env_file:
      - .env
    volumes:
      - ${HA_CONFIG}:/config
      - /etc/localtime:/etc/localtime:ro
      - /run/dbus:/run/dbus:ro # Bluetooth support via host D-Bus
    restart: unless-stopped
    # devices:
    #   - /dev/ttyUSB0:/dev/ttyUSB0
    privileged: true
    network_mode: host # ports below are unused while host networking is on
    # ports:
    #   - "8123:8123"

  esphome:
    container_name: esphome
    image: ghcr.io/esphome/esphome:latest
    volumes:
      - ${ESPHOME_CONFIG}:/config
      - /etc/localtime:/etc/localtime:ro
    restart: unless-stopped
    privileged: true
    network_mode: host # ports below are unused while host networking is on
    # ports:
    #   - "6052:6052"
    env_file:
      - .env

  # openwakeword:
  #   image: homeassistant/amd64-addon-openwakeword:latest
  #   container_name: openwakeword
  #   entrypoint: python3
  #   command: >
  #     -m wyoming_openwakeword
  #     --uri 'tcp://0.0.0.0:10400'
  #     --preload-model 'ok_nabu'
  #     --custom-model-dir /share/openwakeword
  #   env_file:
  #     - .env
  #   ports:
  #     - 10400:10400
  #   volumes:
  #     - ${OWW_DATA}:/data
  #     - ${OWW_CUSTOM_MODEL_DIR}:/share/openwakeword
  #   restart: unless-stopped

  # Wyoming Piper TTS for Home Assistant voice
  piper:
    image: homeassistant/amd64-addon-piper:latest
    container_name: piper
    entrypoint: python3
    command: >
      -m wyoming_piper
      --piper '/usr/share/piper/piper'
      --uri 'tcp://0.0.0.0:10200'
      --length-scale "1"
      --noise-scale "0.667"
      --speaker "0"
      --voice "en_US-lessac-medium"
      --max-piper-procs "1"
      --data-dir /data
      --data-dir /share/piper
      --download-dir /data
    network_mode: host
    # ports:
    #   - "10200:10200"
    volumes:
      - ${PIPER_DATA}:/data
    restart: unless-stopped

  # Wyoming faster-whisper STT for Home Assistant voice
  whisper:
    image: homeassistant/amd64-addon-whisper:latest
    container_name: whisper
    entrypoint: python3
    command: >
      -m wyoming_faster_whisper
      --uri tcp://0.0.0.0:10300
      --model small-int8
      --beam-size 1
      --language en
      --data-dir /data
      --download-dir /data
    network_mode: host # Needed to use localhost in HA interface
    # ports:
    #   - "10300:10300"
    env_file:
      - .env
    volumes:
      - ${WHISPER_DATA}:/data
    restart: unless-stopped

  # --- Pi-hole ---
  # pihole:
  #   container_name: pihole
  #   image: pihole/pihole:latest
  #   ports:
  #     # DNS Ports
  #     - "53:53/tcp"
  #     - "53:53/udp"
  #     # Default HTTP Port
  #     - "2100:80/tcp"
  #     - "2101:443/tcp"
  #   env_file:
  #     - .env
  #   volumes:
  #     - "${PIHOLE}:/etc/pihole"
  #   cap_add:
  #     # Optional, if Pi-hole should get some more processing time
  #     - SYS_NICE
  #   restart: unless-stopped

  # --- Gitea ---
  gitea:
    image: gitea/gitea:latest
    restart: unless-stopped
    volumes:
      - ${GITEA}:/data
      - /etc/timezone:/etc/timezone:ro
      - /etc/localtime:/etc/localtime:ro
    ports:
      - "3001:3000" # web UI
      - "22:22"     # SSH (quoted: bare 22:22 is sexagesimal in YAML 1.1)

volumes:
  model-cache:
  nextcloud_aio_mastercontainer:
    name: nextcloud_aio_mastercontainer # This line is not allowed to be changed.