upped the docker-compose version (just because), explicitly pinned images to :latest (just because), commented out ark-server and mass, added tixbkend (tix is now a separate web service forced to run as uid=1000, plus a backend that processes pdfs), upped emby to a newer 4.9 release - still not using :latest, so we can move back to the prod 4.9 version when it goes latest, adjusted the telegraf group after the reinstall of mara -- this still is not elegant, but it works
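A bit more context on the telegraf line: the container reads the host's docker.sock, which is owned by the host's docker group, and that group's numeric GID changed when mara was reinstalled (139 before, 124 now), hence the edit. The name form "root:docker" is left commented out, presumably because a docker group does not exist inside the telegraf image, so only the numeric GID resolves. A minimal sketch of the pattern, with the socket bind mount assumed from the comment in the file (check the current GID on the host with getent group docker):

    telegraf:
      image: telegraf:latest
      # root is needed for smartmontools / nvme; the second field is the HOST's
      # numeric docker group GID so the container can read the mounted docker.sock
      user: "root:124"
      volumes:
        - /var/run/docker.sock:/var/run/docker.sock:ro   # assumed mount, implied by the comment in the file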
@@ -1,11 +1,11 @@
 # To note, if I am using an env_file to /srv/docker/config/secrets/*,
 # then I have taken the ENV variable with a password for that container and
 # put it into a separate file (1 place for common pwds like for ldap, but also so this file can be shared safely)
-version: '3.7'
+version: '3.9'
 services:
   traefik:
     container_name: traefik
-    image: "traefik"
+    image: traefik:latest
     restart: always
     network_mode: host
     command:
@@ -46,7 +46,7 @@ services:

   sonarr:
     container_name: sonarr
-    image: linuxserver/sonarr
+    image: linuxserver/sonarr:latest
     restart: always
     environment:
       - TZ=Australia/Melbourne
@@ -69,7 +69,7 @@ services:

   radarr:
     container_name: radarr
-    image: linuxserver/radarr
+    image: linuxserver/radarr:latest
     restart: always
     environment:
       - TZ=Australia/Melbourne
@@ -113,7 +113,7 @@ services:

   calibre:
     container_name: calibre
-    image: linuxserver/calibre
+    image: linuxserver/calibre:latest
     restart: always
     environment:
       - TZ=Australia/Melbourne
@@ -145,7 +145,7 @@ services:
     container_name: emby
     # image: emby/embyserver
     # image: emby/embyserver:beta
-    image: emby/embyserver:4.9.0.5
+    image: emby/embyserver:4.9.0.22
     restart: always
     network_mode: host
     environment:
@@ -260,7 +260,7 @@ services:
       - NET_ADMIN

   openldap:
-    image: bitnami/openldap
+    image: bitnami/openldap:latest
     user: "2000"
     container_name: openldap
     restart: always
@@ -290,7 +290,7 @@ services:

   # webmail
   webmail:
-    image: roundcube/roundcubemail
+    image: roundcube/roundcubemail:latest
     container_name: webmail
     restart: always
     labels:
@@ -321,7 +321,7 @@ services:

   portainer:
     container_name: portainer
-    image: portainer/portainer-ce
+    image: portainer/portainer-ce:latest
     restart: always
     depends_on:
       - pihole
@@ -339,7 +339,7 @@ services:
   # this is running network_mode: host so it is on the same subnet as the IoT
   # devices and can see/discover them
   hass:
-    image: ghcr.io/home-assistant/home-assistant
+    image: ghcr.io/home-assistant/home-assistant:latest
     container_name: hass
     privileged: true
     network_mode: host
@@ -369,33 +369,33 @@ services:
       - "traefik.http.routers.hass.tls.certresolver=myresolver"

   # this runs in network_most host so that it can find the players automatically
-  mass:
-    image: ghcr.io/music-assistant/server
-    container_name: mass
-    restart: always
-    network_mode: host
-    depends_on:
-      - pihole
-      - emby
-    volumes:
-      - /srv/docker/container/mass/data:/data
-      - /export/docker/storage/music:/music
-      - /etc/localtime:/etc/localtime:ro
-    labels:
-      - "com.centurylinklabs.watchtower.enable=true"
-      - "traefik.enable=true"
-      - "traefik.http.routers.mass.rule=Host(`hass.depaoli.id.au`) && PathPrefix(`/mass/`)"
-      - "traefik.http.routers.mass.tls=true"
-      - "traefik.http.routers.mass.entrypoints=secureweb"
-      - "traefik.http.middlewares.stripprefix-mass.stripprefix.prefixes=/mass"
-      - "traefik.http.routers.mass.middlewares=stripprefix-mass@docker"
-      - "traefik.http.routers.mass.tls.certresolver=myresolver"
-      # to note with network_mode: host, this works via localhost --> traefik routes this to http://127.0.0.1:8095"
-      - "traefik.http.services.mass.loadbalancer.server.port=8095"
+  # mass:
+  #   image: ghcr.io/music-assistant/server:latest
+  #   container_name: mass
+  #   restart: always
+  #   network_mode: host
+  #   depends_on:
+  #     - pihole
+  #     - emby
+  #   volumes:
+  #     - /srv/docker/container/mass/data:/data
+  #     - /export/docker/storage/music:/music
+  #     - /etc/localtime:/etc/localtime:ro
+  #   labels:
+  #     - "com.centurylinklabs.watchtower.enable=true"
+  #     - "traefik.enable=true"
+  #     - "traefik.http.routers.mass.rule=Host(`hass.depaoli.id.au`) && PathPrefix(`/mass/`)"
+  #     - "traefik.http.routers.mass.tls=true"
+  #     - "traefik.http.routers.mass.entrypoints=secureweb"
+  #     - "traefik.http.middlewares.stripprefix-mass.stripprefix.prefixes=/mass"
+  #     - "traefik.http.routers.mass.middlewares=stripprefix-mass@docker"
+  #     - "traefik.http.routers.mass.tls.certresolver=myresolver"
+  #     # to note with network_mode: host, this works via localhost --> traefik routes this to http://127.0.0.1:8095"
+  #     - "traefik.http.services.mass.loadbalancer.server.port=8095"

   mosquitto:
     container_name: mosquitto
-    image: eclipse-mosquitto
+    image: eclipse-mosquitto:latest
     restart: always
     volumes:
       - /srv/docker/container/mosquitto:/mosquitto
@@ -409,7 +409,7 @@ services:

   esphome:
     container_name: esphome
-    image: esphome/esphome
+    image: esphome/esphome:latest
     environment:
       - ESPHOME_DASHBOARD_USE_PING=true
     volumes:
@@ -426,7 +426,7 @@ services:

   sabnzbd:
     # image: linuxserver/sabnzbd:4.2.2-ls151
-    image: linuxserver/sabnzbd
+    image: linuxserver/sabnzbd:latest
     container_name: sabnzbd
     restart: always
     environment:
@@ -474,8 +474,9 @@ services:
   telegraf:
     image: telegraf:latest
     container_name: telegraf
-    # needs to be 0 / root to run smartmontools / nvme
-    user: "0:139"
+    # needs to be 0 / root to run smartmontools / nvme, and 124/docker to read docker.sock
+    #user: "root:docker"
+    user: "root:124"
     entrypoint: /root/mara-init/entrypoint-wrapper.sh
     volumes:
       - /srv/docker/container/telegraf:/etc/telegraf
@@ -506,7 +507,7 @@ services:
     restart: always

   grafana:
-    image: grafana/grafana
+    image: grafana/grafana:latest
     container_name: grafana
     restart: always
     depends_on:
@@ -533,7 +534,7 @@ services:

   pihole:
     container_name: pihole
-    image: pihole/pihole
+    image: pihole/pihole:latest
     ports:
       - "192.168.0.2:53:53/tcp"
       - "192.168.0.2:53:53/udp"
@@ -562,7 +563,7 @@ services:

   bookdb_dev:
     container_name: bookdb_dev
-    image: postgres
+    image: postgres:latest
     restart: always
     environment:
       POSTGRES_USER: ddp
@@ -644,7 +645,7 @@ services:

   padb_dev:
     container_name: padb_dev
-    image: postgres
+    image: postgres:latest
     restart: always
     # replace entrypoint to install cron and a cron job to backup users so we can rebuild the content from sqls
     entrypoint: /root/mara-init/entrypoint-wrapper.sh
@@ -737,14 +738,15 @@ services:
     restart: always
     environment:
       ENV: "production"
+    # force using uid/gid of 1000/1000 so we can share dev/prod for now - may care enough 1 day to fix
     build:
-      context: '/home/ddp/src/snow-ticket-analysis'
+      context: '/home/ddp/src/tix'
       args:
-        USERID: "2000"
-        GROUPID: "2000"
-    user: "2000:2000"
+        USERID: "1000"
+        GROUPID: "1000"
+    user: "1000:1000"
     volumes:
-      - /home/ddp/src/snow-ticket-analysis/stats.db:/stats.db
+      - /home/ddp/src/tix/:/tix
       - /etc/localtime:/etc/localtime:ro
     labels:
       - "com.centurylinklabs.watchtower.enable=false"
@@ -754,10 +756,29 @@ services:
       - "traefik.http.routers.tix.entrypoints=secureweb"
       - "traefik.http.routers.tix.tls.certresolver=myresolver"

+  # this runs cron to invoke the snow-ticket-analysis.py once a day to extract
+  # the next pdfs and add data to stats.db - then used by tix: to graph the data
+  tixbkend:
+    container_name: tixbkend
+    restart: always
+    environment:
+      ENV: "production"
+    # force using uid/gid of 1000/1000 so we can share dev/prod for now - may care enough 1 day to fix
+    build:
+      context: '/home/ddp/src/snow-ticket-analysis'
+      args:
+        USERID: "1000"
+        GROUPID: "1000"
+    volumes:
+      - /home/ddp/src/tix/:/tix/
+      - /etc/localtime:/etc/localtime:ro
+    labels:
+      - "com.centurylinklabs.watchtower.enable=false"
+
   vaultwarden:
     container_name: vaultwarden
     restart: always
-    image: vaultwarden/server
+    image: vaultwarden/server:latest
     depends_on:
       - pihole
       - openldap
@@ -791,7 +812,7 @@ services:

   # used for hass (eufy) to get notifications from camera
   eufy_security_ws:
-    image: bropat/eufy-security-ws
+    image: bropat/eufy-security-ws:latest
     container_name: eufy_security_ws
     restart: always
     environment:
@@ -814,7 +835,7 @@ services:

   # used for hass (eufy) to stream from camera
   rtsp_simple_server:
-    image: aler9/rtsp-simple-server
+    image: aler9/rtsp-simple-server:latest
     container_name: rtsp_simple_server
     restart: always
     environment:
@@ -829,36 +850,37 @@ services:
     volumes:
       - "/etc/localtime:/etc/localtime:ro"

-  ark-server:
-    restart: always
-    image: hermsi/ark-server
-    container_name: ark-server
-    volumes:
-      - /srv/docker/container/ark-server:/app
-      - /srv/docker/container/ark-server-backups:/home/steam/ARK-Backups
-    environment:
-      - "SESSION_NAME=The Island - ARK"
-      - "SERVER_MAP=TheIsland"
-      - "SERVER_PASSWORD=bagrid"
-      - "ADMIN_PASSWORD=arkadminpassword"
-      - "MAX_PLAYERS=5"
-      - "UPDATE_ON_START=false"
-      - "BACKUP_ON_STOP=true"
-      - "PRE_UPDATE_BACKUP=true"
-      - "WARN_ON_STOP=true"
-      - "GAME_MOD_IDS=1404697612,1428596566,772235118,895711211,731604991"
-    ports:
-      # Port for connections from ARK game client
-      - "7777:7777/udp"
-      # Raw UDP socket port (always Game client port +1)
-      - "7778:7778/udp"
-      # RCON management port
-      - "27020:27020/tcp"
-      # Steam's server-list port
-      - "27015:27015/udp"
+  # ark-server:
+  #   # for now ark-server wont start by default, needs manual startup
+  #   restart: "no"
+  #   image: hermsi/ark-server
+  #   container_name: ark-server
+  #   volumes:
+  #     - /srv/docker/container/ark-server:/app
+  #     - /srv/docker/container/ark-server-backups:/home/steam/ARK-Backups
+  #   environment:
+  #     - "SESSION_NAME=The Island - ARK"
+  #     - "SERVER_MAP=TheIsland"
+  #     - "SERVER_PASSWORD=bagrid"
+  #     - "ADMIN_PASSWORD=arkadminpassword"
+  #     - "MAX_PLAYERS=5"
+  #     - "UPDATE_ON_START=false"
+  #     - "BACKUP_ON_STOP=true"
+  #     - "PRE_UPDATE_BACKUP=true"
+  #     - "WARN_ON_STOP=true"
+  #     - "GAME_MOD_IDS=1404697612,1428596566,772235118,895711211,731604991"
+  #   ports:
+  #     # Port for connections from ARK game client
+  #     - "7777:7777/udp"
+  #     # Raw UDP socket port (always Game client port +1)
+  #     - "7778:7778/udp"
+  #     # RCON management port
+  #     - "27020:27020/tcp"
+  #     # Steam's server-list port
+  #     - "27015:27015/udp"

   kuma:
-    image: louislam/uptime-kuma
+    image: louislam/uptime-kuma:latest
     container_name: kuma
     volumes:
       - /srv/docker/container/kuma/data:/app/data
@@ -969,7 +991,7 @@ services:
       - mythdb

   wiki:
-    image: lscr.io/linuxserver/bookstack
+    image: lscr.io/linuxserver/bookstack:latest
     container_name: wiki
     environment:
       - PUID=1000
@@ -994,7 +1016,7 @@ services:
       - "traefik.http.routers.wiki.tls.certresolver=myresolver"

   wikidb:
-    image: lscr.io/linuxserver/mariadb
+    image: lscr.io/linuxserver/mariadb:latest
     container_name: wikidb
     environment:
       - PUID=1000