git subrepo commit (merge) mailcow/src/mailcow-dockerized

subrepo:
  subdir:   "mailcow/src/mailcow-dockerized"
  merged:   "c7b1dc37"
upstream:
  origin:   "https://github.com/mailcow/mailcow-dockerized.git"
  branch:   "master"
  commit:   "a366494c"
git-subrepo:
  version:  "0.4.6"
  origin:   "???"
  commit:   "???"
Change-Id: Id574ecd4e02e3c4fbf8a1efd49be11c0b6d19a3f
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/Dockerfile
index 571c3d0..898dd8b 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/Dockerfile
@@ -1,6 +1,6 @@
 FROM alpine:3.17
 
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 RUN apk upgrade --no-cache \
   && apk add --update --no-cache \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh
index 4f5cb80..a45f5a5 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh
@@ -213,12 +213,12 @@
   done
   ADDITIONAL_WC_ARR+=('autodiscover' 'autoconfig')
 
+  if [[ ${SKIP_IP_CHECK} != "y" ]]; then
   # Start IP detection
   log_f "Detecting IP addresses..."
   IPV4=$(get_ipv4)
   IPV6=$(get_ipv6)
   log_f "OK: ${IPV4}, ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}"
-
   #########################################
   # IP and webroot challenge verification #
   SQL_DOMAINS=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT domain FROM domain WHERE backupmx=0 and active=1" -Bs)
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile
index 91716b8..1fbcfda 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile
@@ -1,7 +1,7 @@
-FROM clamav/clamav:1.0_base
+FROM clamav/clamav:1.0.3_base
 
-LABEL maintainer "André Peters <andre.peters@servercow.de>"
-
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
+
 RUN apk upgrade --no-cache \
   && apk add --update --no-cache \
   rsync \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/Dockerfile
index 97c3808..3431f93 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/Dockerfile
@@ -1,6 +1,6 @@
 FROM alpine:3.17
 
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 WORKDIR /app
 
@@ -13,9 +13,13 @@
   fastapi \
   uvicorn \
   aiodocker \
-  redis 
+  docker \
+  aioredis 
+RUN mkdir /app/modules
 
 COPY docker-entrypoint.sh /app/
-COPY dockerapi.py /app/
+COPY main.py /app/main.py
+COPY modules/ /app/modules/
 
 ENTRYPOINT ["/bin/sh", "/app/docker-entrypoint.sh"]
+CMD exec python main.py
\ No newline at end of file
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/docker-entrypoint.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/docker-entrypoint.sh
index aab6cd5..64f4b82 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/docker-entrypoint.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/docker-entrypoint.sh
@@ -6,4 +6,4 @@
   -subj /CN=dockerapi/O=mailcow \
   -addext subjectAltName=DNS:dockerapi`
 
-`uvicorn --host 0.0.0.0 --port 443 --ssl-certfile=/app/dockerapi_cert.pem --ssl-keyfile=/app/dockerapi_key.pem dockerapi:app`
+exec "$@"
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/dockerapi.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/dockerapi.py
deleted file mode 100644
index 4a17e0c..0000000
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/dockerapi.py
+++ /dev/null
@@ -1,572 +0,0 @@
-from fastapi import FastAPI, Response, Request
-import aiodocker
-import psutil
-import sys
-import re
-import time
-import os
-import json
-import asyncio
-import redis
-from datetime import datetime
-
-
-containerIds_to_update = []
-host_stats_isUpdating = False
-app = FastAPI()
-
-
-@app.get("/host/stats")
-async def get_host_update_stats():
-  global host_stats_isUpdating
-
-  if host_stats_isUpdating == False:
-    print("start host stats task")
-    asyncio.create_task(get_host_stats())
-    host_stats_isUpdating = True
-
-  while True:
-    if redis_client.exists('host_stats'):
-      break
-    print("wait for host_stats results")
-    await asyncio.sleep(1.5)
-
-
-  print("host stats pulled")
-  stats = json.loads(redis_client.get('host_stats'))
-  return Response(content=json.dumps(stats, indent=4), media_type="application/json")
-
-@app.get("/containers/{container_id}/json")
-async def get_container(container_id : str):
-  if container_id and container_id.isalnum():
-    try:
-      for container in (await async_docker_client.containers.list()):
-        if container._id == container_id:
-          container_info = await container.show()
-          return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
-     
-      res = {
-        "type": "danger",
-        "msg": "no container found"
-      }
-      return Response(content=json.dumps(res, indent=4), media_type="application/json")
-    except Exception as e:
-      res = {
-        "type": "danger",
-        "msg": str(e)
-      }
-      return Response(content=json.dumps(res, indent=4), media_type="application/json")
-  else:
-    res = {
-      "type": "danger",
-      "msg": "no or invalid id defined"
-    }
-    return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-@app.get("/containers/json")
-async def get_containers():
-  containers = {}
-  try:
-    for container in (await async_docker_client.containers.list()):
-      container_info = await container.show()
-      containers.update({container_info['Id']: container_info})
-    return Response(content=json.dumps(containers, indent=4), media_type="application/json")
-  except Exception as e:
-    res = {
-      "type": "danger",
-      "msg": str(e)
-    }
-    return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-@app.post("/containers/{container_id}/{post_action}")
-async def post_containers(container_id : str, post_action : str, request: Request):
-  try : 
-    request_json = await request.json()
-  except Exception as err:
-    request_json = {}
-
-  if container_id and container_id.isalnum() and post_action:
-    try:
-      """Dispatch container_post api call"""
-      if post_action == 'exec':
-        if not request_json or not 'cmd' in request_json:
-          res = {
-            "type": "danger",
-            "msg": "cmd is missing"
-          }
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")
-        if not request_json or not 'task' in request_json:
-          res = {
-            "type": "danger",
-            "msg": "task is missing"
-          }
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-        api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
-      else:
-        api_call_method_name = '__'.join(['container_post', str(post_action) ])
-
-      docker_utils = DockerUtils(async_docker_client)
-      api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
-
-
-      print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
-      return await api_call_method(container_id, request_json)
-    except Exception as e:
-      print("error - container_post: %s" % str(e))
-      res = {
-        "type": "danger",
-        "msg": str(e)
-      }
-      return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-  else:
-    res = {
-      "type": "danger",
-      "msg": "invalid container id or missing action"
-    }
-    return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-@app.post("/container/{container_id}/stats/update")
-async def post_container_update_stats(container_id : str):
-  global containerIds_to_update
-
-  # start update task for container if no task is running
-  if container_id not in containerIds_to_update:
-    asyncio.create_task(get_container_stats(container_id))
-    containerIds_to_update.append(container_id)
-
-  while True:
-    if redis_client.exists(container_id + '_stats'):
-      break
-    await asyncio.sleep(1.5)
-
-  stats = json.loads(redis_client.get(container_id + '_stats'))
-  return Response(content=json.dumps(stats, indent=4), media_type="application/json")
-
-
-
-
-class DockerUtils:
-  def __init__(self, docker_client):
-    self.docker_client = docker_client
-
-  # api call: container_post - post_action: stop
-  async def container_post__stop(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        await container.stop()
-    res = {
-      'type': 'success', 
-      'msg': 'command completed successfully'
-    }
-    return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-  # api call: container_post - post_action: start
-  async def container_post__start(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        await container.start()
-    res = {
-      'type': 'success', 
-      'msg': 'command completed successfully'
-    }
-    return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: restart
-  async def container_post__restart(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        await container.restart()
-    res = {
-      'type': 'success', 
-      'msg': 'command completed successfully'
-    }
-    return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: top
-  async def container_post__top(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        ps_exec = await container.exec("ps")      
-        async with ps_exec.start(detach=False) as stream:
-          ps_return = await stream.read_out()
-
-        exec_details = await ps_exec.inspect()
-        if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-          res = {
-            'type': 'success', 
-            'msg': ps_return.data.decode('utf-8')
-          }
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")
-        else:
-          res = {
-            'type': 'danger', 
-            'msg': ''
-          }
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: delete
-  async def container_post__exec__mailq__delete(self, container_id, request_json):
-    if 'items' in request_json:
-      r = re.compile("^[0-9a-fA-F]+$")
-      filtered_qids = filter(r.match, request_json['items'])
-      if filtered_qids:
-        flagged_qids = ['-d %s' % i for i in filtered_qids]
-        sanitized_string = str(' '.join(flagged_qids))
-
-        for container in (await self.docker_client.containers.list()):
-          if container._id == container_id:
-            postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
-            return await exec_run_handler('generic', postsuper_r_exec)
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: hold
-  async def container_post__exec__mailq__hold(self, container_id, request_json):
-    if 'items' in request_json:
-      r = re.compile("^[0-9a-fA-F]+$")
-      filtered_qids = filter(r.match, request_json['items'])
-      if filtered_qids:
-        flagged_qids = ['-h %s' % i for i in filtered_qids]
-        sanitized_string = str(' '.join(flagged_qids))
-
-        for container in (await self.docker_client.containers.list()):
-          if container._id == container_id:
-            postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
-            return await exec_run_handler('generic', postsuper_r_exec)
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: cat
-  async def container_post__exec__mailq__cat(self, container_id, request_json):
-    if 'items' in request_json:
-      r = re.compile("^[0-9a-fA-F]+$")
-      filtered_qids = filter(r.match, request_json['items'])
-      if filtered_qids:
-        sanitized_string = str(' '.join(filtered_qids))
-
-        for container in (await self.docker_client.containers.list()):
-          if container._id == container_id:
-            postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
-            return await exec_run_handler('utf8_text_only', postcat_exec)
-
-   # api call: container_post - post_action: exec - cmd: mailq - task: unhold
-  async def container_post__exec__mailq__unhold(self, container_id, request_json):
-    if 'items' in request_json:
-      r = re.compile("^[0-9a-fA-F]+$")
-      filtered_qids = filter(r.match, request_json['items'])
-      if filtered_qids:
-        flagged_qids = ['-H %s' % i for i in filtered_qids]
-        sanitized_string = str(' '.join(flagged_qids))
-
-        for container in (await self.docker_client.containers.list()):
-          if container._id == container_id:
-            postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
-            return await exec_run_handler('generic', postsuper_r_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: deliver
-  async def container_post__exec__mailq__deliver(self, container_id, request_json):
-    if 'items' in request_json:
-      r = re.compile("^[0-9a-fA-F]+$")
-      filtered_qids = filter(r.match, request_json['items'])
-      if filtered_qids:
-        flagged_qids = ['-i %s' % i for i in filtered_qids]
-
-        for container in (await self.docker_client.containers.list()):
-          if container._id == container_id:
-            for i in flagged_qids:
-              postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')      
-              async with postsuper_r_exec.start(detach=False) as stream:
-                postsuper_r_return = await stream.read_out()
-              # todo: check each exit code
-            res = {
-              'type': 'success', 
-              'msg': 'Scheduled immediate delivery'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: list
-  async def container_post__exec__mailq__list(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix')
-        return await exec_run_handler('utf8_text_only', mailq_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: flush
-  async def container_post__exec__mailq__flush(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix')
-        return await exec_run_handler('generic', postsuper_r_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: mailq - task: super_delete
-  async def container_post__exec__mailq__super_delete(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"])
-        return await exec_run_handler('generic', postsuper_r_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: system - task: fts_rescan
-  async def container_post__exec__system__fts_rescan(self, container_id, request_json):
-    if 'username' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')         
-          async with rescan_exec.start(detach=False) as stream:
-            rescan_return = await stream.read_out()
-
-          exec_details = await rescan_exec.inspect()
-          if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-            res = {
-              'type': 'success', 
-              'msg': 'fts_rescan: rescan triggered'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-          else:
-            res = {
-              'type': 'warning', 
-              'msg': 'fts_rescan error'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-    if 'all' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')          
-          async with rescan_exec.start(detach=False) as stream:
-            rescan_return = await stream.read_out()
-
-          exec_details = await rescan_exec.inspect()
-          if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-            res = {
-              'type': 'success', 
-              'msg': 'fts_rescan: rescan triggered'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-          else:
-            res = {
-              'type': 'warning', 
-              'msg': 'fts_rescan error'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: exec - cmd: system - task: df
-  async def container_post__exec__system__df(self, container_id, request_json):
-    if 'dir' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
-          async with df_exec.start(detach=False) as stream:
-            df_return = await stream.read_out()
-
-          print(df_return)
-          print(await df_exec.inspect())
-          exec_details = await df_exec.inspect()
-          if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-            return df_return.data.decode('utf-8').rstrip()
-          else:
-            return "0,0,0,0,0,0"
-
-
-  # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
-  async def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
-    return Response(content=json.dumps(dict(type='success', msg='mysql_upgrade: not touching fake MySQL', text=''), indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
-  async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
-    return Response(content=json.dumps(dict(type='success', msg='mysql_tzinfo_to_sql: not touching fake MySQL', text=''), indent=4), media_type="application/json")
-
-
-  # api call: container_post - post_action: exec - cmd: reload - task: dovecot
-  async def container_post__exec__reload__dovecot(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
-        return await exec_run_handler('generic', reload_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: reload - task: postfix
-  async def container_post__exec__reload__postfix(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
-        return await exec_run_handler('generic', reload_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: reload - task: nginx
-  async def container_post__exec__reload__nginx(self, container_id, request_json):
-    for container in (await self.docker_client.containers.list()):
-      if container._id == container_id:
-        reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
-        return await exec_run_handler('generic', reload_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: sieve - task: list
-  async def container_post__exec__sieve__list(self, container_id, request_json):
-    if 'username' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
-          return await exec_run_handler('utf8_text_only', sieve_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: sieve - task: print
-  async def container_post__exec__sieve__print(self, container_id, request_json):
-    if 'username' in request_json and 'script_name' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]  
-          sieve_exec = await container.exec(cmd)
-          return await exec_run_handler('utf8_text_only', sieve_exec)
-
-
-  # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
-  async def container_post__exec__maildir__cleanup(self, container_id, request_json):
-    if 'maildir' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          sane_name = re.sub(r'\W+', '', request_json['maildir'])
-          cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
-          maildir_cleanup_exec = await container.exec(cmd, user='vmail')
-          return await exec_run_handler('generic', maildir_cleanup_exec)
-
-  # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
-  async def container_post__exec__rspamd__worker_password(self, container_id, request_json):
-    if 'raw' in request_json:
-      for container in (await self.docker_client.containers.list()):
-        if container._id == container_id:
-          
-          cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
-          rspamd_password_exec = await container.exec(cmd, user='_rspamd')  
-          async with rspamd_password_exec.start(detach=False) as stream:
-            rspamd_password_return = await stream.read_out()
-
-          matched = False
-          if "OK" in rspamd_password_return.data.decode('utf-8'):
-            matched = True
-            await container.restart()
-
-          if matched:
-            res = {
-              'type': 'success', 
-              'msg': 'command completed successfully'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-          else:
-            res = {
-              'type': 'danger', 
-              'msg': 'command did not complete'
-            }
-            return Response(content=json.dumps(res, indent=4), media_type="application/json")
-
-
-
-async def exec_run_handler(type, exec_obj):
-  async with exec_obj.start(detach=False) as stream:
-    exec_return = await stream.read_out()
-
-  if exec_return == None:
-    exec_return = ""
-  else:
-    exec_return = exec_return.data.decode('utf-8')
-
-  if type == 'generic':       
-    exec_details = await exec_obj.inspect()
-    if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
-      res = {
-        "type": "success",
-        "msg": "command completed successfully"
-      }
-      return Response(content=json.dumps(res, indent=4), media_type="application/json")
-    else:
-      res = {
-        "type": "success",
-        "msg": "'command failed: " + exec_return
-      }
-      return Response(content=json.dumps(res, indent=4), media_type="application/json")
-  if type == 'utf8_text_only':
-    return Response(content=exec_return, media_type="text/plain")
-
-async def get_host_stats(wait=5):
-  global host_stats_isUpdating
-
-  try:
-    system_time = datetime.now()
-    host_stats = {
-      "cpu": {
-        "cores": psutil.cpu_count(),
-        "usage": psutil.cpu_percent()
-      },
-      "memory": {
-        "total": psutil.virtual_memory().total,
-        "usage": psutil.virtual_memory().percent,
-        "swap": psutil.swap_memory()
-      },
-      "uptime": time.time() - psutil.boot_time(),
-      "system_time": system_time.strftime("%d.%m.%Y %H:%M:%S")
-    }
-
-    redis_client.set('host_stats', json.dumps(host_stats), ex=10)
-  except Exception as e:
-    res = {
-      "type": "danger",
-      "msg": str(e)
-    }
-    print(json.dumps(res, indent=4))
-
-  await asyncio.sleep(wait)
-  host_stats_isUpdating = False
-  
-
-async def get_container_stats(container_id, wait=5, stop=False):
-  global containerIds_to_update
-
-  if container_id and container_id.isalnum():
-    try:
-      for container in (await async_docker_client.containers.list()):
-        if container._id == container_id:
-          res = await container.stats(stream=False)
-
-          if redis_client.exists(container_id + '_stats'):
-            stats = json.loads(redis_client.get(container_id + '_stats'))
-          else:
-            stats = []
-          stats.append(res[0])
-          if len(stats) > 3:
-            del stats[0]
-          redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
-    except Exception as e:
-      res = {
-        "type": "danger",
-        "msg": str(e)
-      }
-      print(json.dumps(res, indent=4))
-  else:
-    res = {
-      "type": "danger",
-      "msg": "no or invalid id defined"
-    }
-    print(json.dumps(res, indent=4))
-
-  await asyncio.sleep(wait)
-  if stop == True:
-    # update task was called second time, stop
-    containerIds_to_update.remove(container_id)
-  else:
-    # call update task a second time
-    await get_container_stats(container_id, wait=0, stop=True)
-
-
-if os.environ['REDIS_SLAVEOF_IP'] != "":
-  redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
-else:
-  redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)
-
-async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
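
Note: the module removed above polled Redis with the blocking redis client from inside async endpoints; its replacement, main.py below, awaits an aioredis client instead. A minimal sketch of the adopted pattern, assuming aioredis >= 2.0 and the same redis-mailcow host used throughout the patch:

  # Sketch only: non-blocking Redis polling as main.py does it.
  import asyncio
  import aioredis

  async def host_stats_ready() -> bool:
    redis_client = await aioredis.from_url("redis://redis-mailcow:6379/0")
    try:
      # awaited call, so the event loop is never blocked while polling
      return bool(await redis_client.exists("host_stats"))
    finally:
      await redis_client.close()

  print(asyncio.run(host_stats_ready()))
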
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/main.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/main.py
new file mode 100644
index 0000000..f9f02b6
--- /dev/null
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/main.py
@@ -0,0 +1,260 @@
+import os
+import sys
+import uvicorn
+import json
+import uuid
+import async_timeout
+import asyncio
+import aioredis
+import aiodocker
+import docker
+import logging
+from logging.config import dictConfig
+from fastapi import FastAPI, Response, Request
+from modules.DockerApi import DockerApi
+
+dockerapi = None
+app = FastAPI()
+
+# Define Routes
+@app.get("/host/stats")
+async def get_host_update_stats():
+  global dockerapi
+
+  if dockerapi.host_stats_isUpdating == False:
+    asyncio.create_task(dockerapi.get_host_stats())
+    dockerapi.host_stats_isUpdating = True
+
+  while True:
+    if await dockerapi.redis_client.exists('host_stats'):
+      break
+    await asyncio.sleep(1.5)
+
+  stats = json.loads(await dockerapi.redis_client.get('host_stats'))
+  return Response(content=json.dumps(stats, indent=4), media_type="application/json")
+
+@app.get("/containers/{container_id}/json")
+async def get_container(container_id : str):
+  global dockerapi
+
+  if container_id and container_id.isalnum():
+    try:
+      for container in (await dockerapi.async_docker_client.containers.list()):
+        if container._id == container_id:
+          container_info = await container.show()
+          return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
+     
+      res = {
+        "type": "danger",
+        "msg": "no container found"
+      }
+      return Response(content=json.dumps(res, indent=4), media_type="application/json")
+    except Exception as e:
+      res = {
+        "type": "danger",
+        "msg": str(e)
+      }
+      return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  else:
+    res = {
+      "type": "danger",
+      "msg": "no or invalid id defined"
+    }
+    return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+@app.get("/containers/json")
+async def get_containers():
+  global dockerapi
+
+  containers = {}
+  try:
+    for container in (await dockerapi.async_docker_client.containers.list()):
+      container_info = await container.show()
+      containers.update({container_info['Id']: container_info})
+    return Response(content=json.dumps(containers, indent=4), media_type="application/json")
+  except Exception as e:
+    res = {
+      "type": "danger",
+      "msg": str(e)
+    }
+    return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+@app.post("/containers/{container_id}/{post_action}")
+async def post_containers(container_id : str, post_action : str, request: Request):
+  global dockerapi
+
+  try : 
+    request_json = await request.json()
+  except Exception as err:
+    request_json = {}
+
+  if container_id and container_id.isalnum() and post_action:
+    try:
+      """Dispatch container_post api call"""
+      if post_action == 'exec':
+        if not request_json or not 'cmd' in request_json:
+          res = {
+            "type": "danger",
+            "msg": "cmd is missing"
+          }
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+        if not request_json or not 'task' in request_json:
+          res = {
+            "type": "danger",
+            "msg": "task is missing"
+          }
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+        api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
+      else:
+        api_call_method_name = '__'.join(['container_post', str(post_action) ])
+
+      api_call_method = getattr(dockerapi, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
+
+      dockerapi.logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
+      return api_call_method(request_json, container_id=container_id)
+    except Exception as e:
+      dockerapi.logger.error("error - container_post: %s" % str(e))
+      res = {
+        "type": "danger",
+        "msg": str(e)
+      }
+      return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+  else:
+    res = {
+      "type": "danger",
+      "msg": "invalid container id or missing action"
+    }
+    return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+@app.post("/container/{container_id}/stats/update")
+async def post_container_update_stats(container_id : str):
+  global dockerapi
+
+  # start update task for container if no task is running
+  if container_id not in dockerapi.containerIds_to_update:
+    asyncio.create_task(dockerapi.get_container_stats(container_id))
+    dockerapi.containerIds_to_update.append(container_id)
+
+  while True:
+    if await dockerapi.redis_client.exists(container_id + '_stats'):
+      break
+    await asyncio.sleep(1.5)
+
+  stats = json.loads(await dockerapi.redis_client.get(container_id + '_stats'))
+  return Response(content=json.dumps(stats, indent=4), media_type="application/json")
+
+# Events
+@app.on_event("startup")
+async def startup_event():
+  global dockerapi
+
+  # Initialize a custom logger
+  logger = logging.getLogger("dockerapi")
+  logger.setLevel(logging.INFO)
+  # Configure the logger to output logs to the terminal
+  handler = logging.StreamHandler()
+  handler.setLevel(logging.INFO)
+  formatter = logging.Formatter("%(levelname)s:     %(message)s")
+  handler.setFormatter(formatter)
+  logger.addHandler(handler)
+
+  logger.info("Init APP")
+
+  # Init redis client
+  if os.environ['REDIS_SLAVEOF_IP'] != "":
+    redis_client = redis = await aioredis.from_url(f"redis://{os.environ['REDIS_SLAVEOF_IP']}:{os.environ['REDIS_SLAVEOF_PORT']}/0")
+  else:
+    redis_client = redis = await aioredis.from_url("redis://redis-mailcow:6379/0")
+
+  # Init docker clients
+  sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
+  async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
+
+  dockerapi = DockerApi(redis_client, sync_docker_client, async_docker_client, logger)
+
+  logger.info("Subscribe to redis channel")
+  # Subscribe to redis channel
+  dockerapi.pubsub = redis.pubsub()
+  await dockerapi.pubsub.subscribe("MC_CHANNEL")
+  asyncio.create_task(handle_pubsub_messages(dockerapi.pubsub))
+
+@app.on_event("shutdown")
+async def shutdown_event():
+  global dockerapi
+
+  # Close docker connections
+  dockerapi.sync_docker_client.close()
+  await dockerapi.async_docker_client.close()
+
+  # Close redis
+  await dockerapi.pubsub.unsubscribe("MC_CHANNEL")
+  await dockerapi.redis_client.close()
+
+# PubSub Handler
+async def handle_pubsub_messages(channel: aioredis.client.PubSub):
+  global dockerapi
+
+  while True:
+    try:
+      async with async_timeout.timeout(60):
+        message = await channel.get_message(ignore_subscribe_messages=True, timeout=30)
+        if message is not None:
+          # Parse message
+          data_json = json.loads(message['data'].decode('utf-8'))
+          dockerapi.logger.info(f"PubSub Received - {json.dumps(data_json)}")
+
+          # Handle api_call
+          if 'api_call' in data_json:
+            # api_call: container_post
+            if data_json['api_call'] == "container_post":
+              if 'post_action' in data_json and 'container_name' in data_json:
+                try:
+                  """Dispatch container_post api call"""
+                  request_json = {}
+                  if data_json['post_action'] == 'exec':
+                    if 'request' in data_json:
+                      request_json = data_json['request']
+                      if 'cmd' in request_json:
+                        if 'task' in request_json:
+                          api_call_method_name = '__'.join(['container_post', str(data_json['post_action']), str(request_json['cmd']), str(request_json['task']) ])
+                        else:
+                          dockerapi.logger.error("api call: task missing")
+                      else:
+                        dockerapi.logger.error("api call: cmd missing")
+                    else:
+                      dockerapi.logger.error("api call: request missing")
+                  else:
+                    api_call_method_name = '__'.join(['container_post', str(data_json['post_action'])])
+
+                  if api_call_method_name:
+                    api_call_method = getattr(dockerapi, api_call_method_name)
+                    if api_call_method:
+                      dockerapi.logger.info("api call: %s, container_name: %s" % (api_call_method_name, data_json['container_name']))
+                      api_call_method(request_json, container_name=data_json['container_name'])
+                    else:
+                      dockerapi.logger.error("api call not found: %s, container_name: %s" % (api_call_method_name, data_json['container_name']))
+                except Exception as e:
+                  dockerapi.logger.error("container_post: %s" % str(e))
+              else:
+                dockerapi.logger.error("api call: missing container_name, post_action or request")
+            else:
+              dockerapi.logger.error("Unknown PubSub received - %s" % json.dumps(data_json))
+          else:
+            dockerapi.logger.error("Unknown PubSub received - %s" % json.dumps(data_json))
+              
+        await asyncio.sleep(0.0)
+    except asyncio.TimeoutError:
+      pass
+
+if __name__ == '__main__':
+  uvicorn.run(
+    app,
+    host="0.0.0.0",
+    port=443,
+    ssl_certfile="/app/dockerapi_cert.pem",
+    ssl_keyfile="/app/dockerapi_key.pem",
+    log_level="info",
+    loop="none"
+  )
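
Note: besides HTTP, main.py now accepts API calls over Redis pub/sub: handle_pubsub_messages dispatches any JSON message on MC_CHANNEL carrying api_call, post_action and container_name (plus a request object for exec actions). A hedged publisher sketch, not part of the patch; "postfix-mailcow" stands in for a real container name:

  # Publish a container_post call for the MC_CHANNEL handler above.
  import asyncio
  import json
  import aioredis

  async def publish_restart():
    redis = await aioredis.from_url("redis://redis-mailcow:6379/0")
    payload = {
      "api_call": "container_post",
      "post_action": "restart",             # dispatches container_post__restart
      "container_name": "postfix-mailcow",  # placeholder name
    }
    await redis.publish("MC_CHANNEL", json.dumps(payload))
    await redis.close()

  asyncio.run(publish_restart())
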
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py
new file mode 100644
index 0000000..ea1c104
--- /dev/null
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py
@@ -0,0 +1,487 @@
+import psutil
+import sys
+import os
+import re
+import time
+import json
+import asyncio
+import platform
+from datetime import datetime
+from fastapi import FastAPI, Response, Request
+
+class DockerApi:
+  def __init__(self, redis_client, sync_docker_client, async_docker_client, logger):
+    self.redis_client = redis_client
+    self.sync_docker_client = sync_docker_client
+    self.async_docker_client = async_docker_client
+    self.logger = logger
+
+    self.host_stats_isUpdating = False
+    self.containerIds_to_update = []
+
+  # api call: container_post - post_action: stop
+  def container_post__stop(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(all=True, filters=filters):
+      container.stop()
+
+    res = { 'type': 'success', 'msg': 'command completed successfully'}
+    return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: start
+  def container_post__start(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(all=True, filters=filters):
+      container.start()
+
+    res = { 'type': 'success', 'msg': 'command completed successfully'}
+    return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: restart
+  def container_post__restart(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(all=True, filters=filters):
+      container.restart()
+
+    res = { 'type': 'success', 'msg': 'command completed successfully'}
+    return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: top
+  def container_post__top(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(all=True, filters=filters):
+      res = { 'type': 'success', 'msg': container.top()}
+      return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: stats
+  def container_post__stats(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(all=True, filters=filters):
+      for stat in container.stats(decode=True, stream=True):
+        res = { 'type': 'success', 'msg': stat}
+        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: exec - cmd: mailq - task: delete
+  def container_post__exec__mailq__delete(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'items' in request_json:
+      r = re.compile("^[0-9a-fA-F]+$")
+      filtered_qids = filter(r.match, request_json['items'])
+      if filtered_qids:
+        flagged_qids = ['-d %s' % i for i in filtered_qids]
+        sanitized_string = str(' '.join(flagged_qids))
+        for container in self.sync_docker_client.containers.list(filters=filters):
+          postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+          return self.exec_run_handler('generic', postsuper_r)
+  # api call: container_post - post_action: exec - cmd: mailq - task: hold
+  def container_post__exec__mailq__hold(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'items' in request_json:
+      r = re.compile("^[0-9a-fA-F]+$")
+      filtered_qids = filter(r.match, request_json['items'])
+      if filtered_qids:
+        flagged_qids = ['-h %s' % i for i in filtered_qids]
+        sanitized_string = str(' '.join(flagged_qids))
+        for container in self.sync_docker_client.containers.list(filters=filters):
+          postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+          return self.exec_run_handler('generic', postsuper_r)
+  # api call: container_post - post_action: exec - cmd: mailq - task: cat
+  def container_post__exec__mailq__cat(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'items' in request_json:
+      r = re.compile("^[0-9a-fA-F]+$")
+      filtered_qids = filter(r.match, request_json['items'])
+      if filtered_qids:
+        sanitized_string = str(' '.join(filtered_qids))
+
+        for container in self.sync_docker_client.containers.list(filters=filters):
+          postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
+        if not postcat_return:
+          postcat_return = 'err: invalid'
+        return self.exec_run_handler('utf8_text_only', postcat_return)
+  # api call: container_post - post_action: exec - cmd: mailq - task: unhold
+  def container_post__exec__mailq__unhold(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'items' in request_json:
+      r = re.compile("^[0-9a-fA-F]+$")
+      filtered_qids = filter(r.match, request_json['items'])
+      if filtered_qids:
+        flagged_qids = ['-H %s' % i for i in filtered_qids]
+        sanitized_string = str(' '.join(flagged_qids))
+        for container in self.sync_docker_client.containers.list(filters=filters):
+          postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
+          return self.exec_run_handler('generic', postsuper_r)
+  # api call: container_post - post_action: exec - cmd: mailq - task: deliver
+  def container_post__exec__mailq__deliver(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'items' in request_json:
+      r = re.compile("^[0-9a-fA-F]+$")
+      filtered_qids = filter(r.match, request_json['items'])
+      if filtered_qids:
+        flagged_qids = ['-i %s' % i for i in filtered_qids]
+        for container in self.sync_docker_client.containers.list(filters=filters):
+          for i in flagged_qids:
+            postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
+            # todo: check each exit code
+          res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")        
+  # api call: container_post - post_action: exec - cmd: mailq - task: list
+  def container_post__exec__mailq__list(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
+      return self.exec_run_handler('utf8_text_only', mailq_return)
+  # api call: container_post - post_action: exec - cmd: mailq - task: flush
+  def container_post__exec__mailq__flush(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
+      return self.exec_run_handler('generic', postqueue_r)
+  # api call: container_post - post_action: exec - cmd: mailq - task: super_delete
+  def container_post__exec__mailq__super_delete(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
+      return self.exec_run_handler('generic', postsuper_r)
+  # api call: container_post - post_action: exec - cmd: system - task: fts_rescan
+  def container_post__exec__system__fts_rescan(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'username' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
+        rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
+        if rescan_return.exit_code == 0:
+          res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+        else:
+          res = { 'type': 'warning', 'msg': 'fts_rescan error'}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+    if 'all' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
+        rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
+        if rescan_return.exit_code == 0:
+          res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+        else:
+          res = { 'type': 'warning', 'msg': 'fts_rescan error'}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: exec - cmd: system - task: df
+  def container_post__exec__system__df(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'dir' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
+        df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
+        if df_return.exit_code == 0:
+          return df_return.output.decode('utf-8').rstrip()
+        else:
+          return "0,0,0,0,0,0"
+  # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
+  def container_post__exec__system__mysql_upgrade(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
+      if sql_return.exit_code == 0:
+        matched = False
+        for line in sql_return.output.decode('utf-8').split("\n"):
+          if 'is already upgraded to' in line:
+            matched = True
+        if matched:
+          res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+        else:
+          container.restart()
+          res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+      else:
+        res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
+        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
+  def container_post__exec__system__mysql_tzinfo_to_sql(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
+      if sql_return.exit_code == 0:
+        res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
+        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+      else:
+        res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
+        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+  # api call: container_post - post_action: exec - cmd: reload - task: dovecot
+  def container_post__exec__reload__dovecot(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
+      return self.exec_run_handler('generic', reload_return)
+  # api call: container_post - post_action: exec - cmd: reload - task: postfix
+  def container_post__exec__reload__postfix(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
+      return self.exec_run_handler('generic', reload_return)
+  # api call: container_post - post_action: exec - cmd: reload - task: nginx
+  def container_post__exec__reload__nginx(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    for container in self.sync_docker_client.containers.list(filters=filters):
+      reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
+      return self.exec_run_handler('generic', reload_return)
+  # api call: container_post - post_action: exec - cmd: sieve - task: list
+  def container_post__exec__sieve__list(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'username' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
+        sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
+        return self.exec_run_handler('utf8_text_only', sieve_return)
+  # api call: container_post - post_action: exec - cmd: sieve - task: print
+  def container_post__exec__sieve__print(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'username' in request_json and 'script_name' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
+        cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]  
+        sieve_return = container.exec_run(cmd)
+        return self.exec_run_handler('utf8_text_only', sieve_return)
+  # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
+  def container_post__exec__maildir__cleanup(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'maildir' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
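+        # move the maildir into /var/vmail/_garbage (timestamp-prefixed) rather than deleting it outright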
+        sane_name = re.sub(r'\W+', '', request_json['maildir'])
+        vmail_name = request_json['maildir'].replace("'", "'\\''")
+        cmd_vmail = "if [[ -d '/var/vmail/" + vmail_name + "' ]]; then /bin/mv '/var/vmail/" + vmail_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"
+        index_name = request_json['maildir'].split("/")
+        if len(index_name) > 1:
+          index_name = index_name[1].replace("'", "'\\''") + "@" + index_name[0].replace("'", "'\\''")
+          cmd_vmail_index = "if [[ -d '/var/vmail_index/" + index_name + "' ]]; then /bin/mv '/var/vmail_index/" + index_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "_index'; fi"
+          cmd = ["/bin/bash", "-c", cmd_vmail + " && " + cmd_vmail_index]
+        else:
+          cmd = ["/bin/bash", "-c", cmd_vmail]
+        maildir_cleanup = container.exec_run(cmd, user='vmail')
+        return self.exec_run_handler('generic', maildir_cleanup)
+  # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
+  def container_post__exec__rspamd__worker_password(self, request_json, **kwargs):
+    if 'container_id' in kwargs:
+      filters = {"id": kwargs['container_id']}
+    elif 'container_name' in kwargs:
+      filters = {"name": kwargs['container_name']}
+
+    if 'raw' in request_json:
+      for container in self.sync_docker_client.containers.list(filters=filters):
+        cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
+        cmd_response = self.exec_cmd_container(container, cmd, user="_rspamd")
+
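+        # scan the rspamadm output for the bcrypt ($2$) hash, then persist it to the worker-controller password file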
+        matched = False
+        for line in cmd_response.split("\n"):
+          if '$2$' in line:
+            hash = line.strip()
+            hash_out = re.search(r'\$2\$.+$', hash).group(0)
+            rspamd_passphrase_hash = re.sub(r'[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
+            rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
+            cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
+            cmd_response = self.exec_cmd_container(container, cmd, user="_rspamd")
+            if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
+              container.restart()
+              matched = True
+        if matched:
+          res = { 'type': 'success', 'msg': 'command completed successfully' }
+          self.logger.info('success changing Rspamd password')
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+        else:
+          self.logger.error('failed changing Rspamd password')
+          res = { 'type': 'danger', 'msg': 'command did not complete' }
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
+
+  # Collect host stats
+  async def get_host_stats(self, wait=5):
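+    # collect CPU, memory, uptime and architecture of the host and cache the result in Redis ('host_stats', 10s TTL)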
+    try:
+      system_time = datetime.now()
+      host_stats = {
+        "cpu": {
+          "cores": psutil.cpu_count(),
+          "usage": psutil.cpu_percent()
+        },
+        "memory": {
+          "total": psutil.virtual_memory().total,
+          "usage": psutil.virtual_memory().percent,
+          "swap": psutil.swap_memory()
+        },
+        "uptime": time.time() - psutil.boot_time(),
+        "system_time": system_time.strftime("%d.%m.%Y %H:%M:%S"),
+        "architecture": platform.machine()
+      }
+
+      await self.redis_client.set('host_stats', json.dumps(host_stats), ex=10)
+    except Exception as e:
+      res = {
+        "type": "danger",
+        "msg": str(e)
+      }
+
+    await asyncio.sleep(wait)
+    self.host_stats_isUpdating = False
+  # Collect container stats
+  async def get_container_stats(self, container_id, wait=5, stop=False):
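+    # keep a rolling window of the last three stats samples per container in Redis (60s TTL)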
+    if container_id and container_id.isalnum():
+      try:
+        for container in (await self.async_docker_client.containers.list()):
+          if container._id == container_id:
+            res = await container.stats(stream=False)
+
+            if await self.redis_client.exists(container_id + '_stats'):
+              stats = json.loads(await self.redis_client.get(container_id + '_stats'))
+            else:
+              stats = []
+            stats.append(res[0])
+            if len(stats) > 3:
+              del stats[0]
+            await self.redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
+      except Exception as e:
+        res = {
+          "type": "danger",
+          "msg": str(e)
+        }
+    else:
+      res = {
+        "type": "danger",
+        "msg": "no or invalid id defined"
+      }
+
+    await asyncio.sleep(wait)
+    if stop:
+      # update task was called second time, stop
+      self.containerIds_to_update.remove(container_id)
+    else:
+      # call update task a second time
+      await self.get_container_stats(container_id, wait=0, stop=True)
+
+  def exec_cmd_container(self, container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
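+    # run cmd inside the container by attaching an interactive shell over a raw socket and capturing its output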
+    def recv_socket_data(c_socket, timeout):
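+      # collect output until it pauses for 'timeout' seconds; give up entirely after twice 'timeout' with no data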
+      c_socket.setblocking(0)
+      total_data=[]
+      data=''
+      begin=time.time()
+      while True:
+        if total_data and time.time()-begin > timeout:
+          break
+        elif time.time()-begin > timeout*2:
+          break
+        try:
+          data = c_socket.recv(8192)
+          if data:
+            total_data.append(data.decode('utf-8'))
+            #change the beginning time for measurement
+            begin=time.time()
+          else:
+            #sleep for sometime to indicate a gap
+            time.sleep(0.1)
+            break
+        except:
+          pass
+      return ''.join(total_data)
+      
+    try:
+      socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
+      if not cmd.endswith("\n"):
+        cmd = cmd + "\n"
+      socket.send(cmd.encode('utf-8'))
+      data = recv_socket_data(socket, timeout)
+      socket.close()
+      return data
+    except Exception as e:
+      self.logger.error("error - exec_cmd_container: %s" % str(e))
+      traceback.print_exc(file=sys.stdout)
+
+  def exec_run_handler(self, type, output):
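+    # 'generic': map the exit code to a JSON success/danger response; 'utf8_text_only': return raw output as text/plain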
+    if type == 'generic':
+      if output.exit_code == 0:
+        res = { 'type': 'success', 'msg': 'command completed successfully' }
+        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+      else:
+        res = { 'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8') }
+        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+    if type == 'utf8_text_only':
+      return Response(content=output.output.decode('utf-8'), media_type="text/plain")
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/__init__.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/__init__.py
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile
index 4e90052..6249302 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile
@@ -1,11 +1,15 @@
 FROM debian:bullseye-slim
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 ARG DEBIAN_FRONTEND=noninteractive
-ARG DOVECOT=2.3.19.1
+# renovate: datasource=github-tags depName=dovecot/core versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG DOVECOT=2.3.21
+# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG GOSU_VERSION=1.16
 ENV LC_ALL C
-ENV GOSU_VERSION 1.14
 
+
 # Add groups and users before installing Dovecot to not break compatibility
 RUN groupadd -g 5000 vmail \
   && groupadd -g 401 dovecot \
@@ -18,6 +22,7 @@
   && touch /etc/default/locale \
   && apt-get update \
   && apt-get -y --no-install-recommends install \
+  build-essential \
   apt-transport-https \
   ca-certificates \
   cpanminus \
@@ -58,6 +63,7 @@
   libproc-processtable-perl \
   libreadonly-perl \
   libregexp-common-perl \
+  libssl-dev \
   libsys-meminfo-perl \
   libterm-readkey-perl \
   libtest-deep-perl \
@@ -107,6 +113,8 @@
   && apt-get autoclean \
   && rm -rf /var/lib/apt/lists/* \
   && rm -rf /tmp/* /var/tmp/* /root/.cache/
+# imapsync dependencies
+RUN cpan Crypt::OpenSSL::PKCS12
 
 COPY trim_logs.sh /usr/local/bin/trim_logs.sh
 COPY clean_q_aged.sh /usr/local/bin/clean_q_aged.sh
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/docker-entrypoint.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/docker-entrypoint.sh
index 18746de..b2633c2 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/docker-entrypoint.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/docker-entrypoint.sh
@@ -159,7 +159,7 @@
         VALUES ("%s", 0, "%s", "%s")]], con:escape(req.service), con:escape(req.user), con:escape(req.real_rip)))
       cur:close()
       con:close()
-      return dovecot.auth.PASSDB_RESULT_OK, "password=" .. pass
+      return dovecot.auth.PASSDB_RESULT_OK, ""
     end
     row = cur:fetch (row, "a")
   end
@@ -180,13 +180,13 @@
         if tostring(req.real_rip) == "__IPV4_SOGO__" then
           cur:close()
           con:close()
-          return dovecot.auth.PASSDB_RESULT_OK, "password=" .. pass
+          return dovecot.auth.PASSDB_RESULT_OK, ""
         elseif row.has_prot_access == "1" then
           con:execute(string.format([[REPLACE INTO sasl_log (service, app_password, username, real_rip)
             VALUES ("%s", %d, "%s", "%s")]], con:escape(req.service), row.id, con:escape(req.user), con:escape(req.real_rip)))
           cur:close()
           con:close()
-          return dovecot.auth.PASSDB_RESULT_OK, "password=" .. pass
+          return dovecot.auth.PASSDB_RESULT_OK, ""
         end
       end
       row = cur:fetch (row, "a")
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/imapsync b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/imapsync
index 0d34504..de63d65 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/imapsync
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/imapsync
@@ -8492,6 +8492,7 @@
         require HTML::Entities ;
         require JSON ;
         require JSON::WebToken::Crypt::RSA ;
+        require Crypt::OpenSSL::PKCS12;
         require Crypt::OpenSSL::RSA ;
         require Encode::Byte ;
         require IO::Socket::SSL ;
@@ -8532,8 +8533,9 @@
 
             $sync->{ debug } and myprint( "Service account: $iss\nKey file: $keyfile\nKey password: $keypass\n");
 
-            # Get private key from p12 file (would be better in perl...)
-            $key = `openssl pkcs12 -in "$keyfile" -nodes -nocerts -passin pass:$keypass -nomacver`;
+            # Get private key from p12 file
+            my $pkcs12 = Crypt::OpenSSL::PKCS12->new_from_file($keyfile);
+            $key = $pkcs12->private_key($keypass);
 
             $sync->{ debug } and myprint( "Private key:\n$key\n");
         }
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile
index 7687c3c..1ebee4c 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile
@@ -1,5 +1,5 @@
 FROM alpine:3.17
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 ENV XTABLES_LIBDIR /usr/lib/xtables
 ENV PYTHON_IPTABLES_XTABLES_VERSION 12
@@ -17,13 +17,13 @@
   tzdata \
   py3-pip \
   musl-dev \
-&& pip3 install --upgrade --ignore-installed \
-  packaging \
 && pip3 install --ignore-installed --upgrade pip \
   python-iptables \
   redis \
   ipaddress \
   dnspython \
 && apk del .build-deps
 
 #  && pip3 install --upgrade pip python-iptables==0.13.0 redis ipaddress dnspython \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py
index 4eb8e7a..9767994 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py
@@ -64,28 +64,40 @@
   global f2boptions
   global quit_now
   global exit_code
+
+  f2boptions = {}
+
   if not r.get('F2B_OPTIONS'):
-    f2boptions = {}
-    f2boptions['ban_time'] = int
-    f2boptions['max_attempts'] = int
-    f2boptions['retry_window'] = int
-    f2boptions['netban_ipv4'] = int
-    f2boptions['netban_ipv6'] = int
-    f2boptions['ban_time'] = r.get('F2B_BAN_TIME') or 1800
-    f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS') or 10
-    f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW') or 600
-    f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4') or 32
-    f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6') or 128
-    r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
+    f2boptions['ban_time'] = r.get('F2B_BAN_TIME')
+    f2boptions['max_ban_time'] = r.get('F2B_MAX_BAN_TIME')
+    f2boptions['ban_time_increment'] = r.get('F2B_BAN_TIME_INCREMENT')
+    f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS')
+    f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW')
+    f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4')
+    f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6')
   else:
     try:
-      f2boptions = {}
       f2boptions = json.loads(r.get('F2B_OPTIONS'))
     except ValueError:
       print('Error loading F2B options: F2B_OPTIONS is not json')
       quit_now = True
       exit_code = 2
 
+  verifyF2boptions(f2boptions)
+  r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
+
+def verifyF2boptions(f2boptions):
+  verifyF2boption(f2boptions,'ban_time', 1800)
+  verifyF2boption(f2boptions,'max_ban_time', 10000)
+  verifyF2boption(f2boptions,'ban_time_increment', True)
+  verifyF2boption(f2boptions,'max_attempts', 10)
+  verifyF2boption(f2boptions,'retry_window', 600)
+  verifyF2boption(f2boptions,'netban_ipv4', 32)
+  verifyF2boption(f2boptions,'netban_ipv6', 128)
+
+def verifyF2boption(f2boptions, f2boption, f2bdefault):
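+  # keep the stored value when present and not None, otherwise fall back to the given default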
+  f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault
+
 def refreshF2bregex():
   global f2bregex
   global quit_now
@@ -145,6 +157,7 @@
   global lock
   refreshF2boptions()
   BAN_TIME = int(f2boptions['ban_time'])
+  BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
   MAX_ATTEMPTS = int(f2boptions['max_attempts'])
   RETRY_WINDOW = int(f2boptions['retry_window'])
   NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4'])
@@ -172,20 +185,16 @@
   net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False)
   net = str(net)
 
-  if not net in bans or time.time() - bans[net]['last_attempt'] > RETRY_WINDOW:
-    bans[net] = { 'attempts': 0 }
-    active_window = RETRY_WINDOW
-  else:
-    active_window = time.time() - bans[net]['last_attempt']
+  if not net in bans:
+    bans[net] = {'attempts': 0, 'last_attempt': 0, 'ban_counter': 0}
 
   bans[net]['attempts'] += 1
   bans[net]['last_attempt'] = time.time()
 
-  active_window = time.time() - bans[net]['last_attempt']
-
   if bans[net]['attempts'] >= MAX_ATTEMPTS:
     cur_time = int(round(time.time()))
-    logCrit('Banning %s for %d minutes' % (net, BAN_TIME / 60))
+    NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
+    logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 ))
     if type(ip) is ipaddress.IPv4Address:
       with lock:
         chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
@@ -197,7 +206,7 @@
           chain.insert_rule(rule)
     else:
       pass
-    r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + BAN_TIME)
+    r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + NET_BAN_TIME)
   else:
     logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net))
 
@@ -222,7 +231,8 @@
   r.hdel('F2B_ACTIVE_BANS', '%s' % net)
   r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
   if net in bans:
-    del bans[net]
+    bans[net]['attempts'] = 0
+    bans[net]['ban_counter'] += 1
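+    # reset attempts but keep ban_counter, so repeat offenders are banned exponentially longer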
 
 def permBan(net, unban=False):
   global lock
@@ -243,6 +253,7 @@
+        r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
       r.hdel('F2B_PERM_BANS', '%s' % net)
   else:
     pass
 
 def quit(signum, frame):
   global quit_now
@@ -302,7 +313,7 @@
               logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data']))
               ban(addr)
     except Exception as ex:
-      logWarn('Error reading log line from pubsub')
+      logWarn('Error reading log line from pubsub: %s' % ex)
       quit_now = True
       exit_code = 2
 
@@ -329,23 +340,32 @@
         chain = iptc.Chain(table, 'POSTROUTING')
         table.autocommit = False
         new_rule = get_snat4_rule()
-        for position, rule in enumerate(chain.rules):
-          match = all((
-            new_rule.get_src() == rule.get_src(),
-            new_rule.get_dst() == rule.get_dst(),
-            new_rule.target.parameters == rule.target.parameters,
-            new_rule.target.name == rule.target.name
-          ))
-          if position == 0:
-            if not match:
-              logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
-              chain.insert_rule(new_rule)
-          else:
-            if match:
-              logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
-              chain.delete_rule(rule)
+
+        if not chain.rules:
+          # if there are no rules in the chain, insert the new rule directly
+          logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
+          chain.insert_rule(new_rule)
+        else:
+          for position, rule in enumerate(chain.rules):
+            if not hasattr(rule.target, 'parameter'):
+                continue
+            match = all((
+              new_rule.get_src() == rule.get_src(),
+              new_rule.get_dst() == rule.get_dst(),
+              new_rule.target.parameters == rule.target.parameters,
+              new_rule.target.name == rule.target.name
+            ))
+            if position == 0:
+              if not match:
+                logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
+                chain.insert_rule(new_rule)
+            else:
+              if match:
+                logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
+                chain.delete_rule(rule)
+
         table.commit()
         table.autocommit = True
       except:
         print('Error running SNAT4, retrying...')
 
 def autopurge():
   while not quit_now:
     time.sleep(10)
     refreshF2boptions()
     BAN_TIME = int(f2boptions['ban_time'])
+    MAX_BAN_TIME = int(f2boptions['max_ban_time'])
+    BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
     MAX_ATTEMPTS = int(f2boptions['max_attempts'])
     QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN')
     if QUEUE_UNBAN:
@@ -361,7 +390,9 @@
         unban(str(net))
     for net in bans.copy():
       if bans[net]['attempts'] >= MAX_ATTEMPTS:
-        if time.time() - bans[net]['last_attempt'] > BAN_TIME:
+        NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
+        TIME_SINCE_LAST_ATTEMPT = time.time() - bans[net]['last_attempt']
+        if TIME_SINCE_LAST_ATTEMPT > NET_BAN_TIME or TIME_SINCE_LAST_ATTEMPT > MAX_BAN_TIME:
           unban(net)
 
 def isIpNetwork(address):
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/olefy/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/olefy/Dockerfile
index 10d63d0..06d4679 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/olefy/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/olefy/Dockerfile
@@ -1,5 +1,5 @@
 FROM alpine:3.17
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 WORKDIR /app
 
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/Dockerfile
index 93acb33..ea8c78a 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/Dockerfile
@@ -1,12 +1,18 @@
-FROM php:8.1-fpm-alpine3.17
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+FROM php:8.2-fpm-alpine3.17
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
-ENV APCU_PECL 5.1.22
-ENV IMAGICK_PECL 3.7.0
-ENV MAILPARSE_PECL 3.1.4
-ENV MEMCACHED_PECL 3.2.0
-ENV REDIS_PECL 5.3.7
-ENV COMPOSER 2.4.4
+# renovate: datasource=github-tags depName=krakjoe/apcu versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG APCU_PECL_VERSION=5.1.22
+# renovate: datasource=github-tags depName=Imagick/imagick versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG IMAGICK_PECL_VERSION=3.7.0
+# renovate: datasource=github-tags depName=php/pecl-mail-mailparse versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG MAILPARSE_PECL_VERSION=3.1.6
+# renovate: datasource=github-tags depName=php-memcached-dev/php-memcached versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG MEMCACHED_PECL_VERSION=3.2.0
+# renovate: datasource=github-tags depName=phpredis/phpredis versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG REDIS_PECL_VERSION=6.0.1
+# renovate: datasource=github-tags depName=composer/composer versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG COMPOSER_VERSION=2.6.5
 
 RUN apk add -U --no-cache autoconf \
   aspell-dev \
@@ -46,6 +52,7 @@
   libxpm-dev \
   libzip \
   libzip-dev \
+  linux-headers \
   make \
   mysql-client \
   openldap-dev \
@@ -55,11 +62,11 @@
   samba-client \
   zlib-dev \
   tzdata \
-  && pecl install mailparse-${MAILPARSE_PECL} \
-  && pecl install redis-${REDIS_PECL} \
-  && pecl install memcached-${MEMCACHED_PECL} \
-  && pecl install APCu-${APCU_PECL} \
-  && pecl install imagick-${IMAGICK_PECL} \
+  && pecl install APCu-${APCU_PECL_VERSION} \
+  && pecl install imagick-${IMAGICK_PECL_VERSION} \
+  && pecl install mailparse-${MAILPARSE_PECL_VERSION} \
+  && pecl install memcached-${MEMCACHED_PECL_VERSION} \
+  && pecl install redis-${REDIS_PECL_VERSION} \
   && docker-php-ext-enable apcu imagick memcached mailparse redis \
   && pecl clear-cache \
   && docker-php-ext-configure intl \
@@ -69,10 +76,10 @@
     --with-webp \
     --with-xpm \
     --with-avif \
-  && docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql pspell soap sockets zip bcmath gmp \
+  && docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql pspell soap sockets sysvsem zip bcmath gmp \
   && docker-php-ext-configure imap --with-imap --with-imap-ssl \
   && docker-php-ext-install -j 4 imap \
-  && curl --silent --show-error https://getcomposer.org/installer | php -- --version=${COMPOSER} \
+  && curl --silent --show-error https://getcomposer.org/installer | php -- --version=${COMPOSER_VERSION} \
   && mv composer.phar /usr/local/bin/composer \
   && chmod +x /usr/local/bin/composer \
   && apk del --purge autoconf \
@@ -93,6 +100,7 @@
     libxml2-dev \
     libxpm-dev \
     libzip-dev \
+    linux-headers \
     make \
     openldap-dev \
     pcre-dev \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/docker-entrypoint.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/docker-entrypoint.sh
index cefebcd..3737011 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/docker-entrypoint.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/phpfpm/docker-entrypoint.sh
@@ -172,6 +172,24 @@
 END;
 //
 DELIMITER ;
+DROP EVENT IF EXISTS clean_sasl_log;
+DELIMITER //
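+-- keep each user/service pair's newest sasl_log row; purge older rows and rows of deleted mailboxes after 31 days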
+CREATE EVENT clean_sasl_log
+ON SCHEDULE EVERY 1 DAY DO
+BEGIN
+  DELETE sasl_log.* FROM sasl_log
+    LEFT JOIN (
+      SELECT username, service, MAX(datetime) AS lastdate
+      FROM sasl_log
+      GROUP BY username, service
+    ) AS last ON sasl_log.username = last.username AND sasl_log.service = last.service
+    WHERE datetime < DATE_SUB(NOW(), INTERVAL 31 DAY) AND datetime < lastdate;
+  DELETE FROM sasl_log
+    WHERE username NOT IN (SELECT username FROM mailbox) AND
+    datetime < DATE_SUB(NOW(), INTERVAL 31 DAY);
+END;
+//
+DELIMITER ;
 EOF
 fi
 
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/Dockerfile
index e3c6498..bda6e07 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/Dockerfile
@@ -1,5 +1,5 @@
 FROM debian:bullseye-slim
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 ARG DEBIAN_FRONTEND=noninteractive
 ENV LC_ALL C
@@ -17,10 +17,10 @@
 	ca-certificates \
 	curl \
 	dirmngr \
-  dnsutils \
+  	dnsutils \
 	gnupg \
 	libsasl2-modules \
-  mariadb-client \
+  	mariadb-client \
 	perl \
 	postfix \
 	postfix-mysql \
@@ -32,7 +32,7 @@
 	syslog-ng \
 	syslog-ng-core \
 	syslog-ng-mod-redis \
-  tzdata \
+  	tzdata \
 	&& rm -rf /var/lib/apt/lists/* \
 	&& touch /etc/default/locale \
   && printf '#!/bin/bash\n/usr/sbin/postconf -c /opt/postfix/conf "$@"' > /usr/local/sbin/postconf \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/postfix.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/postfix.sh
index 78b070e..b3098d3 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/postfix.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/postfix/postfix.sh
@@ -393,12 +393,101 @@
     AND validity >= UNIX_TIMESTAMP()
 EOF
 
-sed -i '/User overrides/q' /opt/postfix/conf/main.cf
-echo >> /opt/postfix/conf/main.cf
-touch /opt/postfix/conf/extra.cf
-sed -i '/myhostname/d' /opt/postfix/conf/extra.cf
-echo -e "myhostname = ${MAILCOW_HOSTNAME}\n$(cat /opt/postfix/conf/extra.cf)" > /opt/postfix/conf/extra.cf
+if [ ! -f /opt/postfix/conf/dns_blocklists.cf ]; then
+  cat <<EOF > /opt/postfix/conf/dns_blocklists.cf
+# This file can be edited. 
+# Delete this file and restart postfix container to revert any changes.
+postscreen_dnsbl_sites = wl.mailspike.net=127.0.0.[18;19;20]*-2
+  hostkarma.junkemailfilter.com=127.0.0.1*-2
+  list.dnswl.org=127.0.[0..255].0*-2
+  list.dnswl.org=127.0.[0..255].1*-4
+  list.dnswl.org=127.0.[0..255].2*-6
+  list.dnswl.org=127.0.[0..255].3*-8
+  ix.dnsbl.manitu.net*2
+  bl.spamcop.net*2
+  bl.suomispam.net*2
+  hostkarma.junkemailfilter.com=127.0.0.2*3
+  hostkarma.junkemailfilter.com=127.0.0.4*2
+  hostkarma.junkemailfilter.com=127.0.1.2*1
+  backscatter.spameatingmonkey.net*2
+  bl.ipv6.spameatingmonkey.net*2
+  bl.spameatingmonkey.net*2
+  b.barracudacentral.org=127.0.0.2*7
+  bl.mailspike.net=127.0.0.2*5
+  bl.mailspike.net=127.0.0.[10;11;12]*4
+  dnsbl.sorbs.net=127.0.0.10*8
+  dnsbl.sorbs.net=127.0.0.5*6
+  dnsbl.sorbs.net=127.0.0.7*3
+  dnsbl.sorbs.net=127.0.0.8*2
+  dnsbl.sorbs.net=127.0.0.6*2
+  dnsbl.sorbs.net=127.0.0.9*2
+EOF
+fi
+DNSBL_CONFIG=$(grep -v '^#' /opt/postfix/conf/dns_blocklists.cf | grep '\S')
 
+if [ ! -z "$DNSBL_CONFIG" ]; then
+  echo -e "\e[33mChecking if ASN for your IP is listed for Spamhaus Bad ASN List...\e[0m"
+  if [ -n "$SPAMHAUS_DQS_KEY" ]; then
+    echo -e "\e[32mDetected SPAMHAUS_DQS_KEY variable from mailcow.conf...\e[0m"
+    echo -e "\e[33mUsing DQS Blocklists from Spamhaus!\e[0m"
+    SPAMHAUS_DNSBL_CONFIG=$(cat <<EOF
+  ${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.[4..7]*6
+  ${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.[10;11]*8
+  ${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.3*4
+  ${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net=127.0.0.2*3
+postscreen_dnsbl_reply_map = texthash:/opt/postfix/conf/dnsbl_reply.map
+EOF
+
+  cat <<EOF > /opt/postfix/conf/dnsbl_reply.map
+# Autogenerated by mailcow, using Spamhaus DQS reply domains
+${SPAMHAUS_DQS_KEY}.sbl.dq.spamhaus.net     sbl.spamhaus.org
+${SPAMHAUS_DQS_KEY}.xbl.dq.spamhaus.net     xbl.spamhaus.org
+${SPAMHAUS_DQS_KEY}.pbl.dq.spamhaus.net     pbl.spamhaus.org
+${SPAMHAUS_DQS_KEY}.zen.dq.spamhaus.net     zen.spamhaus.org
+${SPAMHAUS_DQS_KEY}.dbl.dq.spamhaus.net     dbl.spamhaus.org
+${SPAMHAUS_DQS_KEY}.zrd.dq.spamhaus.net     zrd.spamhaus.org
+EOF
+    )
+  else
+    if [ -f "/opt/postfix/conf/dnsbl_reply.map" ]; then
+      rm /opt/postfix/conf/dnsbl_reply.map
+    fi
+    response=$(curl --connect-timeout 15 --max-time 30 -s -o /dev/null -w "%{http_code}" "https://asn-check.mailcow.email")
+    if [ "$response" -eq 503 ]; then
+      echo -e "\e[31mThe AS of your IP is listed as a banned AS from Spamhaus!\e[0m"
+      echo -e "\e[33mNo SPAMHAUS_DQS_KEY found... Skipping Spamhaus blocklists entirely!\e[0m"
+      SPAMHAUS_DNSBL_CONFIG=""
+    elif [ "$response" -eq 200 ]; then
+      echo -e "\e[32mThe AS of your IP is NOT listed as a banned AS from Spamhaus!\e[0m"
+      echo -e "\e[33mUsing the open Spamhaus blocklists.\e[0m"
+      SPAMHAUS_DNSBL_CONFIG=$(cat <<EOF
+  zen.spamhaus.org=127.0.0.[10;11]*8
+  zen.spamhaus.org=127.0.0.[4..7]*6
+  zen.spamhaus.org=127.0.0.3*4
+  zen.spamhaus.org=127.0.0.2*3
+EOF
+      )
+
+    else
+      echo -e "\e[31mWe couldn't determine your AS... (maybe DNS/Network issue?) Response Code: $response\e[0m"
+      echo -e "\e[33mDeactivating Spamhaus DNS Blocklists to be on the safe site!\e[0m"
+      SPAMHAUS_DNSBL_CONFIG=""
+    fi
+  fi
+fi
+
+# Reset main.cf
+sed -i '/Overrides/q' /opt/postfix/conf/main.cf
+echo >> /opt/postfix/conf/main.cf
+# Append postscreen dnsbl sites to main.cf
+if [ ! -z "$DNSBL_CONFIG" ]; then
+  echo -e "${DNSBL_CONFIG}\n${SPAMHAUS_DNSBL_CONFIG}" >> /opt/postfix/conf/main.cf
+fi
+# Append user overrides
+echo -e "\n# User Overrides" >> /opt/postfix/conf/main.cf
+touch /opt/postfix/conf/extra.cf
+sed -i '/\$myhostname/! { /myhostname/d }' /opt/postfix/conf/extra.cf
+echo -e "myhostname = ${MAILCOW_HOSTNAME}\n$(cat /opt/postfix/conf/extra.cf)" > /opt/postfix/conf/extra.cf
 cat /opt/postfix/conf/extra.cf >> /opt/postfix/conf/main.cf
 
 if [ ! -f /opt/postfix/conf/custom_transport.pcre ]; then
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/rspamd/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/rspamd/Dockerfile
index 2520ddc..9d022f8 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/rspamd/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/rspamd/Dockerfile
@@ -1,5 +1,5 @@
 FROM debian:bullseye-slim
-LABEL maintainer "Andre Peters <andre.peters@tinc.gmbh>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 ARG DEBIAN_FRONTEND=noninteractive
 ARG CODENAME=bullseye
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/sogo/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/sogo/Dockerfile
index f08600a..35e0721 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/sogo/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/sogo/Dockerfile
@@ -1,10 +1,11 @@
 FROM debian:bullseye-slim
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 ARG DEBIAN_FRONTEND=noninteractive
 ARG SOGO_DEBIAN_REPOSITORY=http://packages.sogo.nu/nightly/5/debian/
+# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG GOSU_VERSION=1.16
 ENV LC_ALL C
-ENV GOSU_VERSION 1.14
 
 # Prerequisites
 RUN echo "Building from repository $SOGO_DEBIAN_REPOSITORY" \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/solr/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/solr/Dockerfile
index 0629925..dd465b4 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/solr/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/solr/Dockerfile
@@ -2,7 +2,8 @@
 
 USER root
 
-ENV GOSU_VERSION 1.11
+# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^v(?<version>.*)$
+ARG GOSU_VERSION=1.16
 
 COPY solr.sh /
 COPY solr-config-7.7.0.xml /
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/Dockerfile
index d9756d0..b190908 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/Dockerfile
@@ -1,6 +1,6 @@
 FROM alpine:3.17
 
-LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
+LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
 
 RUN apk add --update --no-cache \
 	curl \
@@ -18,6 +18,11 @@
 
 COPY docker-entrypoint.sh /docker-entrypoint.sh
 
+# healthcheck (nslookup)
+COPY healthcheck.sh /healthcheck.sh
+RUN chmod +x /healthcheck.sh
+HEALTHCHECK --interval=30s --timeout=10s CMD [ "/healthcheck.sh" ]
+
 ENTRYPOINT ["/docker-entrypoint.sh"]
 
 CMD ["/usr/sbin/unbound"]
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/healthcheck.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/healthcheck.sh
new file mode 100644
index 0000000..8c4508f
--- /dev/null
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/unbound/healthcheck.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+nslookup mailcow.email 127.0.0.1 1> /dev/null
+
+if [ $? -eq 0 ]; then
+    echo "DNS resolution is working!"
+    exit 0
+else
+    echo "DNS resolution is not working correctly..."
+    echo "Maybe check your outbound firewall, as it needs to resolve DNS over TCP AND UDP!"
+    exit 1
+fi