git subrepo commit (merge) mailcow/src/mailcow-dockerized

subrepo:
  subdir:   "mailcow/src/mailcow-dockerized"
  merged:   "2866fb80"
upstream:
  origin:   "https://github.com/mailcow/mailcow-dockerized.git"
  branch:   "master"
  commit:   "a366494c"
git-subrepo:
  version:  "0.4.6"
  origin:   "???"
  commit:   "???"
Change-Id: I26ce31f84c1ff9e905669570f9fc7eb754ce6c1c
diff --git a/mailcow/src/mailcow-dockerized/.github/workflows/integration_tests.yml b/mailcow/src/mailcow-dockerized/.github/workflows/integration_tests.yml
deleted file mode 100644
index ee083bf..0000000
--- a/mailcow/src/mailcow-dockerized/.github/workflows/integration_tests.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: mailcow Integration Tests
-
-on:
-  push:
-    branches: [ "master", "staging" ]
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  integration_tests:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Setup Ansible
-        run: |
-          export DEBIAN_FRONTEND=noninteractive
-          sudo apt-get update
-          sudo apt-get install python3 python3-pip git
-          sudo pip3 install ansible
-      - name: Prepair Test Environment
-        run: |
-          git clone https://github.com/mailcow/mailcow-integration-tests.git --branch $(curl -sL https://api.github.com/repos/mailcow/mailcow-integration-tests/releases/latest | jq -r '.tag_name') --single-branch .
-          ./fork_check.sh
-          ./ci.sh
-          ./ci-pip-requirements.sh
-        env:
-          VAULT_PW: ${{ secrets.MAILCOW_TESTS_VAULT_PW }}
-          VAULT_FILE: ${{ secrets.MAILCOW_TESTS_VAULT_FILE }}
-      - name: Start Integration Test Server
-        run: |
-          ./fork_check.sh
-          ansible-playbook mailcow-start-server.yml --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
-      - name: Setup Integration Test Server
-        run: |
-          ./fork_check.sh
-          sleep 30
-          ansible-playbook mailcow-setup-server.yml --private-key id_ssh_rsa --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
-      - name: Run Integration Tests
-        run: |
-          ./fork_check.sh
-          ansible-playbook mailcow-integration-tests.yml --private-key id_ssh_rsa --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
-      - name: Delete Integration Test Server
-        if: always()
-        run: |
-          ./fork_check.sh
-          ansible-playbook mailcow-delete-server.yml --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
diff --git a/mailcow/src/mailcow-dockerized/.github/workflows/tweet-trigger-publish-release.yml b/mailcow/src/mailcow-dockerized/.github/workflows/tweet-trigger-publish-release.yml
deleted file mode 100644
index 86cf628..0000000
--- a/mailcow/src/mailcow-dockerized/.github/workflows/tweet-trigger-publish-release.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-name: "Tweet trigger release"
-on:
-  release:
-    types: [published]
-
-jobs:
-  tweet:
-    runs-on: ubuntu-latest
-    steps:
-      - name: "Get Release Tag"
-        run:  |
-          RELEASE_TAG=$(curl https://api.github.com/repos/mailcow/mailcow-dockerized/releases/latest | jq -r '.tag_name')
-      - name: Tweet-trigger-publish-release
-        uses: mugi111/tweet-trigger-release@v1.1
-        with:
-          consumer_key: ${{ secrets.CONSUMER_KEY }}
-          consumer_secret: ${{ secrets.CONSUMER_SECRET }}
-          access_token_key: ${{ secrets.ACCESS_TOKEN_KEY }}
-          access_token_secret: ${{ secrets.ACCESS_TOKEN_SECRET }}
-          tweet_body: 'A new mailcow update has just been released! Checkout the GitHub Page for changelog and more informations: https://github.com/mailcow/mailcow-dockerized/releases/latest'
diff --git a/mailcow/src/mailcow-dockerized/README.md b/mailcow/src/mailcow-dockerized/README.md
index e95e4d9..2dacac9 100644
--- a/mailcow/src/mailcow-dockerized/README.md
+++ b/mailcow/src/mailcow-dockerized/README.md
@@ -3,7 +3,6 @@
 [![Translation status](https://translate.mailcow.email/widgets/mailcow-dockerized/-/translation/svg-badge.svg)](https://translate.mailcow.email/engage/mailcow-dockerized/)
 [![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/mailcow_email.svg?style=social&label=Follow%20%40mailcow_email)](https://twitter.com/mailcow_email)
 
-
 ## Want to support mailcow?
 
 Please [consider a support contract with Servercow](https://www.servercow.de/mailcow?lang=en#support) to support further development. _We_ support _you_ while _you_ support _us_. :)
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh
index a45f5a5..1cd456a 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/acme/acme.sh
@@ -219,6 +219,8 @@
   IPV4=$(get_ipv4)
   IPV6=$(get_ipv6)
   log_f "OK: ${IPV4}, ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}"
+  fi
+
   #########################################
   # IP and webroot challenge verification #
   SQL_DOMAINS=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT domain FROM domain WHERE backupmx=0 and active=1" -Bs)
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile
index 1fbcfda..31a332d 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/clamd/Dockerfile
@@ -1,7 +1,7 @@
 FROM clamav/clamav:1.0.3_base
 
 LABEL maintainer "The Infrastructure Company GmbH <info@servercow.de>"
-  && apk add --update --no-cache \
+
 RUN apk upgrade --no-cache \
   && apk add --update --no-cache \
   rsync \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py
index ea1c104..3ca4560 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dockerapi/modules/DockerApi.py
@@ -159,7 +159,7 @@
             postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
             # todo: check each exit code
           res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")        
+          return Response(content=json.dumps(res, indent=4), media_type="application/json")
   # api call: container_post - post_action: exec - cmd: mailq - task: list
   def container_post__exec__mailq__list(self, request_json, **kwargs):
     if 'container_id' in kwargs:
@@ -231,43 +231,10 @@
           return "0,0,0,0,0,0"
   # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
   def container_post__exec__system__mysql_upgrade(self, request_json, **kwargs):
-    if 'container_id' in kwargs:
-      filters = {"id": kwargs['container_id']}
-    elif 'container_name' in kwargs:
-      filters = {"name": kwargs['container_name']}
-
-    for container in self.sync_docker_client.containers.list(filters=filters):
-      sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
-      if sql_return.exit_code == 0:
-        matched = False
-        for line in sql_return.output.decode('utf-8').split("\n"):
-          if 'is already upgraded to' in line:
-            matched = True
-        if matched:
-          res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")
-        else:
-          container.restart()
-          res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
-          return Response(content=json.dumps(res, indent=4), media_type="application/json")
-      else:
-        res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
-        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+    return Response(content=json.dumps(dict(type='success', msg='mysql_upgrade: not touching fake MySQL', text=''), indent=4), media_type="application/json")
   # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
   def container_post__exec__system__mysql_tzinfo_to_sql(self, request_json, **kwargs):
-    if 'container_id' in kwargs:
-      filters = {"id": kwargs['container_id']}
-    elif 'container_name' in kwargs:
-      filters = {"name": kwargs['container_name']}
-
-    for container in self.sync_docker_client.containers.list(filters=filters):
-      sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
-      if sql_return.exit_code == 0:
-        res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
-        return Response(content=json.dumps(res, indent=4), media_type="application/json")
-      else:
-        res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
-        return Response(content=json.dumps(res, indent=4), media_type="application/json")
+    return Response(content=json.dumps(dict(type='success', msg='mysql_tzinfo_to_sql: not touching fake MySQL', text=''), indent=4), media_type="application/json")
   # api call: container_post - post_action: exec - cmd: reload - task: dovecot
   def container_post__exec__reload__dovecot(self, request_json, **kwargs):
     if 'container_id' in kwargs:
@@ -318,7 +285,7 @@
 
     if 'username' in request_json and 'script_name' in request_json:
       for container in self.sync_docker_client.containers.list(filters=filters):
-        cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]  
+        cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
         sieve_return = container.exec_run(cmd)
         return self.exec_run_handler('utf8_text_only', sieve_return)
   # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
@@ -462,7 +429,7 @@
         except:
           pass
       return ''.join(total_data)
-      
+
     try :
       socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
       if not cmd.endswith("\n"):
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile
index 6249302..90a6af9 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/dovecot/Dockerfile
@@ -7,7 +7,6 @@
 # renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^v(?<version>.*)$
 ARG GOSU_VERSION=1.16
 ENV LC_ALL C
-ENV GOSU_VERSION 1.14
 
 
 # Add groups and users before installing Dovecot to not break compatibility
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile
index 1ebee4c..4fcb5ee 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/Dockerfile
@@ -22,8 +22,6 @@
   redis \
   ipaddress \
   dnspython \
-  ipaddress \
-  dnspython \
 && apk del .build-deps
 
 #  && pip3 install --upgrade pip python-iptables==0.13.0 redis ipaddress dnspython \
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py
index 9767994..942b258 100644
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/netfilter/server.py
@@ -1,531 +1,523 @@
-#!/usr/bin/env python3
-
-import re
-import os
-import sys
-import time
-import atexit
-import signal
-import ipaddress
-from collections import Counter
-from random import randint
-from threading import Thread
-from threading import Lock
-import redis
-import json
-import iptc
-import dns.resolver
-import dns.exception
-
-while True:
-  try:
-    redis_slaveof_ip = os.getenv('REDIS_SLAVEOF_IP', '')
-    redis_slaveof_port = os.getenv('REDIS_SLAVEOF_PORT', '')
-    if "".__eq__(redis_slaveof_ip):
-      r = redis.StrictRedis(host=os.getenv('IPV4_NETWORK', '172.22.1') + '.249', decode_responses=True, port=6379, db=0)
-    else:
-      r = redis.StrictRedis(host=redis_slaveof_ip, decode_responses=True, port=redis_slaveof_port, db=0)
-    r.ping()
-  except Exception as ex:
-    print('%s - trying again in 3 seconds'  % (ex))
-    time.sleep(3)
-  else:
-    break
-
-pubsub = r.pubsub()
-
-WHITELIST = []
-BLACKLIST= []
-
-bans = {}
-
-quit_now = False
-exit_code = 0
-lock = Lock()
-
-def log(priority, message):
-  tolog = {}
-  tolog['time'] = int(round(time.time()))
-  tolog['priority'] = priority
-  tolog['message'] = message
-  r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False))
-  print(message)
-
-def logWarn(message):
-  log('warn', message)
-
-def logCrit(message):
-  log('crit', message)
-
-def logInfo(message):
-  log('info', message)
-
-def refreshF2boptions():
-  global f2boptions
-  global quit_now
-  global exit_code
-
-  f2boptions = {}
-
-  if not r.get('F2B_OPTIONS'):
-    f2boptions['ban_time'] = r.get('F2B_BAN_TIME')
-    f2boptions['max_ban_time'] = r.get('F2B_MAX_BAN_TIME')
-    f2boptions['ban_time_increment'] = r.get('F2B_BAN_TIME_INCREMENT')
-    f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS')
-    f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW')
-    f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4')
-    f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6')
-  else:
-    try:
-      f2boptions = json.loads(r.get('F2B_OPTIONS'))
-    except ValueError:
-      print('Error loading F2B options: F2B_OPTIONS is not json')
-      quit_now = True
-      exit_code = 2
-
-  verifyF2boptions(f2boptions)
-  r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
-
-def verifyF2boptions(f2boptions):
-  verifyF2boption(f2boptions,'ban_time', 1800)
-  verifyF2boption(f2boptions,'max_ban_time', 10000)
-  verifyF2boption(f2boptions,'ban_time_increment', True)
-  verifyF2boption(f2boptions,'max_attempts', 10)
-  verifyF2boption(f2boptions,'retry_window', 600)
-  verifyF2boption(f2boptions,'netban_ipv4', 32)
-  verifyF2boption(f2boptions,'netban_ipv6', 128)
-
-def verifyF2boption(f2boptions, f2boption, f2bdefault):
-  f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault
-
-def refreshF2bregex():
-  global f2bregex
-  global quit_now
-  global exit_code
-  if not r.get('F2B_REGEX'):
-    f2bregex = {}
-    f2bregex[1] = 'mailcow UI: Invalid password for .+ by ([0-9a-f\.:]+)'
-    f2bregex[2] = 'Rspamd UI: Invalid password by ([0-9a-f\.:]+)'
-    f2bregex[3] = 'warning: .*\[([0-9a-f\.:]+)\]: SASL .+ authentication failed: (?!.*Connection lost to authentication server).+'
-    f2bregex[4] = 'warning: non-SMTP command from .*\[([0-9a-f\.:]+)]:.+'
-    f2bregex[5] = 'NOQUEUE: reject: RCPT from \[([0-9a-f\.:]+)].+Protocol error.+'
-    f2bregex[6] = '-login: Disconnected.+ \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),'
-    f2bregex[7] = '-login: Aborted login.+ \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
-    f2bregex[8] = '-login: Aborted login.+ \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
-    f2bregex[9] = 'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked'
-    f2bregex[10] = '([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+'
-    r.set('F2B_REGEX', json.dumps(f2bregex, ensure_ascii=False))
-  else:
-    try:
-      f2bregex = {}
-      f2bregex = json.loads(r.get('F2B_REGEX'))
-    except ValueError:
-      print('Error loading F2B options: F2B_REGEX is not json')
-      quit_now = True
-      exit_code = 2
-
-if r.exists('F2B_LOG'):
-  r.rename('F2B_LOG', 'NETFILTER_LOG')
-
-def mailcowChainOrder():
-  global lock
-  global quit_now
-  global exit_code
-  while not quit_now:
-    time.sleep(10)
-    with lock:
-      filter4_table = iptc.Table(iptc.Table.FILTER)
-      filter4_table.refresh()
-      for f in [filter4_table]:
-        forward_chain = iptc.Chain(f, 'FORWARD')
-        input_chain = iptc.Chain(f, 'INPUT')
-        for chain in [forward_chain, input_chain]:
-          target_found = False
-          for position, item in enumerate(chain.rules):
-            if item.target.name == 'MAILCOW':
-              target_found = True
-              if position > 2:
-                logCrit('Error in %s chain order: MAILCOW on position %d, restarting container' % (chain.name, position))
-                quit_now = True
-                exit_code = 2
-          if not target_found:
-            logCrit('Error in %s chain: MAILCOW target not found, restarting container' % (chain.name))
-            quit_now = True
-            exit_code = 2
-
-def ban(address):
-  global lock
-  refreshF2boptions()
-  BAN_TIME = int(f2boptions['ban_time'])
-  BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
-  MAX_ATTEMPTS = int(f2boptions['max_attempts'])
-  RETRY_WINDOW = int(f2boptions['retry_window'])
-  NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4'])
-  NETBAN_IPV6 = '/' + str(f2boptions['netban_ipv6'])
-
-  ip = ipaddress.ip_address(address)
-  if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped:
-    ip = ip.ipv4_mapped
-    address = str(ip)
-  if ip.is_private or ip.is_loopback:
-    return
-
-  self_network = ipaddress.ip_network(address)
-
-  with lock:
-    temp_whitelist = set(WHITELIST)
-
-  if temp_whitelist:
-    for wl_key in temp_whitelist:
-      wl_net = ipaddress.ip_network(wl_key, False)
-      if wl_net.overlaps(self_network):
-        logInfo('Address %s is whitelisted by rule %s' % (self_network, wl_net))
-        return
-
-  net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False)
-  net = str(net)
-
-  if not net in bans:
-    bans[net] = {'attempts': 0, 'last_attempt': 0, 'ban_counter': 0}
-
-  bans[net]['attempts'] += 1
-  bans[net]['last_attempt'] = time.time()
-
-  if bans[net]['attempts'] >= MAX_ATTEMPTS:
-    cur_time = int(round(time.time()))
-    NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
-    logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 ))
-    if type(ip) is ipaddress.IPv4Address:
-      with lock:
-        chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
-        rule = iptc.Rule()
-        rule.src = net
-        target = iptc.Target(rule, "REJECT")
-        rule.target = target
-        if rule not in chain.rules:
-          chain.insert_rule(rule)
-    else:
-      pass
-    r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + NET_BAN_TIME)
-  else:
-    logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net))
-
-def unban(net):
-  global lock
-  if not net in bans:
-   logInfo('%s is not banned, skipping unban and deleting from queue (if any)' % net)
-   r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
-   return
-  logInfo('Unbanning %s' % net)
-  if type(ipaddress.ip_network(net)) is ipaddress.IPv4Network:
-    with lock:
-      chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
-      rule = iptc.Rule()
-      rule.src = net
-      target = iptc.Target(rule, "REJECT")
-      rule.target = target
-      if rule in chain.rules:
-        chain.delete_rule(rule)
-  else:
-    pass
-  r.hdel('F2B_ACTIVE_BANS', '%s' % net)
-  r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
-  if net in bans:
-    bans[net]['attempts'] = 0
-    bans[net]['ban_counter'] += 1
-
-def permBan(net, unban=False):
-  global lock
-  if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network:
-    with lock:
-      chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
-      rule = iptc.Rule()
-      rule.src = net
-      target = iptc.Target(rule, "REJECT")
-      rule.target = target
-      if rule not in chain.rules and not unban:
-        logCrit('Add host/network %s to blacklist' % net)
-        chain.insert_rule(rule)
-        r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
-      elif rule in chain.rules and unban:
-        logCrit('Remove host/network %s from blacklist' % net)
-        chain.delete_rule(rule)
-        r.hdel('F2B_PERM_BANS', '%s' % net)
-  else:
-    pass
-        r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
-
-def quit(signum, frame):
-  global quit_now
-  quit_now = True
-
-def clear():
-  global lock
-  logInfo('Clearing all bans')
-  for net in bans.copy():
-    unban(net)
-  with lock:
-    filter4_table = iptc.Table(iptc.Table.FILTER)
-    for filter_table in [filter4_table]:
-      filter_table.autocommit = False
-      forward_chain = iptc.Chain(filter_table, "FORWARD")
-      input_chain = iptc.Chain(filter_table, "INPUT")
-      mailcow_chain = iptc.Chain(filter_table, "MAILCOW")
-      if mailcow_chain in filter_table.chains:
-        for rule in mailcow_chain.rules:
-          mailcow_chain.delete_rule(rule)
-        for rule in forward_chain.rules:
-          if rule.target.name == 'MAILCOW':
-            forward_chain.delete_rule(rule)
-        for rule in input_chain.rules:
-          if rule.target.name == 'MAILCOW':
-            input_chain.delete_rule(rule)
-        filter_table.delete_chain("MAILCOW")
-      filter_table.commit()
-      filter_table.refresh()
-      filter_table.autocommit = True
-    r.delete('F2B_ACTIVE_BANS')
-    r.delete('F2B_PERM_BANS')
-    pubsub.unsubscribe()
-
-def watch():
-  logInfo('Watching Redis channel F2B_CHANNEL')
-  pubsub.subscribe('F2B_CHANNEL')
-
-  global quit_now
-  global exit_code
-
-  while not quit_now:
-    try:
-      for item in pubsub.listen():
-        refreshF2bregex()
-        for rule_id, rule_regex in f2bregex.items():
-          if item['data'] and item['type'] == 'message':
-            try:
-              result = re.search(rule_regex, item['data'])
-            except re.error:
-              result = False
-            if result:
-              addr = result.group(1)
-              ip = ipaddress.ip_address(addr)
-              if ip.is_private or ip.is_loopback:
-                continue
-              logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data']))
-              ban(addr)
-    except Exception as ex:
-      logWarn('Error reading log line from pubsub: %s' % ex)
-      quit_now = True
-      exit_code = 2
-
-def snat4(snat_target):
-  global lock
-  global quit_now
-
-  def get_snat4_rule():
-    rule = iptc.Rule()
-    rule.src = os.getenv('IPV4_NETWORK', '172.22.1') + '.0/24'
-    rule.dst = '!' + rule.src
-    target = rule.create_target("SNAT")
-    target.to_source = snat_target
-    match = rule.create_match("comment")
-    match.comment = f'{int(round(time.time()))}'
-    return rule
-
-  while not quit_now:
-    time.sleep(10)
-    with lock:
-      try:
-        table = iptc.Table('nat')
-        table.refresh()
-        chain = iptc.Chain(table, 'POSTROUTING')
-        table.autocommit = False
-        new_rule = get_snat4_rule()
-
-        if not chain.rules:
-          # if there are no rules in the chain, insert the new rule directly
-          logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
-          chain.insert_rule(new_rule)
-            new_rule.target.name == rule.target.name
-          ))
-          if position == 0:
-            if not match:
-              logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
-              chain.insert_rule(new_rule)
-          else:
-          for position, rule in enumerate(chain.rules):
-            if not hasattr(rule.target, 'parameter'):
-                continue
-            match = all((
-              new_rule.get_src() == rule.get_src(),
-              new_rule.get_dst() == rule.get_dst(),
-              new_rule.target.parameters == rule.target.parameters,
-              new_rule.target.name == rule.target.name
-            ))
-            if position == 0:
-              if not match:
-                logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
-                chain.insert_rule(new_rule)
-            else:
-              if match:
-                logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
-                chain.delete_rule(rule)
-
-        table.commit()
-        table.autocommit = True
-      except:
-        print('Error running SNAT4, retrying...')
-
-        print('Error running SNAT6, retrying...')
-def autopurge():
-  while not quit_now:
-    time.sleep(10)
-    refreshF2boptions()
-    BAN_TIME = int(f2boptions['ban_time'])
-    MAX_BAN_TIME = int(f2boptions['max_ban_time'])
-    BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
-    MAX_ATTEMPTS = int(f2boptions['max_attempts'])
-    QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN')
-    if QUEUE_UNBAN:
-      for net in QUEUE_UNBAN:
-        unban(str(net))
-    for net in bans.copy():
-      if bans[net]['attempts'] >= MAX_ATTEMPTS:
-        NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
-        TIME_SINCE_LAST_ATTEMPT = time.time() - bans[net]['last_attempt']
-        if TIME_SINCE_LAST_ATTEMPT > NET_BAN_TIME or TIME_SINCE_LAST_ATTEMPT > MAX_BAN_TIME:
-          unban(net)
-
-def isIpNetwork(address):
-  try:
-    ipaddress.ip_network(address, False)
-  except ValueError:
-    return False
-  return True
-
-
-def genNetworkList(list):
-  resolver = dns.resolver.Resolver()
-  hostnames = []
-  networks = []
-  for key in list:
-    if isIpNetwork(key):
-      networks.append(key)
-    else:
-      hostnames.append(key)
-  for hostname in hostnames:
-    hostname_ips = []
-    for rdtype in ['A', 'AAAA']:
-      try:
-        answer = resolver.resolve(qname=hostname, rdtype=rdtype, lifetime=3)
-      except dns.exception.Timeout:
-        logInfo('Hostname %s timedout on resolve' % hostname)
-        break
-      except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
-        continue
-      except dns.exception.DNSException as dnsexception:
-        logInfo('%s' % dnsexception)
-        continue
-      for rdata in answer:
-        hostname_ips.append(rdata.to_text())
-    networks.extend(hostname_ips)
-  return set(networks)
-
-def whitelistUpdate():
-  global lock
-  global quit_now
-  global WHITELIST
-  while not quit_now:
-    start_time = time.time()
-    list = r.hgetall('F2B_WHITELIST')
-    new_whitelist = []
-    if list:
-      new_whitelist = genNetworkList(list)
-    with lock:
-      if Counter(new_whitelist) != Counter(WHITELIST):
-        WHITELIST = new_whitelist
-        logInfo('Whitelist was changed, it has %s entries' % len(WHITELIST))
-    time.sleep(60.0 - ((time.time() - start_time) % 60.0))
-
-def blacklistUpdate():
-  global quit_now
-  global BLACKLIST
-  while not quit_now:
-    start_time = time.time()
-    list = r.hgetall('F2B_BLACKLIST')
-    new_blacklist = []
-    if list:
-      new_blacklist = genNetworkList(list)
-    if Counter(new_blacklist) != Counter(BLACKLIST):
-      addban = set(new_blacklist).difference(BLACKLIST)
-      delban = set(BLACKLIST).difference(new_blacklist)
-      BLACKLIST = new_blacklist
-      logInfo('Blacklist was changed, it has %s entries' % len(BLACKLIST))
-      if addban:
-        for net in addban:
-          permBan(net=net)
-      if delban:
-        for net in delban:
-          permBan(net=net, unban=True)
-    time.sleep(60.0 - ((time.time() - start_time) % 60.0))
-
-def initChain():
-  # Is called before threads start, no locking
-  print("Initializing mailcow netfilter chain")
-  # IPv4
-  if not iptc.Chain(iptc.Table(iptc.Table.FILTER), "MAILCOW") in iptc.Table(iptc.Table.FILTER).chains:
-    iptc.Table(iptc.Table.FILTER).create_chain("MAILCOW")
-  for c in ['FORWARD', 'INPUT']:
-    chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), c)
-    rule = iptc.Rule()
-    rule.src = '0.0.0.0/0'
-    rule.dst = '0.0.0.0/0'
-    target = iptc.Target(rule, "MAILCOW")
-    rule.target = target
-    if rule not in chain.rules:
-      chain.insert_rule(rule)
-
-if __name__ == '__main__':
-
-  # In case a previous session was killed without cleanup
-  clear()
-  # Reinit MAILCOW chain
-  initChain()
-
-  watch_thread = Thread(target=watch)
-  watch_thread.daemon = True
-  watch_thread.start()
-
-  if os.getenv('SNAT_TO_SOURCE') and os.getenv('SNAT_TO_SOURCE') != 'n':
-    try:
-      snat_ip = os.getenv('SNAT_TO_SOURCE')
-      snat_ipo = ipaddress.ip_address(snat_ip)
-      if type(snat_ipo) is ipaddress.IPv4Address:
-        snat4_thread = Thread(target=snat4,args=(snat_ip,))
-        snat4_thread.daemon = True
-        snat4_thread.start()
-    except ValueError:
-      print(os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address')
-
-  autopurge_thread = Thread(target=autopurge)
-  autopurge_thread.daemon = True
-  autopurge_thread.start()
-
-  mailcowchainwatch_thread = Thread(target=mailcowChainOrder)
-  mailcowchainwatch_thread.daemon = True
-  mailcowchainwatch_thread.start()
-
-  blacklistupdate_thread = Thread(target=blacklistUpdate)
-  blacklistupdate_thread.daemon = True
-  blacklistupdate_thread.start()
-
-  whitelistupdate_thread = Thread(target=whitelistUpdate)
-  whitelistupdate_thread.daemon = True
-  whitelistupdate_thread.start()
-
-  signal.signal(signal.SIGTERM, quit)
-  atexit.register(clear)
-
-  while not quit_now:
-    time.sleep(0.5)
-
-  sys.exit(exit_code)
+#!/usr/bin/env python3
+
+import re
+import os
+import sys
+import time
+import atexit
+import signal
+import ipaddress
+from collections import Counter
+from random import randint
+from threading import Thread
+from threading import Lock
+import redis
+import json
+import iptc
+import dns.resolver
+import dns.exception
+
+while True:
+  try:
+    redis_slaveof_ip = os.getenv('REDIS_SLAVEOF_IP', '')
+    redis_slaveof_port = os.getenv('REDIS_SLAVEOF_PORT', '')
+    if "".__eq__(redis_slaveof_ip):
+      r = redis.StrictRedis(host=os.getenv('IPV4_NETWORK', '172.22.1') + '.249', decode_responses=True, port=6379, db=0)
+    else:
+      r = redis.StrictRedis(host=redis_slaveof_ip, decode_responses=True, port=redis_slaveof_port, db=0)
+    r.ping()
+  except Exception as ex:
+    print('%s - trying again in 3 seconds'  % (ex))
+    time.sleep(3)
+  else:
+    break
+
+pubsub = r.pubsub()
+
+WHITELIST = []
+BLACKLIST= []
+
+bans = {}
+
+quit_now = False
+exit_code = 0
+lock = Lock()
+
+def log(priority, message):
+  tolog = {}
+  tolog['time'] = int(round(time.time()))
+  tolog['priority'] = priority
+  tolog['message'] = message
+  r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False))
+  print(message)
+
+def logWarn(message):
+  log('warn', message)
+
+def logCrit(message):
+  log('crit', message)
+
+def logInfo(message):
+  log('info', message)
+
+def refreshF2boptions():
+  global f2boptions
+  global quit_now
+  global exit_code
+
+  f2boptions = {}
+
+  if not r.get('F2B_OPTIONS'):
+    f2boptions['ban_time'] = r.get('F2B_BAN_TIME')
+    f2boptions['max_ban_time'] = r.get('F2B_MAX_BAN_TIME')
+    f2boptions['ban_time_increment'] = r.get('F2B_BAN_TIME_INCREMENT')
+    f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS')
+    f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW')
+    f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4')
+    f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6')
+  else:
+    try:
+      f2boptions = json.loads(r.get('F2B_OPTIONS'))
+    except ValueError:
+      print('Error loading F2B options: F2B_OPTIONS is not json')
+      quit_now = True
+      exit_code = 2
+
+  verifyF2boptions(f2boptions)
+  r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
+
+def verifyF2boptions(f2boptions):
+  verifyF2boption(f2boptions,'ban_time', 1800)
+  verifyF2boption(f2boptions,'max_ban_time', 10000)
+  verifyF2boption(f2boptions,'ban_time_increment', True)
+  verifyF2boption(f2boptions,'max_attempts', 10)
+  verifyF2boption(f2boptions,'retry_window', 600)
+  verifyF2boption(f2boptions,'netban_ipv4', 32)
+  verifyF2boption(f2boptions,'netban_ipv6', 128)
+
+def verifyF2boption(f2boptions, f2boption, f2bdefault):
+  f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault
+
+def refreshF2bregex():
+  global f2bregex
+  global quit_now
+  global exit_code
+  if not r.get('F2B_REGEX'):
+    f2bregex = {}
+    f2bregex[1] = 'mailcow UI: Invalid password for .+ by ([0-9a-f\.:]+)'
+    f2bregex[2] = 'Rspamd UI: Invalid password by ([0-9a-f\.:]+)'
+    f2bregex[3] = 'warning: .*\[([0-9a-f\.:]+)\]: SASL .+ authentication failed: (?!.*Connection lost to authentication server).+'
+    f2bregex[4] = 'warning: non-SMTP command from .*\[([0-9a-f\.:]+)]:.+'
+    f2bregex[5] = 'NOQUEUE: reject: RCPT from \[([0-9a-f\.:]+)].+Protocol error.+'
+    f2bregex[6] = '-login: Disconnected.+ \(auth failed, .+\): user=.*, method=.+, rip=([0-9a-f\.:]+),'
+    f2bregex[7] = '-login: Aborted login.+ \(auth failed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
+    f2bregex[8] = '-login: Aborted login.+ \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+'
+    f2bregex[9] = 'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked'
+    f2bregex[10] = '([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+'
+    r.set('F2B_REGEX', json.dumps(f2bregex, ensure_ascii=False))
+  else:
+    try:
+      f2bregex = {}
+      f2bregex = json.loads(r.get('F2B_REGEX'))
+    except ValueError:
+      print('Error loading F2B options: F2B_REGEX is not json')
+      quit_now = True
+      exit_code = 2
+
+if r.exists('F2B_LOG'):
+  r.rename('F2B_LOG', 'NETFILTER_LOG')
+
+def mailcowChainOrder():
+  global lock
+  global quit_now
+  global exit_code
+  while not quit_now:
+    time.sleep(10)
+    with lock:
+      filter4_table = iptc.Table(iptc.Table.FILTER)
+      filter4_table.refresh()
+      for f in [filter4_table]:
+        forward_chain = iptc.Chain(f, 'FORWARD')
+        input_chain = iptc.Chain(f, 'INPUT')
+        for chain in [forward_chain, input_chain]:
+          target_found = False
+          for position, item in enumerate(chain.rules):
+            if item.target.name == 'MAILCOW':
+              target_found = True
+              if position > 2:
+                logCrit('Error in %s chain order: MAILCOW on position %d, restarting container' % (chain.name, position))
+                quit_now = True
+                exit_code = 2
+          if not target_found:
+            logCrit('Error in %s chain: MAILCOW target not found, restarting container' % (chain.name))
+            quit_now = True
+            exit_code = 2
+
+def ban(address):
+  global lock
+  refreshF2boptions()
+  BAN_TIME = int(f2boptions['ban_time'])
+  BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
+  MAX_ATTEMPTS = int(f2boptions['max_attempts'])
+  RETRY_WINDOW = int(f2boptions['retry_window'])
+  NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4'])
+  NETBAN_IPV6 = '/' + str(f2boptions['netban_ipv6'])
+
+  ip = ipaddress.ip_address(address)
+  if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped:
+    ip = ip.ipv4_mapped
+    address = str(ip)
+  if ip.is_private or ip.is_loopback:
+    return
+
+  self_network = ipaddress.ip_network(address)
+
+  with lock:
+    temp_whitelist = set(WHITELIST)
+
+  if temp_whitelist:
+    for wl_key in temp_whitelist:
+      wl_net = ipaddress.ip_network(wl_key, False)
+      if wl_net.overlaps(self_network):
+        logInfo('Address %s is whitelisted by rule %s' % (self_network, wl_net))
+        return
+
+  net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False)
+  net = str(net)
+
+  if not net in bans:
+    bans[net] = {'attempts': 0, 'last_attempt': 0, 'ban_counter': 0}
+
+  bans[net]['attempts'] += 1
+  bans[net]['last_attempt'] = time.time()
+
+  if bans[net]['attempts'] >= MAX_ATTEMPTS:
+    cur_time = int(round(time.time()))
+    NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
+    logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 ))
+    if type(ip) is ipaddress.IPv4Address:
+      with lock:
+        chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
+        rule = iptc.Rule()
+        rule.src = net
+        target = iptc.Target(rule, "REJECT")
+        rule.target = target
+        if rule not in chain.rules:
+          chain.insert_rule(rule)
+    else:
+      pass
+    r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + NET_BAN_TIME)
+  else:
+    logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net))
+
+def unban(net):
+  global lock
+  if not net in bans:
+   logInfo('%s is not banned, skipping unban and deleting from queue (if any)' % net)
+   r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
+   return
+  logInfo('Unbanning %s' % net)
+  if type(ipaddress.ip_network(net)) is ipaddress.IPv4Network:
+    with lock:
+      chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
+      rule = iptc.Rule()
+      rule.src = net
+      target = iptc.Target(rule, "REJECT")
+      rule.target = target
+      if rule in chain.rules:
+        chain.delete_rule(rule)
+  else:
+    pass
+  r.hdel('F2B_ACTIVE_BANS', '%s' % net)
+  r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
+  if net in bans:
+    bans[net]['attempts'] = 0
+    bans[net]['ban_counter'] += 1
+
+def permBan(net, unban=False):
+  global lock
+  if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network:
+    with lock:
+      chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
+      rule = iptc.Rule()
+      rule.src = net
+      target = iptc.Target(rule, "REJECT")
+      rule.target = target
+      if rule not in chain.rules and not unban:
+        logCrit('Add host/network %s to blacklist' % net)
+        chain.insert_rule(rule)
+        r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time())))
+      elif rule in chain.rules and unban:
+        logCrit('Remove host/network %s from blacklist' % net)
+        chain.delete_rule(rule)
+        r.hdel('F2B_PERM_BANS', '%s' % net)
+  else:
+    pass
+
+def quit(signum, frame):
+  global quit_now
+  quit_now = True
+
+def clear():
+  global lock
+  logInfo('Clearing all bans')
+  for net in bans.copy():
+    unban(net)
+  with lock:
+    filter4_table = iptc.Table(iptc.Table.FILTER)
+    for filter_table in [filter4_table]:
+      filter_table.autocommit = False
+      forward_chain = iptc.Chain(filter_table, "FORWARD")
+      input_chain = iptc.Chain(filter_table, "INPUT")
+      mailcow_chain = iptc.Chain(filter_table, "MAILCOW")
+      if mailcow_chain in filter_table.chains:
+        for rule in mailcow_chain.rules:
+          mailcow_chain.delete_rule(rule)
+        for rule in forward_chain.rules:
+          if rule.target.name == 'MAILCOW':
+            forward_chain.delete_rule(rule)
+        for rule in input_chain.rules:
+          if rule.target.name == 'MAILCOW':
+            input_chain.delete_rule(rule)
+        filter_table.delete_chain("MAILCOW")
+      filter_table.commit()
+      filter_table.refresh()
+      filter_table.autocommit = True
+    r.delete('F2B_ACTIVE_BANS')
+    r.delete('F2B_PERM_BANS')
+    pubsub.unsubscribe()
+
+def watch():
+  logInfo('Watching Redis channel F2B_CHANNEL')
+  pubsub.subscribe('F2B_CHANNEL')
+
+  global quit_now
+  global exit_code
+
+  while not quit_now:
+    try:
+      for item in pubsub.listen():
+        refreshF2bregex()
+        for rule_id, rule_regex in f2bregex.items():
+          if item['data'] and item['type'] == 'message':
+            try:
+              result = re.search(rule_regex, item['data'])
+            except re.error:
+              result = False
+            if result:
+              addr = result.group(1)
+              ip = ipaddress.ip_address(addr)
+              if ip.is_private or ip.is_loopback:
+                continue
+              logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data']))
+              ban(addr)
+    except Exception as ex:
+      logWarn('Error reading log line from pubsub: %s' % ex)
+      quit_now = True
+      exit_code = 2
+
+def snat4(snat_target):
+  global lock
+  global quit_now
+
+  def get_snat4_rule():
+    rule = iptc.Rule()
+    rule.src = os.getenv('IPV4_NETWORK', '172.22.1') + '.0/24'
+    rule.dst = '!' + rule.src
+    target = rule.create_target("SNAT")
+    target.to_source = snat_target
+    match = rule.create_match("comment")
+    match.comment = f'{int(round(time.time()))}'
+    return rule
+
+  while not quit_now:
+    time.sleep(10)
+    with lock:
+      try:
+        table = iptc.Table('nat')
+        table.refresh()
+        chain = iptc.Chain(table, 'POSTROUTING')
+        table.autocommit = False
+        new_rule = get_snat4_rule()
+
+        if not chain.rules:
+          # if there are no rules in the chain, insert the new rule directly
+          logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
+          chain.insert_rule(new_rule)
+        else:
+          for position, rule in enumerate(chain.rules):
+            if not hasattr(rule.target, 'parameter'):
+                continue
+            match = all((
+              new_rule.get_src() == rule.get_src(),
+              new_rule.get_dst() == rule.get_dst(),
+              new_rule.target.parameters == rule.target.parameters,
+              new_rule.target.name == rule.target.name
+            ))
+            if position == 0:
+              if not match:
+                logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
+                chain.insert_rule(new_rule)
+            else:
+              if match:
+                logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
+                chain.delete_rule(rule)
+
+        table.commit()
+        table.autocommit = True
+      except:
+        print('Error running SNAT4, retrying...')
+
+def autopurge():
+  while not quit_now:
+    time.sleep(10)
+    refreshF2boptions()
+    BAN_TIME = int(f2boptions['ban_time'])
+    MAX_BAN_TIME = int(f2boptions['max_ban_time'])
+    BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
+    MAX_ATTEMPTS = int(f2boptions['max_attempts'])
+    QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN')
+    if QUEUE_UNBAN:
+      for net in QUEUE_UNBAN:
+        unban(str(net))
+    for net in bans.copy():
+      if bans[net]['attempts'] >= MAX_ATTEMPTS:
+        NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
+        TIME_SINCE_LAST_ATTEMPT = time.time() - bans[net]['last_attempt']
+        if TIME_SINCE_LAST_ATTEMPT > NET_BAN_TIME or TIME_SINCE_LAST_ATTEMPT > MAX_BAN_TIME:
+          unban(net)
+
+def isIpNetwork(address):
+  try:
+    ipaddress.ip_network(address, False)
+  except ValueError:
+    return False
+  return True
+
+
+def genNetworkList(list):
+  resolver = dns.resolver.Resolver()
+  hostnames = []
+  networks = []
+  for key in list:
+    if isIpNetwork(key):
+      networks.append(key)
+    else:
+      hostnames.append(key)
+  for hostname in hostnames:
+    hostname_ips = []
+    for rdtype in ['A', 'AAAA']:
+      try:
+        answer = resolver.resolve(qname=hostname, rdtype=rdtype, lifetime=3)
+      except dns.exception.Timeout:
+        logInfo('Hostname %s timedout on resolve' % hostname)
+        break
+      except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
+        continue
+      except dns.exception.DNSException as dnsexception:
+        logInfo('%s' % dnsexception)
+        continue
+      for rdata in answer:
+        hostname_ips.append(rdata.to_text())
+    networks.extend(hostname_ips)
+  return set(networks)
+
+def whitelistUpdate():
+  global lock
+  global quit_now
+  global WHITELIST
+  while not quit_now:
+    start_time = time.time()
+    list = r.hgetall('F2B_WHITELIST')
+    new_whitelist = []
+    if list:
+      new_whitelist = genNetworkList(list)
+    with lock:
+      if Counter(new_whitelist) != Counter(WHITELIST):
+        WHITELIST = new_whitelist
+        logInfo('Whitelist was changed, it has %s entries' % len(WHITELIST))
+    time.sleep(60.0 - ((time.time() - start_time) % 60.0))
+
+def blacklistUpdate():
+  global quit_now
+  global BLACKLIST
+  while not quit_now:
+    start_time = time.time()
+    list = r.hgetall('F2B_BLACKLIST')
+    new_blacklist = []
+    if list:
+      new_blacklist = genNetworkList(list)
+    if Counter(new_blacklist) != Counter(BLACKLIST):
+      addban = set(new_blacklist).difference(BLACKLIST)
+      delban = set(BLACKLIST).difference(new_blacklist)
+      BLACKLIST = new_blacklist
+      logInfo('Blacklist was changed, it has %s entries' % len(BLACKLIST))
+      if addban:
+        for net in addban:
+          permBan(net=net)
+      if delban:
+        for net in delban:
+          permBan(net=net, unban=True)
+    time.sleep(60.0 - ((time.time() - start_time) % 60.0))
+
+def initChain():
+  # Is called before threads start, no locking
+  print("Initializing mailcow netfilter chain")
+  # IPv4
+  if not iptc.Chain(iptc.Table(iptc.Table.FILTER), "MAILCOW") in iptc.Table(iptc.Table.FILTER).chains:
+    iptc.Table(iptc.Table.FILTER).create_chain("MAILCOW")
+  for c in ['FORWARD', 'INPUT']:
+    chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), c)
+    rule = iptc.Rule()
+    rule.src = '0.0.0.0/0'
+    rule.dst = '0.0.0.0/0'
+    target = iptc.Target(rule, "MAILCOW")
+    rule.target = target
+    if rule not in chain.rules:
+      chain.insert_rule(rule)
+
+if __name__ == '__main__':
+
+  # In case a previous session was killed without cleanup
+  clear()
+  # Reinit MAILCOW chain
+  initChain()
+
+  watch_thread = Thread(target=watch)
+  watch_thread.daemon = True
+  watch_thread.start()
+
+  if os.getenv('SNAT_TO_SOURCE') and os.getenv('SNAT_TO_SOURCE') != 'n':
+    try:
+      snat_ip = os.getenv('SNAT_TO_SOURCE')
+      snat_ipo = ipaddress.ip_address(snat_ip)
+      if type(snat_ipo) is ipaddress.IPv4Address:
+        snat4_thread = Thread(target=snat4,args=(snat_ip,))
+        snat4_thread.daemon = True
+        snat4_thread.start()
+    except ValueError:
+      print(os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address')
+
+  autopurge_thread = Thread(target=autopurge)
+  autopurge_thread.daemon = True
+  autopurge_thread.start()
+
+  mailcowchainwatch_thread = Thread(target=mailcowChainOrder)
+  mailcowchainwatch_thread.daemon = True
+  mailcowchainwatch_thread.start()
+
+  blacklistupdate_thread = Thread(target=blacklistUpdate)
+  blacklistupdate_thread.daemon = True
+  blacklistupdate_thread.start()
+
+  whitelistupdate_thread = Thread(target=whitelistUpdate)
+  whitelistupdate_thread.daemon = True
+  whitelistupdate_thread.start()
+
+  signal.signal(signal.SIGTERM, quit)
+  atexit.register(clear)
+
+  while not quit_now:
+    time.sleep(0.5)
+
+  sys.exit(exit_code)
diff --git a/mailcow/src/mailcow-dockerized/data/Dockerfiles/watchdog/watchdog.sh b/mailcow/src/mailcow-dockerized/data/Dockerfiles/watchdog/watchdog.sh
index 086e326..231d0ec 100755
--- a/mailcow/src/mailcow-dockerized/data/Dockerfiles/watchdog/watchdog.sh
+++ b/mailcow/src/mailcow-dockerized/data/Dockerfiles/watchdog/watchdog.sh
@@ -269,8 +269,8 @@
     touch /tmp/unbound-mailcow; echo "$(tail -50 /tmp/unbound-mailcow)" > /tmp/unbound-mailcow
     host_ip=$(get_container_ip unbound-mailcow)
     err_c_cur=${err_count}
-    /usr/bin/nslookup -sil stackoverflow.com "${host_ip}" 2>> /tmp/unbound-mailcow 1>&2; err_count=$(( ${err_count} + $? ))
-    DNSSEC=$(dig com +dnssec "@${host_ip}" | egrep 'flags:.+ad')
+    /usr/lib/nagios/plugins/check_dns -s ${host_ip} -H stackoverflow.com 2>> /tmp/unbound-mailcow 1>&2; err_count=$(( ${err_count} + $? ))
+    DNSSEC=$(dig com +dnssec | egrep 'flags:.+ad')
     if [[ -z ${DNSSEC} ]]; then
       echo "DNSSEC failure" 2>> /tmp/unbound-mailcow 1>&2
       err_count=$(( ${err_count} + 1))
diff --git a/mailcow/src/mailcow-dockerized/data/conf/postfix/postscreen_access.cidr b/mailcow/src/mailcow-dockerized/data/conf/postfix/postscreen_access.cidr
index 43cfb20..61aac64 100644
--- a/mailcow/src/mailcow-dockerized/data/conf/postfix/postscreen_access.cidr
+++ b/mailcow/src/mailcow-dockerized/data/conf/postfix/postscreen_access.cidr
@@ -7,12 +7,12 @@
 2a01:111:f403::/49	permit
 2a01:111:f403:c000::/51	permit
 2a01:111:f403:f000::/52	permit
-2a01:4180:4051:0800::/64	permit
 2a02:a60:0:5::/64	permit
 2c0f:fb50:4000::/36	permit
 2.207.151.53	permit
 3.14.230.16	permit
 3.70.123.177	permit
+3.93.157.0/24	permit
 3.129.120.190	permit
 3.210.190.0/24	permit
 8.20.114.31	permit
@@ -36,14 +36,12 @@
 15.200.201.185	permit
 17.57.155.0/24	permit
 17.57.156.0/24	permit
-17.57.155.0/24	permit
-17.57.156.0/24	permit
 17.58.0.0/16	permit
 18.156.89.250	permit
 18.157.243.190	permit
 18.194.95.56	permit
 18.198.96.88	permit
-20.47.149.138	permit
+18.208.124.128/25	permit
 18.216.232.154	permit
 18.234.1.244	permit
 18.236.40.242	permit
@@ -114,7 +112,6 @@
 37.218.251.62	permit
 39.156.163.64/29	permit
 40.71.187.0/24	permit
-40.77.102.222	permit
 40.92.0.0/15	permit
 40.92.0.0/16	permit
 40.107.0.0/16	permit
@@ -125,7 +122,7 @@
 44.209.42.157	permit
 44.236.56.93	permit
 44.238.220.251	permit
-43.228.184.0/22	permit
+46.19.168.0/23	permit
 46.226.48.0/21	permit
 46.228.36.37	permit
 46.228.36.38/31	permit
@@ -191,12 +188,6 @@
 51.137.58.21	permit
 51.140.75.55	permit
 51.144.100.179	permit
-51.4.80.0/27	permit
-51.5.72.0/24	permit
-51.5.80.0/27	permit
-51.137.58.21	permit
-51.140.75.55	permit
-51.144.100.179	permit
 52.5.230.59	permit
 52.27.5.72	permit
 52.27.28.47	permit
@@ -241,9 +232,14 @@
 52.247.53.144	permit
 52.250.107.196	permit
 52.250.126.174	permit
-52.251.55.143	permit
 54.90.148.255	permit
 54.172.97.247	permit
+54.174.52.0/24	permit
+54.174.53.128/30	permit
+54.174.57.0/24	permit
+54.174.59.0/24	permit
+54.174.60.0/23	permit
+54.174.63.0/24	permit
 54.186.193.102	permit
 54.191.223.56	permit
 54.194.61.95	permit
@@ -276,6 +272,7 @@
 62.253.227.114	permit
 63.32.13.159	permit
 63.80.14.0/23	permit
+63.111.28.137	permit
 63.128.21.0/24	permit
 63.143.57.128/25	permit
 63.143.59.128/25	permit
@@ -283,9 +280,8 @@
 64.20.241.45	permit
 64.69.212.0/24	permit
 64.71.149.160/28	permit
-64.71.149.160/28	permit
 64.79.155.0/24	permit
-64.89.44.85	permit
+64.79.155.192	permit
 64.79.155.193	permit
 64.79.155.205	permit
 64.79.155.206	permit
@@ -316,7 +312,7 @@
 64.207.219.71	permit
 64.207.219.72	permit
 64.207.219.73	permit
-64.207.219.78	permit
+64.207.219.75	permit
 64.207.219.77	permit
 64.207.219.78	permit
 64.207.219.79	permit
@@ -367,6 +363,9 @@
 66.111.4.229	permit
 66.111.4.230	permit
 66.119.150.192/26	permit
+66.135.202.0/27	permit
+66.135.215.0/24	permit
+66.135.222.1	permit
 66.162.193.226/31	permit
 66.163.184.0/21	permit
 66.163.184.0/24	permit
@@ -399,6 +398,7 @@
 66.211.168.230/31	permit
 66.211.170.86/31	permit
 66.211.170.88/29	permit
+66.211.184.0/23	permit
 66.218.74.64/30	permit
 66.218.74.68/31	permit
 66.218.75.112/30	permit
@@ -471,6 +471,7 @@
 68.142.230.76/31	permit
 68.142.230.78	permit
 68.232.140.138	permit
+68.232.157.143	permit
 68.232.192.0/20	permit
 69.63.178.128/25	permit
 69.63.181.0/24	permit
@@ -490,6 +491,7 @@
 70.37.151.128/25	permit
 70.42.149.0/24	permit
 70.42.149.35	permit
+72.3.237.64/28	permit
 72.14.192.0/18	permit
 72.21.192.0/19	permit
 72.21.217.142	permit
@@ -583,6 +585,9 @@
 74.112.67.243	permit
 74.125.0.0/16	permit
 74.202.227.40	permit
+74.208.4.192/26	permit
+74.208.5.64/26	permit
+74.208.122.0/26	permit
 74.209.250.0/24	permit
 76.223.128.0/19	permit
 76.223.176.0/20	permit
@@ -609,8 +614,12 @@
 77.238.189.148/30	permit
 81.7.169.128/25	permit
 81.223.46.0/27	permit
+82.165.159.0/24	permit
+82.165.159.0/26	permit
 82.165.229.31	permit
+82.165.229.130	permit
 82.165.230.21	permit
+82.165.230.22	permit
 84.116.36.0/24	permit
 85.158.136.0/21	permit
 86.61.88.25	permit
@@ -652,9 +661,9 @@
 87.248.117.201	permit
 87.248.117.202	permit
 87.248.117.205	permit
-87.252.219.254	permit
 87.253.232.0/21	permit
 89.22.108.0/24	permit
+91.211.240.0/22	permit
 94.245.112.0/27	permit
 94.245.112.10/31	permit
 95.131.104.0/21	permit
@@ -1170,7 +1179,7 @@
 99.78.197.208/28	permit
 103.2.140.0/22	permit
 103.9.96.0/22	permit
-103.47.204.0/22	permit
+103.28.42.0/24	permit
 103.47.204.0/22	permit
 103.151.192.0/23	permit
 103.168.172.128/27	permit
@@ -1183,7 +1192,6 @@
 104.130.96.0/28	permit
 104.130.122.0/23	permit
 104.214.25.77	permit
-104.215.186.3	permit
 104.245.209.192/26	permit
 106.10.144.64/27	permit
 106.10.144.100/31	permit
@@ -1424,6 +1432,7 @@
 139.138.46.219	permit
 139.138.57.55	permit
 139.138.58.119	permit
+139.180.17.0/24	permit
 141.148.159.229	permit
 141.193.32.0/23	permit
 143.55.224.0/21	permit
@@ -1444,6 +1453,7 @@
 146.20.191.0/24	permit
 146.20.215.0/24	permit
 146.20.215.182	permit
+146.88.28.0/24	permit
 147.160.158.0/24	permit
 147.243.1.47	permit
 147.243.1.48	permit
@@ -1481,7 +1491,7 @@
 157.255.1.64/29	permit
 158.101.211.207	permit
 158.120.80.0/21	permit
-159.92.159.0/24	permit
+158.247.16.0/20	permit
 159.92.157.0/24	permit
 159.92.157.16	permit
 159.92.157.17	permit
@@ -1504,6 +1514,7 @@
 161.71.32.0/19	permit
 161.71.64.0/20	permit
 162.247.216.0/22	permit
+163.47.180.0/22	permit
 163.47.180.0/23	permit
 163.114.130.16	permit
 163.114.132.120	permit
@@ -1576,13 +1587,13 @@
 182.50.78.64/28	permit
 183.240.219.64/29	permit
 185.4.120.0/23	permit
+185.4.122.0/24	permit
 185.12.80.0/22	permit
 185.58.84.93	permit
-185.58.86.0/24	permit
-185.72.128.80	permit
 185.80.93.204	permit
 185.80.93.227	permit
 185.80.95.31	permit
+185.90.20.0/22	permit
 185.189.236.0/22	permit
 185.211.120.0/22	permit
 185.250.236.0/22	permit
@@ -1656,18 +1667,17 @@
 192.254.113.10	permit
 192.254.113.101	permit
 192.254.114.176	permit
-193.7.207.0/25	permit
 193.109.254.0/23	permit
 193.122.128.100	permit
 193.123.56.63	permit
 194.19.134.0/25	permit
+194.64.234.128/27	permit
 194.64.234.129	permit
-194.104.109.0/24	permit
-194.104.111.0/24	permit
 194.106.220.0/23	permit
+194.113.24.0/22	permit
 194.154.193.192/27	permit
 195.4.92.0/23	permit
-195.234.109.226	permit
+195.54.172.0/23	permit
 195.234.109.226	permit
 195.245.230.0/23	permit
 198.2.128.0/18	permit
@@ -1685,7 +1695,6 @@
 198.61.254.0/23	permit
 198.61.254.21	permit
 198.61.254.231	permit
-198.74.56.28	permit
 198.178.234.57	permit
 198.244.48.0/20	permit
 198.244.60.0/22	permit
@@ -1713,8 +1722,10 @@
 202.177.148.110	permit
 203.31.36.0/22	permit
 203.32.4.25	permit
+203.55.21.0/24	permit
 203.81.17.0/24	permit
 203.122.32.250	permit
+203.145.57.160/27	permit
 203.188.194.32	permit
 203.188.194.151	permit
 203.188.194.203	permit
@@ -1749,10 +1760,12 @@
 203.209.230.76/31	permit
 204.11.168.0/21	permit
 204.13.11.48/29	permit
+204.13.11.48/30	permit
 204.14.232.0/21	permit
 204.14.232.64/28	permit
 204.14.234.64/28	permit
 204.29.186.0/23	permit
+204.75.142.0/24	permit
 204.79.197.212	permit
 204.92.114.187	permit
 204.92.114.203	permit
@@ -1766,6 +1779,7 @@
 205.201.131.128/25	permit
 205.201.134.128/25	permit
 205.201.136.0/23	permit
+205.201.137.229	permit
 205.201.139.0/24	permit
 205.207.104.0/22	permit
 205.220.167.17	permit
@@ -1842,6 +1856,7 @@
 208.71.42.212/31	permit
 208.71.42.214	permit
 208.72.249.240/29	permit
+208.74.204.0/22	permit
 208.74.204.9	permit
 208.75.120.0/22	permit
 208.75.121.246	permit
@@ -1864,7 +1879,6 @@
 209.67.98.46	permit
 209.67.98.59	permit
 209.85.128.0/17	permit
-212.25.240.88	permit
 212.82.96.0/24	permit
 212.82.96.32/27	permit
 212.82.96.64/29	permit
@@ -1905,9 +1919,12 @@
 212.82.111.228/31	permit
 212.82.111.230	permit
 212.123.28.40	permit
-213.167.75.0/25	permit
-213.167.81.0/25	permit
+212.227.15.0/24	permit
+212.227.15.0/25	permit
+212.227.17.0/27	permit
+212.227.126.128/25	permit
 213.46.255.0/24	permit
+213.165.64.0/23	permit
 213.199.128.139	permit
 213.199.128.145	permit
 213.199.138.181	permit
@@ -1947,6 +1964,9 @@
 216.58.192.0/19	permit
 216.66.217.240/29	permit
 216.71.138.33	permit
+216.71.152.207	permit
+216.71.154.29	permit
+216.71.155.89	permit
 216.74.162.13	permit
 216.74.162.14	permit
 216.82.240.0/20	permit
@@ -1956,6 +1976,9 @@
 216.109.114.0/24	permit
 216.109.114.32/27	permit
 216.109.114.64/29	permit
+216.113.160.0/24	permit
+216.113.172.0/25	permit
+216.113.175.0/24	permit
 216.128.126.97	permit
 216.136.162.65	permit
 216.136.162.120/29	permit
@@ -1966,6 +1989,9 @@
 216.203.33.178/31	permit
 216.205.24.0/24	permit
 216.239.32.0/19	permit
+217.72.192.64/26	permit
+217.72.192.248/29	permit
+217.72.207.0/27	permit
 217.77.141.52	permit
 217.77.141.59	permit
 217.175.194.0/24	permit
diff --git a/mailcow/src/mailcow-dockerized/data/conf/rspamd/local.d/composites.conf b/mailcow/src/mailcow-dockerized/data/conf/rspamd/local.d/composites.conf
index e6fa24c..cde34b5 100644
--- a/mailcow/src/mailcow-dockerized/data/conf/rspamd/local.d/composites.conf
+++ b/mailcow/src/mailcow-dockerized/data/conf/rspamd/local.d/composites.conf
@@ -68,20 +68,6 @@
 ENCRYPTED_CHAT {
   expression = "CHAT_VERSION_HEADER & ENCRYPTED_PGP";
 }
-# Remove bayes ham if fuzzy denied
-FUZZY_HAM_MISMATCH {
-  expression = "( -FUZZY_DENIED | -MAILCOW_FUZZY_DENIED | -LOCAL_FUZZY_DENIED ) & ( ^BAYES_HAM | ^NEURAL_HAM_LONG | ^NEURAL_HAM_SHORT )";
-}
-# Remove bayes spam if local fuzzy white
-FUZZY_SPAM_MISMATCH {
-  expression = "( -LOCAL_FUZZY_WHITE ) & ( ^BAYES_SPAM | ^NEURAL_SPAM_LONG | ^NEURAL_SPAM_SHORT )";
-}
-WL_FWD_HOST {
-  expression = "-WHITELISTED_FWD_HOST & (^g+:rbl | ^g+:policies | ^g+:hfilter | ^g:neural)";
-}
-ENCRYPTED_CHAT {
-  expression = "CHAT_VERSION_HEADER & ENCRYPTED_PGP";
-}
 
 CLAMD_SPAM_FOUND {
   expression = "CLAM_SECI_SPAM & !MAILCOW_WHITE";
diff --git a/mailcow/src/mailcow-dockerized/data/web/admin.php b/mailcow/src/mailcow-dockerized/data/web/admin.php
index 93a6c22..ebddb7b 100644
--- a/mailcow/src/mailcow-dockerized/data/web/admin.php
+++ b/mailcow/src/mailcow-dockerized/data/web/admin.php
@@ -64,6 +64,7 @@
     $dkim_blind_domains[$blind]['dkim'] = $dkim;
   }
 }
+
 // rsettings
 $rsettings = array_map(function ($rsetting){
   $rsetting['details'] = rsettings('details', $rsetting['id']);
@@ -78,6 +79,7 @@
     'data' => file_get_contents('/rspamd_custom_maps/' . $rspamd_regex_map)
   ];
 }
+
 // cors settings
 $cors_settings = cors('get');
 $cors_settings['allowed_origins'] = str_replace(", ", "\n", $cors_settings['allowed_origins']);
diff --git a/mailcow/src/mailcow-dockerized/data/web/api/openapi.yaml b/mailcow/src/mailcow-dockerized/data/web/api/openapi.yaml
index 18e8093..1d26216 100644
--- a/mailcow/src/mailcow-dockerized/data/web/api/openapi.yaml
+++ b/mailcow/src/mailcow-dockerized/data/web/api/openapi.yaml
@@ -2031,7 +2031,7 @@
                 - domain.tld
                 - domain2.tld
               properties:
-                items: 
+                items:
                   type: array
                   items:
                     type: string
diff --git a/mailcow/src/mailcow-dockerized/data/web/css/build/013-mailcow.css b/mailcow/src/mailcow-dockerized/data/web/css/build/013-mailcow.css
deleted file mode 100644
index 374d484..0000000
--- a/mailcow/src/mailcow-dockerized/data/web/css/build/013-mailcow.css
+++ /dev/null
@@ -1,372 +0,0 @@
-@font-face {
-  font-family: 'Noto Sans';
-  font-style: normal;
-  font-weight: 400;
-  src: local(''),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-regular.woff2') format('woff2'),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-regular.woff') format('woff');
-}
-
-@font-face {
-  font-family: 'Noto Sans';
-  font-style: normal;
-  font-weight: 700;
-  src: local(''),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-700.woff2') format('woff2'),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-700.woff') format('woff');
-}
-
-@font-face {
-  font-family: 'Noto Sans';
-  font-style: italic;
-  font-weight: 400;
-  src: local(''),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-italic.woff2') format('woff2'),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-italic.woff') format('woff');
-}
-
-@font-face {
-  font-family: 'Noto Sans';
-  font-style: italic;
-  font-weight: 700;
-  src: local(''),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-700italic.woff2') format('woff2'),
-       url('/fonts/noto-sans-v12-latin_greek_cyrillic-700italic.woff') format('woff');
-}
-#maxmsgsize { min-width: 80px; }
-#slider1 .slider-selection {
-	background: #FFD700;
-}
-#slider1 .slider-track-high {
-	background: #FF4500;
-}
-#slider1 .slider-track-low {
-  background: #66CD00;
-}
-.striped:nth-child(odd) {
-  background-color: #fff;
-}
-.striped:nth-child(even) {
-  background-color: #fafafa;
-  border:1px solid white;
-}
-.btn {
-  text-transform: none;
-}
-.btn * {
-  pointer-events: none;
-}
-.textarea-code {
-  font-family:Consolas,Monaco,Lucida Console,Liberation Mono,DejaVu Sans Mono,Bitstream Vera Sans Mono,Courier New, monospace;
-  background:transparent !important;
-}
-.navbar-nav {
-  margin: 0;
-}
-.navbar-nav .nav-item {
-  flex-direction: column;
-  display: flex;
-  padding: 0 10px !important;
-}
-.navbar-nav .nav-link {
-  height: 44px;
-  display: flex;
-  align-items: center;
-  padding: 0 10px !important;
-}
-.navbar-fixed-bottom .navbar-collapse, 
-.navbar-fixed-top .navbar-collapse {
-  max-height: 1000px
-}
-.bi {
-  display: inline-block;
-  font-size: 12pt;
-}
-.btn .bi {
-  display: inline-block;
-  font-size: inherit;
-}
-.btn-group-xs > .btn, .btn-xs {
-  padding: .25rem .4rem;
-  font-size: .875rem;
-  line-height: 1rem;
-  border-radius: .2rem;
-}
-.icon-spin {
-  animation-name: spin;
-  animation-duration: 2000ms;
-  animation-iteration-count: infinite;
-  animation-timing-function: linear;
-  -webkit-animation: spin 2000ms infinite linear;
-}
-.dropdown-menu {
-  font-size: 0.9rem;
-}
-@-webkit-keyframes spin {
-  0% {
-    -webkit-transform: rotate(0deg);
-    transform: rotate(0deg);
-  }
-  100% {
-    -webkit-transform: rotate(359deg);
-    transform: rotate(359deg);
-  }
-}
-@keyframes spin {
-  0% {
-    -webkit-transform: rotate(0deg);
-    transform: rotate(0deg);
-  }
-  100% {
-    -webkit-transform: rotate(359deg);
-    transform: rotate(359deg);
-  }
-}
-@keyframes blink {
-  50% { 
-    color: transparent 
-  }
-}
-.loader-dot { 
-  animation: 1s blink infinite 
-}
-.loader-dot:nth-child(2) { 
-  animation-delay: 250ms 
-}
-.loader-dot:nth-child(3) { 
-  animation-delay: 500ms 
-}
-
-pre{white-space:pre-wrap;white-space:-moz-pre-wrap;white-space:-o-pre-wrap;word-wrap:break-word;}
-/* Fix modal moving content left */
-body.modal-open {
-  overflow: inherit;
-  padding-right: inherit !important;
-}
-body {
-  font-family: "Noto Sans","Helvetica Neue",Helvetica,Arial,sans-serif;
-  font-size: 10.5pt;
-  line-height: 1.5;
-}
-html {
-  font-family: "Noto Sans","Helvetica Neue",Helvetica,Arial,sans-serif;
-  font-size: 10.5pt;
-  line-height: 1.5;
-}
-#mailcow-alert {
-  position: fixed;
-  bottom: 8px;
-  right: 25px;
-  min-width: 350px;
-  max-width: 550px;
-  z-index: 2000;
-}
-.input-group-sm .btn { margin-top: 0 !important }
-legend {
-  -webkit-user-select: none;
-  -moz-user-select: none;
-  -ms-user-select: none;
-  -o-user-select: none;
-  user-select: none;
-  font-size: 1.2rem;
-}
-.navbar .navbar-brand {
-  padding-top: 5px;
-}
-.navbar .navbar-brand img {
-  height: 40px;
-}
-.mailcow-logo img {
-  max-width: 250px;
-}
-.lang-link-disabled a  {
-  pointer-events: none;
-}
-.lang-link-disabled  {
-  cursor: not-allowed;
-}
-.overlay {
-  background: #fff;
-  position: absolute;
-  z-index: 10000;
-  top: 0; right: 0; bottom: 0; left: 0;
-  opacity: 0.7;
-}
-.bootstrap-select.btn-group .no-results {
-  display: none;
-}
-.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary {
-  color: rgb(197, 197, 197) !important;
-}
-.haveibeenpwned {
-  cursor: pointer;
-  -webkit-user-select: none;  
-  -moz-user-select: none;    
-  -ms-user-select: none;      
-  user-select: none;
-}
-.full-width-select {
-  width: 100%!important;  
-}
-.tooltip {
-  font-family: inherit;
-  font-size: 0.8rem;
-}
-.progress-bar {
-  font-size: 0.8rem;
-  line-height: 14px;
-}
-.footer {
-  margin-top: 27px;
-  margin-bottom: 20px;
-  color: #959595;
-  display: flex;
-  flex-direction: column;
-}
-.footer .version {
-  margin-left: auto;
-	margin-top: 20px;
-}
-.slave-info {
-  padding: 15px 0px 15px 15px;
-  font-weight: bold;
-}
-.alert-hr {
-  margin:3px 0px;
-  border-bottom:1px solid #f5f5f5!important;
-  opacity: 0.3;
-}
-.btn-input-missing,
-.btn-input-missing:hover,
-.btn-input-missing:active,
-.btn-input-missing:focus,
-.btn-input-missing:active:hover,
-.btn-input-missing:active:focus {
-  color: #000 !important;
-  background-color: #ff2f24 !important;
-  border-color: #e21207 !important;
-}
-.navbar-nav > li {
-  font-size: 1rem !important;
-}
-.dropdown-menu > li > a {
-  font-size: 1rem !important;
-}
-.label {
-  font-size:inherit;
-}
-[class^="bi-"]::before, [class*=" bi-"]::before {
-  vertical-align: -0.2em !important;
-}
-legend > [class^="bi-"]::before, legend > [class*=" bi-"]::before {
-  vertical-align: 0em !important;
-}
-code {
-  font-size: inherit;
-}
-.bootstrap-select.btn-group.show-tick .dropdown-menu li.selected a span.check-mark {
-  margin-top: 0px;
-}
-.flag-icon {
-  margin-right: 5px;
-}
-
-.dropdown-header {
-  font-weight: 600;
-}
-
-
-.tag-box {
-  display: flex;
-  flex-wrap: wrap;
-  height: auto;
-}
-.tag-badge {
-  transition: 200ms linear;
-  margin-top: 5px;
-  margin-bottom: 5px;
-  margin-left: 2px;
-  margin-right: 2px;
-}
-.tag-badge.btn-badge {
-  cursor: pointer;
-}
-.tag-badge .bi {
-  font-size: 12px;
-}
-.tag-badge.btn-badge:hover {
-  filter: brightness(0.9);
-}
-.tag-input {
-  margin-left: 10px;
-  border: 0 !important;
-  flex: 1;
-  height: 24px;
-  min-width: 150px;
-}
-.tag-input:focus {
-  outline: none;
-}
-.tag-add {
-  padding: 0 5px 0 5px;
-  align-items: center;
-  display: inline-flex;
-}
-
-#dnstable {
-  overflow-x: auto!important;
-}
-.well {
-  border: 1px solid #dfdfdf;
-  background-color: #f9f9f9;
-  padding: 10px;
-}
-
-
-.btn-check-label {
-  color: #555;
-}
-
-.caret {
-  transform: rotate(0deg);
-}
-a[aria-expanded='true'] > .caret, 
-button[aria-expanded='true'] > .caret {
-  transform: rotate(-180deg);
-}
-
-.list-group-details {
-  background: #fff;
-}
-.list-group-header {
-  background: #f7f7f7;
-} 
-
-
-.bg-primary, .alert-primary, .btn-primary {
-  background-color: #0F688D !important;
-  border-color: #0d526d !important;
-}
-.bg-info, .alert-info, .btn-info {
-  background-color: #148DBC !important;
-  border-color: #127ea8 !important;
-}
-
-.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary {
-  color: rgb(137 137 137)!important;
-}
-
-.progress {
-  background-color: #d5d5d5;
-}
-
-
-.btn-outline-secondary:hover {
-    background-color: #f0f0f0;
-}
-.btn.btn-outline-secondary {
-  border-color: #cfcfcf !important;  
-}
-.btn-check:checked+.btn-outline-secondary, .btn-check:active+.btn-outline-secondary, .btn-outline-secondary:active, .btn-outline-secondary.active, .btn-outline-secondary.dropdown-toggle.show {
-    background-color: #f0f0f0 !important;
-}
diff --git a/mailcow/src/mailcow-dockerized/data/web/css/build/014-responsive.css b/mailcow/src/mailcow-dockerized/data/web/css/build/014-responsive.css
deleted file mode 100644
index a987727..0000000
--- a/mailcow/src/mailcow-dockerized/data/web/css/build/014-responsive.css
+++ /dev/null
@@ -1,214 +0,0 @@
-.btn-xs-lg>.lang-sm:after {
-  margin-left: 4px;
-}
-
-.bootstrap-select {
-  max-width: 350px;
-}
-
-.card-login .apps .btn {
-  width: auto;
-  float: left;
-  margin-right: 10px;
-  margin-top: auto;
-}
-.card-login .apps .btn:hover {
-  margin-top: 1px !important;
-  border-bottom-width: 3px;
-}
-
-.responsive-tabs .nav-tabs {
-  display: none;
-}
-
-.dataTables_paginate.paging_simple_numbers .pagination {
-  display: flex;
-  flex-wrap: wrap;
-}
-
-@media (min-width: 768px) {
-  .responsive-tabs .nav-tabs {
-      display: flex;
-  }
-
-  .responsive-tabs .card .card-body.collapse {
-      display: block;
-  }
-}
-
-
-@media (max-width: 767px) {
-  .responsive-tabs .tab-pane {
-      display: block !important;
-      opacity: 1;
-  }
-
-  .card-login .apps .btn {
-    width: 100%;
-    float: none;
-    margin-bottom: 10px;
-  }
-
-  .card-login .apps .btn {
-    border-bottom-width: 4px;
-  }
-
-  .xs-show {
-    display: block !important;
-  }
-
-  .recent-login-success {
-    font-size: 14px;
-    margin-top: 10px !important;
-  }
-  .pull-xs-right {
-  float: right !important;
-  }
-  .pull-xs-right .dropdown-menu {
-    right: 0;
-    left: auto;
-  }
-  .text-xs-left {
-    text-align: left;
-  }
-  .text-xs-bold {
-    font-weight: bold;
-  }
-  .text-xs-bold .small {
-    font-weight: normal;
-    text-align: justify;
-  }
-  .btn.d-block {
-    width: 100%;
-    white-space: normal;
-  }
-  .btn.btn-xs-half,
-  .btn.d-block.btn-xs-half {
-    width: 50%;
-  }
-  .btn.btn-xs-third,
-  .btn.d-block.btn-xs-third {
-    width: 33.33%;
-  }
-  .btn.btn-xs-quart,
-  .btn.d-block.btn-xs-quart {
-    width: 25%;
-  }
-  .btn.d-block.btn-sm,
-  .btn-xs-lg {
-    padding: .5rem 1rem;
-    line-height: 20px;
-  }
-  .input-xs-lg {
-    height: 47px;
-    padding: 13px 16px;
-  }
-  .btn-group:not(.input-group-btn) {
-    display: flex;
-    flex-wrap: wrap;
-  }
-  .btn-group.nowrap {
-    flex-wrap: nowrap;
-  }
-  .btn-group.nowrap .dropdown-menu {
-    width: 100%;
-  }
-  .card-login .btn-group {
-    display: block;
-  }
-  .mass-actions-user .btn-group {
-    float: none;
-  }
-  div[class^='mass-actions'] .dropdown-menu,
-  .card-xs-lg .dropdown-menu,
-  .dropdown-menu.login {
-    width: 100%;
-  }
-  div[class^='mass-actions'] .btn-group .dropdown-menu {
-    top: 50%;
-  }
-  div[class^='mass-actions'] .btn-group .btn-group .dropdown-menu,
-  div.mass-actions-quarantine .btn-group .dropdown-menu,
-  .card-xs-lg .dropdown-menu {
-    top: 100%;
-  }
-  div[class^='mass-actions'] .dropdown-menu>li>a,
-  .card-xs-lg .dropdown-menu>li>a,
-  .dropdown-menu.login>li>a {
-    padding: 8px 20px;
-  }
-  div[class^='mass-actions'] .dropdown-header {
-    font-size: 14px;
-    font-weight: bold;
-  }
-  .top100 {
-    top: 100% !important;
-  }
-  .top33 {
-    top: 33% !important;
-  }
-  .footable-filtering .form {
-    width: 65%;
-  }
-  .btn-xs-lg>.lang-sm:after {
-    top: 1px;
-  }
-  .pagination {
-    margin-bottom: 5px;
-  }
-  .mass-actions-mailbox {
-    padding: 0;
-  }
-  .card-xs-lg .card-header {
-    height: 66px;
-    line-height: 47px;
-  }
-  .card-xs-lg .btn-group .btn {
-    padding-right: 5px;
-    padding-left: 5px;
-  }
-  .bootstrap-select:not([class*=col-]):not([class*=form-control]):not(.input-group-btn) {
-    width: 100%;
-  }
-  .btn-group:not(.bootstrap-select) {
-    width: auto !important;
-  }
-  .bootstrap-select {
-    max-width: 100%;
-  }
-  .bootstrap-select.btn-group.show-tick .dropdown-menu li a span.text {
-    margin-right: 14px;
-    white-space: normal;
-  }
-  .btn-group > .btn-group {
-    flex-basis: 100%;
-  }
-  .btn-group .btn {
-    display: flex !important;
-    align-items: center;
-    justify-content: center;
-  }
-  .btn-group .btn i {
-    margin-right: 5px;
-  }
-  .card-login .btn-group .btn {
-    display: block !important;
-  }
-
-  .dt-sm-head-hidden .dtr-title {
-    display: none !important;
-  }
-
-}
-
-@media (max-width: 350px) {
-  .mailcow-logo img {
-    max-width: 250px;
-  }
-}
-
-@media (min-width: 1400px) {
-  .container-xxl, .container-xl, .container-lg, .container-md, .container-sm, .container {
-    max-width: 1600px;
-  }
-}
\ No newline at end of file
diff --git a/mailcow/src/mailcow-dockerized/data/web/css/build/015-datatables.css b/mailcow/src/mailcow-dockerized/data/web/css/build/015-datatables.css
deleted file mode 100644
index e5518ff..0000000
--- a/mailcow/src/mailcow-dockerized/data/web/css/build/015-datatables.css
+++ /dev/null
@@ -1,80 +0,0 @@
-.dataTables_info {
-    margin: 15px 0 !important;
-    padding: 0px !important;
-}
-.dataTables_paginate, .dataTables_length, .dataTables_filter {
-    margin: 15px 0 !important;
-}
-.dtr-details {
-    width: 100%;
-}
-.table-striped>tbody>tr:nth-of-type(odd) {
-    background-color: #F2F2F2;
-}
-td.child>ul>li {
-    display: flex;
-}
-table.dataTable>tbody>tr.child ul.dtr-details>li {
-    border-bottom: 1px solid rgba(0, 0, 0, 0.129);
-    padding: 0.5em 0;
-}
-table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before:hover, 
-table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before:hover {
-    background-color: #5e5e5e;
-}
-table.dataTable.dtr-inline.collapsed>tbody>tr>td.dtr-control:before, 
-table.dataTable.dtr-inline.collapsed>tbody>tr>th.dtr-control:before,
-table.dataTable td.dt-control:before {
-    background-color: #979797 !important;
-    border: 1.5px solid #616161 !important;
-    border-radius: 2px !important;
-    color: #fff;
-    height: 1em;
-    width: 1em;
-    line-height: 1.25em;
-    border-radius: 0px;
-    box-shadow: none;
-    font-size: 14px;
-    transition: 0.5s all;
-}
-table.dataTable.dtr-inline.collapsed>tbody>tr.parent>td.dtr-control:before, 
-table.dataTable.dtr-inline.collapsed>tbody>tr.parent>th.dtr-control:before,
-table.dataTable td.dt-control:before {
-    background-color: #979797 !important;
-}
-table.dataTable.dtr-inline.collapsed>tbody>tr>td.child, 
-table.dataTable.dtr-inline.collapsed>tbody>tr>th.child, 
-table.dataTable.dtr-inline.collapsed>tbody>tr>td.dataTables_empty {
-    background-color: #fbfbfb;
-}
-table.dataTable.table-striped>tbody>tr>td {
-    vertical-align: middle;
-}
-table.dataTable.table-striped>tbody>tr>td>input[type="checkbox"] {
-    margin-top: 7px;
-}
-td.dtr-col-lg {
-    min-width: 350px;
-    word-break: break-word;
-}
-td.dtr-col-md {
-    min-width: 250px;
-    word-break: break-word;
-}
-td.dtr-col-sm {
-    min-width: 125px;
-    word-break: break-word;
-}
-.dt-data-w100 .dtr-data {
-    width: 100%;
-}
-li .dtr-data {
-    word-break: break-all;
-    flex: 1;
-    padding-left: 5px;
-    padding-right: 5px;
-}
-table.dataTable>tbody>tr.child span.dtr-title {
-    width: 30%;
-    max-width: 250px;
-}
\ No newline at end of file
diff --git a/mailcow/src/mailcow-dockerized/docker-compose.yml b/mailcow/src/mailcow-dockerized/docker-compose.yml
index 2fb0441..040e530 100644
--- a/mailcow/src/mailcow-dockerized/docker-compose.yml
+++ b/mailcow/src/mailcow-dockerized/docker-compose.yml
@@ -352,7 +352,7 @@
         - sogo-mailcow
         - php-fpm-mailcow
         - redis-mailcow
-      image: nginx:1.19-alpine
+      image: nginx:mainline-alpine
       dns:
         - ${IPV4_NETWORK:-172.22.1}.254
       command: /bin/sh -c "envsubst < /etc/nginx/conf.d/templates/listen_plain.template > /etc/nginx/conf.d/listen_plain.active &&
diff --git a/mailcow/src/mailcow-dockerized/generate_config.sh b/mailcow/src/mailcow-dockerized/generate_config.sh
index 4780e79..f25309e 100755
--- a/mailcow/src/mailcow-dockerized/generate_config.sh
+++ b/mailcow/src/mailcow-dockerized/generate_config.sh
@@ -54,13 +54,6 @@
 
 else
   echo -e "\e[31mCannot find Docker Compose.\e[0m" 
-  echo -e "\e[31mPlease install it regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
-      exit 1
-    fi
-  fi
-    
-else
-  echo -e "\e[31mCannot find Docker Compose.\e[0m" 
   echo -e "\e[31mPlease install it regarding to this doc site: https://docs.mailcow.email/i_u_m/i_u_m_install/\e[0m"
   exit 1
 fi
diff --git a/mailcow/src/mailcow-dockerized/update.sh b/mailcow/src/mailcow-dockerized/update.sh
index d80e2b7..5204659 100755
--- a/mailcow/src/mailcow-dockerized/update.sh
+++ b/mailcow/src/mailcow-dockerized/update.sh
@@ -339,119 +339,6 @@
 DATE=$(date +%Y-%m-%d_%H_%M_%S)
 BRANCH=$(cd ${SCRIPT_DIR}; git rev-parse --abbrev-ref HEAD)
 
-                if [[ "$(cat $override | sed '/^\s*$/d' | wc -l)" == "2" ]]; then
-                  mv $override ${override}_empty
-                  echo -e "\e[31m${override} is empty. Renamed it to ensure mailcow is startable.\e[0m"
-                fi
-            fi
-          fi
-        fi
-    fi
-    done        
-}
-
-detect_docker_compose_command(){
-if ! [ "${DOCKER_COMPOSE_VERSION}" == "native" ] && ! [ "${DOCKER_COMPOSE_VERSION}" == "standalone" ]; then
-  if docker compose > /dev/null 2>&1; then
-      if docker compose version --short | grep "2." > /dev/null 2>&1; then
-        DOCKER_COMPOSE_VERSION=native
-        COMPOSE_COMMAND="docker compose"
-        echo -e "\e[31mFound Docker Compose Plugin (native).\e[0m"
-        echo -e "\e[31mSetting the DOCKER_COMPOSE_VERSION Variable to native\e[0m"
-        sleep 2
-        echo -e "\e[33mNotice: You'll have to update this Compose Version via your Package Manager manually!\e[0m"
-      else
-        echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m" 
-        echo -e "\e[31mPlease update/install it manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
-        exit 1
-      fi
-  elif docker-compose > /dev/null 2>&1; then
-    if ! [[ $(alias docker-compose 2> /dev/null) ]] ; then
-      if docker-compose version --short | grep "^2." > /dev/null 2>&1; then
-        DOCKER_COMPOSE_VERSION=standalone
-        COMPOSE_COMMAND="docker-compose"
-        echo -e "\e[31mFound Docker Compose Standalone.\e[0m"
-        echo -e "\e[31mSetting the DOCKER_COMPOSE_VERSION Variable to standalone\e[0m"
-        sleep 2
-        echo -e "\e[33mNotice: For an automatic update of docker-compose please use the update_compose.sh scripts located at the helper-scripts folder.\e[0m"
-      else
-        echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m" 
-        echo -e "\e[31mPlease update/install regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
-        exit 1
-      fi
-    fi
-
-  else
-    echo -e "\e[31mCannot find Docker Compose.\e[0m" 
-    echo -e "\e[31mPlease install it regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
-    exit 1
-  fi
-
-elif [ "${DOCKER_COMPOSE_VERSION}" == "native" ]; then
-  COMPOSE_COMMAND="docker compose"
-
-elif [ "${DOCKER_COMPOSE_VERSION}" == "standalone" ]; then
-  COMPOSE_COMMAND="docker-compose"
-fi
-}
-
-############## End Function Section ##############
-
-# Check permissions
-if [ "$(id -u)" -ne "0" ]; then
-  echo "You need to be root"
-  exit 1
-fi
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-# Run pre-update-hook
-if [ -f "${SCRIPT_DIR}/pre_update_hook.sh" ]; then
-  bash "${SCRIPT_DIR}/pre_update_hook.sh"
-fi
-
-if [[ "$(uname -r)" =~ ^4\.15\.0-60 ]]; then
-  echo "DO NOT RUN mailcow ON THIS UBUNTU KERNEL!";
-  echo "Please update to 5.x or use another distribution."
-  exit 1
-fi
-
-if [[ "$(uname -r)" =~ ^4\.4\. ]]; then
-  if grep -q Ubuntu <<< $(uname -a); then
-    echo "DO NOT RUN mailcow ON THIS UBUNTU KERNEL!"
-    echo "Please update to linux-generic-hwe-16.04 by running \"apt-get install --install-recommends linux-generic-hwe-16.04\""
-    exit 1
-  fi
-  echo "mailcow on a 4.4.x kernel is not supported. It may or may not work, please upgrade your kernel or continue at your own risk."
-  read -p "Press any key to continue..." < /dev/tty
-fi
-
-# Exit on error and pipefail
-set -o pipefail
-
-# Setting high dc timeout
-export COMPOSE_HTTP_TIMEOUT=600
-
-# Add /opt/bin to PATH
-PATH=$PATH:/opt/bin
-
-umask 0022
-
-# Unset COMPOSE_COMMAND and DOCKER_COMPOSE_VERSION Variable to be on the newest state.
-unset COMPOSE_COMMAND
-unset DOCKER_COMPOSE_VERSION
-
-for bin in curl docker git awk sha1sum; do
-  if [[ -z $(command -v ${bin}) ]]; then 
-  echo "Cannot find ${bin}, exiting..." 
-  exit 1;
-  fi  
-done
-
-export LC_ALL=C
-DATE=$(date +%Y-%m-%d_%H_%M_%S)
-BRANCH=$(cd ${SCRIPT_DIR}; git rev-parse --abbrev-ref HEAD)
-
 while (($#)); do
   case "${1}" in
     --check|-c)
@@ -504,6 +391,7 @@
     -d|--dev)
       echo -e "\e[32mRunning in Developer mode...\e[0m"
       DEV=y
+    ;;
     --help|-h)
     echo './update.sh [-c|--check, --ours, --gc, --nightly, --prefetch, --skip-start, --skip-ping-check, --stable, -f|--force, -d|--dev, -h|--help]
 
@@ -937,70 +825,6 @@
   git checkout -f ${BRANCH}
 fi
 
-  else
-    echo -e "\e[33mYou are receiving updates from a unsupported branch.\e[0m"
-    sleep 1
-    echo -e "\e[33mThe mailcow stack might still work but it is recommended to switch to the master branch (stable builds).\e[0m"
-    echo -e "\e[33mTo change that run the update.sh Script one time with the --stable parameter to switch to stable builds.\e[0m"
-  fi
-elif [ $FORCE ]; then
-  echo -e "\e[31mYou are running in forced mode!\e[0m"
-  echo -e "\e[31mA Branch Switch can only be performed manually (monitored).\e[0m"
-  echo -e "\e[31mPlease rerun the update.sh Script without the --force/-f parameter.\e[0m"
-  sleep 1
-elif [ $NEW_BRANCH == "master" ] && [ $CURRENT_BRANCH != "master" ]; then
-  echo -e "\e[33mYou are about to switch your mailcow Updates to the stable (master) branch.\e[0m"
-  sleep 1
-  echo -e "\e[33mBefore you do: Please take a backup of all components to ensure that no Data is lost...\e[0m"
-  sleep 1
-  echo -e "\e[31mWARNING: Please see on GitHub or ask in the communitys if a switch to master is stable or not.
-  In some rear cases a Update back to master can destroy your mailcow configuration in case of Database Upgrades etc.
-  Normally a upgrade back to master should be safe during each full release. 
-  Check GitHub for Database Changes and Update only if there similar to the full release!\e[0m"
-  read -r -p "Are you sure you that want to continue upgrading to the stable (master) branch? [y/N] " response
-  if [[ ! "${response}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
-    echo "OK. If you prepared yourself for that please run the update.sh Script with the --stable parameter again to trigger this process here."
-    exit 0
-  fi
-  BRANCH=$NEW_BRANCH
-  DIFF_DIRECTORY=update_diffs
-  DIFF_FILE=${DIFF_DIRECTORY}/diff_before_upgrade_to_master_$(date +"%Y-%m-%d-%H-%M-%S")
-  mv diff_before_upgrade* ${DIFF_DIRECTORY}/ 2> /dev/null
-  if ! git diff-index --quiet HEAD; then
-    echo -e "\e[32mSaving diff to ${DIFF_FILE}...\e[0m"
-    mkdir -p ${DIFF_DIRECTORY}
-    git diff ${BRANCH} --stat > ${DIFF_FILE}
-    git diff ${BRANCH} >> ${DIFF_FILE}
-  fi
-  echo -e "\e[32mSwitching Branch to ${BRANCH}...\e[0m"
-  git fetch origin
-  git checkout -f ${BRANCH}
-
-elif [ $NEW_BRANCH == "nightly" ] && [ $CURRENT_BRANCH != "nightly" ]; then
-  echo -e "\e[33mYou are about to switch your mailcow Updates to the unstable (nightly) branch.\e[0m"
-  sleep 1
-  echo -e "\e[33mBefore you do: Please take a backup of all components to ensure that no Data is lost...\e[0m"
-  sleep 1
-  echo -e "\e[31mWARNING: A switch to nightly is possible any time. But a switch back (to master) isn't.\e[0m"
-  read -r -p "Are you sure you that want to continue upgrading to the unstable (nightly) branch? [y/N] " response
-  if [[ ! "${response}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
-    echo "OK. If you prepared yourself for that please run the update.sh Script with the --nightly parameter again to trigger this process here."
-    exit 0
-  fi
-  BRANCH=$NEW_BRANCH
-  DIFF_DIRECTORY=update_diffs
-  DIFF_FILE=${DIFF_DIRECTORY}/diff_before_upgrade_to_nightly_$(date +"%Y-%m-%d-%H-%M-%S")
-  mv diff_before_upgrade* ${DIFF_DIRECTORY}/ 2> /dev/null
-  if ! git diff-index --quiet HEAD; then
-    echo -e "\e[32mSaving diff to ${DIFF_FILE}...\e[0m"
-    mkdir -p ${DIFF_DIRECTORY}
-    git diff ${BRANCH} --stat > ${DIFF_FILE}
-    git diff ${BRANCH} >> ${DIFF_FILE}
-  fi
-  git fetch origin
-  git checkout -f ${BRANCH}
-fi
-
 if [ ! $DEV ]; then
   echo -e "\e[32mChecking for newer update script...\e[0m"
   SHA1_1=$(sha1sum update.sh)
@@ -1012,6 +836,8 @@
     chmod +x update.sh
     exit 2
   fi
+fi
+
 if [ ! $FORCE ]; then
   read -r -p "Are you sure you want to update mailcow: dockerized? All containers will be stopped. [y/N] " response
   if [[ ! "${response}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
@@ -1190,9 +1016,6 @@
   echo -e "\e[33mCannot determine current git repository version...\e[0m"
 fi
 
-# Set DOCKER_COMPOSE_VERSION
-sed -i 's/^DOCKER_COMPOSE_VERSION=$/DOCKER_COMPOSE_VERSION='$DOCKER_COMPOSE_VERSION'/g' mailcow.conf
-
 if [[ ${SKIP_START} == "y" ]]; then
   echo -e "\e[33mNot starting mailcow, please run \"$COMPOSE_COMMAND up -d --remove-orphans\" to start mailcow.\e[0m"
 else
@@ -1213,4 +1036,4 @@
 # echo
 # git reflog --color=always | grep "Before update on "
 # echo
-# echo "Use \"git reset --hard hash-on-the-left\" and run $COMPOSE_COMMAND up -d afterwards."
\ No newline at end of file
+# echo "Use \"git reset --hard hash-on-the-left\" and run $COMPOSE_COMMAND up -d afterwards."