Add the possibility to store limits_by_id in a mysql or pgsql database
Co-authored-by: root <[email protected]>
Co-authored-by: Valentin Samir <[email protected]>
3 people authored Jul 26, 2023
1 parent 38b397b commit cf0e11a
Showing 3 changed files with 43 additions and 1 deletion.
7 changes: 7 additions & 0 deletions policyd_rate_limit/policyd-rate-limit.yaml
@@ -54,6 +54,13 @@ limits:
# 2a01:240:fe3d:4:219:bbff:fe3c:4f76: []
limits_by_id: {}

# Use a custom rate limit per user if your mail server stores its users in an SQL backend.
# The `rate_limits` table maps an id to a limits_by_id-style string holding the custom limits.
# Give each of your users a rate_limit id to map the users table to the rate_limits table, e.g.:
# sql_limits_by_id: "SELECT `limits` FROM `rate_limits` WHERE `id` IN (SELECT `rate_limit` FROM `virtual_users` WHERE `email`=%s)"
#
sql_limits_by_id: ""

# Apply limits by sasl usernames.
limit_by_sasl: True
# If no sasl username is found, or limit by sasl usernames disabled,
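The string stored in rate_limits.limits must be a Python literal that ast.literal_eval (used in policyd.py below) can parse into the same list of [count, period_in_seconds] pairs accepted by limits_by_id. A minimal sketch of seeding such a row through a DB-API cursor; the virtual_users table, its rate_limit column, and the address are hypothetical, borrowed from the sample query above:

# Sketch only: `cur` is an open DB-API cursor on the mail-server database.
# "[[10, 60], [100, 86400]]" means at most 10 mails per minute and 100 per day.
cur.execute(
    "INSERT INTO rate_limits (id, limits) VALUES (%s, %s)",
    [1, "[[10, 60], [100, 86400]]"],
)
# Point a user at that row; virtual_users and its rate_limit column are
# assumptions about your mail-server schema, matching the sample query above.
cur.execute(
    "UPDATE virtual_users SET rate_limit = %s WHERE email = %s",
    [1, "user@example.org"],
)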
21 changes: 20 additions & 1 deletion policyd_rate_limit/policyd.py
@@ -14,6 +14,7 @@
import time
import select
import traceback
import ast

from policyd_rate_limit import utils
from policyd_rate_limit.utils import config
@@ -281,14 +282,31 @@ def action(self, connection, request):
# to the next section
else:
    raise Pass()

if request['protocol_state'].upper() == "RCPT":
    recipient_count = 1
elif request['protocol_state'].upper() == "DATA":
    recipient_count = max(int(request["recipient_count"]), 1)

# Custom limits per ID via SQL.
# Work on a copy so a per-user result fetched from SQL does not
# permanently mutate the shared config.limits_by_id mapping.
custom_limits = dict(config.limits_by_id)
if config.sql_limits_by_id != "":
    try:
        cur.execute(config.sql_limits_by_id, [id])
        custom_limits[id] = ast.literal_eval(cur.fetchone()[0])
    except TypeError:
        # fetchone() returned None: no custom rate limit stored for this id.
        if config.debug:
            sys.stderr.write(u"No rate limit found in SQL for: %s\n" % id)
            sys.stderr.flush()
    if config.debug:
        sys.stderr.write(u"Custom limit(s): %s\n" % custom_limits)
        sys.stderr.flush()

# Here we are limiting against the sasl username, sender or source IP address.
# For each limit period, we count the number of mails already sent.
# If a limit is reached, we change the action to fail (deny the mail).
for mail_nb, delta in config.limits_by_id.get(id, config.limits):
for mail_nb, delta in custom_limits.get(id, config.limits):
    cur.execute(
        (
            "SELECT SUM(recipient_count) FROM mail_count "
@@ -350,6 +368,7 @@ def action(self, connection, request):
sys.stderr.flush()
# return the result to the client
self.socket_data_write[connection] = data.encode('UTF-8')

# Wipe the read buffer (otherwise it'll be added up for eternity)
self.socket_data_read[connection].clear()
# Socket has been used, let's update its last_used time.
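To make the parsing step concrete: sql_limits_by_id is expected to return a single column holding a Python literal, which ast.literal_eval turns into the list the counting loop iterates over. A self-contained sketch (the literal is an example value, not data from the commit):

import ast

row = ("[[10, 60], [100, 86400]]",)   # what cur.fetchone() might return
limits = ast.literal_eval(row[0])     # -> [[10, 60], [100, 86400]]
for mail_nb, delta in limits:
    print("allow at most %d mails per %d seconds" % (mail_nb, delta))

# When the query matches no row, fetchone() returns None and None[0]
# raises TypeError, which is the fallback path handled above.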
16 changes: 16 additions & 0 deletions policyd_rate_limit/utils.py
@@ -465,6 +465,19 @@ def database_init():
    instance varchar(40) NOT NULL,
    protocol_state varchar(10) NOT NULL
);"""
if config.backend == MYSQL_DB:
    # MySQL can generate the id itself on insert.
    query_limits = """CREATE TABLE IF NOT EXISTS rate_limits (
        id int NOT NULL AUTO_INCREMENT,
        limits varchar(255) NOT NULL,
        PRIMARY KEY (id)
    );"""
else:
    # PostgreSQL does not accept AUTO_INCREMENT: id is a plain int and must
    # be supplied explicitly on insert (SERIAL would be the native equivalent).
    query_limits = """CREATE TABLE IF NOT EXISTS rate_limits (
        id int NOT NULL,
        limits varchar(255) NOT NULL,
        PRIMARY KEY (id)
    );"""

# if report is enabled, also create the table for storing report data
query_report = """CREATE TABLE IF NOT EXISTS limit_report (
    id varchar(40) NOT NULL,
Expand Down Expand Up @@ -512,6 +525,9 @@ def database_init():
cur.execute(query)
if config.report:
    cur.execute(query_report)
if config.sql_limits_by_id and config.backend in [MYSQL_DB, PGSQL_DB]:
    cur.execute(query_limits)

finally:
    warnings.resetwarnings()
    try:
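A practical consequence of the two schemas above: MySQL generates the id itself, while the PostgreSQL variant declares a plain int, so inserts there must supply the id explicitly. A sketch, assuming an open DB-API cursor `cur`:

# MySQL: id is AUTO_INCREMENT and can be omitted.
cur.execute("INSERT INTO rate_limits (limits) VALUES (%s)",
            ["[[10, 60], [100, 86400]]"])
# PostgreSQL: id is a plain int here (not SERIAL), so supply it yourself.
cur.execute("INSERT INTO rate_limits (id, limits) VALUES (%s, %s)",
            [1, "[[10, 60], [100, 86400]]"])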
