// Package dmarcdb stores incoming DMARC reports.
//
// With DMARC, a domain can request that remote mail servers send reports with
// DMARC verification results for its messages to a specified address. Mox
// parses such reports, stores them in its database and makes them available
// through its admin web interface.
package dmarcdb

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"sync"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"

	"github.com/mjl-/bstore"

	"github.com/mjl-/mox/dmarcrpt"
	"github.com/mjl-/mox/dns"
	"github.com/mjl-/mox/mox-"
)

var (
	DBTypes = []any{DomainFeedback{}} // Types stored in DB.
	DB      *bstore.DB                // Exported for backups.
	mutex   sync.Mutex
)

var (
	metricEvaluated = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Name: "mox_dmarcdb_policy_evaluated_total",
			Help: "Number of policy evaluations.",
		},
		// We only register validated domains for which we have a config.
		[]string{"domain", "disposition", "dkim", "spf"},
	)
	metricDKIM = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Name: "mox_dmarcdb_dkim_result_total",
			Help: "Number of DKIM results.",
		},
		[]string{"result"},
	)
	metricSPF = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Name: "mox_dmarcdb_spf_result_total",
			Help: "Number of SPF results.",
		},
		[]string{"result"},
	)
)

// DomainFeedback is a single report stored in the database.
type DomainFeedback struct {
	ID int64
	// Domain where the DMARC DNS record was found; can be the organizational domain.
	Domain string `bstore:"index"`
	// Domain in the From header of the report message.
	FromDomain string `bstore:"index"`
	dmarcrpt.Feedback
}

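// Because dmarcrpt.Feedback is embedded, its fields are promoted and can be
// read directly from a DomainFeedback. A minimal illustrative sketch
// (hypothetical variables; only fields already used elsewhere in this package):
//
//	var df DomainFeedback
//	period := df.ReportMetadata.DateRange  // reported period, unix seconds
//	fmt.Println(df.PolicyPublished.Domain) // the unparsed form of df.Domain
//	fmt.Println(period.Begin, period.End, len(df.Records))
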

// database returns the DMARC reports database, opening dmarcrpt.db in the data
// directory and registering DBTypes on first use.
func database(ctx context.Context) (rdb *bstore.DB, rerr error) {
	mutex.Lock()
	defer mutex.Unlock()
	if DB == nil {
		p := mox.DataDirPath("dmarcrpt.db")
		// Any error creating the directory will surface when opening the database below.
		os.MkdirAll(filepath.Dir(p), 0770)
		db, err := bstore.Open(ctx, p, &bstore.Options{Timeout: 5 * time.Second, Perm: 0660}, DBTypes...)
		if err != nil {
			return nil, err
		}
		DB = db
	}
	return DB, nil
}

// Init opens the database.
func Init() error {
	_, err := database(mox.Shutdown)
	return err
}

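// A minimal startup sketch (hypothetical caller; mox wires this up itself
// during initialization):
//
//	if err := dmarcdb.Init(); err != nil {
//		log.Fatalf("opening dmarc reports database: %v", err)
//	}
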
// AddReport adds a DMARC aggregate feedback report from an email to the database,
// and updates prometheus metrics.
//
// fromDomain is the domain in the report message From header.
func AddReport(ctx context.Context, f *dmarcrpt.Feedback, fromDomain dns.Domain) error {
	db, err := database(ctx)
	if err != nil {
		return err
	}

	d, err := dns.ParseDomain(f.PolicyPublished.Domain)
	if err != nil {
		return fmt.Errorf("parsing domain in report: %v", err)
	}

	df := DomainFeedback{0, d.Name(), fromDomain.Name(), *f}
	if err := db.Insert(ctx, &df); err != nil {
		return err
	}

	for _, r := range f.Records {
		for _, dkim := range r.AuthResults.DKIM {
			count := r.Row.Count
			if count > 0 {
				metricDKIM.With(prometheus.Labels{
					"result": string(dkim.Result),
				}).Add(float64(count))
			}
		}

		for _, spf := range r.AuthResults.SPF {
			count := r.Row.Count
			if count > 0 {
				metricSPF.With(prometheus.Labels{
					"result": string(spf.Result),
				}).Add(float64(count))
			}
		}

		count := r.Row.Count
		if count > 0 {
			pe := r.Row.PolicyEvaluated
			metricEvaluated.With(prometheus.Labels{
				"domain":      f.PolicyPublished.Domain,
				"disposition": string(pe.Disposition),
				"dkim":        string(pe.DKIM),
				"spf":         string(pe.SPF),
			}).Add(float64(count))
		}
	}
	return nil
}

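// A hedged sketch of the typical flow: an aggregate report arrives as an email
// attachment, is parsed, and is then stored. The parse helper and the
// msgFile/fromDomain variables are assumptions here, not part of this package:
//
//	feedback, err := dmarcrpt.ParseMessageReport(msgFile)
//	if err != nil {
//		return fmt.Errorf("parsing dmarc aggregate report: %v", err)
//	}
//	return dmarcdb.AddReport(ctx, feedback, fromDomain)
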
// Records returns all reports in the database.
func Records(ctx context.Context) ([]DomainFeedback, error) {
	db, err := database(ctx)
	if err != nil {
		return nil, err
	}

	return bstore.QueryDB[DomainFeedback](ctx, db).List()
}

// RecordID returns the report for the ID.
func RecordID(ctx context.Context, id int64) (DomainFeedback, error) {
	db, err := database(ctx)
	if err != nil {
		return DomainFeedback{}, err
	}

	e := DomainFeedback{ID: id}
	err = db.Get(ctx, &e)
	return e, err
}

// RecordsPeriodDomain returns the reports overlapping start and end, for the given
// domain. If domain is empty, reports for all domains are returned.
func RecordsPeriodDomain(ctx context.Context, start, end time.Time, domain string) ([]DomainFeedback, error) {
	db, err := database(ctx)
	if err != nil {
		return nil, err
	}

	s := start.Unix()
	e := end.Unix()

	q := bstore.QueryDB[DomainFeedback](ctx, db)
	if domain != "" {
		q.FilterNonzero(DomainFeedback{Domain: domain})
	}
	// Match reports whose reported period begins or ends within the requested window.
	q.FilterFn(func(d DomainFeedback) bool {
		m := d.Feedback.ReportMetadata.DateRange
		return m.Begin >= s && m.Begin < e || m.End > s && m.End <= e
	})
	return q.List()
}
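
// A small usage sketch (hypothetical domain and window): list reports for
// example.org whose reported period overlaps the past 30 days.
//
//	end := time.Now()
//	start := end.Add(-30 * 24 * time.Hour)
//	dfs, err := dmarcdb.RecordsPeriodDomain(ctx, start, end, "example.org")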