mirror of
https://github.com/mjl-/mox.git
synced 2025-07-12 17:44:35 +03:00
add webmail
it was far down on the roadmap, but implemented earlier, because it's interesting, and to help prepare for a jmap implementation. for jmap we need to implement more client-like functionality than with just imap. internal data structures need to change. jmap has lots of other requirements, so it's already a big project. by implementing a webmail now, some of the required data structure changes become clear and can be made now, so the later jmap implementation can do things similarly to the webmail code. the webmail frontend and webmail are written together, making their interface/api much smaller and simpler than jmap. one of the internal changes is that we now keep track of per-mailbox total/unread/unseen/deleted message counts and mailbox sizes. keeping this data consistent after any change to the stored messages (through the code base) is tricky, so mox now has a consistency check that verifies the counts are correct, which runs only during tests, each time an internal account reference is closed. we have a few more internal "changes" that are propagated for the webmail frontend (that imap doesn't have a way to propagate on a connection), like changes to the special-use flags on mailboxes, and used keywords in a mailbox. more changes that will be required have revealed themselves while implementing the webmail, and will be implemented next. the webmail user interface is modeled after the mail clients i use or have used: thunderbird, macos mail, mutt; and webmails i normally only use for testing: gmail, proton, yahoo, outlook. a somewhat technical user is assumed, but still the goal is to make this webmail client easy to use for everyone. the user interface looks like most other mail clients: a list of mailboxes, a search bar, a message list view, and message details. there is a top/bottom and a left/right layout for the list/message view, default is automatic based on screen size. the panes can be resized by the user. buttons for actions are just text, not icons. 
clicking a button briefly shows the shortcut for the action in the bottom right, helping with learning to operate quickly. any text that is underdotted has a title attribute that causes more information to be displayed, e.g. what a button does or a field is about. to highlight potential phishing attempts, any text (anywhere in the webclient) that switches unicode "blocks" (a rough approximation to (language) scripts) within a word is underlined orange. multiple messages can be selected with familiar ui interaction: clicking while holding control and/or shift keys. keyboard navigation works with arrows/page up/down and home/end keys, and also with a few basic vi-like keys for list/message navigation. we prefer showing the text instead of html (with inlined images only) version of a message. html messages are shown in an iframe served from an endpoint with CSP headers to prevent dangerous resources (scripts, external images) from being loaded. the html is also sanitized, with javascript removed. a user can choose to load external resources (e.g. images for tracking purposes). the frontend is just (strict) typescript, no external frameworks. all incoming/outgoing data is typechecked, both the api request parameters and response types, and the data coming in over SSE. the types and checking code are generated with sherpats, which uses the api definitions generated by sherpadoc based on the Go code. so types from the backend are automatically propagated to the frontend. since there is no framework to automatically propagate properties and rerender components, changes coming in over the SSE connection are propagated explicitly with regular function calls. the ui is separated into "views", each with a "root" dom element that is added to the visible document. these views have additional functions for getting changes propagated, often resulting in the view updating its (internal) ui state (dom). 
we keep the frontend compilation simple, it's just a few typescript files that get compiled (combined and types stripped) into a single js file, no additional runtime code needed or complicated build processes used. the webmail is served from a compressed, cacheable html file that includes style and the javascript, currently just over 225kb uncompressed, under 60kb compressed (not minified, including comments). we include the generated js files in the repository, to keep Go's easily buildable self-contained binaries. authentication is basic http, as with the account and admin pages. most data comes in over one long-term SSE connection to the backend. api requests signal which mailbox/search/messages are requested over the SSE connection. fetching individual messages, and making changes, are done through api calls. the operations are similar to imap, so some code has been moved from package imapserver to package store. the future jmap implementation will benefit from these changes too. more functionality will probably be moved to the store package in the future. the quickstart enables webmail on the internal listener by default (for new installs). users can enable it on the public listener if they want to. mox localserve enables it too. to enable webmail on existing installs, add settings like the following to the listeners in mox.conf, similar to AccountHTTP(S): WebmailHTTP: Enabled: true WebmailHTTPS: Enabled: true special thanks to liesbeth, gerben, andrii for early user feedback. there is plenty still to do, see the list at the top of webmail/webmail.ts. feedback welcome as always.
This commit is contained in:
402
webaccount/account.go
Normal file
402
webaccount/account.go
Normal file
@ -0,0 +1,402 @@
|
||||
package webaccount
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"archive/zip"
|
||||
"compress/gzip"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
_ "embed"
|
||||
|
||||
"github.com/mjl-/sherpa"
|
||||
"github.com/mjl-/sherpadoc"
|
||||
"github.com/mjl-/sherpaprom"
|
||||
|
||||
"github.com/mjl-/mox/config"
|
||||
"github.com/mjl-/mox/dns"
|
||||
"github.com/mjl-/mox/metrics"
|
||||
"github.com/mjl-/mox/mlog"
|
||||
"github.com/mjl-/mox/mox-"
|
||||
"github.com/mjl-/mox/moxvar"
|
||||
"github.com/mjl-/mox/store"
|
||||
)
|
||||
|
||||
// Initialize rate limiters (used below for failed authentication attempts)
// before this package serves any requests.
func init() {
	mox.LimitersInit()
}
|
||||
|
||||
// Package logger; request handlers derive a cid-scoped logger with WithContext.
var xlog = mlog.New("webaccount")

// Sherpa API definition for the account API, generated by sherpadoc from the Go code.
//go:embed accountapi.json
var accountapiJSON []byte

// Frontend for the account page, served at "/".
//go:embed account.html
var accountHTML []byte

// Parsed API documentation, passed to the sherpa handler below.
var accountDoc = mustParseAPI("account", accountapiJSON)

// Handler for /api/ requests, set up in init below.
var accountSherpaHandler http.Handler
|
||||
|
||||
func mustParseAPI(api string, buf []byte) (doc sherpadoc.Section) {
|
||||
err := json.Unmarshal(buf, &doc)
|
||||
if err != nil {
|
||||
xlog.Fatalx("parsing api docs", err, mlog.Field("api", api))
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
// Set up the sherpa API handler, with a prometheus collector so API calls are
// counted in metrics. Failure to set up is fatal at startup.
func init() {
	collector, err := sherpaprom.NewCollector("moxaccount", nil)
	if err != nil {
		xlog.Fatalx("creating sherpa prometheus collector", err)
	}

	// AdjustFunctionNames "none" keeps the Go method names as API function names.
	accountSherpaHandler, err = sherpa.NewHandler("/api/", moxvar.Version, Account{}, &accountDoc, &sherpa.HandlerOpts{Collector: collector, AdjustFunctionNames: "none"})
	if err != nil {
		xlog.Fatalx("sherpa handler", err)
	}
}
|
||||
|
||||
func xcheckf(ctx context.Context, err error, format string, args ...any) {
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
msg := fmt.Sprintf(format, args...)
|
||||
errmsg := fmt.Sprintf("%s: %s", msg, err)
|
||||
xlog.WithContext(ctx).Errorx(msg, err)
|
||||
panic(&sherpa.Error{Code: "server:error", Message: errmsg})
|
||||
}
|
||||
|
||||
// Account exports web API functions for the account web interface. All its
// methods are exported under api/. Function calls require valid HTTP
// Authentication credentials of a user.
//
// The methods are registered with the sherpa handler in init above.
type Account struct{}
|
||||
|
||||
// CheckAuth checks http basic auth, returns login address and account name if
// valid, and writes http response and returns empty string otherwise.
//
// Failed attempts are rate limited per remote IP, and every attempt is counted
// in the authentication metrics under the given kind (e.g. "webaccount").
func CheckAuth(ctx context.Context, log *mlog.Log, kind string, w http.ResponseWriter, r *http.Request) (address, account string) {
	authResult := "error"
	start := time.Now()
	var addr *net.TCPAddr
	// Always record the outcome in metrics. On success, clear the failed-auth
	// history for this IP so future attempts aren't needlessly rate limited.
	defer func() {
		metrics.AuthenticationInc(kind, "httpbasic", authResult)
		if authResult == "ok" && addr != nil {
			mox.LimiterFailedAuth.Reset(addr.IP, start)
		}
	}()

	var err error
	var remoteIP net.IP
	addr, err = net.ResolveTCPAddr("tcp", r.RemoteAddr)
	if err != nil {
		log.Errorx("parsing remote address", err, mlog.Field("addr", r.RemoteAddr))
	} else if addr != nil {
		remoteIP = addr.IP
	}
	// Reject immediately when this IP has had too many recent failed attempts.
	if remoteIP != nil && !mox.LimiterFailedAuth.Add(remoteIP, start, 1) {
		metrics.AuthenticationRatelimitedInc(kind)
		http.Error(w, "429 - too many auth attempts", http.StatusTooManyRequests)
		return "", ""
	}

	// store.OpenEmailAuth has an auth cache, so we don't bcrypt for every auth attempt.
	// The chain below falls through to the 401 response at the bottom on any failure.
	if auth := r.Header.Get("Authorization"); !strings.HasPrefix(auth, "Basic ") {
	} else if authBuf, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(auth, "Basic ")); err != nil {
		log.Debugx("parsing base64", err)
	} else if t := strings.SplitN(string(authBuf), ":", 2); len(t) != 2 {
		log.Debug("bad user:pass form")
	} else if acc, err := store.OpenEmailAuth(t[0], t[1]); err != nil {
		if errors.Is(err, store.ErrUnknownCredentials) {
			authResult = "badcreds"
			log.Info("failed authentication attempt", mlog.Field("username", t[0]), mlog.Field("remote", remoteIP))
		}
		log.Errorx("open account", err)
	} else {
		// Authenticated. We only need the account name here, so release the
		// account reference again right away.
		authResult = "ok"
		accName := acc.Name
		err := acc.Close()
		log.Check(err, "closing account")
		return t[0], accName
	}
	// note: browsers don't display the realm to prevent users getting confused by malicious realm messages.
	w.Header().Set("WWW-Authenticate", `Basic realm="mox account - login with account email address and password"`)
	http.Error(w, "http 401 - unauthorized - mox account - login with account email address and password", http.StatusUnauthorized)
	return "", ""
}
|
||||
|
||||
// Handle is the http handler for the account web interface. It serves the
// account html page, mail export downloads, mail import uploads with progress
// streamed over SSE, and the sherpa API under /api/. All paths except
// /importprogress require valid HTTP basic auth credentials.
func Handle(w http.ResponseWriter, r *http.Request) {
	ctx := context.WithValue(r.Context(), mlog.CidKey, mox.Cid())
	log := xlog.WithContext(ctx).Fields(mlog.Field("userauth", ""))

	// Without authentication. The token is unguessable.
	if r.URL.Path == "/importprogress" {
		if r.Method != "GET" {
			http.Error(w, "405 - method not allowed - get required", http.StatusMethodNotAllowed)
			return
		}

		q := r.URL.Query()
		token := q.Get("token")
		if token == "" {
			http.Error(w, "400 - bad request - missing token", http.StatusBadRequest)
			return
		}

		// SSE requires flushing each event to the client as it happens.
		flusher, ok := w.(http.Flusher)
		if !ok {
			log.Error("internal error: ResponseWriter not a http.Flusher")
			http.Error(w, "500 - internal error - cannot access underlying connection", 500)
			return
		}

		// Register with the importers goroutine to receive events for this token.
		l := importListener{token, make(chan importEvent, 100), make(chan bool, 1)}
		importers.Register <- &l
		ok = <-l.Register
		if !ok {
			http.Error(w, "400 - bad request - unknown token, import may have finished more than a minute ago", http.StatusBadRequest)
			return
		}
		defer func() {
			importers.Unregister <- &l
		}()

		h := w.Header()
		h.Set("Content-Type", "text/event-stream")
		h.Set("Cache-Control", "no-cache")
		// Initial SSE comment line, also verifies we can write to the connection.
		_, err := w.Write([]byte(": keepalive\n\n"))
		if err != nil {
			return
		}
		flusher.Flush()

		// Stream import events until the client disconnects or a write fails.
		cctx := r.Context()
		for {
			select {
			case e := <-l.Events:
				_, err := w.Write(e.SSEMsg)
				flusher.Flush()
				if err != nil {
					return
				}

			case <-cctx.Done():
				return
			}
		}
	}

	_, accName := CheckAuth(ctx, log, "webaccount", w, r)
	if accName == "" {
		// Response already sent.
		return
	}

	// Add the authenticated account to the access log entry, if the
	// ResponseWriter supports it.
	if lw, ok := w.(interface{ AddField(p mlog.Pair) }); ok {
		lw.AddField(mlog.Field("authaccount", accName))
	}

	switch r.URL.Path {
	case "/":
		if r.Method != "GET" {
			http.Error(w, "405 - method not allowed - post required", http.StatusMethodNotAllowed)
			return
		}
		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		w.Header().Set("Cache-Control", "no-cache; max-age=0")
		// We typically return the embedded admin.html, but during development it's handy
		// to load from disk.
		f, err := os.Open("webaccount/account.html")
		if err == nil {
			defer f.Close()
			_, _ = io.Copy(w, f)
		} else {
			_, _ = w.Write(accountHTML)
		}

	case "/mail-export-maildir.tgz", "/mail-export-maildir.zip", "/mail-export-mbox.tgz", "/mail-export-mbox.zip":
		// Format and compression are encoded in the requested path.
		maildir := strings.Contains(r.URL.Path, "maildir")
		tgz := strings.Contains(r.URL.Path, ".tgz")

		acc, err := store.OpenAccount(accName)
		if err != nil {
			log.Errorx("open account for export", err)
			http.Error(w, "500 - internal server error", http.StatusInternalServerError)
			return
		}
		defer func() {
			err := acc.Close()
			log.Check(err, "closing account")
		}()

		var archiver store.Archiver
		if tgz {
			// Don't tempt browsers to "helpfully" decompress.
			w.Header().Set("Content-Type", "application/octet-stream")

			gzw := gzip.NewWriter(w)
			defer func() {
				_ = gzw.Close()
			}()
			archiver = store.TarArchiver{Writer: tar.NewWriter(gzw)}
		} else {
			w.Header().Set("Content-Type", "application/zip")
			archiver = store.ZipArchiver{Writer: zip.NewWriter(w)}
		}
		defer func() {
			err := archiver.Close()
			log.Check(err, "exporting mail close")
		}()
		if err := store.ExportMessages(r.Context(), log, acc.DB, acc.Dir, archiver, maildir, ""); err != nil {
			log.Errorx("exporting mail", err)
		}

	case "/import":
		if r.Method != "POST" {
			http.Error(w, "405 - method not allowed - post required", http.StatusMethodNotAllowed)
			return
		}

		f, _, err := r.FormFile("file")
		if err != nil {
			if errors.Is(err, http.ErrMissingFile) {
				http.Error(w, "400 - bad request - missing file", http.StatusBadRequest)
			} else {
				http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
			}
			return
		}
		defer func() {
			err := f.Close()
			log.Check(err, "closing form file")
		}()
		skipMailboxPrefix := r.FormValue("skipMailboxPrefix")
		// Spool the upload to a temporary file; importStart takes over the file.
		tmpf, err := os.CreateTemp("", "mox-import")
		if err != nil {
			http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
			return
		}
		defer func() {
			if tmpf != nil {
				err := tmpf.Close()
				log.Check(err, "closing uploaded file")
			}
		}()
		// Remove the name from the file system immediately; the open file
		// descriptor keeps the data accessible and cleanup is automatic.
		if err := os.Remove(tmpf.Name()); err != nil {
			log.Errorx("removing temporary file", err)
			http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
			return
		}
		if _, err := io.Copy(tmpf, f); err != nil {
			log.Errorx("copying import to temporary file", err)
			http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
			return
		}
		token, err := importStart(log, accName, tmpf, skipMailboxPrefix)
		if err != nil {
			log.Errorx("starting import", err)
			http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
			return
		}
		tmpf = nil // importStart is now responsible for closing.

		// The client uses the token to follow progress at /importprogress.
		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(map[string]string{"ImportToken": token})

	default:
		if strings.HasPrefix(r.URL.Path, "/api/") {
			// Make the authenticated account name available to the API methods.
			ctx = context.WithValue(ctx, authCtxKey, accName)
			accountSherpaHandler.ServeHTTP(w, r.WithContext(ctx))
			return
		}
		http.NotFound(w, r)
	}
}
|
||||
|
||||
// ctxKey is a private context key type, preventing collisions with context
// values set by other packages.
type ctxKey string

// authCtxKey is the context key under which Handle stores the authenticated
// account name for API method calls.
var authCtxKey ctxKey = "account"
|
||||
|
||||
// SetPassword saves a new password for the account, invalidating the previous password.
|
||||
// Sessions are not interrupted, and will keep working. New login attempts must use the new password.
|
||||
// Password must be at least 8 characters.
|
||||
func (Account) SetPassword(ctx context.Context, password string) {
|
||||
if len(password) < 8 {
|
||||
panic(&sherpa.Error{Code: "user:error", Message: "password must be at least 8 characters"})
|
||||
}
|
||||
accountName := ctx.Value(authCtxKey).(string)
|
||||
acc, err := store.OpenAccount(accountName)
|
||||
xcheckf(ctx, err, "open account")
|
||||
defer func() {
|
||||
err := acc.Close()
|
||||
xlog.Check(err, "closing account")
|
||||
}()
|
||||
err = acc.SetPassword(password)
|
||||
xcheckf(ctx, err, "setting password")
|
||||
}
|
||||
|
||||
// Account returns information about the account: full name, the default domain,
// and the destinations (keys are email addresses, or localparts to the default
// domain). todo: replace with a function that returns the whole account, when
// sherpadoc understands unnamed struct fields.
func (Account) Account(ctx context.Context) (string, dns.Domain, map[string]config.Destination) {
	// Handle stored the authenticated account name in the context for API calls.
	accountName := ctx.Value(authCtxKey).(string)
	accConf, ok := mox.Conf.Account(accountName)
	if !ok {
		xcheckf(ctx, errors.New("not found"), "looking up account")
	}
	return accConf.FullName, accConf.DNSDomain, accConf.Destinations
}
|
||||
|
||||
func (Account) AccountSaveFullName(ctx context.Context, fullName string) {
|
||||
accountName := ctx.Value(authCtxKey).(string)
|
||||
_, ok := mox.Conf.Account(accountName)
|
||||
if !ok {
|
||||
xcheckf(ctx, errors.New("not found"), "looking up account")
|
||||
}
|
||||
err := mox.AccountFullNameSave(ctx, accountName, fullName)
|
||||
xcheckf(ctx, err, "saving account full name")
|
||||
}
|
||||
|
||||
// DestinationSave updates a destination.
// OldDest is compared against the current destination. If it does not match, an
// error is returned. Otherwise newDest is saved and the configuration reloaded.
func (Account) DestinationSave(ctx context.Context, destName string, oldDest, newDest config.Destination) {
	accountName := ctx.Value(authCtxKey).(string)
	accConf, ok := mox.Conf.Account(accountName)
	if !ok {
		xcheckf(ctx, errors.New("not found"), "looking up account")
	}
	curDest, ok := accConf.Destinations[destName]
	if !ok {
		xcheckf(ctx, errors.New("not found"), "looking up destination")
	}

	// Compare-and-set: refuse the update when the stored destination changed
	// since the client fetched it, preventing lost updates.
	if !curDest.Equal(oldDest) {
		xcheckf(ctx, errors.New("modified"), "checking stored destination")
	}

	// Keep fields we manage.
	newDest.DMARCReports = curDest.DMARCReports
	newDest.TLSReports = curDest.TLSReports

	err := mox.DestinationSave(ctx, accountName, destName, newDest)
	xcheckf(ctx, err, "saving destination")
}
|
||||
|
||||
// ImportAbort aborts an import that is in progress. If the import exists and isn't
// finished, no changes will have been made by the import.
func (Account) ImportAbort(ctx context.Context, importToken string) error {
	// Ask the importers goroutine to abort; it replies on the Response channel.
	req := importAbortRequest{importToken, make(chan error)}
	importers.Abort <- req
	return <-req.Response
}
|
713
webaccount/account.html
Normal file
713
webaccount/account.html
Normal file
@ -0,0 +1,713 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Mox Account</title>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<link rel="icon" href="noNeedlessFaviconRequestsPlease:" />
|
||||
<style>
|
||||
body, html { padding: 1em; font-size: 16px; }
|
||||
* { font-size: inherit; font-family: ubuntu, lato, sans-serif; margin: 0; padding: 0; box-sizing: border-box; }
|
||||
h1, h2, h3, h4 { margin-bottom: 1ex; }
|
||||
h1 { font-size: 1.2rem; }
|
||||
h2 { font-size: 1.1rem; }
|
||||
h3, h4 { font-size: 1rem; }
|
||||
ul { padding-left: 1rem; }
|
||||
.literal { background-color: #fdfdfd; padding: .5em 1em; border: 1px solid #eee; border-radius: 4px; white-space: pre-wrap; font-family: monospace; font-size: 15px; tab-size: 4; }
|
||||
table td, table th { padding: .2em .5em; }
|
||||
table > tbody > tr:nth-child(odd) { background-color: #f8f8f8; }
|
||||
.text { max-width: 50em; }
|
||||
p { margin-bottom: 1em; max-width: 50em; }
|
||||
[title] { text-decoration: underline; text-decoration-style: dotted; }
|
||||
fieldset { border: 0; }
|
||||
#page { opacity: 1; animation: fadein 0.15s ease-in; }
|
||||
#page.loading { opacity: 0.1; animation: fadeout 1s ease-out; }
|
||||
@keyframes fadein { 0% { opacity: 0 } 100% { opacity: 1 } }
|
||||
@keyframes fadeout { 0% { opacity: 1 } 100% { opacity: 0.1 } }
|
||||
</style>
|
||||
<script src="api/sherpa.js"></script>
|
||||
<script>api._sherpa.baseurl = 'api/'</script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="page">Loading...</div>
|
||||
|
||||
<script>
|
||||
// Minimal DOM-building helpers, no framework. dom is a Proxy: dom.div(...),
// dom.span(...) etc create elements; calling dom('div.cls', ...) works too.
// style/attr/prop wrap plain objects so _domKids knows how to apply them.
const [dom, style, attr, prop] = (function() {
	// _domKids appends children to element e. Strings become text nodes,
	// nodes are appended, arrays are flattened, named functions become event
	// listeners (the function name is the event name), and plain objects set
	// styles, attributes (_attr), properties (_prop) or listeners (_listen).
	// Objects with a .root field (our "views") contribute their root element.
	function _domKids(e, ...kl) {
		kl.forEach(k => {
			if (typeof k === 'string' || k instanceof String) {
				e.appendChild(document.createTextNode(k))
			} else if (k instanceof Node) {
				e.appendChild(k)
			} else if (Array.isArray(k)) {
				_domKids(e, ...k)
			} else if (typeof k === 'function') {
				if (!k.name) {
					throw new Error('function without name', k)
				}
				e.addEventListener(k.name, k)
			} else if (typeof k === 'object' && k !== null) {
				if (k.root) {
					e.appendChild(k.root)
					return
				}
				for (const key in k) {
					const value = k[key]
					if (key === '_prop') {
						for (const prop in value) {
							e[prop] = value[prop]
						}
					} else if (key === '_attr') {
						for (const prop in value) {
							e.setAttribute(prop, value[prop])
						}
					} else if (key === '_listen') {
						e.addEventListener(...value)
					} else {
						// Any other key is treated as a style property.
						e.style[key] = value
					}
				}
			} else {
				console.log('bad kid', k)
				throw new Error('bad kid')
			}
		})
	}
	// _dom creates an element. kind is "tag" or "tag.class1.class2".
	const _dom = (kind, ...kl) => {
		const t = kind.split('.')
		const e = document.createElement(t[0])
		for (let i = 1; i < t.length; i++) {
			e.classList.add(t[i])
		}
		_domKids(e, kl)
		return e
	}
	// _kids replaces all children of e with the given kids.
	_dom._kids = function(e, ...kl) {
		while(e.firstChild) {
			e.removeChild(e.firstChild)
		}
		_domKids(e, kl)
	}
	// The Proxy synthesizes (and caches) a builder function per tag name on
	// first property access; calling dom(...) directly delegates to _dom.
	const dom = new Proxy(_dom, {
		get: function(dom, prop) {
			if (prop in dom) {
				return dom[prop]
			}
			const fn = (...kl) => _dom(prop, kl)
			dom[prop] = fn
			return fn
		},
		apply: function(target, that, args) {
			// A single non-array object argument is shorthand for attributes.
			if (args.length === 1 && typeof args[0] === 'object' && !Array.isArray(args[0])) {
				return {_attr: args[0]}
			}
			return _dom(...args)
		},
	})
	// style is an identity function, only used to make call sites readable.
	const style = x => x
	const attr = x => { return {_attr: x} }
	const prop = x => { return {_prop: x} }
	return [dom, style, attr, prop]
})()
|
||||
|
||||
// link returns an anchor for href; anchorOpt is the anchor text, defaulting to the URL itself.
const link = (href, anchorOpt) => dom.a(attr({href: href, rel: 'noopener noreferrer'}), anchorOpt || href)
|
||||
|
||||
// crumblink returns a single clickable breadcrumb element.
const crumblink = (text, link) => dom.a(text, attr({href: link}))
// crumbs renders a breadcrumb page header, joining elements with " / ".
const crumbs = (...l) => [dom.h1(l.map((e, index) => index === 0 ? e : [' / ', e])), dom.br()]
|
||||
|
||||
// Page footer: link to the mox project and the server version (from the sherpa API).
const footer = dom.div(
	style({marginTop: '6ex', opacity: 0.75}),
	link('https://github.com/mjl-/mox', 'mox'),
	' ',
	api._sherpa.version,
)
|
||||
|
||||
// domainName returns the unicode representation of domain d when present,
// otherwise its ASCII form.
const domainName = d => d.Unicode || d.ASCII
|
||||
|
||||
// domainString formats domain d for display: the unicode form with the ASCII
// form in parentheses when both exist, otherwise just the ASCII form.
const domainString = d => d.Unicode ? d.Unicode+" ("+d.ASCII+")" : d.ASCII
|
||||
|
||||
// box renders l inside an inline colored rounded box (for status and warning
// messages), followed by a line break.
const box = (color, ...l) => [
	dom.div(
		style({
			display: 'inline-block',
			padding: '.25em .5em',
			backgroundColor: color,
			borderRadius: '3px',
			margin: '.5ex 0',
		}),
		l,
	),
	dom.br(),
]
|
||||
|
||||
// Background colors used with box() for status messages.
const green = '#1dea20'
const yellow = '#ffe400'
const red = '#ff7443'
const blue = '#8bc8ff'
|
||||
|
||||
const index = async () => {
|
||||
const [accountFullName, domain, destinations] = await api.Account()
|
||||
|
||||
let fullNameForm, fullNameFieldset, fullName
|
||||
let passwordForm, passwordFieldset, password1, password2, passwordHint
|
||||
|
||||
let importForm, importFieldset, mailboxFile, mailboxFileHint, mailboxPrefix, mailboxPrefixHint, importProgress, importAbortBox, importAbort
|
||||
|
||||
const importTrack = async (token) => {
|
||||
const importConnection = dom.div('Waiting for updates...')
|
||||
importProgress.appendChild(importConnection)
|
||||
|
||||
let countsTbody
|
||||
let counts = {} // mailbox -> elem
|
||||
|
||||
let problems // element
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
const eventSource = new window.EventSource('importprogress?token=' + encodeURIComponent(token))
|
||||
eventSource.addEventListener('open', function(e) {
|
||||
console.log('eventsource open', {e})
|
||||
dom._kids(importConnection, dom.div('Waiting for updates, connected...'))
|
||||
|
||||
dom._kids(importAbortBox,
|
||||
importAbort=dom.button('Abort import', attr({title: 'If the import is not yet finished, it can be aborted and no messages will have been imported.'}), async function click(e) {
|
||||
try {
|
||||
await api.ImportAbort(token)
|
||||
} catch (err) {
|
||||
console.log({err})
|
||||
window.alert('Error: ' + err.message)
|
||||
}
|
||||
// On success, the event source will get an aborted notification and shutdown the connection.
|
||||
})
|
||||
)
|
||||
})
|
||||
eventSource.addEventListener('error', function(e) {
|
||||
console.log('eventsource error', {e})
|
||||
dom._kids(importConnection, box(red, 'Connection error'))
|
||||
reject({message: 'Connection error'})
|
||||
})
|
||||
eventSource.addEventListener('count', (e) => {
|
||||
const data = JSON.parse(e.data) // {Mailbox: ..., Count: ...}
|
||||
console.log('import count event', {e, data})
|
||||
if (!countsTbody) {
|
||||
importProgress.appendChild(
|
||||
dom.div(
|
||||
dom.br(),
|
||||
dom.h3('Importing mailboxes and messages...'),
|
||||
dom.table(
|
||||
dom.thead(
|
||||
dom.tr(dom.th('Mailbox'), dom.th('Messages')),
|
||||
),
|
||||
countsTbody=dom.tbody(),
|
||||
),
|
||||
)
|
||||
)
|
||||
}
|
||||
let elem = counts[data.Mailbox]
|
||||
if (!elem) {
|
||||
countsTbody.appendChild(
|
||||
dom.tr(
|
||||
dom.td(data.Mailbox),
|
||||
elem=dom.td(style({textAlign: 'right'}), ''+data.Count),
|
||||
),
|
||||
)
|
||||
counts[data.Mailbox] = elem
|
||||
}
|
||||
dom._kids(elem, ''+data.Count)
|
||||
})
|
||||
eventSource.addEventListener('problem', (e) => {
|
||||
const data = JSON.parse(e.data) // {Message: ...}
|
||||
console.log('import problem event', {e, data})
|
||||
if (!problems) {
|
||||
importProgress.appendChild(
|
||||
dom.div(
|
||||
dom.br(),
|
||||
dom.h3('Problems during import'),
|
||||
problems=dom.div(),
|
||||
),
|
||||
)
|
||||
}
|
||||
problems.appendChild(dom.div(box(yellow, data.Message)))
|
||||
})
|
||||
eventSource.addEventListener('done', (e) => {
|
||||
console.log('import done event', {e})
|
||||
importProgress.appendChild(dom.div(dom.br(), box(blue, 'Import finished')))
|
||||
|
||||
eventSource.close()
|
||||
dom._kids(importConnection)
|
||||
dom._kids(importAbortBox)
|
||||
window.sessionStorage.removeItem('ImportToken')
|
||||
|
||||
resolve()
|
||||
})
|
||||
eventSource.addEventListener('aborted', function(e) {
|
||||
console.log('import aborted event', {e})
|
||||
|
||||
importProgress.appendChild(dom.div(dom.br(), box(red, 'Import aborted, no message imported')))
|
||||
|
||||
eventSource.close()
|
||||
dom._kids(importConnection)
|
||||
dom._kids(importAbortBox)
|
||||
window.sessionStorage.removeItem('ImportToken')
|
||||
|
||||
reject({message: 'Import aborted'})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const page = document.getElementById('page')
|
||||
dom._kids(page,
|
||||
crumbs('Mox Account'),
|
||||
dom.p('NOTE: Not all account settings can be configured through these pages yet. See the configuration file for more options.'),
|
||||
dom.div(
|
||||
'Default domain: ',
|
||||
domain.ASCII ? domainString(domain) : '(none)',
|
||||
),
|
||||
dom.br(),
|
||||
|
||||
fullNameForm=dom.form(
|
||||
fullNameFieldset=dom.fieldset(
|
||||
dom.label(
|
||||
style({display: 'inline-block'}),
|
||||
'Full name',
|
||||
dom.br(),
|
||||
fullName=dom.input(attr({value: accountFullName, title: 'Name to use in From header when composing messages. Can be overridden per configured address.'})),
|
||||
|
||||
),
|
||||
' ',
|
||||
dom.button('Save'),
|
||||
),
|
||||
async function submit(e) {
|
||||
e.preventDefault()
|
||||
fullNameFieldset.disabled = true
|
||||
try {
|
||||
await api.AccountSaveFullName(fullName.value)
|
||||
fullName.setAttribute('value', fullName.value)
|
||||
fullNameForm.reset()
|
||||
window.alert('Full name has been changed.')
|
||||
} catch (err) {
|
||||
console.log({err})
|
||||
window.alert('Error: ' + err.message)
|
||||
} finally {
|
||||
fullNameFieldset.disabled = false
|
||||
}
|
||||
},
|
||||
),
|
||||
dom.br(),
|
||||
|
||||
dom.h2('Addresses'),
|
||||
dom.ul(
|
||||
Object.entries(destinations).sort().map(t =>
|
||||
dom.li(
|
||||
dom.a(t[0], attr({href: '#destinations/'+t[0]})),
|
||||
t[0].startsWith('@') ? ' (catchall)' : [],
|
||||
),
|
||||
),
|
||||
),
|
||||
dom.br(),
|
||||
dom.h2('Change password'),
|
||||
passwordForm=dom.form(
|
||||
passwordFieldset=dom.fieldset(
|
||||
dom.label(
|
||||
style({display: 'inline-block'}),
|
||||
'New password',
|
||||
dom.br(),
|
||||
password1=dom.input(attr({type: 'password', required: ''}), function focus() {
|
||||
passwordHint.style.display = ''
|
||||
}),
|
||||
),
|
||||
' ',
|
||||
dom.label(
|
||||
style({display: 'inline-block'}),
|
||||
'New password repeat',
|
||||
dom.br(),
|
||||
password2=dom.input(attr({type: 'password', required: ''})),
|
||||
),
|
||||
' ',
|
||||
dom.button('Change password'),
|
||||
),
|
||||
passwordHint=dom.div(
|
||||
style({display: 'none', marginTop: '.5ex'}),
|
||||
dom.button('Generate random password', attr({type: 'button'}), function click(e) {
|
||||
e.preventDefault()
|
||||
let b = new Uint8Array(1)
|
||||
let s = ''
|
||||
const chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*-_;:,<.>/'
|
||||
while (s.length < 12) {
|
||||
self.crypto.getRandomValues(b)
|
||||
if (Math.ceil(b[0]/chars.length)*chars.length > 255) {
|
||||
continue // Prevent bias.
|
||||
}
|
||||
s += chars[b[0]%chars.length]
|
||||
}
|
||||
password1.type = 'text'
|
||||
password2.type = 'text'
|
||||
password1.value = s
|
||||
password2.value = s
|
||||
}),
|
||||
dom('div.text',
|
||||
box(yellow, 'Important: Bots will try to bruteforce your password. Connections with failed authentication attempts will be rate limited but attackers WILL find weak passwords. If your account is compromised, spammers are likely to abuse your system, spamming your address and the wider internet in your name. So please pick a random, unguessable password, preferrably at least 12 characters.'),
|
||||
),
|
||||
),
|
||||
async function submit(e) {
|
||||
e.stopPropagation()
|
||||
e.preventDefault()
|
||||
if (!password1.value || password1.value !== password2.value) {
|
||||
window.alert('Passwords do not match.')
|
||||
return
|
||||
}
|
||||
passwordFieldset.disabled = true
|
||||
try {
|
||||
await api.SetPassword(password1.value)
|
||||
window.alert('Password has been changed.')
|
||||
passwordForm.reset()
|
||||
} catch (err) {
|
||||
console.log({err})
|
||||
window.alert('Error: ' + err.message)
|
||||
} finally {
|
||||
passwordFieldset.disabled = false
|
||||
}
|
||||
},
|
||||
),
|
||||
dom.br(),
|
||||
dom.h2('Export'),
|
||||
dom.p('Export all messages in all mailboxes. In maildir or mbox format, as .zip or .tgz file.'),
|
||||
dom.ul(
|
||||
dom.li(dom.a('mail-export-maildir.tgz', attr({href: 'mail-export-maildir.tgz'}))),
|
||||
dom.li(dom.a('mail-export-maildir.zip', attr({href: 'mail-export-maildir.zip'}))),
|
||||
dom.li(dom.a('mail-export-mbox.tgz', attr({href: 'mail-export-mbox.tgz'}))),
|
||||
dom.li(dom.a('mail-export-mbox.zip', attr({href: 'mail-export-mbox.zip'}))),
|
||||
),
|
||||
dom.br(),
|
||||
dom.h2('Import'),
|
||||
dom.p('Import messages from a .zip or .tgz file with maildirs and/or mbox files.'),
|
||||
importForm=dom.form(
|
||||
async function submit(e) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
|
||||
const request = () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
// Browsers can do everything. Except show a progress bar while uploading...
|
||||
let progressBox, progressPercentage, progressBar
|
||||
dom._kids(importProgress,
|
||||
progressBox=dom.div(
|
||||
dom.div('Uploading... ', progressPercentage=dom.span()),
|
||||
),
|
||||
)
|
||||
importProgress.style.display = ''
|
||||
|
||||
const xhr = new window.XMLHttpRequest()
|
||||
xhr.open('POST', 'import', true)
|
||||
xhr.upload.addEventListener('progress', (e) => {
|
||||
if (!e.lengthComputable) {
|
||||
return
|
||||
}
|
||||
const pct = Math.floor(100*e.loaded/e.total)
|
||||
dom._kids(progressPercentage, pct+'%')
|
||||
})
|
||||
xhr.addEventListener('load', () => {
|
||||
console.log('upload done', {xhr: xhr, status: xhr.status})
|
||||
if (xhr.status !== 200) {
|
||||
reject({message: 'status '+xhr.status})
|
||||
return
|
||||
}
|
||||
let resp
|
||||
try {
|
||||
resp = JSON.parse(xhr.responseText)
|
||||
} catch (err) {
|
||||
reject({message: 'parsing resonse json: '+err.message})
|
||||
return
|
||||
}
|
||||
resolve(resp)
|
||||
})
|
||||
xhr.addEventListener('error', (e) => reject({message: 'upload error', event: e}))
|
||||
xhr.addEventListener('abort', (e) => reject({message: 'upload aborted', event: e}))
|
||||
xhr.send(new window.FormData(importForm))
|
||||
})
|
||||
}
|
||||
try {
|
||||
const p = request()
|
||||
importFieldset.disabled = true
|
||||
const result = await p
|
||||
|
||||
try {
|
||||
window.sessionStorage.setItem('ImportToken', result.ImportToken)
|
||||
} catch (err) {
|
||||
console.log('storing import token in session storage', {err})
|
||||
// Ignore error, could be some browser security thing like private browsing.
|
||||
}
|
||||
|
||||
await importTrack(result.ImportToken)
|
||||
} catch (err) {
|
||||
console.log({err})
|
||||
window.alert('Error: '+err.message)
|
||||
} finally {
|
||||
importFieldset.disabled = false
|
||||
}
|
||||
},
|
||||
importFieldset=dom.fieldset(
|
||||
dom.div(
|
||||
style({marginBottom: '1ex'}),
|
||||
dom.label(
|
||||
dom.div(style({marginBottom: '.5ex'}), 'File'),
|
||||
mailboxFile=dom.input(attr({type: 'file', required: '', name: 'file'}), function focus() {
|
||||
mailboxFileHint.style.display = ''
|
||||
}),
|
||||
),
|
||||
mailboxFileHint=dom.p(style({display: 'none', fontStyle: 'italic', marginTop: '.5ex'}), 'This file must either be a zip file or a gzipped tar file with mbox and/or maildir mailboxes. For maildirs, an optional file "dovecot-keywords" is read additional keywords, like Forwarded/Junk/NotJunk. If an imported mailbox already exists by name, messages are added to the existing mailbox. If a mailbox does not yet exist it will be created.'),
|
||||
),
|
||||
dom.div(
|
||||
style({marginBottom: '1ex'}),
|
||||
dom.label(
|
||||
dom.div(style({marginBottom: '.5ex'}), 'Skip mailbox prefix (optional)'),
|
||||
mailboxPrefix=dom.input(attr({name: 'skipMailboxPrefix'}), function focus() {
|
||||
mailboxPrefixHint.style.display = ''
|
||||
}),
|
||||
),
|
||||
mailboxPrefixHint=dom.p(style({display: 'none', fontStyle: 'italic', marginTop: '.5ex'}), 'If set, any mbox/maildir path with this prefix will have it stripped before importing. For example, if all mailboxes are in a directory "Takeout", specify that path in the field above so mailboxes like "Takeout/Inbox.mbox" are imported into a mailbox called "Inbox" instead of "Takeout/Inbox".'),
|
||||
),
|
||||
dom.div(
|
||||
dom.button('Upload and import'),
|
||||
dom.p(style({fontStyle: 'italic', marginTop: '.5ex'}), 'The file is uploaded first, then its messages are imported. Importing is done in a transaction, you can abort the entire import before it is finished.'),
|
||||
),
|
||||
),
|
||||
),
|
||||
importAbortBox=dom.div(), // Outside fieldset because it gets disabled, above progress because may be scrolling it down quickly with problems.
|
||||
importProgress=dom.div(
|
||||
style({display: 'none'}),
|
||||
),
|
||||
footer,
|
||||
)
|
||||
|
||||
// Try to show the progress of an earlier import session. The user may have just
|
||||
// refreshed the browser.
|
||||
let importToken
|
||||
try {
|
||||
importToken = window.sessionStorage.getItem('ImportToken')
|
||||
} catch (err) {
|
||||
console.log('looking up ImportToken in session storage', {err})
|
||||
return
|
||||
}
|
||||
if (!importToken) {
|
||||
return
|
||||
}
|
||||
importFieldset.disabled = true
|
||||
dom._kids(importProgress,
|
||||
dom.div(
|
||||
dom.div('Reconnecting to import...'),
|
||||
),
|
||||
)
|
||||
importProgress.style.display = ''
|
||||
importTrack(importToken)
|
||||
.catch((err) => {
|
||||
if (window.confirm('Error reconnecting to import. Remove this import session?')) {
|
||||
window.sessionStorage.removeItem('ImportToken')
|
||||
dom._kids(importProgress)
|
||||
importProgress.style.display = 'none'
|
||||
}
|
||||
})
|
||||
.finally(() => {
|
||||
importFieldset.disabled = false
|
||||
})
|
||||
}
|
||||
|
||||
// destination renders the editing page for a single destination (an email
// address of the account): its default mailbox, full name and rulesets.
// name is the destination key as returned by api.Account().
const destination = async (name) => {
	const [_, domain, destinations] = await api.Account()
	let dest = destinations[name]
	if (!dest) {
		throw new Error('destination not found')
	}

	let rulesetsTbody = dom.tbody()
	let rulesetsRows = []

	// addRulesetsRow adds one editable table row for a ruleset. rs may be an
	// empty object for a fresh ruleset.
	const addRulesetsRow = (rs) => {
		let headersCell = dom.td()
		let headers = [] // Holds objects: {key, value, root}
		const addHeader = (k, v) => {
			let h = {}
			h.root = dom.div(
				h.key=dom.input(attr({value: k})),
				' ',
				h.value=dom.input(attr({value: v})),
				' ',
				dom.button('-', style({width: '1.5em'}), function click(e) {
					h.root.remove()
					// Remove in-place. Rebinding "headers" to a filtered copy would
					// leave row.headers (captured below) pointing at a stale array
					// that still contains the removed header, so it would still be
					// saved, and later additions would go to a different array.
					headers.splice(headers.indexOf(h), 1)
					if (headers.length === 0) {
						// No headers left: offer a "+" button to start a new list.
						const b = dom.button('+', style({width: '1.5em'}), function click(e) {
							e.target.remove()
							addHeader('', '')
						})
						headersCell.appendChild(dom.div(style({textAlign: 'right'}), b))
					}
				}),
				' ',
				dom.button('+', style({width: '1.5em'}), function click(e) {
					addHeader('', '')
				}),
			)
			headers.push(h)
			headersCell.appendChild(h.root)
		}
		Object.entries(rs.HeadersRegexp || {}).sort().map(t => t).forEach(t =>
			addHeader(t[0], t[1])
		)
		if (Object.entries(rs.HeadersRegexp || {}).length === 0) {
			// Empty header list: only show a "+" button to add the first header.
			const b = dom.button('+', style({width: '1.5em'}), function click(e) {
				e.target.remove()
				addHeader('', '')
			})
			headersCell.appendChild(dom.div(style({textAlign: 'right'}), b))
		}

		let row = {headers}
		row.root=dom.tr(
			dom.td(row.SMTPMailFromRegexp=dom.input(attr({value: rs.SMTPMailFromRegexp || ''}))),
			dom.td(row.VerifiedDomain=dom.input(attr({value: rs.VerifiedDomain || ''}))),
			headersCell,
			dom.td(row.ListAllowDomain=dom.input(attr({value: rs.ListAllowDomain || ''}))),
			dom.td(row.Mailbox=dom.input(attr({value: rs.Mailbox || ''}))),
			dom.td(
				dom.button('Remove ruleset', function click(e) {
					row.root.remove()
					rulesetsRows = rulesetsRows.filter(e => e !== row)
				}),
			),
		)
		rulesetsRows.push(row)
		rulesetsTbody.appendChild(row.root)
	};

	// Semicolon above is required: without it the "(" below would be parsed as
	// a call of the addRulesetsRow arrow function (ASI pitfall).
	(dest.Rulesets || []).forEach(rs => {
		addRulesetsRow(rs)
	})

	let defaultMailbox
	let fullName
	let saveButton

	const page = document.getElementById('page')
	dom._kids(page,
		crumbs(
			crumblink('Mox Account', '#'),
			'Destination ' + name,
		),
		dom.div(
			dom.span('Default mailbox', attr({title: 'Default mailbox where email for this recipient is delivered to if it does not match any ruleset. Default is Inbox.'})),
			dom.br(),
			defaultMailbox=dom.input(attr({value: dest.Mailbox, placeholder: 'Inbox'})),
		),
		dom.br(),
		dom.div(
			dom.span('Full name', attr({title: 'Name to use in From header when composing messages. If not set, the account default full name is used.'})),
			dom.br(),
			fullName=dom.input(attr({value: dest.FullName})),
		),
		dom.br(),
		dom.h2('Rulesets'),
		dom.p('Incoming messages are checked against the rulesets. If a ruleset matches, the message is delivered to the mailbox configured for the ruleset instead of to the default mailbox.'),
		dom.p('The "List allow domain" does not affect the matching, but skips the regular spam checks if one of the verified domains is a (sub)domain of the domain mentioned here.'),
		dom.table(
			dom.thead(
				dom.tr(
					dom.th('SMTP "MAIL FROM" regexp', attr({title: 'Matches if this regular expression matches (a substring of) the SMTP MAIL FROM address (not the message From-header). E.g. user@example.org.'})),
					dom.th('Verified domain', attr({title: 'Matches if this domain matches an SPF- and/or DKIM-verified (sub)domain.'})),
					// Typo fixed below: "valuees" -> "values".
					dom.th('Headers regexp', attr({title: 'Matches if these header field/value regular expressions all match (substrings of) the message headers. Header fields and values are converted to lower case before matching. Whitespace is trimmed from the value before matching. A header field can occur multiple times in a message, only one instance has to match. For mailing lists, you could match on ^list-id$ with the value typically the mailing list address in angled brackets with @ replaced with a dot, e.g. <name\\.lists\\.example\\.org>.'})),
					dom.th('List allow domain', attr({title: "Influence the spam filtering, this does not change whether this ruleset applies to a message. If this domain matches an SPF- and/or DKIM-verified (sub)domain, the message is accepted without further spam checks, such as a junk filter or DMARC reject evaluation. DMARC rejects should not apply for mailing lists that are not configured to rewrite the From-header of messages that don't have a passing DKIM signature of the From-domain. Otherwise, by rejecting messages, you may be automatically unsubscribed from the mailing list. The assumption is that mailing lists do their own spam filtering/moderation."})),
					dom.th('Mailbox', attr({title: 'Mailbox to deliver to if this ruleset matches.'})),
					dom.th('Action'),
				)
			),
			rulesetsTbody,
			dom.tfoot(
				dom.tr(
					dom.td(attr({colspan: '5'})),
					dom.td(
						dom.button('Add ruleset', function click(e) {
							addRulesetsRow({})
						}),
					),
				),
			),
		),
		dom.br(),
		saveButton=dom.button('Save', async function click(e) {
			saveButton.disabled = true
			try {
				// Gather current form values into a new Destination.
				const newDest = {
					Mailbox: defaultMailbox.value,
					FullName: fullName.value,
					Rulesets: rulesetsRows.map(row => {
						return {
							SMTPMailFromRegexp: row.SMTPMailFromRegexp.value,
							VerifiedDomain: row.VerifiedDomain.value,
							HeadersRegexp: Object.fromEntries(row.headers.map(h => [h.key.value, h.value.value])),
							ListAllowDomain: row.ListAllowDomain.value,
							Mailbox: row.Mailbox.value,
						}
					}),
				}
				page.classList.add('loading')
				await api.DestinationSave(name, dest, newDest)
				dest = newDest // Set new dest, for if user edits again. Without this, they would get an error that the config has been modified.
			} catch (err) {
				console.log({err})
				window.alert('Error: '+err.message)
				return
			} finally {
				saveButton.disabled = false
				page.classList.remove('loading')
			}
		}),
	)
}
|
||||
|
||||
// init wires up hash-based routing: "#" renders the index page,
// "#destinations/<name>" renders a single destination. Runs once at page load
// and again whenever the location hash changes.
const init = async () => {
	const page = document.getElementById('page')
	let curhash

	const hashChange = async () => {
		// Nothing to do when the hash did not actually change.
		if (curhash === window.location.hash) {
			return
		}

		let h = decodeURIComponent(window.location.hash)
		if (h !== '' && h.startsWith('#')) {
			h = h.substring(1)
		}
		const parts = h.split('/')

		page.classList.add('loading')
		try {
			if (h === '') {
				await index()
			} else if (parts.length === 2 && parts[0] === 'destinations') {
				await destination(parts[1])
			} else {
				dom._kids(page, 'page not found')
			}
		} catch (err) {
			console.log({err})
			window.alert('Error: ' + err.message)
			// Restore the previous hash; record it so the resulting hashchange
			// event is a no-op.
			window.location.hash = curhash
			curhash = window.location.hash
			return
		}
		curhash = window.location.hash
		page.classList.remove('loading')
	}

	window.addEventListener('hashchange', hashChange)
	hashChange()
}
|
||||
|
||||
window.addEventListener('load', init)
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
225
webaccount/account_test.go
Normal file
225
webaccount/account_test.go
Normal file
@ -0,0 +1,225 @@
|
||||
package webaccount
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mjl-/bstore"
|
||||
|
||||
"github.com/mjl-/mox/mlog"
|
||||
"github.com/mjl-/mox/mox-"
|
||||
"github.com/mjl-/mox/store"
|
||||
)
|
||||
|
||||
var ctxbg = context.Background()
|
||||
|
||||
// tcheck fails the test immediately with msg and the error when err is
// non-nil. It is a no-op for a nil error.
func tcheck(t *testing.T, err error, msg string) {
	t.Helper()
	if err == nil {
		return
	}
	t.Fatalf("%s: %s", msg, err)
}
|
||||
|
||||
// TestAccount exercises the account web interface end to end: HTTP basic
// authentication, setting a password, saving destinations and full name,
// importing messages from uploaded mbox/maildir archives, verifying the
// resulting messages/keywords in the store, and exporting mailboxes again.
func TestAccount(t *testing.T) {
	// Start from a clean data directory and a fresh config.
	os.RemoveAll("../testdata/httpaccount/data")
	mox.ConfigStaticPath = "../testdata/httpaccount/mox.conf"
	mox.ConfigDynamicPath = filepath.Join(filepath.Dir(mox.ConfigStaticPath), "domains.conf")
	mox.MustLoadConfig(true, false)
	acc, err := store.OpenAccount("mjl")
	tcheck(t, err, "open account")
	defer func() {
		err = acc.Close()
		tcheck(t, err, "closing account")
	}()
	switchDone := store.Switchboard()
	defer close(switchDone)

	log := mlog.New("store")

	// test performs a request with basic auth userpass and checks that
	// CheckAuth resolves to the expected account name ("" means auth failure).
	test := func(userpass string, expect string) {
		t.Helper()

		w := httptest.NewRecorder()
		r := httptest.NewRequest("GET", "/ignored", nil)
		authhdr := "Basic " + base64.StdEncoding.EncodeToString([]byte(userpass))
		r.Header.Add("Authorization", authhdr)
		_, accName := CheckAuth(ctxbg, log, "webaccount", w, r)
		if accName != expect {
			t.Fatalf("got %q, expected %q", accName, expect)
		}
	}

	const authOK = "mjl@mox.example:test1234"
	const authBad = "mjl@mox.example:badpassword"

	// Context with the account name already authenticated, for calling API
	// methods directly.
	authCtx := context.WithValue(ctxbg, authCtxKey, "mjl")

	test(authOK, "") // No password set yet.
	Account{}.SetPassword(authCtx, "test1234")
	test(authOK, "mjl")
	test(authBad, "")

	fullName, _, dests := Account{}.Account(authCtx)
	Account{}.DestinationSave(authCtx, "mjl@mox.example", dests["mjl@mox.example"], dests["mjl@mox.example"]) // todo: save modified value and compare it afterwards

	Account{}.AccountSaveFullName(authCtx, fullName+" changed") // todo: check if value was changed
	Account{}.AccountSaveFullName(authCtx, fullName)

	// The import handler needs the manager goroutine to propagate progress.
	go ImportManage()

	// Import mbox/maildir tgz/zip.
	// testImport uploads filename as multipart form data to the import
	// endpoint, then follows progress events until done and checks that the
	// expected number of messages was imported.
	testImport := func(filename string, expect int) {
		t.Helper()

		// Build the multipart upload body from the archive on disk.
		var reqBody bytes.Buffer
		mpw := multipart.NewWriter(&reqBody)
		part, err := mpw.CreateFormFile("file", path.Base(filename))
		tcheck(t, err, "creating form file")
		buf, err := os.ReadFile(filename)
		tcheck(t, err, "reading file")
		_, err = part.Write(buf)
		tcheck(t, err, "write part")
		err = mpw.Close()
		tcheck(t, err, "close multipart writer")

		r := httptest.NewRequest("POST", "/import", &reqBody)
		r.Header.Add("Content-Type", mpw.FormDataContentType())
		r.Header.Add("Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(authOK)))
		w := httptest.NewRecorder()
		Handle(w, r)
		if w.Code != http.StatusOK {
			t.Fatalf("import, got status code %d, expected 200: %s", w.Code, w.Body.Bytes())
		}
		m := map[string]string{}
		if err := json.Unmarshal(w.Body.Bytes(), &m); err != nil {
			t.Fatalf("parsing import response: %v", err)
		}
		token := m["ImportToken"]

		// Register as a listener for this import's progress events.
		l := importListener{token, make(chan importEvent, 100), make(chan bool)}
		importers.Register <- &l
		if !<-l.Register {
			t.Fatalf("register failed")
		}
		defer func() {
			importers.Unregister <- &l
		}()
		// Accumulate per-mailbox counts until the import reports done.
		count := 0
	loop:
		for {
			e := <-l.Events
			switch x := e.Event.(type) {
			case importCount:
				count += x.Count
			case importProblem:
				t.Fatalf("unexpected problem: %q", x.Message)
			case importDone:
				break loop
			case importAborted:
				t.Fatalf("unexpected aborted import")
			default:
				panic("missing case")
			}
		}
		if count != expect {
			t.Fatalf("imported %d messages, expected %d", count, expect)
		}
	}
	testImport("../testdata/importtest.mbox.zip", 2)
	testImport("../testdata/importtest.maildir.tgz", 2)

	// Check there are messages, with the right flags.
	acc.DB.Read(ctxbg, func(tx *bstore.Tx) error {
		_, err = bstore.QueryTx[store.Message](tx).FilterEqual("Expunged", false).FilterIn("Keywords", "other").FilterIn("Keywords", "test").Get()
		tcheck(t, err, `fetching message with keywords "other" and "test"`)

		// Imported keywords must be reflected on the mailbox too.
		mb, err := acc.MailboxFind(tx, "importtest")
		tcheck(t, err, "looking up mailbox importtest")
		if mb == nil {
			t.Fatalf("missing mailbox importtest")
		}
		sort.Strings(mb.Keywords)
		if strings.Join(mb.Keywords, " ") != "other test" {
			t.Fatalf(`expected mailbox keywords "other" and "test", got %v`, mb.Keywords)
		}

		n, err := bstore.QueryTx[store.Message](tx).FilterEqual("Expunged", false).FilterIn("Keywords", "custom").Count()
		tcheck(t, err, `fetching message with keyword "custom"`)
		if n != 2 {
			t.Fatalf(`got %d messages with keyword "custom", expected 2`, n)
		}

		mb, err = acc.MailboxFind(tx, "maildir")
		tcheck(t, err, "looking up mailbox maildir")
		if mb == nil {
			t.Fatalf("missing mailbox maildir")
		}
		if strings.Join(mb.Keywords, " ") != "custom" {
			t.Fatalf(`expected mailbox keywords "custom", got %v`, mb.Keywords)
		}

		return nil
	})

	// testExport downloads httppath (a .zip or .tgz export), unpacks it in
	// memory and checks it contains expectFiles regular files.
	testExport := func(httppath string, iszip bool, expectFiles int) {
		t.Helper()

		r := httptest.NewRequest("GET", httppath, nil)
		r.Header.Add("Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(authOK)))
		w := httptest.NewRecorder()
		Handle(w, r)
		if w.Code != http.StatusOK {
			t.Fatalf("export, got status code %d, expected 200: %s", w.Code, w.Body.Bytes())
		}
		var count int
		if iszip {
			buf := w.Body.Bytes()
			zr, err := zip.NewReader(bytes.NewReader(buf), int64(len(buf)))
			tcheck(t, err, "reading zip")
			for _, f := range zr.File {
				// Entries ending in "/" are directories, not files.
				if !strings.HasSuffix(f.Name, "/") {
					count++
				}
			}
		} else {
			gzr, err := gzip.NewReader(w.Body)
			tcheck(t, err, "gzip reader")
			tr := tar.NewReader(gzr)
			for {
				h, err := tr.Next()
				if err == io.EOF {
					break
				}
				tcheck(t, err, "next file in tar")
				if !strings.HasSuffix(h.Name, "/") {
					count++
				}
				// Drain the entry so Next() advances correctly.
				_, err = io.Copy(io.Discard, tr)
				tcheck(t, err, "reading from tar")
			}
		}
		if count != expectFiles {
			t.Fatalf("export, has %d files, expected %d", count, expectFiles)
		}
	}

	testExport("/mail-export-maildir.tgz", false, 6) // 2 mailboxes, each with 2 messages and a dovecot-keyword file
	testExport("/mail-export-maildir.zip", true, 6)
	testExport("/mail-export-mbox.tgz", false, 2)
	testExport("/mail-export-mbox.zip", true, 2)
}
|
207
webaccount/accountapi.json
Normal file
207
webaccount/accountapi.json
Normal file
@ -0,0 +1,207 @@
|
||||
{
|
||||
"Name": "Account",
|
||||
"Docs": "Account exports web API functions for the account web interface. All its\nmethods are exported under api/. Function calls require valid HTTP\nAuthentication credentials of a user.",
|
||||
"Functions": [
|
||||
{
|
||||
"Name": "SetPassword",
|
||||
"Docs": "SetPassword saves a new password for the account, invalidating the previous password.\nSessions are not interrupted, and will keep working. New login attempts must use the new password.\nPassword must be at least 8 characters.",
|
||||
"Params": [
|
||||
{
|
||||
"Name": "password",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Returns": []
|
||||
},
|
||||
{
|
||||
"Name": "Account",
|
||||
"Docs": "Account returns information about the account: full name, the default domain,\nand the destinations (keys are email addresses, or localparts to the default\ndomain). todo: replace with a function that returns the whole account, when\nsherpadoc understands unnamed struct fields.",
|
||||
"Params": [],
|
||||
"Returns": [
|
||||
{
|
||||
"Name": "r0",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "r1",
|
||||
"Typewords": [
|
||||
"Domain"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "r2",
|
||||
"Typewords": [
|
||||
"{}",
|
||||
"Destination"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "AccountSaveFullName",
|
||||
"Docs": "",
|
||||
"Params": [
|
||||
{
|
||||
"Name": "fullName",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Returns": []
|
||||
},
|
||||
{
|
||||
"Name": "DestinationSave",
|
||||
"Docs": "DestinationSave updates a destination.\nOldDest is compared against the current destination. If it does not match, an\nerror is returned. Otherwise newDest is saved and the configuration reloaded.",
|
||||
"Params": [
|
||||
{
|
||||
"Name": "destName",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "oldDest",
|
||||
"Typewords": [
|
||||
"Destination"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "newDest",
|
||||
"Typewords": [
|
||||
"Destination"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Returns": []
|
||||
},
|
||||
{
|
||||
"Name": "ImportAbort",
|
||||
"Docs": "ImportAbort aborts an import that is in progress. If the import exists and isn't\nfinished, no changes will have been made by the import.",
|
||||
"Params": [
|
||||
{
|
||||
"Name": "importToken",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
}
|
||||
],
|
||||
"Returns": []
|
||||
}
|
||||
],
|
||||
"Sections": [],
|
||||
"Structs": [
|
||||
{
|
||||
"Name": "Domain",
|
||||
"Docs": "Domain is a domain name, with one or more labels, with at least an ASCII\nrepresentation, and for IDNA non-ASCII domains a unicode representation.\nThe ASCII string must be used for DNS lookups.",
|
||||
"Fields": [
|
||||
{
|
||||
"Name": "ASCII",
|
||||
"Docs": "A non-unicode domain, e.g. with A-labels (xn--...) or NR-LDH (non-reserved letters/digits/hyphens) labels. Always in lower case.",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "Unicode",
|
||||
"Docs": "Name as U-labels. Empty if this is an ASCII-only domain.",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "Destination",
|
||||
"Docs": "",
|
||||
"Fields": [
|
||||
{
|
||||
"Name": "Mailbox",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "Rulesets",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"[]",
|
||||
"Ruleset"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "FullName",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "Ruleset",
|
||||
"Docs": "",
|
||||
"Fields": [
|
||||
{
|
||||
"Name": "SMTPMailFromRegexp",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "VerifiedDomain",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "HeadersRegexp",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"{}",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "ListAllowDomain",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "Mailbox",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"string"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "VerifiedDNSDomain",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"Domain"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Name": "ListAllowDNSDomain",
|
||||
"Docs": "",
|
||||
"Typewords": [
|
||||
"Domain"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"Ints": [],
|
||||
"Strings": [],
|
||||
"SherpaVersion": 0,
|
||||
"SherpadocVersion": 1
|
||||
}
|
865
webaccount/import.go
Normal file
865
webaccount/import.go
Normal file
@ -0,0 +1,865 @@
|
||||
package webaccount
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"archive/zip"
|
||||
"bufio"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"context"
|
||||
cryptrand "crypto/rand"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"runtime/debug"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/exp/maps"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
|
||||
"github.com/mjl-/bstore"
|
||||
|
||||
"github.com/mjl-/mox/message"
|
||||
"github.com/mjl-/mox/mlog"
|
||||
"github.com/mjl-/mox/mox-"
|
||||
"github.com/mjl-/mox/store"
|
||||
)
|
||||
|
||||
// importListener registers interest in progress events for the import
// identified by Token, e.g. for an SSE connection to a browser.
type importListener struct {
	Token    string
	Events   chan importEvent
	Register chan bool // Whether register is successful.
}

// importEvent is a progress event for the import identified by Token,
// distributed by ImportManage to all registered listeners.
type importEvent struct {
	Token  string
	SSEMsg []byte // Full SSE message, including event: ... and data: ... \n\n
	Event  any    // nil, importCount, importProblem, importDone, importAborted
	Cancel func() // For cancelling the context causing abort of the import. Set in first, import-registering, event.
}

// importAbortRequest asks ImportManage to abort the import identified by
// Token. The result is sent on Response, nil on success.
type importAbortRequest struct {
	Token    string
	Response chan error
}

// importers holds the channels used to communicate with the single
// ImportManage goroutine: listener (un)registration, progress events from
// running imports, and abort requests.
var importers = struct {
	Register   chan *importListener
	Unregister chan *importListener
	Events     chan importEvent
	Abort      chan importAbortRequest
}{
	make(chan *importListener, 1),
	make(chan *importListener, 1),
	make(chan importEvent),
	make(chan importAbortRequest),
}
|
||||
|
||||
// ImportManage should be run as a goroutine, it manages imports of mboxes/maildirs, propagating progress over SSE connections.
|
||||
func ImportManage() {
|
||||
log := mlog.New("httpimport")
|
||||
defer func() {
|
||||
if x := recover(); x != nil {
|
||||
log.Error("import manage panic", mlog.Field("err", x))
|
||||
debug.PrintStack()
|
||||
}
|
||||
}()
|
||||
|
||||
type state struct {
|
||||
MailboxCounts map[string]int
|
||||
Problems []string
|
||||
Done *time.Time
|
||||
Aborted *time.Time
|
||||
Listeners map[*importListener]struct{}
|
||||
Cancel func()
|
||||
}
|
||||
|
||||
imports := map[string]state{} // Token to state.
|
||||
for {
|
||||
select {
|
||||
case l := <-importers.Register:
|
||||
// If we have state, send it so the client is up to date.
|
||||
if s, ok := imports[l.Token]; ok {
|
||||
l.Register <- true
|
||||
s.Listeners[l] = struct{}{}
|
||||
|
||||
sendEvent := func(kind string, v any) {
|
||||
buf, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
log.Errorx("marshal event", err, mlog.Field("kind", kind), mlog.Field("event", v))
|
||||
return
|
||||
}
|
||||
ssemsg := fmt.Sprintf("event: %s\ndata: %s\n\n", kind, buf)
|
||||
|
||||
select {
|
||||
case l.Events <- importEvent{kind, []byte(ssemsg), nil, nil}:
|
||||
default:
|
||||
log.Debug("dropped initial import event to slow consumer")
|
||||
}
|
||||
}
|
||||
|
||||
for m, c := range s.MailboxCounts {
|
||||
sendEvent("count", importCount{m, c})
|
||||
}
|
||||
for _, p := range s.Problems {
|
||||
sendEvent("problem", importProblem{p})
|
||||
}
|
||||
if s.Done != nil {
|
||||
sendEvent("done", importDone{})
|
||||
} else if s.Aborted != nil {
|
||||
sendEvent("aborted", importAborted{})
|
||||
}
|
||||
} else {
|
||||
l.Register <- false
|
||||
}
|
||||
|
||||
case l := <-importers.Unregister:
|
||||
delete(imports[l.Token].Listeners, l)
|
||||
|
||||
case e := <-importers.Events:
|
||||
s, ok := imports[e.Token]
|
||||
if !ok {
|
||||
s = state{
|
||||
MailboxCounts: map[string]int{},
|
||||
Listeners: map[*importListener]struct{}{},
|
||||
Cancel: e.Cancel,
|
||||
}
|
||||
imports[e.Token] = s
|
||||
}
|
||||
for l := range s.Listeners {
|
||||
select {
|
||||
case l.Events <- e:
|
||||
default:
|
||||
log.Debug("dropped import event to slow consumer")
|
||||
}
|
||||
}
|
||||
if e.Event != nil {
|
||||
s := imports[e.Token]
|
||||
switch x := e.Event.(type) {
|
||||
case importCount:
|
||||
s.MailboxCounts[x.Mailbox] = x.Count
|
||||
case importProblem:
|
||||
s.Problems = append(s.Problems, x.Message)
|
||||
case importDone:
|
||||
now := time.Now()
|
||||
s.Done = &now
|
||||
case importAborted:
|
||||
now := time.Now()
|
||||
s.Aborted = &now
|
||||
}
|
||||
imports[e.Token] = s
|
||||
}
|
||||
|
||||
case a := <-importers.Abort:
|
||||
s, ok := imports[a.Token]
|
||||
if !ok {
|
||||
a.Response <- errors.New("import not found")
|
||||
return
|
||||
}
|
||||
if s.Done != nil {
|
||||
a.Response <- errors.New("import already finished")
|
||||
return
|
||||
}
|
||||
s.Cancel()
|
||||
a.Response <- nil
|
||||
}
|
||||
|
||||
// Cleanup old state.
|
||||
for t, s := range imports {
|
||||
if len(s.Listeners) > 0 {
|
||||
continue
|
||||
}
|
||||
if s.Done != nil && time.Since(*s.Done) > time.Minute || s.Aborted != nil && time.Since(*s.Aborted) > time.Minute {
|
||||
delete(imports, t)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// importCount is a progress event: the running number of messages imported
// into a mailbox so far.
type importCount struct {
	Mailbox string
	Count   int
}

// importProblem is a progress event: a non-fatal problem encountered while
// importing, shown to the user.
type importProblem struct {
	Message string
}

// importDone is a progress event: the import finished successfully.
type importDone struct{}

// importAborted is a progress event: the import was aborted and no changes
// were made.
type importAborted struct{}
|
||||
|
||||
// importStart prepare the import and launches the goroutine to actually import.
|
||||
// importStart is responsible for closing f.
|
||||
func importStart(log *mlog.Log, accName string, f *os.File, skipMailboxPrefix string) (string, error) {
|
||||
defer func() {
|
||||
if f != nil {
|
||||
err := f.Close()
|
||||
log.Check(err, "closing uploaded file")
|
||||
}
|
||||
}()
|
||||
|
||||
buf := make([]byte, 16)
|
||||
if _, err := cryptrand.Read(buf); err != nil {
|
||||
return "", err
|
||||
}
|
||||
token := fmt.Sprintf("%x", buf)
|
||||
|
||||
if _, err := f.Seek(0, 0); err != nil {
|
||||
return "", fmt.Errorf("seek to start of file: %v", err)
|
||||
}
|
||||
|
||||
// Recognize file format.
|
||||
var iszip bool
|
||||
magicZip := []byte{0x50, 0x4b, 0x03, 0x04}
|
||||
magicGzip := []byte{0x1f, 0x8b}
|
||||
magic := make([]byte, 4)
|
||||
if _, err := f.ReadAt(magic, 0); err != nil {
|
||||
return "", fmt.Errorf("detecting file format: %v", err)
|
||||
}
|
||||
if bytes.Equal(magic, magicZip) {
|
||||
iszip = true
|
||||
} else if !bytes.Equal(magic[:2], magicGzip) {
|
||||
return "", fmt.Errorf("file is not a zip or gzip file")
|
||||
}
|
||||
|
||||
var zr *zip.Reader
|
||||
var tr *tar.Reader
|
||||
if iszip {
|
||||
fi, err := f.Stat()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("stat temporary import zip file: %v", err)
|
||||
}
|
||||
zr, err = zip.NewReader(f, fi.Size())
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("opening zip file: %v", err)
|
||||
}
|
||||
} else {
|
||||
gzr, err := gzip.NewReader(f)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("gunzip: %v", err)
|
||||
}
|
||||
tr = tar.NewReader(gzr)
|
||||
}
|
||||
|
||||
acc, err := store.OpenAccount(accName)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("open acount: %v", err)
|
||||
}
|
||||
acc.Lock() // Not using WithWLock because importMessage is responsible for unlocking.
|
||||
|
||||
tx, err := acc.DB.Begin(context.Background(), true)
|
||||
if err != nil {
|
||||
acc.Unlock()
|
||||
xerr := acc.Close()
|
||||
log.Check(xerr, "closing account")
|
||||
return "", fmt.Errorf("start transaction: %v", err)
|
||||
}
|
||||
|
||||
// Ensure token is registered before returning, with context that can be canceled.
|
||||
ctx, cancel := context.WithCancel(mox.Shutdown)
|
||||
importers.Events <- importEvent{token, []byte(": keepalive\n\n"), nil, cancel}
|
||||
|
||||
log.Info("starting import")
|
||||
go importMessages(ctx, log.WithCid(mox.Cid()), token, acc, tx, zr, tr, f, skipMailboxPrefix)
|
||||
f = nil // importMessages is now responsible for closing.
|
||||
|
||||
return token, nil
|
||||
}
|
||||
|
||||
// importMessages imports the messages from zip/tgz file f into account acc.
// Progress, problems and completion are reported as events on importers.Events
// under the given token. importMessages is responsible for unlocking and
// closing acc, and closing tx and f.
func importMessages(ctx context.Context, log *mlog.Log, token string, acc *store.Account, tx *bstore.Tx, zr *zip.Reader, tr *tar.Reader, f *os.File, skipMailboxPrefix string) {
	// If a fatal processing error occurs, we panic with this type. The deferred
	// recover below turns it into problem/aborted events instead of crashing.
	type importError struct{ Err error }

	// During import we collect all changes and broadcast them at the end, when successful.
	var changes []store.Change

	// ID's of delivered messages. If we have to rollback, we have to remove these files.
	var deliveredIDs []int64

	// ximportcheckf panics with importError if err is non-nil, aborting the import.
	ximportcheckf := func(err error, format string, args ...any) {
		if err != nil {
			panic(importError{fmt.Errorf("%s: %s", fmt.Sprintf(format, args...), err)})
		}
	}

	// sendEvent delivers a server-sent event (kind plus JSON payload) to the
	// importers manager, which fans it out to listeners.
	sendEvent := func(kind string, v any) {
		buf, err := json.Marshal(v)
		if err != nil {
			log.Errorx("marshal event", err, mlog.Field("kind", kind), mlog.Field("event", v))
			return
		}
		ssemsg := fmt.Sprintf("event: %s\ndata: %s\n\n", kind, buf)
		importers.Events <- importEvent{token, []byte(ssemsg), v, nil}
	}

	// problemf reports a non-fatal problem to listeners; the import continues.
	problemf := func(format string, args ...any) {
		msg := fmt.Sprintf(format, args...)
		sendEvent("problem", importProblem{Message: msg})
	}

	// canceled reports (without blocking) whether the import context was
	// canceled, sending an "aborted" event if so.
	canceled := func() bool {
		select {
		case <-ctx.Done():
			sendEvent("aborted", importAborted{})
			return true
		default:
			return false
		}
	}

	// Cleanup on return: close the uploaded file, remove any delivered message
	// files (deliveredIDs is cleared after a successful commit, see below),
	// roll back the transaction and release the account if still held, and
	// translate an importError panic into problem/aborted events.
	defer func() {
		err := f.Close()
		log.Check(err, "closing uploaded messages file")

		for _, id := range deliveredIDs {
			p := acc.MessagePath(id)
			err := os.Remove(p)
			log.Check(err, "closing message file after import error", mlog.Field("path", p))
		}
		if tx != nil {
			err := tx.Rollback()
			log.Check(err, "rolling back transaction")
		}
		if acc != nil {
			acc.Unlock()
			err := acc.Close()
			log.Check(err, "closing account")
		}

		x := recover()
		if x == nil {
			return
		}
		if err, ok := x.(importError); ok {
			log.Errorx("import error", err.Err)
			problemf("%s (aborting)", err.Err)
			sendEvent("aborted", importAborted{})
		} else {
			// Unknown panic: log and re-raise stack trace for debugging.
			log.Error("import panic", mlog.Field("err", x))
			debug.PrintStack()
		}
	}()

	conf, _ := acc.Conf()

	// Open the junk filter once for the whole import; a missing junk filter
	// (store.ErrNoJunkFilter) is not an error, jf stays nil then.
	jf, _, err := acc.OpenJunkFilter(ctx, log)
	if err != nil && !errors.Is(err, store.ErrNoJunkFilter) {
		ximportcheckf(err, "open junk filter")
	}
	// Discard junk filter changes unless we reach the successful save at the end.
	defer func() {
		if jf != nil {
			err := jf.CloseDiscard()
			log.Check(err, "closing junk filter")
		}
	}()

	// Mailboxes we imported, and message counts.
	mailboxes := map[string]store.Mailbox{}
	messages := map[string]int{}

	// For maildirs, we are likely to get a possible dovecot-keywords file after having
	// imported the messages. Once we see the keywords, we use them. But before that
	// time we remember which messages miss a keywords. Once the keywords become
	// available, we'll fix up the flags for the unknown messages.
	mailboxKeywords := map[string]map[rune]string{}                // Mailbox to 'a'-'z' to flag name.
	mailboxMissingKeywordMessages := map[string]map[int64]string{} // Mailbox to message id to string consisting of the unrecognized flags.

	// We keep the mailboxes we deliver to up to date with count and keywords (non-system flags).
	destMailboxCounts := map[int64]store.MailboxCounts{}
	destMailboxKeywords := map[int64]map[string]bool{}

	// Previous mailbox an event was sent for. We send an event for new mailboxes, when
	// another 100 messages were added, when adding a message to another mailbox, and
	// finally at the end as a closing statement.
	var prevMailbox string

	var modseq store.ModSeq // Assigned on first message, used for all messages.

	// trainMessage trains the junk filter with parsed message p; problems are
	// reported but do not abort the import.
	trainMessage := func(m *store.Message, p message.Part, pos string) {
		words, err := jf.ParseMessage(p)
		if err != nil {
			problemf("parsing message %s for updating junk filter: %v (continuing)", pos, err)
			return
		}
		err = jf.Train(ctx, !m.Junk, words)
		if err != nil {
			problemf("training junk filter for message %s: %v (continuing)", pos, err)
			return
		}
		m.TrainedJunk = &m.Junk
	}

	// openTrainMessage re-opens an already-delivered message from disk and
	// trains the junk filter with it (used for late flag fix-ups).
	openTrainMessage := func(m *store.Message) {
		path := acc.MessagePath(m.ID)
		f, err := os.Open(path)
		if err != nil {
			problemf("opening message again for training junk filter: %v (continuing)", err)
			return
		}
		defer func() {
			err := f.Close()
			log.Check(err, "closing file after training junkfilter")
		}()
		p, err := m.LoadPart(f)
		if err != nil {
			problemf("loading parsed message again for training junk filter: %v (continuing)", err)
			return
		}
		trainMessage(m, p, fmt.Sprintf("message id %d", m.ID))
	}

	// xensureMailbox returns the mailbox for name, creating it (and any missing
	// parents in the "/"-separated hierarchy) if needed. Panics via
	// ximportcheckf on database errors.
	xensureMailbox := func(name string) store.Mailbox {
		name = norm.NFC.String(name)
		if strings.ToLower(name) == "inbox" {
			name = "Inbox"
		}

		if mb, ok := mailboxes[name]; ok {
			return mb
		}

		var p string
		var mb store.Mailbox
		for i, e := range strings.Split(name, "/") {
			if i == 0 {
				p = e
			} else {
				p = path.Join(p, e)
			}
			if _, ok := mailboxes[p]; ok {
				continue
			}

			q := bstore.QueryTx[store.Mailbox](tx)
			q.FilterNonzero(store.Mailbox{Name: p})
			var err error
			mb, err = q.Get()
			if err == bstore.ErrAbsent {
				uidvalidity, err := acc.NextUIDValidity(tx)
				ximportcheckf(err, "finding next uid validity")
				mb = store.Mailbox{
					Name:        p,
					UIDValidity: uidvalidity,
					UIDNext:     1,
					HaveCounts:  true,
					// Do not assign special-use flags. This existing account probably already has such mailboxes.
				}
				err = tx.Insert(&mb)
				ximportcheckf(err, "inserting mailbox in database")

				if tx.Get(&store.Subscription{Name: p}) != nil {
					err := tx.Insert(&store.Subscription{Name: p})
					ximportcheckf(err, "subscribing to imported mailbox")
				}
				changes = append(changes, store.ChangeAddMailbox{Mailbox: mb, Flags: []string{`\Subscribed`}})
			} else if err != nil {
				ximportcheckf(err, "creating mailbox %s (aborting)", p)
			}
			if prevMailbox != "" && mb.Name != prevMailbox {
				sendEvent("count", importCount{prevMailbox, messages[prevMailbox]})
			}
			mailboxes[mb.Name] = mb
			sendEvent("count", importCount{mb.Name, 0})
			prevMailbox = mb.Name
		}
		return mb
	}

	// xdeliver delivers message m with temporary message file f into mailbox
	// mb, updating counts/keywords bookkeeping and training the junk filter.
	// On success f is consumed (set to nil so the deferred cleanup skips it).
	xdeliver := func(mb store.Mailbox, m *store.Message, f *os.File, pos string) {
		defer func() {
			if f != nil {
				err := os.Remove(f.Name())
				log.Check(err, "removing temporary message file for delivery")
				err = f.Close()
				log.Check(err, "closing temporary message file for delivery")
			}
		}()
		m.MailboxID = mb.ID
		m.MailboxOrigID = mb.ID

		// One modseq is assigned lazily and shared by all imported messages.
		if modseq == 0 {
			var err error
			modseq, err = acc.NextModSeq(tx)
			ximportcheckf(err, "assigning next modseq")
		}
		m.CreateSeq = modseq
		m.ModSeq = modseq

		mc := destMailboxCounts[mb.ID]
		mc.Add(m.MailboxCounts())
		destMailboxCounts[mb.ID] = mc

		if len(m.Keywords) > 0 {
			if destMailboxKeywords[mb.ID] == nil {
				destMailboxKeywords[mb.ID] = map[string]bool{}
			}
			for _, k := range m.Keywords {
				destMailboxKeywords[mb.ID][k] = true
			}
		}

		// Parse message and store parsed information for later fast retrieval.
		p, err := message.EnsurePart(f, m.Size)
		if err != nil {
			problemf("parsing message %s: %s (continuing)", pos, err)
		}
		m.ParsedBuf, err = json.Marshal(p)
		ximportcheckf(err, "marshal parsed message structure")

		// Prefer the Date header for Received if the source (e.g. mbox) gave no timestamp.
		if m.Received.IsZero() {
			if p.Envelope != nil && !p.Envelope.Date.IsZero() {
				m.Received = p.Envelope.Date
			} else {
				m.Received = time.Now()
			}
		}

		// We set the flags that Deliver would set now and train ourselves. This prevents
		// Deliver from training, which would open the junk filter, change it, and write it
		// back to disk, for each message (slow).
		m.JunkFlagsForMailbox(mb.Name, conf)
		if jf != nil && m.NeedsTraining() {
			trainMessage(m, p, pos)
		}

		const consumeFile = true
		const sync = false
		const notrain = true
		if err := acc.DeliverMessage(log, tx, m, f, consumeFile, mb.Sent, sync, notrain); err != nil {
			problemf("delivering message %s: %s (continuing)", pos, err)
			return
		}
		deliveredIDs = append(deliveredIDs, m.ID)
		changes = append(changes, m.ChangeAddUID())
		messages[mb.Name]++
		if messages[mb.Name]%100 == 0 || prevMailbox != mb.Name {
			prevMailbox = mb.Name
			sendEvent("count", importCount{mb.Name, messages[mb.Name]})
		}
		f = nil
	}

	// ximportMbox imports all messages from an mbox-format file.
	ximportMbox := func(mailbox, filename string, r io.Reader) {
		if mailbox == "" {
			problemf("empty mailbox name for mbox file %s (skipping)", filename)
			return
		}
		mb := xensureMailbox(mailbox)

		mr := store.NewMboxReader(store.CreateMessageTemp, filename, r, log)
		for {
			m, mf, pos, err := mr.Next()
			if err == io.EOF {
				break
			} else if err != nil {
				ximportcheckf(err, "next message in mbox file")
			}

			xdeliver(mb, m, mf, pos)
		}
	}

	// ximportMaildir imports a single maildir message file, normalizing line
	// endings and decoding maildir filename metadata (timestamp and flags).
	ximportMaildir := func(mailbox, filename string, r io.Reader) {
		if mailbox == "" {
			problemf("empty mailbox name for maildir file %s (skipping)", filename)
			return
		}
		mb := xensureMailbox(mailbox)

		f, err := store.CreateMessageTemp("import")
		ximportcheckf(err, "creating temp message")
		defer func() {
			if f != nil {
				err := os.Remove(f.Name())
				log.Check(err, "removing temporary file for delivery")
				err = f.Close()
				log.Check(err, "closing temporary file for delivery")
			}
		}()

		// Copy data, changing bare \n into \r\n.
		// NOTE(review): if the final line lacks a trailing newline, the append
		// below replaces its last byte with \r\n — verify this is intended.
		br := bufio.NewReader(r)
		w := bufio.NewWriter(f)
		var size int64
		for {
			line, err := br.ReadBytes('\n')
			if err != nil && err != io.EOF {
				ximportcheckf(err, "reading message")
			}
			if len(line) > 0 {
				if !bytes.HasSuffix(line, []byte("\r\n")) {
					line = append(line[:len(line)-1], "\r\n"...)
				}

				n, err := w.Write(line)
				ximportcheckf(err, "writing message")
				size += int64(n)
			}
			if err == io.EOF {
				break
			}
		}
		err = w.Flush()
		ximportcheckf(err, "writing message")

		// Maildir filenames conventionally start with a unix timestamp.
		var received time.Time
		t := strings.SplitN(path.Base(filename), ".", 2)
		if v, err := strconv.ParseInt(t[0], 10, 64); err == nil {
			received = time.Unix(v, 0)
		}

		// Parse flags. See https://cr.yp.to/proto/maildir.html.
		var keepFlags string
		var flags store.Flags
		keywords := map[string]bool{}
		t = strings.SplitN(path.Base(filename), ":2,", 2)
		if len(t) == 2 {
			for _, c := range t[1] {
				switch c {
				case 'P':
					// Passed, doesn't map to a common IMAP flag.
				case 'R':
					flags.Answered = true
				case 'S':
					flags.Seen = true
				case 'T':
					flags.Deleted = true
				case 'D':
					flags.Draft = true
				case 'F':
					flags.Flagged = true
				default:
					if c >= 'a' && c <= 'z' {
						dovecotKeywords, ok := mailboxKeywords[mailbox]
						if !ok {
							// No keywords file seen yet, we'll try later if it comes in.
							keepFlags += string(c)
						} else if kw, ok := dovecotKeywords[c]; ok {
							flagSet(&flags, keywords, kw)
						}
					}
				}
			}
		}

		m := store.Message{
			Received: received,
			Flags:    flags,
			Keywords: maps.Keys(keywords),
			Size:     size,
		}
		xdeliver(mb, &m, f, filename)
		f = nil
		// Remember unresolved single-letter flags for fix-up once the
		// dovecot-keywords file for this mailbox arrives.
		if keepFlags != "" {
			if _, ok := mailboxMissingKeywordMessages[mailbox]; !ok {
				mailboxMissingKeywordMessages[mailbox] = map[int64]string{}
			}
			mailboxMissingKeywordMessages[mailbox][m.ID] = keepFlags
		}
	}

	// importFile dispatches a single archive entry by its path: directory
	// entries (possibly maildirs), .mbox files, maildir new/cur/tmp messages,
	// or a dovecot-keywords file that triggers flag fix-ups.
	importFile := func(name string, r io.Reader) {
		origName := name

		if strings.HasPrefix(name, skipMailboxPrefix) {
			name = strings.TrimPrefix(name[len(skipMailboxPrefix):], "/")
		}

		if strings.HasSuffix(name, "/") {
			name = strings.TrimSuffix(name, "/")
			dir := path.Dir(name)
			switch path.Base(dir) {
			case "new", "cur", "tmp":
				// Maildir, ensure it exists.
				mailbox := path.Dir(dir)
				xensureMailbox(mailbox)
			}
			// Otherwise, this is just a directory that probably holds mbox files and maildirs.
			return
		}

		if strings.HasSuffix(path.Base(name), ".mbox") {
			mailbox := name[:len(name)-len(".mbox")]
			ximportMbox(mailbox, origName, r)
			return
		}
		dir := path.Dir(name)
		dirbase := path.Base(dir)
		switch dirbase {
		case "new", "cur", "tmp":
			mailbox := path.Dir(dir)
			ximportMaildir(mailbox, origName, r)
		default:
			if path.Base(name) == "dovecot-keywords" {
				mailbox := path.Dir(name)
				dovecotKeywords := map[rune]string{}
				words, err := store.ParseDovecotKeywordsFlags(r, log)
				log.Check(err, "parsing dovecot keywords for mailbox", mlog.Field("mailbox", mailbox))
				for i, kw := range words {
					dovecotKeywords['a'+rune(i)] = kw
				}
				mailboxKeywords[mailbox] = dovecotKeywords

				// Fix up flags of earlier-delivered messages that had
				// unresolved single-letter flags for this mailbox.
				for id, chars := range mailboxMissingKeywordMessages[mailbox] {
					var flags, zeroflags store.Flags
					keywords := map[string]bool{}
					for _, c := range chars {
						kw, ok := dovecotKeywords[c]
						if !ok {
							problemf("unspecified dovecot message flag %c for message id %d (continuing)", c, id)
							continue
						}
						flagSet(&flags, keywords, kw)
					}
					if flags == zeroflags && len(keywords) == 0 {
						continue
					}

					m := store.Message{ID: id}
					err := tx.Get(&m)
					ximportcheckf(err, "get imported message for flag update")

					// Keep per-mailbox counts in sync: subtract old
					// contribution, add it back after changing flags.
					mc := destMailboxCounts[m.MailboxID]
					mc.Sub(m.MailboxCounts())

					oflags := m.Flags
					m.Flags = m.Flags.Set(flags, flags)
					m.Keywords = maps.Keys(keywords)
					sort.Strings(m.Keywords)

					mc.Add(m.MailboxCounts())
					destMailboxCounts[m.MailboxID] = mc

					if len(m.Keywords) > 0 {
						if destMailboxKeywords[m.MailboxID] == nil {
							destMailboxKeywords[m.MailboxID] = map[string]bool{}
						}
						for _, k := range m.Keywords {
							destMailboxKeywords[m.MailboxID][k] = true
						}
					}

					// We train before updating, training may set m.TrainedJunk.
					if jf != nil && m.NeedsTraining() {
						openTrainMessage(&m)
					}
					err = tx.Update(&m)
					ximportcheckf(err, "updating message after flag update")
					changes = append(changes, m.ChangeFlags(oflags))
				}
				delete(mailboxMissingKeywordMessages, mailbox)
			} else {
				problemf("unrecognized file %s (skipping)", origName)
			}
		}
	}

	// Walk the archive: exactly one of zr (zip) and tr (tar) is non-nil,
	// as set up by importStart. Cancellation is checked between entries.
	if zr != nil {
		for _, f := range zr.File {
			if canceled() {
				return
			}
			zf, err := f.Open()
			if err != nil {
				problemf("opening file %s in zip: %v", f.Name, err)
				continue
			}
			importFile(f.Name, zf)
			err = zf.Close()
			log.Check(err, "closing file from zip")
		}
	} else {
		for {
			if canceled() {
				return
			}
			h, err := tr.Next()
			if err == io.EOF {
				break
			} else if err != nil {
				problemf("reading next tar header: %v (aborting)", err)
				return
			}
			importFile(h.Name, tr)
		}
	}

	total := 0
	for _, count := range messages {
		total += count
	}
	log.Debug("message imported", mlog.Field("total", total))

	// Send final update for count of last-imported mailbox.
	if prevMailbox != "" {
		sendEvent("count", importCount{prevMailbox, messages[prevMailbox]})
	}

	// Update mailboxes with counts and keywords.
	for mbID, mc := range destMailboxCounts {
		mb := store.Mailbox{ID: mbID}
		err := tx.Get(&mb)
		ximportcheckf(err, "loading mailbox for counts and keywords")

		if mb.MailboxCounts != mc {
			mb.MailboxCounts = mc
			changes = append(changes, mb.ChangeCounts())
		}

		keywords := destMailboxKeywords[mb.ID]
		var mbKwChanged bool
		mb.Keywords, mbKwChanged = store.MergeKeywords(mb.Keywords, maps.Keys(keywords))

		err = tx.Update(&mb)
		ximportcheckf(err, "updating mailbox count and keywords")
		if mbKwChanged {
			changes = append(changes, mb.ChangeKeywords())
		}
	}

	err = tx.Commit()
	tx = nil
	ximportcheckf(err, "commit")
	// Delivered message files are now permanent; prevent the deferred cleanup
	// from removing them.
	deliveredIDs = nil

	// Save junk filter training; setting jf to nil prevents the deferred
	// CloseDiscard from throwing the training away.
	if jf != nil {
		if err := jf.Close(); err != nil {
			problemf("saving changes of training junk filter: %v (continuing)", err)
			log.Errorx("saving changes of training junk filter", err)
		}
		jf = nil
	}

	store.BroadcastChanges(acc, changes)
	acc.Unlock()
	err = acc.Close()
	log.Check(err, "closing account after import")
	acc = nil

	sendEvent("done", importDone{})
}
|
||||
|
||||
func flagSet(flags *store.Flags, keywords map[string]bool, word string) {
|
||||
switch word {
|
||||
case "forwarded", "$forwarded":
|
||||
flags.Forwarded = true
|
||||
case "junk", "$junk":
|
||||
flags.Junk = true
|
||||
case "notjunk", "$notjunk", "nonjunk", "$nonjunk":
|
||||
flags.Notjunk = true
|
||||
case "phishing", "$phishing":
|
||||
flags.Phishing = true
|
||||
case "mdnsent", "$mdnsent":
|
||||
flags.MDNSent = true
|
||||
default:
|
||||
if err := store.CheckKeyword(word); err == nil {
|
||||
keywords[word] = true
|
||||
}
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user