add functionality to import zip/tgz with maildirs/mboxes to account page

so users can easily take their email out of somewhere else, and import it into mox.

this goes some way towards giving feedback as the import progresses: upload
progress is shown (surprisingly, browsers don't do this themselves...), imported
mailboxes/messages are counted (batched) and import issues/warnings are
displayed, all sent over an SSE connection. an import token is stored in
sessionstorage. if you reload the page (e.g. after a connection error), the
browser reconnects to the running import and shows its progress again. you can
also abort the import before it is finished and committed, in which case
nothing will have changed.
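
for reference, a minimal sketch (not part of this commit) of driving the new endpoints from a Go client: upload the archive with a multipart POST to /import (fields "file" and optional "skipMailboxPrefix"), read the {"ImportToken": ...} response, then follow progress over SSE at /importprogress?token=... until a done or aborted event. the endpoint paths, field names and event kinds match the handlers in the diff below; the base URL, credentials and archive name are placeholders.

package main

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"mime/multipart"
	"net/http"
	"net/url"
	"os"
	"strings"
)

func main() {
	base := "http://localhost/account"              // placeholder base URL
	user, pass := "mjl@mox.example", "password1234" // placeholder credentials

	// Upload the archive as multipart/form-data.
	var body bytes.Buffer
	mpw := multipart.NewWriter(&body)
	fw, err := mpw.CreateFormFile("file", "mail.tgz")
	check(err)
	f, err := os.Open("mail.tgz") // placeholder archive
	check(err)
	_, err = io.Copy(fw, f)
	check(err)
	check(f.Close())
	check(mpw.WriteField("skipMailboxPrefix", "Takeout"))
	check(mpw.Close())

	req, err := http.NewRequest("POST", base+"/import", &body)
	check(err)
	req.Header.Set("Content-Type", mpw.FormDataContentType())
	req.SetBasicAuth(user, pass)
	resp, err := http.DefaultClient.Do(req)
	check(err)
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		log.Fatalf("import upload: %s", resp.Status)
	}
	var result struct{ ImportToken string }
	check(json.NewDecoder(resp.Body).Decode(&result))

	// Follow progress over SSE. The token is unguessable, so this endpoint
	// is served without authentication.
	presp, err := http.Get(base + "/importprogress?token=" + url.QueryEscape(result.ImportToken))
	check(err)
	defer presp.Body.Close()
	scanner := bufio.NewScanner(presp.Body)
	var event string
	for scanner.Scan() {
		line := scanner.Text()
		switch {
		case strings.HasPrefix(line, "event: "):
			event = strings.TrimPrefix(line, "event: ")
		case strings.HasPrefix(line, "data: "):
			fmt.Printf("%s %s\n", event, strings.TrimPrefix(line, "data: "))
			if event == "done" || event == "aborted" {
				return
			}
		}
	}
	check(scanner.Err())
}

func check(err error) {
	if err != nil {
		log.Fatal(err)
	}
}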

this also imports flags/keywords from mbox files.
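
as an illustration of the metadata involved (this is not mox's mbox reader, just the widespread mutt/dovecot convention of Status/X-Status headers and X-Keywords), a standalone sketch of mapping mbox status characters to flags:

package main

import "fmt"

type flags struct{ Seen, Answered, Flagged, Draft, Deleted bool }

// parseMboxStatus maps Status/X-Status characters to flags, following the
// common mutt/dovecot convention.
func parseMboxStatus(status, xstatus string) flags {
	var f flags
	for _, c := range status + xstatus {
		switch c {
		case 'R':
			f.Seen = true
		case 'A':
			f.Answered = true
		case 'F':
			f.Flagged = true
		case 'T':
			f.Draft = true
		case 'D':
			f.Deleted = true
		case 'O':
			// Old/non-recent, no direct IMAP system flag.
		}
	}
	return f
}

func main() {
	fmt.Printf("%+v\n", parseMboxStatus("RO", "A")) // {Seen:true Answered:true Flagged:false Draft:false Deleted:false}
}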
Mechiel Lukkien
2023-02-16 09:57:27 +01:00
parent 23b530ae36
commit 5336032088
32 changed files with 1968 additions and 518 deletions


@@ -6,6 +6,7 @@ import (
"compress/gzip"
"context"
"encoding/base64"
"encoding/json"
"errors"
"io"
"net"
@@ -106,6 +107,63 @@ func accountHandle(w http.ResponseWriter, r *http.Request) {
ctx := context.WithValue(r.Context(), mlog.CidKey, mox.Cid())
log := xlog.WithContext(ctx).Fields(mlog.Field("userauth", ""))
// Without authentication. The token is unguessable.
if r.URL.Path == "/importprogress" {
if r.Method != "GET" {
http.Error(w, "405 - method not allowed - get required", http.StatusMethodNotAllowed)
return
}
q := r.URL.Query()
token := q.Get("token")
if token == "" {
http.Error(w, "400 - bad request - missing token", http.StatusBadRequest)
return
}
flusher, ok := w.(http.Flusher)
if !ok {
log.Error("internal error: ResponseWriter not a http.Flusher")
http.Error(w, "500 - internal error - cannot sync to http connection", 500)
return
}
l := importListener{token, make(chan importEvent, 100), make(chan bool, 1)}
importers.Register <- &l
ok = <-l.Register
if !ok {
http.Error(w, "400 - bad request - unknown token, import may have finished more than a minute ago", http.StatusBadRequest)
return
}
defer func() {
importers.Unregister <- &l
}()
h := w.Header()
h.Set("Content-Type", "text/event-stream")
h.Set("Cache-Control", "no-cache")
_, err := w.Write([]byte(": keepalive\n\n"))
if err != nil {
return
}
flusher.Flush()
ctx := r.Context()
for {
select {
case e := <-l.Events:
_, err := w.Write(e.SSEMsg)
flusher.Flush()
if err != nil {
return
}
case <-ctx.Done():
return
}
}
}
accName := checkAccountAuth(ctx, log, w, r)
if accName == "" {
// Response already sent.
@@ -165,6 +223,54 @@ func accountHandle(w http.ResponseWriter, r *http.Request) {
log.Errorx("exporting mail", err)
}
case "/import":
if r.Method != "POST" {
http.Error(w, "405 - method not allowed - post required", http.StatusMethodNotAllowed)
return
}
f, _, err := r.FormFile("file")
if err != nil {
if errors.Is(err, http.ErrMissingFile) {
http.Error(w, "400 - bad request - missing file", http.StatusBadRequest)
} else {
http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
}
return
}
defer f.Close()
skipMailboxPrefix := r.FormValue("skipMailboxPrefix")
tmpf, err := os.CreateTemp("", "mox-import")
if err != nil {
http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
return
}
defer func() {
if tmpf != nil {
tmpf.Close()
}
}()
if err := os.Remove(tmpf.Name()); err != nil {
log.Errorx("removing temporary file", err)
http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
return
}
if _, err := io.Copy(tmpf, f); err != nil {
log.Errorx("copying import to temporary file", err)
http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
return
}
token, err := importStart(log, accName, tmpf, skipMailboxPrefix)
if err != nil {
log.Errorx("starting import", err)
http.Error(w, "500 - internal server error - "+err.Error(), http.StatusInternalServerError)
return
}
tmpf = nil // importStart is now responsible for closing.
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]string{"ImportToken": token})
default:
if strings.HasPrefix(r.URL.Path, "/api/") {
accountSherpaHandler.ServeHTTP(w, r.WithContext(context.WithValue(ctx, authCtxKey, accName)))
@@ -230,3 +336,11 @@ func (Account) DestinationSave(ctx context.Context, destName string, oldDest, ne
err := mox.DestinationSave(ctx, accountName, destName, newDest)
xcheckf(ctx, err, "saving destination")
}
// ImportAbort aborts an import that is in progress. If the import exists and isn't
// finished, no changes will have been made by the import.
func (Account) ImportAbort(ctx context.Context, importToken string) error {
req := importAbortRequest{importToken, make(chan error)}
importers.Abort <- req
return <-req.Response
}


@@ -130,10 +130,132 @@ const domainString = d => {
return d.ASCII
}
const box = (color, ...l) => [
dom.div(
style({
display: 'inline-block',
padding: '.25em .5em',
backgroundColor: color,
borderRadius: '3px',
margin: '.5ex 0',
}),
l,
),
dom.br(),
]
const green = '#1dea20'
const yellow = '#ffe400'
const red = '#ff7443'
const blue = '#8bc8ff'
const index = async () => {
const [domain, destinations] = await api.Destinations()
let form, fieldset, password1, password2, passwordHint
let passwordForm, passwordFieldset, password1, password2, passwordHint
let importForm, importFieldset, mailboxFile, mailboxFileHint, mailboxPrefix, mailboxPrefixHint, importProgress, importAbortBox, importAbort
const importTrack = async (token) => {
const importConnection = dom.div('Waiting for updates...')
importProgress.appendChild(importConnection)
let countsTbody
let counts = {} // mailbox -> elem
let problems // element
await new Promise((resolve, reject) => {
const eventSource = new window.EventSource('importprogress?token=' + encodeURIComponent(token))
eventSource.addEventListener('open', function(e) {
console.log('eventsource open', {e})
dom._kids(importConnection, dom.div('Waiting for updates, connected...'))
dom._kids(importAbortBox,
importAbort=dom.button('Abort import', attr({title: 'If the import is not yet finished, it can be aborted and no messages will have been imported.'}), async function click(e) {
try {
await api.ImportAbort(token)
} catch (err) {
console.log({err})
window.alert('Error: ' + err.message)
}
// On success, the event source will get an aborted notification and shut down the connection.
})
)
})
eventSource.addEventListener('error', function(e) {
console.log('eventsource error', {e})
dom._kids(importConnection, box(red, 'Connection error'))
reject({message: 'Connection error'})
})
eventSource.addEventListener('count', (e) => {
const data = JSON.parse(e.data) // {Mailbox: ..., Count: ...}
console.log('import count event', {e, data})
if (!countsTbody) {
importProgress.appendChild(
dom.div(
dom.br(),
dom.h3('Importing mailboxes and messages...'),
dom.table(
dom.thead(
dom.tr(dom.th('Mailbox'), dom.th('Messages')),
),
countsTbody=dom.tbody(),
),
)
)
}
let elem = counts[data.Mailbox]
if (!elem) {
countsTbody.appendChild(
dom.tr(
dom.td(data.Mailbox),
elem=dom.td(style({textAlign: 'right'}), ''+data.Count),
),
)
counts[data.Mailbox] = elem
}
dom._kids(elem, ''+data.Count)
})
eventSource.addEventListener('problem', (e) => {
const data = JSON.parse(e.data) // {Message: ...}
console.log('import problem event', {e, data})
if (!problems) {
importProgress.appendChild(
dom.div(
dom.br(),
dom.h3('Problems during import'),
problems=dom.div(),
),
)
}
problems.appendChild(dom.div(box(yellow, data.Message)))
})
eventSource.addEventListener('done', (e) => {
console.log('import done event', {e})
importProgress.appendChild(dom.div(dom.br(), box(blue, 'Import finished')))
eventSource.close()
dom._kids(importConnection)
dom._kids(importAbortBox)
window.sessionStorage.removeItem('ImportToken')
resolve()
})
eventSource.addEventListener('aborted', function(e) {
console.log('import aborted event', {e})
importProgress.appendChild(dom.div(dom.br(), box(red, 'Import aborted, no messages imported')))
eventSource.close()
dom._kids(importConnection)
dom._kids(importAbortBox)
window.sessionStorage.removeItem('ImportToken')
reject({message: 'Import aborted'})
})
})
}
const page = document.getElementById('page')
dom._kids(page,
@@ -154,8 +276,8 @@ const index = async () => {
),
dom.br(),
dom.h2('Change password'),
form=dom.form(
fieldset=dom.fieldset(
passwordForm=dom.form(
passwordFieldset=dom.fieldset(
dom.label(
style({display: 'inline-block'}),
'New password',
@@ -182,16 +304,16 @@ const index = async () => {
window.alert('Passwords do not match.')
return
}
fieldset.disabled = true
passwordFieldset.disabled = true
try {
await api.SetPassword(password1.value)
window.alert('Password has been changed.')
form.reset()
passwordForm.reset()
} catch (err) {
console.log({err})
window.alert('Error: ' + err.message)
} finally {
fieldset.disabled = false
passwordFieldset.disabled = false
}
},
),
@@ -204,8 +326,138 @@ const index = async () => {
dom.li(dom.a('mail-export-mbox.tgz', attr({href: 'mail-export-mbox.tgz'}))),
dom.li(dom.a('mail-export-mbox.zip', attr({href: 'mail-export-mbox.zip'}))),
),
dom.br(),
dom.h2('Import'),
dom.p('Import messages from a .zip or .tgz file with maildirs and/or mbox files.'),
importForm=dom.form(
async function submit(e) {
e.preventDefault()
e.stopPropagation()
const request = () => {
return new Promise((resolve, reject) => {
// Browsers can do everything. Except show a progress bar while uploading...
let progressBox, progressPercentage, progressBar
dom._kids(importProgress,
progressBox=dom.div(
dom.div('Uploading... ', progressPercentage=dom.span()),
),
)
importProgress.style.display = ''
const xhr = new window.XMLHttpRequest()
xhr.open('POST', 'import', true)
xhr.upload.addEventListener('progress', (e) => {
if (!e.lengthComputable) {
return
}
const pct = Math.floor(100*e.loaded/e.total)
dom._kids(progressPercentage, pct+'%')
})
xhr.addEventListener('load', () => {
console.log('upload done', {xhr: xhr, status: xhr.status})
if (xhr.status !== 200) {
reject({message: 'status '+xhr.status})
return
}
let resp
try {
resp = JSON.parse(xhr.responseText)
} catch (err) {
reject({message: 'parsing response json: '+err.message})
return
}
resolve(resp)
})
xhr.addEventListener('error', (e) => reject({message: 'upload error', event: e}))
xhr.addEventListener('abort', (e) => reject({message: 'upload aborted', event: e}))
xhr.send(new window.FormData(importForm))
})
}
try {
const p = request()
importFieldset.disabled = true
const result = await p
try {
window.sessionStorage.setItem('ImportToken', result.ImportToken)
} catch (err) {
console.log('storing import token in session storage', {err})
// Ignore error, could be some browser security thing like private browsing.
}
await importTrack(result.ImportToken)
} catch (err) {
console.log({err})
window.alert('Error: '+err.message)
} finally {
importFieldset.disabled = false
}
},
importFieldset=dom.fieldset(
dom.div(
style({marginBottom: '1ex'}),
dom.label(
dom.div(style({marginBottom: '.5ex'}), 'File'),
mailboxFile=dom.input(attr({type: 'file', required: '', name: 'file'}), function focus() {
mailboxFileHint.style.display = ''
}),
),
mailboxFileHint=dom.p(style({display: 'none', fontStyle: 'italic', marginTop: '.5ex'}), 'This file must either be a zip file or a gzipped tar file with mbox and/or maildir mailboxes. For maildirs, an optional file "dovecot-keywords" is read for additional keywords, like Forwarded/Junk/NotJunk. If an imported mailbox already exists by name, messages are added to the existing mailbox. If a mailbox does not yet exist, it will be created.'),
),
dom.div(
style({marginBottom: '1ex'}),
dom.label(
dom.div(style({marginBottom: '.5ex'}), 'Skip mailbox prefix (optional)'),
mailboxPrefix=dom.input(attr({name: 'skipMailboxPrefix'}), function focus() {
mailboxPrefixHint.style.display = ''
}),
),
mailboxPrefixHint=dom.p(style({display: 'none', fontStyle: 'italic', marginTop: '.5ex'}), 'If set, any mbox/maildir path with this prefix will have it stripped before importing. For example, if all mailboxes are in a directory "Takeout", specify that path in the field above so mailboxes like "Takeout/Inbox.mbox" are imported into a mailbox called "Inbox" instead of "Takeout/Inbox".'),
),
dom.div(
dom.button('Upload and import'),
dom.p(style({fontStyle: 'italic', marginTop: '.5ex'}), 'The file is uploaded first, then its messages are imported. Importing is done in a transaction; you can abort the entire import before it is finished.'),
),
),
),
importAbortBox=dom.div(), // Outside the fieldset because the fieldset gets disabled; above the progress div because appended problems may quickly scroll it down.
importProgress=dom.div(
style({display: 'none'}),
),
footer,
)
// Try to show the progress of an earlier import session. The user may have just
// refreshed the browser.
let importToken
try {
importToken = window.sessionStorage.getItem('ImportToken')
} catch (err) {
console.log('looking up ImportToken in session storage', {err})
return
}
if (!importToken) {
return
}
importFieldset.disabled = true
dom._kids(importProgress,
dom.div(
dom.div('Reconnecting to import...'),
),
)
importProgress.style.display = ''
importTrack(importToken)
.catch((err) => {
if (window.confirm('Error reconnecting to import. Remove this import session?')) {
window.sessionStorage.removeItem('ImportToken')
dom._kids(importProgress)
importProgress.style.display = 'none'
}
})
.finally(() => {
importFieldset.disabled = false
})
}
const destination = async (name) => {


@@ -1,3 +1,179 @@
package http
// todo: write test for account api calls, at least for authentication and SetPassword.
import (
"archive/tar"
"archive/zip"
"bytes"
"compress/gzip"
"context"
"encoding/json"
"io"
"mime/multipart"
"net/http"
"net/http/httptest"
"os"
"path"
"path/filepath"
"strings"
"testing"
"github.com/mjl-/mox/mlog"
"github.com/mjl-/mox/mox-"
"github.com/mjl-/mox/store"
)
func tcheck(t *testing.T, err error, msg string) {
t.Helper()
if err != nil {
t.Fatalf("%s: %s", msg, err)
}
}
func TestAccount(t *testing.T) {
os.RemoveAll("../testdata/httpaccount/data")
mox.ConfigStaticPath = "../testdata/httpaccount/mox.conf"
mox.ConfigDynamicPath = filepath.Join(filepath.Dir(mox.ConfigStaticPath), "domains.conf")
mox.MustLoadConfig()
acc, err := store.OpenAccount("mjl")
tcheck(t, err, "open account")
defer acc.Close()
switchDone := store.Switchboard()
defer close(switchDone)
log := mlog.New("store")
test := func(authHdr string, expect string) {
t.Helper()
w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/ignored", nil)
if authHdr != "" {
r.Header.Add("Authorization", authHdr)
}
ok := checkAccountAuth(context.Background(), log, w, r)
if ok != expect {
t.Fatalf("got %v, expected %v", ok, expect)
}
}
const authOK = "Basic bWpsQG1veC5leGFtcGxlOnRlc3QxMjM0" // mjl@mox.example:test1234
const authBad = "Basic bWpsQG1veC5leGFtcGxlOmJhZHBhc3N3b3Jk" // mjl@mox.example:badpassword
authCtx := context.WithValue(context.Background(), authCtxKey, "mjl")
test(authOK, "") // No password set yet.
Account{}.SetPassword(authCtx, "test1234")
test(authOK, "mjl")
test(authBad, "")
_, dests := Account{}.Destinations(authCtx)
Account{}.DestinationSave(authCtx, "mjl", dests["mjl"], dests["mjl"]) // todo: save modified value and compare it afterwards
// Import mbox/maildir tgz/zip.
testImport := func(filename string, expect int) {
t.Helper()
var reqBody bytes.Buffer
mpw := multipart.NewWriter(&reqBody)
part, err := mpw.CreateFormFile("file", path.Base(filename))
tcheck(t, err, "creating form file")
buf, err := os.ReadFile(filename)
tcheck(t, err, "reading file")
_, err = part.Write(buf)
tcheck(t, err, "write part")
err = mpw.Close()
tcheck(t, err, "close multipart writer")
r := httptest.NewRequest("POST", "/import", &reqBody)
r.Header.Add("Content-Type", mpw.FormDataContentType())
r.Header.Add("Authorization", authOK)
w := httptest.NewRecorder()
accountHandle(w, r)
if w.Code != http.StatusOK {
t.Fatalf("import, got status code %d, expected 200: %s", w.Code, w.Body.Bytes())
}
m := map[string]string{}
if err := json.Unmarshal(w.Body.Bytes(), &m); err != nil {
t.Fatalf("parsing import response: %v", err)
}
token := m["ImportToken"]
l := importListener{token, make(chan importEvent, 100), make(chan bool)}
importers.Register <- &l
if !<-l.Register {
t.Fatalf("register failed")
}
defer func() {
importers.Unregister <- &l
}()
count := 0
loop:
for {
e := <-l.Events
switch x := e.Event.(type) {
case importCount:
count += x.Count
case importProblem:
t.Fatalf("unexpected problem: %q", x.Message)
case importDone:
break loop
case importAborted:
t.Fatalf("unexpected aborted import")
default:
panic("missing case")
}
}
if count != expect {
t.Fatalf("imported %d messages, expected %d", count, expect)
}
}
testImport("../testdata/importtest.mbox.zip", 2)
testImport("../testdata/importtest.maildir.tgz", 2)
testExport := func(httppath string, iszip bool, expectFiles int) {
t.Helper()
r := httptest.NewRequest("GET", httppath, nil)
r.Header.Add("Authorization", authOK)
w := httptest.NewRecorder()
accountHandle(w, r)
if w.Code != http.StatusOK {
t.Fatalf("export, got status code %d, expected 200: %s", w.Code, w.Body.Bytes())
}
var count int
if iszip {
buf := w.Body.Bytes()
zr, err := zip.NewReader(bytes.NewReader(buf), int64(len(buf)))
tcheck(t, err, "reading zip")
for _, f := range zr.File {
if !strings.HasSuffix(f.Name, "/") {
count++
}
}
} else {
gzr, err := gzip.NewReader(w.Body)
tcheck(t, err, "gzip reader")
tr := tar.NewReader(gzr)
for {
h, err := tr.Next()
if err == io.EOF {
break
}
tcheck(t, err, "next file in tar")
if !strings.HasSuffix(h.Name, "/") {
count++
}
_, err = io.Copy(io.Discard, tr)
tcheck(t, err, "reading from tar")
}
}
if count != expectFiles {
t.Fatalf("export, has %d files, expected %d", count, expectFiles)
}
}
testExport("/mail-export-maildir.tgz", false, 6) // 2 mailboxes, each with 2 messages and a dovecot-keyword file
testExport("/mail-export-maildir.zip", true, 6)
testExport("/mail-export-mbox.tgz", false, 2)
testExport("/mail-export-mbox.zip", true, 2)
}


@@ -59,6 +59,19 @@
}
],
"Returns": []
},
{
"Name": "ImportAbort",
"Docs": "ImportAbort aborts an import that is in progress. If the import exists and isn't\nfinished, no changes will have been made by the import.",
"Params": [
{
"Name": "importToken",
"Typewords": [
"string"
]
}
],
"Returns": []
}
],
"Sections": [],


@@ -110,6 +110,7 @@ return [dom, style, attr, prop]
const green = '#1dea20'
const yellow = '#ffe400'
const red = '#ff7443'
const blue = '#8bc8ff'
const link = (href, anchorOpt) => dom.a(attr({href: href, rel: 'noopener noreferrer'}), anchorOpt || href)

http/import.go (new file, 788 lines)

@@ -0,0 +1,788 @@
package http
import (
"archive/tar"
"archive/zip"
"bufio"
"bytes"
"compress/gzip"
"context"
cryptrand "crypto/rand"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path"
"runtime/debug"
"strconv"
"strings"
"time"
"golang.org/x/text/unicode/norm"
"github.com/mjl-/bstore"
"github.com/mjl-/mox/message"
"github.com/mjl-/mox/mlog"
"github.com/mjl-/mox/mox-"
"github.com/mjl-/mox/store"
)
type importListener struct {
Token string
Events chan importEvent
Register chan bool // Whether register is successful.
}
type importEvent struct {
Token string
SSEMsg []byte // Full SSE message, including event: ... and data: ... \n\n
Event any // nil, importCount, importProblem, importDone, importAborted
Cancel func() // For canceling the context, which aborts the import. Set in the first, import-registering event.
}
type importAbortRequest struct {
Token string
Response chan error
}
var importers = struct {
Register chan *importListener
Unregister chan *importListener
Events chan importEvent
Abort chan importAbortRequest
}{
make(chan *importListener, 1),
make(chan *importListener, 1),
make(chan importEvent),
make(chan importAbortRequest),
}
func init() {
go importManage()
}
func importManage() {
log := mlog.New("httpimport")
defer func() {
if x := recover(); x != nil {
log.Error("import manage panic", mlog.Field("err", x))
debug.PrintStack()
}
}()
type state struct {
MailboxCounts map[string]int
Problems []string
Done *time.Time
Aborted *time.Time
Listeners map[*importListener]struct{}
Cancel func()
}
imports := map[string]state{} // Token to state.
for {
select {
case l := <-importers.Register:
// If we have state, send it so the client is up to date.
if s, ok := imports[l.Token]; ok {
l.Register <- true
s.Listeners[l] = struct{}{}
sendEvent := func(kind string, v any) {
buf, err := json.Marshal(v)
if err != nil {
log.Errorx("marshal event", err, mlog.Field("kind", kind), mlog.Field("event", v))
return
}
ssemsg := fmt.Sprintf("event: %s\ndata: %s\n\n", kind, buf)
select {
case l.Events <- importEvent{kind, []byte(ssemsg), nil, nil}:
default:
log.Debug("dropped initial import event to slow consumer")
}
}
for m, c := range s.MailboxCounts {
sendEvent("count", importCount{m, c})
}
for _, p := range s.Problems {
sendEvent("problem", importProblem{p})
}
if s.Done != nil {
sendEvent("done", importDone{})
} else if s.Aborted != nil {
sendEvent("aborted", importAborted{})
}
} else {
l.Register <- false
}
case l := <-importers.Unregister:
delete(imports[l.Token].Listeners, l)
case e := <-importers.Events:
s, ok := imports[e.Token]
if !ok {
s := state{
MailboxCounts: map[string]int{},
Listeners: map[*importListener]struct{}{},
Cancel: e.Cancel,
}
imports[e.Token] = s
}
for l := range s.Listeners {
select {
case l.Events <- e:
default:
log.Debug("dropped import event to slow consumer")
}
}
if e.Event != nil {
s := imports[e.Token]
switch x := e.Event.(type) {
case importCount:
s.MailboxCounts[x.Mailbox] = x.Count
case importProblem:
s.Problems = append(s.Problems, x.Message)
case importDone:
now := time.Now()
s.Done = &now
case importAborted:
now := time.Now()
s.Aborted = &now
}
imports[e.Token] = s
}
case a := <-importers.Abort:
s, ok := imports[a.Token]
if !ok {
a.Response <- errors.New("import not found")
return
}
if s.Done != nil {
a.Response <- errors.New("import already finished")
return
}
s.Cancel()
a.Response <- nil
}
// Cleanup old state.
for t, s := range imports {
if len(s.Listeners) > 0 {
continue
}
if s.Done != nil && time.Since(*s.Done) > time.Minute || s.Aborted != nil && time.Since(*s.Aborted) > time.Minute {
delete(imports, t)
}
}
}
}
type importCount struct {
Mailbox string
Count int
}
type importProblem struct {
Message string
}
type importDone struct{}
type importAborted struct{}
// importStart prepares the import and launches the goroutine that does the actual import.
// importStart is responsible for closing f.
func importStart(log *mlog.Log, accName string, f *os.File, skipMailboxPrefix string) (string, error) {
defer func() {
if f != nil {
f.Close()
}
}()
buf := make([]byte, 16)
if _, err := cryptrand.Read(buf); err != nil {
return "", err
}
token := fmt.Sprintf("%x", buf)
if _, err := f.Seek(0, 0); err != nil {
return "", fmt.Errorf("seek to start of file: %v", err)
}
// Recognize file format.
var iszip bool
magicZip := []byte{0x50, 0x4b, 0x03, 0x04}
magicGzip := []byte{0x1f, 0x8b}
magic := make([]byte, 4)
if _, err := f.ReadAt(magic, 0); err != nil {
return "", fmt.Errorf("detecting file format: %v", err)
}
if bytes.Equal(magic, magicZip) {
iszip = true
} else if !bytes.Equal(magic[:2], magicGzip) {
return "", fmt.Errorf("file is not a zip or gzip file")
}
var zr *zip.Reader
var tr *tar.Reader
if iszip {
fi, err := f.Stat()
if err != nil {
return "", fmt.Errorf("stat temporary import zip file: %v", err)
}
zr, err = zip.NewReader(f, fi.Size())
if err != nil {
return "", fmt.Errorf("opening zip file: %v", err)
}
} else {
gzr, err := gzip.NewReader(f)
if err != nil {
return "", fmt.Errorf("gunzip: %v", err)
}
tr = tar.NewReader(gzr)
}
acc, err := store.OpenAccount(accName)
if err != nil {
return "", fmt.Errorf("open acount: %v", err)
}
acc.Lock() // Not using WithWLock because importMessages is responsible for unlocking.
tx, err := acc.DB.Begin(true)
if err != nil {
acc.Unlock()
acc.Close()
return "", fmt.Errorf("start transaction: %v", err)
}
// Ensure token is registered before returning, with context that can be canceled.
ctx, cancel := context.WithCancel(mox.Shutdown)
importers.Events <- importEvent{token, []byte(": keepalive\n\n"), nil, cancel}
log.Info("starting import")
go importMessages(ctx, log.WithCid(mox.Cid()), token, acc, tx, zr, tr, f, skipMailboxPrefix)
f = nil // importMessages is now responsible for closing.
return token, nil
}
// importMessages imports the messages from zip/tgz file f.
// importMessages is responsible for unlocking and closing acc, and closing tx and f.
func importMessages(ctx context.Context, log *mlog.Log, token string, acc *store.Account, tx *bstore.Tx, zr *zip.Reader, tr *tar.Reader, f *os.File, skipMailboxPrefix string) {
// If a fatal processing error occurs, we panic with this type.
type importError struct{ Err error }
// During import we collect all changes and broadcast them at the end, when successful.
var changes []store.Change
// IDs of delivered messages. If we have to roll back, we have to remove these files.
var deliveredIDs []int64
ximportcheckf := func(err error, format string, args ...any) {
if err != nil {
panic(importError{fmt.Errorf("%s: %s", fmt.Sprintf(format, args...), err)})
}
}
sendEvent := func(kind string, v any) {
buf, err := json.Marshal(v)
if err != nil {
log.Errorx("marshal event", err, mlog.Field("kind", kind), mlog.Field("event", v))
return
}
ssemsg := fmt.Sprintf("event: %s\ndata: %s\n\n", kind, buf)
importers.Events <- importEvent{token, []byte(ssemsg), v, nil}
}
problemf := func(format string, args ...any) {
msg := fmt.Sprintf(format, args...)
sendEvent("problem", importProblem{Message: msg})
}
canceled := func() bool {
select {
case <-ctx.Done():
sendEvent("aborted", importAborted{})
return true
default:
return false
}
}
defer func() {
if err := f.Close(); err != nil {
log.Errorx("closing uploaded messages file", err)
}
for _, id := range deliveredIDs {
p := acc.MessagePath(id)
if err := os.Remove(p); err != nil {
log.Errorx("closing message file after import error", err, mlog.Field("path", p))
}
}
if tx != nil {
if err := tx.Rollback(); err != nil {
log.Errorx("rolling back transaction", err)
}
}
if acc != nil {
acc.Unlock()
if err := acc.Close(); err != nil {
log.Errorx("closing account", err)
}
}
x := recover()
if x == nil {
return
}
if err, ok := x.(importError); ok {
log.Errorx("import error", err.Err)
problemf("%s (aborting)", err.Err)
sendEvent("aborted", importAborted{})
} else {
log.Error("import panic", mlog.Field("err", x))
debug.PrintStack()
}
}()
conf, _ := acc.Conf()
jf, _, err := acc.OpenJunkFilter(log)
if err != nil && !errors.Is(err, store.ErrNoJunkFilter) {
ximportcheckf(err, "open junk filter")
}
defer func() {
if jf != nil {
err := jf.CloseDiscard()
log.Check(err, "closing junk filter")
}
}()
// Mailboxes we imported, and message counts.
mailboxes := map[string]store.Mailbox{}
messages := map[string]int{}
// For maildirs, we may get a dovecot-keywords file after having imported the messages. Once we see the keywords, we use them. But until then, we remember which messages are missing keywords. Once the keywords become available, we fix up the flags of those messages.
mailboxKeywords := map[string]map[rune]string{} // Mailbox to 'a'-'z' to flag name.
mailboxMissingKeywordMessages := map[string]map[int64]string{} // Mailbox to message id to string consisting of the unrecognized flags.
// Previous mailbox an event was sent for. We send an event for new mailboxes, when
// another 100 messages were added, when adding a message to another mailbox, and
// finally at the end as a closing statement.
var prevMailbox string
trainMessage := func(m *store.Message, p message.Part, pos string) {
words, err := jf.ParseMessage(p)
if err != nil {
problemf("parsing message %s for updating junk filter: %v (continuing)", pos, err)
return
}
err = jf.Train(!m.Junk, words)
if err != nil {
problemf("training junk filter for message %s: %v (continuing)", pos, err)
return
}
m.TrainedJunk = &m.Junk
}
openTrainMessage := func(m *store.Message) {
path := acc.MessagePath(m.ID)
f, err := os.Open(path)
if err != nil {
problemf("opening message again for training junk filter: %v (continuing)", err)
return
}
defer f.Close()
p, err := m.LoadPart(f)
if err != nil {
problemf("loading parsed message again for training junk filter: %v (continuing)", err)
return
}
trainMessage(m, p, fmt.Sprintf("message id %d", m.ID))
}
xensureMailbox := func(name string) store.Mailbox {
name = norm.NFC.String(name)
if strings.ToLower(name) == "inbox" {
name = "Inbox"
}
if mb, ok := mailboxes[name]; ok {
return mb
}
var p string
var mb store.Mailbox
for i, e := range strings.Split(name, "/") {
if i == 0 {
p = e
} else {
p = path.Join(p, e)
}
if _, ok := mailboxes[p]; ok {
continue
}
q := bstore.QueryTx[store.Mailbox](tx)
q.FilterNonzero(store.Mailbox{Name: p})
var err error
mb, err = q.Get()
if err == bstore.ErrAbsent {
uidvalidity, err := acc.NextUIDValidity(tx)
ximportcheckf(err, "finding next uid validity")
mb = store.Mailbox{
Name: p,
UIDValidity: uidvalidity,
UIDNext: 1,
// Do not assign special-use flags. This existing account probably already has such mailboxes.
}
err = tx.Insert(&mb)
ximportcheckf(err, "inserting mailbox in database")
changes = append(changes, store.ChangeAddMailbox{Name: p})
// todo: should we also subscribe to the mailbox?
} else if err != nil {
ximportcheckf(err, "creating mailbox %s (aborting)", p)
}
if prevMailbox != "" && mb.Name != prevMailbox {
sendEvent("count", importCount{prevMailbox, messages[prevMailbox]})
}
mailboxes[mb.Name] = mb
sendEvent("count", importCount{mb.Name, 0})
prevMailbox = mb.Name
}
return mb
}
xdeliver := func(mb store.Mailbox, m *store.Message, f *os.File, pos string) {
defer func() {
if f != nil {
err := os.Remove(f.Name())
log.Check(err, "removing temporary message file for delivery")
err = f.Close()
log.Check(err, "closing temporary message file for delivery")
}
x := recover()
if x != nil {
// todo: get a variant of DeliverX that returns an error instead of panicking.
log.Error("delivery panic", mlog.Field("err", x))
}
}()
m.MailboxID = mb.ID
m.MailboxOrigID = mb.ID
// Parse message and store parsed information for later fast retrieval.
p, err := message.EnsurePart(f, m.Size)
if err != nil {
problemf("parsing message %s: %s (continuing)", pos, err)
}
m.ParsedBuf, err = json.Marshal(p)
ximportcheckf(err, "marshal parsed message structure")
if m.Received.IsZero() {
if p.Envelope != nil && !p.Envelope.Date.IsZero() {
m.Received = p.Envelope.Date
} else {
m.Received = time.Now()
}
}
// We set the flags that Deliver would set now and train ourselves. This prevents
// Deliver from training, which would open the junk filter, change it, and write it
// back to disk, for each message (slow).
m.JunkFlagsForMailbox(mb.Name, conf)
if jf != nil && m.NeedsTraining() {
trainMessage(m, p, pos)
}
const consumeFile = true
const sync = false
const notrain = true
acc.DeliverX(log, tx, m, f, consumeFile, mb.Sent, sync, notrain) // todo: need a deliver that returns an error.
deliveredIDs = append(deliveredIDs, m.ID)
changes = append(changes, store.ChangeAddUID{MailboxID: m.MailboxID, UID: m.UID, Flags: m.Flags})
messages[mb.Name]++
if messages[mb.Name]%100 == 0 || prevMailbox != mb.Name {
prevMailbox = mb.Name
sendEvent("count", importCount{mb.Name, messages[mb.Name]})
}
f = nil
}
ximportMbox := func(mailbox, filename string, r io.Reader) {
if mailbox == "" {
problemf("empty mailbox name for mbox file %s (skipping)", filename)
return
}
mb := xensureMailbox(mailbox)
mr := store.NewMboxReader(store.CreateMessageTemp, filename, r, log)
for {
m, mf, pos, err := mr.Next()
if err == io.EOF {
break
} else if err != nil {
ximportcheckf(err, "next message in mbox file")
}
xdeliver(mb, m, mf, pos)
}
}
ximportMaildir := func(mailbox, filename string, r io.Reader) {
if mailbox == "" {
problemf("empty mailbox name for maildir file %s (skipping)", filename)
return
}
mb := xensureMailbox(mailbox)
f, err := store.CreateMessageTemp("import")
ximportcheckf(err, "creating temp message")
defer func() {
if f != nil {
err := os.Remove(f.Name())
log.Check(err, "removing temporary file for delivery")
err = f.Close()
log.Check(err, "closing temporary file for delivery")
}
}()
// Copy data, changing bare \n into \r\n.
br := bufio.NewReader(r)
w := bufio.NewWriter(f)
var size int64
for {
line, err := br.ReadBytes('\n')
if err != nil && err != io.EOF {
ximportcheckf(err, "reading message")
}
if len(line) > 0 {
if !bytes.HasSuffix(line, []byte("\r\n")) {
// The final line may have no newline at all; only strip a bare \n if present.
if bytes.HasSuffix(line, []byte("\n")) {
line = line[:len(line)-1]
}
line = append(line, "\r\n"...)
}
n, err := w.Write(line)
ximportcheckf(err, "writing message")
size += int64(n)
}
if err == io.EOF {
break
}
}
err = w.Flush()
ximportcheckf(err, "writing message")
var received time.Time
t := strings.SplitN(path.Base(filename), ".", 2)
if v, err := strconv.ParseInt(t[0], 10, 64); err == nil {
received = time.Unix(v, 0)
}
// Parse flags. See https://cr.yp.to/proto/maildir.html.
var keepFlags string
flags := store.Flags{}
t = strings.SplitN(path.Base(filename), ":2,", 2)
if len(t) == 2 {
for _, c := range t[1] {
switch c {
case 'P':
// Passed, doesn't map to a common IMAP flag.
case 'R':
flags.Answered = true
case 'S':
flags.Seen = true
case 'T':
flags.Deleted = true
case 'D':
flags.Draft = true
case 'F':
flags.Flagged = true
default:
if c >= 'a' && c <= 'z' {
keywords, ok := mailboxKeywords[mailbox]
if !ok {
// No keywords file seen yet, we'll try later if it comes in.
keepFlags += string(c)
} else if kw, ok := keywords[c]; ok {
flagSet(&flags, strings.ToLower(kw))
}
}
}
}
}
m := store.Message{
Received: received,
Flags: flags,
Size: size,
}
xdeliver(mb, &m, f, filename)
f = nil
if keepFlags != "" {
if _, ok := mailboxMissingKeywordMessages[mailbox]; !ok {
mailboxMissingKeywordMessages[mailbox] = map[int64]string{}
}
mailboxMissingKeywordMessages[mailbox][m.ID] = keepFlags
}
}
importFile := func(name string, r io.Reader) {
origName := name
if strings.HasPrefix(name, skipMailboxPrefix) {
name = strings.TrimPrefix(name[len(skipMailboxPrefix):], "/")
}
if strings.HasSuffix(name, "/") {
name = strings.TrimSuffix(name, "/")
dir := path.Dir(name)
switch path.Base(dir) {
case "new", "cur", "tmp":
// Maildir, ensure it exists.
mailbox := path.Dir(dir)
xensureMailbox(mailbox)
}
// Otherwise, this is just a directory that probably holds mbox files and maildirs.
return
}
if strings.HasSuffix(path.Base(name), ".mbox") {
mailbox := name[:len(name)-len(".mbox")]
ximportMbox(mailbox, origName, r)
return
}
dir := path.Dir(name)
dirbase := path.Base(dir)
switch dirbase {
case "new", "cur", "tmp":
mailbox := path.Dir(dir)
ximportMaildir(mailbox, origName, r)
default:
if path.Base(name) == "dovecot-keywords" {
mailbox := path.Dir(name)
keywords := map[rune]string{}
words, err := store.ParseDovecotKeywords(r, log)
log.Check(err, "parsing dovecot keywords for mailbox", mlog.Field("mailbox", mailbox))
for i, kw := range words {
keywords['a'+rune(i)] = kw
}
mailboxKeywords[mailbox] = keywords
for id, chars := range mailboxMissingKeywordMessages[mailbox] {
var flags, zeroflags store.Flags
for _, c := range chars {
kw, ok := keywords[c]
if !ok {
problemf("unspecified message flag %c for message id %d (continuing)", c, id)
continue
}
flagSet(&flags, strings.ToLower(kw))
}
if flags == zeroflags {
continue
}
m := store.Message{ID: id}
err := tx.Get(&m)
ximportcheckf(err, "get imported message for flag update")
m.Flags = m.Flags.Set(flags, flags)
// We train before updating, training may set m.TrainedJunk.
if jf != nil && m.NeedsTraining() {
openTrainMessage(&m)
}
err = tx.Update(&m)
ximportcheckf(err, "updating message after flag update")
changes = append(changes, store.ChangeFlags{MailboxID: m.MailboxID, UID: m.UID, Mask: flags, Flags: flags})
}
delete(mailboxMissingKeywordMessages, mailbox)
} else {
problemf("unrecognized file %s (skipping)", origName)
}
}
}
if zr != nil {
for _, f := range zr.File {
if canceled() {
return
}
zf, err := f.Open()
if err != nil {
problemf("opening file %s in zip: %v", f.Name, err)
continue
}
importFile(f.Name, zf)
zf.Close()
}
} else {
for {
if canceled() {
return
}
h, err := tr.Next()
if err == io.EOF {
break
} else if err != nil {
problemf("reading next tar header: %v (aborting)", err)
return
}
importFile(h.Name, tr)
}
}
total := 0
for _, count := range messages {
total += count
}
log.Debug("message imported", mlog.Field("total", total))
// Send final update for count of last-imported mailbox.
if prevMailbox != "" {
sendEvent("count", importCount{prevMailbox, messages[prevMailbox]})
}
err = tx.Commit()
tx = nil
ximportcheckf(err, "commit")
deliveredIDs = nil
if jf != nil {
err := jf.Close()
if err != nil {
problemf("saving changes of training junk filter: %v (continuing)", err)
log.Errorx("saving changes of training junk filter", err)
}
jf = nil
}
comm := store.RegisterComm(acc)
defer comm.Unregister()
comm.Broadcast(changes)
acc.Unlock()
if err := acc.Close(); err != nil {
log.Errorx("closing account after import", err)
// Continue
}
acc = nil
sendEvent("done", importDone{})
}
func flagSet(flags *store.Flags, word string) {
// todo: custom labels, e.g. $label1, JunkRecorded?
switch word {
case "forwarded", "$forwarded":
flags.Forwarded = true
case "junk", "$junk":
flags.Junk = true
case "notjunk", "$notjunk", "nonjunk", "$nonjunk":
flags.Notjunk = true
case "phishing", "$phishing":
flags.Phishing = true
case "mdnsent", "$mdnsent":
flags.MDNSent = true
}
}