diff --git a/server/enterprise/external_imports.go b/server/enterprise/external_imports.go
index ae02565354a..4ad0bf051c4 100644
--- a/server/enterprise/external_imports.go
+++ b/server/enterprise/external_imports.go
@@ -19,16 +19,8 @@ import (
// Needed to ensure the init() method in the EE gets run
_ "github.com/mattermost/enterprise/ldap"
// Needed to ensure the init() method in the EE gets run
- _ "github.com/mattermost/enterprise/message_export"
- // Needed to ensure the init() method in the EE gets run
_ "github.com/mattermost/enterprise/cloud"
// Needed to ensure the init() method in the EE gets run
- _ "github.com/mattermost/enterprise/message_export/actiance_export"
- // Needed to ensure the init() method in the EE gets run
- _ "github.com/mattermost/enterprise/message_export/csv_export"
- // Needed to ensure the init() method in the EE gets run
- _ "github.com/mattermost/enterprise/message_export/global_relay_export"
- // Needed to ensure the init() method in the EE gets run
_ "github.com/mattermost/enterprise/notification"
// Needed to ensure the init() method in the EE gets run
_ "github.com/mattermost/enterprise/oauth/google"
diff --git a/server/enterprise/local_imports.go b/server/enterprise/local_imports.go
index 0890e060eaf..adeee7463e7 100644
--- a/server/enterprise/local_imports.go
+++ b/server/enterprise/local_imports.go
@@ -8,4 +8,12 @@ package enterprise
import (
// Needed to ensure the init() method in the EE gets run
_ "github.com/mattermost/mattermost/server/v8/enterprise/metrics"
+ // Needed to ensure the init() method in the EE gets run
+ _ "github.com/mattermost/mattermost/server/v8/enterprise/message_export"
+ // Needed to ensure the init() method in the EE gets run
+ _ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/actiance_export"
+ // Needed to ensure the init() method in the EE gets run
+ _ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/csv_export"
+ // Needed to ensure the init() method in the EE gets run
+ _ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/global_relay_export"
)
diff --git a/server/enterprise/message_export/actiance_export/actiance_export.go b/server/enterprise/message_export/actiance_export/actiance_export.go
new file mode 100644
index 00000000000..b87532855d5
--- /dev/null
+++ b/server/enterprise/message_export/actiance_export/actiance_export.go
@@ -0,0 +1,356 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package actiance_export
+
+import (
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "net/http"
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/common_export"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/store"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+)
+
+const (
+ XMLNS = "http://www.w3.org/2001/XMLSchema-instance"
+ ActianceExportFilename = "actiance_export.xml"
+ ActianceWarningFilename = "warning.txt"
+)
+
+// The root-level element of an actiance export
+type RootNode struct {
+ XMLName xml.Name `xml:"FileDump"`
+ XMLNS string `xml:"xmlns:xsi,attr"` // this should default to "http://www.w3.org/2001/XMLSchema-instance"
+ Channels []ChannelExport // one element per channel (open or invite-only), group message, or direct message
+}
+
+// The Conversation element indicates an ad hoc IM conversation or a group chat room.
+// The messages from a persistent chat room are exported once a day so that a Conversation entry contains the messages posted to a chat room from 12:00:00 AM to 11:59:59 PM
+type ChannelExport struct {
+ XMLName xml.Name `xml:"Conversation"`
+ Perspective string `xml:"Perspective,attr"` // the value of this attribute doesn't seem to matter. Using the channel name makes the export more human readable
+ ChannelId string `xml:"-"` // the unique id of the channel
+ RoomId string `xml:"RoomID"`
+ StartTime int64 `xml:"StartTimeUTC"` // utc timestamp (seconds), start of export period or create time of channel, whichever is greater. Example: 1366611728.
+ JoinEvents []JoinExport // start with a list of all users who were present in the channel during the export period
+ Elements []any
+ UploadStarts []*FileUploadStartExport
+ UploadStops []*FileUploadStopExport
+ LeaveEvents []LeaveExport // finish with a list of all users who were present in the channel during the export period
+ EndTime int64 `xml:"EndTimeUTC"` // utc timestamp (seconds), end of export period or delete time of channel, whichever is lesser. Example: 1366611728.
+}
+
+// The ParticipantEntered element indicates each user who participates in a conversation.
+// For chat rooms, there must be one ParticipantEntered element for each user present in the chat room at the beginning of the reporting period
+type JoinExport struct {
+ XMLName xml.Name `xml:"ParticipantEntered"`
+ UserEmail string `xml:"LoginName"` // the email of the person that joined the channel
+ UserType string `xml:"UserType"` // the type of the user that joined the channel
+ JoinTime int64 `xml:"DateTimeUTC"` // utc timestamp (seconds), time at which the user joined. Example: 1366611728
+ CorporateEmailID string `xml:"CorporateEmailID"`
+}
+
+// The ParticipantLeft element indicates the user who leaves an active IM or chat room conversation.
+// For chat rooms, there must be one ParticipantLeft element for each user present in the chat room at the end of the reporting period.
+type LeaveExport struct {
+ XMLName xml.Name `xml:"ParticipantLeft"`
+ UserEmail string `xml:"LoginName"` // the email of the person that left the channel
+ UserType string `xml:"UserType"` // the type of the user that left the channel
+ LeaveTime int64 `xml:"DateTimeUTC"` // utc timestamp (seconds), time at which the user left. Example: 1366611728
+ CorporateEmailID string `xml:"CorporateEmailID"`
+}
+
+// The Message element indicates the message sent by a user
+type PostExport struct {
+ XMLName xml.Name `xml:"Message"`
+ UserEmail string `xml:"LoginName"` // the email of the person that sent the post
+ UserType string `xml:"UserType"` // the type of the person that sent the post
+ PostTime int64 `xml:"DateTimeUTC"` // utc timestamp (seconds), time at which the user sent the post. Example: 1366611728
+ Message string `xml:"Content"` // the text body of the post
+ PreviewsPost string `xml:"PreviewsPost"` // the post id of the post that is previewed by the permalink preview feature
+}
+
+// The FileTransferStarted element indicates the beginning of a file transfer in a conversation
+type FileUploadStartExport struct {
+ XMLName xml.Name `xml:"FileTransferStarted"`
+ UserEmail string `xml:"LoginName"` // the email of the person that sent the file
+ UploadStartTime int64 `xml:"DateTimeUTC"` // utc timestamp (seconds), time at which the user started the upload. Example: 1366611728
+ Filename string `xml:"UserFileName"` // the name of the file that was uploaded
+ FilePath string `xml:"FileName"` // the path to the file, as stored on the server
+}
+
+// The FileTransferEnded element indicates the end of a file transfer in a conversation
+type FileUploadStopExport struct {
+ XMLName xml.Name `xml:"FileTransferEnded"`
+ UserEmail string `xml:"LoginName"` // the email of the person that sent the file
+ UploadStopTime int64 `xml:"DateTimeUTC"` // utc timestamp (seconds), time at which the user finished the upload. Example: 1366611728
+ Filename string `xml:"UserFileName"` // the name of the file that was uploaded
+ FilePath string `xml:"FileName"` // the path to the file, as stored on the server
+ Status string `xml:"Status"` // set to either "Completed" or "Failed" depending on the outcome of the upload operation
+}
+
+func ActianceExport(rctx request.CTX, posts []*model.MessageExport, db store.Store, exportBackend filestore.FileBackend, fileAttachmentBackend filestore.FileBackend, exportDirectory string) (warningCount int64, appErr *model.AppError) {
+ // sort the posts into buckets based on the channel in which they appeared
+ membersByChannel := common_export.MembersByChannel{}
+ metadata := common_export.Metadata{
+ Channels: map[string]common_export.MetadataChannel{},
+ MessagesCount: 0,
+ AttachmentsCount: 0,
+ StartTime: 0,
+ EndTime: 0,
+ }
+ elementsByChannel := map[string][]any{}
+ allUploadedFiles := []*model.FileInfo{}
+
+ for _, post := range posts {
+ if post == nil {
+ rctx.Logger().Warn("ignored a nil post reference in the list")
+ continue
+ }
+ elementsByChannel[*post.ChannelId] = append(elementsByChannel[*post.ChannelId], postToExportEntry(post, post.PostCreateAt, *post.PostMessage))
+
+ if post.PostDeleteAt != nil && *post.PostDeleteAt > 0 && post.PostProps != nil {
+ props := map[string]any{}
+ if json.Unmarshal([]byte(*post.PostProps), &props) == nil {
+ if _, ok := props[model.PostPropsDeleteBy]; ok {
+ elementsByChannel[*post.ChannelId] = append(elementsByChannel[*post.ChannelId], postToExportEntry(post,
+ post.PostDeleteAt, "delete "+*post.PostMessage))
+ }
+ }
+ }
+
+ startUploads, stopUploads, uploadedFiles, deleteFileMessages, err := postToAttachmentsEntries(post, db)
+ if err != nil {
+ return warningCount, err
+ }
+ elementsByChannel[*post.ChannelId] = append(elementsByChannel[*post.ChannelId], startUploads...)
+ elementsByChannel[*post.ChannelId] = append(elementsByChannel[*post.ChannelId], stopUploads...)
+ elementsByChannel[*post.ChannelId] = append(elementsByChannel[*post.ChannelId], deleteFileMessages...)
+
+ allUploadedFiles = append(allUploadedFiles, uploadedFiles...)
+
+ metadata.Update(post, len(uploadedFiles))
+
+ if _, ok := membersByChannel[*post.ChannelId]; !ok {
+ membersByChannel[*post.ChannelId] = common_export.ChannelMembers{}
+ }
+ membersByChannel[*post.ChannelId][*post.UserId] = common_export.ChannelMember{
+ Email: *post.UserEmail,
+ UserId: *post.UserId,
+ IsBot: post.IsBot,
+ Username: *post.Username,
+ }
+ }
+
+ rctx.Logger().Info("Exported data for channels", mlog.Int("number_of_channels", len(metadata.Channels)))
+
+ channelExports := []ChannelExport{}
+ for _, channel := range metadata.Channels {
+ channelExport, err := buildChannelExport(
+ channel,
+ membersByChannel[channel.ChannelId],
+ elementsByChannel[channel.ChannelId],
+ db,
+ )
+ if err != nil {
+ return warningCount, err
+ }
+ channelExports = append(channelExports, *channelExport)
+ }
+
+ export := &RootNode{
+ XMLNS: XMLNS,
+ Channels: channelExports,
+ }
+
+ return writeExport(rctx, export, allUploadedFiles, exportDirectory, exportBackend, fileAttachmentBackend)
+}
+
+func postToExportEntry(post *model.MessageExport, createTime *int64, message string) *PostExport {
+ userType := "user"
+ if post.IsBot {
+ userType = "bot"
+ }
+ return &PostExport{
+ PostTime: *createTime,
+ Message: message,
+ UserType: userType,
+ UserEmail: *post.UserEmail,
+ PreviewsPost: post.PreviewID(),
+ }
+}
+
+func postToAttachmentsEntries(post *model.MessageExport, db store.Store) ([]any, []any, []*model.FileInfo, []any, *model.AppError) {
+ // if the post included any files, we need to add special elements to the export.
+ if len(post.PostFileIds) == 0 {
+ return nil, nil, nil, nil, nil
+ }
+
+ fileInfos, err := db.FileInfo().GetForPost(*post.PostId, true, true, false)
+ if err != nil {
+ return nil, nil, nil, nil, model.NewAppError("postToAttachmentsEntries", "ent.message_export.actiance_export.get_attachment_error", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+
+ startUploads := []any{}
+ stopUploads := []any{}
+ deleteFileMessages := []any{}
+
+ uploadedFiles := []*model.FileInfo{}
+ for _, fileInfo := range fileInfos {
+ // insert a record of the file upload into the export file
+ // path to exported file is relative to the xml file, so it's just the name of the exported file
+ startUploads = append(startUploads, &FileUploadStartExport{
+ UserEmail: *post.UserEmail,
+ Filename: fileInfo.Name,
+ FilePath: fileInfo.Path,
+ UploadStartTime: *post.PostCreateAt,
+ })
+
+ stopUploads = append(stopUploads, &FileUploadStopExport{
+ UserEmail: *post.UserEmail,
+ Filename: fileInfo.Name,
+ FilePath: fileInfo.Path,
+ UploadStopTime: *post.PostCreateAt,
+ Status: "Completed",
+ })
+
+ if fileInfo.DeleteAt > 0 && post.PostDeleteAt != nil {
+ deleteFileMessages = append(deleteFileMessages, postToExportEntry(post, post.PostDeleteAt, "delete "+fileInfo.Path))
+ }
+
+ uploadedFiles = append(uploadedFiles, fileInfo)
+ }
+ return startUploads, stopUploads, uploadedFiles, deleteFileMessages, nil
+}
+
+func buildChannelExport(channel common_export.MetadataChannel, members common_export.ChannelMembers, elements []any, db store.Store) (*ChannelExport, *model.AppError) {
+ channelExport := ChannelExport{
+ ChannelId: channel.ChannelId,
+ RoomId: fmt.Sprintf("%v - %v - %v", common_export.ChannelTypeDisplayName(channel.ChannelType), channel.ChannelName, channel.ChannelId),
+ StartTime: channel.StartTime,
+ EndTime: channel.EndTime,
+ Perspective: channel.ChannelDisplayName,
+ }
+
+ channelMembersHistory, err := db.ChannelMemberHistory().GetUsersInChannelDuring(channel.StartTime, channel.EndTime, channel.ChannelId)
+ if err != nil {
+ return nil, model.NewAppError("buildChannelExport", "ent.get_users_in_channel_during", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+
+ joins, leaves := common_export.GetJoinsAndLeavesForChannel(channel.StartTime, channel.EndTime, channelMembersHistory, members)
+ type StillJoinedInfo struct {
+ Time int64
+ Type string
+ }
+ stillJoined := map[string]StillJoinedInfo{}
+ for _, join := range joins {
+ userType := "user"
+ if join.IsBot {
+ userType = "bot"
+ }
+ channelExport.JoinEvents = append(channelExport.JoinEvents, JoinExport{
+ JoinTime: join.Datetime,
+ UserEmail: join.Email,
+ UserType: userType,
+ CorporateEmailID: join.Email,
+ })
+ if value, ok := stillJoined[join.Email]; !ok {
+ stillJoined[join.Email] = StillJoinedInfo{Time: join.Datetime, Type: userType}
+ } else {
+ if join.Datetime > value.Time {
+ stillJoined[join.Email] = StillJoinedInfo{Time: join.Datetime, Type: userType}
+ }
+ }
+ }
+ for _, leave := range leaves {
+ userType := "user"
+ if leave.IsBot {
+ userType = "bot"
+ }
+ channelExport.LeaveEvents = append(channelExport.LeaveEvents, LeaveExport{
+ LeaveTime: leave.Datetime,
+ UserEmail: leave.Email,
+ UserType: userType,
+ CorporateEmailID: leave.Email,
+ })
+ if leave.Datetime > stillJoined[leave.Email].Time {
+ delete(stillJoined, leave.Email)
+ }
+ }
+
+ for email := range stillJoined {
+ channelExport.LeaveEvents = append(channelExport.LeaveEvents, LeaveExport{
+ LeaveTime: channel.EndTime,
+ UserEmail: email,
+ UserType: stillJoined[email].Type,
+ CorporateEmailID: email,
+ })
+ }
+
+ sort.Slice(channelExport.LeaveEvents, func(i, j int) bool {
+ if channelExport.LeaveEvents[i].LeaveTime == channelExport.LeaveEvents[j].LeaveTime {
+ return channelExport.LeaveEvents[i].UserEmail < channelExport.LeaveEvents[j].UserEmail
+ }
+ return channelExport.LeaveEvents[i].LeaveTime < channelExport.LeaveEvents[j].LeaveTime
+ })
+
+ channelExport.Elements = elements
+ return &channelExport, nil
+}
+
+func writeExport(rctx request.CTX, export *RootNode, uploadedFiles []*model.FileInfo, exportDirectory string, exportBackend filestore.FileBackend, fileAttachmentBackend filestore.FileBackend) (warningCount int64, appErr *model.AppError) {
+	// marshal the export object to xml
+	xmlData := &bytes.Buffer{}
+	xmlData.WriteString(xml.Header)
+
+	enc := xml.NewEncoder(xmlData)
+	enc.Indent("", " ")
+	if err := enc.Encode(export); err != nil {
+		return warningCount, model.NewAppError("ActianceExport.ActianceExport", "ent.actiance.export.marshalToXml.appError", nil, "", 0).Wrap(err)
+	}
+	enc.Flush()
+
+	// Try to disable the write timeout if the backend supports it
+	if _, err := filestore.TryWriteFileContext(rctx.Context(), exportBackend, xmlData, path.Join(exportDirectory, ActianceExportFilename)); err != nil {
+		return warningCount, model.NewAppError("ActianceExport.ActianceExport", "ent.actiance.export.write_file.appError", nil, "", 0).Wrap(err)
+	}
+
+	var missingFiles []string
+	for _, fileInfo := range uploadedFiles {
+		var attachmentSrc io.ReadCloser
+		attachmentSrc, nErr := fileAttachmentBackend.Reader(fileInfo.Path)
+		if nErr != nil {
+			missingFiles = append(missingFiles, "Warning:"+common_export.MissingFileMessage+" - "+fileInfo.Path)
+			rctx.Logger().Warn(common_export.MissingFileMessage, mlog.String("FileName", fileInfo.Path))
+			continue
+		}
+
+		destPath := path.Join(exportDirectory, fileInfo.Path)
+		// Close the reader promptly after the copy; a defer here would keep
+		_, nErr = exportBackend.WriteFile(attachmentSrc, destPath)
+		attachmentSrc.Close() // every reader open until the function returns.
+		if nErr != nil {
+			return warningCount, model.NewAppError("ActianceExport.ActianceExport", "ent.actiance.export.write_file.appError", nil, "", 0).Wrap(nErr)
+		}
+	}
+	warningCount = int64(len(missingFiles))
+	if warningCount > 0 {
+		_, err := filestore.TryWriteFileContext(rctx.Context(), exportBackend, strings.NewReader(strings.Join(missingFiles, "\n")), path.Join(exportDirectory, ActianceWarningFilename))
+		if err != nil {
+			appErr = model.NewAppError("ActianceExport.ActianceExport", "ent.actiance.export.write_file.appError", nil, "", 0).Wrap(err)
+		}
+	}
+	return warningCount, appErr
+}
diff --git a/server/enterprise/message_export/actiance_export/actiance_export_test.go b/server/enterprise/message_export/actiance_export/actiance_export_test.go
new file mode 100644
index 00000000000..2664fbdba45
--- /dev/null
+++ b/server/enterprise/message_export/actiance_export/actiance_export_test.go
@@ -0,0 +1,1075 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package actiance_export
+
+import (
+ "bytes"
+ "encoding/xml"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+ "github.com/stretchr/testify/require"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/store/storetest"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+)
+
+func TestActianceExport(t *testing.T) {
+ t.Run("no dedicated export filestore", func(t *testing.T) {
+ exportTempDir, err := os.MkdirTemp("", "")
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ err = os.RemoveAll(exportTempDir)
+ assert.NoError(t, err)
+ })
+
+ fileBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+ DriverName: model.ImageDriverLocal,
+ Directory: exportTempDir,
+ })
+ assert.NoError(t, err)
+
+ runTestActianceExport(t, fileBackend, fileBackend)
+ })
+
+ t.Run("using dedicated export filestore", func(t *testing.T) {
+ exportTempDir, err := os.MkdirTemp("", "")
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ err = os.RemoveAll(exportTempDir)
+ assert.NoError(t, err)
+ })
+
+ exportBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+ DriverName: model.ImageDriverLocal,
+ Directory: exportTempDir,
+ })
+ assert.NoError(t, err)
+
+ attachmentTempDir, err := os.MkdirTemp("", "")
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ err = os.RemoveAll(attachmentTempDir)
+ assert.NoError(t, err)
+ })
+
+		attachmentBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  attachmentTempDir,
+		})
+		assert.NoError(t, err) // was silently ignored; mirrors the exportBackend check above
+ runTestActianceExport(t, exportBackend, attachmentBackend)
+ })
+}
+
+func runTestActianceExport(t *testing.T, exportBackend filestore.FileBackend, attachmentBackend filestore.FileBackend) {
+ rctx := request.TestContext(t)
+
+ chanTypeDirect := model.ChannelTypeDirect
+ csvExportTests := []struct {
+ name string
+ cmhs map[string][]*model.ChannelMemberHistoryResult
+ posts []*model.MessageExport
+ attachments map[string][]*model.FileInfo
+ expectedData string
+ expectedFiles int
+ }{
+ {
+ name: "empty",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{},
+ posts: []*model.MessageExport{},
+ attachments: map[string][]*model.FileInfo{},
+ expectedData: strings.Join([]string{
+ xml.Header,
+ "",
+ }, ""),
+ expectedFiles: 2,
+ },
+ {
+ name: "posts",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400))},
+ {JoinTime: 8, UserId: "test2", UserEmail: "test2", Username: "test2", LeaveTime: model.NewPointer(int64(80))},
+ {JoinTime: 400, UserId: "test3", UserEmail: "test3", Username: "test3"},
+ {JoinTime: 10, UserId: "test_bot", UserEmail: "test_bot", Username: "test_bot", IsBot: true, LeaveTime: model.NewPointer(int64(20))},
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostUpdateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostUpdateAt: model.NewPointer(int64(2)),
+ PostDeleteAt: model.NewPointer(int64(2)),
+ PostMessage: model.NewPointer("edit message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostUpdateAt: model.NewPointer(int64(4)),
+ PostDeleteAt: model.NewPointer(int64(4)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ PostProps: model.NewPointer("{\"deleteBy\":\"fy8j97gwii84bk4zxprbpc9d9w\"}"),
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100)),
+ PostUpdateAt: model.NewPointer(int64(100)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{},
+ expectedData: strings.Join([]string{
+ xml.Header,
+ "\n",
+ " \n",
+ " direct - channel-name - channel-id\n",
+ " 1\n",
+ " \n",
+ " test\n",
+ " user\n",
+ " 0\n",
+ " test\n",
+ " \n",
+ " \n",
+ " test2\n",
+ " user\n",
+ " 8\n",
+ " test2\n",
+ " \n",
+ " \n",
+ " test_bot\n",
+ " bot\n",
+ " 10\n",
+ " test_bot\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " test@test.com\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " edit message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 4\n",
+ " delete message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 100\n",
+ " message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test_bot\n",
+ " bot\n",
+ " 20\n",
+ " test_bot\n",
+ " \n",
+ " \n",
+ " test2\n",
+ " user\n",
+ " 80\n",
+ " test2\n",
+ " \n",
+ " \n",
+ " test\n",
+ " user\n",
+ " 100\n",
+ " test\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 100\n",
+ " test@test.com\n",
+ " \n",
+ " 100\n",
+ " \n",
+ "",
+ }, ""),
+ expectedFiles: 2,
+ },
+ {
+ name: "post with permalink preview",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400))},
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostUpdateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ PostProps: model.NewPointer(`{"previewed_post":"n4w39mc1ff8y5fite4b8hacy1w"}`),
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100)),
+ PostUpdateAt: model.NewPointer(int64(100)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ PostProps: model.NewPointer(`{"disable_group_highlight":true}`),
+ },
+ },
+ attachments: map[string][]*model.FileInfo{},
+ expectedData: strings.Join([]string{
+ xml.Header,
+ "\n",
+ " \n",
+ " direct - channel-name - channel-id\n",
+ " 1\n",
+ " \n",
+ " test\n",
+ " user\n",
+ " 0\n",
+ " test\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " test@test.com\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " message\n",
+ " n4w39mc1ff8y5fite4b8hacy1w\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 100\n",
+ " message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test\n",
+ " user\n",
+ " 100\n",
+ " test\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 100\n",
+ " test@test.com\n",
+ " \n",
+ " 100\n",
+ " \n",
+ "",
+ }, ""),
+ expectedFiles: 0,
+ },
+ {
+ name: "posts with attachments",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id-1"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{"test1"},
+ },
+ {
+ PostId: model.NewPointer("post-id-2"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-id-1"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{
+ "post-id-1": {
+ {
+ Name: "test1-attachment",
+ Id: "test1-attachment",
+ Path: "test1-attachment",
+ },
+ },
+ },
+ expectedData: strings.Join([]string{
+ xml.Header,
+ "\n",
+ " \n",
+ " direct - channel-name - channel-id\n",
+ " 1\n",
+ " \n",
+ " test\n",
+ " user\n",
+ " 0\n",
+ " test\n",
+ " \n",
+ " \n",
+ " test2\n",
+ " user\n",
+ " 8\n",
+ " test2\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " test@test.com\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 1\n",
+ " message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " 1\n",
+ " test1-attachment\n",
+ " test1-attachment\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " 1\n",
+ " test1-attachment\n",
+ " test1-attachment\n",
+ " Completed\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 100\n",
+ " message\n",
+ " \n",
+ " \n",
+ " \n",
+ " test2\n",
+ " user\n",
+ " 80\n",
+ " test2\n",
+ " \n",
+ " \n",
+ " test\n",
+ " user\n",
+ " 100\n",
+ " test\n",
+ " \n",
+ " \n",
+ " test@test.com\n",
+ " user\n",
+ " 100\n",
+ " test@test.com\n",
+ " \n",
+ " 100\n",
+ " \n",
+ "",
+ }, ""),
+ expectedFiles: 3,
+ },
+ }
+
+ for _, tt := range csvExportTests {
+ t.Run(tt.name, func(t *testing.T) {
+ mockStore := &storetest.Store{}
+ defer mockStore.AssertExpectations(t)
+
+ if len(tt.attachments) > 0 {
+				for postID, attachments := range tt.attachments {
+					attachments := attachments // TODO: Remove once go1.22 is used
+					call := mockStore.FileInfoStore.On("GetForPost", postID, true, true, false)
+ call.Run(func(args mock.Arguments) {
+ call.Return(attachments, nil)
+ })
+ _, err := attachmentBackend.WriteFile(bytes.NewReader([]byte{}), attachments[0].Path)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ err = attachmentBackend.RemoveFile(attachments[0].Path)
+ require.NoError(t, err)
+ })
+ }
+ }
+
+ if len(tt.cmhs) > 0 {
+ for channelId, cmhs := range tt.cmhs {
+ mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(100), channelId).Return(cmhs, nil)
+ }
+ }
+
+ warnings, appErr := ActianceExport(rctx, tt.posts, mockStore, exportBackend, attachmentBackend, "test")
+ assert.Nil(t, appErr)
+ assert.Equal(t, int64(0), warnings)
+
+ data, nErr := exportBackend.ReadFile("test/actiance_export.xml")
+ assert.NoError(t, nErr)
+ assert.Equal(t, tt.expectedData, string(data))
+
+ t.Cleanup(func() {
+ err := exportBackend.RemoveFile("test/actiance_export.xml")
+ assert.NoError(t, err)
+ })
+ })
+ }
+}
+
+// TestMultipleActianceExport runs the multi-step Actiance export scenarios in
+// both supported deployment shapes: one filestore serving exports and
+// attachments alike, and a dedicated attachment filestore separate from the
+// export filestore.
+func TestMultipleActianceExport(t *testing.T) {
+	t.Run("no dedicated export filestore", func(t *testing.T) {
+		exportTempDir, err := os.MkdirTemp("", "")
+		require.NoError(t, err)
+		t.Cleanup(func() {
+			err = os.RemoveAll(exportTempDir)
+			assert.NoError(t, err)
+		})
+
+		fileBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  exportTempDir,
+		})
+		assert.NoError(t, err)
+
+		// The same backend doubles as the attachment store in this scenario.
+		runTestMultipleActianceExport(t, fileBackend, fileBackend)
+	})
+
+	t.Run("using dedicated export filestore", func(t *testing.T) {
+		exportTempDir, err := os.MkdirTemp("", "")
+		require.NoError(t, err)
+		t.Cleanup(func() {
+			err = os.RemoveAll(exportTempDir)
+			assert.NoError(t, err)
+		})
+
+		exportBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  exportTempDir,
+		})
+		assert.NoError(t, err)
+
+		attachmentTempDir, err := os.MkdirTemp("", "")
+		require.NoError(t, err)
+		t.Cleanup(func() {
+			err = os.RemoveAll(attachmentTempDir)
+			assert.NoError(t, err)
+		})
+
+		attachmentBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  attachmentTempDir,
+		})
+		// Bug fix: this error was previously ignored; a failed backend creation
+		// would otherwise surface as an opaque failure further down.
+		require.NoError(t, err)
+
+		runTestMultipleActianceExport(t, exportBackend, attachmentBackend)
+	})
+}
+
+// runTestMultipleActianceExport drives the shared multi-step export scenarios:
+// each table entry posts a first batch ("step1"), exports it, posts a follow-up
+// batch ("step2" — e.g. an edit or a deletion of the same post), exports again,
+// and compares the generated actiance_export.xml with the expected content for
+// each step. This exercises the incremental/repeat-export path.
+func runTestMultipleActianceExport(t *testing.T, exportBackend filestore.FileBackend, attachmentBackend filestore.FileBackend) {
+	rctx := request.TestContext(t)
+
+	chanTypeDirect := model.ChannelTypeDirect
+	actianceExportTests := []struct {
+		name          string
+		cmhs          map[string][]*model.ChannelMemberHistoryResult
+		posts         map[string][]*model.MessageExport
+		attachments   map[string][]*model.FileInfo
+		expectedData  map[string]string
+		expectedFiles int
+	}{
+		{
+			name: "post,export,delete,export",
+			cmhs: map[string][]*model.ChannelMemberHistoryResult{
+				"channel-id": {
+					{JoinTime: 0, UserId: "user-id", UserEmail: "test@test.com", Username: "username", LeaveTime: model.NewPointer(int64(400))},
+				},
+			},
+			posts: map[string][]*model.MessageExport{
+				"step1": {
+					{
+						PostId:             model.NewPointer("post-id"),
+						PostOriginalId:     model.NewPointer("post-original-id"),
+						TeamId:             model.NewPointer("team-id"),
+						TeamName:           model.NewPointer("team-name"),
+						TeamDisplayName:    model.NewPointer("team-display-name"),
+						ChannelId:          model.NewPointer("channel-id"),
+						ChannelName:        model.NewPointer("channel-name"),
+						ChannelDisplayName: model.NewPointer("channel-display-name"),
+						PostCreateAt:       model.NewPointer(int64(1)),
+						PostUpdateAt:       model.NewPointer(int64(1)),
+						PostMessage:        model.NewPointer("message"),
+						UserEmail:          model.NewPointer("test@test.com"),
+						UserId:             model.NewPointer("user-id"),
+						Username:           model.NewPointer("username"),
+						ChannelType:        &chanTypeDirect,
+						PostFileIds:        []string{},
+					},
+				},
+				// step2 re-exports the same post after deletion; PostDeleteAt and
+				// the deleteBy prop mark it as deleted.
+				"step2": {
+					{
+						PostId:             model.NewPointer("post-id"),
+						PostOriginalId:     model.NewPointer("post-original-id"),
+						TeamId:             model.NewPointer("team-id"),
+						TeamName:           model.NewPointer("team-name"),
+						TeamDisplayName:    model.NewPointer("team-display-name"),
+						ChannelId:          model.NewPointer("channel-id"),
+						ChannelName:        model.NewPointer("channel-name"),
+						ChannelDisplayName: model.NewPointer("channel-display-name"),
+						PostCreateAt:       model.NewPointer(int64(1)),
+						PostUpdateAt:       model.NewPointer(int64(2)),
+						PostDeleteAt:       model.NewPointer(int64(2)),
+						PostMessage:        model.NewPointer("message"),
+						UserEmail:          model.NewPointer("test@test.com"),
+						UserId:             model.NewPointer("user-id"),
+						Username:           model.NewPointer("username"),
+						ChannelType:        &chanTypeDirect,
+						PostFileIds:        []string{},
+						PostProps:          model.NewPointer("{\"deleteBy\":\"fy8j97gwii84bk4zxprbpc9d9w\"}"),
+					},
+				},
+			},
+			expectedData: map[string]string{
+				"step1": strings.Join([]string{
+					xml.Header,
+					"\n",
+					" \n",
+					" direct - channel-name - channel-id\n",
+					" 1\n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 0\n",
+					" test@test.com\n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" message\n",
+					" \n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" test@test.com\n",
+					" \n",
+					" 1\n",
+					" \n",
+					"",
+				}, ""),
+				"step2": strings.Join([]string{
+					xml.Header,
+					"\n",
+					" \n",
+					" direct - channel-name - channel-id\n",
+					" 1\n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 0\n",
+					" test@test.com\n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" message\n",
+					" \n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 2\n",
+					" delete message\n",
+					" \n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" test@test.com\n",
+					" \n",
+					" 1\n",
+					" \n",
+					"",
+				}, ""),
+			},
+			expectedFiles: 2,
+		},
+		{
+			name: "post,export,edit,export",
+			cmhs: map[string][]*model.ChannelMemberHistoryResult{
+				"channel-id": {
+					{JoinTime: 0, UserId: "user-id", UserEmail: "test@test.com", Username: "username", LeaveTime: model.NewPointer(int64(400))},
+				},
+			},
+			posts: map[string][]*model.MessageExport{
+				"step1": {
+					{
+						PostId:             model.NewPointer("post-id"),
+						PostOriginalId:     model.NewPointer("post-original-id"),
+						TeamId:             model.NewPointer("team-id"),
+						TeamName:           model.NewPointer("team-name"),
+						TeamDisplayName:    model.NewPointer("team-display-name"),
+						ChannelId:          model.NewPointer("channel-id"),
+						ChannelName:        model.NewPointer("channel-name"),
+						ChannelDisplayName: model.NewPointer("channel-display-name"),
+						PostCreateAt:       model.NewPointer(int64(1)),
+						PostUpdateAt:       model.NewPointer(int64(1)),
+						PostMessage:        model.NewPointer("message"),
+						UserEmail:          model.NewPointer("test@test.com"),
+						UserId:             model.NewPointer("user-id"),
+						Username:           model.NewPointer("username"),
+						ChannelType:        &chanTypeDirect,
+						PostFileIds:        []string{},
+					},
+				},
+				// step2 re-exports the same post after an edit (new message text,
+				// bumped PostUpdateAt/PostDeleteAt).
+				"step2": {
+					{
+						PostId:             model.NewPointer("post-id"),
+						PostOriginalId:     model.NewPointer("post-original-id"),
+						TeamId:             model.NewPointer("team-id"),
+						TeamName:           model.NewPointer("team-name"),
+						TeamDisplayName:    model.NewPointer("team-display-name"),
+						ChannelId:          model.NewPointer("channel-id"),
+						ChannelName:        model.NewPointer("channel-name"),
+						ChannelDisplayName: model.NewPointer("channel-display-name"),
+						PostCreateAt:       model.NewPointer(int64(1)),
+						PostUpdateAt:       model.NewPointer(int64(2)),
+						PostDeleteAt:       model.NewPointer(int64(2)),
+						PostMessage:        model.NewPointer("edit message"),
+						UserEmail:          model.NewPointer("test@test.com"),
+						UserId:             model.NewPointer("user-id"),
+						Username:           model.NewPointer("username"),
+						ChannelType:        &chanTypeDirect,
+						PostFileIds:        []string{},
+					},
+				},
+			},
+			expectedData: map[string]string{
+				"step1": strings.Join([]string{
+					xml.Header,
+					"\n",
+					" \n",
+					" direct - channel-name - channel-id\n",
+					" 1\n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 0\n",
+					" test@test.com\n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" message\n",
+					" \n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" test@test.com\n",
+					" \n",
+					" 1\n",
+					" \n",
+					"",
+				}, ""),
+				"step2": strings.Join([]string{
+					xml.Header,
+					"\n",
+					" \n",
+					" direct - channel-name - channel-id\n",
+					" 1\n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 0\n",
+					" test@test.com\n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" edit message\n",
+					" \n",
+					" \n",
+					" \n",
+					" test@test.com\n",
+					" user\n",
+					" 1\n",
+					" test@test.com\n",
+					" \n",
+					" 1\n",
+					" \n",
+					"",
+				}, ""),
+			},
+			expectedFiles: 2,
+		},
+	}
+
+	for _, tt := range actianceExportTests {
+		t.Run(tt.name, func(t *testing.T) {
+			mockStore := &storetest.Store{}
+			defer mockStore.AssertExpectations(t)
+
+			if len(tt.cmhs) > 0 {
+				for channelId, cmhs := range tt.cmhs {
+					mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(1), channelId).Return(cmhs, nil)
+				}
+			}
+
+			// Step 1: export the initial batch and check the XML on disk.
+			warnings, appErr := ActianceExport(rctx, tt.posts["step1"], mockStore, exportBackend, attachmentBackend, "test")
+			assert.Nil(t, appErr)
+			assert.Equal(t, int64(0), warnings)
+
+			data, err := exportBackend.ReadFile("test/actiance_export.xml")
+			assert.NoError(t, err)
+			assert.Equal(t, tt.expectedData["step1"], string(data))
+
+			// Step 2: export again with the follow-up batch; the file at the same
+			// path must now reflect the second step's content.
+			warnings, appErr = ActianceExport(rctx, tt.posts["step2"], mockStore, exportBackend, attachmentBackend, "test")
+			assert.Nil(t, appErr)
+			assert.Equal(t, int64(0), warnings)
+
+			data, err = exportBackend.ReadFile("test/actiance_export.xml")
+			assert.NoError(t, err)
+			assert.Equal(t, tt.expectedData["step2"], string(data))
+
+			t.Cleanup(func() {
+				err = exportBackend.RemoveFile("test/actiance_export.xml")
+				assert.NoError(t, err)
+			})
+		})
+	}
+}
+// TestPostToAttachmentsEntries verifies the conversion of a post's attachments
+// into upload-start/upload-stop export entries, the list of FileInfos to copy
+// into the archive, and the synthetic "delete" messages emitted for attachments
+// removed along with the post.
+func TestPostToAttachmentsEntries(t *testing.T) {
+	chanTypeDirect := model.ChannelTypeDirect
+	tt := []struct {
+		name                       string
+		post                       model.MessageExport
+		attachments                []*model.FileInfo
+		expectedStarts             []any
+		expectedStops              []any
+		expectedFileInfos          []*model.FileInfo
+		expectedDeleteFileMessages []any
+		expectError                bool
+	}{
+		{
+			name: "no-attachments",
+			post: model.MessageExport{
+				ChannelId:          model.NewPointer("Test"),
+				ChannelDisplayName: model.NewPointer("Test"),
+				PostCreateAt:       model.NewPointer(int64(1)),
+				PostMessage:        model.NewPointer("Some message"),
+				UserEmail:          model.NewPointer("test@test.com"),
+				UserId:             model.NewPointer("test"),
+				Username:           model.NewPointer("test"),
+				ChannelType:        &chanTypeDirect,
+			},
+			attachments:                nil,
+			expectedStarts:             nil,
+			expectedStops:              nil,
+			expectedFileInfos:          nil,
+			expectedDeleteFileMessages: nil,
+			expectError:                false,
+		},
+		{
+			name: "one-attachment",
+			post: model.MessageExport{
+				PostId:             model.NewPointer("test"),
+				ChannelId:          model.NewPointer("Test"),
+				ChannelDisplayName: model.NewPointer("Test"),
+				PostCreateAt:       model.NewPointer(int64(1)),
+				PostMessage:        model.NewPointer("Some message"),
+				UserEmail:          model.NewPointer("test@test.com"),
+				UserId:             model.NewPointer("test"),
+				Username:           model.NewPointer("test"),
+				ChannelType:        &chanTypeDirect,
+				PostFileIds:        []string{"12345"},
+			},
+			attachments: []*model.FileInfo{
+				{Name: "test", Id: "12345", Path: "filename.txt"},
+			},
+			expectedStarts: []any{
+				&FileUploadStartExport{UserEmail: "test@test.com", UploadStartTime: 1, Filename: "test", FilePath: "filename.txt"},
+			},
+			expectedStops: []any{
+				&FileUploadStopExport{UserEmail: "test@test.com", UploadStopTime: 1, Filename: "test", FilePath: "filename.txt", Status: "Completed"},
+			},
+			expectedFileInfos: []*model.FileInfo{
+				{Name: "test", Id: "12345", Path: "filename.txt"},
+			},
+			expectedDeleteFileMessages: []any{},
+			expectError:                false,
+		},
+		{
+			name: "two-attachment",
+			post: model.MessageExport{
+				PostId:             model.NewPointer("test"),
+				ChannelId:          model.NewPointer("Test"),
+				ChannelDisplayName: model.NewPointer("Test"),
+				PostCreateAt:       model.NewPointer(int64(1)),
+				PostMessage:        model.NewPointer("Some message"),
+				UserEmail:          model.NewPointer("test@test.com"),
+				UserId:             model.NewPointer("test"),
+				Username:           model.NewPointer("test"),
+				ChannelType:        &chanTypeDirect,
+				PostFileIds:        []string{"12345", "54321"},
+			},
+			attachments: []*model.FileInfo{
+				{Name: "test", Id: "12345", Path: "filename.txt"},
+				{Name: "test2", Id: "54321", Path: "filename2.txt"},
+			},
+			expectedStarts: []any{
+				&FileUploadStartExport{UserEmail: "test@test.com", UploadStartTime: 1, Filename: "test", FilePath: "filename.txt"},
+				&FileUploadStartExport{UserEmail: "test@test.com", UploadStartTime: 1, Filename: "test2", FilePath: "filename2.txt"},
+			},
+			expectedStops: []any{
+				&FileUploadStopExport{UserEmail: "test@test.com", UploadStopTime: 1, Filename: "test", FilePath: "filename.txt", Status: "Completed"},
+				&FileUploadStopExport{UserEmail: "test@test.com", UploadStopTime: 1, Filename: "test2", FilePath: "filename2.txt", Status: "Completed"},
+			},
+			expectedFileInfos: []*model.FileInfo{
+				{Name: "test", Id: "12345", Path: "filename.txt"},
+				{Name: "test2", Id: "54321", Path: "filename2.txt"},
+			},
+			expectedDeleteFileMessages: []any{},
+			expectError:                false,
+		},
+		{
+			// An attachment deleted together with the post (DeleteAt set) still
+			// produces start/stop entries, plus a synthetic delete message.
+			name: "one-attachment-deleted",
+			post: model.MessageExport{
+				PostId:             model.NewPointer("test"),
+				ChannelId:          model.NewPointer("Test"),
+				ChannelDisplayName: model.NewPointer("Test"),
+				PostCreateAt:       model.NewPointer(int64(1)),
+				PostDeleteAt:       model.NewPointer(int64(2)),
+				PostMessage:        model.NewPointer("Some message"),
+				UserEmail:          model.NewPointer("test@test.com"),
+				UserId:             model.NewPointer("test"),
+				Username:           model.NewPointer("test"),
+				ChannelType:        &chanTypeDirect,
+				PostFileIds:        []string{"12345", "54321"},
+			},
+			attachments: []*model.FileInfo{
+				{Name: "test", Id: "12345", Path: "filename.txt", DeleteAt: 2},
+			},
+			expectedStarts: []any{
+				&FileUploadStartExport{UserEmail: "test@test.com", UploadStartTime: 1, Filename: "test", FilePath: "filename.txt"},
+			},
+			expectedStops: []any{
+				&FileUploadStopExport{UserEmail: "test@test.com", UploadStopTime: 1, Filename: "test", FilePath: "filename.txt", Status: "Completed"},
+			},
+			expectedFileInfos: []*model.FileInfo{
+				{Name: "test", Id: "12345", Path: "filename.txt", DeleteAt: 2},
+			},
+			expectedDeleteFileMessages: []any{
+				&PostExport{UserEmail: "test@test.com", UserType: "user", PostTime: 2, Message: "delete " + "filename.txt"},
+			},
+			expectError: false,
+		},
+	}
+
+	for _, tc := range tt {
+		t.Run(tc.name, func(t *testing.T) {
+			mockStore := &storetest.Store{}
+			defer mockStore.AssertExpectations(t)
+
+			if len(tc.attachments) > 0 {
+				// The Return is set inside Run so the expectation yields this
+				// table entry's attachments when the store is queried.
+				call := mockStore.FileInfoStore.On("GetForPost", *tc.post.PostId, true, true, false)
+				call.Run(func(args mock.Arguments) {
+					call.Return(tc.attachments, nil)
+				})
+			}
+			uploadStarts, uploadStops, files, deleteFileMessages, err := postToAttachmentsEntries(&tc.post, mockStore)
+			if tc.expectError {
+				assert.NotNil(t, err)
+			} else {
+				assert.Nil(t, err)
+			}
+			assert.Equal(t, tc.expectedStarts, uploadStarts)
+			assert.Equal(t, tc.expectedStops, uploadStops)
+			assert.Equal(t, tc.expectedFileInfos, files)
+			assert.Equal(t, tc.expectedDeleteFileMessages, deleteFileMessages)
+		})
+	}
+}
+
+// TestWriteExportWarnings verifies that writeExport records one warning per
+// attachment missing from the filestore instead of failing the whole export.
+func TestWriteExportWarnings(t *testing.T) {
+	tempDir, err := os.MkdirTemp("", "")
+	require.NoError(t, err)
+	t.Cleanup(func() {
+		err = os.RemoveAll(tempDir)
+		assert.NoError(t, err)
+	})
+
+	config := filestore.FileBackendSettings{
+		DriverName: model.ImageDriverLocal,
+		Directory:  tempDir,
+	}
+
+	fileBackend, err := filestore.NewFileBackend(config)
+	// Bug fix: was assert.NoError — continuing with a nil/unusable backend
+	// would panic later in the test, obscuring the real failure.
+	require.NoError(t, err)
+
+	rctx := request.TestContext(t)
+
+	mockStore := &storetest.Store{}
+	defer mockStore.AssertExpectations(t)
+
+	// Do not create the files, we want them to error
+	uploadedFiles := []*model.FileInfo{
+		{Name: "test", Id: "12345", Path: "missing.txt"},
+		{Name: "test2", Id: "54321", Path: "missing.txt"},
+	}
+	export := &RootNode{
+		XMLNS:    XMLNS,
+		Channels: []ChannelExport{},
+	}
+
+	// Both referenced files are absent, so exactly two warnings are expected
+	// while the export itself still succeeds.
+	warnings, appErr := writeExport(rctx, export, uploadedFiles, "test", fileBackend, fileBackend)
+	assert.Nil(t, appErr)
+	assert.Equal(t, int64(2), warnings)
+
+	err = fileBackend.RemoveFile("test/actiance_export.xml")
+	require.NoError(t, err)
+}
diff --git a/server/enterprise/message_export/common_export/common_export.go b/server/enterprise/message_export/common_export/common_export.go
new file mode 100644
index 00000000000..6c5c8825090
--- /dev/null
+++ b/server/enterprise/message_export/common_export/common_export.go
@@ -0,0 +1,155 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package common_export
+
+import (
+ "fmt"
+
+ "github.com/mattermost/mattermost/server/public/model"
+)
+
+// MissingFileMessage is the warning text recorded when a post's attachment
+// cannot be found in the filestore and therefore cannot be copied into the
+// export archive.
+const MissingFileMessage = "File missing for post; cannot copy file to archive"
+
+// ChannelMemberJoin records a user joining a channel at Datetime.
+type ChannelMemberJoin struct {
+	UserId   string
+	IsBot    bool
+	Email    string
+	Username string
+	Datetime int64
+}
+
+// ChannelMemberLeave records a user leaving a channel at Datetime.
+type ChannelMemberLeave struct {
+	UserId   string
+	IsBot    bool
+	Email    string
+	Username string
+	Datetime int64
+}
+
+// ChannelMember identifies a user known to have participated in a channel
+// (e.g. observed via an exported post).
+type ChannelMember struct {
+	UserId   string
+	IsBot    bool
+	Email    string
+	Username string
+}
+
+// ChannelMembers maps a user id to that user's ChannelMember record.
+type ChannelMembers map[string]ChannelMember
+
+// MembersByChannel maps a channel id to the members seen in that channel.
+type MembersByChannel map[string]ChannelMembers
+
+// MetadataChannel aggregates per-channel statistics for one export run.
+// Team fields are pointers because direct/group channels have no team.
+type MetadataChannel struct {
+	TeamId             *string
+	TeamName           *string
+	TeamDisplayName    *string
+	ChannelId          string
+	ChannelName        string
+	ChannelDisplayName string
+	ChannelType        model.ChannelType
+	RoomId             string
+	StartTime          int64
+	EndTime            int64
+	MessagesCount      int
+	AttachmentsCount   int
+}
+
+// Metadata aggregates export-wide statistics across all channels.
+type Metadata struct {
+	Channels         map[string]MetadataChannel
+	MessagesCount    int
+	AttachmentsCount int
+	StartTime        int64
+	EndTime          int64
+}
+
+// Update folds one exported post (and its attachment count) into the running
+// totals, creating the per-channel metadata record the first time a channel
+// is seen.
+func (metadata *Metadata) Update(post *model.MessageExport, attachments int) {
+	ch, seen := metadata.Channels[*post.ChannelId]
+	if !seen {
+		// First post for this channel: initialize its record; counters start
+		// at their zero values and StartTime is this post's create time.
+		ch = MetadataChannel{
+			TeamId:             post.TeamId,
+			TeamName:           post.TeamName,
+			TeamDisplayName:    post.TeamDisplayName,
+			ChannelId:          *post.ChannelId,
+			ChannelName:        *post.ChannelName,
+			ChannelDisplayName: *post.ChannelDisplayName,
+			ChannelType:        *post.ChannelType,
+			RoomId:             fmt.Sprintf("%v - %v", ChannelTypeDisplayName(*post.ChannelType), *post.ChannelId),
+			StartTime:          *post.PostCreateAt,
+		}
+	}
+
+	// Per-channel counters; EndTime tracks the latest processed post.
+	ch.MessagesCount++
+	ch.AttachmentsCount += attachments
+	ch.EndTime = *post.PostCreateAt
+	metadata.Channels[*post.ChannelId] = ch
+
+	// Export-wide counters; StartTime is fixed by the first update (while it
+	// is still zero) and EndTime keeps advancing.
+	metadata.MessagesCount++
+	metadata.AttachmentsCount += attachments
+	if metadata.StartTime == 0 {
+		metadata.StartTime = *post.PostCreateAt
+	}
+	metadata.EndTime = *post.PostCreateAt
+}
+
+// GetJoinsAndLeavesForChannel computes the join and leave events relevant to
+// the [startTime, endTime] export window.
+//
+// channelMembersHistory supplies explicit join/leave records; channelMembers
+// lists users observed in exported posts who may lack a history record, and
+// each such user is synthesized as a join at startTime. Users deactivated
+// before the window starts are excluded entirely.
+func GetJoinsAndLeavesForChannel(startTime int64, endTime int64, channelMembersHistory []*model.ChannelMemberHistoryResult, channelMembers ChannelMembers) ([]ChannelMemberJoin, []ChannelMemberLeave) {
+	joins := []ChannelMemberJoin{}
+	leaves := []ChannelMemberLeave{}
+
+	alreadyJoined := map[string]bool{}
+	for _, cmh := range channelMembersHistory {
+		// Skip users deactivated before the export window began.
+		if cmh.UserDeleteAt > 0 && cmh.UserDeleteAt < startTime {
+			continue
+		}
+
+		// Skip memberships lying entirely outside the window: joined after it
+		// ended, or left before it started.
+		if cmh.JoinTime > endTime {
+			continue
+		}
+		if cmh.LeaveTime != nil && *cmh.LeaveTime < startTime {
+			continue
+		}
+
+		// Here cmh.JoinTime <= endTime is guaranteed by the guard above, so the
+		// join is always recorded (the original re-checked the condition).
+		joins = append(joins, ChannelMemberJoin{
+			UserId:   cmh.UserId,
+			IsBot:    cmh.IsBot,
+			Email:    cmh.UserEmail,
+			Username: cmh.Username,
+			Datetime: cmh.JoinTime,
+		})
+		alreadyJoined[cmh.UserId] = true
+
+		if cmh.LeaveTime != nil && *cmh.LeaveTime <= endTime {
+			leaves = append(leaves, ChannelMemberLeave{
+				UserId:   cmh.UserId,
+				IsBot:    cmh.IsBot,
+				Email:    cmh.UserEmail,
+				Username: cmh.Username,
+				Datetime: *cmh.LeaveTime,
+			})
+		}
+	}
+
+	// Synthesize joins (at startTime) for users seen in posts but absent from
+	// the explicit membership history.
+	for _, member := range channelMembers {
+		if alreadyJoined[member.UserId] {
+			continue
+		}
+
+		joins = append(joins, ChannelMemberJoin{
+			UserId:   member.UserId,
+			IsBot:    member.IsBot,
+			Email:    member.Email,
+			Username: member.Username,
+			Datetime: startTime,
+		})
+	}
+	return joins, leaves
+}
+
+// ChannelTypeDisplayName returns the lowercase display name used in export
+// room identifiers for the given channel type. Unknown types yield the empty
+// string, matching the previous map-lookup behavior.
+func ChannelTypeDisplayName(channelType model.ChannelType) string {
+	// A switch avoids allocating a fresh lookup map on every call.
+	switch channelType {
+	case model.ChannelTypeOpen:
+		return "public"
+	case model.ChannelTypePrivate:
+		return "private"
+	case model.ChannelTypeDirect:
+		return "direct"
+	case model.ChannelTypeGroup:
+		return "group"
+	default:
+		return ""
+	}
+}
diff --git a/server/enterprise/message_export/common_export/common_export_test.go b/server/enterprise/message_export/common_export/common_export_test.go
new file mode 100644
index 00000000000..a8fc364694d
--- /dev/null
+++ b/server/enterprise/message_export/common_export/common_export_test.go
@@ -0,0 +1,237 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package common_export
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/mattermost/mattermost/server/public/model"
+)
+
+// TestUpdateMetadata verifies that Metadata.Update accumulates per-channel and
+// export-wide message/attachment counts, and that posting to a second channel
+// creates a separate channel record while the totals keep growing.
+func TestUpdateMetadata(t *testing.T) {
+	metadata := Metadata{
+		Channels:         map[string]MetadataChannel{},
+		MessagesCount:    0,
+		AttachmentsCount: 0,
+		StartTime:        0,
+		EndTime:          0,
+	}
+
+	testString := "test"
+	chanTypeDirect := model.ChannelTypeDirect
+	createdAt := int64(12345)
+
+	post := model.MessageExport{
+		TeamId:          &testString,
+		TeamName:        &testString,
+		TeamDisplayName: &testString,
+
+		ChannelId:          &testString,
+		ChannelName:        &testString,
+		ChannelDisplayName: &testString,
+		ChannelType:        &chanTypeDirect,
+
+		UserId:    &testString,
+		UserEmail: &testString,
+		Username:  &testString,
+
+		PostId:         &testString,
+		PostCreateAt:   &createdAt,
+		PostMessage:    &testString,
+		PostType:       &testString,
+		PostOriginalId: &testString,
+		PostFileIds:    []string{},
+	}
+	// First update: one message and two attachments in channel "test".
+	metadata.Update(&post, 2)
+
+	assert.Len(t, metadata.Channels, 1)
+	assert.Equal(t, 1, metadata.Channels["test"].MessagesCount)
+	assert.Equal(t, 2, metadata.Channels["test"].AttachmentsCount)
+	assert.Equal(t, 1, metadata.MessagesCount)
+	assert.Equal(t, 2, metadata.AttachmentsCount)
+
+	// Second update for the same channel: counters double, no new channel.
+	metadata.Update(&post, 2)
+
+	assert.Len(t, metadata.Channels, 1)
+	assert.Equal(t, 2, metadata.Channels["test"].MessagesCount)
+	assert.Equal(t, 4, metadata.Channels["test"].AttachmentsCount)
+	assert.Equal(t, 2, metadata.MessagesCount)
+	assert.Equal(t, 4, metadata.AttachmentsCount)
+
+	// Switch the post to a new channel id: a second channel record appears and
+	// the export-wide totals include both channels.
+	testString2 := "test2"
+	post.ChannelId = &testString2
+
+	metadata.Update(&post, 2)
+
+	assert.Len(t, metadata.Channels, 2)
+	assert.Equal(t, 2, metadata.Channels["test"].MessagesCount)
+	assert.Equal(t, 4, metadata.Channels["test"].AttachmentsCount)
+	assert.Equal(t, 1, metadata.Channels["test2"].MessagesCount)
+	assert.Equal(t, 2, metadata.Channels["test2"].AttachmentsCount)
+	assert.Equal(t, 3, metadata.MessagesCount)
+	assert.Equal(t, 6, metadata.AttachmentsCount)
+}
+
+// TestGetJoinsAndLeavesForChannel exercises the join/leave window logic for an
+// export spanning [100, 200]: memberships outside the window are dropped,
+// partial overlaps count appropriately, users seen only in posts get implicit
+// joins, and users deactivated before the window are excluded.
+func TestGetJoinsAndLeavesForChannel(t *testing.T) {
+	channel := MetadataChannel{
+		StartTime:          100,
+		EndTime:            200,
+		ChannelId:          "good-request-1",
+		TeamId:             model.NewPointer("test"),
+		TeamName:           model.NewPointer("test"),
+		TeamDisplayName:    model.NewPointer("test"),
+		ChannelName:        "test",
+		ChannelDisplayName: "test",
+		ChannelType:        "O",
+	}
+
+	tt := []struct {
+		name           string
+		channel        MetadataChannel
+		membersHistory []*model.ChannelMemberHistoryResult
+		usersInPosts   ChannelMembers
+		expectedJoins  int
+		expectedLeaves int
+	}{
+		{
+			name:           "no-joins-no-leaves",
+			channel:        channel,
+			membersHistory: nil,
+			usersInPosts:   nil,
+			expectedJoins:  0,
+			expectedLeaves: 0,
+		},
+		{
+			name:    "joins-and-leaves-outside-the-range",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 1, LeaveTime: model.NewPointer(int64(10)), UserId: "test", UserEmail: "test", Username: "test"},
+				{JoinTime: 250, LeaveTime: model.NewPointer(int64(260)), UserId: "test", UserEmail: "test", Username: "test"},
+				{JoinTime: 300, UserId: "test", UserEmail: "test", Username: "test"},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  0,
+			expectedLeaves: 0,
+		},
+		{
+			name:    "join-and-leave-during-the-range",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 100, LeaveTime: model.NewPointer(int64(150)), UserId: "test", UserEmail: "test", Username: "test"},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  1,
+			expectedLeaves: 1,
+		},
+		{
+			name:    "join-during-and-leave-after-the-range",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 150, LeaveTime: model.NewPointer(int64(300)), UserId: "test", UserEmail: "test", Username: "test"},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  1,
+			expectedLeaves: 0,
+		},
+		{
+			name:    "join-before-and-leave-during-the-range",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 99, LeaveTime: model.NewPointer(int64(150)), UserId: "test", UserEmail: "test", Username: "test"},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  1,
+			expectedLeaves: 1,
+		},
+		{
+			name:    "join-before-and-leave-after-the-range",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 99, LeaveTime: model.NewPointer(int64(350)), UserId: "test", UserEmail: "test", Username: "test"},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  1,
+			expectedLeaves: 0,
+		},
+		{
+			// Users only observed in posts are synthesized as joins at StartTime.
+			name:           "implicit-joins",
+			channel:        channel,
+			membersHistory: nil,
+			usersInPosts: ChannelMembers{
+				"test1": ChannelMember{UserId: "test1", Email: "test1", Username: "test1"},
+				"test2": ChannelMember{UserId: "test2", Email: "test2", Username: "test2"},
+			},
+			expectedJoins:  2,
+			expectedLeaves: 0,
+		},
+		{
+			// test1 appears in both sources but is only counted once as a join
+			// from the explicit history; test2 is implicit, test3 explicit.
+			name:    "implicit-joins-with-explicit-joins",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 130, LeaveTime: model.NewPointer(int64(150)), UserId: "test1", UserEmail: "test1", Username: "test1"},
+				{JoinTime: 130, LeaveTime: model.NewPointer(int64(150)), UserId: "test3", UserEmail: "test3", Username: "test3"},
+			},
+			usersInPosts: ChannelMembers{
+				"test1": ChannelMember{UserId: "test1", Email: "test1", Username: "test1"},
+				"test2": ChannelMember{UserId: "test2", Email: "test2", Username: "test2"},
+			},
+			expectedJoins:  3,
+			expectedLeaves: 2,
+		},
+		{
+			name:    "join-leave-and-join-again",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 130, LeaveTime: model.NewPointer(int64(150)), UserId: "test1", UserEmail: "test1", Username: "test1"},
+				{JoinTime: 160, LeaveTime: model.NewPointer(int64(180)), UserId: "test1", UserEmail: "test1", Username: "test1"},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  2,
+			expectedLeaves: 2,
+		},
+		{
+			// Deactivated before the window (UserDeleteAt < StartTime): excluded.
+			name:    "deactivated-members-dont-show",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 130, LeaveTime: model.NewPointer(int64(150)), UserId: "test1", UserEmail: "test1", Username: "test1", UserDeleteAt: 50},
+				{JoinTime: 160, LeaveTime: model.NewPointer(int64(180)), UserId: "test1", UserEmail: "test1", Username: "test1", UserDeleteAt: 50},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  0,
+			expectedLeaves: 0,
+		},
+		{
+			// Deactivated inside the window: still included.
+			name:    "deactivated-members-show-if-deleted-after-latest-export",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 130, LeaveTime: model.NewPointer(int64(150)), UserId: "test1", UserEmail: "test1", Username: "test1", UserDeleteAt: 150},
+				{JoinTime: 160, LeaveTime: model.NewPointer(int64(180)), UserId: "test1", UserEmail: "test1", Username: "test1", UserDeleteAt: 150},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  2,
+			expectedLeaves: 2,
+		},
+		{
+			name:    "deactivated-members-show-and-dont-show",
+			channel: channel,
+			membersHistory: []*model.ChannelMemberHistoryResult{
+				{JoinTime: 130, LeaveTime: model.NewPointer(int64(150)), UserId: "test1", UserEmail: "test1", Username: "test1", UserDeleteAt: 50},
+				{JoinTime: 160, LeaveTime: model.NewPointer(int64(180)), UserId: "test1", UserEmail: "test1", Username: "test1", UserDeleteAt: 150},
+			},
+			usersInPosts:   nil,
+			expectedJoins:  1,
+			expectedLeaves: 1,
+		},
+	}
+
+	for _, tc := range tt {
+		t.Run(tc.name, func(t *testing.T) {
+			joins, leaves := GetJoinsAndLeavesForChannel(tc.channel.StartTime, tc.channel.EndTime, tc.membersHistory, tc.usersInPosts)
+			assert.Len(t, joins, tc.expectedJoins)
+			assert.Len(t, leaves, tc.expectedLeaves)
+		})
+	}
+}
diff --git a/server/enterprise/message_export/csv_export/csv_export.go b/server/enterprise/message_export/csv_export/csv_export.go
new file mode 100644
index 00000000000..1a319c1a849
--- /dev/null
+++ b/server/enterprise/message_export/csv_export/csv_export.go
@@ -0,0 +1,410 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package csv_export
+
+import (
+ "archive/zip"
+ "encoding/csv"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "path"
+ "sort"
+ "strconv"
+
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/common_export"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/store"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+)
+
+const (
+ EnterPostType = "enter"
+ LeavePostType = "leave"
+ PreviouslyJoinedPostType = "previously-joined"
+ CSVExportFilename = "csv_export.zip"
+ CSVWarningFilename = "warning.txt"
+)
+
+func CsvExport(rctx request.CTX, posts []*model.MessageExport, db store.Store, exportBackend filestore.FileBackend, fileAttachmentBackend filestore.FileBackend, exportDirectory string) (warningCount int64, appErr *model.AppError) {
+ dest, err := os.CreateTemp("", CSVExportFilename)
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.file.creation.appError", nil, "", 0).Wrap(err)
+ }
+ defer os.Remove(dest.Name())
+
+ zipFile := zip.NewWriter(dest)
+
+ csvFile, err := zipFile.Create("posts.csv")
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.zip.creation.appError", nil, "", 0).Wrap(err)
+ }
+ csvWriter := csv.NewWriter(csvFile)
+ err = csvWriter.Write([]string{
+ "Post Creation Time",
+ "Team Id",
+ "Team Name",
+ "Team Display Name",
+ "Channel Id",
+ "Channel Name",
+ "Channel Display Name",
+ "Channel Type",
+ "User Id",
+ "User Email",
+ "Username",
+ "Post Id",
+ "Edited By Post Id",
+ "Replied to Post Id",
+ "Post Message",
+ "Post Type",
+ "User Type",
+ "Previews Post Id",
+ })
+
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExportPost", "ent.compliance.csv.header.export.appError", nil, "", 0).Wrap(err)
+ }
+
+ metadata := common_export.Metadata{
+ Channels: map[string]common_export.MetadataChannel{},
+ MessagesCount: 0,
+ AttachmentsCount: 0,
+ StartTime: 0,
+ EndTime: 0,
+ }
+
+ membersByChannel := make(common_export.MembersByChannel)
+
+ for _, post := range posts {
+ attachments, err := getPostAttachments(db, post)
+ if err != nil {
+ return warningCount, err
+ }
+
+ if _, ok := membersByChannel[*post.ChannelId]; !ok {
+ membersByChannel[*post.ChannelId] = common_export.ChannelMembers{}
+ }
+
+ membersByChannel[*post.ChannelId][*post.UserId] = common_export.ChannelMember{
+ UserId: *post.UserId,
+ Username: *post.Username,
+ IsBot: post.IsBot,
+ Email: *post.UserEmail,
+ }
+
+ metadata.Update(post, len(attachments))
+ }
+
+ joinLeavePosts, appErr2 := getJoinLeavePosts(metadata.Channels, membersByChannel, db)
+ if appErr2 != nil {
+ return warningCount, appErr2
+ }
+
+ postsGenerator := mergePosts(joinLeavePosts, posts)
+
+ for post := postsGenerator(); post != nil; post = postsGenerator() {
+ if err = csvWriter.Write(postToRow(post, post.PostCreateAt, *post.PostMessage)); err != nil {
+ return warningCount, model.NewAppError("CsvExportPost", "ent.compliance.csv.post.export.appError", nil, "", 0).Wrap(err)
+ }
+
+ if post.PostDeleteAt != nil && *post.PostDeleteAt > 0 && post.PostProps != nil {
+ props := map[string]any{}
+ if json.Unmarshal([]byte(*post.PostProps), &props) == nil {
+ if _, ok := props[model.PostPropsDeleteBy]; ok {
+ if err = csvWriter.Write(postToRow(post, post.PostDeleteAt, "delete "+*post.PostMessage)); err != nil {
+ return warningCount, model.NewAppError("CsvExportPost", "ent.compliance.csv.post.export.appError", nil, "", 0).Wrap(err)
+ }
+ }
+ }
+ }
+
+ var attachments []*model.FileInfo
+ attachments, appErr = getPostAttachments(db, post)
+ if appErr != nil {
+ return warningCount, appErr
+ }
+
+ for _, attachment := range attachments {
+ if err = csvWriter.Write(attachmentToRow(post, attachment)); err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.attachment.export.appError", nil, "", 0).Wrap(err)
+ }
+ }
+ }
+
+ csvWriter.Flush()
+
+ var missingFiles []string
+ for _, post := range posts {
+ attachments, err := getPostAttachments(db, post)
+ if err != nil {
+ return warningCount, err
+ }
+
+ for _, attachment := range attachments {
+ var attachmentSrc io.ReadCloser
+ attachmentSrc, nErr := fileAttachmentBackend.Reader(attachment.Path)
+ if nErr != nil {
+ missingFiles = append(missingFiles, "Warning:"+common_export.MissingFileMessage+" - Post: "+*post.PostId+" - "+attachment.Path)
+ rctx.Logger().Warn(common_export.MissingFileMessage, mlog.String("PostId", *post.PostId), mlog.String("FileName", attachment.Path))
+ continue
+ }
+ defer attachmentSrc.Close()
+
+ attachmentDst, err := zipFile.Create(path.Join("files", *post.PostId, fmt.Sprintf("%s-%s", attachment.Id, path.Base(attachment.Path))))
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.attachment.copy.appError", nil, "", 0).Wrap(err)
+ }
+
+ _, err = io.Copy(attachmentDst, attachmentSrc)
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.attachment.copy.appError", nil, "", 0).Wrap(err)
+ }
+ }
+ }
+
+ warningCount = int64(len(missingFiles))
+ if warningCount > 0 {
+ metadataFile, _ := zipFile.Create(CSVWarningFilename)
+ for _, value := range missingFiles {
+ _, err = metadataFile.Write([]byte(value + "\n"))
+ if err != nil {
+ appErr = model.NewAppError("CsvExport", "ent.compliance.csv.warning.appError", nil, "", 0).Wrap(err)
+ return warningCount, appErr
+ }
+ }
+ }
+
+ metadataFile, err := zipFile.Create("metadata.json")
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.metadata.json.zipfile.appError", nil, "", 0).Wrap(err)
+ }
+ data, err := json.MarshalIndent(metadata, "", " ")
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.metadata.json.marshalling.appError", nil, "", 0).Wrap(err)
+ }
+ _, err = metadataFile.Write(data)
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.metadata.export.appError", nil, "", 0).Wrap(err)
+ }
+ err = zipFile.Close()
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.metadata.close.appError", nil, "", 0).Wrap(err)
+ }
+
+ _, err = dest.Seek(0, 0)
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.seek.appError", nil, "", 0).Wrap(err)
+ }
+ // Try to write the file without a timeout due to the potential size of the file.
+ _, err = filestore.TryWriteFileContext(rctx.Context(), exportBackend, dest, path.Join(exportDirectory, CSVExportFilename))
+ if err != nil {
+ return warningCount, model.NewAppError("CsvExport", "ent.compliance.csv.write_file.appError", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+ return warningCount, appErr
+}
+
+func mergePosts(left []*model.MessageExport, right []*model.MessageExport) func() *model.MessageExport {
+ leftCursor := 0
+ rightCursor := 0
+ return func() *model.MessageExport {
+ if leftCursor >= len(left) && rightCursor >= len(right) {
+ return nil
+ }
+
+ if leftCursor >= len(left) {
+ rightCursor += 1
+ return right[rightCursor-1]
+ }
+
+ if rightCursor >= len(right) {
+ leftCursor += 1
+ return left[leftCursor-1]
+ }
+
+ if *left[leftCursor].PostCreateAt <= *right[rightCursor].PostCreateAt {
+ leftCursor += 1
+ return left[leftCursor-1]
+ }
+
+ rightCursor += 1
+ return right[rightCursor-1]
+ }
+}
+
+func getJoinLeavePosts(channels map[string]common_export.MetadataChannel, membersByChannel common_export.MembersByChannel, db store.Store) ([]*model.MessageExport, *model.AppError) {
+ joinLeavePosts := []*model.MessageExport{}
+ for _, channel := range channels {
+ channelMembersHistory, err := db.ChannelMemberHistory().GetUsersInChannelDuring(channel.StartTime, channel.EndTime, channel.ChannelId)
+ if err != nil {
+ return nil, model.NewAppError("getJoinLeavePosts", "ent.get_users_in_channel_during", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+
+ joins, leaves := common_export.GetJoinsAndLeavesForChannel(channel.StartTime, channel.EndTime, channelMembersHistory, membersByChannel[channel.ChannelId])
+
+ for _, join := range joins {
+ enterMessage := fmt.Sprintf("User %s (%s) joined the channel", join.Username, join.Email)
+ enterPostType := EnterPostType
+ createAt := model.NewPointer(join.Datetime)
+ channelCopy := channel
+ if join.Datetime <= channel.StartTime {
+ enterPostType = PreviouslyJoinedPostType
+ enterMessage = fmt.Sprintf("User %s (%s) was already in the channel", join.Username, join.Email)
+ createAt = model.NewPointer(channel.StartTime)
+ }
+ joinLeavePosts = append(
+ joinLeavePosts,
+ &model.MessageExport{
+ TeamId: channel.TeamId,
+ TeamName: channel.TeamName,
+ TeamDisplayName: channel.TeamDisplayName,
+
+ ChannelId: &channelCopy.ChannelId,
+ ChannelName: &channelCopy.ChannelName,
+ ChannelDisplayName: &channelCopy.ChannelDisplayName,
+ ChannelType: &channelCopy.ChannelType,
+
+ UserId: model.NewPointer(join.UserId),
+ UserEmail: model.NewPointer(join.Email),
+ Username: model.NewPointer(join.Username),
+ IsBot: join.IsBot,
+
+ PostId: model.NewPointer(""),
+ PostCreateAt: createAt,
+ PostMessage: &enterMessage,
+ PostType: &enterPostType,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: []string{},
+ },
+ )
+ }
+ for _, leave := range leaves {
+ leaveMessage := fmt.Sprintf("User %s (%s) leaved the channel", leave.Username, leave.Email)
+ leavePostType := LeavePostType
+ channelCopy := channel
+
+ joinLeavePosts = append(
+ joinLeavePosts,
+ &model.MessageExport{
+ TeamId: channel.TeamId,
+ TeamName: channel.TeamName,
+ TeamDisplayName: channel.TeamDisplayName,
+
+ ChannelId: &channelCopy.ChannelId,
+ ChannelName: &channelCopy.ChannelName,
+ ChannelDisplayName: &channelCopy.ChannelDisplayName,
+ ChannelType: &channelCopy.ChannelType,
+
+ UserId: model.NewPointer(leave.UserId),
+ UserEmail: model.NewPointer(leave.Email),
+ Username: model.NewPointer(leave.Username),
+ IsBot: leave.IsBot,
+
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(leave.Datetime),
+ PostMessage: &leaveMessage,
+ PostType: &leavePostType,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: []string{},
+ },
+ )
+ }
+ }
+
+ sort.Slice(joinLeavePosts, func(i, j int) bool {
+ return *joinLeavePosts[i].PostCreateAt < *joinLeavePosts[j].PostCreateAt
+ })
+ return joinLeavePosts, nil
+}
+
+func getPostAttachments(db store.Store, post *model.MessageExport) ([]*model.FileInfo, *model.AppError) {
+ // if the post references any files, look up their FileInfo records so they can be included in the export.
+ if len(post.PostFileIds) == 0 {
+ return []*model.FileInfo{}, nil
+ }
+
+ attachments, err := db.FileInfo().GetForPost(*post.PostId, true, true, false)
+ if err != nil {
+ return nil, model.NewAppError("getPostAttachments", "ent.message_export.csv_export.get_attachment_error", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+ return attachments, nil
+}
+
+func postToRow(post *model.MessageExport, createTime *int64, message string) []string {
+ teamId := ""
+ teamName := ""
+ teamDisplayName := ""
+ if post.TeamId != nil {
+ teamId = *post.TeamId
+ }
+ if post.TeamName != nil {
+ teamName = *post.TeamName
+ }
+ if post.TeamDisplayName != nil {
+ teamDisplayName = *post.TeamDisplayName
+ }
+ postType := "message"
+ if post.PostType != nil && *post.PostType != "" {
+ postType = *post.PostType
+ }
+ postRootId := ""
+ if post.PostRootId != nil {
+ postRootId = *post.PostRootId
+ }
+ userType := "user"
+ if post.IsBot {
+ userType = "bot"
+ }
+
+ return []string{
+ strconv.FormatInt(*createTime, 10),
+ teamId,
+ teamName,
+ teamDisplayName,
+ *post.ChannelId,
+ *post.ChannelName,
+ *post.ChannelDisplayName,
+ common_export.ChannelTypeDisplayName(*post.ChannelType),
+ *post.UserId,
+ *post.UserEmail,
+ *post.Username,
+ *post.PostId,
+ *post.PostOriginalId,
+ postRootId,
+ message,
+ postType,
+ userType,
+ post.PreviewID(),
+ }
+}
+
+func attachmentToRow(post *model.MessageExport, attachment *model.FileInfo) []string {
+ row := postToRow(post, post.PostCreateAt, *post.PostMessage)
+
+ attachmentEntry := fmt.Sprintf("%s (files/%s/%s-%s)", attachment.Name, *post.PostId, attachment.Id, path.Base(attachment.Path))
+ attachmentMessage := "attachment"
+ userType := row[len(row)-2]
+
+ if attachment.DeleteAt > 0 && post.PostDeleteAt != nil {
+ deleteRow := postToRow(post, post.PostDeleteAt, *post.PostMessage)
+ row = append(
+ deleteRow[:len(deleteRow)-4],
+ attachmentEntry,
+ "deleted "+attachmentMessage,
+ userType,
+ )
+ } else {
+ row = append(
+ row[:len(row)-4],
+ attachmentEntry,
+ attachmentMessage,
+ userType,
+ )
+ }
+ return row
+}
diff --git a/server/enterprise/message_export/csv_export/csv_export_test.go b/server/enterprise/message_export/csv_export/csv_export_test.go
new file mode 100644
index 00000000000..88359252358
--- /dev/null
+++ b/server/enterprise/message_export/csv_export/csv_export_test.go
@@ -0,0 +1,1099 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package csv_export
+
+import (
+ "archive/zip"
+ "bytes"
+ "io"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+ "github.com/stretchr/testify/require"
+
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/common_export"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/store/storetest"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+)
+
+func TestMergePosts(t *testing.T) {
+ chanTypeDirect := model.ChannelTypeDirect
+ // fixture posts sharing one channel, with ascending create-times 1..4 plus a duplicate timestamp at 2
+ post1 := &model.MessageExport{
+ ChannelId: model.NewPointer("Test"),
+ ChannelDisplayName: model.NewPointer("Test"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("Some message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("test"),
+ Username: model.NewPointer("test"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post2 := &model.MessageExport{
+ ChannelId: model.NewPointer("Test"),
+ ChannelDisplayName: model.NewPointer("Test"),
+ PostCreateAt: model.NewPointer(int64(2)),
+ PostMessage: model.NewPointer("Some message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("test"),
+ Username: model.NewPointer("test"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post3 := &model.MessageExport{
+ ChannelId: model.NewPointer("Test"),
+ ChannelDisplayName: model.NewPointer("Test"),
+ PostCreateAt: model.NewPointer(int64(3)),
+ PostMessage: model.NewPointer("Some message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("test"),
+ Username: model.NewPointer("test"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post4 := &model.MessageExport{
+ ChannelId: model.NewPointer("Test"),
+ ChannelDisplayName: model.NewPointer("Test"),
+ PostCreateAt: model.NewPointer(int64(4)),
+ PostMessage: model.NewPointer("Some message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("test"),
+ Username: model.NewPointer("test"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post2other := &model.MessageExport{
+ ChannelId: model.NewPointer("Test"),
+ ChannelDisplayName: model.NewPointer("Test"),
+ PostCreateAt: model.NewPointer(int64(2)),
+ PostMessage: model.NewPointer("Some message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("test"),
+ Username: model.NewPointer("test"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ var mergetests = []struct {
+ name string
+ in1 []*model.MessageExport
+ in2 []*model.MessageExport
+ out []*model.MessageExport
+ }{
+ {
+ "merge all",
+ []*model.MessageExport{post1, post2, post3, post4},
+ []*model.MessageExport{post1, post2, post3, post4},
+ []*model.MessageExport{post1, post1, post2, post2, post3, post3, post4, post4},
+ },
+ {
+ "split and merge 1",
+ []*model.MessageExport{post1, post3},
+ []*model.MessageExport{post2, post4},
+ []*model.MessageExport{post1, post2, post3, post4},
+ },
+ {
+ "split and merge 2",
+ []*model.MessageExport{post1, post4},
+ []*model.MessageExport{post2, post3},
+ []*model.MessageExport{post1, post2, post3, post4},
+ },
+ {
+ "ordered 1",
+ []*model.MessageExport{post1, post2},
+ []*model.MessageExport{post1, post2other},
+ []*model.MessageExport{post1, post1, post2, post2other},
+ },
+ {
+ "ordered 2",
+ []*model.MessageExport{post1, post2other},
+ []*model.MessageExport{post1, post2},
+ []*model.MessageExport{post1, post1, post2other, post2},
+ },
+ }
+
+ for _, tt := range mergetests {
+ t.Run(tt.name, func(t *testing.T) {
+ next := mergePosts(tt.in1, tt.in2)
+ posts := []*model.MessageExport{}
+ for post := next(); post != nil; post = next() {
+ posts = append(posts, post)
+ }
+ assert.Equal(t, tt.out, posts)
+ })
+ }
+}
+
+func TestPostToRow(t *testing.T) {
+ chanTypeDirect := model.ChannelTypeDirect
+ // fixture posts covering the optional-field variants: full data, missing team, custom post type, bot author, permalink preview
+ post := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post_without_team := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post_with_other_type := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostType: model.NewPointer("other"),
+ }
+
+ post_with_other_type_bot := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostType: model.NewPointer("other"),
+ IsBot: true,
+ }
+
+ post_with_permalink_preview := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostProps: model.NewPointer(`{"previewed_post":"n4w39mc1ff8y5fite4b8hacy1w"}`),
+ }
+ torowtests := []struct {
+ name string
+ in *model.MessageExport
+ out []string
+ }{
+ {
+ "simple row",
+ post,
+ []string{"1", "team-id", "team-name", "team-display-name", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "message", "message", "user", ""},
+ },
+ {
+ "without team data",
+ post_without_team,
+ []string{"1", "", "", "", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "message", "message", "user", ""},
+ },
+ {
+ "with special post type",
+ post_with_other_type,
+ []string{"1", "team-id", "team-name", "team-display-name", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "message", "other", "user", ""},
+ },
+ {
+ "with special post type from bot",
+ post_with_other_type_bot,
+ []string{"1", "team-id", "team-name", "team-display-name", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "message", "other", "bot", ""},
+ },
+ {
+ "with permalink preview",
+ post_with_permalink_preview,
+ []string{"1", "team-id", "team-name", "team-display-name", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "message", "message", "user", "n4w39mc1ff8y5fite4b8hacy1w"},
+ },
+ }
+
+ for _, tt := range torowtests {
+ t.Run(tt.name, func(t *testing.T) {
+ assert.Equal(t, tt.out, postToRow(tt.in, tt.in.PostCreateAt, *tt.in.PostMessage))
+ })
+ }
+}
+
+func TestAttachmentToRow(t *testing.T) {
+ chanTypeDirect := model.ChannelTypeDirect
+ post := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ post_deleted := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostDeleteAt: model.NewPointer(int64(10)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ }
+
+ file := &model.FileInfo{
+ Name: "test1",
+ Id: "12345",
+ Path: "filename.txt",
+ }
+
+ file_deleted := &model.FileInfo{
+ Name: "test2",
+ Id: "12346",
+ Path: "filename.txt",
+ DeleteAt: 10,
+ }
+
+ torowtests := []struct {
+ name string
+ post *model.MessageExport
+ attachment *model.FileInfo
+ out []string
+ }{
+ {
+ "simple attachment",
+ post,
+ file,
+ []string{"1", "team-id", "team-name", "team-display-name", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "test1 (files/post-id/12345-filename.txt)", "attachment", "user"},
+ },
+ {
+ "simple deleted attachment",
+ post_deleted,
+ file_deleted,
+ []string{"10", "team-id", "team-name", "team-display-name", "channel-id", "channel-name", "channel-display-name", "direct", "user-id", "test@test.com", "username", "post-id", "post-original-id", "post-root-id", "test2 (files/post-id/12346-filename.txt)", "deleted attachment", "user"},
+ },
+ }
+
+ for _, tt := range torowtests {
+ t.Run(tt.name, func(t *testing.T) {
+ assert.Equal(t, tt.out, attachmentToRow(tt.post, tt.attachment))
+ })
+ }
+}
+
+func TestGetPostAttachments(t *testing.T) {
+ chanTypeDirect := model.ChannelTypeDirect
+ post := &model.MessageExport{
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ UserEmail: model.NewPointer("test@test.com"),
+ UserId: model.NewPointer("user-id"),
+ Username: model.NewPointer("username"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ }
+
+ mockStore := &storetest.Store{}
+ defer mockStore.AssertExpectations(t)
+
+ files, appErr := getPostAttachments(mockStore, post)
+ assert.Nil(t, appErr)
+ assert.Empty(t, files)
+
+ post.PostFileIds = []string{"1", "2"}
+
+ mockStore.FileInfoStore.On("GetForPost", *post.PostId, true, true, false).Return([]*model.FileInfo{{Name: "test"}, {Name: "test2"}}, nil)
+
+ files, appErr = getPostAttachments(mockStore, post)
+ assert.Nil(t, appErr)
+ assert.Len(t, files, 2)
+
+ post.PostId = model.NewPointer("post-id-2")
+
+ mockStore.FileInfoStore.On("GetForPost", *post.PostId, true, true, false).Return(nil, model.NewAppError("Test", "test", nil, "", 400))
+
+ files, appErr = getPostAttachments(mockStore, post)
+ assert.NotNil(t, appErr)
+ assert.Nil(t, files)
+}
+
+func TestGetJoinLeavePosts(t *testing.T) {
+ mockStore := &storetest.Store{}
+ defer mockStore.AssertExpectations(t)
+
+ channels := map[string]common_export.MetadataChannel{"bad-request": {StartTime: 1, EndTime: 2, ChannelId: "bad-request"}}
+
+ mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(2), "bad-request").Return(nil, model.NewAppError("Test", "test", nil, "", 400))
+
+ _, appErr := getJoinLeavePosts(channels, nil, mockStore)
+ assert.NotNil(t, appErr)
+
+ channels = map[string]common_export.MetadataChannel{
+ "good-request-1": {StartTime: 1, EndTime: 7, ChannelId: "good-request-1", TeamId: model.NewPointer("test1"), TeamName: model.NewPointer("test1"), TeamDisplayName: model.NewPointer("test1"), ChannelName: "test1", ChannelDisplayName: "test1", ChannelType: "O"},
+ "good-request-2": {StartTime: 2, EndTime: 7, ChannelId: "good-request-2", TeamId: model.NewPointer("test2"), TeamName: model.NewPointer("test2"), TeamDisplayName: model.NewPointer("test2"), ChannelName: "test2", ChannelDisplayName: "test2", ChannelType: "P"},
+ }
+
+ mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", channels["good-request-1"].StartTime, channels["good-request-1"].EndTime, "good-request-1").Return(
+ []*model.ChannelMemberHistoryResult{
+ {JoinTime: 1, UserId: "test1", UserEmail: "test1", Username: "test1"},
+ {JoinTime: 2, LeaveTime: model.NewPointer(int64(3)), UserId: "test2", UserEmail: "test2", Username: "test2"},
+ {JoinTime: 3, UserId: "test3", UserEmail: "test3", Username: "test3"},
+ },
+ nil,
+ )
+
+ mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", channels["good-request-2"].StartTime, channels["good-request-2"].EndTime, "good-request-2").Return(
+ []*model.ChannelMemberHistoryResult{
+ {JoinTime: 4, UserId: "test4", UserEmail: "test4", Username: "test4"},
+ {JoinTime: 5, LeaveTime: model.NewPointer(int64(6)), UserId: "test5", UserEmail: "test5", Username: "test5"},
+ {JoinTime: 6, UserId: "test6", UserEmail: "test6", Username: "test6"},
+ },
+ nil,
+ )
+
+ messages, appErr := getJoinLeavePosts(channels, nil, mockStore)
+ assert.Nil(t, appErr)
+ assert.Len(t, messages, 8)
+ chanTypeOpen := model.ChannelTypeOpen
+ chanTypePr := model.ChannelTypePrivate
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test1"),
+ TeamName: model.NewPointer("test1"),
+ TeamDisplayName: model.NewPointer("test1"),
+ ChannelId: model.NewPointer("good-request-1"),
+ ChannelName: model.NewPointer("test1"),
+ ChannelDisplayName: model.NewPointer("test1"),
+ ChannelType: &chanTypeOpen,
+ UserId: model.NewPointer("test1"),
+ UserEmail: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("User test1 (test1) was already in the channel"),
+ PostType: model.NewPointer("previously-joined"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[0])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test1"),
+ TeamName: model.NewPointer("test1"),
+ TeamDisplayName: model.NewPointer("test1"),
+ ChannelId: model.NewPointer("good-request-1"),
+ ChannelName: model.NewPointer("test1"),
+ ChannelDisplayName: model.NewPointer("test1"),
+ ChannelType: &chanTypeOpen,
+ UserId: model.NewPointer("test2"),
+ UserEmail: model.NewPointer("test2"),
+ Username: model.NewPointer("test2"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(2)),
+ PostMessage: model.NewPointer("User test2 (test2) joined the channel"),
+ PostType: model.NewPointer("enter"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[1])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test1"),
+ TeamName: model.NewPointer("test1"),
+ TeamDisplayName: model.NewPointer("test1"),
+ ChannelId: model.NewPointer("good-request-1"),
+ ChannelName: model.NewPointer("test1"),
+ ChannelDisplayName: model.NewPointer("test1"),
+ ChannelType: &chanTypeOpen,
+ UserId: model.NewPointer("test3"),
+ UserEmail: model.NewPointer("test3"),
+ Username: model.NewPointer("test3"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(3)),
+ PostMessage: model.NewPointer("User test3 (test3) joined the channel"),
+ PostType: model.NewPointer("enter"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[2])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test1"),
+ TeamName: model.NewPointer("test1"),
+ TeamDisplayName: model.NewPointer("test1"),
+ ChannelId: model.NewPointer("good-request-1"),
+ ChannelName: model.NewPointer("test1"),
+ ChannelDisplayName: model.NewPointer("test1"),
+ ChannelType: &chanTypeOpen,
+ UserId: model.NewPointer("test2"),
+ UserEmail: model.NewPointer("test2"),
+ Username: model.NewPointer("test2"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(3)),
+ PostMessage: model.NewPointer("User test2 (test2) leaved the channel"),
+ PostType: model.NewPointer("leave"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[3])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test2"),
+ TeamName: model.NewPointer("test2"),
+ TeamDisplayName: model.NewPointer("test2"),
+ ChannelId: model.NewPointer("good-request-2"),
+ ChannelName: model.NewPointer("test2"),
+ ChannelDisplayName: model.NewPointer("test2"),
+ ChannelType: &chanTypePr,
+ UserId: model.NewPointer("test4"),
+ UserEmail: model.NewPointer("test4"),
+ Username: model.NewPointer("test4"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(4)),
+ PostMessage: model.NewPointer("User test4 (test4) joined the channel"),
+ PostType: model.NewPointer("enter"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[4])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test2"),
+ TeamName: model.NewPointer("test2"),
+ TeamDisplayName: model.NewPointer("test2"),
+ ChannelId: model.NewPointer("good-request-2"),
+ ChannelName: model.NewPointer("test2"),
+ ChannelDisplayName: model.NewPointer("test2"),
+ ChannelType: &chanTypePr,
+ UserId: model.NewPointer("test5"),
+ UserEmail: model.NewPointer("test5"),
+ Username: model.NewPointer("test5"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(5)),
+ PostMessage: model.NewPointer("User test5 (test5) joined the channel"),
+ PostType: model.NewPointer("enter"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[5])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test2"),
+ TeamName: model.NewPointer("test2"),
+ TeamDisplayName: model.NewPointer("test2"),
+ ChannelId: model.NewPointer("good-request-2"),
+ ChannelName: model.NewPointer("test2"),
+ ChannelDisplayName: model.NewPointer("test2"),
+ ChannelType: &chanTypePr,
+ UserId: model.NewPointer("test6"),
+ UserEmail: model.NewPointer("test6"),
+ Username: model.NewPointer("test6"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(6)),
+ PostMessage: model.NewPointer("User test6 (test6) joined the channel"),
+ PostType: model.NewPointer("enter"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[6])
+ assert.Equal(t, &model.MessageExport{
+ TeamId: model.NewPointer("test2"),
+ TeamName: model.NewPointer("test2"),
+ TeamDisplayName: model.NewPointer("test2"),
+ ChannelId: model.NewPointer("good-request-2"),
+ ChannelName: model.NewPointer("test2"),
+ ChannelDisplayName: model.NewPointer("test2"),
+ ChannelType: &chanTypePr,
+ UserId: model.NewPointer("test5"),
+ UserEmail: model.NewPointer("test5"),
+ Username: model.NewPointer("test5"),
+ IsBot: false,
+ PostId: model.NewPointer(""),
+ PostCreateAt: model.NewPointer(int64(6)),
+ PostMessage: model.NewPointer("User test5 (test5) leaved the channel"),
+ PostType: model.NewPointer("leave"),
+ PostRootId: nil,
+ PostProps: nil,
+ PostOriginalId: model.NewPointer(""),
+ PostFileIds: model.StringArray{},
+ }, messages[7])
+}
+
+// TestCsvExport exercises the CSV export in both supported filestore
+// topologies: a single backend shared by exports and attachments, and a
+// dedicated export backend separate from the attachment backend.
+func TestCsvExport(t *testing.T) {
+	t.Run("no dedicated export filestore", func(t *testing.T) {
+		exportTempDir, err := os.MkdirTemp("", "")
+		require.NoError(t, err)
+		t.Cleanup(func() {
+			err = os.RemoveAll(exportTempDir)
+			assert.NoError(t, err)
+		})
+
+		fileBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  exportTempDir,
+		})
+		assert.NoError(t, err)
+
+		// Same backend serves both the export output and the attachments.
+		runTestCsvExportDedicatedExportFilestore(t, fileBackend, fileBackend)
+	})
+
+	t.Run("using dedicated export filestore", func(t *testing.T) {
+		exportTempDir, err := os.MkdirTemp("", "")
+		require.NoError(t, err)
+		t.Cleanup(func() {
+			err = os.RemoveAll(exportTempDir)
+			assert.NoError(t, err)
+		})
+
+		exportBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  exportTempDir,
+		})
+		assert.NoError(t, err)
+
+		attachmentTempDir, err := os.MkdirTemp("", "")
+		require.NoError(t, err)
+		t.Cleanup(func() {
+			err = os.RemoveAll(attachmentTempDir)
+			assert.NoError(t, err)
+		})
+
+		attachmentBackend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
+			DriverName: model.ImageDriverLocal,
+			Directory:  attachmentTempDir,
+		})
+		// Fix: this error was previously unchecked; a failed backend creation
+		// would have surfaced as a confusing nil-backend failure below.
+		require.NoError(t, err)
+
+		runTestCsvExportDedicatedExportFilestore(t, exportBackend, attachmentBackend)
+	})
+}
+
+// runTestCsvExportDedicatedExportFilestore runs the table-driven CSV export
+// tests, writing the export zip through exportBackend while attachment files
+// are read from attachmentBackend (the two may be the same store).
+func runTestCsvExportDedicatedExportFilestore(t *testing.T, exportBackend filestore.FileBackend, attachmentBackend filestore.FileBackend) {
+	rctx := request.TestContext(t)
+
+	header := "Post Creation Time,Team Id,Team Name,Team Display Name,Channel Id,Channel Name,Channel Display Name,Channel Type,User Id,User Email,Username,Post Id,Edited By Post Id,Replied to Post Id,Post Message,Post Type,User Type,Previews Post Id\n"
+
+	chanTypeDirect := model.ChannelTypeDirect
+	csvExportTests := []struct {
+		name             string
+		cmhs             map[string][]*model.ChannelMemberHistoryResult
+		posts            []*model.MessageExport
+		attachments      map[string][]*model.FileInfo
+		expectedPosts    string
+		expectedMetadata string
+		expectedFiles    int
+	}{
+		{
+			name:             "empty",
+			cmhs:             map[string][]*model.ChannelMemberHistoryResult{},
+			posts:            []*model.MessageExport{},
+			attachments:      map[string][]*model.FileInfo{},
+			expectedPosts:    header,
+			expectedMetadata: "{\n \"Channels\": {},\n \"MessagesCount\": 0,\n \"AttachmentsCount\": 0,\n \"StartTime\": 0,\n \"EndTime\": 0\n}",
+			expectedFiles:    2,
+		},
+		{
+			name: "posts",
+			cmhs: map[string][]*model.ChannelMemberHistoryResult{
+				"channel-id": {
+					{
+						JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+					},
+					{
+						JoinTime: 8, UserId: "test2", UserEmail: "test2", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+					},
+					{
+						JoinTime: 400, UserId: "test3", UserEmail: "test3", Username: "test3",
+					},
+				},
+			},
+			posts: []*model.MessageExport{
+				{
+					PostId:             model.NewPointer("post-id"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(1)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{},
+				},
+				{
+					PostId:             model.NewPointer("post-id"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					PostRootId:         model.NewPointer("post-root-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(100)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{},
+				},
+				{
+					PostId:             model.NewPointer("post-id"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					PostRootId:         model.NewPointer("post-root-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(100)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{},
+					PostProps:          model.NewPointer(`{"previewed_post":"o4w39mc1ff8y5fite4b8hacy1x"}`),
+				},
+			},
+			attachments: map[string][]*model.FileInfo{},
+			expectedPosts: strings.Join([]string{
+				header,
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test,test,test,,,,User test (test) was already in the channel,previously-joined,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,,,,User username (test@test.com) was already in the channel,previously-joined,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id,post-original-id,,message,message,user,\n",
+				"8,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test2,test2,test2,,,,User test2 (test2) joined the channel,enter,user,\n",
+				"80,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test2,test2,test2,,,,User test2 (test2) leaved the channel,leave,user,\n",
+				"100,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id,post-original-id,post-root-id,message,message,user,\n",
+				"100,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id,post-original-id,post-root-id,message,message,user,o4w39mc1ff8y5fite4b8hacy1x\n",
+			}, ""),
+			expectedMetadata: "{\n \"Channels\": {\n \"channel-id\": {\n \"TeamId\": \"team-id\",\n \"TeamName\": \"team-name\",\n \"TeamDisplayName\": \"team-display-name\",\n \"ChannelId\": \"channel-id\",\n \"ChannelName\": \"channel-name\",\n \"ChannelDisplayName\": \"channel-display-name\",\n \"ChannelType\": \"D\",\n \"RoomId\": \"direct - channel-id\",\n \"StartTime\": 1,\n \"EndTime\": 100,\n \"MessagesCount\": 3,\n \"AttachmentsCount\": 0\n }\n },\n \"MessagesCount\": 3,\n \"AttachmentsCount\": 0,\n \"StartTime\": 1,\n \"EndTime\": 100\n}",
+			expectedFiles:    2,
+		},
+		{
+			name: "deleted post",
+			cmhs: map[string][]*model.ChannelMemberHistoryResult{
+				"channel-id": {
+					{
+						JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+					},
+				},
+			},
+			posts: []*model.MessageExport{
+				{
+					PostId:             model.NewPointer("post-id"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(1)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{},
+				},
+				{
+					PostId:             model.NewPointer("post-id"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(100)),
+					PostDeleteAt:       model.NewPointer(int64(101)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{},
+					PostProps:          model.NewPointer("{\"deleteBy\":\"fy8j97gwii84bk4zxprbpc9d9w\"}"),
+				},
+			},
+			attachments: map[string][]*model.FileInfo{},
+			expectedPosts: strings.Join([]string{
+				header,
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test,test,test,,,,User test (test) was already in the channel,previously-joined,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,,,,User username (test@test.com) was already in the channel,previously-joined,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id,post-original-id,,message,message,user,\n",
+				"100,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id,post-original-id,,message,message,user,\n",
+				"101,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id,post-original-id,,delete message,message,user,\n",
+			}, ""),
+			expectedMetadata: "{\n \"Channels\": {\n \"channel-id\": {\n \"TeamId\": \"team-id\",\n \"TeamName\": \"team-name\",\n \"TeamDisplayName\": \"team-display-name\",\n \"ChannelId\": \"channel-id\",\n \"ChannelName\": \"channel-name\",\n \"ChannelDisplayName\": \"channel-display-name\",\n \"ChannelType\": \"D\",\n \"RoomId\": \"direct - channel-id\",\n \"StartTime\": 1,\n \"EndTime\": 100,\n \"MessagesCount\": 2,\n \"AttachmentsCount\": 0\n }\n },\n \"MessagesCount\": 2,\n \"AttachmentsCount\": 0,\n \"StartTime\": 1,\n \"EndTime\": 100\n}",
+			expectedFiles:    2,
+		},
+		{
+			name: "posts with attachments",
+			cmhs: map[string][]*model.ChannelMemberHistoryResult{
+				"channel-id": {
+					{
+						JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+					},
+					{
+						JoinTime: 8, UserId: "test2", UserEmail: "test2", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+					},
+					{
+						JoinTime: 400, UserId: "test3", UserEmail: "test3", Username: "test3",
+					},
+				},
+			},
+			posts: []*model.MessageExport{
+				{
+					PostId:             model.NewPointer("post-id-1"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(1)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{"test1"},
+				},
+				{
+					PostId:             model.NewPointer("post-id-3"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(2)),
+					PostDeleteAt:       model.NewPointer(int64(3)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{"test2"},
+				},
+				{
+					PostId:             model.NewPointer("post-id-2"),
+					PostOriginalId:     model.NewPointer("post-original-id"),
+					PostRootId:         model.NewPointer("post-id-1"),
+					TeamId:             model.NewPointer("team-id"),
+					TeamName:           model.NewPointer("team-name"),
+					TeamDisplayName:    model.NewPointer("team-display-name"),
+					ChannelId:          model.NewPointer("channel-id"),
+					ChannelName:        model.NewPointer("channel-name"),
+					ChannelDisplayName: model.NewPointer("channel-display-name"),
+					PostCreateAt:       model.NewPointer(int64(100)),
+					PostMessage:        model.NewPointer("message"),
+					UserEmail:          model.NewPointer("test@test.com"),
+					UserId:             model.NewPointer("user-id"),
+					Username:           model.NewPointer("username"),
+					ChannelType:        &chanTypeDirect,
+					PostFileIds:        []string{},
+				},
+			},
+			attachments: map[string][]*model.FileInfo{
+				"post-id-1": {
+					{
+						Name: "test1",
+						Id:   "test1",
+						Path: "test1",
+					},
+				},
+				"post-id-3": {
+					{
+						Name:     "test2",
+						Id:       "test2",
+						Path:     "test2",
+						DeleteAt: 3,
+					},
+				},
+			},
+			expectedPosts: strings.Join([]string{
+				header,
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test,test,test,,,,User test (test) was already in the channel,previously-joined,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,,,,User username (test@test.com) was already in the channel,previously-joined,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id-1,post-original-id,,message,message,user,\n",
+				"1,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id-1,post-original-id,,test1 (files/post-id-1/test1-test1),attachment,user\n",
+				"2,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id-3,post-original-id,,message,message,user,\n",
+				"3,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id-3,post-original-id,,test2 (files/post-id-3/test2-test2),deleted attachment,user\n",
+				"8,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test2,test2,test2,,,,User test2 (test2) joined the channel,enter,user,\n",
+				"80,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,test2,test2,test2,,,,User test2 (test2) leaved the channel,leave,user,\n",
+				"100,team-id,team-name,team-display-name,channel-id,channel-name,channel-display-name,direct,user-id,test@test.com,username,post-id-2,post-original-id,post-id-1,message,message,user,\n",
+			}, ""),
+			expectedMetadata: "{\n \"Channels\": {\n \"channel-id\": {\n \"TeamId\": \"team-id\",\n \"TeamName\": \"team-name\",\n \"TeamDisplayName\": \"team-display-name\",\n \"ChannelId\": \"channel-id\",\n \"ChannelName\": \"channel-name\",\n \"ChannelDisplayName\": \"channel-display-name\",\n \"ChannelType\": \"D\",\n \"RoomId\": \"direct - channel-id\",\n \"StartTime\": 1,\n \"EndTime\": 100,\n \"MessagesCount\": 3,\n \"AttachmentsCount\": 2\n }\n },\n \"MessagesCount\": 3,\n \"AttachmentsCount\": 2,\n \"StartTime\": 1,\n \"EndTime\": 100\n}",
+			expectedFiles:    4,
+		},
+	}
+
+	for _, tt := range csvExportTests {
+		t.Run(tt.name, func(t *testing.T) {
+			mockStore := &storetest.Store{}
+			defer mockStore.AssertExpectations(t)
+
+			if len(tt.attachments) > 0 {
+				for postID, attachments := range tt.attachments {
+					attachments := attachments // TODO: Remove once go1.22 is used
+					call := mockStore.FileInfoStore.On("GetForPost", postID, true, true, false).Times(3)
+					call.Run(func(args mock.Arguments) {
+						call.Return(tt.attachments[args.Get(0).(string)], nil)
+					})
+					// Write an empty placeholder file so the export can read it back.
+					_, err := attachmentBackend.WriteFile(bytes.NewReader([]byte{}), attachments[0].Path)
+					require.NoError(t, err)
+					t.Cleanup(func() {
+						err = attachmentBackend.RemoveFile(attachments[0].Path)
+						require.NoError(t, err)
+					})
+				}
+			}
+
+			if len(tt.cmhs) > 0 {
+				for channelID, cmhs := range tt.cmhs {
+					mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(100), channelID).Return(cmhs, nil)
+				}
+			}
+
+			warningCount, appErr := CsvExport(rctx, tt.posts, mockStore, exportBackend, attachmentBackend, "test")
+			assert.Nil(t, appErr)
+			assert.Equal(t, int64(0), warningCount)
+
+			zipBytes, err := exportBackend.ReadFile("test/csv_export.zip")
+			assert.NoError(t, err)
+			t.Cleanup(func() {
+				err = exportBackend.RemoveFile("test/csv_export.zip")
+				require.NoError(t, err)
+			})
+
+			zipReader, err := zip.NewReader(bytes.NewReader(zipBytes), int64(len(zipBytes)))
+			assert.NoError(t, err)
+			assert.Len(t, zipReader.File, tt.expectedFiles)
+
+			// Fix: the original deferred Close AND called Close explicitly,
+			// closing each zip entry twice; close exactly once and check it.
+			postsFile, err := zipReader.File[0].Open()
+			require.NoError(t, err)
+			postsFileData, err := io.ReadAll(postsFile)
+			assert.NoError(t, err)
+			require.NoError(t, postsFile.Close())
+
+			metadataFile, err := zipReader.File[len(zipReader.File)-1].Open()
+			require.NoError(t, err)
+			metadataFileData, err := io.ReadAll(metadataFile)
+			require.NoError(t, err)
+			require.NoError(t, metadataFile.Close())
+
+			assert.Equal(t, tt.expectedPosts, string(postsFileData))
+			assert.Equal(t, tt.expectedMetadata, string(metadataFileData))
+		})
+	}
+}
+
+// TestWriteExportWarnings runs a CSV export whose attachment records point at
+// files that were never written to the filestore; presumably each missing file
+// is counted as one warning (two attachments -> warningCount == 2).
+func TestWriteExportWarnings(t *testing.T) {
+	rctx := request.TestContext(t)
+
+	chanTypeDirect := model.ChannelTypeDirect
+	cmhs := map[string][]*model.ChannelMemberHistoryResult{
+		"channel-id": {
+			{
+				JoinTime: 0, UserId: "test", UserEmail: "test", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+			},
+			{
+				JoinTime: 8, UserId: "test2", UserEmail: "test2", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+			},
+			{
+				JoinTime: 400, UserId: "test3", UserEmail: "test3", Username: "test3",
+			},
+		},
+	}
+
+	posts := []*model.MessageExport{
+		{
+			PostId:             model.NewPointer("post-id-1"),
+			PostOriginalId:     model.NewPointer("post-original-id"),
+			TeamId:             model.NewPointer("team-id"),
+			TeamName:           model.NewPointer("team-name"),
+			TeamDisplayName:    model.NewPointer("team-display-name"),
+			ChannelId:          model.NewPointer("channel-id"),
+			ChannelName:        model.NewPointer("channel-name"),
+			ChannelDisplayName: model.NewPointer("channel-display-name"),
+			PostCreateAt:       model.NewPointer(int64(1)),
+			PostMessage:        model.NewPointer("message"),
+			UserEmail:          model.NewPointer("test@test.com"),
+			UserId:             model.NewPointer("user-id"),
+			Username:           model.NewPointer("username"),
+			ChannelType:        &chanTypeDirect,
+			PostFileIds:        []string{"post-id-1"},
+		},
+		{
+			PostId:             model.NewPointer("post-id-3"),
+			PostOriginalId:     model.NewPointer("post-original-id"),
+			TeamId:             model.NewPointer("team-id"),
+			TeamName:           model.NewPointer("team-name"),
+			TeamDisplayName:    model.NewPointer("team-display-name"),
+			ChannelId:          model.NewPointer("channel-id"),
+			ChannelName:        model.NewPointer("channel-name"),
+			ChannelDisplayName: model.NewPointer("channel-display-name"),
+			PostCreateAt:       model.NewPointer(int64(2)),
+			PostDeleteAt:       model.NewPointer(int64(3)),
+			PostMessage:        model.NewPointer("message"),
+			UserEmail:          model.NewPointer("test@test.com"),
+			UserId:             model.NewPointer("user-id"),
+			Username:           model.NewPointer("username"),
+			ChannelType:        &chanTypeDirect,
+			PostFileIds:        []string{"post-id-2"},
+		},
+	}
+
+	attachments := map[string][]*model.FileInfo{
+		"post-id-1": {
+			{
+				Name: "test1",
+				Id:   "test1",
+				Path: "test1",
+			},
+		},
+		"post-id-3": {
+			{
+				Name: "test2",
+				Id:   "test2",
+				Path: "test2",
+			},
+		},
+	}
+
+	tempDir, err := os.MkdirTemp("", "")
+	require.NoError(t, err)
+	t.Cleanup(func() {
+		err = os.RemoveAll(tempDir)
+		assert.NoError(t, err)
+	})
+
+	config := filestore.FileBackendSettings{
+		DriverName: model.ImageDriverLocal,
+		Directory:  tempDir,
+	}
+
+	fileBackend, err := filestore.NewFileBackend(config)
+	assert.NoError(t, err)
+
+	mockStore := &storetest.Store{}
+	defer mockStore.AssertExpectations(t)
+
+	// Unlike the happy-path test, the attachment files are never written to
+	// the backend, so reading them during export should produce warnings.
+	for postID := range attachments {
+		call := mockStore.FileInfoStore.On("GetForPost", postID, true, true, false).Times(3)
+		call.Run(func(args mock.Arguments) {
+			call.Return(attachments[args.Get(0).(string)], nil)
+		})
+	}
+
+	// Renamed loop variable: it previously shadowed the outer cmhs map.
+	for channelID, history := range cmhs {
+		mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(2), channelID).Return(history, nil)
+	}
+
+	warningCount, appErr := CsvExport(rctx, posts, mockStore, fileBackend, fileBackend, "test")
+	assert.Nil(t, appErr)
+	assert.Equal(t, int64(2), warningCount)
+
+	zipBytes, err := fileBackend.ReadFile("test/csv_export.zip")
+	assert.NoError(t, err)
+
+	t.Cleanup(func() {
+		err = fileBackend.RemoveFile("test/csv_export.zip")
+		assert.NoError(t, err)
+	})
+
+	zipReader, err := zip.NewReader(bytes.NewReader(zipBytes), int64(len(zipBytes)))
+	assert.NoError(t, err)
+	assert.Len(t, zipReader.File, 3)
+}
diff --git a/server/enterprise/message_export/global_relay_export/deliver.go b/server/enterprise/message_export/global_relay_export/deliver.go
new file mode 100644
index 00000000000..2da7dc77588
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/deliver.go
@@ -0,0 +1,110 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "archive/zip"
+ "context"
+ "io"
+ "net/http"
+ "net/mail"
+ "net/smtp"
+ "os"
+ "time"
+
+ "github.com/mattermost/mattermost/server/public/model"
+)
+
+// Deliver sends every email file contained in the export zip archive to the
+// Global Relay address configured in config, over SMTP. The connection is
+// torn down and re-established every MaxEmailsPerConnection messages.
+// Returns nil on success, or an AppError describing the first failure.
+func Deliver(export *os.File, config *model.Config) *model.AppError {
+	info, err := export.Stat()
+	if err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.unable_to_get_file_info.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	zipFile, err := zip.NewReader(export, info.Size())
+	if err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.unable_to_open_zip_file_data.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+
+	to := *config.MessageExportSettings.GlobalRelaySettings.EmailAddress
+	ctx := context.Background()
+	ctx, cancel := context.WithTimeout(ctx, time.Duration(*config.EmailSettings.SMTPServerTimeout)*time.Second)
+	defer cancel()
+
+	conn, err := connectToSMTPServer(ctx, config)
+	if err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.unable_to_connect_smtp_server.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	// conn is reassigned on reconnect below; the defer closes whichever
+	// connection is live when the function returns.
+	defer conn.Close()
+
+	mailsCount := 0
+	// Fix: the loop variable was named `mail`, shadowing the imported
+	// net/mail package; renamed to mailFile.
+	for _, mailFile := range zipFile.File {
+		from, appErr := getFrom(mailFile)
+		if appErr != nil {
+			return appErr
+		}
+		if appErr := deliverEmail(conn, mailFile, from, to); appErr != nil {
+			return appErr
+		}
+
+		mailsCount++
+		if mailsCount == MaxEmailsPerConnection {
+			mailsCount = 0
+			conn.Close()
+
+			// NOTE(review): the reconnect uses context.Background(), so it is
+			// not bound by SMTPServerTimeout like the initial dial — confirm
+			// this is intended.
+			var nErr error
+			conn, nErr = connectToSMTPServer(context.Background(), config)
+			if nErr != nil {
+				return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.unable_to_connect_smtp_server.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr)
+			}
+		}
+	}
+	return nil
+}
+
+// deliverEmail transmits one email file from the export zip as a single SMTP
+// message on the already-established client connection, from `from` to `to`.
+func deliverEmail(c *smtp.Client, mailFile *zip.File, from string, to string) *model.AppError {
+	mailData, err := mailFile.Open()
+	if err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.unable_to_open_email_file.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	defer mailData.Close()
+
+	// Standard SMTP sequence: MAIL FROM, RCPT TO, then DATA.
+	if err = c.Mail(from); err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.from_address.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	if err = c.Rcpt(to); err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.to_address.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+
+	w, err := c.Data()
+	if err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.msg_data.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	// Stream the message body straight from the zip entry to the server.
+	if _, err = io.Copy(w, mailData); err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.msg.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	// Closing the data writer finalizes the SMTP transaction.
+	if err = w.Close(); err != nil {
+		return model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.close.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	return nil
+}
+
+// getFrom opens the given email file from the export zip and returns the
+// value of its "From" header.
+func getFrom(mailFile *zip.File) (string, *model.AppError) {
+	reader, err := mailFile.Open()
+	if err != nil {
+		return "", model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.unable_to_open_email_file.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	defer reader.Close()
+
+	msg, err := mail.ReadMessage(reader)
+	if err != nil {
+		return "", model.NewAppError("GlobalRelayDelivery", "ent.message_export.global_relay_export.deliver.parse_mail.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	return msg.Header.Get("From"), nil
+}
diff --git a/server/enterprise/message_export/global_relay_export/deliver_test.go b/server/enterprise/message_export/global_relay_export/deliver_test.go
new file mode 100644
index 00000000000..114cf8206fa
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/deliver_test.go
@@ -0,0 +1,140 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "archive/zip"
+ "fmt"
+ "io"
+ "os"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ gomail "gopkg.in/mail.v2"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/mail"
+)
+
+// TestDeliver exercises Deliver against a local Inbucket SMTP mailbox (the
+// "INBUCKET" customer type), covering an invalid archive, an empty archive,
+// and archives containing one and fifty emails.
+// NOTE(review): requires a reachable Inbucket test server — confirm CI setup.
+func TestDeliver(t *testing.T) {
+	config := &model.Config{}
+	config.SetDefaults()
+	// Route delivery to the local test SMTP server instead of Global Relay.
+	config.MessageExportSettings.GlobalRelaySettings.CustomerType = model.NewPointer("INBUCKET")
+	config.MessageExportSettings.GlobalRelaySettings.EmailAddress = model.NewPointer("test-globalrelay-mailbox@test")
+
+	t.Run("Testing invalid zip file", func(t *testing.T) {
+		// An empty temp file is not a valid zip archive; Deliver must fail.
+		emptyFile, err := os.CreateTemp("", "export")
+		require.NoError(t, err)
+		defer emptyFile.Close()
+		defer os.Remove(emptyFile.Name())
+
+		appErr := Deliver(emptyFile, config)
+		assert.NotNil(t, appErr)
+	})
+
+	t.Run("Testing empty zip file", func(t *testing.T) {
+		// A valid but empty archive should deliver successfully and send nothing.
+		emptyZipFile, err := os.CreateTemp("", "export")
+		require.NoError(t, err)
+		zipFile := zip.NewWriter(emptyZipFile)
+		err = zipFile.Close()
+		require.NoError(t, err)
+		defer emptyZipFile.Close()
+		defer os.Remove(emptyZipFile.Name())
+
+		// Start from a clean mailbox so the assertion below is meaningful.
+		err = mail.DeleteMailBox(*config.MessageExportSettings.GlobalRelaySettings.EmailAddress)
+		require.NoError(t, err)
+
+		appErr := Deliver(emptyZipFile, config)
+		assert.Nil(t, appErr)
+
+		// Nothing was sent, so the mailbox should not exist at all.
+		_, err = mail.GetMailBox(*config.MessageExportSettings.GlobalRelaySettings.EmailAddress)
+		require.Error(t, err)
+	})
+
+	t.Run("Testing zip file with one email", func(t *testing.T) {
+		// Headers mirror those generateEmail produces for a real export.
+		headers := map[string][]string{
+			"From":                      {"test@test.com"},
+			"To":                        {*config.MessageExportSettings.GlobalRelaySettings.EmailAddress},
+			"Subject":                   {encodeRFC2047Word("test")},
+			"Content-Transfer-Encoding": {"8bit"},
+			"Auto-Submitted":            {"auto-generated"},
+			"Precedence":                {"bulk"},
+			GlobalRelayMsgTypeHeader:     {"Mattermost"},
+			GlobalRelayChannelNameHeader: {encodeRFC2047Word("test")},
+			GlobalRelayChannelIDHeader:   {encodeRFC2047Word("test")},
+			GlobalRelayChannelTypeHeader: {encodeRFC2047Word("test")},
+		}
+
+		m := gomail.NewMessage(gomail.SetCharset("UTF-8"))
+		m.SetHeaders(headers)
+		m.SetBody("text/plain", "test")
+
+		emptyZipFile, err := os.CreateTemp("", "export")
+		require.NoError(t, err)
+		zipFile := zip.NewWriter(emptyZipFile)
+		file, err := zipFile.Create("test")
+		require.NoError(t, err)
+		_, err = m.WriteTo(file)
+		require.NoError(t, err)
+
+		err = zipFile.Close()
+		require.NoError(t, err)
+		defer emptyZipFile.Close()
+		defer os.Remove(emptyZipFile.Name())
+
+		err = mail.DeleteMailBox(*config.MessageExportSettings.GlobalRelaySettings.EmailAddress)
+		require.NoError(t, err)
+
+		appErr := Deliver(emptyZipFile, config)
+		assert.Nil(t, appErr)
+
+		// Exactly one message must have arrived in the test mailbox.
+		mailbox, err := mail.GetMailBox(*config.MessageExportSettings.GlobalRelaySettings.EmailAddress)
+		require.NoError(t, err)
+		require.Len(t, mailbox, 1)
+	})
+
+	t.Run("Testing zip file with 50 emails", func(t *testing.T) {
+		headers := map[string][]string{
+			"From":                      {"test@test.com"},
+			"To":                        {*config.MessageExportSettings.GlobalRelaySettings.EmailAddress},
+			"Subject":                   {encodeRFC2047Word("test")},
+			"Content-Transfer-Encoding": {"8bit"},
+			"Auto-Submitted":            {"auto-generated"},
+			"Precedence":                {"bulk"},
+			GlobalRelayMsgTypeHeader:     {"Mattermost"},
+			GlobalRelayChannelNameHeader: {encodeRFC2047Word("test")},
+			GlobalRelayChannelIDHeader:   {encodeRFC2047Word("test")},
+			GlobalRelayChannelTypeHeader: {encodeRFC2047Word("test")},
+		}
+		m := gomail.NewMessage(gomail.SetCharset("UTF-8"))
+		m.SetHeaders(headers)
+		m.SetBody("text/plain", "test")
+
+		emptyZipFile, err := os.CreateTemp("", "export")
+		require.NoError(t, err)
+		zipFile := zip.NewWriter(emptyZipFile)
+		// Reuse the same message under 50 different archive entry names.
+		for x := 0; x < 50; x++ {
+			var file io.Writer
+			file, err = zipFile.Create(fmt.Sprintf("test-%d", x))
+			require.NoError(t, err)
+			_, err = m.WriteTo(file)
+			require.NoError(t, err)
+		}
+		err = zipFile.Close()
+		require.NoError(t, err)
+		defer emptyZipFile.Close()
+		defer os.Remove(emptyZipFile.Name())
+
+		err = mail.DeleteMailBox(*config.MessageExportSettings.GlobalRelaySettings.EmailAddress)
+		require.NoError(t, err)
+
+		appErr := Deliver(emptyZipFile, config)
+		assert.Nil(t, appErr)
+
+		// All 50 messages must be delivered (exercises batching over one connection).
+		mailbox, err := mail.GetMailBox(*config.MessageExportSettings.GlobalRelaySettings.EmailAddress)
+		require.NoError(t, err)
+		require.Len(t, mailbox, 50)
+	})
+}
diff --git a/server/enterprise/message_export/global_relay_export/global_relay_export.go b/server/enterprise/message_export/global_relay_export/global_relay_export.go
new file mode 100644
index 00000000000..b709e9b4f48
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/global_relay_export.go
@@ -0,0 +1,420 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "archive/zip"
+ "encoding/json"
+ "fmt"
+ "io"
+ "mime"
+ "net/http"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/jaytaylor/html2text"
+ gomail "gopkg.in/mail.v2"
+
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/common_export"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/store"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/templates"
+)
+
+const (
+	// Custom headers attached to each exported email so Global Relay can
+	// classify and index Mattermost conversations.
+	GlobalRelayMsgTypeHeader     = "X-GlobalRelay-MsgType"
+	GlobalRelayChannelNameHeader = "X-Mattermost-ChannelName"
+	GlobalRelayChannelIDHeader   = "X-Mattermost-ChannelID"
+	GlobalRelayChannelTypeHeader = "X-Mattermost-ChannelType"
+	// MaxEmailBytes caps a single export email (message text plus attachment
+	// sizes) at 250 MiB (250 << 20).
+	MaxEmailBytes = 250 << (10 * 2)
+	// MaxEmailsPerConnection bounds how many emails are sent over one SMTP
+	// connection before reconnecting.
+	MaxEmailsPerConnection = 400
+)
+
+// AllExport maps a channel ID to that channel's list of email-sized export
+// batches.
+type AllExport map[string][]*ChannelExport
+
+// ChannelExport is one email's worth of a channel's conversation: its
+// messages, participants, and attachments for a bounded window and byte
+// budget.
+type ChannelExport struct {
+	ChannelId    string            // the unique id of the channel
+	ChannelName  string            // the name of the channel
+	ChannelType  model.ChannelType // the channel type
+	StartTime    int64             // utc timestamp (seconds), start of export period or create time of channel, whichever is greater. Example: 1366611728.
+	EndTime      int64             // utc timestamp (seconds), end of export period or delete time of channel, whichever is lesser. Example: 1366611728.
+	Participants []ParticipantRow  // summary information about the conversation participants
+	Messages     []Message         // the messages that were sent during the conversation
+	ExportedOn   int64             // utc timestamp (seconds), when this export was generated. NOTE(review): GlobalRelayExport assigns Unix()*1000 (milliseconds) — confirm intended unit.
+	numUserMessages map[string]int    // key is user id, value is number of messages that they sent during this period
+	uploadedFiles   []*model.FileInfo // any files that were uploaded to the channel during the export period
+	bytes           int64             // running size of this batch, compared against MaxEmailBytes
+}
+
+// a row in the summary table at the top of the export
+type ParticipantRow struct {
+	Username     string // Mattermost username
+	UserType     string // "user" or "bot"
+	Email        string
+	JoinTime     int64 // when the user joined the channel (within the window)
+	LeaveTime    int64 // when the user left, or the window end if still present
+	MessagesSent int   // messages the user sent during this batch
+}
+
+// Message is a single rendered line of the conversation (a post, or an
+// implicit upload/removal notice for an attachment).
+type Message struct {
+	SentTime       int64
+	SenderUsername string
+	PostType       string
+	PostUsername   string // username override from a webhook/API integration, if any
+	SenderUserType string // "user" or "bot"
+	SenderEmail    string
+	Message        string
+	PreviewsPost   string // id of a permalink-previewed post, if any
+}
+
+// GlobalRelayExport writes the given posts to dest as a zip of .eml files,
+// one per channel batch (batches are split by MaxEmailBytes in addToExports).
+// It returns the IDs of posts whose attachments were removed for exceeding
+// the size limit, a count of non-fatal warnings, and any fatal error.
+func GlobalRelayExport(rctx request.CTX, posts []*model.MessageExport, db store.Store, fileAttachmentBackend filestore.FileBackend, dest io.Writer, templates *templates.Container) ([]string, int64, *model.AppError) {
+	var warningCount int64
+	attachmentsRemovedPostIDs := []string{}
+	allExports := make(map[string][]*ChannelExport)
+
+	zipFile := zip.NewWriter(dest)
+
+	membersByChannel := common_export.MembersByChannel{}
+	metadata := common_export.Metadata{
+		Channels:         map[string]common_export.MetadataChannel{},
+		MessagesCount:    0,
+		AttachmentsCount: 0,
+		StartTime:        0,
+		EndTime:          0,
+	}
+
+	for _, post := range posts {
+		// Record each author as a channel member for the participant summary.
+		if _, ok := membersByChannel[*post.ChannelId]; !ok {
+			membersByChannel[*post.ChannelId] = common_export.ChannelMembers{}
+		}
+
+		membersByChannel[*post.ChannelId][*post.UserId] = common_export.ChannelMember{
+			UserId:   *post.UserId,
+			Username: *post.Username,
+			IsBot:    post.IsBot,
+			Email:    *post.UserEmail,
+		}
+
+		// Only hit the store when the post actually references files.
+		attachments := []*model.FileInfo{}
+		if len(post.PostFileIds) > 0 {
+			var err error
+			attachments, err = db.FileInfo().GetForPost(*post.PostId, true, true, false)
+			if err != nil {
+				return nil, warningCount, model.NewAppError("GlobalRelayExport", "ent.message_export.global_relay_export.get_attachment_error", nil, "", http.StatusInternalServerError).Wrap(err)
+			}
+		}
+
+		attachmentsRemoved := addToExports(rctx, attachments, allExports, post)
+		attachmentsRemovedPostIDs = append(attachmentsRemovedPostIDs, attachmentsRemoved...)
+
+		metadata.Update(post, len(attachments))
+	}
+
+	for _, channelExportList := range allExports {
+		for batchId, channelExport := range channelExportList {
+			participants, appErr := getParticipants(db, channelExport, membersByChannel[channelExport.ChannelId])
+			if appErr != nil {
+				return nil, warningCount, appErr
+			}
+			channelExport.Participants = participants
+			// NOTE(review): Unix()*1000 is milliseconds, but ExportedOn is
+			// documented as seconds — confirm the intended unit.
+			channelExport.ExportedOn = time.Now().Unix() * 1000
+
+			// One .eml per batch; batchId disambiguates multiple batches of a channel.
+			channelExportFile, err := zipFile.Create(fmt.Sprintf("%s - (%s) - %d.eml", channelExport.ChannelName, channelExport.ChannelId, batchId))
+			if err != nil {
+				return nil, warningCount, model.NewAppError("GlobalRelayExport", "ent.message_export.global_relay.create_file_in_zip.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+			}
+
+			if appErr, warningCount = generateEmail(rctx, fileAttachmentBackend, channelExport, templates, channelExportFile); appErr != nil {
+				return nil, warningCount, appErr
+			}
+		}
+	}
+
+	err := zipFile.Close()
+	if err != nil {
+		return nil, warningCount, model.NewAppError("GlobalRelayExport", "ent.message_export.global_relay.close_zip_file.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+
+	return attachmentsRemovedPostIDs, warningCount, nil
+}
+
+// newChannelExport creates an empty export batch for the channel the given
+// post belongs to, starting the batch window at the post's create time.
+func newChannelExport(post *model.MessageExport) *ChannelExport {
+	return &ChannelExport{
+		ChannelId:       *post.ChannelId,
+		ChannelName:     *post.ChannelDisplayName,
+		ChannelType:     *post.ChannelType,
+		StartTime:       *post.PostCreateAt,
+		Messages:        make([]Message, 0),
+		Participants:    make([]ParticipantRow, 0),
+		numUserMessages: make(map[string]int),
+		uploadedFiles:   make([]*model.FileInfo, 0),
+		bytes:           0,
+	}
+}
+
+// addToExports adds the post (and its attachments) to the channel's current
+// export batch, rolling over to a new batch when adding the post would exceed
+// MaxEmailBytes. It returns the IDs of posts whose attachments were dropped
+// because the post alone is too large to send.
+func addToExports(rctx request.CTX, attachments []*model.FileInfo, exports map[string][]*ChannelExport, post *model.MessageExport) []string {
+	var channelExport *ChannelExport
+	attachmentsRemovedPostIDs := []string{}
+	if channelExports, present := exports[*post.ChannelId]; !present {
+		// first post seen for this channel: start its first batch
+		channelExport = newChannelExport(post)
+		exports[*post.ChannelId] = []*ChannelExport{channelExport}
+	} else {
+		// continue filling the channel's most recent batch
+		channelExport = channelExports[len(channelExports)-1]
+	}
+
+	// Size the post (text + attachments) against the per-email byte budget.
+	fileBytes := fileInfoListBytes(attachments)
+	msgBytes := int64(len(*post.PostMessage))
+	postBytes := fileBytes + msgBytes
+	postTooLargeForChannelBatch := channelExport.bytes+postBytes > MaxEmailBytes
+	postAloneTooLargeToSend := postBytes > MaxEmailBytes // Attachments must be removed from export, they're too big to send.
+
+	if postAloneTooLargeToSend {
+		attachmentsRemovedPostIDs = append(attachmentsRemovedPostIDs, *post.PostId)
+	}
+
+	if postTooLargeForChannelBatch && !postAloneTooLargeToSend {
+		// Post doesn't fit in the current batch: roll over to a fresh one.
+		channelExport = newChannelExport(post)
+		exports[*post.ChannelId] = append(exports[*post.ChannelId], channelExport)
+	}
+
+	addPostToChannelExport(rctx, channelExport, post)
+
+	// if this post includes files, add them to the collection
+	for _, fileInfo := range attachments {
+		addAttachmentToChannelExport(channelExport, post, fileInfo, postAloneTooLargeToSend)
+	}
+	channelExport.bytes += postBytes
+	return attachmentsRemovedPostIDs
+}
+
+// getParticipants builds the participant summary rows for a channel export
+// batch, combining channel-membership history with the per-user message
+// counts accumulated while the batch was filled. Rows are sorted by username.
+func getParticipants(db store.Store, channelExport *ChannelExport, members common_export.ChannelMembers) ([]ParticipantRow, *model.AppError) {
+	participantsMap := map[string]ParticipantRow{}
+	channelMembersHistory, err := db.ChannelMemberHistory().GetUsersInChannelDuring(channelExport.StartTime, channelExport.EndTime, channelExport.ChannelId)
+	if err != nil {
+		return nil, model.NewAppError("getParticipants", "ent.get_users_in_channel_during", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+
+	joins, leaves := common_export.GetJoinsAndLeavesForChannel(channelExport.StartTime, channelExport.EndTime, channelMembersHistory, members)
+
+	for _, join := range joins {
+		userType := "user"
+		if join.IsBot {
+			userType = "bot"
+		}
+
+		if _, ok := participantsMap[join.UserId]; !ok {
+			participantsMap[join.UserId] = ParticipantRow{
+				Username:     join.Username,
+				UserType:     userType,
+				Email:        join.Email,
+				JoinTime:     join.Datetime,
+				LeaveTime:    channelExport.EndTime, // default: still present at window end
+				MessagesSent: channelExport.numUserMessages[join.UserId],
+			}
+		}
+	}
+	for _, leave := range leaves {
+		// Map values are copies in Go: mutate the copy, then write it back,
+		// otherwise the updated LeaveTime is silently discarded (this was the
+		// bug the previous //nolint:govet was suppressing).
+		if participantRow, ok := participantsMap[leave.UserId]; ok {
+			participantRow.LeaveTime = leave.Datetime
+			participantsMap[leave.UserId] = participantRow
+		}
+	}
+
+	participants := []ParticipantRow{}
+	for _, participant := range participantsMap {
+		participants = append(participants, participant)
+	}
+
+	// Map iteration order is random; sort for deterministic export output.
+	sort.Slice(participants, func(i, j int) bool {
+		return participants[i].Username < participants[j].Username
+	})
+	return participants, nil
+}
+
+// generateEmail renders one channel export batch as a MIME email (plain-text
+// and HTML alternatives plus file attachments) and writes it to w. It returns
+// any fatal error and the number of non-fatal warnings; a missing attachment
+// file is logged and counted as a warning, not treated as fatal.
+// NOTE(review): indexes participantEmailAddresses[0] without a length check —
+// confirm a batch can never have zero participants.
+func generateEmail(rctx request.CTX, fileAttachmentBackend filestore.FileBackend, channelExport *ChannelExport, templates *templates.Container, w io.Writer) (*model.AppError, int64) {
+	var warningCount int64
+	participantEmailAddresses := getParticipantEmails(channelExport)
+
+	// GlobalRelay expects the email to come from the person that initiated the conversation.
+	// our conversations aren't really initiated, so we just use the first person we find
+	from := participantEmailAddresses[0]
+
+	// it also expects the email to be addressed to the other participants in the conversation
+	mimeTo := strings.Join(participantEmailAddresses, ",")
+
+	htmlBody, err := channelExportToHTML(rctx, channelExport, templates)
+	if err != nil {
+		return model.NewAppError("GlobalRelayExport", "ent.message_export.global_relay.generate_email.app_error", nil, "", http.StatusInternalServerError).Wrap(err), warningCount
+	}
+
+	subject := fmt.Sprintf("Mattermost Compliance Export: %s", channelExport.ChannelName)
+	// NOTE(review): the markup wrapping htmlBody appears garbled here (tags
+	// look stripped by extraction) — confirm against the original source.
+	htmlMessage := "\r\n
" + htmlBody + ""
+
+	// Fall back to an empty plain-text body if HTML-to-text conversion fails.
+	txtBody, err := html2text.FromString(htmlBody)
+	if err != nil {
+		rctx.Logger().Warn("Error transforming html to plain text for GlobalRelay email", mlog.Err(err))
+		txtBody = ""
+	}
+
+	headers := map[string][]string{
+		"From":                      {from},
+		"To":                        {mimeTo},
+		"Subject":                   {encodeRFC2047Word(subject)},
+		"Content-Transfer-Encoding": {"8bit"},
+		"Auto-Submitted":            {"auto-generated"},
+		"Precedence":                {"bulk"},
+		GlobalRelayMsgTypeHeader:     {"Mattermost"},
+		GlobalRelayChannelNameHeader: {encodeRFC2047Word(channelExport.ChannelName)},
+		GlobalRelayChannelIDHeader:   {encodeRFC2047Word(channelExport.ChannelId)},
+		GlobalRelayChannelTypeHeader: {encodeRFC2047Word(common_export.ChannelTypeDisplayName(channelExport.ChannelType))},
+	}
+
+	m := gomail.NewMessage(gomail.SetCharset("UTF-8"))
+	m.SetHeaders(headers)
+	// Date the email at the end of the batch window (EndTime is in ms).
+	m.SetDateHeader("Date", time.Unix(channelExport.EndTime/1000, 0).UTC())
+	m.SetBody("text/plain", txtBody)
+	m.AddAlternative("text/html", htmlMessage)
+
+	for _, fileInfo := range channelExport.uploadedFiles {
+		// Capture the path per iteration so each attachment closure reads its own file.
+		path := fileInfo.Path
+
+		m.Attach(fileInfo.Name, gomail.SetCopyFunc(func(writer io.Writer) error {
+			reader, appErr := fileAttachmentBackend.Reader(path)
+			if appErr != nil {
+				// Missing file: warn and attach nothing rather than failing the export.
+				rctx.Logger().Warn("File not found for export", mlog.String("Filename", path))
+				warningCount += 1
+				return nil
+			}
+			defer reader.Close()
+
+			_, err = io.Copy(writer, reader)
+			if err != nil {
+				return model.NewAppError("GlobalRelayExport", "ent.message_export.global_relay.attach_file.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+			}
+			return nil
+		}))
+	}
+
+	_, err = m.WriteTo(w)
+	if err != nil {
+		return model.NewAppError("GlobalRelayExport", "ent.message_export.global_relay.generate_email.app_error", nil, "", http.StatusInternalServerError).Wrap(err), warningCount
+	}
+	return nil, warningCount
+}
+
+// getParticipantEmails collects the email address of every participant in
+// the export batch, preserving participant order.
+func getParticipantEmails(channelExport *ChannelExport) []string {
+	emails := make([]string, 0, len(channelExport.Participants))
+	for _, p := range channelExport.Participants {
+		emails = append(emails, p.Email)
+	}
+	return emails
+}
+
+func fileInfoListBytes(fileInfoList []*model.FileInfo) int64 {
+ totalBytes := int64(0)
+ for _, fileInfo := range fileInfoList {
+ totalBytes += fileInfo.Size
+ }
+ return totalBytes
+}
+
+// addPostToChannelExport converts the post into an export Message, appends it
+// to the batch, extends the batch's end time to the post's create time, and
+// increments the author's per-batch message count.
+func addPostToChannelExport(rctx request.CTX, channelExport *ChannelExport, post *model.MessageExport) {
+	userType := "user"
+	if post.IsBot {
+		userType = "bot"
+	}
+
+	strPostProps := post.PostProps
+	bytPostProps := []byte(*strPostProps)
+
+	// Added to show the username if overridden by a webhook or API integration
+	postUserName := ""
+	var postPropsLocal map[string]any
+	err := json.Unmarshal(bytPostProps, &postPropsLocal)
+	if err != nil {
+		rctx.Logger().Warn("Failed to unmarshal post Props into JSON. Ignoring username override.", mlog.Err(err))
+	} else {
+		// Use comma-ok type assertions: props come from stored JSON, so a
+		// non-string value must be ignored rather than causing a panic.
+		if overrideUsername, ok := postPropsLocal["override_username"].(string); ok {
+			postUserName = overrideUsername
+		}
+
+		if postUserName == "" {
+			if webhookDisplayName, ok := postPropsLocal["webhook_display_name"].(string); ok {
+				postUserName = webhookDisplayName
+			}
+		}
+	}
+
+	element := Message{
+		SentTime:       *post.PostCreateAt,
+		Message:        *post.PostMessage,
+		SenderUserType: userType,
+		PostType:       *post.PostType,
+		PostUsername:   postUserName,
+		SenderUsername: *post.Username,
+		SenderEmail:    *post.UserEmail,
+		PreviewsPost:   post.PreviewID(),
+	}
+	channelExport.Messages = append(channelExport.Messages, element)
+	channelExport.EndTime = *post.PostCreateAt
+	channelExport.numUserMessages[*post.UserId] += 1
+}
+
+func addAttachmentToChannelExport(channelExport *ChannelExport, post *model.MessageExport, fileInfo *model.FileInfo, removeAttachments bool) {
+ var uploadElement Message
+ userType := "user"
+ if post.IsBot {
+ userType = "bot"
+ }
+ if removeAttachments {
+ // add "post" message indicating that attachments were not sent
+ uploadElement = Message{
+ SentTime: fileInfo.CreateAt,
+ Message: fmt.Sprintf("Uploaded file '%s' (id '%s') was removed because it was too large to send.", fileInfo.Name, fileInfo.Id),
+ SenderUsername: *post.Username,
+ SenderUserType: userType,
+ SenderEmail: *post.UserEmail,
+ }
+
+ if fileInfo.DeleteAt != 0 {
+ uploadElement.SentTime = fileInfo.DeleteAt
+ uploadElement.Message = fmt.Sprintf("Deleted file '%s' (id '%s') was removed because it was too large to send.", fileInfo.Name, fileInfo.Id)
+ }
+ } else {
+ channelExport.uploadedFiles = append(channelExport.uploadedFiles, fileInfo)
+
+ // add an implicit "post" to the export that includes the filename so GlobalRelay knows who uploaded each file
+ uploadElement = Message{
+ SentTime: fileInfo.CreateAt,
+ Message: fmt.Sprintf("Uploaded file %s", fileInfo.Name),
+ SenderUsername: *post.Username,
+ SenderUserType: userType,
+ SenderEmail: *post.UserEmail,
+ }
+
+ if fileInfo.DeleteAt != 0 {
+ uploadElement.SentTime = fileInfo.DeleteAt
+ uploadElement.Message = fmt.Sprintf("Deleted file %s", fileInfo.Name)
+ }
+ }
+
+ channelExport.Messages = append(channelExport.Messages, uploadElement)
+}
+
+// encodeRFC2047Word encodes s as an RFC 2047 "B" (base64, UTF-8) encoded-word
+// so non-ASCII text can be carried safely in email header values.
+func encodeRFC2047Word(s string) string {
+	return mime.BEncoding.Encode("utf-8", s)
+}
diff --git a/server/enterprise/message_export/global_relay_export/global_relay_export_test.go b/server/enterprise/message_export/global_relay_export/global_relay_export_test.go
new file mode 100644
index 00000000000..9ff3be16078
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/global_relay_export_test.go
@@ -0,0 +1,1331 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "archive/zip"
+ "bytes"
+ "io"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+ "github.com/stretchr/testify/require"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/store/storetest"
+ "github.com/mattermost/mattermost/server/v8/channels/utils/fileutils"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/templates"
+)
+
+func TestGlobalRelayExport(t *testing.T) {
+ templatesDir, ok := fileutils.FindDir("templates")
+ require.True(t, ok)
+
+ templatesContainer, err := templates.New(templatesDir)
+ require.NotNil(t, templatesContainer)
+ require.NoError(t, err)
+
+ tempDir, err := os.MkdirTemp("", "")
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ err = os.RemoveAll(tempDir)
+ assert.NoError(t, err)
+ })
+
+ rctx := request.TestContext(t)
+
+ config := filestore.FileBackendSettings{
+ DriverName: model.ImageDriverLocal,
+ Directory: tempDir,
+ }
+
+ fileBackend, err := filestore.NewFileBackend(config)
+ assert.NoError(t, err)
+
+ chanTypeDirect := model.ChannelTypeDirect
+ csvExportTests := []struct {
+ name string
+ cmhs map[string][]*model.ChannelMemberHistoryResult
+ posts []*model.MessageExport
+ attachments map[string][]*model.FileInfo
+ expectedHeaders []string
+ expectedTexts []string
+ expectedHTMLs []string
+ expectedFiles int
+ expectedWarnings int
+ }{
+ {
+ name: "empty",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{},
+ posts: []*model.MessageExport{},
+ attachments: map[string][]*model.FileInfo{},
+ expectedHeaders: []string{},
+ expectedTexts: []string{},
+ expectedHTMLs: []string{},
+ expectedFiles: 0,
+ expectedWarnings: 0,
+ },
+ {
+ name: "posts",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{},
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:01:40Z @test1 user (test1@test.com): message",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ expectedFiles: 1,
+ expectedWarnings: 0,
+ },
+ {
+ name: "posts with attachments",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id-1"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{"test1"},
+ },
+ {
+ PostId: model.NewPointer("post-id-2"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-id-1"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{
+ "post-id-1": {
+ {
+ Name: "test1-attachment",
+ Id: "test1-attachment",
+ Path: "test1-attachment",
+ CreateAt: 1,
+ },
+ },
+ },
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): Uploaded file test1-at=",
+ "tachment",
+ "* 1970-01-01T00:01:40Z @test1 user (test1@test.com): message",
+ }, "\r\n"),
+ strings.Join([]string{
+ "Content-Disposition: attachment; filename=\"test1-attachment\"",
+ "Content-Transfer-Encoding: base64",
+ "Content-Type: application/octet-stream; name=\"test1-attachment\"",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " Uploaded file test1-attachment",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ expectedFiles: 1,
+ expectedWarnings: 0,
+ },
+ {
+ name: "posts with deleted attachments",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id-1"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{"test1"},
+ },
+ {
+ PostId: model.NewPointer("post-id-2"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-id-1"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{
+ "post-id-1": {
+ {
+ Name: "test1-attachment",
+ Id: "test1-attachment",
+ Path: "test1-attachment",
+ DeleteAt: 200000,
+ },
+ },
+ },
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:01:40Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:03:20Z @test1 user (test1@test.com): Deleted file test1-att=",
+ "achment",
+ }, "\r\n"),
+ strings.Join([]string{
+ "Content-Disposition: attachment; filename=\"test1-attachment\"",
+ "Content-Transfer-Encoding: base64",
+ "Content-Type: application/octet-stream; name=\"test1-attachment\"",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:03:20Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " Deleted file test1-attachment",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ expectedFiles: 1,
+ expectedWarnings: 0,
+ },
+ {
+ name: "posts with missing attachments",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id-1"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{"test1"},
+ },
+ {
+ PostId: model.NewPointer("post-id-2"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-id-1"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{
+ "post-id-1": {
+ {
+ Name: "test1-attachment",
+ Id: "test1-attachment",
+ Path: "test1-attachment",
+ CreateAt: 1,
+ },
+ },
+ },
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): Uploaded file test1-at=",
+ "tachment",
+ "* 1970-01-01T00:01:40Z @test1 user (test1@test.com): message",
+ }, "\r\n"),
+ strings.Join([]string{
+ "Content-Disposition: attachment; filename=\"test1-attachment\"",
+ "Content-Transfer-Encoding: base64",
+ "Content-Type: application/octet-stream; name=\"test1-attachment\"",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " Uploaded file test1-attachment",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ expectedFiles: 1,
+ expectedWarnings: 1,
+ },
+ {
+ name: "posts with override_username property",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{\"from_webhook\":\"true\",\"html\":\"Test HTML\",\"override_username\":\"test_username_override\",\"webhook_display_name\":\"Test Bot\"}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{},
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:01:40Z @test1 @test_username_override user (test1@test.com): message",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " @test_username_override",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ },
+ {
+ name: "posts with webhook_display_name property",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{\"from_webhook\":\"true\",\"webhook_display_name\":\"Test Bot\"}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{},
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message",
+ "* 1970-01-01T00:01:40Z @test1 @Test Bot user (test1@test.com): message",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " @Test Bot",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ },
+ {
+ name: "post with permalink preview",
+ cmhs: map[string][]*model.ChannelMemberHistoryResult{
+ "channel-id": {
+ {
+ JoinTime: 0, UserId: "test1", UserEmail: "test1@test.com", Username: "test", LeaveTime: model.NewPointer(int64(400)),
+ },
+ {
+ JoinTime: 8, UserId: "test2", UserEmail: "test2@test.com", Username: "test2", LeaveTime: model.NewPointer(int64(80)),
+ },
+ {
+ JoinTime: 400, UserId: "test3", UserEmail: "test3@test.com", Username: "test3",
+ },
+ },
+ },
+ posts: []*model.MessageExport{
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(1)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer(`{"previewed_post":"o4w39mc1ff8y5fite4b8hacy1x"}`),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ {
+ PostId: model.NewPointer("post-id"),
+ PostOriginalId: model.NewPointer("post-original-id"),
+ PostRootId: model.NewPointer("post-root-id"),
+ TeamId: model.NewPointer("team-id"),
+ TeamName: model.NewPointer("team-name"),
+ TeamDisplayName: model.NewPointer("team-display-name"),
+ ChannelId: model.NewPointer("channel-id"),
+ ChannelName: model.NewPointer("channel-name"),
+ ChannelDisplayName: model.NewPointer("channel-display-name"),
+ PostCreateAt: model.NewPointer(int64(100000)),
+ PostMessage: model.NewPointer("message"),
+ PostProps: model.NewPointer("{}"),
+ PostType: model.NewPointer(""),
+ UserEmail: model.NewPointer("test1@test.com"),
+ UserId: model.NewPointer("test1"),
+ Username: model.NewPointer("test1"),
+ ChannelType: &chanTypeDirect,
+ PostFileIds: []string{},
+ },
+ },
+ attachments: map[string][]*model.FileInfo{},
+ expectedHeaders: []string{
+ "MIME-Version: 1.0",
+ "X-Mattermost-ChannelType: direct",
+ "Content-Transfer-Encoding: 8bit",
+ "Precedence: bulk",
+ "X-GlobalRelay-MsgType: Mattermost",
+ "X-Mattermost-ChannelID: channel-id",
+ "X-Mattermost-ChannelName: channel-display-name",
+ "Auto-Submitted: auto-generated",
+ "Date: Thu, 01 Jan 1970 00:01:40 +0000",
+ "From: test1@test.com",
+ "To: test1@test.com,test2@test.com",
+ "Subject: Mattermost Compliance Export: channel-display-name",
+ },
+
+ expectedTexts: []string{
+ strings.Join([]string{
+ "* Channel: channel-display-name",
+ "* Started: 1970-01-01T00:00:00Z",
+ "* Ended: 1970-01-01T00:01:40Z",
+ "* Duration: 2 minutes",
+ }, "\r\n"),
+ strings.Join([]string{
+ "--------",
+ "Messages",
+ "--------",
+ "",
+ "* 1970-01-01T00:00:00Z @test1 user (test1@test.com): message o4w39mc1ff8y5f=",
+ "ite4b8hacy1x",
+ "* 1970-01-01T00:01:40Z @test1 user (test1@test.com): message",
+ }, "\r\n"),
+ },
+
+ expectedHTMLs: []string{
+ strings.Join([]string{
+ " ",
+ " - Channel: channel-display-name<=",
+ "/li>",
+ "
- Started: 1970-01-01T00:00:00Z<=",
+ "/li>",
+ "
- Ended: 1970-01-01T00:01:40Z",
+ "
- Duration: 2 minutes
",
+ "
",
+ }, "\r\n"),
+ strings.Join([]string{
+ "",
+ " @test | ",
+ " user | ",
+ " test1@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 2 | ",
+ "
",
+ "",
+ "",
+ " @test2 | ",
+ " user | ",
+ " test2@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ "",
+ "",
+ " @test3 | ",
+ " user | ",
+ " test3@test.com | ",
+ " 1970-01-01T00:00:00Z | ",
+ " 1970-01-01T00:01:40Z | ",
+ " 2 minutes | ",
+ " 0 | ",
+ "
",
+ }, "\r\n"),
+
+ strings.Join([]string{
+ "",
+ " 1970-01-01T00:00:00Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " o4w39mc1ff8y5fite4b8hacy1x",
+ "",
+ "",
+ "",
+ " 1970-01-01T00:01:40Z",
+ " @test1",
+ " ",
+ " user",
+ " (test1@test.com):",
+ " message",
+ " ",
+ "",
+ }, "\r\n"),
+ },
+ expectedFiles: 1,
+ expectedWarnings: 0,
+ },
+ }
+
+ for _, tt := range csvExportTests {
+ t.Run(tt.name, func(t *testing.T) {
+ mockStore := &storetest.Store{}
+ defer mockStore.AssertExpectations(t)
+
+ if len(tt.attachments) > 0 {
+ for post_id, attachments := range tt.attachments {
+ attachments := attachments // TODO: Remove once go1.22 is used
+ call := mockStore.FileInfoStore.On("GetForPost", post_id, true, true, false)
+ call.Run(func(args mock.Arguments) {
+ call.Return(attachments, nil)
+ })
+ if tt.expectedWarnings == 0 {
+ _, err = fileBackend.WriteFile(bytes.NewReader([]byte{}), attachments[0].Path)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ err = fileBackend.RemoveFile(attachments[0].Path)
+ assert.NoError(t, err)
+ })
+ }
+ }
+ }
+
+ if len(tt.cmhs) > 0 {
+ for channelId, cmhs := range tt.cmhs {
+ mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(100000), channelId).Return(cmhs, nil)
+ }
+ }
+
+ dest, err := os.CreateTemp("", "")
+ assert.NoError(t, err)
+ defer os.Remove(dest.Name())
+
+ _, warningCount, appErr := GlobalRelayExport(rctx, tt.posts, mockStore, fileBackend, dest, templatesContainer)
+ assert.Nil(t, appErr)
+ assert.Equal(t, int64(tt.expectedWarnings), warningCount)
+
+ _, err = dest.Seek(0, 0)
+ assert.NoError(t, err)
+
+ destInfo, err := dest.Stat()
+ assert.NoError(t, err)
+
+ zipFile, err := zip.NewReader(dest, destInfo.Size())
+ assert.NoError(t, err)
+
+ if tt.expectedFiles > 0 {
+ firstFile, err := zipFile.File[0].Open()
+ assert.NoError(t, err)
+
+ data, err := io.ReadAll(firstFile)
+ assert.NoError(t, err)
+
+ t.Run("headers", func(t *testing.T) {
+ for _, expectedHeader := range tt.expectedHeaders {
+ assert.Contains(t, string(data), expectedHeader)
+ }
+ })
+
+ t.Run("text-version", func(t *testing.T) {
+ for _, expectedText := range tt.expectedTexts {
+ assert.Contains(t, string(data), expectedText)
+ }
+ })
+
+ t.Run("html-version", func(t *testing.T) {
+ for _, expectedHTML := range tt.expectedHTMLs {
+ assert.Contains(t, string(data), expectedHTML)
+ }
+ })
+ }
+ })
+ }
+}
diff --git a/server/enterprise/message_export/global_relay_export/main_test.go b/server/enterprise/message_export/global_relay_export/main_test.go
new file mode 100644
index 00000000000..3a1822aa6e9
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/main_test.go
@@ -0,0 +1,23 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost/server/v8/channels/testlib"
+)
+
+var mainHelper *testlib.MainHelper
+
+// TestMain wires this package's test binary into the shared testlib main
+// helper, with test resources enabled, so suite-wide setup and teardown run
+// around all tests in the package.
+func TestMain(m *testing.M) {
+	var options = testlib.HelperOptions{
+		EnableResources: true,
+	}
+
+	mainHelper = testlib.NewMainHelperWithOptions(&options)
+	defer mainHelper.Close()
+
+	mainHelper.Main(m)
+}
diff --git a/server/enterprise/message_export/global_relay_export/smtp.go b/server/enterprise/message_export/global_relay_export/smtp.go
new file mode 100644
index 00000000000..adbd5a3ab8d
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/smtp.go
@@ -0,0 +1,93 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "context"
+ "net/smtp"
+ "os"
+ "strconv"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/v8/channels/utils"
+ "github.com/mattermost/mattermost/server/v8/channels/utils/testutils"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/mail"
+)
+
+const (
+ GlobalRelayA9Server = "mailarchivespool1.globalrelay.com"
+ GlobalRelayA10Server = "feeds.globalrelay.com"
+ GlobalRelayA9IP = "208.81.212.70"
+ GlobalRelayA10IP = "208.81.213.24"
+
+ defaultSMTPPort = "25"
+ defaultInbucketSMTPPort = "10025"
+)
+
+// connectToSMTPServer opens an SMTP connection for the compliance export and
+// returns a ready-to-use client. The target server is selected from the
+// configured GlobalRelaySettings customer type:
+//   - "A10" / "A9": Global Relay's archive endpoints, dialed by fixed IP with
+//     the well-known hostname used as the TLS server name;
+//   - "INBUCKET": a local Inbucket instance used by CI tests (no SMTP auth);
+//   - custom: server name and optional port taken straight from config.
+//
+// The caller is responsible for closing the returned client.
+func connectToSMTPServer(ctx context.Context, config *model.Config) (*smtp.Client, error) {
+	smtpServerName := ""
+	smtpServerHost := ""
+	smtpPort := defaultSMTPPort
+	security := model.ConnSecurityStarttls
+	auth := true
+	if *config.MessageExportSettings.GlobalRelaySettings.CustomerType == "A10" {
+		smtpServerName = GlobalRelayA10Server
+		smtpServerHost = GlobalRelayA10IP
+	} else if *config.MessageExportSettings.GlobalRelaySettings.CustomerType == "A9" {
+		smtpServerName = GlobalRelayA9Server
+		smtpServerHost = GlobalRelayA9IP
+	} else if *config.MessageExportSettings.GlobalRelaySettings.CustomerType == "INBUCKET" {
+		// CI override: take the Inbucket port/host from the environment when
+		// set; otherwise fall back to the default port and local interface
+		// discovery via testutils.
+		inbucketSMTPPort := os.Getenv("CI_INBUCKET_SMTP_PORT")
+		if inbucketSMTPPort == "" {
+			inbucketSMTPPort = defaultInbucketSMTPPort
+		}
+		inbucketHost := os.Getenv("CI_INBUCKET_HOST")
+		if inbucketHost == "" {
+			intPort, err := strconv.Atoi(inbucketSMTPPort)
+			if err != nil {
+				// Unparseable port: let GetInterface pick with port 0.
+				intPort = 0
+			}
+			inbucketHost = testutils.GetInterface(intPort)
+		}
+		smtpServerName = inbucketHost
+		smtpServerHost = inbucketHost
+		smtpPort = inbucketSMTPPort
+		auth = false
+	} else if *config.MessageExportSettings.GlobalRelaySettings.CustomerType == model.GlobalrelayCustomerTypeCustom {
+		customSMTPPort := *config.MessageExportSettings.GlobalRelaySettings.CustomSMTPPort
+		if customSMTPPort != "" {
+			smtpPort = customSMTPPort
+		}
+		smtpServerName = *config.MessageExportSettings.GlobalRelaySettings.CustomSMTPServerName
+		smtpServerHost = *config.MessageExportSettings.GlobalRelaySettings.CustomSMTPServerName
+	}
+
+	smtpConfig := &mail.SMTPConfig{
+		ConnectionSecurity:                security,
+		SkipServerCertificateVerification: false,
+		Hostname:                          utils.GetHostnameFromSiteURL(*config.ServiceSettings.SiteURL),
+		ServerName:                        smtpServerName,
+		Server:                            smtpServerHost,
+		Port:                              smtpPort,
+		EnableSMTPAuth:                    auth,
+		Username:                          *config.MessageExportSettings.GlobalRelaySettings.SMTPUsername,
+		Password:                          *config.MessageExportSettings.GlobalRelaySettings.SMTPPassword,
+		ServerTimeout:                     *config.MessageExportSettings.GlobalRelaySettings.SMTPServerTimeout,
+	}
+	conn, err1 := mail.ConnectToSMTPServerAdvanced(smtpConfig)
+	if err1 != nil {
+		return nil, err1
+	}
+
+	// Wrap the raw connection in an SMTP client; close the connection if the
+	// client handshake fails so it is not leaked.
+	c, err2 := mail.NewSMTPClientAdvanced(
+		ctx,
+		conn,
+		smtpConfig,
+	)
+	if err2 != nil {
+		conn.Close()
+		return nil, err2
+	}
+	return c, nil
+}
diff --git a/server/enterprise/message_export/global_relay_export/to_html.go b/server/enterprise/message_export/global_relay_export/to_html.go
new file mode 100644
index 00000000000..c8ad4eb8d86
--- /dev/null
+++ b/server/enterprise/message_export/global_relay_export/to_html.go
@@ -0,0 +1,105 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package global_relay_export
+
+import (
+ "bytes"
+ "html/template"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/hako/durafmt"
+
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/templates"
+)
+
+// channelExportToHTML renders a ChannelExport into the HTML document used for
+// the Global Relay compliance email body, via the
+// "globalrelay_compliance_export" template. Participant rows and messages that
+// fail to render individually are logged and skipped rather than aborting the
+// whole export.
+func channelExportToHTML(rctx request.CTX, channelExport *ChannelExport, t *templates.Container) (string, error) {
+	durationMilliseconds := channelExport.EndTime - channelExport.StartTime
+	// TODO: check whether millisecond precision is needed here, or whether
+	// the duration can be rounded directly.
+	duration := time.Duration(durationMilliseconds) * time.Millisecond
+
+	var participantRowsBuffer bytes.Buffer
+	for i := range channelExport.Participants {
+		participantHTML, err := participantToHTML(&channelExport.Participants[i], t)
+		if err != nil {
+			rctx.Logger().Error("Unable to render participant html for compliance export", mlog.Err(err))
+			continue
+		}
+		participantRowsBuffer.WriteString(participantHTML)
+	}
+
+	var messagesBuffer bytes.Buffer
+	// Order messages by sent time; when two messages share a timestamp, the
+	// regular message sorts before "Uploaded file"/"Deleted file" notices.
+	// NOTE(review): sort.Slice is not stable — ties between two file notices
+	// (or two regular messages) keep no defined relative order.
+	sort.Slice(channelExport.Messages, func(i, j int) bool {
+		if channelExport.Messages[i].SentTime == channelExport.Messages[j].SentTime {
+			return !strings.HasPrefix(channelExport.Messages[i].Message, "Uploaded file") && !strings.HasPrefix(channelExport.Messages[i].Message, "Deleted file")
+		}
+		return channelExport.Messages[i].SentTime < channelExport.Messages[j].SentTime
+	})
+	for i := range channelExport.Messages {
+		messageHTML, err := messageToHTML(&channelExport.Messages[i], t)
+		if err != nil {
+			rctx.Logger().Error("Unable to render message html for compliance export", mlog.Err(err))
+			continue
+		}
+		messagesBuffer.WriteString(messageHTML)
+	}
+
+	// Timestamps are stored in milliseconds; convert to seconds for
+	// RFC 3339 formatting in UTC.
+	data := templates.Data{
+		Props: map[string]any{
+			"ChannelName":     channelExport.ChannelName,
+			"Started":         time.Unix(channelExport.StartTime/1000, 0).UTC().Format(time.RFC3339),
+			"Ended":           time.Unix(channelExport.EndTime/1000, 0).UTC().Format(time.RFC3339),
+			"Duration":        durafmt.Parse(duration.Round(time.Minute)).String(),
+			"ParticipantRows": template.HTML(participantRowsBuffer.String()),
+			"Messages":        template.HTML(messagesBuffer.String()),
+			"ExportDate":      time.Unix(channelExport.ExportedOn/1000, 0).UTC().Format(time.RFC3339),
+		},
+	}
+
+	return t.RenderToString("globalrelay_compliance_export", data)
+}
+
+// participantToHTML renders one participant's row of the export (username,
+// user type, email, join/leave times, membership duration and message count)
+// via the "globalrelay_compliance_export_participant_row" template.
+func participantToHTML(participant *ParticipantRow, t *templates.Container) (string, error) {
+	durationMilliseconds := participant.LeaveTime - participant.JoinTime
+	// TODO: check whether millisecond precision is needed here, or whether
+	// the duration can be rounded directly.
+	duration := time.Duration(durationMilliseconds) * time.Millisecond
+
+	// Join/leave timestamps are in milliseconds; format as RFC 3339 in UTC.
+	data := templates.Data{
+		Props: map[string]any{
+			"Username":    participant.Username,
+			"UserType":    participant.UserType,
+			"Email":       participant.Email,
+			"Joined":      time.Unix(participant.JoinTime/1000, 0).UTC().Format(time.RFC3339),
+			"Left":        time.Unix(participant.LeaveTime/1000, 0).UTC().Format(time.RFC3339),
+			"Duration":    durafmt.Parse(duration.Round(time.Minute)).String(),
+			"NumMessages": participant.MessagesSent,
+		},
+	}
+	return t.RenderToString("globalrelay_compliance_export_participant_row", data)
+}
+
+// messageToHTML renders a single message via the
+// "globalrelay_compliance_export_message" template. A non-empty override
+// post username (e.g. a webhook display name) is prefixed with "@" for
+// readability before being passed to the template.
+func messageToHTML(message *Message, t *templates.Container) (string, error) {
+	postUsername := message.PostUsername
+	// Added to improve readability
+	if postUsername != "" {
+		postUsername = "@" + postUsername
+	}
+	// SentTime is in milliseconds; format as RFC 3339 in UTC.
+	data := templates.Data{
+		Props: map[string]any{
+			"SentTime":     time.Unix(message.SentTime/1000, 0).UTC().Format(time.RFC3339),
+			"Username":     message.SenderUsername,
+			"PostUsername": postUsername,
+			"UserType":     message.SenderUserType,
+			"PostType":     message.PostType,
+			"Email":        message.SenderEmail,
+			"Message":      message.Message,
+			"PreviewsPost": message.PreviewsPost,
+		},
+	}
+
+	return t.RenderToString("globalrelay_compliance_export_message", data)
+}
diff --git a/server/enterprise/message_export/main_test.go b/server/enterprise/message_export/main_test.go
new file mode 100644
index 00000000000..794f0ca457b
--- /dev/null
+++ b/server/enterprise/message_export/main_test.go
@@ -0,0 +1,21 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost/server/v8/channels/api4"
+ "github.com/mattermost/mattermost/server/v8/channels/testlib"
+)
+
+// mainHelper is the package-wide test harness shared by all tests in this package.
+var mainHelper *testlib.MainHelper
+
+// TestMain wires the package's tests into the shared testlib harness so that
+// per-package setup/teardown runs exactly once, and makes the helper available
+// to the api4 test infrastructure.
+func TestMain(m *testing.M) {
+	mainHelper = testlib.NewMainHelper()
+	defer mainHelper.Close()
+	api4.SetMainHelper(mainHelper)
+
+	mainHelper.Main(m)
+}
diff --git a/server/enterprise/message_export/membership_map.go b/server/enterprise/message_export/membership_map.go
new file mode 100644
index 00000000000..26dcb898972
--- /dev/null
+++ b/server/enterprise/message_export/membership_map.go
@@ -0,0 +1,58 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+// MembershipMapUser is the minimal user identity tracked per channel
+// membership.
+type MembershipMapUser struct {
+	userId   string
+	email    string
+	username string
+}
+
+// MembershipMap provides a clean interface for tracking the users that are
+// present in any number of channels, keyed by channel id and then user email.
+// All methods lazily initialize the underlying maps, so the zero value is
+// usable (via a pointer receiver).
+type MembershipMap map[string]map[string]MembershipMapUser
+
+// init lazily allocates the outer map and the per-channel inner map so that
+// callers never write to a nil map (which would panic).
+func (m *MembershipMap) init(channelId string) {
+	if *m == nil {
+		*m = make(map[string]map[string]MembershipMapUser)
+	}
+	if (*m)[channelId] == nil {
+		(*m)[channelId] = make(map[string]MembershipMapUser)
+	}
+}
+
+// AddUserToChannel records the user as present in the channel. Adding the
+// same email twice is a no-op: the first entry wins.
+func (m *MembershipMap) AddUserToChannel(channelId string, user MembershipMapUser) {
+	m.init(channelId)
+	if !m.IsUserInChannel(channelId, user.email) {
+		(*m)[channelId][user.email] = user
+	}
+}
+
+// RemoveUserFromChannel removes the user with the given email from the
+// channel. Removing an absent user is a no-op.
+func (m *MembershipMap) RemoveUserFromChannel(channelId string, userEmail string) {
+	m.init(channelId)
+	delete((*m)[channelId], userEmail)
+}
+
+// IsUserInChannel reports whether a user with the given email is currently
+// tracked as a member of the channel.
+func (m *MembershipMap) IsUserInChannel(channelId string, userEmail string) bool {
+	m.init(channelId)
+	_, exists := (*m)[channelId][userEmail]
+	return exists
+}
+
+// GetUserEmailsInChannel returns the emails of every tracked member of the
+// channel, in nondeterministic (map iteration) order.
+func (m *MembershipMap) GetUserEmailsInChannel(channelId string) []string {
+	m.init(channelId)
+	users := make([]string, 0, len((*m)[channelId]))
+	for k := range (*m)[channelId] {
+		users = append(users, k)
+	}
+	return users
+}
+
+// GetUsersInChannel returns every tracked member of the channel, in
+// nondeterministic (map iteration) order.
+func (m *MembershipMap) GetUsersInChannel(channelId string) []MembershipMapUser {
+	m.init(channelId)
+	users := make([]MembershipMapUser, 0, len((*m)[channelId]))
+	for _, v := range (*m)[channelId] {
+		users = append(users, v)
+	}
+	return users
+}
diff --git a/server/enterprise/message_export/membership_map_test.go b/server/enterprise/message_export/membership_map_test.go
new file mode 100644
index 00000000000..05670d63575
--- /dev/null
+++ b/server/enterprise/message_export/membership_map_test.go
@@ -0,0 +1,79 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/mattermost/mattermost/server/public/model"
+)
+
+// TestMembershipMap exercises the full MembershipMap lifecycle for a single
+// channel: add two users, query membership/emails/users (order-insensitively,
+// since map iteration order is random), then remove one user and re-verify.
+func TestMembershipMap(t *testing.T) {
+	membershipMap := make(MembershipMap)
+
+	channelId := model.NewId()
+
+	user1 := &MembershipMapUser{
+		email:    model.NewId() + "@mattermost.com",
+		username: model.NewId(),
+		userId:   model.NewId(),
+	}
+	user2 := &MembershipMapUser{
+		email:    model.NewId() + "@mattermost.com",
+		username: model.NewId(),
+		userId:   model.NewId(),
+	}
+
+	// each user is absent before being added and present afterwards
+	assert.False(t, membershipMap.IsUserInChannel(channelId, user1.email))
+	membershipMap.AddUserToChannel(channelId, *user1)
+	assert.True(t, membershipMap.IsUserInChannel(channelId, user1.email))
+
+	assert.False(t, membershipMap.IsUserInChannel(channelId, user2.email))
+	membershipMap.AddUserToChannel(channelId, *user2)
+	assert.True(t, membershipMap.IsUserInChannel(channelId, user2.email))
+
+	// ensure that the correct user emails are returned
+	emails := membershipMap.GetUserEmailsInChannel(channelId)
+	assert.Len(t, emails, 2)
+	assert.Contains(t, emails, user1.email)
+	assert.Contains(t, emails, user2.email)
+
+	// ensure that the correct user objects are returned; the branch handles
+	// either return order, since map iteration order is not deterministic
+	users := membershipMap.GetUsersInChannel(channelId)
+	assert.Len(t, users, 2)
+	if users[0].userId == user1.userId {
+		assert.Equal(t, user1.username, users[0].username)
+		assert.Equal(t, user1.email, users[0].email)
+		assert.Equal(t, user2.userId, users[1].userId)
+		assert.Equal(t, user2.username, users[1].username)
+		assert.Equal(t, user2.email, users[1].email)
+	} else if users[0].userId == user2.userId {
+		assert.Equal(t, user2.username, users[0].username)
+		assert.Equal(t, user2.email, users[0].email)
+		assert.Equal(t, user1.userId, users[1].userId)
+		assert.Equal(t, user1.username, users[1].username)
+		assert.Equal(t, user1.email, users[1].email)
+	} else {
+		assert.Fail(t, "First returned user is not recognized")
+	}
+
+	// remove user1 from the channel
+	membershipMap.RemoveUserFromChannel(channelId, user1.email)
+	assert.False(t, membershipMap.IsUserInChannel(channelId, user1.email))
+	assert.True(t, membershipMap.IsUserInChannel(channelId, user2.email))
+
+	// ensure that user2's email is returned
+	emails = membershipMap.GetUserEmailsInChannel(channelId)
+	assert.Len(t, emails, 1)
+	assert.Contains(t, emails, user2.email)
+
+	// ensure that only user2 is returned
+	users = membershipMap.GetUsersInChannel(channelId)
+	assert.Len(t, users, 1)
+	assert.Equal(t, user2.userId, users[0].userId)
+	assert.Equal(t, user2.username, users[0].username)
+	assert.Equal(t, user2.email, users[0].email)
+}
diff --git a/server/enterprise/message_export/message_export.go b/server/enterprise/message_export/message_export.go
new file mode 100644
index 00000000000..50643019c95
--- /dev/null
+++ b/server/enterprise/message_export/message_export.go
@@ -0,0 +1,234 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "net/http"
+ "os"
+ "path"
+ "time"
+
+ "strconv"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/app"
+ "github.com/mattermost/mattermost/server/v8/channels/store"
+ "github.com/mattermost/mattermost/server/v8/channels/utils/fileutils"
+ "github.com/mattermost/mattermost/server/v8/einterfaces"
+ ejobs "github.com/mattermost/mattermost/server/v8/einterfaces/jobs"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/templates"
+
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/actiance_export"
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/csv_export"
+ "github.com/mattermost/mattermost/server/v8/enterprise/message_export/global_relay_export"
+)
+
+const (
+	// GlobalRelayExportFilename is the name of the zip written to the export
+	// backend when the export type is the GlobalRelay "zip" variant.
+	GlobalRelayExportFilename = "global-relay.zip"
+)
+
+// MessageExportInterfaceImpl implements einterfaces.MessageExportInterface:
+// on-demand compliance exports run against the given server.
+type MessageExportInterfaceImpl struct {
+	Server *app.Server
+}
+
+// MessageExportJobInterfaceImpl implements ejobs.MessageExportJobInterface:
+// it builds the scheduled message export job's worker and scheduler.
+type MessageExportJobInterfaceImpl struct {
+	Server *app.Server
+}
+
+// init registers the message export job and on-demand export implementations
+// with the app layer. Blank-importing this package is what activates message
+// export in the enterprise build.
+func init() {
+	app.RegisterJobsMessageExportJobInterface(func(s *app.Server) ejobs.MessageExportJobInterface {
+		return &MessageExportJobInterfaceImpl{s}
+	})
+	app.RegisterMessageExportInterface(func(app *app.App) einterfaces.MessageExportInterface {
+		return &MessageExportInterfaceImpl{app.Srv()}
+	})
+}
+
+// StartSynchronizeJob creates a message export job and blocks until the job
+// leaves every non-terminal state, polling once per second. It returns the
+// final job, or an error if job creation/lookup fails or rctx's context is
+// cancelled while waiting.
+func (m *MessageExportInterfaceImpl) StartSynchronizeJob(rctx request.CTX, exportFromTimestamp int64) (*model.Job, *model.AppError) {
+	// if a valid export time was specified, put it in the job data
+	jobData := make(map[string]string)
+	if exportFromTimestamp >= 0 {
+		jobData[JobDataBatchStartTimestamp] = strconv.FormatInt(exportFromTimestamp, 10)
+	}
+
+	// if no export time was specified, jobData stays empty and the worker
+	// inherits the start time from the previously successful job
+	job, err := m.Server.Jobs.CreateJob(rctx, model.JobTypeMessageExport, jobData)
+	if err != nil {
+		return nil, err
+	}
+
+	ticker := time.NewTicker(time.Second)
+	defer ticker.Stop()
+
+	// poll until the job reaches a terminal status
+	for job.Status == model.JobStatusPending ||
+		job.Status == model.JobStatusInProgress ||
+		job.Status == model.JobStatusCancelRequested {
+		select {
+		case <-ticker.C:
+			job, err = m.Server.Jobs.GetJob(rctx, job.Id)
+			if err != nil {
+				return nil, err
+			}
+		case <-rctx.Context().Done():
+			// caller's context was cancelled or timed out while waiting
+			return nil, model.NewAppError("StartSynchronizeJob", "ent.jobs.start_synchronize_job.timeout", nil, "", 0).Wrap(rctx.Context().Err())
+		}
+	}
+
+	return job, nil
+}
+
+// RunExport performs an on-demand compliance export of every post updated
+// since the given timestamp (epoch ms), writing the result via the server's
+// file backend. limit caps the number of exported posts; a negative limit
+// means "no limit". It returns the number of non-fatal warnings reported by
+// the exporter.
+func (m *MessageExportInterfaceImpl) RunExport(rctx request.CTX, exportType string, since int64, limit int) (warningCount int64, appErr *model.AppError) {
+	if limit < 0 {
+		// math.MaxInt rather than math.MaxInt64: limit is a plain int, so
+		// MaxInt64 would overflow (and fail to compile) on 32-bit platforms.
+		limit = math.MaxInt
+	}
+	postsToExport, _, err := m.Server.Store().Compliance().MessageExport(rctx, model.MessageExportCursor{LastPostUpdateAt: since}, limit)
+	if err != nil {
+		return warningCount, model.NewAppError("RunExport", "ent.message_export.run_export.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+	}
+	rctx.Logger().Debug("Found posts to export", mlog.Int("number_of_posts", len(postsToExport)))
+
+	fileBackend := m.Server.FileBackend()
+	templatesDir, ok := fileutils.FindDir("templates")
+	if !ok {
+		// NOTE(review): http.StatusAccepted (202) is an odd status for an
+		// error; kept as-is for consistency with the other compliance errors.
+		return warningCount, model.NewAppError("RunExport", "ent.compliance.run_export.template_watcher.appError", nil, "", http.StatusAccepted)
+	}
+
+	t, err2 := templates.New(templatesDir)
+	if err2 != nil {
+		return warningCount, model.NewAppError("RunExport", "ent.compliance.run_export.template_watcher.appError", nil, "", http.StatusAccepted).Wrap(err2)
+	}
+
+	// this run's output is grouped under a directory derived from the export
+	// window start and the current time
+	exportDirectory := getOutputDirectoryPath(since, model.GetMillis())
+	return runExportByType(rctx, exportType, postsToExport, exportDirectory, m.Server.Store(), fileBackend, fileBackend, t, m.Server.Config())
+}
+
+// runExportByType normalizes the exported post records and dispatches to the
+// exporter for the given type (CSV, Actiance, or one of the two GlobalRelay
+// variants). Nil pointer fields on posts are replaced with zero values —
+// silently for team fields (legitimately absent for DM/GM channels), with a
+// logged warning for user/channel/post fields — so the exporters can
+// dereference them safely. It returns the number of non-fatal warnings
+// produced by the chosen exporter.
+func runExportByType(rctx request.CTX, exportType string, postsToExport []*model.MessageExport, exportDirectory string, db store.Store, exportBackend filestore.FileBackend, fileAttachmentBackend filestore.FileBackend, htmlTemplates *templates.Container, config *model.Config) (warningCount int64, appErr *model.AppError) {
+	// go through all the posts and if the post's props contain 'from_bot' - override the IsBot field, since it's possible that the sender is not a user, but was a Bot and vise-versa
+	for _, post := range postsToExport {
+		if post.PostProps != nil {
+			props := map[string]any{}
+
+			// unmarshal errors are deliberately ignored: unparsable props simply
+			// leave IsBot unchanged
+			if json.Unmarshal([]byte(*post.PostProps), &props) == nil {
+				if val, ok := props["from_bot"]; ok {
+					post.IsBot = val == "true"
+				}
+			}
+		}
+
+		// Team info can be null for DM/GM channels.
+		if post.TeamId == nil {
+			post.TeamId = new(string)
+		}
+		if post.TeamName == nil {
+			post.TeamName = new(string)
+		}
+		if post.TeamDisplayName == nil {
+			post.TeamDisplayName = new(string)
+		}
+
+		// make sure user information is present. Set defaults and log an error otherwise.
+		if post.ChannelId == nil {
+			rctx.Logger().Warn("ChannelId is missing for post", mlog.String("post_id", *post.PostId))
+			post.ChannelId = new(string)
+		}
+		if post.ChannelName == nil {
+			rctx.Logger().Warn("ChannelName is missing for post", mlog.String("post_id", *post.PostId))
+			post.ChannelName = new(string)
+		}
+		if post.ChannelDisplayName == nil {
+			rctx.Logger().Warn("ChannelDisplayName is missing for post", mlog.String("post_id", *post.PostId))
+			post.ChannelDisplayName = new(string)
+		}
+		if post.ChannelType == nil {
+			rctx.Logger().Warn("ChannelType is missing for post", mlog.String("post_id", *post.PostId))
+			post.ChannelType = new(model.ChannelType)
+		}
+
+		if post.UserId == nil {
+			rctx.Logger().Warn("UserId is missing for post", mlog.String("post_id", *post.PostId))
+			post.UserId = new(string)
+		}
+		if post.UserEmail == nil {
+			rctx.Logger().Warn("UserEmail is missing for post", mlog.String("post_id", *post.PostId))
+			post.UserEmail = new(string)
+		}
+		if post.Username == nil {
+			rctx.Logger().Warn("Username is missing for post", mlog.String("post_id", *post.PostId))
+			post.Username = new(string)
+		}
+
+		if post.PostType == nil {
+			rctx.Logger().Warn("Type is missing for post", mlog.String("post_id", *post.PostId))
+			post.PostType = new(string)
+		}
+		if post.PostMessage == nil {
+			rctx.Logger().Warn("Message is missing for post", mlog.String("post_id", *post.PostId))
+			post.PostMessage = new(string)
+		}
+		if post.PostCreateAt == nil {
+			rctx.Logger().Warn("CreateAt is missing for post", mlog.String("post_id", *post.PostId))
+			post.PostCreateAt = new(int64)
+		}
+	}
+
+	switch exportType {
+	case model.ComplianceExportTypeCsv:
+		rctx.Logger().Debug("Exporting CSV")
+		return csv_export.CsvExport(rctx, postsToExport, db, exportBackend, fileAttachmentBackend, exportDirectory)
+
+	case model.ComplianceExportTypeActiance:
+		rctx.Logger().Debug("Exporting Actiance")
+		return actiance_export.ActianceExport(rctx, postsToExport, db, exportBackend, fileAttachmentBackend, exportDirectory)
+
+	case model.ComplianceExportTypeGlobalrelay, model.ComplianceExportTypeGlobalrelayZip:
+		rctx.Logger().Debug("Exporting GlobalRelay")
+		// the export is assembled in a temp file first, then either uploaded
+		// to the export backend (zip variant) or delivered directly
+		f, err := os.CreateTemp("", "")
+		if err != nil {
+			return warningCount, model.NewAppError("RunExport", "ent.compliance.global_relay.open_temporary_file.appError", nil, "", http.StatusAccepted).Wrap(err)
+		}
+		defer f.Close()
+		defer os.Remove(f.Name())
+
+		attachmentsRemovedPostIDs, warnings, appErr := global_relay_export.GlobalRelayExport(rctx, postsToExport, db, fileAttachmentBackend, f, htmlTemplates)
+		if appErr != nil {
+			return warningCount, appErr
+		}
+		warningCount = warnings
+		// rewind so the upload/delivery below reads the file from the start
+		_, err = f.Seek(0, 0)
+		if err != nil {
+			return warningCount, model.NewAppError("RunExport", "ent.compliance.global_relay.rewind_temporary_file.appError", nil, "", http.StatusAccepted).Wrap(err)
+		}
+
+		if exportType == model.ComplianceExportTypeGlobalrelayZip {
+			// Try to disable the write timeout for the potentially big export file.
+			_, nErr := filestore.TryWriteFileContext(rctx.Context(), exportBackend, f, path.Join(exportDirectory, GlobalRelayExportFilename))
+			if nErr != nil {
+				return warningCount, model.NewAppError("runExportByType", "ent.compliance.global_relay.write_file.appError", nil, "", http.StatusInternalServerError).Wrap(nErr)
+			}
+		} else {
+			appErr = global_relay_export.Deliver(f, config)
+			if appErr != nil {
+				return warningCount, appErr
+			}
+		}
+
+		// oversized attachments that were dropped are surfaced as a
+		// StatusAccepted AppError so callers can report a partial success
+		if len(attachmentsRemovedPostIDs) > 0 {
+			rctx.Logger().Debug("Global Relay Attachments Removed because they were too large to send to Global Relay", mlog.Array("attachment_ids", attachmentsRemovedPostIDs))
+			description := fmt.Sprintf("Attachments to post IDs %v were removed because they were too large to send to Global Relay.", attachmentsRemovedPostIDs)
+			appErr = model.NewAppError("RunExport", "ent.compliance.global_relay.attachments_removed.appError", map[string]any{"Description": description}, description, http.StatusAccepted)
+			return warningCount, appErr
+		}
+	default:
+		err := errors.New("Unknown output format " + exportType)
+		return warningCount, model.NewAppError("RunExport", "ent.compliance.bad_export_type.appError", map[string]any{"ExportType": exportType}, "", http.StatusBadRequest).Wrap(err)
+	}
+	return warningCount, nil
+}
diff --git a/server/enterprise/message_export/message_export_test.go b/server/enterprise/message_export/message_export_test.go
new file mode 100644
index 00000000000..ad41c2c79cb
--- /dev/null
+++ b/server/enterprise/message_export/message_export_test.go
@@ -0,0 +1,145 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "context"
+ "os"
+ "strconv"
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/api4"
+ "github.com/mattermost/mattermost/server/v8/channels/jobs"
+ "github.com/mattermost/mattermost/server/v8/channels/store/storetest"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TestRunExportByType verifies that an Actiance export of a post with missing
+// user fields (no UserId/UserEmail/Username) completes without error or
+// warnings, exercising the nil-field normalization in runExportByType. The
+// export is written to a local temp-dir file backend and the store is mocked
+// to report no channel members.
+func TestRunExportByType(t *testing.T) {
+	tempDir, err := os.MkdirTemp("", "")
+	require.NoError(t, err)
+	t.Cleanup(func() {
+		err = os.RemoveAll(tempDir)
+		assert.NoError(t, err)
+	})
+
+	// local-disk file backend rooted at the temp dir
+	config := filestore.FileBackendSettings{
+		DriverName: model.ImageDriverLocal,
+		Directory:  tempDir,
+	}
+
+	fileBackend, err := filestore.NewFileBackend(config)
+	require.NoError(t, err)
+
+	rctx := request.TestContext(t)
+
+	chanTypeDirect := model.ChannelTypeDirect
+	t.Run("missing user info", func(t *testing.T) {
+		// deliberately omits UserId, UserEmail and Username
+		posts := []*model.MessageExport{
+			{
+				PostId:             model.NewPointer("post-id"),
+				PostOriginalId:     model.NewPointer("post-original-id"),
+				TeamId:             model.NewPointer("team-id"),
+				TeamName:           model.NewPointer("team-name"),
+				TeamDisplayName:    model.NewPointer("team-display-name"),
+				ChannelId:          model.NewPointer("channel-id"),
+				ChannelName:        model.NewPointer("channel-name"),
+				ChannelDisplayName: model.NewPointer("channel-display-name"),
+				PostCreateAt:       model.NewPointer(int64(1)),
+				PostUpdateAt:       model.NewPointer(int64(1)),
+				PostMessage:        model.NewPointer("message"),
+				ChannelType:        &chanTypeDirect,
+				PostFileIds:        []string{},
+			},
+		}
+
+		mockStore := &storetest.Store{}
+		defer mockStore.AssertExpectations(t)
+		mockStore.ChannelMemberHistoryStore.On("GetUsersInChannelDuring", int64(1), int64(1), "channel-id").Return([]*model.ChannelMemberHistoryResult{}, nil)
+
+		warnings, err := runExportByType(rctx, model.ComplianceExportTypeActiance, posts, tempDir, mockStore, fileBackend, fileBackend, nil, nil)
+		require.Nil(t, err)
+		require.Zero(t, warnings)
+	})
+}
+
+// runJobForTest creates a message export job through the API, polls once per
+// second until the job reaches a terminal state, and fails the test if it has
+// not succeeded within ten seconds. It returns the finished job.
+func runJobForTest(t *testing.T, th *api4.TestHelper) *model.Job {
+	t.Helper()
+
+	job, _, err := th.SystemAdminClient.CreateJob(context.Background(), &model.Job{Type: "message_export"})
+	require.NoError(t, err)
+	// poll until completion
+	doneChan := make(chan bool)
+	go func() {
+		defer close(doneChan)
+		for {
+			// NOTE(review): require.* in a goroutine calls t.FailNow from a
+			// non-test goroutine, which the testing package documents as
+			// unsupported; if this flakes, report the error over a channel and
+			// assert from the test goroutine instead.
+			jobs, _, err := th.SystemAdminClient.GetJobsByType(context.Background(), "message_export", 0, 1)
+			require.NoError(t, err)
+			require.Len(t, jobs, 1)
+			require.Equal(t, job.Id, jobs[0].Id)
+			job = jobs[0]
+			if job.Status != "pending" && job.Status != "in_progress" {
+				break
+			}
+			time.Sleep(1 * time.Second)
+		}
+		require.Equal(t, "success", job.Status)
+	}()
+	select {
+	case <-doneChan:
+	case <-time.After(10 * time.Second):
+		// require.Fail is the idiomatic replacement for require.True(t, false, ...)
+		require.Fail(t, "job is taking too long")
+	}
+	return job
+}
+
+// TestRunExportJob runs the message export job end-to-end against an
+// enterprise test server: it registers the worker and scheduler, enables
+// export, and verifies that three posts sharing the same CreateAt millisecond
+// are all exported even with a batch size (2) smaller than the post count —
+// i.e. batching does not drop posts with identical timestamps. Skipped in
+// -short mode.
+func TestRunExportJob(t *testing.T) {
+	if testing.Short() {
+		t.Skip("skipping test in short mode.")
+	}
+
+	// speed up job pickup for the test
+	jobs.DefaultWatcherPollingInterval = 100
+	th := api4.SetupEnterprise(t).InitBasic()
+	th.App.Srv().SetLicense(model.NewTestLicense("message_export"))
+	defer th.TearDown()
+	messageExportImpl := MessageExportJobInterfaceImpl{th.App.Srv()}
+	th.App.Srv().Jobs.RegisterJobType(model.JobTypeMessageExport, messageExportImpl.MakeWorker(), messageExportImpl.MakeScheduler())
+
+	err := th.App.Srv().Jobs.StartWorkers()
+	require.NoError(t, err)
+
+	err = th.App.Srv().Jobs.StartSchedulers()
+	require.NoError(t, err)
+
+	th.App.UpdateConfig(func(cfg *model.Config) {
+		*cfg.MessageExportSettings.EnableExport = true
+	})
+
+	t.Run("conflicting timestamps", func(t *testing.T) {
+		time.Sleep(100 * time.Millisecond)
+		now := model.GetMillis()
+		th.App.UpdateConfig(func(cfg *model.Config) {
+			*cfg.MessageExportSettings.ExportFromTimestamp = now - 1
+			*cfg.MessageExportSettings.BatchSize = 2
+		})
+
+		// three posts with the identical CreateAt, forcing a batch boundary
+		// inside a run of equal timestamps
+		for i := 0; i < 3; i++ {
+			_, err := th.App.Srv().Store().Post().Save(th.Context, &model.Post{
+				ChannelId: th.BasicChannel.Id,
+				UserId:    model.NewId(),
+				Message:   "zz" + model.NewId() + "b",
+				CreateAt:  now,
+			})
+			require.NoError(t, err)
+		}
+
+		job := runJobForTest(t, th)
+		numExported, err := strconv.ParseInt(job.Data["messages_exported"], 0, 64)
+		require.NoError(t, err)
+		require.Equal(t, int64(3), numExported)
+	})
+}
diff --git a/server/enterprise/message_export/scheduler.go b/server/enterprise/message_export/scheduler.go
new file mode 100644
index 00000000000..f100fb3fcc2
--- /dev/null
+++ b/server/enterprise/message_export/scheduler.go
@@ -0,0 +1,76 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "net/http"
+ "time"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/jobs"
+ ejobs "github.com/mattermost/mattermost/server/v8/einterfaces/jobs"
+)
+
+// MessageExportScheduler schedules the daily message export job. Whether the
+// job is enabled is delegated to enabledFunc so the license/config check can
+// be injected by the caller.
+type MessageExportScheduler struct {
+	jobServer   *jobs.JobServer
+	enabledFunc func(cfg *model.Config) bool
+}
+
+// compile-time check that MessageExportScheduler satisfies jobs.Scheduler
+var _ jobs.Scheduler = (*MessageExportScheduler)(nil)
+
+// NewMessageExportScheduler builds a scheduler that uses enabledFunc to decide
+// whether the message export job should run.
+func NewMessageExportScheduler(jobServer *jobs.JobServer, enabledFunc func(cfg *model.Config) bool) *MessageExportScheduler {
+	return &MessageExportScheduler{
+		enabledFunc: enabledFunc,
+		jobServer:   jobServer,
+	}
+}
+
+// Enabled reports whether the message export job should be scheduled for the
+// given config, by delegating to the injected enabledFunc.
+func (s *MessageExportScheduler) Enabled(cfg *model.Config) bool {
+	return s.enabledFunc(cfg)
+}
+
+// NextScheduleTime returns the next daily run derived from the
+// MessageExportSettings.DailyRunTime wall-clock value ("15:04" layout), or nil
+// (never schedule) if that config value cannot be parsed.
+func (s *MessageExportScheduler) NextScheduleTime(cfg *model.Config, now time.Time, _ bool, _ *model.Job) *time.Time {
+	// We set the next scheduled time regardless of whether there is a running or pending job
+	// In ScheduleJob we check pending or running jobs, before actually scheduling a job
+	parsedTime, err := time.Parse("15:04", *cfg.MessageExportSettings.DailyRunTime)
+	if err != nil {
+		s.jobServer.Logger().Error(
+			"Cannot determine next schedule time for message export. DailyRunTime config value is invalid.",
+			mlog.String("DailyRunTime", *cfg.MessageExportSettings.DailyRunTime),
+		)
+		return nil
+	}
+	return jobs.GenerateNextStartDateTime(now, parsedTime)
+}
+
+// ScheduleJob creates a new message export job unless one is already pending
+// or in progress, in which case it returns (nil, nil).
+func (s *MessageExportScheduler) ScheduleJob(rctx request.CTX, _ *model.Config, havePendingJobs bool, _ *model.Job) (*model.Job, *model.AppError) {
+	// Don't schedule a job if we already have a pending job
+	if havePendingJobs {
+		return nil, nil
+	}
+	// Don't schedule a job if we already have a running job
+	count, err := s.jobServer.Store.Job().GetCountByStatusAndType(model.JobStatusInProgress, model.JobTypeMessageExport)
+	if err != nil {
+		return nil, model.NewAppError(
+			"ScheduleJob",
+			"app.job.get_count_by_status_and_type.app_error",
+			map[string]any{"jobtype": model.JobTypeMessageExport, "status": model.JobStatusInProgress},
+			"",
+			http.StatusInternalServerError).Wrap(err)
+	}
+	if count > 0 {
+		return nil, nil
+	}
+	return s.jobServer.CreateJob(rctx, model.JobTypeMessageExport, nil)
+}
+
+// MakeScheduler returns the scheduler for the message export job. The job is
+// enabled only when the server holds a license with the MessageExport feature
+// and message export is enabled in the config.
+func (m *MessageExportJobInterfaceImpl) MakeScheduler() ejobs.Scheduler {
+	// receiver renamed from "dr" (a copy-paste leftover) to "m" for
+	// consistency with this type's naming
+	enabled := func(cfg *model.Config) bool {
+		license := m.Server.License()
+		return license != nil && *license.Features.MessageExport && *cfg.MessageExportSettings.EnableExport
+	}
+	return NewMessageExportScheduler(m.Server.Jobs, enabled)
+}
diff --git a/server/enterprise/message_export/scheduler_test.go b/server/enterprise/message_export/scheduler_test.go
new file mode 100644
index 00000000000..5371a12c778
--- /dev/null
+++ b/server/enterprise/message_export/scheduler_test.go
@@ -0,0 +1,98 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/v8/channels/api4"
+ "github.com/mattermost/mattermost/server/v8/channels/store/storetest/mocks"
+)
+
+// TestMessageExportJobEnabled verifies the scheduler's Enabled gate: true when
+// a "message_export" license is set and EnableExport is on; false when no
+// license is present even with EnableExport on.
+func TestMessageExportJobEnabled(t *testing.T) {
+	t.Run("MessageExport job is enabled only if feature is enabled", func(t *testing.T) {
+		th := api4.SetupEnterpriseWithStoreMock(t)
+		defer th.TearDown()
+
+		th.Server.SetLicense(model.NewTestLicense("message_export"))
+
+		messageExport := &MessageExportJobInterfaceImpl{th.App.Srv()}
+
+		config := &model.Config{
+			MessageExportSettings: model.MessageExportSettings{
+				EnableExport: model.NewPointer(true),
+			},
+		}
+		scheduler := messageExport.MakeScheduler()
+		result := scheduler.Enabled(config)
+		assert.True(t, result)
+	})
+
+	t.Run("MessageExport job is disabled if there is no license", func(t *testing.T) {
+		th := api4.SetupEnterpriseWithStoreMock(t)
+		defer th.TearDown()
+
+		th.Server.SetLicense(nil)
+
+		messageExport := &MessageExportJobInterfaceImpl{th.App.Srv()}
+
+		config := &model.Config{
+			MessageExportSettings: model.MessageExportSettings{
+				EnableExport: model.NewPointer(true),
+			},
+		}
+		scheduler := messageExport.MakeScheduler()
+		result := scheduler.Enabled(config)
+		assert.False(t, result)
+	})
+}
+
+// TestMessageExportJobPending verifies that ScheduleJob refuses to create a
+// job both when pending jobs already exist (havePendingJobs=true) and when the
+// mocked job store reports a message export job already in progress.
+func TestMessageExportJobPending(t *testing.T) {
+	th := api4.SetupEnterpriseWithStoreMock(t)
+	defer th.TearDown()
+
+	// baseline store mocks needed for the test server to run
+	mockStore := th.App.Srv().Platform().Store.(*mocks.Store)
+	mockUserStore := mocks.UserStore{}
+	mockUserStore.On("Count", mock.Anything).Return(int64(10), nil)
+	mockPostStore := mocks.PostStore{}
+	mockPostStore.On("GetMaxPostSize").Return(65535, nil)
+	mockSystemStore := mocks.SystemStore{}
+	mockSystemStore.On("GetByName", "UpgradedFromTE").Return(&model.System{Name: "UpgradedFromTE", Value: "false"}, nil)
+	mockSystemStore.On("GetByName", "InstallationDate").Return(&model.System{Name: "InstallationDate", Value: "10"}, nil)
+	mockSystemStore.On("GetByName", "FirstServerRunTimestamp").Return(&model.System{Name: "FirstServerRunTimestamp", Value: "10"}, nil)
+	mockStore.On("User").Return(&mockUserStore)
+	mockStore.On("Post").Return(&mockPostStore)
+	mockStore.On("System").Return(&mockSystemStore)
+	mockStore.On("GetDBSchemaVersion").Return(1, nil)
+
+	mockJobServerStore := th.App.Srv().Jobs.Store.(*mocks.Store)
+	mockJobStore := mocks.JobStore{}
+	// Mock that we have an in-progress message export job
+	mockJobStore.On("GetCountByStatusAndType", model.JobStatusInProgress, model.JobTypeMessageExport).Return(int64(1), nil)
+	mockJobServerStore.On("Job").Return(&mockJobStore)
+
+	th.App.UpdateConfig(func(cfg *model.Config) {
+		*cfg.MessageExportSettings.EnableExport = true
+		*cfg.MessageExportSettings.DailyRunTime = "10:40"
+	})
+
+	th.App.Srv().SetLicense(model.NewTestLicense("message_export"))
+
+	messageExport := &MessageExportJobInterfaceImpl{th.App.Srv()}
+	scheduler := messageExport.MakeScheduler()
+
+	// Confirm that job is not scheduled if we have pending jobs
+	job, err := scheduler.ScheduleJob(th.Context, th.App.Config(), true, nil)
+	assert.Nil(t, err)
+	assert.Nil(t, job)
+
+	// Confirm that job is not scheduled if we have an inprogress job
+	job, err = scheduler.ScheduleJob(th.Context, th.App.Config(), false, nil)
+	assert.Nil(t, err)
+	assert.Nil(t, job)
+}
diff --git a/server/enterprise/message_export/worker.go b/server/enterprise/message_export/worker.go
new file mode 100644
index 00000000000..3c97073ceb5
--- /dev/null
+++ b/server/enterprise/message_export/worker.go
@@ -0,0 +1,579 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "archive/zip"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "path"
+ "strconv"
+ "sync"
+ "time"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/jobs"
+ "github.com/mattermost/mattermost/server/v8/channels/utils/fileutils"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/templates"
+)
+
const (
	// Keyset-pagination cursor keys: message export pages through posts sorted by
	// (posts.updateat, posts.id). These hold the values from the last post of the
	// previous batch so the next batch resumes immediately after it.
	JobDataBatchStartTimestamp = "batch_start_timestamp" // posts.updateat from the previous batch
	JobDataBatchStartId        = "batch_start_id"        // posts.id from the previous batch

	// Job-wide lower bounds and bookkeeping, persisted in model.Job.Data.
	JobDataStartTimestamp = "start_timestamp" // updateat lower bound for the whole job
	JobDataStartId        = "start_id"        // post id lower bound for the whole job
	JobDataExportType     = "export_type"     // which export format to run (see runExportByType)
	// NOTE(review): underscore naming is inconsistent with the sibling JobDataXxx
	// constants; renaming would touch every caller, so it is kept as-is here.
	JOB_DATA_BatchSize      = "batch_size"
	JobDataMessagesExported = "messages_exported" // running total of exported posts
	JobDataWarningCount     = "warning_count"
	JobDataIsDownloadable   = "is_downloadable" // "true" when a zip of all batches was produced
	JobDirectories          = "job_directories" // JSON array of per-batch output directories
	TimeBetweenBatches      = 100               // pause between batches, in milliseconds

	// Fallback used for progress reporting when counting posts fails or times out.
	estimatedPostCount = 10_000_000
)

// exportPath is the root directory for export output inside the file backend.
const exportPath = "export"
+
// MessageExportWorker executes compliance message export jobs received over
// the jobs channel, batching posts out of the store and writing them to the
// configured export file backend.
type MessageExportWorker struct {
	name string
	// stateMut protects stopCh, cancel, and stopped and helps enforce
	// ordering in case subsequent Run or Stop calls are made.
	stateMut sync.Mutex
	stopCh   chan struct{}
	stopped  bool
	// stoppedCh is signaled by Run when the worker loop has fully exited;
	// Stop blocks on it.
	stoppedCh chan struct{}
	jobs      chan model.Job
	jobServer *jobs.JobServer
	logger    mlog.LoggerIFace
	// htmlTemplateWatcher is handed to runExportByType for formats that
	// render templates.
	htmlTemplateWatcher *templates.Container
	license             func() *model.License

	// context/cancel let Stop abort an in-flight SQL query; see MakeWorker
	// for why a context is (unusually) stored on the struct.
	context context.Context
	cancel  func()
}
+
+func (dr *MessageExportJobInterfaceImpl) MakeWorker() model.Worker {
+ const workerName = "MessageExportWorker"
+ logger := dr.Server.Jobs.Logger().With(mlog.String("worker_name", workerName))
+
+ templatesDir, ok := fileutils.FindDir("templates")
+ if !ok {
+ logger.Error("Failed to initialize HTMLTemplateWatcher, templates directory not found")
+ return nil
+ }
+ htmlTemplateWatcher, err := templates.New(templatesDir)
+ if err != nil {
+ logger.Error("Failed to initialize HTMLTemplateWatcher", mlog.Err(err))
+ return nil
+ }
+
+ ctx, cancel := context.WithCancel(context.Background())
+
+ return &MessageExportWorker{
+ name: workerName,
+ stoppedCh: make(chan struct{}, 1),
+ jobs: make(chan model.Job),
+ jobServer: dr.Server.Jobs,
+ logger: logger,
+ htmlTemplateWatcher: htmlTemplateWatcher,
+ // It is not a best practice to store context inside a struct,
+ // however we need to cancel a SQL query during a job execution.
+ // There is no other good way.
+ context: ctx,
+ cancel: cancel,
+ license: dr.Server.License,
+ stopped: true,
+ }
+}
+
+func (worker *MessageExportWorker) IsEnabled(cfg *model.Config) bool {
+ return worker.license() != nil && *worker.license().Features.MessageExport && *cfg.MessageExportSettings.EnableExport
+}
+
// Run starts the worker loop, consuming jobs from the jobs channel until Stop
// is called. It is safe to call again after Stop (e.g. after a config change):
// the stop channel and the SQL-cancellation context are re-created on restart.
// Run blocks until stopped, so callers invoke it on its own goroutine.
func (worker *MessageExportWorker) Run() {
	worker.stateMut.Lock()
	// We have to re-assign the stop channel again, because
	// it might happen that the job was restarted due to a config change.
	if worker.stopped {
		worker.stopped = false
		worker.stopCh = make(chan struct{})
		worker.context, worker.cancel = context.WithCancel(context.Background())
	} else {
		// Already running; nothing to do.
		worker.stateMut.Unlock()
		return
	}
	// Run is called from a separate goroutine and doesn't return.
	// So we cannot Unlock in a defer clause.
	worker.stateMut.Unlock()

	worker.logger.Debug("Worker Started")

	defer func() {
		worker.logger.Debug("Worker finished")
		// Signal Stop that the loop has fully exited.
		worker.stoppedCh <- struct{}{}
	}()

	for {
		select {
		case <-worker.stopCh:
			worker.logger.Debug("Worker: Received stop signal")
			return
		case job := <-worker.jobs:
			worker.DoJob(&job)
		}
	}
}
+
// Stop shuts the worker down: it cancels the worker context (aborting any
// in-flight SQL query), closes the stop channel, and waits until the Run loop
// has fully exited. A second Stop while already stopped returns immediately.
func (worker *MessageExportWorker) Stop() {
	worker.stateMut.Lock()
	defer worker.stateMut.Unlock()

	// Set to close, and if already closed before, then return.
	if worker.stopped {
		return
	}
	worker.stopped = true

	worker.logger.Debug("Worker: Stopping")
	worker.cancel()
	close(worker.stopCh)
	// Block until Run signals that it has finished.
	<-worker.stoppedCh
}
+
+func (worker *MessageExportWorker) JobChannel() chan<- model.Job {
+ return worker.jobs
+}
+
+// getExportBackend returns the file backend where the export will be created.
+func (worker *MessageExportWorker) getExportBackend(rctx request.CTX) (filestore.FileBackend, *model.AppError) {
+ config := worker.jobServer.Config()
+ insecure := config.ServiceSettings.EnableInsecureOutgoingConnections
+
+ if config.FileSettings.DedicatedExportStore != nil && *config.FileSettings.DedicatedExportStore {
+ rctx.Logger().Debug("Worker: using dedicated export filestore", mlog.String("driver_name", *config.FileSettings.ExportDriverName))
+ backend, errFileBack := filestore.NewExportFileBackend(filestore.NewExportFileBackendSettingsFromConfig(&config.FileSettings, true, insecure != nil && *insecure))
+ if errFileBack != nil {
+ return nil, model.NewAppError("getFileBackend", "api.file.no_driver.app_error", nil, "", http.StatusInternalServerError).Wrap(errFileBack)
+ }
+
+ return backend, nil
+ }
+
+ backend, err := filestore.NewFileBackend(filestore.NewFileBackendSettingsFromConfig(&config.FileSettings, true, insecure != nil && *insecure))
+ if err != nil {
+ return nil, model.NewAppError("getFileBackend", "api.file.no_driver.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+ return backend, nil
+}
+
+// getFileAttachmentBackend returns the file backend where file attachments are
+// located for messages that will be exported. This may be the same backend
+// where the export will be created.
+func (worker *MessageExportWorker) getFileAttachmentBackend(rctx request.CTX) (filestore.FileBackend, *model.AppError) {
+ config := worker.jobServer.Config()
+ insecure := config.ServiceSettings.EnableInsecureOutgoingConnections
+
+ backend, err := filestore.NewFileBackend(filestore.NewFileBackendSettingsFromConfig(&config.FileSettings, true, insecure != nil && *insecure))
+ if err != nil {
+ return nil, model.NewAppError("getFileBackend", "api.file.no_driver.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
+ }
+ return backend, nil
+}
+
// DoJob claims and executes a single message export job. It restores (or
// initializes) the keyset cursor from job.Data, then repeatedly pulls batches
// of posts from the compliance store and hands them to runExportByType,
// persisting job.Data after every batch so an interrupted job can resume.
// The job ends in success/warning when a batch comes back empty, is set back
// to pending when the worker is stopped mid-run, and is canceled or errored
// otherwise.
func (worker *MessageExportWorker) DoJob(job *model.Job) {
	logger := worker.logger.With(jobs.JobLoggerFields(job)...)
	logger.Debug("Worker: Received a new candidate job.")
	defer worker.jobServer.HandleJobPanic(logger, job)

	claimed, appErr := worker.jobServer.ClaimJob(job)
	if appErr != nil {
		logger.Info("Worker: Error occurred while trying to claim job", mlog.Err(appErr))
		return
	}

	// Another worker claimed the job first; nothing to do.
	if !claimed {
		return
	}

	// Watch for explicit cancellation requests for this job id; the watcher
	// signals cancelWatcherChan and is itself stopped via cancelCancelWatcher.
	var cancelContext request.CTX = request.EmptyContext(worker.logger)
	cancelCtx, cancelCancelWatcher := context.WithCancel(context.Background())
	cancelWatcherChan := make(chan struct{}, 1)
	cancelContext = cancelContext.WithContext(cancelCtx)
	go worker.jobServer.CancellationWatcher(cancelContext, job.Id, cancelWatcherChan)
	defer cancelCancelWatcher()

	// if job data is missing, we'll do our best to recover
	worker.initJobData(logger, job)

	// the initJobData call above populates the create_at timestamp of the first post that we should export
	// incase of job resumption or new job
	batchStartTime, err := strconv.ParseInt(job.Data[JobDataBatchStartTimestamp], 10, 64)
	if err != nil {
		worker.setJobError(logger, job, model.NewAppError("Job.DoJob", model.NoTranslation, nil, "", http.StatusBadRequest).Wrap((err)))
		return
	}
	batchStartId := job.Data[JobDataBatchStartId]

	jobStartTime, err := strconv.ParseInt(job.Data[JobDataStartTimestamp], 10, 64)
	if err != nil {
		worker.setJobError(logger, job, model.NewAppError("Job.DoJob", model.NoTranslation, nil, "", http.StatusBadRequest).Wrap((err)))
		return
	}
	jobStartId := job.Data[JobDataStartId]

	batchSize, err := strconv.Atoi(job.Data[JOB_DATA_BatchSize])
	if err != nil {
		worker.setJobError(logger, job, model.NewAppError("Job.DoJob", model.NoTranslation, nil, "", http.StatusBadRequest).Wrap((err)))
		return
	}

	totalPostsExported, err := strconv.ParseInt(job.Data[JobDataMessagesExported], 10, 64)
	if err != nil {
		worker.setJobError(logger, job, model.NewAppError("Job.DoJob", model.NoTranslation, nil, "", http.StatusBadRequest).Wrap((err)))
		return
	}

	// directories accumulates the per-batch output paths; it is persisted in
	// job.Data as JSON so a resumed job can still zip all batches at the end.
	var directories []string
	err = json.Unmarshal([]byte(job.Data[JobDirectories]), &directories)
	if err != nil {
		worker.setJobError(logger, job, model.NewAppError("Job.DoJob", model.NoTranslation, nil, "", http.StatusBadRequest).Wrap((err)))
		return
	}

	// Counting all posts may fail or timeout when the posts table is large. If this happens, log a warning, but carry
	// on with the job anyway. The only issue is that the progress % reporting will be inaccurate.
	var totalPosts int64
	if count, err := worker.jobServer.Store.Post().AnalyticsPostCount(&model.PostCountOptions{ExcludeSystemPosts: true, SincePostID: jobStartId, SinceUpdateAt: jobStartTime}); err != nil {
		logger.Warn("Worker: Failed to fetch total post count for job. An estimated value will be used for progress reporting.", mlog.Err(err))
		totalPosts = estimatedPostCount
	} else {
		totalPosts = count
	}

	var totalWarningCount int64
	cursor := model.MessageExportCursor{LastPostUpdateAt: batchStartTime, LastPostId: batchStartId}
	for {
		select {
		case <-cancelWatcherChan:
			logger.Debug("Worker: Job has been canceled via CancellationWatcher")
			worker.setJobCanceled(logger, job)
			return

		case <-worker.stopCh:
			logger.Debug("Worker: Job has been canceled via Worker Stop. Setting the job back to pending")
			worker.SetJobPending(logger, job)
			return

		case <-time.After(TimeBetweenBatches * time.Millisecond):
			logger.Debug("Starting batch export", mlog.Int("last_post_update_at", cursor.LastPostUpdateAt))
			rctx := request.EmptyContext(logger).WithContext(worker.context)
			prevPostUpdateAt := cursor.LastPostUpdateAt

			var postsExported []*model.MessageExport
			var nErr error
			postsExported, cursor, nErr = worker.jobServer.Store.Compliance().MessageExport(rctx, cursor, batchSize)
			if nErr != nil {
				// If the query failed because the worker's context was
				// canceled (worker shutdown), requeue the job instead of
				// failing it.
				if worker.context.Err() == context.Canceled {
					logger.Debug("Worker: Job has been canceled via worker's context. Setting the job back to pending")
					worker.SetJobPending(logger, job)
				} else {
					worker.setJobError(logger, job, model.NewAppError("DoJob", "ent.message_export.run_export.app_error", nil, "", http.StatusInternalServerError).Wrap(nErr))
				}
				return
			}
			logger.Debug("Found posts to export", mlog.Int("number_of_posts", len(postsExported)))
			totalPostsExported += int64(len(postsExported))
			job.Data[JobDataMessagesExported] = strconv.FormatInt(totalPostsExported, 10)
			job.Data[JobDataBatchStartTimestamp] = strconv.FormatInt(cursor.LastPostUpdateAt, 10)
			job.Data[JobDataBatchStartId] = cursor.LastPostId

			if len(postsExported) == 0 {
				job.Data[JobDataWarningCount] = strconv.FormatInt(totalWarningCount, 10)
				// we've exported everything up to the current time
				logger.Debug("FormatExport complete")

				// Create downloadable zip file of all batches.
				if job.Data[JobDataExportType] != model.ComplianceExportTypeGlobalrelay {
					exportBackend, err := worker.getExportBackend(rctx)
					if err != nil {
						worker.setJobError(logger, job, err)
						return
					}

					// A zip failure downgrades downloadability but does not
					// fail the job: the batch directories are still on the
					// backend.
					zipErr := createZipFile(rctx, exportBackend, job.Id, directories)
					if zipErr != nil {
						logger.Error("Error creating zip file for export", mlog.Err(zipErr))
						job.Data[JobDataIsDownloadable] = "false"
					} else {
						job.Data[JobDataIsDownloadable] = "true"
					}
				}
				if totalWarningCount > 0 {
					worker.setJobWarning(logger, job)
				} else {
					worker.setJobSuccess(logger, job)
				}
				return
			}

			exportBackend, err := worker.getExportBackend(rctx)
			if err != nil {
				worker.setJobError(logger, job, err)
				return
			}

			fileAttachmentBackend, err := worker.getFileAttachmentBackend(rctx)
			if err != nil {
				worker.setJobError(logger, job, err)
				return
			}

			// Each batch gets its own output directory named after its
			// update_at bounds.
			batchDirectory := getOutputDirectoryPath(prevPostUpdateAt, cursor.LastPostUpdateAt)
			warningCount, err := runExportByType(
				rctx,
				job.Data[JobDataExportType],
				postsExported,
				batchDirectory,
				worker.jobServer.Store,
				exportBackend,
				fileAttachmentBackend,
				worker.htmlTemplateWatcher,
				worker.jobServer.Config(),
			)
			if err != nil {
				worker.setJobError(logger, job, err)
				return
			}

			totalWarningCount += warningCount

			directories = append(directories, batchDirectory)
			directoriesBytes, e := json.Marshal(directories)
			if e != nil {
				worker.setJobError(logger, job, model.NewAppError("Job.DoJob", model.NoTranslation, nil, "", http.StatusInternalServerError).Wrap((e)))
				return
			}
			job.Data[JobDirectories] = string(directoriesBytes)

			// also saves the last post create time
			if err := worker.jobServer.SetJobProgress(job, getJobProgress(totalPostsExported, totalPosts)); err != nil {
				worker.setJobError(logger, job, err)
				return
			}
		}
	}
}
+
+func createZipFile(rctx request.CTX, fileBackend filestore.FileBackend, jobId string, directories []string) error {
+ zipFileName := jobId + ".zip"
+
+ dest, err := os.CreateTemp("", zipFileName)
+ if err != nil {
+ return err
+ }
+ defer os.Remove(dest.Name())
+
+ // Create a new zip archive.
+ w := zip.NewWriter(dest)
+
+ // create a 32 KiB buffer for copying files
+ buf := make([]byte, 32*1024)
+
+ // Add directories to the archive.
+ for _, directory := range directories {
+ err = addFiles(w, fileBackend, directory, buf)
+ if err != nil {
+ return err
+ }
+ }
+
+ // Make sure to check the error on Close.
+ err = w.Close()
+ if err != nil {
+ return fmt.Errorf("error closing zip file: %s %v", dest.Name(), err)
+ }
+
+ _, err = dest.Seek(0, 0)
+ if err != nil {
+ return fmt.Errorf("error seeking zip file: %s %v", dest.Name(), err)
+ }
+
+ zipPath := path.Join(exportPath, zipFileName)
+
+ // If the file backend allows it, we want to upload without a timeout
+ _, err = filestore.TryWriteFileContext(rctx.Context(), fileBackend, dest, zipPath)
+ return err
+}
+
+func addFiles(w *zip.Writer, fileBackend filestore.FileBackend, basePath string, buf []byte) error {
+ // Open the Directory
+ files, err := fileBackend.ListDirectoryRecursively(basePath)
+ if err != nil {
+ return err
+ }
+
+ for _, file := range files {
+ err = addFile(w, fileBackend, file, basePath, buf)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func addFile(w *zip.Writer, fileBackend filestore.FileBackend, file, basePath string, buf []byte) error {
+ // In some storage backends like Hitachi HCP, the first entry
+ // from a ListObjects API is always the dir entry itself.
+ if file == basePath {
+ return nil
+ }
+
+ size, err := fileBackend.FileSize(file)
+ if err != nil {
+ return fmt.Errorf("error reading file size for %s: %w", file, err)
+ }
+ if size == 0 {
+ // skip empty files
+ return nil
+ }
+
+ r, err := fileBackend.Reader(file)
+ if err != nil {
+ return fmt.Errorf("error opening file %s: %w", file, err)
+ }
+ defer r.Close()
+
+ // Add some files to the archive.
+ f, err := w.Create(file)
+ if err != nil {
+ return fmt.Errorf("error creating file %s in the archive: %w", file, err)
+ }
+ _, err = io.CopyBuffer(f, r, buf)
+ if err != nil {
+ return fmt.Errorf("error copying file %s into the archive: %w", file, err)
+ }
+
+ return nil
+}
+
// initJobData fills in any missing keys of job.Data so a job created without
// data (or by an older server) can still run. Batch size and export type
// default to config. The starting cursor resumes from the newest
// successful/warning job when one exists and carries cursor data; otherwise it
// falls back to MessageExportSettings.ExportFromTimestamp.
func (worker *MessageExportWorker) initJobData(logger mlog.LoggerIFace, job *model.Job) {
	if job.Data == nil {
		job.Data = make(map[string]string)
	}
	if _, exists := job.Data[JobDataMessagesExported]; !exists {
		job.Data[JobDataMessagesExported] = "0"
	}
	if _, exists := job.Data[JobDirectories]; !exists {
		// json null value
		job.Data[JobDirectories] = "null"
	}
	if _, exists := job.Data[JobDataExportType]; !exists {
		// default to the export format configured in MessageExportSettings
		logger.Info("Worker: Defaulting to configured export format")
		job.Data[JobDataExportType] = *worker.jobServer.Config().MessageExportSettings.ExportFormat
	}
	if _, exists := job.Data[JOB_DATA_BatchSize]; !exists {
		logger.Info("Worker: Defaulting to configured batch size")
		job.Data[JOB_DATA_BatchSize] = strconv.Itoa(*worker.jobServer.Config().MessageExportSettings.BatchSize)
	}
	if _, exists := job.Data[JobDataBatchStartTimestamp]; !exists {
		// No explicit cursor on this job: try to resume from the last job
		// that completed (success or warning).
		previousJob, err := worker.jobServer.Store.Job().GetNewestJobByStatusesAndType([]string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport)
		if err != nil {
			logger.Info("Worker: No previously successful job found, falling back to configured MessageExportSettings.ExportFromTimestamp")
			job.Data[JobDataBatchStartTimestamp] = strconv.FormatInt(*worker.jobServer.Config().MessageExportSettings.ExportFromTimestamp, 10)
			job.Data[JobDataBatchStartId] = ""
			job.Data[JobDataStartTimestamp] = job.Data[JobDataBatchStartTimestamp]
			job.Data[JobDataStartId] = job.Data[JobDataBatchStartId]
			return
		}

		logger.Info("Worker: Implicitly resuming export from where previously successful job left off")
		// Guard against a nil job / nil data so the lookups below are safe.
		if previousJob == nil {
			previousJob = &model.Job{}
		}
		if previousJob.Data == nil {
			previousJob.Data = make(map[string]string)
		}
		if _, prevExists := previousJob.Data[JobDataBatchStartTimestamp]; !prevExists {
			logger.Info("Worker: Previously successful job lacks job data, falling back to configured MessageExportSettings.ExportFromTimestamp")
			job.Data[JobDataBatchStartTimestamp] = strconv.FormatInt(*worker.jobServer.Config().MessageExportSettings.ExportFromTimestamp, 10)
		} else {
			job.Data[JobDataBatchStartTimestamp] = previousJob.Data[JobDataBatchStartTimestamp]
		}
		if _, prevExists := previousJob.Data[JobDataBatchStartId]; !prevExists {
			logger.Info("Worker: Previously successful job lacks post ID, falling back to empty string")
			job.Data[JobDataBatchStartId] = ""
		} else {
			job.Data[JobDataBatchStartId] = previousJob.Data[JobDataBatchStartId]
		}
		// The job-wide start bounds mirror the initial batch cursor.
		job.Data[JobDataStartTimestamp] = job.Data[JobDataBatchStartTimestamp]
		job.Data[JobDataStartId] = job.Data[JobDataBatchStartId]
	} else {
		logger.Info("Worker: FormatExport start time explicitly set", mlog.String("new_start_time", job.Data[JobDataBatchStartTimestamp]))
	}
}
+
// getJobProgress returns export progress as a percentage in [0, 100].
// totalPosts may be an estimate, or unavailable, so the raw ratio is guarded:
// a non-positive denominator (fix for a division-by-zero panic) or an
// overshoot both report 100.
func getJobProgress(totalExportedPosts, totalPosts int64) int64 {
	if totalPosts <= 0 {
		// Nothing (known) to export: report the job as fully progressed
		// instead of panicking on division by zero.
		return 100
	}
	progress := totalExportedPosts * 100 / totalPosts
	if progress > 100 {
		// totalPosts can be an underestimate; never report more than 100%.
		return 100
	}
	return progress
}
+
+func (worker *MessageExportWorker) setJobSuccess(logger mlog.LoggerIFace, job *model.Job) {
+ // setting progress causes the job data to be saved, which is necessary if we want the next job to pick up where this one left off
+ if err := worker.jobServer.SetJobProgress(job, 100); err != nil {
+ logger.Error("Worker: Failed to update progress for job", mlog.Err(err))
+ worker.setJobError(logger, job, err)
+ }
+ if err := worker.jobServer.SetJobSuccess(job); err != nil {
+ logger.Error("Worker: Failed to set success for job", mlog.Err(err))
+ worker.setJobError(logger, job, err)
+ }
+}
+
+func (worker *MessageExportWorker) setJobWarning(logger mlog.LoggerIFace, job *model.Job) {
+ // setting progress causes the job data to be saved, which is necessary if we want the next job to pick up where this one left off
+ if err := worker.jobServer.SetJobProgress(job, 100); err != nil {
+ logger.Error("Worker: Failed to update progress for job", mlog.Err(err))
+ worker.setJobError(logger, job, err)
+ }
+ if err := worker.jobServer.SetJobWarning(job); err != nil {
+ logger.Error("Worker: Failed to set warning for job", mlog.Err(err))
+ worker.setJobError(logger, job, err)
+ }
+}
+
+func (worker *MessageExportWorker) setJobError(logger mlog.LoggerIFace, job *model.Job, appError *model.AppError) {
+ logger.Error("Worker: Job error", mlog.Err(appError))
+ if err := worker.jobServer.SetJobError(job, appError); err != nil {
+ logger.Error("Worker: Failed to set job errorv", mlog.Err(err), mlog.NamedErr("set_error", appError))
+ }
+}
+
+func (worker *MessageExportWorker) setJobCanceled(logger mlog.LoggerIFace, job *model.Job) {
+ if err := worker.jobServer.SetJobCanceled(job); err != nil {
+ logger.Error("Worker: Failed to mark job as canceled", mlog.Err(err))
+ }
+}
+
+func (worker *MessageExportWorker) SetJobPending(logger mlog.LoggerIFace, job *model.Job) {
+ if err := worker.jobServer.SetJobPending(job); err != nil {
+ logger.Error("Worker: Failed to mark job as pending", mlog.Err(err))
+ }
+}
+
+func getOutputDirectoryPath(exportStartTime int64, exportEndTime int64) string {
+ return path.Join(exportPath, strconv.FormatInt(exportStartTime, 10)+"-"+strconv.FormatInt(exportEndTime, 10))
+}
diff --git a/server/enterprise/message_export/worker_test.go b/server/enterprise/message_export/worker_test.go
new file mode 100644
index 00000000000..68a74285c3d
--- /dev/null
+++ b/server/enterprise/message_export/worker_test.go
@@ -0,0 +1,499 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.enterprise for license information.
+
+package message_export
+
+import (
+ "archive/zip"
+ "bytes"
+ "context"
+ "net/http"
+ "os"
+ "path"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ tmock "github.com/stretchr/testify/mock"
+ "github.com/stretchr/testify/require"
+
+ "github.com/mattermost/mattermost/server/public/model"
+ "github.com/mattermost/mattermost/server/public/plugin/plugintest/mock"
+ "github.com/mattermost/mattermost/server/public/shared/mlog"
+ "github.com/mattermost/mattermost/server/public/shared/request"
+ "github.com/mattermost/mattermost/server/v8/channels/app"
+ "github.com/mattermost/mattermost/server/v8/channels/jobs"
+ "github.com/mattermost/mattermost/server/v8/channels/store/storetest"
+ "github.com/mattermost/mattermost/server/v8/channels/utils/testutils"
+ "github.com/mattermost/mattermost/server/v8/einterfaces/mocks"
+ "github.com/mattermost/mattermost/server/v8/platform/shared/filestore"
+ fmocks "github.com/mattermost/mattermost/server/v8/platform/shared/filestore/mocks"
+)
+
+func TestInitJobDataNoJobData(t *testing.T) {
+ logger := mlog.CreateConsoleTestLogger(t)
+ mockStore := &storetest.Store{}
+ defer mockStore.AssertExpectations(t)
+
+ job := &model.Job{
+ Id: model.NewId(),
+ CreateAt: model.GetMillis(),
+ Status: model.JobStatusPending,
+ Type: model.JobTypeMessageExport,
+ }
+
+ // mock job store doesn't return a previously successful job, forcing fallback to config
+ mockStore.JobStore.On("GetNewestJobByStatusesAndType", []string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport).Return(nil, model.NewAppError("", "", nil, "", http.StatusBadRequest))
+
+ worker := &MessageExportWorker{
+ jobServer: &jobs.JobServer{
+ Store: mockStore,
+ ConfigService: &testutils.StaticConfigService{
+ Cfg: &model.Config{
+ // mock config
+ MessageExportSettings: model.MessageExportSettings{
+ EnableExport: model.NewPointer(true),
+ ExportFormat: model.NewPointer(model.ComplianceExportTypeActiance),
+ DailyRunTime: model.NewPointer("01:00"),
+ ExportFromTimestamp: model.NewPointer(int64(0)),
+ BatchSize: model.NewPointer(10000),
+ },
+ },
+ },
+ },
+ logger: logger,
+ }
+
+ // actually execute the code under test
+ worker.initJobData(logger, job)
+
+ assert.Equal(t, model.ComplianceExportTypeActiance, job.Data[JobDataExportType])
+ assert.Equal(t, strconv.Itoa(*worker.jobServer.Config().MessageExportSettings.BatchSize), job.Data[JOB_DATA_BatchSize])
+ assert.Equal(t, strconv.FormatInt(*worker.jobServer.Config().MessageExportSettings.ExportFromTimestamp, 10), job.Data[JobDataBatchStartTimestamp])
+}
+
// TestInitJobDataPreviousJobNoJobData verifies that when a previous successful
// job exists but carries no job data, initJobData still falls back to the
// configured export settings.
func TestInitJobDataPreviousJobNoJobData(t *testing.T) {
	logger := mlog.CreateConsoleTestLogger(t)
	mockStore := &storetest.Store{}
	defer mockStore.AssertExpectations(t)

	// Previous successful job with a nil Data map.
	previousJob := &model.Job{
		Id:             model.NewId(),
		CreateAt:       model.GetMillis(),
		Status:         model.JobStatusSuccess,
		Type:           model.JobTypeMessageExport,
		StartAt:        model.GetMillis() - 1000,
		LastActivityAt: model.GetMillis() - 1000,
	}

	job := &model.Job{
		Id:       model.NewId(),
		CreateAt: model.GetMillis(),
		Status:   model.JobStatusPending,
		Type:     model.JobTypeMessageExport,
	}

	// mock job store returns a previously successful job, but it doesn't have job data either, so we still fall back to config
	mockStore.JobStore.On("GetNewestJobByStatusesAndType", []string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport).Return(previousJob, nil)

	worker := &MessageExportWorker{
		jobServer: &jobs.JobServer{
			Store: mockStore,
			ConfigService: &testutils.StaticConfigService{
				Cfg: &model.Config{
					// mock config
					MessageExportSettings: model.MessageExportSettings{
						EnableExport:        model.NewPointer(true),
						ExportFormat:        model.NewPointer(model.ComplianceExportTypeActiance),
						DailyRunTime:        model.NewPointer("01:00"),
						ExportFromTimestamp: model.NewPointer(int64(0)),
						BatchSize:           model.NewPointer(10000),
					},
				},
			},
		},
		logger: logger,
	}

	// actually execute the code under test
	worker.initJobData(logger, job)

	assert.Equal(t, model.ComplianceExportTypeActiance, job.Data[JobDataExportType])
	assert.Equal(t, strconv.Itoa(*worker.jobServer.Config().MessageExportSettings.BatchSize), job.Data[JOB_DATA_BatchSize])
	assert.Equal(t, strconv.FormatInt(*worker.jobServer.Config().MessageExportSettings.ExportFromTimestamp, 10), job.Data[JobDataBatchStartTimestamp])
}
+
// TestInitJobDataPreviousJobWithJobData verifies that when the previous
// successful job carries a batch-start cursor, the new job resumes from it
// rather than from the configured timestamp.
func TestInitJobDataPreviousJobWithJobData(t *testing.T) {
	logger := mlog.CreateConsoleTestLogger(t)
	mockStore := &storetest.Store{}
	defer mockStore.AssertExpectations(t)

	// Previous successful job whose Data holds the cursor to resume from.
	previousJob := &model.Job{
		Id:             model.NewId(),
		CreateAt:       model.GetMillis(),
		Status:         model.JobStatusSuccess,
		Type:           model.JobTypeMessageExport,
		StartAt:        model.GetMillis() - 1000,
		LastActivityAt: model.GetMillis() - 1000,
		Data:           map[string]string{JobDataBatchStartTimestamp: "123"},
	}

	job := &model.Job{
		Id:       model.NewId(),
		CreateAt: model.GetMillis(),
		Status:   model.JobStatusPending,
		Type:     model.JobTypeMessageExport,
	}

	// mock job store returns a previously successful job that has the config that we're looking for, so we use it
	mockStore.JobStore.On("GetNewestJobByStatusesAndType", []string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport).Return(previousJob, nil)

	worker := &MessageExportWorker{
		jobServer: &jobs.JobServer{
			Store: mockStore,
			ConfigService: &testutils.StaticConfigService{
				Cfg: &model.Config{
					// mock config
					MessageExportSettings: model.MessageExportSettings{
						EnableExport:        model.NewPointer(true),
						ExportFormat:        model.NewPointer(model.ComplianceExportTypeActiance),
						DailyRunTime:        model.NewPointer("01:00"),
						ExportFromTimestamp: model.NewPointer(int64(0)),
						BatchSize:           model.NewPointer(10000),
					},
				},
			},
		},
		logger: logger,
	}

	// actually execute the code under test
	worker.initJobData(logger, job)

	assert.Equal(t, model.ComplianceExportTypeActiance, job.Data[JobDataExportType])
	assert.Equal(t, strconv.Itoa(*worker.jobServer.Config().MessageExportSettings.BatchSize), job.Data[JOB_DATA_BatchSize])
	assert.Equal(t, previousJob.Data[JobDataBatchStartTimestamp], job.Data[JobDataBatchStartTimestamp])
}
+
// TestDoJobNoPostsToExport runs a full DoJob cycle where the compliance store
// returns an empty batch immediately: the job should be claimed, produce no
// export content, and end in the success state. Mock expectations are verified
// on cleanup.
func TestDoJobNoPostsToExport(t *testing.T) {
	logger := mlog.CreateConsoleTestLogger(t)

	mockStore := &storetest.Store{}
	defer mockStore.AssertExpectations(t)

	mockMetrics := &mocks.MetricsInterface{}
	defer mockMetrics.AssertExpectations(t)

	job := &model.Job{
		Id:       model.NewId(),
		CreateAt: model.GetMillis(),
		Status:   model.JobStatusPending,
		Type:     model.JobTypeMessageExport,
	}

	// claim job succeeds
	mockStore.JobStore.On("UpdateStatusOptimistically", job.Id, model.JobStatusPending, model.JobStatusInProgress).Return(true, nil)
	mockMetrics.On("IncrementJobActive", model.JobTypeMessageExport)

	// no previous job, data will be loaded from config
	mockStore.JobStore.On("GetNewestJobByStatusesAndType", []string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport).Return(nil, model.NewAppError("", "", nil, "", http.StatusBadRequest))

	// no posts found to export
	mockStore.ComplianceStore.On("MessageExport", mock.Anything, mock.AnythingOfType("model.MessageExportCursor"), 10000).Return(
		make([]*model.MessageExport, 0), model.MessageExportCursor{}, nil,
	)

	mockStore.PostStore.On("AnalyticsPostCount", mock.Anything).Return(
		int64(estimatedPostCount), nil,
	)

	// job completed successfully
	mockStore.JobStore.On("UpdateOptimistically", job, model.JobStatusInProgress).Return(true, nil)
	mockStore.JobStore.On("UpdateStatus", job.Id, model.JobStatusSuccess).Return(job, nil)
	mockMetrics.On("DecrementJobActive", model.JobTypeMessageExport)

	// Temp dir acts as the root of the local file backend.
	tempDir, err := os.MkdirTemp("", "")
	require.NoError(t, err)
	t.Cleanup(func() {
		err = os.RemoveAll(tempDir)
		assert.NoError(t, err)
	})

	worker := &MessageExportWorker{
		jobServer: jobs.NewJobServer(
			&testutils.StaticConfigService{
				Cfg: &model.Config{
					// mock config
					FileSettings: model.FileSettings{
						DriverName: model.NewPointer(model.ImageDriverLocal),
						Directory:  model.NewPointer(tempDir),
					},
					MessageExportSettings: model.MessageExportSettings{
						EnableExport:        model.NewPointer(true),
						ExportFormat:        model.NewPointer(model.ComplianceExportTypeActiance),
						DailyRunTime:        model.NewPointer("01:00"),
						ExportFromTimestamp: model.NewPointer(int64(0)),
						BatchSize:           model.NewPointer(10000),
					},
				},
			},
			mockStore,
			mockMetrics,
			logger,
		),
		logger: logger,
	}

	// actually execute the code under test
	worker.DoJob(job)
}
+
// TestDoJobWithDedicatedExportBackend runs a full DoJob cycle with
// FileSettings.DedicatedExportStore enabled and verifies that export output
// (the zip) lands in the dedicated filestore directory, leaving the primary
// filestore untouched.
func TestDoJobWithDedicatedExportBackend(t *testing.T) {
	logger := mlog.CreateConsoleTestLogger(t)

	mockStore := &storetest.Store{}
	defer mockStore.AssertExpectations(t)

	mockMetrics := &mocks.MetricsInterface{}
	defer mockMetrics.AssertExpectations(t)

	job := &model.Job{
		Id:       model.NewId(),
		CreateAt: model.GetMillis(),
		Status:   model.JobStatusPending,
		Type:     model.JobTypeMessageExport,
	}

	// claim job succeeds
	mockStore.JobStore.On("UpdateStatusOptimistically", job.Id, model.JobStatusPending, model.JobStatusInProgress).Return(true, nil)
	mockMetrics.On("IncrementJobActive", model.JobTypeMessageExport)

	// no previous job, data will be loaded from config
	mockStore.JobStore.On("GetNewestJobByStatusesAndType", []string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport).Return(nil, model.NewAppError("", "", nil, "", http.StatusBadRequest))

	// no posts found to export
	mockStore.ComplianceStore.On("MessageExport", mock.Anything, mock.AnythingOfType("model.MessageExportCursor"), 10000).Return(
		make([]*model.MessageExport, 0), model.MessageExportCursor{}, nil,
	)

	mockStore.PostStore.On("AnalyticsPostCount", mock.Anything).Return(
		int64(estimatedPostCount), nil,
	)

	// job completed successfully
	mockStore.JobStore.On("UpdateOptimistically", job, model.JobStatusInProgress).Return(true, nil)
	mockStore.JobStore.On("UpdateStatus", job.Id, model.JobStatusSuccess).Return(job, nil)
	mockMetrics.On("DecrementJobActive", model.JobTypeMessageExport)

	// create primary filestore directory
	tempPrimaryDir, err := os.MkdirTemp("", "")
	require.NoError(t, err)
	defer os.RemoveAll(tempPrimaryDir)

	// create dedicated filestore directory
	tempDedicatedDir, err := os.MkdirTemp("", "")
	require.NoError(t, err)
	defer os.RemoveAll(tempDedicatedDir)

	// setup worker with primary and dedicated filestores.
	worker := &MessageExportWorker{
		jobServer: jobs.NewJobServer(
			&testutils.StaticConfigService{
				Cfg: &model.Config{
					// mock config
					FileSettings: model.FileSettings{
						DriverName:           model.NewPointer(model.ImageDriverLocal),
						Directory:            model.NewPointer(tempPrimaryDir),
						DedicatedExportStore: model.NewPointer(true),
						ExportDriverName:     model.NewPointer(model.ImageDriverLocal),
						ExportDirectory:      model.NewPointer(tempDedicatedDir),
					},
					MessageExportSettings: model.MessageExportSettings{
						EnableExport:        model.NewPointer(true),
						ExportFormat:        model.NewPointer(model.ComplianceExportTypeActiance),
						DailyRunTime:        model.NewPointer("01:00"),
						ExportFromTimestamp: model.NewPointer(int64(0)),
						BatchSize:           model.NewPointer(10000),
					},
				},
			},
			mockStore,
			mockMetrics,
			logger,
		),
		logger: logger,
	}

	// actually execute the code under test
	worker.DoJob(job)

	// ensure no primary filestore files exist
	files, err := os.ReadDir(tempPrimaryDir)
	require.NoError(t, err)
	assert.Zero(t, len(files))

	// ensure some dedicated filestore files exist
	files, err = os.ReadDir(tempDedicatedDir)
	require.NoError(t, err)
	assert.NotZero(t, len(files))
}
+
+// TestDoJobCancel verifies that cancelling the worker while an export batch
+// is in flight aborts the job cleanly and puts it back into the pending
+// state (so it can be retried), rather than marking it failed.
+func TestDoJobCancel(t *testing.T) {
+	logger := mlog.CreateConsoleTestLogger(t)
+
+	mockStore := &storetest.Store{}
+	t.Cleanup(func() { mockStore.AssertExpectations(t) })
+	mockMetrics := &mocks.MetricsInterface{}
+	t.Cleanup(func() { mockMetrics.AssertExpectations(t) })
+
+	job := &model.Job{
+		Id:       model.NewId(),
+		CreateAt: model.GetMillis(),
+		Status:   model.JobStatusPending,
+		Type:     model.JobTypeMessageExport,
+	}
+
+	tempDir, err := os.MkdirTemp("", "")
+	require.NoError(t, err)
+	t.Cleanup(func() { os.RemoveAll(tempDir) })
+
+	// Build the worker through MakeWorker (rather than a struct literal) so
+	// the worker's cancel function and job channel are wired up for Run().
+	impl := MessageExportJobInterfaceImpl{
+		Server: &app.Server{
+			Jobs: jobs.NewJobServer(
+				&testutils.StaticConfigService{
+					Cfg: &model.Config{
+						// mock config
+						FileSettings: model.FileSettings{
+							DriverName: model.NewPointer(model.ImageDriverLocal),
+							Directory:  model.NewPointer(tempDir),
+						},
+						MessageExportSettings: model.MessageExportSettings{
+							EnableExport:        model.NewPointer(true),
+							ExportFormat:        model.NewPointer(model.ComplianceExportTypeActiance),
+							DailyRunTime:        model.NewPointer("01:00"),
+							ExportFromTimestamp: model.NewPointer(int64(0)),
+							BatchSize:           model.NewPointer(10000),
+						},
+					},
+				},
+				mockStore,
+				mockMetrics,
+				logger,
+			),
+		},
+	}
+	worker, ok := impl.MakeWorker().(*MessageExportWorker)
+	require.True(t, ok)
+
+	// Claim job succeeds
+	mockStore.JobStore.On("UpdateStatusOptimistically", job.Id, model.JobStatusPending, model.JobStatusInProgress).Return(true, nil)
+	mockMetrics.On("IncrementJobActive", model.JobTypeMessageExport)
+
+	// No previous job, data will be loaded from config
+	mockStore.JobStore.On("GetNewestJobByStatusesAndType", []string{model.JobStatusWarning, model.JobStatusSuccess}, model.JobTypeMessageExport).Return(nil, model.NewAppError("", "", nil, "", http.StatusBadRequest))
+
+	cancelled := make(chan struct{})
+	// Cancel the worker and return an error. The cancellation happens inside
+	// the mock callback so it is guaranteed to fire mid-export; the callback
+	// also asserts the request context observed the cancellation before
+	// signalling the test goroutine via the unbuffered channel.
+	mockStore.ComplianceStore.On("MessageExport", mock.Anything, mock.AnythingOfType("model.MessageExportCursor"), 10000).Run(func(args tmock.Arguments) {
+		worker.cancel()
+
+		rctx, ok := args.Get(0).(request.CTX)
+		require.True(t, ok)
+		assert.Error(t, rctx.Context().Err())
+		assert.ErrorIs(t, rctx.Context().Err(), context.Canceled)
+
+		cancelled <- struct{}{}
+	}).Return(
+		nil, model.MessageExportCursor{}, context.Canceled,
+	)
+
+	mockStore.PostStore.On("AnalyticsPostCount", mock.Anything).Return(
+		int64(estimatedPostCount), nil,
+	)
+
+	// Job marked as pending
+	mockStore.JobStore.On("UpdateStatus", job.Id, model.JobStatusPending).Return(job, nil)
+	mockMetrics.On("DecrementJobActive", model.JobTypeMessageExport)
+
+	go worker.Run()
+
+	worker.JobChannel() <- *job
+
+	// Wait for the cancelation
+	<-cancelled
+
+	// Cleanup
+	worker.Stop()
+}
+
+// TestCreateZipFile exercises createZipFile against two backends: a real
+// local filestore (whose resulting archive is opened and inspected) and a
+// mocked backend simulating Hitachi HCP, where ListDirectoryRecursively
+// returns the directory itself as the first entry.
+func TestCreateZipFile(t *testing.T) {
+	rctx := request.TestContext(t)
+
+	tempDir, ioErr := os.MkdirTemp("", "")
+	require.NoError(t, ioErr)
+	defer os.RemoveAll(tempDir)
+
+	config := filestore.FileBackendSettings{
+		DriverName: model.ImageDriverLocal,
+		Directory:  tempDir,
+	}
+
+	// The backend is used below, so a construction failure must stop the test.
+	fileBackend, err := filestore.NewFileBackend(config)
+	require.NoError(t, err)
+
+	b := []byte("test")
+	path1 := path.Join(exportPath, "19700101")
+	path2 := path.Join(exportPath, "19800101/subdir")
+
+	// We test with a mock to test the Hitachi HCP case
+	// where ListDirectory returns the dir itself as the first entry.
+	// Note: If the mocks fail, that means the logic in createZipFile has
+	// gone wrong and needs to be verified.
+	mock := &fmocks.FileBackend{}
+	defer mock.AssertExpectations(t)
+
+	mock.On("WriteFile", tmock.Anything, tmock.AnythingOfType("string")).Return(int64(4), nil)
+	mock.On("FileSize", tmock.Anything).Return(int64(4), nil)
+	mock.On("Reader", path.Join(path1, "testid")).Return(mockReadSeekCloser{bytes.NewReader([]byte("test"))}, nil)
+	mock.On("Reader", path.Join(path2, "testid")).Return(mockReadSeekCloser{bytes.NewReader([]byte("test"))}, nil)
+	mock.On("ListDirectoryRecursively", path1).Return([]string{path1, path.Join(path1, "testid")}, nil)
+	mock.On("ListDirectoryRecursively", path2).Return([]string{path2, path.Join(path2, "testid")}, nil)
+
+	for i, backend := range []filestore.FileBackend{fileBackend, mock} {
+		written, err := backend.WriteFile(bytes.NewReader(b), path1+"/"+model.NewId())
+		assert.NoError(t, err)
+		assert.Equal(t, int64(len(b)), written)
+
+		written, err = backend.WriteFile(bytes.NewReader(b), path2+"/"+model.NewId())
+		assert.NoError(t, err)
+		assert.Equal(t, int64(len(b)), written)
+
+		written, err = backend.WriteFile(bytes.NewReader(b), path2+"/"+model.NewId())
+		assert.NoError(t, err)
+		assert.Equal(t, int64(len(b)), written)
+
+		err = createZipFile(rctx, backend, "testjob", []string{path1, path2})
+		assert.NoError(t, err)
+
+		// Skip checking the zip file in mock case.
+		if i == 1 {
+			continue
+		}
+		// require here: proceeding with a nil *zip.ReadCloser would panic.
+		r, err := zip.OpenReader(path.Join(tempDir, exportPath) + "/testjob.zip")
+		require.NoError(t, err)
+		// Inspect the archive contents before closing the reader.
+		assert.Equal(t, 3, len(r.File))
+		require.NoError(t, r.Close())
+	}
+}
+
+// mockReadSeekCloser adapts a *bytes.Reader so it can be returned from the
+// mocked backend's Reader calls above, which require a closeable reader.
+type mockReadSeekCloser struct {
+	*bytes.Reader
+}
+
+// Close is a no-op: the wrapped bytes.Reader holds no resources to release.
+func (r mockReadSeekCloser) Close() error {
+	return nil
+}