From 05e16ee5f6606912b8fd5add4ecafb4943ad081c Mon Sep 17 00:00:00 2001 From: Nic Pottier Date: Tue, 22 May 2018 14:03:50 -0500 Subject: [PATCH] create monthly archives when doing backfills, add input and value to run outputs --- archiver.go | 89 +++++++++++++++++++++++++----------- archiver_test.go | 106 ++++++++++++++++++++++--------------------- testdata/runs1.jsonl | 2 +- testdata/runs2.jsonl | 2 +- testdb.sql | 9 +++- 5 files changed, 126 insertions(+), 82 deletions(-) diff --git a/archiver.go b/archiver.go index 03d1c0c..7054503 100644 --- a/archiver.go +++ b/archiver.go @@ -80,6 +80,21 @@ type Archive struct { Dailies []*Archive } +func (a *Archive) endDate() time.Time { + endDate := a.StartDate + if a.Period == DayPeriod { + endDate = endDate.AddDate(0, 0, 1) + } else { + endDate = endDate.AddDate(0, 1, 0) + } + return endDate +} + +func (a *Archive) coversDate(d time.Time) bool { + end := a.endDate() + return !a.StartDate.After(d) && end.After(d) +} + const lookupActiveOrgs = `SELECT id, name, created_on, is_anon FROM orgs_org WHERE is_active = TRUE order by id` // GetActiveOrgs returns the active organizations sorted by id @@ -103,7 +118,7 @@ func GetActiveOrgs(ctx context.Context, db *sqlx.DB) ([]Org, error) { return orgs, nil } -const lookupOrgArchives = `SELECT id, start_date, period, archive_type, hash, size, record_count, url, rollup_id FROM archives_archive WHERE org_id = $1 AND archive_type = $2 ORDER BY start_date asc` +const lookupOrgArchives = `SELECT id, start_date, period, archive_type, hash, size, record_count, url, rollup_id FROM archives_archive WHERE org_id = $1 AND archive_type = $2 ORDER BY start_date asc, period desc` // GetCurrentArchives returns all the current archives for the passed in org and record type func GetCurrentArchives(ctx context.Context, db *sqlx.DB, org Org, archiveType ArchiveType) ([]*Archive, error) { @@ -118,7 +133,6 @@ func GetCurrentArchives(ctx context.Context, db *sqlx.DB, org Org, archiveType A // 
GetMissingDayArchives calculates what archives need to be generated for the passed in org this is calculated per day func GetMissingDayArchives(archives []*Archive, now time.Time, org Org, archiveType ArchiveType) ([]*Archive, error) { - // our first archive would be active days from today endDate := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.UTC).AddDate(0, 0, -org.ActiveDays) orgUTC := org.CreatedOn.In(time.UTC) startDate := time.Date(orgUTC.Year(), orgUTC.Month(), orgUTC.Day(), 0, 0, 0, 0, time.UTC) @@ -131,12 +145,12 @@ func GetMissingDayArchives(archives []*Archive, now time.Time, org Org, archiveT existing := false // advance our current archive idx until we are on our start date or later - for archiveIDX < len(archives) && (archives[archiveIDX].StartDate.Before(startDate) || archives[archiveIDX].Period == MonthPeriod) { + for archiveIDX < len(archives) && archives[archiveIDX].StartDate.Before(startDate) && !archives[archiveIDX].coversDate(startDate) { archiveIDX++ } - // do we already have this archive? - if archiveIDX < len(archives) && archives[archiveIDX].StartDate.Equal(startDate) { + // do we already have an archive covering this date? 
+ if archiveIDX < len(archives) && archives[archiveIDX].coversDate(startDate) { existing = true } @@ -158,7 +172,7 @@ func GetMissingDayArchives(archives []*Archive, now time.Time, org Org, archiveT return missing, nil } -// GetMissingMonthArchives gets which archives need to be rolled up in the set that has been passed in +// GetMissingMonthArchives gets which monthly archives are currently missing for this org func GetMissingMonthArchives(archives []*Archive, now time.Time, org Org, archiveType ArchiveType) ([]*Archive, error) { lastActive := now.AddDate(0, 0, -org.ActiveDays) endDate := time.Date(lastActive.Year(), lastActive.Month(), 1, 0, 0, 0, 0, time.UTC) @@ -174,11 +188,11 @@ func GetMissingMonthArchives(archives []*Archive, now time.Time, org Org, archiv existing := false // advance our current archive idx until we are on our start date or later - for archiveIDX < len(archives) && (archives[archiveIDX].StartDate.Before(startDate) || archives[archiveIDX].Period == DayPeriod) { + for archiveIDX < len(archives) && (archives[archiveIDX].StartDate.Before(startDate) || archives[archiveIDX].Period != MonthPeriod) { archiveIDX++ } - // do we already have this archive? + // do we already have an archive covering this date? 
if archiveIDX < len(archives) && archives[archiveIDX].StartDate.Equal(startDate) { existing = true } @@ -202,8 +216,8 @@ func GetMissingMonthArchives(archives []*Archive, now time.Time, org Org, archiv return missing, nil } -// BuildMonthlyArchive builds a monthly archive from the files present on S3 -func BuildMonthlyArchive(ctx context.Context, conf *Config, s3Client s3iface.S3API, archives []*Archive, month *Archive, now time.Time, org Org, archiveType ArchiveType) error { +// BuildRollupArchive builds a monthly archive from the files present on S3 +func BuildRollupArchive(ctx context.Context, conf *Config, s3Client s3iface.S3API, archives []*Archive, month *Archive, now time.Time, org Org, archiveType ArchiveType) error { start := time.Now() log := logrus.WithFields(logrus.Fields{ @@ -383,7 +397,7 @@ FROM ( from jsonb_array_elements(fr.path :: jsonb) as path_row) as path_data ) as path, (select coalesce(jsonb_agg(values_data.tmp_values), '{}'::jsonb) from ( - select json_build_object(key, jsonb_build_object('name', value -> 'name', 'time', (value -> 'created_on')::text::timestamptz, 'category', value -> 'category', 'node', value -> 'node_uuid')) as tmp_values + select json_build_object(key, jsonb_build_object('name', value -> 'name', 'value', value -> 'value', 'input', value -> 'input', 'time', (value -> 'created_on')::text::timestamptz, 'category', value -> 'category', 'node', value -> 'node_uuid')) as tmp_values FROM jsonb_each(fr.results :: jsonb)) as values_data ) as values, CASE @@ -454,6 +468,7 @@ func CreateArchiveFile(ctx context.Context, db *sqlx.DB, archive *Archive, archi "org_id": archive.Org.ID, "archive_type": archive.ArchiveType, "start_date": archive.StartDate, + "end_date": archive.endDate(), "period": archive.Period, }) @@ -478,12 +493,11 @@ func CreateArchiveFile(ctx context.Context, db *sqlx.DB, archive *Archive, archi return err } - endDate := archive.StartDate.Add(time.Hour * 24) var rows *sqlx.Rows if archive.ArchiveType == MessageType { 
- rows, err = tx.QueryxContext(ctx, lookupMsgs, archive.Org.ID, archive.StartDate, endDate) + rows, err = tx.QueryxContext(ctx, lookupMsgs, archive.Org.ID, archive.StartDate, archive.endDate()) } else if archive.ArchiveType == RunType { - rows, err = tx.QueryxContext(ctx, lookupFlowRuns, archive.Org.IsAnon, archive.Org.ID, archive.StartDate, endDate) + rows, err = tx.QueryxContext(ctx, lookupFlowRuns, archive.Org.IsAnon, archive.Org.ID, archive.StartDate, archive.endDate()) } if err != nil { return err } @@ -689,8 +703,8 @@ func DeleteArchiveFile(archive *Archive) error { return nil } -// BuildOrgDailyArchives builds all the montly archives for the passid in org -func BuildOrgDailyArchives(ctx context.Context, now time.Time, config *Config, db *sqlx.DB, s3Client s3iface.S3API, org Org, archiveType ArchiveType) ([]*Archive, error) { +// CreateOrgArchives builds all the missing archives for the passed in org +func CreateOrgArchives(ctx context.Context, now time.Time, config *Config, db *sqlx.DB, s3Client s3iface.S3API, org Org, archiveType ArchiveType) ([]*Archive, error) { log := logrus.WithField("org", org.Name).WithField("org_id", org.ID) records := 0 created := make([]*Archive, 0, 1) @@ -701,13 +715,35 @@ func BuildOrgDailyArchives(ctx context.Context, now time.Time, config *Config, d return nil, fmt.Errorf("error getting current archives") } - archives, err := GetMissingDayArchives(existing, now, org, archiveType) - if err != nil { - return nil, fmt.Errorf("error calculating tasks for type '%s'", archiveType) + var archives []*Archive + if len(existing) == 0 { + // no existing archives means this might be a backfill, figure out if there are full months we can build first + archives, err = GetMissingMonthArchives(existing, now, org, archiveType) + if err != nil { + log.WithError(err).Error("error calculating missing monthly archives") + return nil, err + } + + // then add in daily archives taking into account the monthly that will be built + daily, err = 
GetMissingDayArchives(archives, now, org, archiveType) + if err != nil { + log.WithError(err).Error("error calculating missing daily archives") + return nil, err + } + for _, d := range daily { + archives = append(archives, d) + } + } else { + // figure out any missing day archives + archives, err = GetMissingDayArchives(existing, now, org, archiveType) + if err != nil { + log.WithError(err).Error("error calculating missing daily archives") + return nil, err + } } for _, archive := range archives { - log = log.WithField("start_date", archive.StartDate).WithField("period", archive.Period).WithField("archive_type", archive.ArchiveType) + log = log.WithField("start_date", archive.StartDate).WithField("end_date", archive.endDate()).WithField("period", archive.Period).WithField("archive_type", archive.ArchiveType) log.Info("starting archive") err := CreateArchiveFile(ctx, db, archive, config.TempDir) if err != nil { @@ -754,8 +790,8 @@ func BuildOrgDailyArchives(ctx context.Context, now time.Time, config *Config, d return created, nil } -// BuildOrgMonthlyArchives builds all the montly archives for the passid in org -func BuildOrgMonthlyArchives(ctx context.Context, now time.Time, config *Config, db *sqlx.DB, s3Client s3iface.S3API, org Org, archiveType ArchiveType) ([]*Archive, error) { +// RollupOrgArchives rolls up monthly archives from our daily archives +func RollupOrgArchives(ctx context.Context, now time.Time, config *Config, db *sqlx.DB, s3Client s3iface.S3API, org Org, archiveType ArchiveType) ([]*Archive, error) { log := logrus.WithField("org", org.Name).WithField("org_id", org.ID) records := 0 created := make([]*Archive, 0, 1) @@ -766,14 +802,15 @@ func BuildOrgMonthlyArchives(ctx context.Context, now time.Time, config *Config, return nil, fmt.Errorf("error getting current archives") } - // now build our monthlies + // get our missing monthly archives archives, err := GetMissingMonthArchives(existing, now, org, archiveType) if err != nil { return nil, 
fmt.Errorf("error calculating missing monthly archives for type '%s'", archiveType) } + // build them from rollups for _, archive := range archives { - err = BuildMonthlyArchive(ctx, config, s3Client, existing, archive, now, org, archiveType) + err = BuildRollupArchive(ctx, config, s3Client, existing, archive, now, org, archiveType) if err != nil { log.WithError(err).Error("error building monthly archive") continue @@ -816,12 +853,12 @@ func BuildOrgMonthlyArchives(ctx context.Context, now time.Time, config *Config, // ArchiveOrg looks for any missing archives for the passed in org, creating and uploading them as necessary, returning the created archives func ArchiveOrg(ctx context.Context, now time.Time, config *Config, db *sqlx.DB, s3Client s3iface.S3API, org Org, archiveType ArchiveType) ([]*Archive, error) { - created, err := BuildOrgDailyArchives(ctx, now, config, db, s3Client, org, archiveType) + created, err := CreateOrgArchives(ctx, now, config, db, s3Client, org, archiveType) if err != nil { return nil, err } - monthlies, err := BuildOrgMonthlyArchives(ctx, now, config, db, s3Client, org, archiveType) + monthlies, err := RollupOrgArchives(ctx, now, config, db, s3Client, org, archiveType) if err != nil { return nil, err } diff --git a/archiver_test.go b/archiver_test.go index 51db955..008a327 100644 --- a/archiver_test.go +++ b/archiver_test.go @@ -60,9 +60,10 @@ func TestGetMissingDayArchives(t *testing.T) { assert.NoError(t, err) tasks, err = GetMissingDayArchives(existing, now, orgs[2], MessageType) assert.NoError(t, err) - assert.Equal(t, 60, len(tasks)) + assert.Equal(t, 31, len(tasks)) assert.Equal(t, time.Date(2017, 8, 11, 0, 0, 0, 0, time.UTC), tasks[0].StartDate) - assert.Equal(t, time.Date(2017, 10, 10, 0, 0, 0, 0, time.UTC), tasks[59].StartDate) + assert.Equal(t, time.Date(2017, 10, 1, 0, 0, 0, 0, time.UTC), tasks[21].StartDate) + assert.Equal(t, time.Date(2017, 10, 10, 0, 0, 0, 0, time.UTC), tasks[30].StartDate) } func 
TestGetMissingMonthArchives(t *testing.T) { @@ -149,14 +150,13 @@ func TestCreateMsgArchive(t *testing.T) { assert.NoError(t, err) tasks, err = GetMissingDayArchives(existing, now, orgs[2], MessageType) assert.NoError(t, err) - assert.Equal(t, 60, len(tasks)) + assert.Equal(t, 31, len(tasks)) task = tasks[0] - // build our first task, should have no messages err = CreateArchiveFile(ctx, db, task, "/tmp") assert.NoError(t, err) - // should have no records and be an empty gzip file + // should have one record assert.Equal(t, 1, task.RecordCount) assert.Equal(t, int64(283), task.Size) assert.Equal(t, "d03b1ab8d3312b37d5e0ae38b88e1ea7", task.Hash) @@ -198,7 +198,6 @@ func TestCreateRunArchive(t *testing.T) { assert.Equal(t, 62, len(tasks)) task := tasks[0] - // build our first task, should have no messages err = CreateArchiveFile(ctx, db, task, "/tmp") assert.NoError(t, err) @@ -209,15 +208,14 @@ func TestCreateRunArchive(t *testing.T) { DeleteArchiveFile(task) - // build our third task, should have a single message task = tasks[2] err = CreateArchiveFile(ctx, db, task, "/tmp") assert.NoError(t, err) // should have two record assert.Equal(t, 2, task.RecordCount) - assert.Equal(t, int64(568), task.Size) - assert.Equal(t, "830b11f3653e4c961fe714fb425d4cec", task.Hash) + assert.Equal(t, int64(581), task.Size) + assert.Equal(t, "d2111b94c94756147838129ca0618f38", task.Hash) assertArchiveFile(t, task, "runs1.jsonl") DeleteArchiveFile(task) @@ -238,8 +236,8 @@ func TestCreateRunArchive(t *testing.T) { // should have one record assert.Equal(t, 1, task.RecordCount) - assert.Equal(t, int64(389), task.Size) - assert.Equal(t, "d356e67393a5ae9c0fc07f81739c9d03", task.Hash) + assert.Equal(t, int64(393), task.Size) + assert.Equal(t, "4f3beb90ee4dc586db7b04ddc7e0117d", task.Hash) assertArchiveFile(t, task, "runs2.jsonl") DeleteArchiveFile(task) @@ -258,7 +256,7 @@ func TestWriteArchiveToDB(t *testing.T) { tasks, err := GetMissingDayArchives(existing, now, orgs[2], MessageType) 
assert.NoError(t, err) - assert.Equal(t, 60, len(tasks)) + assert.Equal(t, 31, len(tasks)) assert.Equal(t, time.Date(2017, 8, 11, 0, 0, 0, 0, time.UTC), tasks[0].StartDate) task := tasks[0] @@ -278,7 +276,7 @@ func TestWriteArchiveToDB(t *testing.T) { assert.NoError(t, err) tasks, err = GetMissingDayArchives(existing, now, orgs[2], MessageType) assert.NoError(t, err) - assert.Equal(t, 59, len(tasks)) + assert.Equal(t, 30, len(tasks)) assert.Equal(t, time.Date(2017, 8, 12, 0, 0, 0, 0, time.UTC), tasks[0].StartDate) } @@ -298,38 +296,40 @@ func TestArchiveOrgMessages(t *testing.T) { // AWS S3 config in the environment needed to download from S3 if config.AWSAccessKeyID != "missing_aws_access_key_id" && config.AWSSecretAccessKey != "missing_aws_secret_access_key" { - s3Client, err := NewS3Client(config) assert.NoError(t, err) archives, err := ArchiveOrg(ctx, now, config, db, s3Client, orgs[1], MessageType) assert.NoError(t, err) - assert.Equal(t, 64, len(archives)) - assert.Equal(t, time.Date(2017, 8, 10, 0, 0, 0, 0, time.UTC), archives[0].StartDate) - assert.Equal(t, time.Date(2017, 10, 10, 0, 0, 0, 0, time.UTC), archives[61].StartDate) - assert.Equal(t, time.Date(2017, 8, 1, 0, 0, 0, 0, time.UTC), archives[62].StartDate) - assert.Equal(t, time.Date(2017, 9, 1, 0, 0, 0, 0, time.UTC), archives[63].StartDate) + assert.Equal(t, 12, len(archives)) + assert.Equal(t, time.Date(2017, 8, 1, 0, 0, 0, 0, time.UTC), archives[0].StartDate) + assert.Equal(t, MonthPeriod, archives[0].Period) + + assert.Equal(t, time.Date(2017, 9, 1, 0, 0, 0, 0, time.UTC), archives[1].StartDate) + assert.Equal(t, MonthPeriod, archives[1].Period) - assert.Equal(t, 0, archives[0].RecordCount) - assert.Equal(t, int64(23), archives[0].Size) - assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[0].Hash) + assert.Equal(t, time.Date(2017, 10, 1, 0, 0, 0, 0, time.UTC), archives[2].StartDate) + assert.Equal(t, DayPeriod, archives[2].Period) - assert.Equal(t, 2, archives[2].RecordCount) - 
assert.Equal(t, int64(448), archives[2].Size) - assert.Equal(t, "74ab5f70262ccd7b10ef0ae7274c806d", archives[2].Hash) + assert.Equal(t, time.Date(2017, 10, 10, 0, 0, 0, 0, time.UTC), archives[11].StartDate) + assert.Equal(t, DayPeriod, archives[11].Period) - assert.Equal(t, 1, archives[3].RecordCount) - assert.Equal(t, int64(299), archives[3].Size) - assert.Equal(t, "3683faa7b3a546b47b0bac1ec150f8af", archives[3].Hash) + assert.Equal(t, 3, archives[0].RecordCount) + assert.Equal(t, int64(470), archives[0].Size) + assert.Equal(t, "7033bb24efca482d121b8e0cdc6b1430", archives[0].Hash) - assert.Equal(t, 3, archives[62].RecordCount) - assert.Equal(t, int64(470), archives[62].Size) - assert.Equal(t, "7033bb24efca482d121b8e0cdc6b1430", archives[62].Hash) + assert.Equal(t, 0, archives[1].RecordCount) + assert.Equal(t, int64(23), archives[1].Size) + assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[1].Hash) - assert.Equal(t, 0, archives[63].RecordCount) - assert.Equal(t, int64(23), archives[63].Size) - assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[63].Hash) + assert.Equal(t, 0, archives[2].RecordCount) + assert.Equal(t, int64(23), archives[2].Size) + assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[2].Hash) + + assert.Equal(t, 0, archives[11].RecordCount) + assert.Equal(t, int64(23), archives[11].Size) + assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[11].Hash) } } @@ -349,33 +349,35 @@ func TestArchiveOrgRuns(t *testing.T) { // AWS S3 config in the environment needed to download from S3 if config.AWSAccessKeyID != "missing_aws_access_key_id" && config.AWSSecretAccessKey != "missing_aws_secret_access_key" { - s3Client, err := NewS3Client(config) assert.NoError(t, err) archives, err := ArchiveOrg(ctx, now, config, db, s3Client, orgs[2], RunType) assert.NoError(t, err) - assert.Equal(t, 64, len(archives)) - assert.Equal(t, time.Date(2017, 8, 10, 0, 0, 0, 0, time.UTC), archives[0].StartDate) - assert.Equal(t, time.Date(2017, 
10, 10, 0, 0, 0, 0, time.UTC), archives[61].StartDate) - assert.Equal(t, time.Date(2017, 8, 1, 0, 0, 0, 0, time.UTC), archives[62].StartDate) - assert.Equal(t, time.Date(2017, 9, 1, 0, 0, 0, 0, time.UTC), archives[63].StartDate) + assert.Equal(t, 12, len(archives)) + assert.Equal(t, time.Date(2017, 8, 1, 0, 0, 0, 0, time.UTC), archives[0].StartDate) + assert.Equal(t, MonthPeriod, archives[0].Period) - assert.Equal(t, 1, archives[0].RecordCount) - assert.Equal(t, int64(389), archives[0].Size) - assert.Equal(t, "d356e67393a5ae9c0fc07f81739c9d03", archives[0].Hash) + assert.Equal(t, time.Date(2017, 9, 1, 0, 0, 0, 0, time.UTC), archives[1].StartDate) + assert.Equal(t, MonthPeriod, archives[1].Period) - assert.Equal(t, 0, archives[2].RecordCount) - assert.Equal(t, int64(23), archives[2].Size) - assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[2].Hash) + assert.Equal(t, time.Date(2017, 10, 1, 0, 0, 0, 0, time.UTC), archives[2].StartDate) + assert.Equal(t, DayPeriod, archives[2].Period) + + assert.Equal(t, time.Date(2017, 10, 10, 0, 0, 0, 0, time.UTC), archives[11].StartDate) + assert.Equal(t, DayPeriod, archives[11].Period) + + assert.Equal(t, 1, archives[0].RecordCount) + assert.Equal(t, int64(393), archives[0].Size) + assert.Equal(t, "4f3beb90ee4dc586db7b04ddc7e0117d", archives[0].Hash) - assert.Equal(t, 1, archives[62].RecordCount) - assert.Equal(t, int64(389), archives[62].Size) - assert.Equal(t, "d356e67393a5ae9c0fc07f81739c9d03", archives[62].Hash) + assert.Equal(t, 0, archives[1].RecordCount) + assert.Equal(t, int64(23), archives[1].Size) + assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[1].Hash) - assert.Equal(t, 0, archives[63].RecordCount) - assert.Equal(t, int64(23), archives[63].Size) - assert.Equal(t, "f0d79988b7772c003d04a28bd7417a62", archives[63].Hash) + assert.Equal(t, 1, archives[11].RecordCount) + assert.Equal(t, int64(385), archives[11].Size) + assert.Equal(t, "e4ac24080ca5a05539d058cd7fe63291", archives[11].Hash) } } diff 
--git a/testdata/runs1.jsonl b/testdata/runs1.jsonl index 8e928a0..78a3e23 100644 --- a/testdata/runs1.jsonl +++ b/testdata/runs1.jsonl @@ -1,2 +1,2 @@ {"id":1,"flow":{"uuid":"6639286a-9120-45d4-aa39-03ae3942a4a6","name":"Flow 1"},"contact":{"uuid":"3e814add-e614-41f7-8b5d-a07f670a698f","name":"Ajodinabiff Dane"},"responded":true,"path":[],"values":{},"events":[],"created_on":"2017-08-12T19:11:59.890662+00:00","modified_on":"2017-08-12T19:11:59.890662+00:00","exited_on":"2017-08-12T19:11:59.890662+00:00","exit_type":"completed"} -{"id":2,"flow":{"uuid":"6639286a-9120-45d4-aa39-03ae3942a4a6","name":"Flow 1"},"contact":{"uuid":"3e814add-e614-41f7-8b5d-a07f670a698f","name":"Ajodinabiff Dane"},"responded":true,"path":[{"node": "10896d63-8df7-4022-88dd-a9d93edf355b", "time": "2017-08-12T13:07:24.049815+00:00"}],"values":[{"agree": {"name": "Do you agree?", "node": "a0434c54-3e26-4eb0-bafc-46cdeaf435ac", "time": "2017-05-03T12:25:21.714339+00:00", "category": "Strongly agree"}}],"events":[{"msg": {"urn": "tel:+12076661212", "text": "hola", "uuid": "cf05c58f-31fb-4ce8-9e65-4ecc9fd47cbe", "channel": {"name": "1223", "uuid": "bbfe2e9c-cf69-4d0a-b42e-00ac3dc0b0b8"}}, "type": "msg_created", "step_uuid": "659cdae5-1f29-4a58-9437-10421f724268", "created_on": "2018-01-22T15:06:47.357682+00:00"}],"created_on":"2017-08-12T19:11:59.890662+00:00","modified_on":"2017-08-12T19:11:59.890662+00:00","exited_on":"2017-08-12T19:11:59.890662+00:00","exit_type":"completed"} +{"id":2,"flow":{"uuid":"6639286a-9120-45d4-aa39-03ae3942a4a6","name":"Flow 1"},"contact":{"uuid":"3e814add-e614-41f7-8b5d-a07f670a698f","name":"Ajodinabiff Dane"},"responded":true,"path":[{"node": "10896d63-8df7-4022-88dd-a9d93edf355b", "time": "2017-08-12T13:07:24.049815+00:00"}],"values":[{"agree": {"name": "Do you agree?", "node": "a0434c54-3e26-4eb0-bafc-46cdeaf435ac", "time": "2017-05-03T12:25:21.714339+00:00", "input": "A", "value": "A", "category": "Strongly agree"}}],"events":[{"msg": {"urn": "tel:+12076661212", 
"text": "hola", "uuid": "cf05c58f-31fb-4ce8-9e65-4ecc9fd47cbe", "channel": {"name": "1223", "uuid": "bbfe2e9c-cf69-4d0a-b42e-00ac3dc0b0b8"}}, "type": "msg_created", "step_uuid": "659cdae5-1f29-4a58-9437-10421f724268", "created_on": "2018-01-22T15:06:47.357682+00:00"}],"created_on":"2017-08-12T19:11:59.890662+00:00","modified_on":"2017-08-12T19:11:59.890662+00:00","exited_on":"2017-08-12T19:11:59.890662+00:00","exit_type":"completed"} diff --git a/testdata/runs2.jsonl b/testdata/runs2.jsonl index 0f19b6b..2d8d346 100644 --- a/testdata/runs2.jsonl +++ b/testdata/runs2.jsonl @@ -1 +1 @@ -{"id":3,"flow":{"uuid":"629db399-a5fb-4fa0-88e6-f479957b63d2","name":"Flow 2"},"contact":{"uuid":"7051dff0-0a27-49d7-af1f-4494239139e6","name":"Joanne Stone"},"responded":true,"path":[{"node": "accbc6e2-b0df-46cd-9a76-bff0fdf4d753", "time": "2017-08-12T13:07:24.049815+00:00"}],"values":[{"agree": {"name": "Do you agree?", "node": "084c8cf1-715d-4d0a-b38d-a616ed74e638", "time": "2017-05-03T12:25:21.714339+00:00", "category": "Strongly agree"}}],"events":[],"created_on":"2017-08-10T19:11:59.890662+00:00","modified_on":"2017-08-10T19:11:59.890662+00:00","exited_on":"2017-08-10T19:11:59.890662+00:00","exit_type":"completed"} +{"id":3,"flow":{"uuid":"629db399-a5fb-4fa0-88e6-f479957b63d2","name":"Flow 2"},"contact":{"uuid":"7051dff0-0a27-49d7-af1f-4494239139e6","name":"Joanne Stone"},"responded":true,"path":[{"node": "accbc6e2-b0df-46cd-9a76-bff0fdf4d753", "time": "2017-08-12T13:07:24.049815+00:00"}],"values":[{"agree": {"name": "Agree", "node": "084c8cf1-715d-4d0a-b38d-a616ed74e638", "time": "2017-05-03T12:25:21.714339+00:00", "input": "A", "value": "A", "category": "Strongly agree"}}],"events":[],"created_on":"2017-08-10T19:11:59.890662+00:00","modified_on":"2017-08-10T19:11:59.890662+00:00","exited_on":"2017-08-10T19:11:59.890662+00:00","exit_type":"completed"} diff --git a/testdb.sql b/testdb.sql index fab93d3..9cc6860 100644 --- a/testdb.sql +++ b/testdb.sql @@ -230,7 +230,12 @@ INSERT 
INTO flows_flowrun(id, uuid, responded, contact_id, flow_id, org_id, resu '[{"msg": {"urn": "tel:+12076661212", "text": "hola", "uuid": "cf05c58f-31fb-4ce8-9e65-4ecc9fd47cbe", "channel": {"name": "1223", "uuid": "bbfe2e9c-cf69-4d0a-b42e-00ac3dc0b0b8"}}, "type": "msg_created", "step_uuid": "659cdae5-1f29-4a58-9437-10421f724268", "created_on": "2018-01-22T15:06:47.357682+00:00"}]', '2017-08-12 21:11:59.890662+02:00','2017-08-12 21:11:59.890662+02:00','2017-08-12 21:11:59.890662+02:00', 'C'), (3, 'de782b35-a398-46ed-8550-34c66053841b', TRUE, 7, 2, 3, -'{"agree": {"category": "Strongly agree", "node_uuid": "084c8cf1-715d-4d0a-b38d-a616ed74e638", "name": "Do you agree?", "value": "A", "created_on": "2017-05-03T12:25:21.714339+00:00", "input": "A"}}', +'{"agree": {"category": "Strongly agree", "node_uuid": "084c8cf1-715d-4d0a-b38d-a616ed74e638", "name": "Agree", "value": "A", "created_on": "2017-05-03T12:25:21.714339+00:00", "input": "A"}}', '[{"uuid": "600ac5b4-4895-4161-ad97-6e2f1bb48bcb", "node_uuid": "accbc6e2-b0df-46cd-9a76-bff0fdf4d753", "arrived_on": "2017-08-12T15:07:24.049815+02:00", "exit_uuid": "8249e2dc-c893-4200-b6d2-398d07a459bc"}]', '[{"msg": {"urn": "tel:+12076661212", "text": "hola", "uuid": "9ea50923-0888-4596-9a9d-4890994934a9", "channel": {"name": "1223", "uuid": "d6597e08-8285-428c-8e7e-97c68adfa073"}}, "type": "msg_created", "step_uuid": "ae067248-df92-41c8-bb29-92506e984259", "created_on": "2018-01-22T15:06:47.357682+00:00"}]', -'2017-08-10 21:11:59.890662+02:00','2017-08-10 21:11:59.890662+02:00','2017-08-10 21:11:59.890662+02:00', 'C'); +'2017-08-10 21:11:59.890662+02:00','2017-08-10 21:11:59.890662+02:00','2017-08-10 21:11:59.890662+02:00', 'C'), +(4, 'de782b35-a398-46ed-8550-34c66053841b', TRUE, 7, 2, 3, +'{"agree": {"category": "Disagree", "node_uuid": "084c8cf1-715d-4d0a-b38d-a616ed74e638", "name": "Agree", "value": "B", "created_on": "2017-10-10T12:25:21.714339+00:00", "input": "B"}}', +'[{"uuid": "babf4fc8-e12c-4bb9-a9dd-61178a118b5a", 
"node_uuid": "accbc6e2-b0df-46cd-9a76-bff0fdf4d753", "arrived_on": "2017-10-12T15:07:24.049815+02:00", "exit_uuid": "8249e2dc-c893-4200-b6d2-398d07a459bc"}]', +'[{"msg": {"urn": "tel:+12076661212", "text": "hi hi", "uuid": "543d2c4b-ff0b-4b87-a9a4-b2d6745cf470", "channel": {"name": "1223", "uuid": "d6597e08-8285-428c-8e7e-97c68adfa073"}}, "type": "msg_created", "step_uuid": "3a5014dd-7b14-4b7a-be52-0419c09340a6", "created_on": "2018-10-12T15:06:47.357682+00:00"}]', +'2017-10-10 21:11:59.890662+02:00','2017-10-10 21:11:59.890662+02:00','2017-10-10 21:11:59.890662+02:00', 'C');