diff --git a/api/dataset.go b/api/dataset.go index ac1d0d2..e2113fd 100644 --- a/api/dataset.go +++ b/api/dataset.go @@ -3,7 +3,6 @@ package api import ( "errors" "fmt" - "io" "net/http" "github.com/Yiling-J/tablepilot/ent" @@ -12,8 +11,6 @@ import ( "github.com/gin-gonic/gin" ) -// Dataset-related handlers - func (hs *HTTPServer) CreateDataset(ctx *gin.Context) { var apiReq services_dataset.DatasetAPIRequest if err := ctx.ShouldBind(&apiReq); err != nil { @@ -28,19 +25,22 @@ func (hs *HTTPServer) CreateDataset(ctx *gin.Context) { Data: apiReq.Data, } - if apiReq.Type == "csv" { + if apiReq.Type == "csv" || apiReq.Type == "image" { if len(apiReq.Files) == 0 { errorResponse(ctx, http.StatusBadRequest, errors.New("at least one file is required for CSV dataset type")) return } - var readers []io.Reader + var readers []services_dataset.CreateDatasetFile for _, fh := range apiReq.Files { f, err := fh.Open() if err != nil { errorResponse(ctx, http.StatusBadRequest, err) return } - readers = append(readers, f) + readers = append(readers, services_dataset.CreateDatasetFile{ + Name: fh.Filename, + Reader: f, + }) } serviceReq.Files = readers } @@ -107,19 +107,22 @@ func (hs *HTTPServer) UpdateDataset(ctx *gin.Context) { } if apiReq.Files != nil { - var readers []io.Reader + var readers []services_dataset.CreateDatasetFile for _, fh := range apiReq.Files { f, err := fh.Open() if err != nil { errorResponse(ctx, http.StatusBadRequest, err) return } - readers = append(readers, f) + readers = append(readers, services_dataset.CreateDatasetFile{ + Name: fh.Filename, + Reader: f, + }) } serviceReq.Files = readers serviceReq.Fields = append(serviceReq.Fields, "files") } else { - serviceReq.Files = []io.Reader{} + serviceReq.Files = []services_dataset.CreateDatasetFile{} } err := hs.DatasetService.Update(ctx.Request.Context(), datasetID, serviceReq) diff --git a/api/dataset_test.go b/api/dataset_test.go index be2e85a..8813ab3 100644 --- a/api/dataset_test.go +++ b/api/dataset_test.go 
@@ -72,7 +72,7 @@ func TestAPI_CreateDatasetWithFiles(t *testing.T) { require.Equal(t, expectedRequest.Description, req.Description) require.Equal(t, expectedRequest.Type, req.Type) require.Equal(t, 1, len(req.Files)) - data, err := io.ReadAll(req.Files[0]) + data, err := io.ReadAll(req.Files[0].Reader) require.NoError(t, err) require.Equal(t, "header1,header2,header3\nr1c1,r1c2,r1c3\n", string(data)) return "new_dataset_id", nil diff --git a/cmd/cli/handler.go b/cmd/cli/handler.go index d9e72f2..c7e4605 100644 --- a/cmd/cli/handler.go +++ b/cmd/cli/handler.go @@ -181,16 +181,17 @@ func (h *Handler) CreateDataset(cmd *cobra.Command, args []string) error { options = append(options, o...) } req.Data = options - case "csv": + case "csv", "image": filePaths, err := cmd.Flags().GetStringArray("path") if err != nil { return fmt.Errorf("error getting file flag for csv type: %w", err) } if len(filePaths) == 0 { - return fmt.Errorf("at least one --path must be provided for type 'csv'") + return fmt.Errorf("at least one --path must be provided") } - var readers []io.Reader + var readers []dataset.CreateDatasetFile files, err := parsePaths(filePaths) + names := []string{} if err != nil { return err } @@ -199,12 +200,17 @@ func (h *Handler) CreateDataset(cmd *cobra.Command, args []string) error { if err != nil { return fmt.Errorf("failed to open file %s: %w", f, err) } - readers = append(readers, file) + readers = append(readers, dataset.CreateDatasetFile{ + Name: filepath.Base(f), + Reader: file, + }) + names = append(names, filepath.Base(f)) } req.Files = readers + req.Data = names defer func() { for _, f := range readers { - if c, ok := f.(io.Closer); ok { + if c, ok := f.Reader.(io.Closer); ok { c.Close() } } @@ -307,7 +313,7 @@ func (h *Handler) UpdateDataset(cmd *cobra.Command, args []string) error { options = append(options, o...) 
} req.Data = options - case "csv": + case "csv", "image": filePaths, err := cmd.Flags().GetStringArray("file") if err != nil { return fmt.Errorf("error getting file flag for csv type: %w", err) @@ -315,24 +321,21 @@ func (h *Handler) UpdateDataset(cmd *cobra.Command, args []string) error { if len(filePaths) == 0 { return fmt.Errorf("at least one --file path must be provided for type 'csv'") } - var files []io.Reader + var files []dataset.CreateDatasetFile for _, filePath := range filePaths { file, err := os.Open(filePath) if err != nil { - // Close already opened files if any - for _, f := range files { - if c, ok := f.(io.Closer); ok { - c.Close() - } - } return fmt.Errorf("failed to open file %s: %w", filePath, err) } - files = append(files, file) + files = append(files, dataset.CreateDatasetFile{ + Name: filepath.Base(filePath), + Reader: file, + }) } req.Files = files defer func() { for _, f := range files { - if c, ok := f.(io.Closer); ok { + if c, ok := f.Reader.(io.Closer); ok { c.Close() } } diff --git a/ent/dataset/dataset.go b/ent/dataset/dataset.go index 7170354..ac7a081 100644 --- a/ent/dataset/dataset.go +++ b/ent/dataset/dataset.go @@ -78,8 +78,9 @@ type Type string // Type values. const ( - TypeList Type = "list" - TypeCsv Type = "csv" + TypeList Type = "list" + TypeCsv Type = "csv" + TypeImage Type = "image" ) func (_type Type) String() string { @@ -89,7 +90,7 @@ func (_type Type) String() string { // TypeValidator is a validator for the "type" field enum values. It is called by the builders before save. 
func TypeValidator(_type Type) error { switch _type { - case TypeList, TypeCsv: + case TypeList, TypeCsv, TypeImage: return nil default: return fmt.Errorf("dataset: invalid enum value for type field: %q", _type) diff --git a/ent/migrate/schema.go b/ent/migrate/schema.go index f0e6149..b7e5901 100644 --- a/ent/migrate/schema.go +++ b/ent/migrate/schema.go @@ -17,7 +17,7 @@ var ( {Name: "name", Type: field.TypeString, Unique: true}, {Name: "path", Type: field.TypeString, Nullable: true}, {Name: "description", Type: field.TypeString, Default: ""}, - {Name: "type", Type: field.TypeEnum, Enums: []string{"list", "csv"}}, + {Name: "type", Type: field.TypeEnum, Enums: []string{"list", "csv", "image"}}, {Name: "indexer", Type: field.TypeJSON, Nullable: true}, {Name: "values", Type: field.TypeJSON, Nullable: true}, } diff --git a/ent/schema/dataset.go b/ent/schema/dataset.go index 3e12b21..a5e2ffd 100644 --- a/ent/schema/dataset.go +++ b/ent/schema/dataset.go @@ -36,7 +36,7 @@ func (Dataset) Fields() []ent.Field { field.String("name").Unique().NotEmpty(), field.String("path").Optional(), field.String("description").Default(""), - field.Enum("type").Values("list", "csv"), + field.Enum("type").Values("list", "csv", "image"), field.JSON("indexer", CSVIndexer{}).Optional(), field.Strings("values").Optional(), } diff --git a/services/dataset/dataset.go b/services/dataset/dataset.go index d007fec..deab788 100644 --- a/services/dataset/dataset.go +++ b/services/dataset/dataset.go @@ -7,13 +7,13 @@ import ( "io" "os" "path/filepath" + "slices" "github.com/Yiling-J/tablepilot/config" "github.com/Yiling-J/tablepilot/ent" db_dataset "github.com/Yiling-J/tablepilot/ent/dataset" "github.com/Yiling-J/tablepilot/services/source" "github.com/Yiling-J/tablepilot/services/source/csvindexer" - "github.com/Yiling-J/tablepilot/utils" ) //go:generate moq -rm -out dataset_moq.go . 
DatasetService @@ -39,56 +39,43 @@ func NewDatasetService(db *ent.Client, cfg *config.Config) *DatasetServiceImpl { } func (s DatasetServiceImpl) buildCreateDatasetReq(ctx context.Context, req *CreateDatasetRequest, sr *ent.Dataset) error { - switch req.Type { - case db_dataset.TypeCsv: - relativePath := filepath.Join("datasets/shared", sr.Nanoid) - dirPath := filepath.Join(s.cfg.Common.DataDir, relativePath) - err := os.MkdirAll(dirPath, os.ModePerm) - if err != nil { - return fmt.Errorf("failed to create directory: %w", err) - } + relativePath := filepath.Join("datasets/shared", sr.Nanoid) + dirPath := filepath.Join(s.cfg.Common.DataDir, relativePath) + err := os.MkdirAll(dirPath, os.ModePerm) + if err != nil { + return fmt.Errorf("failed to create directory: %w", err) + } - if len(req.Files) == 0 { - return errors.New("dataset.Create: files should not be empty") - } - filePath := filepath.Join(dirPath, "data.csv") - outFile, err := os.Create(filePath) + for _, file := range req.Files { + outFile, err := os.Create(filepath.Join(dirPath, file.Name)) if err != nil { - return fmt.Errorf("failed to create file %s: %w", filePath, err) + return fmt.Errorf("failed to create file: %w", err) } - for i, file := range req.Files { - // skip csv headers - if i > 0 { - reader := utils.NewCsvReader(file) - _, err = reader.Read() - if err != nil { - return fmt.Errorf("failed to read csv %w", err) - } - offset := reader.InputOffset() - _, err = file.(io.ReadSeeker).Seek(offset, io.SeekStart) - if err != nil { - return fmt.Errorf("failed to seek csv file %w", err) - } - } - _, err = io.Copy(outFile, file) - if err != nil { - return fmt.Errorf("failed to write to file %s: %w", filePath, err) - } + _, err = io.Copy(outFile, file.Reader) + outFile.Close() + if err != nil { + return fmt.Errorf("failed to write to file: %w", err) } - outFile.Close() - // build index - indexer, err := csvindexer.NewCSVIndexer(os.DirFS(dirPath), []string{"data.csv"}) + } + switch req.Type { + case 
db_dataset.TypeCsv: + indexer, err := csvindexer.NewCSVIndexer(os.DirFS(dirPath), req.Data) if err != nil { return fmt.Errorf("table.Create: build csv index: %w", err) } - err = sr.Update().SetPath(relativePath).SetIndexer(indexer.CSVIndexer).Exec(ctx) + err = sr.Update().SetPath(relativePath).SetIndexer(indexer.CSVIndexer).SetValues(req.Data).Exec(ctx) if err != nil { - return fmt.Errorf("table.Create: update dataset metadata: %w", err) // Clarified error + return fmt.Errorf("table.Create: update dataset metadata: %w", err) + } + case db_dataset.TypeImage: + err = sr.Update().SetPath(relativePath).SetValues(req.Data).Exec(ctx) + if err != nil { + return fmt.Errorf("table.Create: update dataset metadata: %w", err) } case db_dataset.TypeList: err := sr.Update().SetValues(req.Data).Exec(ctx) if err != nil { - return fmt.Errorf("table.Create: update dataset values: %w", err) // Clarified error + return fmt.Errorf("table.Create: update dataset values: %w", err) } } return nil @@ -119,6 +106,10 @@ func (s *DatasetServiceImpl) List(ctx context.Context) ([]*DatasetInfo, error) { } datasetInfos := []*DatasetInfo{} for _, ds := range datasets { + // backward compatible + if ds.Type == db_dataset.TypeCsv && len(ds.Values) == 0 { + ds.Values = []string{"data.csv"} + } datasetInfos = append(datasetInfos, &DatasetInfo{ ID: ds.Nanoid, Name: ds.Name, @@ -161,22 +152,38 @@ func (s *DatasetServiceImpl) Update(ctx context.Context, dataset string, req *Up case "description": updater.SetDescription(req.Description) case "data", "files": - processDataRebuild = true - updater.ClearIndexer().ClearPath().SetValues(nil) + // new files or data slice change + if len(req.Files) > 0 || !slices.Equal(req.Data, ds.Values) { + processDataRebuild = true + updater.ClearIndexer().ClearPath().SetValues(nil) + } } } updatedDsEntity, err := updater.Save(ctx) if err != nil { - return ent.Rollback(tx, fmt.Errorf("dataset.Update: save changes: %w", err)) // Clarified error + return ent.Rollback(tx, 
fmt.Errorf("dataset.Update: save changes: %w", err)) } if processDataRebuild { if originalPath != "" { oldDirPath := filepath.Join(s.cfg.Common.DataDir, originalPath) if _, statErr := os.Stat(oldDirPath); !os.IsNotExist(statErr) { - if removeErr := os.RemoveAll(oldDirPath); removeErr != nil { - return ent.Rollback(tx, fmt.Errorf("dataset.Update: failed to remove old directory %s: %w", oldDirPath, removeErr)) + keep := map[string]bool{} + for _, file := range req.Data { + keep[file] = true + } + entries, err := os.ReadDir(oldDirPath) + if err != nil { + return ent.Rollback(tx, fmt.Errorf("dataset.Update: read dir: %w", err)) + } + for _, e := range entries { + if _, ok := keep[e.Name()]; !ok { + err = os.Remove(filepath.Join(oldDirPath, e.Name())) + if err != nil { + return ent.Rollback(tx, fmt.Errorf("dataset.Update: remove file: %w", err)) + } + } } } } @@ -217,7 +224,7 @@ func (s *DatasetServiceImpl) Delete(ctx context.Context, dataset string) error { err = tx.Dataset.DeleteOne(ds).Exec(ctx) if err != nil { - return ent.Rollback(tx, fmt.Errorf("dataset.Delete: execute delete: %w", err)) // Clarified error + return ent.Rollback(tx, fmt.Errorf("dataset.Delete: execute delete: %w", err)) } return tx.Commit() @@ -229,7 +236,7 @@ func (s *DatasetServiceImpl) Get(ctx context.Context, source string) (*DatasetIn db_dataset.Nanoid(source), )).Only(ctx) if err != nil { - return nil, fmt.Errorf("dataset.Get: query dataset: %w", err) // Clarified error + return nil, fmt.Errorf("dataset.Get: query dataset: %w", err) } return &DatasetInfo{ Name: sr.Name, @@ -276,6 +283,11 @@ func (s *DatasetServiceImpl) Preview(ctx context.Context, dataset string) (*Data Type: sr.Type, Rows: rows, }, nil + case db_dataset.TypeImage: + return &DatasetRows{ + Type: sr.Type, + Data: sr.Values, + }, nil case db_dataset.TypeList: return &DatasetRows{ Type: sr.Type, diff --git a/services/dataset/dataset_test.go b/services/dataset/dataset_test.go index 51b3c75..2816e9b 100644 --- 
a/services/dataset/dataset_test.go +++ b/services/dataset/dataset_test.go @@ -4,7 +4,6 @@ import ( "bytes" "encoding/csv" "fmt" - "io" "os" "testing" @@ -13,6 +12,8 @@ import ( "github.com/Yiling-J/tablepilot/config" "github.com/Yiling-J/tablepilot/ent" + "github.com/Yiling-J/tablepilot/ent/dataset" + "github.com/Yiling-J/tablepilot/ent/schema" "github.com/Yiling-J/tablepilot/infra/db" "github.com/google/uuid" "github.com/stretchr/testify/require" @@ -75,7 +76,11 @@ func TestDatasetService_Get(t *testing.T) { Name: csvDatasetName, Description: csvDatasetDesc, Type: "csv", - Files: []io.Reader{bytes.NewReader(csvBuf.Bytes())}, + Files: []CreateDatasetFile{{ + Name: "file.csv", + Reader: bytes.NewReader(csvBuf.Bytes()), + }}, + Data: []string{"file.csv"}, }) require.NoError(t, err) require.NotEmpty(t, csvNanoid) @@ -108,30 +113,34 @@ func TestDatasetService_List(t *testing.T) { require.NoError(t, err) require.Empty(t, listedDatasets) - listDatasetName_l := "test-list-dataset-for-list" - listDatasetDesc_l := "A list dataset for testing List" - listDatasetData_l := []string{"entryA", "entryB"} + listDatasetName := "test-list-dataset-for-list" + listDatasetDesc := "A list dataset for testing List" + listDatasetData := []string{"entryA", "entryB"} _, err = srv.Create(ctx, &CreateDatasetRequest{ - Name: listDatasetName_l, - Description: listDatasetDesc_l, + Name: listDatasetName, + Description: listDatasetDesc, Type: "list", - Data: listDatasetData_l, + Data: listDatasetData, }) require.NoError(t, err) - csvDatasetName_l := "test-csv-dataset-for-list" - csvDatasetDesc_l := "A csv dataset for testing List" - var csvBuf_l bytes.Buffer - csvWriter_l := csv.NewWriter(&csvBuf_l) - csvHeaders_l := []string{"header1", "header2", "header3"} - _ = csvWriter_l.Write(csvHeaders_l) - _ = csvWriter_l.Write([]string{"r1c1", "r1c2", "r1c3"}) - csvWriter_l.Flush() + csvDatasetName := "test-csv-dataset-for-list" + csvDatasetDesc := "A csv dataset for testing List" + var csvBuf 
bytes.Buffer + csvWriter := csv.NewWriter(&csvBuf) + csvHeaders := []string{"header1", "header2", "header3"} + _ = csvWriter.Write(csvHeaders) + _ = csvWriter.Write([]string{"r1c1", "r1c2", "r1c3"}) + csvWriter.Flush() _, err = srv.Create(ctx, &CreateDatasetRequest{ - Name: csvDatasetName_l, - Description: csvDatasetDesc_l, + Name: csvDatasetName, + Description: csvDatasetDesc, Type: "csv", - Files: []io.Reader{bytes.NewReader(csvBuf_l.Bytes())}, + Files: []CreateDatasetFile{{ + Name: "file", + Reader: bytes.NewReader(csvBuf.Bytes()), + }}, + Data: []string{"file"}, }) require.NoError(t, err) @@ -143,18 +152,18 @@ func TestDatasetService_List(t *testing.T) { foundCsvDs := false for _, dsInfo := range listedDatasets { switch dsInfo.Name { - case listDatasetName_l: + case listDatasetName: foundListDs = true - require.Equal(t, listDatasetDesc_l, dsInfo.Description) + require.Equal(t, listDatasetDesc, dsInfo.Description) require.Equal(t, "list", dsInfo.Type) - require.Equal(t, len(listDatasetData_l), dsInfo.ValueCount) + require.Equal(t, len(listDatasetData), dsInfo.ValueCount) require.Equal(t, 0, dsInfo.ColumnCount) - case csvDatasetName_l: + case csvDatasetName: foundCsvDs = true - require.Equal(t, csvDatasetDesc_l, dsInfo.Description) + require.Equal(t, csvDatasetDesc, dsInfo.Description) require.Equal(t, "csv", dsInfo.Type) - require.Equal(t, len(csvHeaders_l), dsInfo.ColumnCount) - require.Equal(t, 0, dsInfo.ValueCount) + require.Equal(t, len(csvHeaders), dsInfo.ColumnCount) + require.Equal(t, 1, dsInfo.ValueCount) require.Equal(t, []string{"header1", "header2", "header3"}, dsInfo.Columns) } } @@ -194,7 +203,11 @@ func TestDatasetService_Create(t *testing.T) { Name: "ds2", Description: "dataset2", Type: "csv", - Files: []io.Reader{bytes.NewReader(buf.Bytes()), bytes.NewReader(buf2.Bytes())}, + Files: []CreateDatasetFile{ + {Name: "c2.csv", Reader: bytes.NewReader(buf.Bytes())}, + {Name: "c1.csv", Reader: bytes.NewReader(buf2.Bytes())}, + }, + Data: 
[]string{"c2.csv", "c1.csv"}, }) require.NoError(t, err) defer func() { @@ -211,6 +224,47 @@ func TestDatasetService_Create(t *testing.T) { {"Name": "Bob", "Age": "25", "City": "San Francisco"}, {"Name": "Tommy", "Age": "65", "City": "Apple"}, }, rows.Rows) + + di, err := db.Dataset.Query().Where(dataset.Nanoid(ds2)).Only(t.Context()) + require.NoError(t, err) + require.Equal(t, []string{"c2.csv", "c1.csv"}, di.Values) + require.Equal(t, schema.FileOffset{ + File: 0, + Total: 2, + Offset: 14, + }, di.Indexer.Positions[0]) + + ds3, err := srv.Create(t.Context(), &CreateDatasetRequest{ + Name: "ds3", + Description: "dataset3", + Type: "csv", + Files: []CreateDatasetFile{ + {Name: "c2.csv", Reader: bytes.NewReader(buf.Bytes())}, + {Name: "c1.csv", Reader: bytes.NewReader(buf2.Bytes())}, + }, + Data: []string{"c1.csv", "c2.csv"}, + }) + require.NoError(t, err) + defer func() { + _ = os.RemoveAll("./dstest") + }() + + rows, err = srv.Preview(t.Context(), ds3) + require.NoError(t, err) + require.Equal(t, []map[string]any{ + {"Name": "Tommy", "Age": "65", "City": "Apple"}, + {"Name": "Alice", "Age": "30", "City": "New York"}, + {"Name": "Bob", "Age": "25", "City": "San Francisco"}, + }, rows.Rows) + + di, err = db.Dataset.Query().Where(dataset.Nanoid(ds3)).Only(t.Context()) + require.NoError(t, err) + require.Equal(t, []string{"c1.csv", "c2.csv"}, di.Values) + require.Equal(t, schema.FileOffset{ + File: 0, + Total: 1, + Offset: 14, + }, di.Indexer.Positions[0]) } func TestDatasetService_Update(t *testing.T) { @@ -282,15 +336,41 @@ func TestDatasetService_Update(t *testing.T) { srv := newServiceForTest("csv_update") csvName := "initial-csv" csvDesc := "Initial CSV description" - var initialCsvBuf bytes.Buffer - csvW := csv.NewWriter(&initialCsvBuf) + + var csv1buf bytes.Buffer + csvW := csv.NewWriter(&csv1buf) _ = csvW.Write([]string{"h1", "h2"}) - _ = csvW.Write([]string{"r1v1", "r1v2"}) + _ = csvW.Write([]string{"v1", "v2"}) + csvW.Flush() + var csv2buf bytes.Buffer + 
csvW = csv.NewWriter(&csv2buf) + _ = csvW.Write([]string{"h1", "h2"}) + _ = csvW.Write([]string{"v3", "v4"}) + csvW.Flush() + var csv3buf bytes.Buffer + csvW = csv.NewWriter(&csv3buf) + _ = csvW.Write([]string{"h1", "h2"}) + _ = csvW.Write([]string{"v5", "v6"}) csvW.Flush() - initialHeaders := []string{"h1", "h2"} + initialHeaders := []string{"h1", "h2"} csvNanoid, err := srv.Create(ctx, &CreateDatasetRequest{ - Name: csvName, Description: csvDesc, Type: "csv", Files: []io.Reader{bytes.NewReader(initialCsvBuf.Bytes())}, + Name: csvName, Description: csvDesc, Type: "csv", + Files: []CreateDatasetFile{ + { + Name: "1.csv", + Reader: bytes.NewReader(csv1buf.Bytes()), + }, + { + Name: "2.csv", + Reader: bytes.NewReader(csv2buf.Bytes()), + }, + { + Name: "3.csv", + Reader: bytes.NewReader(csv3buf.Bytes()), + }, + }, + Data: []string{"1.csv", "2.csv", "3.csv"}, }) require.NoError(t, err) require.NotEmpty(t, csvNanoid) @@ -309,107 +389,76 @@ func TestDatasetService_Update(t *testing.T) { require.Equal(t, updatedCsvDesc, retrieved.Description) require.Equal(t, len(initialHeaders), retrieved.ColumnCount) + // update 2.csv var updatedCsvBuf bytes.Buffer csvW = csv.NewWriter(&updatedCsvBuf) - updatedHeaders := []string{"new_h1", "new_h2", "new_h3"} - _ = csvW.Write(updatedHeaders) - _ = csvW.Write([]string{"new_r1v1", "new_r1v2", "new_r1v3"}) + _ = csvW.Write([]string{"h1", "h2"}) + _ = csvW.Write([]string{"vv1", "vv2"}) csvW.Flush() + err = srv.Update(ctx, csvNanoid, &UpdateDatasetRequest{ - CreateDatasetRequest: CreateDatasetRequest{Type: "csv", Files: []io.Reader{bytes.NewReader(updatedCsvBuf.Bytes())}}, - Fields: []string{"files"}, + CreateDatasetRequest: CreateDatasetRequest{ + Type: "csv", + Files: []CreateDatasetFile{{ + Name: "2.csv", + Reader: bytes.NewReader(updatedCsvBuf.Bytes()), + }}, + Data: []string{"1.csv", "2.csv", "3.csv"}, + }, + Fields: []string{"files"}, }) require.NoError(t, err) - - retrieved, err = srv.Get(ctx, csvNanoid) - require.NoError(t, err) - 
require.Equal(t, len(updatedHeaders), retrieved.ColumnCount) - preview, err := srv.Preview(ctx, csvNanoid) require.NoError(t, err) - require.Len(t, preview.Rows, 1) - require.Equal(t, map[string]any{"new_h1": "new_r1v1", "new_h2": "new_r1v2", "new_h3": "new_r1v3"}, preview.Rows[0]) - }) - - t.Run("update non-existent dataset", func(t *testing.T) { - srv := newServiceForTest("non_existent_update") - err := srv.Update(ctx, "fake-nanoid", &UpdateDatasetRequest{ - CreateDatasetRequest: CreateDatasetRequest{Name: "anything"}, - Fields: []string{"name"}, - }) - require.Error(t, err) - }) + require.Equal(t, []map[string]any{ + {"h1": "v1", "h2": "v2"}, + {"h1": "vv1", "h2": "vv2"}, + {"h1": "v5", "h2": "v6"}, + }, preview.Rows) - t.Run("convert list to csv not allowed", func(t *testing.T) { - srv := newServiceForTest("list_to_csv") - listName := "list-to-convert" - listNanoid, err := srv.Create(ctx, &CreateDatasetRequest{ - Name: listName, Description: "list to be csv", Type: "list", Data: []string{"q", "w", "e"}, - }) - require.NoError(t, err) - - var csvBuf bytes.Buffer - csvW := csv.NewWriter(&csvBuf) - csvHeaders := []string{"c1", "c2"} - _ = csvW.Write(csvHeaders) - _ = csvW.Write([]string{"d1", "d2"}) - csvW.Flush() - err = srv.Update(ctx, listNanoid, &UpdateDatasetRequest{ - CreateDatasetRequest: CreateDatasetRequest{Type: "csv", Files: []io.Reader{bytes.NewReader(csvBuf.Bytes())}, Data: nil}, - Fields: []string{"type", "files", "data"}, + // reorder + err = srv.Update(ctx, csvNanoid, &UpdateDatasetRequest{ + CreateDatasetRequest: CreateDatasetRequest{ + Type: "csv", + Data: []string{"3.csv", "2.csv", "1.csv"}, + }, + Fields: []string{"files"}, }) - require.Error(t, err) - require.EqualError(t, err, "dataset type cannot be changed via update") - - // Verify that the dataset was NOT changed - retrieved, err := srv.Get(ctx, listNanoid) require.NoError(t, err) - require.Equal(t, "list", retrieved.Type) // Should still be list - originalData, err := srv.Preview(ctx, 
listNanoid) + preview, err = srv.Preview(ctx, csvNanoid) require.NoError(t, err) - require.Equal(t, []string{"q", "w", "e"}, originalData.Data) - }) - - t.Run("convert csv to list not allowed", func(t *testing.T) { - srv := newServiceForTest("csv_to_list") - csvName := "csv-to-convert" - originalCsvHeaders := []string{"h_old1", "h_old2"} - originalCsvRow := []string{"r_old1", "r_old2"} - - var initialCsvBuf bytes.Buffer - csvW := csv.NewWriter(&initialCsvBuf) - _ = csvW.Write(originalCsvHeaders) - _ = csvW.Write(originalCsvRow) + require.Equal(t, []map[string]any{ + {"h1": "v5", "h2": "v6"}, + {"h1": "vv1", "h2": "vv2"}, + {"h1": "v1", "h2": "v2"}, + }, preview.Rows) + + // remove 2.csv and add 4.csv + var updatedCsvBuf2 bytes.Buffer + csvW = csv.NewWriter(&updatedCsvBuf2) + _ = csvW.Write([]string{"h1", "h2"}) + _ = csvW.Write([]string{"vv3", "vv4"}) csvW.Flush() - csvNanoid, err := srv.Create(ctx, &CreateDatasetRequest{ - Name: csvName, Description: "csv to be list", Type: "csv", Files: []io.Reader{bytes.NewReader(initialCsvBuf.Bytes())}, - }) - require.NoError(t, err) - listData := []string{"new_list_item1", "new_list_item2"} err = srv.Update(ctx, csvNanoid, &UpdateDatasetRequest{ - CreateDatasetRequest: CreateDatasetRequest{Type: "list", Data: listData, Files: nil}, - Fields: []string{"type", "data", "files"}, + CreateDatasetRequest: CreateDatasetRequest{ + Type: "csv", + Files: []CreateDatasetFile{{ + Name: "4.csv", + Reader: bytes.NewReader(updatedCsvBuf2.Bytes()), + }}, + Data: []string{"1.csv", "4.csv", "3.csv"}, + }, + Fields: []string{"files"}, }) - require.Error(t, err) - require.EqualError(t, err, "dataset type cannot be changed via update") - - // Verify that the dataset was NOT changed - retrieved, err := srv.Get(ctx, csvNanoid) require.NoError(t, err) - require.Equal(t, "csv", retrieved.Type) - require.Equal(t, len(originalCsvHeaders), retrieved.ColumnCount) - require.Equal(t, 0, retrieved.ValueCount) - - preview, err := srv.Preview(ctx, csvNanoid) + 
preview, err = srv.Preview(ctx, csvNanoid) require.NoError(t, err) - require.Empty(t, preview.Data) - require.Len(t, preview.Rows, 1) - expectedRow := make(map[string]any) - for i, h := range originalCsvHeaders { - expectedRow[h] = originalCsvRow[i] - } - require.Equal(t, expectedRow, preview.Rows[0]) + require.Equal(t, []map[string]any{ + {"h1": "v1", "h2": "v2"}, + {"h1": "vv3", "h2": "vv4"}, + {"h1": "v5", "h2": "v6"}, + }, preview.Rows) }) } @@ -469,7 +518,11 @@ func TestDatasetService_Delete(t *testing.T) { csvW.Flush() csvNanoid, err := srv.Create(ctx, &CreateDatasetRequest{ - Name: csvName, Description: "temp csv", Type: "csv", Files: []io.Reader{bytes.NewReader(csvBuf.Bytes())}, + Name: csvName, Description: "temp csv", Type: "csv", + Files: []CreateDatasetFile{{ + Name: "file", + Reader: bytes.NewReader(csvBuf.Bytes()), + }}, }) require.NoError(t, err) require.NotEmpty(t, csvNanoid) @@ -531,6 +584,18 @@ func TestDatasetService_Preview(t *testing.T) { require.Equal(t, data, rows.Data) }) + t.Run("image show all files", func(t *testing.T) { + data := []string{"a1.png", "b1.png", "a2.png", "1.png"} + for i := range 120 { + data = append(data, fmt.Sprintf("%d", i)) + } + ds1, err := db.Dataset.Create().SetName("dsi").SetType(dataset.TypeImage).SetValues(data).Save(t.Context()) + require.NoError(t, err) + rows, err := srv.Preview(t.Context(), ds1.Nanoid) + require.NoError(t, err) + require.Equal(t, data, rows.Data) + }) + t.Run("csv show first 100 rows", func(t *testing.T) { var buf bytes.Buffer writer := csv.NewWriter(&buf) @@ -544,7 +609,11 @@ func TestDatasetService_Preview(t *testing.T) { Name: "ds2", Description: "dataset2", Type: "csv", - Files: []io.Reader{bytes.NewReader(buf.Bytes())}, + Files: []CreateDatasetFile{{ + Name: "file", + Reader: bytes.NewReader(buf.Bytes()), + }}, + Data: []string{"file"}, }) require.NoError(t, err) defer func() { diff --git a/services/dataset/models.go b/services/dataset/models.go index a936ecc..744ff98 100644 --- 
a/services/dataset/models.go +++ b/services/dataset/models.go @@ -7,12 +7,17 @@ import ( db_dataset "github.com/Yiling-J/tablepilot/ent/dataset" ) +type CreateDatasetFile struct { + Name string + Reader io.Reader +} + type CreateDatasetRequest struct { - Name string `json:"name"` - Description string `json:"description"` - Type db_dataset.Type `json:"type"` - Data []string `json:"data"` // for list type - Files []io.Reader `json:"files"` // for csv type + Name string `json:"name"` + Description string `json:"description"` + Type db_dataset.Type `json:"type"` + Data []string `json:"data"` // for list type + Files []CreateDatasetFile `json:"files"` // for csv type } type UpdateDatasetRequest struct { @@ -21,14 +26,16 @@ type UpdateDatasetRequest struct { } type DatasetInfo struct { - ID string `json:"id"` - Name string `json:"name"` - Description string `json:"description"` - Type string `json:"type"` - ColumnCount int `json:"column_count"` - ValueCount int `json:"value_count"` - Data []string `json:"data"` - Columns []string `json:"columns"` + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Type string `json:"type"` + ColumnCount int `json:"column_count"` + ValueCount int `json:"value_count"` + // for list type, data is the available options + // for files type, data is the files in the dataset dir + Data []string `json:"data"` + Columns []string `json:"columns"` } type DatasetRows struct { diff --git a/services/source/files.go b/services/source/files.go index 1722193..03ae94e 100644 --- a/services/source/files.go +++ b/services/source/files.go @@ -11,7 +11,7 @@ import ( type FilesSource struct { BasicSource Paths []string `json:"paths"` - files []string + Files []string } func (f *FilesSource) getRoot(ctx context.Context, logger *zap.SugaredLogger, dir string) (*os.Root, string, error) { @@ -31,7 +31,7 @@ func (f *FilesSource) Init(ctx context.Context, logger *zap.SugaredLogger, dir s return err } fileSystem := 
root.FS() - f.files, err = parsePaths(fileSystem, f.Paths) + f.Files, err = parsePaths(fileSystem, f.Paths) if err != nil { return err } @@ -39,9 +39,9 @@ func (f *FilesSource) Init(ctx context.Context, logger *zap.SugaredLogger, dir s } func (f *FilesSource) Next(ctx context.Context, idx int) (*schema.CellValue, error) { - return &schema.CellValue{Value: f.files[idx]}, nil + return &schema.CellValue{Value: f.Files[idx]}, nil } func (f *FilesSource) Total() int { - return len(f.files) + return len(f.Files) } diff --git a/services/source/files_test.go b/services/source/files_test.go index 6895eb5..a67edc9 100644 --- a/services/source/files_test.go +++ b/services/source/files_test.go @@ -29,7 +29,7 @@ func TestSource_Files(t *testing.T) { "parquet/test_data/3.parquet", "parquet/test_data/4.parquet", "parquet/test_data/5.parquet", "parquet/test_data/6.parquet", "parquet/test_data/7.parquet", "parquet/test_data/8.parquet", "parquet/test_data/9.parquet", tmpFile.Name()[2:], - }, so.files) + }, so.Files) indexer := NewIndexer(so, &ent.TableColumn{Random: false}) v, err := indexer.Next(ctx) require.NoError(t, err) diff --git a/services/table/table.go b/services/table/table.go index e0d5bf6..2500cd3 100644 --- a/services/table/table.go +++ b/services/table/table.go @@ -803,6 +803,7 @@ func (t *TableServiceImpl) Import(ctx context.Context, request ImportRequest) (s } switch ds.Type { case dataset.TypeList: + case dataset.TypeImage: case dataset.TypeCsv: ts := &source.CsvSource{ RandomCSV: &csvindexer.CSVIndexer{ @@ -920,6 +921,15 @@ func getSourceFromColumn(ctx context.Context, db *ent.Client, dataDir string, co }, } so = ls + case dataset.TypeImage: + cs := []string{} + for _, v := range ds.Values { + cs = append(cs, filepath.Join("datasets/shared", ds.Nanoid, v)) + } + ls := &source.FilesSource{ + Files: cs, + } + so = ls } case tablecolumn.SourceTypeOptions: ls := &source.ListSource{Options: column.Options} diff --git a/services/table/table_test.go 
b/services/table/table_test.go index 8e2a885..6bcb32b 100644 --- a/services/table/table_test.go +++ b/services/table/table_test.go @@ -6,7 +6,6 @@ import ( "encoding/base64" "encoding/csv" "encoding/json" - "io" "os" "strings" "testing" @@ -540,6 +539,7 @@ func TestTableService_ImportSourceColumn(t *testing.T) { {"list", tablecolumn.SourceTypeDataset}, {"table", tablecolumn.SourceTypeTable}, {"csv", tablecolumn.SourceTypeDataset}, + {"image", tablecolumn.SourceTypeDataset}, } for _, tc := range cases { @@ -586,7 +586,32 @@ func TestTableService_ImportSourceColumn(t *testing.T) { Name: "s1", Description: "ds", Type: dataset.TypeCsv, - Files: []io.Reader{b}, + Files: []dataset_service.CreateDatasetFile{{ + Name: "file", + Reader: b, + }}, + Data: []string{"file"}, + }) + require.NoError(t, err) + dsid = id + case "image": + var buf []byte + b := bytes.NewBuffer(buf) + writer := csv.NewWriter(b) + require.NoError(t, writer.Write([]string{"col"})) + require.NoError(t, writer.Write([]string{"bar"})) + writer.Flush() + require.NoError(t, writer.Error()) + dsv := dataset_service.NewDatasetService(db, &config.Config{}) + id, err := dsv.Create(t.Context(), &dataset_service.CreateDatasetRequest{ + Name: "s1", + Description: "ds", + Type: dataset.TypeImage, + Files: []dataset_service.CreateDatasetFile{{ + Name: "file", + Reader: b, + }}, + Data: []string{"file"}, }) require.NoError(t, err) dsid = id @@ -641,6 +666,11 @@ func TestTableService_ImportSourceColumn(t *testing.T) { "description": "c1d", }}, }, cell) + case "image": + require.Equal(t, &schema.CellValue{ + Value: "bar", + ContextValue: nil, + }, cell) default: require.Equal(t, &schema.CellValue{ Value: "bar", @@ -922,9 +952,13 @@ func TestTableService_ImportLinked(t *testing.T) { require.NoError(t, writer.Error()) ds := dataset_service.NewDatasetService(db, &config.Config{}) did, err := ds.Create(t.Context(), &dataset_service.CreateDatasetRequest{ - Name: "ds", - Type: dataset.TypeCsv, - Files: []io.Reader{b}, + 
Name: "ds", + Type: dataset.TypeCsv, + Files: []dataset_service.CreateDatasetFile{{ + Name: "file", + Reader: b, + }}, + Data: []string{"file"}, }) require.NoError(t, err) diff --git a/tests/cli/snapshot.go b/tests/cli/snapshot.go index cae4a6f..f3956f3 100644 --- a/tests/cli/snapshot.go +++ b/tests/cli/snapshot.go @@ -22,23 +22,23 @@ var snapshots = []struct { example string prepare [][]string }{ - // test auto column gen, pick from list dataset and pick from options - {"recipes", "recipes.json", [][]string{ - {"dataset", "create", "--name", "ings", "--type", "list", "--path", "cases/ingredients.txt"}, - }}, - // test pick from table with context - {"recipes_for_customers", "recipes_for_customers.json", [][]string{ - {"create", "cases/customers.json"}, - {"generate", "customers", "-c", "5", "-b", "5"}, - }}, - // test pick from csv dataset with wildcard path - {"imdb_movie_haiku", "haiku.json", [][]string{ - {"dataset", "create", "--name", "movies", "--type", "csv", "--path", "cases/movies/*.csv"}, - }}, - // vision - // {"icon_jokes", "icon_jokes.json", [][]string{ - // {"dataset", "create", "--name", "icons", "--type", "csv", "--path", "cases/icons/icons.csv"}, + // // test auto column gen, pick from list dataset and pick from options + // {"recipes", "recipes.json", [][]string{ + // {"dataset", "create", "--name", "ings", "--type", "list", "--path", "cases/ingredients.txt"}, + // }}, + // // test pick from table with context + // {"recipes_for_customers", "recipes_for_customers.json", [][]string{ + // {"create", "cases/customers.json"}, + // {"generate", "customers", "-c", "5", "-b", "5"}, + // }}, + // // test pick from csv dataset with wildcard path + // {"imdb_movie_haiku", "haiku.json", [][]string{ + // {"dataset", "create", "--name", "movies", "--type", "csv", "--path", "cases/movies/*.csv"}, // }}, + // vision + {"icon_jokes", "icon_jokes.json", [][]string{ + {"dataset", "create", "--name", "icons", "--type", "image", "--path", "cases/icons/*.png"}, + }}, 
} var autofills = []struct { @@ -47,18 +47,18 @@ var autofills = []struct { commands [][]string }{ // autofill - {"pokemons", "pokemons.json", [][]string{ - {"create", "cases/pokemons.json"}, - {"import", "cases/pokemons.csv", "-t", "pokemons"}, - {"autofill", "pokemons", "-c", "5", "-b", "3", "--columns", "Ecology"}, - }}, - // autofill based on linked column with context - {"pokemons_autofill", "pokemons_autofill.json", [][]string{ - {"dataset", "create", "--name", "pokemons", "--type", "csv", "--path", "cases/pokemons.csv"}, - {"create", "cases/pokemons_autofill.json"}, - {"import", "cases/stories.csv", "-t", "pokemon_stories"}, - {"autofill", "pokemon_stories", "-c", "5", "-b", "3", "--columns", "Story"}, - }}, + // {"pokemons", "pokemons.json", [][]string{ + // {"create", "cases/pokemons.json"}, + // {"import", "cases/pokemons.csv", "-t", "pokemons"}, + // {"autofill", "pokemons", "-c", "5", "-b", "3", "--columns", "Ecology"}, + // }}, + // // autofill based on linked column with context + // {"pokemons_autofill", "pokemons_autofill.json", [][]string{ + // {"dataset", "create", "--name", "pokemons", "--type", "csv", "--path", "cases/pokemons.csv"}, + // {"create", "cases/pokemons_autofill.json"}, + // {"import", "cases/stories.csv", "-t", "pokemon_stories"}, + // {"autofill", "pokemon_stories", "-c", "5", "-b", "3", "--columns", "Story"}, + // }}, } func main() { diff --git a/tests/cli/snapshots/icon_jokes.csv b/tests/cli/snapshots/icon_jokes.csv index fc3d508..9e20ec1 100644 --- a/tests/cli/snapshots/icon_jokes.csv +++ b/tests/cli/snapshots/icon_jokes.csv @@ -1,6 +1,6 @@ Image 1,Image 2,Joke -cases/icons/i1.png,cases/icons/i1.png,Why did the two left arrows break up? They just couldn't see eye to eye! -cases/icons/i5.png,cases/icons/i5.png,Why did the trash can get a promotion? Because it was outstanding in its field of refuse! -cases/icons/i3.png,cases/icons/i3.png,What do you call a right arrow that's always telling jokes? A real right-up comedian! 
-cases/icons/i2.png,cases/icons/i2.png,Why did the check mark bring a friend? Because two checks are better than one! -cases/icons/i4.png,cases/icons/i4.png,Why did the X get a time-out? Because it kept crossing the line! +datasets/shared/UkLWZg/i1.png,datasets/shared/UkLWZg/i1.png,Why did the left arrow go to therapy? Because it felt like it was always being pushed to the side! +datasets/shared/UkLWZg/i5.png,datasets/shared/UkLWZg/i5.png,Why did the trash can break up with the broom? Because it felt like it was always being swept aside! +datasets/shared/UkLWZg/i3.png,datasets/shared/UkLWZg/i3.png,Why did the right arrow get a promotion? Because it always knew how to point things in the right direction! +datasets/shared/UkLWZg/i2.png,datasets/shared/UkLWZg/i2.png,Why did the two checkmarks get married? Because they were the perfect match! +datasets/shared/UkLWZg/i4.png,datasets/shared/UkLWZg/i4.png,Why did the X and checkmark break up? They just couldn't agree on anything! diff --git a/tests/cli/snapshots/icon_jokes.json b/tests/cli/snapshots/icon_jokes.json index 66a65ba..8b1aef3 100644 --- a/tests/cli/snapshots/icon_jokes.json +++ b/tests/cli/snapshots/icon_jokes.json @@ -1 +1 @@ -[{"request":"{\"messages\":[{\"content\":\"\\u003cTableDescription\\u003eA table of funny jokes inspired by two icon images\\u003c/TableDescription\\u003e\\nColumns of the table:\\n\\u003cColumns\\u003e\\n \\u003cColumn id=\\\"id\\\" name=\\\"id\\\" description=\\\"index of the row, always starting from 0 in each generation\\\" type=\\\"integer\\\"/\\u003e\\n \\u003cColumn id=\\\"UkLWZg\\\" name=\\\"Image 1\\\" description=\\\"The first icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"gbHJdm\\\" name=\\\"Image 2\\\" description=\\\"The second icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\" name=\\\"Joke\\\" description=\\\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 2\\\" 
type=\\\"string\\\"/\\u003e\\n\\u003c/Columns\\u003e\\nGenerate values for the following missing columns:\\n\\u003cMissingColumns\\u003e\\n \\u003cColumn id=\\\"id\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\"/\\u003e\\n\\u003c/MissingColumns\\u003e\\nBelow is the rows data, each row contains existing columns data, and help me fill missing columns for each row. In the return rows array, provide id field and missing column data.\\n\\u003cRows\\u003e\\n \\u003cRow __id__=\\\"0\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;cases/icons/i1.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;cases/icons/i1.png\\u0026quot;}\\u003c/Row\\u003e\\n \\u003cRow __id__=\\\"1\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;cases/icons/i5.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;cases/icons/i5.png\\u0026quot;}\\u003c/Row\\u003e\\n \\u003cRow __id__=\\\"2\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;cases/icons/i3.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;cases/icons/i3.png\\u0026quot;}\\u003c/Row\\u003e\\n\\u003c/Rows\\u003e\\n\",\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: 
\\u003ccases/icons/i1.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAs9JREFUeF7t2r9OwzAQx3FceIyeH4sNsTAgsTAgsYDojMTAgsTAgth4qqoPYhSqqAFC41zPuXPuxxzHyfeD86dtOMKfaoGgOjsmPwKA8j8BAACgXEB5eqwAACgXUJ4eKwAAygWUp8cKAIByAeXpsQIAoFxAeXqsAAAoF1CeHisAAPwC6/V6FUI4iTHe8feiO7LaFdDEXywWD02+EMLTcrm80U3Jm71KgG789rRTSi8xxiteBr1R1QH0xe/keyOiC72c42euCmAgfnv2H0R0Nj6FzohqADLjtxU/iehUJ+m4WasAmGv87weIcV7Tbz3n+OYB5h7fNICH+GYBvMQ3CeApvjmAzWbTfLSwyrzVV/Ooue98zDwFeYxvZgV4jW8CwHN8dQBvN9y+e4HaPQDxtxwqAIi/WwuTAyD+zwvRpACI//cuMBkA4ve/jk0CgPj/vwsXB0D8/R+sFAVA/OFPtYoBIP5w/GLvAYifF78IAOLnxxcHGBl/3JEa3ZqIDrqMHzS428Rj/Ob8TQB4jW8CwHN8ABi4L+ASpIxgAqBp4PVSZAbAK4IpAAbCLH7bc8hVUOw94IB3AtcIRQCwEvLXRDEAIOQhFAUAwjBCcQAgKH4jhhuzkRXQHsbIlzUXT0eTXIK6/wfef4z7e01MDtAcABB2DCoAQDAAAIQtgtoKwI3ZCID39wT1FeB9JZgB8LoSTAF4RDAH4A3BJIAnBLMAXhBMA3hAMA8wd4QqAOaMUA1ALkJK6T3GeD78VYiNLaoCGEJIKb3GGC9tpM07iuoA9iA8E9F13mnb2apKgB6ERyK6tZM1/0iqBWgRQgjHMcb7/FO2tWXVALZS8o4GALxuYqMAIJaStyMA8LqJjQKAWErejgDA6yY2CgBiKXk7AgCvm9goAIil5O0IALxuYqMAIJaStyMA8LqJjQKAWErejgDA6yY2CgBiKXk7AgCvm9ioL2yrP3D7zb0hAAAAAElFTkSuQmCC\"},\"type\":\"image_url\"}],\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: 
\\u003ccases/icons/i3.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAedJREFUeF7t3MtNA1EQBdFnyUlMCpAPQZIPpDBJzAJ5MRJCmM/Gdbu7SKChjq+8GOzL8gctcEGve3wJAL8IBBAALgCfdwECwAXg8y5AALgAfN4FCAAXgM+7AAHgAvB5FyAAXAA+7wIEgAvA512AAHAB+LwLEAAuAJ93AQLABeDzkQvY9/1p27Z3uM1DzscB3OIfx/F6vV5fJiBEAZzx11rPa623CQgxAF/in/NvjxABcCf+CAQc4Jf47RFQgD/Gb42AAtzKTkfAAaYjRABMRogBmIoQBTARIQ5gGkIkwCSEWIApCNEAExDiAbojlADojFAGoCtCKYCOCOUAuiGUBOiEUBagC0JpgA4I5QGqIwjwkH+/un+kPED1R5qlAarHv+2iLECH+GUBusQvCdApfjmAbvFLAXSMXwaga/wSAJ3jxwN0jx8NMCF+LMCU+JEAk+LHAUyLHwUwMX4MwNT4EQCT4+MA0+PjAP94ntv2A9sRD2T8oDb8YPqHJbR95Z/JIxZw/jJ+WUfQEiZ8U0rEm/B35n5hU8ASpvwKUe8BU6J//jsFgNUFEAAuAJ93AQLABeDzLkAAuAB83gUIABeAz7sAAeAC8HkXIABcAD7vAgSAC8DnXYAAcAH4vAsQAC4An3cBAsAF4PMf9Ta2cF9XfTsAAAAASUVORK5CYII=\"},\"type\":\"image_url\"}],\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: \\u003ccases/icons/i5.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAstJREFUeF7tnd1tE1EQRmflGlCQrHEBpATeQaEEoAWgloQWgBJAyXtKgAK8sgSiBmvRSnnARvhmPDOZm+T41Ts//s6d795drZJB+JQqMJRWp7gAoHgRAAAAxQoUl2cCAFCsQHF5JgAAxQoUl2cCAFCsQHF5JgAAuwqM4zhlaqKqXS26rpqZhQdA5vK7RW4A3EKkzEsAYFQ3WzBjO3d+uXdPce8BAPBt6gBwzgwT4BTQGw4Ar4LOeAA4BfSGA8CroDMeAE4BveHlAPZ/QOtY6m3YK1gr/q77dx9DAdBCevh7AOzpwwT4FpQ7GgBuCX0JAODTzx0NALeEvgQA8Onnjn70ALwCVMdbV0B3x9BqAb31AdB4q6J1Jw6AYgEBAACri+1e711B9z3eqh6bMM+CHtYEMQHFewgAAHD47ebsc3j1Js4EMAFMgGUKOIZyDOUYapmY5rXVm2B1/aZAexdgQVgQFmSdmoPXV1tAdX2rmFgQFoQFWacGCwpUDAvCgrCgwIFq/6kBnobuyo0FYUFYEBb0lwLeGzmrmFgQFoQFWaeGG7FAxbAgLAgLChwobsSsYmJBWBAWZJ2a1FNQaDNHJHv0N2JHaBYaAoBQOe3JAGDXLDQCAKFy2pMBwK5ZaAQAQuW0JwOAXbPQiHsPYL1e/xyG4SRUlU6STdP0a7VaPY1sJ+NRxKWIvIhssqNcV6r6MrKfDAAXIvIussmOcn1U1feR/WQAOBORr5FNdpTrlap+i+wnHMDc3DiOn0TkTWSjHeT6rKpvo/tIAXADIfWf8UQL0crXeqGsFf+/79MA3ECY/fL82OY6ifugqvO+lvJJBTB3vNlsnm+329fDMDwTkVMReZLyS+KS/h
aR79M0/VgsFl+Wy+V1XOp/M6UDyGz+IeQGQDFFAACgWIHi8kwAAIoVKC7PBACgWIHi8kwAAIoVKC7PBACgWIHi8n8AacEjjj/dxkcAAAAASUVORK5CYII=\"},\"type\":\"image_url\"}],\"role\":\"user\"}],\"model\":\"gemini-2.0-flash-001\",\"max_completion_tokens\":6000,\"presence_penalty\":0,\"temperature\":0.6,\"response_format\":{\"json_schema\":{\"name\":\"schema\",\"description\":\"schema for table\",\"schema\":{\"properties\":{\"data\":{\"items\":{\"properties\":{\"__id__\":{\"type\":\"integer\"},\"EfhxLZ\":{\"type\":\"string\",\"description\":\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 2\"}},\"additionalProperties\":false,\"type\":\"object\",\"required\":[\"__id__\",\"EfhxLZ\"]},\"type\":\"array\"}},\"additionalProperties\":false,\"type\":\"object\"}},\"type\":\"json_schema\"}}","response":"{\"choices\":[{\"finish_reason\":\"stop\",\"index\":0,\"message\":{\"content\":\"{\\n \\\"data\\\": [\\n {\\n \\\"__id__\\\": 0,\\n \\\"EfhxLZ\\\": \\\"Why did the two left arrows break up? They just couldn't see eye to eye!\\\"\\n },\\n {\\n \\\"__id__\\\": 1,\\n \\\"EfhxLZ\\\": \\\"Why did the trash can get a promotion? Because it was outstanding in its field of refuse!\\\"\\n },\\n {\\n \\\"__id__\\\": 2,\\n \\\"EfhxLZ\\\": \\\"What do you call a right arrow that's always telling jokes? 
A real right-up comedian!\\\"\\n }\\n ]\\n}\",\"role\":\"assistant\"}}],\"created\":1749291427,\"id\":\"oRFEaMTIMvn9hMIPqNeXiAU\",\"model\":\"gemini-2.0-flash-001\",\"object\":\"chat.completion\",\"usage\":{\"completion_tokens\":140,\"prompt_tokens\":1296,\"total_tokens\":1436}}"},{"request":"{\"messages\":[{\"content\":\"\\u003cTableDescription\\u003eA table of funny jokes inspired by two icon images\\u003c/TableDescription\\u003e\\nColumns of the table:\\n\\u003cColumns\\u003e\\n \\u003cColumn id=\\\"id\\\" name=\\\"id\\\" description=\\\"index of the row, always starting from 0 in each generation\\\" type=\\\"integer\\\"/\\u003e\\n \\u003cColumn id=\\\"UkLWZg\\\" name=\\\"Image 1\\\" description=\\\"The first icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"gbHJdm\\\" name=\\\"Image 2\\\" description=\\\"The second icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\" name=\\\"Joke\\\" description=\\\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 2\\\" type=\\\"string\\\"/\\u003e\\n\\u003c/Columns\\u003e\\nGenerate values for the following missing columns:\\n\\u003cMissingColumns\\u003e\\n \\u003cColumn id=\\\"id\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\"/\\u003e\\n\\u003c/MissingColumns\\u003e\\nBelow is the rows data, each row contains existing columns data, and help me fill missing columns for each row. 
In the return rows array, provide id field and missing column data.\\n\\u003cRows\\u003e\\n \\u003cRow __id__=\\\"0\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;cases/icons/i2.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;cases/icons/i2.png\\u0026quot;}\\u003c/Row\\u003e\\n \\u003cRow __id__=\\\"1\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;cases/icons/i4.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;cases/icons/i4.png\\u0026quot;}\\u003c/Row\\u003e\\n\\u003c/Rows\\u003e\\n\",\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: \\u003ccases/icons/i2.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAADHVJREFUeF7tXQuMJFUVvbeqe3ZXXJWw7MxO13s9M8sqmiiyQlzwLx/5LUTEDQYxIPjhJ7ALBgQF/ABGPsriJyiKP/ywAUFZdFHWRIVNIH5IiJ9hZ6brVc8nA2pUXGa7u679SE8yTKZe3VdV3dVrupLJZlP33XvfPe++eu+++24j9J5cLYC5Su8Jhx4AOQ+CHgA9AHK2QM7iex7QAyBnC+QsvucBPQCiLTA5ObmqVqttcBxngIgGELEfEQcA4IX/639bracRcRoApolI/83o/4dhOF0sFncNDg4+k7OdI8V3nQcopUoAcBwRnY6IR2dhOCL6JSL+EAB+LoSoZsEzKx5dAUClUhlxXfdYADiBiDZm1bml+CDiTwFge6PR2FEul8faKYvDO1cAgiA4CQA+3G6jRxmiBcYdnuf9jGOsdtDkAkDehl9syDyB6CgA3Wb4bgCiIwB0u+HzBKLtAPi+vxkRb27H/NlunkS0RUp5SzvltBUApdSXAOBjKTvweBiG2x3HeUb/1ev1ZwuFgl7XP4OIz2reRHQAAKyq1+urCoXCAWEYrtJ/juO8q7n03JBS/m1CiItT8uj8PsD3/fsR8WRbxRHxOSLaDgA7XNfdMTg46NvyWEg/Pj4+UCgUjiKioxFRL3UHbfkR0QNSylNs23Ho2+IBSqk/AsAhHAXmaRDxz2EYbi0UCve0a+f6xBNPFFevXn08Il4KAG+30Q8A/iSEeL1lm1jyzAFQSv0DAF4RK7lFgIgz2vBzc3Nb161b9y9uu7R01Wr1/Eajob9Pay14/VMIsb8FfSxppgD4vj/VitXECgaAGiJurdVqW4eHhyc4DbKmmZqaOrBer28GAO0Ryzj8daxJSrmGQ8uhyQwApZSet4/nCAWARx3HuahUKv2eSd9WsomJifWu625tfneOZAp6SAhxApPWSJYJAL7vX4+IVzIVukcIsYlJ21EypdSPAeC9HKFEdIOU8hMcWhNNagCCINhMRKx1PhF9UUqp3b1rH9/3b0XESzgKIuIWz/NS7RNSAaCUOgcAvsFRVkc6hRAPMWlzJVNK6alUT6mc51whxJ0cwqVoEgOgwwtEpEO7sY8QIrGcWOZtJFBKEYc9Im5MGlFNbJggCB5ghpEPEUI8yelIt9EopV6n1/9xeuloqud51ptOzTcRANzRj4gf8Dzvu3Ed6Ob3QRCcSUTfidMxqRckBYAz+m8SQlwep/i+8F4p9QUAu
My4mknoBdYAMEf/DiGEDoT93zxKqV809y86lhT5JPGCJADEjf6a4zgbumWTldUIqFar68Mw3AUAxSieSb4FVgBwRj8i3uJ53pasOt5NfIIguJmIdOgiMy+wBcA4+nVgTefx5BXbSQJWpVJ5jeu6dc/z/hbXfnx8fEjnGRFRf1ZewAagWq2+KgzDv5iUJKKrpZSfi+tIt7wfHx8/2HXdbYhYd133lMHBwUqcbr7vX4WInzXROY5zcKlU+mscL6tlaFzIQcfz9+zZs6GTIWVOB6NogiB4JRFtA4DXtmh2NRqNU4eGhqZMfEdHR1+2YsUK7QWvNngBO0TB9oAgCH5FRO+MEkpE50spv5rGKJ1qq5Q6CAC08RcfGu0EgNOEEH836eL7/nmI+BUDAI94nncUpz8sAMbGxsrFYtEUs9/T19c33N/fP8MRmieNzsJzHEcb/9Cl9EDE7YVCYdPAwMBzUXrOzMz07927dxwAVkTR1Gq1oZGRkdgpjQVAEASXEpEp6rddCHFinoblyJ6amhqq1+va+G+Iof/JzMzMpsMOO6wWRaeUelAHGA1esNnzvFvj9OIC8JtmguubDdPP5VLKm+KE5fl+cnJSNhoNbfzDOXog4ibP8+6JovV9/zJE1DvkJZ9mYvFvPc97S5ysWADGxsb6i8WiTv2OfIjoICnl7jhheb0PgsBrfXDfyNFBZ2ZLKX8U8x1Yi4hPm2hqtdrAyMiIcVqOBaBSqZzkOI4p7PykEMIqA4JjhKxoZmdnB+fm5rYR0REcno7jvK9UKulU9thHKaUjpTpiuuQThuHGcrlsTPyNBUApdS4AfN0g5LpyuXxtrLY5EExMTKzRaS5E9Cam+DOEEHczaaFSqVzrOM41BvoPCSGMB1axAPi+fzUifsYg5CIhxO1cpTtFp6fOQqGgN1mR365FupwphPiejX5KqQsBQB/mL/kQ0SellMZNWywASiktQAta8rFxWZvOpaEdHR09cPny5fqD+1YOn6TnFtVq9fQwDH9gkHG7EOIikw6xAARBoF34NMMUdGy5XH6Y09GFNL7v393c0heyzpAIguCA1geXlfkWhuFZ5XL527b6a/pKpXKM4zg7otoi4jbP84xZFhwAjEtQx3HWl0qlP9h0wPf9OxHxg602maWpVCqV/VubrMgd+0I9EfFsz/PustF9IW21Wj00DMPI3CbOUjQWAKXUKADorXvUFCRLpZLidsL3/dsR8YJF9KlB2L1798uLxaKe81kX+4joHCnlN7l6L0VXrVZFGIam5OGnhRDrUk1BSql/A8BLo5i4rrvf4ODgfzkd8X3/ZkSMiqcnBmF2dnbl888/r+d844nVAh1TpZLM85mcnHxJo9GIDFk0jzH/I4RY2RUAMLPnrEGYnp7er1araeMfxxkE+lKgECJyWc3k8QJZpwBIPQUx1ssL+80GQSmlg2Ha+Nw8zY8IIe6wMbKJtiNTUBAEmXyEswZhfHx8ebFY1Cs0fdU19mnenj9PSvm1WEILgo58hLNchmYFwlNPPdW3cuVK/cFlXepGxPM9z8v8rKIjy9CsN2JpQdi5c2dh7dq12visK0NEdKGU8ssWA5tN2pGNWDtCEUlBICInCAI957+baaW2hkk6FYpoSzAuCQhKKW389zCNf7EQ4jYmbSIyRh/SB+MY4ehdQghWqHdxLxkdeNHqiHt5AgAuad7l0ldk2/oopR4zXYPNJByta/Y0Go1ZU0/q9fqa4eFh46FNVHtLEGINqhOnpJSxR4GxjGIIWtdfjRkUruseGHfjMzYUofXwff/hmC3++5tb7u8n7VRWIGRxY4Xbh2aI5oxmiCYyfK1rFEkpj4njxwIg7iYMEd0lpTw7TpjpfVoQiKij59K+738LEc8y9IkV7uACoKtYBQZhkzMzM0OmLAIOOClA+LgQIvKAnCPbhkZf+O7v79dpOqZb9x6nOhcLAK1c3I0YIjpFSvmATUeWok0AwhVCiM+nlWvT3vf9kxHx/qg2NlnSN
gB8tJmUatpN/loI8Q6bjmTwYb5SCHFjFjJteCildAZd5IFPM0n5PM/zWGEPNgCtjDJj6onjOBeUSqXIlD2bTsZ5AhFdJaW83oZnFrS6xEEYhsaddRiGa7n16NgAMKeh3cVi8Yg1a9YYl61cQ0SB0MxmvtrzvI5nYevSBrVa7TFTfQmb6UfbwRYAztXUG4UQ3FvzsVgsBoGIPiWlNGVpxPJMSqCUugEArjC1t72mZAUAxwsAYK5ZEvLIoaGhzOpALADhGiHEp5MaME27Vj2JR01FPWxHv7UHtADgeMGjQghuMhTLLs1L09r417GI20CklPpdXDEP29GfCACmF2gy9slWG+yVKUtOEY8koz8NABwv0LXcur44RxxS3OIdSUZ/YgAsvECT7jNFOhaDwS3akXT0pwWA5QVaSK9YR7SfWa+CFrKyrAm6zxTt4Bbp0LZIW1s0FQBaAZvaoEmTYOPm6Szfc4tztGSmrimaGgCtiGWN0K4t4sEpyjEPdla1RDMBoOUJNrVCdziOc2W31JNobbL0Lpeb2phZDdHMAGiBYFMzdJ8sW9k8b860dmimALSmI5vaobAvFW7NumZoqmWo6cNnWUP0BVbzpYuXLVt2b7sufLcy6o5LWLo4s1qhC22XuQfMM2dmQy+F4x4A2ElE+tDjvrTXX/UVVQDYSET6sOhtALDadtWUVY3QpeS2DYDWbpldU9RglCfDMLwvSfl613W14dfbGvxFIzSD2qAm+W0FoPVhtqktmsZW7WjLymxII7jtALQ8IddfS7I1UCd/1KcjAMwboNt/S6aThp+3SUcB6FYg8jB8rgB0CxB5Gr4rAJhXQtejIyJdb+hEU1Uu27l8yWUf4iPNkjUPIuKD3LpuWciN4pHLFGTqkK7O1dfXdyoAnGqqUWRjFH1hulnV5N69e/fey6liZcM7LW3XAbCwQ7rghuu6h6f5OdtGo/F4XM2etEZM076rAUjTsX2lbQ+AnJHqAdADIGcL5Cy+5wE9AHK2QM7iex7QAyBnC+Qs/n+1PtqsKEfEgQAAAABJRU5ErkJggg==\"},\"type\":\"image_url\"}],\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: 
\\u003ccases/icons/i4.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAu5JREFUeF7tnFGOwjAMRIPEJXqF3fvsIfc+7BW4BB+rSERCFZCksT1jNPy2zdjv1aVqBaeiD5TACZqu8CIB4JNAAiQATAAcrwmQADABcLwmQALABMDxmgAJABMAx2sCJABMAByvCZAAMAFwvCZAAsAEwPGaAAkAEwDHh0zA9Xr92rbtD9zrVHxUze4CaiO32+33fD7/ZJEQWbOrgNZIKeW7lHLJICG6ZjcBu0ba+FNLQNTsIuBFI9QSUDWbC+g0QikBWbOpgMFGqCSgazYVUMmiG5q512So1VxAFgkM8CsrFwHsEljguwpglcAE310AmwQ2+CECWCQwwg8TgJbACj9UAEoCM/xwAdES2OFDBERJyAAfJsBbQhb4UAFeEjLBhwuwlpANPoUAKwkZ4dMIWJWQFT6VgKMS6nH1pf/9vXPvaTTdK1G3p6E9Eq+2z57N93XqS//ehw4+3QQ0gpMSeuDrdkr4tAIOXI7eSaCFTy3ASAI1fHoBixLo4acQcFBCCvgSMPL17bwP3W3ovt+FO6IUU0AtYAF+80gvgVaAAfwUEigFGMKnl0AnYBL+RY8iDO8SZuHXH3zoYZyRgCPw20+eVo41Kv/wMhSXIAuAFmscprhwIFyAJTjLtRaYTh0KFeABzGPNKaKTO8MEeILyXHuSb3d3iIAIQBEZXboDO4QLiAQTmTXA+ukuoQIQQBCZMzLCBCBBILN7MkIEMABgqOGZDHcBTI0z1dJkuApgbJitJjcBbI0+jj9TbS4CmBp89SXIUqO5AJbGencfdTtDraYCGBoaAc90OTIVMHFWUb0sHzxxXGo2FzAgwaWR2TN/v39HglvNLgLeSHBrZFUAqmY3AU8aoobfBO4mwb1mVwGPEjL8Y+JeQkTN7gKahCz/GfooIaLmEAEW1+dPXUMCwGYlQALABMDxmgAJABMAx2sCJABMAByvCZAAMAFwvCZAAsAEwPGaAAkAEwDHawIkAEwAHK8JkAAwAXC8JgAs4B9h/IqOl1uPzQAAAABJRU5ErkJggg==\"},\"type\":\"image_url\"}],\"role\":\"user\"}],\"model\":\"gemini-2.0-flash-001\",\"max_completion_tokens\":6000,\"presence_penalty\":0,\"temperature\":0.6,\"response_format\":{\"json_schema\":{\"name\":\"schema\",\"description\":\"schema for table\",\"schema\":{\"properties\":{\"data\":{\"items\":{\"properties\":{\"__id__\":{\"type\":\"integer\"},\"EfhxLZ\":{\"type\":\"string\",\"description\":\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 
2\"}},\"additionalProperties\":false,\"type\":\"object\",\"required\":[\"__id__\",\"EfhxLZ\"]},\"type\":\"array\"}},\"additionalProperties\":false,\"type\":\"object\"}},\"type\":\"json_schema\"}}","response":"{\"choices\":[{\"finish_reason\":\"stop\",\"index\":0,\"message\":{\"content\":\"{\\n \\\"data\\\": [\\n {\\n \\\"__id__\\\": 0,\\n \\\"EfhxLZ\\\": \\\"Why did the check mark bring a friend? Because two checks are better than one!\\\"\\n },\\n {\\n \\\"__id__\\\": 1,\\n \\\"EfhxLZ\\\": \\\"Why did the X get a time-out? Because it kept crossing the line!\\\"\\n }\\n ]\\n}\",\"role\":\"assistant\"}}],\"created\":1749291428,\"id\":\"oxFEaM_VIsGhxfcPqYWlwQ4\",\"model\":\"gemini-2.0-flash-001\",\"object\":\"chat.completion\",\"usage\":{\"completion_tokens\":92,\"prompt_tokens\":961,\"total_tokens\":1053}}"}] +[{"request":"{\"messages\":[{\"content\":\"\\u003cTableDescription\\u003eA table of funny jokes inspired by two icon images\\u003c/TableDescription\\u003e\\nColumns of the table:\\n\\u003cColumns\\u003e\\n \\u003cColumn id=\\\"id\\\" name=\\\"id\\\" description=\\\"index of the row, always starting from 0 in each generation\\\" type=\\\"integer\\\"/\\u003e\\n \\u003cColumn id=\\\"UkLWZg\\\" name=\\\"Image 1\\\" description=\\\"The first icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"gbHJdm\\\" name=\\\"Image 2\\\" description=\\\"The second icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\" name=\\\"Joke\\\" description=\\\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 2\\\" type=\\\"string\\\"/\\u003e\\n\\u003c/Columns\\u003e\\nGenerate values for the following missing columns:\\n\\u003cMissingColumns\\u003e\\n \\u003cColumn id=\\\"id\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\"/\\u003e\\n\\u003c/MissingColumns\\u003e\\nBelow is the rows data, each row contains existing columns data, and help me fill missing columns for each row. 
In the return rows array, provide id field and missing column data.\\n\\u003cRows\\u003e\\n \\u003cRow __id__=\\\"0\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i1.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i1.png\\u0026quot;}\\u003c/Row\\u003e\\n \\u003cRow __id__=\\\"1\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i5.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i5.png\\u0026quot;}\\u003c/Row\\u003e\\n \\u003cRow __id__=\\\"2\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i3.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i3.png\\u0026quot;}\\u003c/Row\\u003e\\n\\u003c/Rows\\u003e\\n\",\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: \\u003cdatasets/shared/UkLWZg/i1.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAs9JREFUeF7t2r9OwzAQx3FceIyeH4sNsTAgsTAgsYDojMTAgsTAgth4qqoPYhSqqAFC41zPuXPuxxzHyfeD86dtOMKfaoGgOjsmPwKA8j8BAACgXEB5eqwAACgXUJ4eKwAAygWUp8cKAIByAeXpsQIAoFxAeXqsAAAoF1CeHisAAPwC6/V6FUI4iTHe8feiO7LaFdDEXywWD02+EMLTcrm80U3Jm71KgG789rRTSi8xxiteBr1R1QH0xe/keyOiC72c42euCmAgfnv2H0R0Nj6FzohqADLjtxU/iehUJ+m4WasAmGv87weIcV7Tbz3n+OYB5h7fNICH+GYBvMQ3CeApvjmAzWbTfLSwyrzVV/Ooue98zDwFeYxvZgV4jW8CwHN8dQBvN9y+e4HaPQDxtxwqAIi/WwuTAyD+zwvRpACI//cuMBkA4ve/jk0CgPj/vwsXB0D8/R+sFAVA/OFPtYoBIP5w/GLvAYifF78IAOLnxxcHGBl/3JEa3ZqIDrqMHzS428Rj/Ob8TQB4jW8CwHN8ABi4L+ASpIxgAqBp4PVSZAbAK4IpAAbCLH7bc8hVUOw94IB3AtcIRQCwEvLXRDEAIOQhFAUAwjBCcQAgKH4jhhuzkRXQHsbIlzUXT0eTXIK6/wfef4z7e01MDtAcABB2DCoAQDAAAIQtgtoKwI3ZCID39wT1FeB9JZgB8LoSTAF4RDAH4A3BJIAnBLMAXhBMA3hAMA8wd4QqAOaMUA1ALkJK6T3GeD78VYiNLaoCGEJIKb3GGC9tpM07iuoA9iA8E9F13mnb2apKgB6ERyK6tZM1/0iqBWgRQgjHMcb7/FO2tWXVALZS8o4GALxuYqMAIJaStyMA8LqJjQKAWErejgDA6yY2CgBiKXk7AgCvm9goAIil5O0IALxuYqMAIJaStyMA8LqJjQKAWErejgDA6yY2CgB
iKXk7AgCvm9ioL2yrP3D7zb0hAAAAAElFTkSuQmCC\"},\"type\":\"image_url\"}],\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: \\u003cdatasets/shared/UkLWZg/i3.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAedJREFUeF7t3MtNA1EQBdFnyUlMCpAPQZIPpDBJzAJ5MRJCmM/Gdbu7SKChjq+8GOzL8gctcEGve3wJAL8IBBAALgCfdwECwAXg8y5AALgAfN4FCAAXgM+7AAHgAvB5FyAAXAA+7wIEgAvA512AAHAB+LwLEAAuAJ93AQLABeDzkQvY9/1p27Z3uM1DzscB3OIfx/F6vV5fJiBEAZzx11rPa623CQgxAF/in/NvjxABcCf+CAQc4Jf47RFQgD/Gb42AAtzKTkfAAaYjRABMRogBmIoQBTARIQ5gGkIkwCSEWIApCNEAExDiAbojlADojFAGoCtCKYCOCOUAuiGUBOiEUBagC0JpgA4I5QGqIwjwkH+/un+kPED1R5qlAarHv+2iLECH+GUBusQvCdApfjmAbvFLAXSMXwaga/wSAJ3jxwN0jx8NMCF+LMCU+JEAk+LHAUyLHwUwMX4MwNT4EQCT4+MA0+PjAP94ntv2A9sRD2T8oDb8YPqHJbR95Z/JIxZw/jJ+WUfQEiZ8U0rEm/B35n5hU8ASpvwKUe8BU6J//jsFgNUFEAAuAJ93AQLABeDzLkAAuAB83gUIABeAz7sAAeAC8HkXIABcAD7vAgSAC8DnXYAAcAH4vAsQAC4An3cBAsAF4PMf9Ta2cF9XfTsAAAAASUVORK5CYII=\"},\"type\":\"image_url\"}],\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: 
\\u003cdatasets/shared/UkLWZg/i5.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAstJREFUeF7tnd1tE1EQRmflGlCQrHEBpATeQaEEoAWgloQWgBJAyXtKgAK8sgSiBmvRSnnARvhmPDOZm+T41Ts//s6d795drZJB+JQqMJRWp7gAoHgRAAAAxQoUl2cCAFCsQHF5JgAAxQoUl2cCAFCsQHF5JgAAuwqM4zhlaqKqXS26rpqZhQdA5vK7RW4A3EKkzEsAYFQ3WzBjO3d+uXdPce8BAPBt6gBwzgwT4BTQGw4Ar4LOeAA4BfSGA8CroDMeAE4BveHlAPZ/QOtY6m3YK1gr/q77dx9DAdBCevh7AOzpwwT4FpQ7GgBuCX0JAODTzx0NALeEvgQA8Onnjn70ALwCVMdbV0B3x9BqAb31AdB4q6J1Jw6AYgEBAACri+1e711B9z3eqh6bMM+CHtYEMQHFewgAAHD47ebsc3j1Js4EMAFMgGUKOIZyDOUYapmY5rXVm2B1/aZAexdgQVgQFmSdmoPXV1tAdX2rmFgQFoQFWacGCwpUDAvCgrCgwIFq/6kBnobuyo0FYUFYEBb0lwLeGzmrmFgQFoQFWaeGG7FAxbAgLAgLChwobsSsYmJBWBAWZJ2a1FNQaDNHJHv0N2JHaBYaAoBQOe3JAGDXLDQCAKFy2pMBwK5ZaAQAQuW0JwOAXbPQiHsPYL1e/xyG4SRUlU6STdP0a7VaPY1sJ+NRxKWIvIhssqNcV6r6MrKfDAAXIvIussmOcn1U1feR/WQAOBORr5FNdpTrlap+i+wnHMDc3DiOn0TkTWSjHeT6rKpvo/tIAXADIfWf8UQL0crXeqGsFf+/79MA3ECY/fL82OY6ifugqvO+lvJJBTB3vNlsnm+329fDMDwTkVMReZLyS+KS/haR79M0/VgsFl+Wy+V1XOp/M6UDyGz+IeQGQDFFAACgWIHi8kwAAIoVKC7PBACgWIHi8kwAAIoVKC7PBACgWIHi8n8AacEjjj/dxkcAAAAASUVORK5CYII=\"},\"type\":\"image_url\"}],\"role\":\"user\"}],\"model\":\"gemini-2.0-flash-001\",\"max_completion_tokens\":6000,\"presence_penalty\":0,\"temperature\":0.6,\"response_format\":{\"json_schema\":{\"name\":\"schema\",\"description\":\"schema for table\",\"schema\":{\"properties\":{\"data\":{\"items\":{\"properties\":{\"__id__\":{\"type\":\"integer\"},\"EfhxLZ\":{\"type\":\"string\",\"description\":\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 2\"}},\"additionalProperties\":false,\"type\":\"object\",\"required\":[\"__id__\",\"EfhxLZ\"]},\"type\":\"array\"}},\"additionalProperties\":false,\"type\":\"object\"}},\"type\":\"json_schema\"}}","response":"{\"choices\":[{\"finish_reason\":\"stop\",\"index\":0,\"message\":{\"content\":\"{\\n \\\"data\\\": [\\n {\\n 
\\\"EfhxLZ\\\": \\\"Why did the left arrow go to therapy? Because it felt like it was always being pushed to the side!\\\",\\n \\\"__id__\\\": 0\\n },\\n {\\n \\\"EfhxLZ\\\": \\\"Why did the trash can break up with the broom? Because it felt like it was always being swept aside!\\\",\\n \\\"__id__\\\": 1\\n },\\n {\\n \\\"EfhxLZ\\\": \\\"Why did the right arrow get a promotion? Because it always knew how to point things in the right direction!\\\",\\n \\\"__id__\\\": 2\\n }\\n ]\\n}\",\"role\":\"assistant\"}}],\"created\":1749654960,\"id\":\"rZ1JaKC_KNmJmNAPl-aeuAg\",\"model\":\"gemini-2.0-flash-001\",\"object\":\"chat.completion\",\"usage\":{\"completion_tokens\":144,\"prompt_tokens\":1324,\"total_tokens\":1468}}"},{"request":"{\"messages\":[{\"content\":\"\\u003cTableDescription\\u003eA table of funny jokes inspired by two icon images\\u003c/TableDescription\\u003e\\nColumns of the table:\\n\\u003cColumns\\u003e\\n \\u003cColumn id=\\\"id\\\" name=\\\"id\\\" description=\\\"index of the row, always starting from 0 in each generation\\\" type=\\\"integer\\\"/\\u003e\\n \\u003cColumn id=\\\"UkLWZg\\\" name=\\\"Image 1\\\" description=\\\"The first icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"gbHJdm\\\" name=\\\"Image 2\\\" description=\\\"The second icon image file\\\" type=\\\"image\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\" name=\\\"Joke\\\" description=\\\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 2\\\" type=\\\"string\\\"/\\u003e\\n\\u003c/Columns\\u003e\\nGenerate values for the following missing columns:\\n\\u003cMissingColumns\\u003e\\n \\u003cColumn id=\\\"id\\\"/\\u003e\\n \\u003cColumn id=\\\"EfhxLZ\\\"/\\u003e\\n\\u003c/MissingColumns\\u003e\\nBelow is the rows data, each row contains existing columns data, and help me fill missing columns for each row. 
In the return rows array, provide id field and missing column data.\\n\\u003cRows\\u003e\\n \\u003cRow __id__=\\\"0\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i2.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i2.png\\u0026quot;}\\u003c/Row\\u003e\\n \\u003cRow __id__=\\\"1\\\"\\u003e{\\u0026quot;UkLWZg\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i4.png\\u0026quot;,\\u0026quot;gbHJdm\\u0026quot;:\\u0026quot;datasets/shared/UkLWZg/i4.png\\u0026quot;}\\u003c/Row\\u003e\\n\\u003c/Rows\\u003e\\n\",\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: \\u003cdatasets/shared/UkLWZg/i2.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAADHVJREFUeF7tXQuMJFUVvbeqe3ZXXJWw7MxO13s9M8sqmiiyQlzwLx/5LUTEDQYxIPjhJ7ALBgQF/ABGPsriJyiKP/ywAUFZdFHWRIVNIH5IiJ9hZ6brVc8nA2pUXGa7u679SE8yTKZe3VdV3dVrupLJZlP33XvfPe++eu+++24j9J5cLYC5Su8Jhx4AOQ+CHgA9AHK2QM7iex7QAyBnC+QsvucBPQCiLTA5ObmqVqttcBxngIgGELEfEQcA4IX/639bracRcRoApolI/83o/4dhOF0sFncNDg4+k7OdI8V3nQcopUoAcBwRnY6IR2dhOCL6JSL+EAB+LoSoZsEzKx5dAUClUhlxXfdYADiBiDZm1bml+CDiTwFge6PR2FEul8faKYvDO1cAgiA4CQA+3G6jRxmiBcYdnuf9jGOsdtDkAkDehl9syDyB6CgA3Wb4bgCiIwB0u+HzBKLtAPi+vxkRb27H/NlunkS0RUp5SzvltBUApdSXAOBjKTvweBiG2x3HeUb/1ev1ZwuFgl7XP4OIz2reRHQAAKyq1+urCoXCAWEYrtJ/juO8q7n03JBS/m1CiItT8uj8PsD3/fsR8WRbxRHxOSLaDgA7XNfdMTg46NvyWEg/Pj4+UCgUjiKioxFRL3UHbfkR0QNSylNs23Ho2+IBSqk/AsAhHAXmaRDxz2EYbi0UCve0a+f6xBNPFFevXn08Il4KAG+30Q8A/iSEeL1lm1jyzAFQSv0DAF4RK7lFgIgz2vBzc3Nb161b9y9uu7R01Wr1/Eajob9Pay14/VMIsb8FfSxppgD4vj/VitXECgaAGiJurdVqW4eHhyc4DbKmmZqaOrBer28GAO0Ryzj8daxJSrmGQ8uhyQwApZSet4/nCAWARx3HuahUKv2eSd9WsomJifWu625tfneOZAp6SAhxApPWSJYJAL7vX4+IVzIVukcIsYlJ21EypdSPAeC9HKFEdIOU8hMcWhNNagCCINhMRKx1PhF9UUqp3b1rH9/3b0XESzgKIuIWz/NS7RNSAaCUOgcAvsFRVkc6hRAPMWlzJVNK6alUT6mc51whxJ0cwqVoEgOgwwtEpEO7sY8QIrGcWOZtJFBKEYc9Im5MGlFNbJggCB5ghpEPEUI8yelIt9EopV6n1/9xeuloqud51ptOzTcRANzRj4
gf8Dzvu3Ed6Ob3QRCcSUTfidMxqRckBYAz+m8SQlwep/i+8F4p9QUAuMy4mknoBdYAMEf/DiGEDoT93zxKqV809y86lhT5JPGCJADEjf6a4zgbumWTldUIqFar68Mw3AUAxSieSb4FVgBwRj8i3uJ53pasOt5NfIIguJmIdOgiMy+wBcA4+nVgTefx5BXbSQJWpVJ5jeu6dc/z/hbXfnx8fEjnGRFRf1ZewAagWq2+KgzDv5iUJKKrpZSfi+tIt7wfHx8/2HXdbYhYd133lMHBwUqcbr7vX4WInzXROY5zcKlU+mscL6tlaFzIQcfz9+zZs6GTIWVOB6NogiB4JRFtA4DXtmh2NRqNU4eGhqZMfEdHR1+2YsUK7QWvNngBO0TB9oAgCH5FRO+MEkpE50spv5rGKJ1qq5Q6CAC08RcfGu0EgNOEEH836eL7/nmI+BUDAI94nncUpz8sAMbGxsrFYtEUs9/T19c33N/fP8MRmieNzsJzHEcb/9Cl9EDE7YVCYdPAwMBzUXrOzMz07927dxwAVkTR1Gq1oZGRkdgpjQVAEASXEpEp6rddCHFinoblyJ6amhqq1+va+G+Iof/JzMzMpsMOO6wWRaeUelAHGA1esNnzvFvj9OIC8JtmguubDdPP5VLKm+KE5fl+cnJSNhoNbfzDOXog4ibP8+6JovV9/zJE1DvkJZ9mYvFvPc97S5ysWADGxsb6i8WiTv2OfIjoICnl7jhheb0PgsBrfXDfyNFBZ2ZLKX8U8x1Yi4hPm2hqtdrAyMiIcVqOBaBSqZzkOI4p7PykEMIqA4JjhKxoZmdnB+fm5rYR0REcno7jvK9UKulU9thHKaUjpTpiuuQThuHGcrlsTPyNBUApdS4AfN0g5LpyuXxtrLY5EExMTKzRaS5E9Cam+DOEEHczaaFSqVzrOM41BvoPCSGMB1axAPi+fzUifsYg5CIhxO1cpTtFp6fOQqGgN1mR365FupwphPiejX5KqQsBQB/mL/kQ0SellMZNWywASiktQAta8rFxWZvOpaEdHR09cPny5fqD+1YOn6TnFtVq9fQwDH9gkHG7EOIikw6xAARBoF34NMMUdGy5XH6Y09GFNL7v393c0heyzpAIguCA1geXlfkWhuFZ5XL527b6a/pKpXKM4zg7otoi4jbP84xZFhwAjEtQx3HWl0qlP9h0wPf9OxHxg602maWpVCqV/VubrMgd+0I9EfFsz/PustF9IW21Wj00DMPI3CbOUjQWAKXUKADorXvUFCRLpZLidsL3/dsR8YJF9KlB2L1798uLxaKe81kX+4joHCnlN7l6L0VXrVZFGIam5OGnhRDrUk1BSql/A8BLo5i4rrvf4ODgfzkd8X3/ZkSMiqcnBmF2dnbl888/r+d844nVAh1TpZLM85mcnHxJo9GIDFk0jzH/I4RY2RUAMLPnrEGYnp7er1araeMfxxkE+lKgECJyWc3k8QJZpwBIPQUx1ssL+80GQSmlg2Ha+Nw8zY8IIe6wMbKJtiNTUBAEmXyEswZhfHx8ebFY1Cs0fdU19mnenj9PSvm1WEILgo58hLNchmYFwlNPPdW3cuVK/cFlXepGxPM9z8v8rKIjy9CsN2JpQdi5c2dh7dq12visK0NEdKGU8ssWA5tN2pGNWDtCEUlBICInCAI957+baaW2hkk6FYpoSzAuCQhKKW389zCNf7EQ4jYmbSIyRh/SB+MY4ehdQghWqHdxLxkdeNHqiHt5AgAuad7l0ldk2/oopR4zXYPNJByta/Y0Go1ZU0/q9fqa4eFh46FNVHtLEGINqhOnpJSxR4GxjGIIWtdfjRkUruseGHfjMzYUofXwff/hmC3++5tb7u8n7VRWIGRxY4Xbh2aI5oxmiCYyfK1rFEkpj4njxwIg7iYMEd0lpTw7TpjpfVoQiKij59K+738LEc8y9IkV7uACoKtYBQZhkzMzM0OmLAIOOClA+LgQIvKAnCPbhkZf+O7v79dpOqZb9x6nOh
cLAK1c3I0YIjpFSvmATUeWok0AwhVCiM+nlWvT3vf9kxHx/qg2NlnSNgB8tJmUatpN/loI8Q6bjmTwYb5SCHFjFjJteCildAZd5IFPM0n5PM/zWGEPNgCtjDJj6onjOBeUSqXIlD2bTsZ5AhFdJaW83oZnFrS6xEEYhsaddRiGa7n16NgAMKeh3cVi8Yg1a9YYl61cQ0SB0MxmvtrzvI5nYevSBrVa7TFTfQmb6UfbwRYAztXUG4UQ3FvzsVgsBoGIPiWlNGVpxPJMSqCUugEArjC1t72mZAUAxwsAYK5ZEvLIoaGhzOpALADhGiHEp5MaME27Vj2JR01FPWxHv7UHtADgeMGjQghuMhTLLs1L09r417GI20CklPpdXDEP29GfCACmF2gy9slWG+yVKUtOEY8koz8NABwv0LXcur44RxxS3OIdSUZ/YgAsvECT7jNFOhaDwS3akXT0pwWA5QVaSK9YR7SfWa+CFrKyrAm6zxTt4Bbp0LZIW1s0FQBaAZvaoEmTYOPm6Szfc4tztGSmrimaGgCtiGWN0K4t4sEpyjEPdla1RDMBoOUJNrVCdziOc2W31JNobbL0Lpeb2phZDdHMAGiBYFMzdJ8sW9k8b860dmimALSmI5vaobAvFW7NumZoqmWo6cNnWUP0BVbzpYuXLVt2b7sufLcy6o5LWLo4s1qhC22XuQfMM2dmQy+F4x4A2ElE+tDjvrTXX/UVVQDYSET6sOhtALDadtWUVY3QpeS2DYDWbpldU9RglCfDMLwvSfl613W14dfbGvxFIzSD2qAm+W0FoPVhtqktmsZW7WjLymxII7jtALQ8IddfS7I1UCd/1KcjAMwboNt/S6aThp+3SUcB6FYg8jB8rgB0CxB5Gr4rAJhXQtejIyJdb+hEU1Uu27l8yWUf4iPNkjUPIuKD3LpuWciN4pHLFGTqkK7O1dfXdyoAnGqqUWRjFH1hulnV5N69e/fey6liZcM7LW3XAbCwQ7rghuu6h6f5OdtGo/F4XM2etEZM076rAUjTsX2lbQ+AnJHqAdADIGcL5Cy+5wE9AHK2QM7iex7QAyBnC+Qs/n+1PtqsKEfEgQAAAABJRU5ErkJggg==\"},\"type\":\"image_url\"}],\"role\":\"user\"},{\"content\":\"\\nBelow is the image with ID: 
\\u003cdatasets/shared/UkLWZg/i4.png\\u003e\",\"role\":\"user\"},{\"content\":[{\"image_url\":{\"url\":\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAAAXNSR0IArs4c6QAAAu5JREFUeF7tnFGOwjAMRIPEJXqF3fvsIfc+7BW4BB+rSERCFZCksT1jNPy2zdjv1aVqBaeiD5TACZqu8CIB4JNAAiQATAAcrwmQADABcLwmQALABMDxmgAJABMAx2sCJABMAByvCZAAMAFwvCZAAsAEwPGaAAkAEwDHh0zA9Xr92rbtD9zrVHxUze4CaiO32+33fD7/ZJEQWbOrgNZIKeW7lHLJICG6ZjcBu0ba+FNLQNTsIuBFI9QSUDWbC+g0QikBWbOpgMFGqCSgazYVUMmiG5q512So1VxAFgkM8CsrFwHsEljguwpglcAE310AmwQ2+CECWCQwwg8TgJbACj9UAEoCM/xwAdES2OFDBERJyAAfJsBbQhb4UAFeEjLBhwuwlpANPoUAKwkZ4dMIWJWQFT6VgKMS6nH1pf/9vXPvaTTdK1G3p6E9Eq+2z57N93XqS//ehw4+3QQ0gpMSeuDrdkr4tAIOXI7eSaCFTy3ASAI1fHoBixLo4acQcFBCCvgSMPL17bwP3W3ovt+FO6IUU0AtYAF+80gvgVaAAfwUEigFGMKnl0AnYBL+RY8iDO8SZuHXH3zoYZyRgCPw20+eVo41Kv/wMhSXIAuAFmscprhwIFyAJTjLtRaYTh0KFeABzGPNKaKTO8MEeILyXHuSb3d3iIAIQBEZXboDO4QLiAQTmTXA+ukuoQIQQBCZMzLCBCBBILN7MkIEMABgqOGZDHcBTI0z1dJkuApgbJitJjcBbI0+jj9TbS4CmBp89SXIUqO5AJbGencfdTtDraYCGBoaAc90OTIVMHFWUb0sHzxxXGo2FzAgwaWR2TN/v39HglvNLgLeSHBrZFUAqmY3AU8aoobfBO4mwb1mVwGPEjL8Y+JeQkTN7gKahCz/GfooIaLmEAEW1+dPXUMCwGYlQALABMDxmgAJABMAx2sCJABMAByvCZAAMAFwvCZAAsAEwPGaAAkAEwDHawIkAEwAHK8JkAAwAXC8JgAs4B9h/IqOl1uPzQAAAABJRU5ErkJggg==\"},\"type\":\"image_url\"}],\"role\":\"user\"}],\"model\":\"gemini-2.0-flash-001\",\"max_completion_tokens\":6000,\"presence_penalty\":0,\"temperature\":0.6,\"response_format\":{\"json_schema\":{\"name\":\"schema\",\"description\":\"schema for table\",\"schema\":{\"properties\":{\"data\":{\"items\":{\"properties\":{\"__id__\":{\"type\":\"integer\"},\"EfhxLZ\":{\"type\":\"string\",\"description\":\"A humorous and imaginative joke about the interaction or contrast between Image 1 and Image 
2\"}},\"additionalProperties\":false,\"type\":\"object\",\"required\":[\"__id__\",\"EfhxLZ\"]},\"type\":\"array\"}},\"additionalProperties\":false,\"type\":\"object\"}},\"type\":\"json_schema\"}}","response":"{\"choices\":[{\"finish_reason\":\"stop\",\"index\":0,\"message\":{\"content\":\"{\\\"data\\\": [{\\\"EfhxLZ\\\": \\\"Why did the two checkmarks get married? Because they were the perfect match!\\\", \\\"__id__\\\": 0}, {\\\"EfhxLZ\\\": \\\"Why did the X and checkmark break up? They just couldn't agree on anything!\\\", \\\"__id__\\\": 1}]}\",\"role\":\"assistant\"}}],\"created\":1749654963,\"id\":\"sJ1JaKalO82hxfcP55zY-A4\",\"model\":\"gemini-2.0-flash-001\",\"object\":\"chat.completion\",\"usage\":{\"completion_tokens\":66,\"prompt_tokens\":977,\"total_tokens\":1043}}"}] diff --git a/ui/package-lock.json b/ui/package-lock.json index 9d4cd57..b84dab0 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -8,6 +8,9 @@ "name": "tablepilot", "version": "0.0.0", "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", "@material-symbols/font-400": "^0.29.1", "@microsoft/fetch-event-source": "^2.0.1", "@radix-ui/react-alert-dialog": "^1.1.14", @@ -18,6 +21,7 @@ "@radix-ui/react-icons": "^1.3.2", "@radix-ui/react-label": "^2.1.2", "@radix-ui/react-popover": "^1.1.6", + "@radix-ui/react-radio-group": "^1.3.7", "@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-select": "^2.1.6", "@radix-ui/react-separator": "^1.1.2", @@ -389,6 +393,55 @@ "node": ">=6.9.0" } }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", + "integrity": "sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.3.1", + "resolved": 
"https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz", + "integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==", + "dependencies": { + "@dnd-kit/accessibility": "^3.1.1", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-10.0.0.tgz", + "integrity": "sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/@emotion/babel-plugin": { "version": "11.13.5", "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.13.5.tgz", @@ -1762,6 +1815,37 @@ } } }, + "node_modules/@radix-ui/react-radio-group": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.7.tgz", + "integrity": "sha512-9w5XhD0KPOrm92OTTE0SysH3sYzHsSTHNvZgUBo/VZ80VdYyB5RneDbc0dKpURS24IxkoFRu/hI0i4XyfFwY6g==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.10", + "@radix-ui/react-use-controllable-state": "1.2.2", + 
"@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-roving-focus": { "version": "1.1.10", "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.10.tgz", diff --git a/ui/package.json b/ui/package.json index 38ea814..671ae35 100644 --- a/ui/package.json +++ b/ui/package.json @@ -11,6 +11,9 @@ "test": "vitest" }, "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", "@material-symbols/font-400": "^0.29.1", "@microsoft/fetch-event-source": "^2.0.1", "@radix-ui/react-alert-dialog": "^1.1.14", diff --git a/ui/pnpm-lock.yaml b/ui/pnpm-lock.yaml index 2b320e2..b3ec406 100644 --- a/ui/pnpm-lock.yaml +++ b/ui/pnpm-lock.yaml @@ -8,6 +8,15 @@ importers: .: dependencies: + '@dnd-kit/core': + specifier: ^6.3.1 + version: 6.3.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + '@dnd-kit/sortable': + specifier: ^10.0.0 + version: 10.0.0(@dnd-kit/core@6.3.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0) + '@dnd-kit/utilities': + specifier: ^3.2.2 + version: 3.2.2(react@19.1.0) '@material-symbols/font-400': specifier: ^0.29.1 version: 0.29.3 @@ -310,6 +319,28 @@ packages: resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} engines: {node: '>=6.9.0'} + '@dnd-kit/accessibility@3.1.1': + resolution: {integrity: sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==} + peerDependencies: + react: '>=16.8.0' + + '@dnd-kit/core@6.3.1': + resolution: {integrity: 
sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==} + peerDependencies: + react: '>=16.8.0' + react-dom: '>=16.8.0' + + '@dnd-kit/sortable@10.0.0': + resolution: {integrity: sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==} + peerDependencies: + '@dnd-kit/core': ^6.3.0 + react: '>=16.8.0' + + '@dnd-kit/utilities@3.2.2': + resolution: {integrity: sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==} + peerDependencies: + react: '>=16.8.0' + '@emotion/babel-plugin@11.13.5': resolution: {integrity: sha512-pxHCpT2ex+0q+HH91/zsdHkw/lXd468DIN2zvfvLtPKLLMo6gQj7oLObq8PhkrxOZb/gGCq03S3Z7PDhS8pduQ==} @@ -2898,6 +2929,31 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.27.1 + '@dnd-kit/accessibility@3.1.1(react@19.1.0)': + dependencies: + react: 19.1.0 + tslib: 2.8.1 + + '@dnd-kit/core@6.3.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0)': + dependencies: + '@dnd-kit/accessibility': 3.1.1(react@19.1.0) + '@dnd-kit/utilities': 3.2.2(react@19.1.0) + react: 19.1.0 + react-dom: 19.1.0(react@19.1.0) + tslib: 2.8.1 + + '@dnd-kit/sortable@10.0.0(@dnd-kit/core@6.3.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)': + dependencies: + '@dnd-kit/core': 6.3.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + '@dnd-kit/utilities': 3.2.2(react@19.1.0) + react: 19.1.0 + tslib: 2.8.1 + + '@dnd-kit/utilities@3.2.2(react@19.1.0)': + dependencies: + react: 19.1.0 + tslib: 2.8.1 + '@emotion/babel-plugin@11.13.5': dependencies: '@babel/helper-module-imports': 7.27.1 diff --git a/ui/src/actions.ts b/ui/src/actions.ts index e1aae24..11849a2 100644 --- a/ui/src/actions.ts +++ b/ui/src/actions.ts @@ -699,7 +699,7 @@ export async function deleteWorkflow(id: string) { } } -export type DatasetType = "list" | "csv"; +export type DatasetType = "list" | "csv" | "image"; export interface DatasetInfo { id: string; @@ -735,7 
+735,6 @@ export interface CreateDatasetRequest { type: DatasetType; data: string[]; files: File[]; - private: boolean; table?: string; workflow?: string; } @@ -746,7 +745,6 @@ export async function createDataset( const formData = new FormData(); formData.append("name", req.name); formData.append("description", req.description); - formData.append("private", JSON.stringify(req.private)); formData.append("type", req.type); if (req.data.length > 0) { req.data.forEach((v) => formData.append("data", v)); diff --git a/ui/src/components/dataset-list-page.tsx b/ui/src/components/dataset-list-page.tsx index 88c393a..c2ad5a8 100644 --- a/ui/src/components/dataset-list-page.tsx +++ b/ui/src/components/dataset-list-page.tsx @@ -38,26 +38,25 @@ export function DatasetListPage() { setIsCreateDialogOpen(true); }; - const handleCreateDataset = async (data: { + const handleCreateDataset = async (payload: { name: string; description: string; - type: "list" | "csv"; - options?: string[]; + type: "list" | "csv" | "image"; + data?: string[]; files?: File[]; }) => { try { const requestPayload = { - name: data.name, - description: data.description, - type: data.type, - data: data.type === "list" ? data.options || [] : [], - files: data.type === "csv" ? data.files || [] : [], - private: false, + name: payload.name, + description: payload.description, + type: payload.type, + data: payload.data ?? [], + files: payload.files ?? 
[], }; await createDataset(requestPayload); toast({ title: "Success", - description: `Dataset "${data.name}" created successfully.`, + description: `Dataset "${payload.name}" created successfully.`, }); setIsCreateDialogOpen(false); fetchDatasetsCallback(); // This will update refreshKey, triggering child list refresh @@ -77,8 +76,8 @@ export function DatasetListPage() { data: { name: string; description: string; - type: "list" | "csv"; - options?: string[]; + type: "list" | "csv" | "image"; + data?: string[]; files?: File[]; }, ) => { @@ -87,9 +86,8 @@ export function DatasetListPage() { name: data.name, description: data.description, type: data.type, - data: data.type === "list" ? data.options || [] : [], - files: data.type === "csv" ? data.files || [] : [], - private: false, + data: data.data ?? [], + files: data.files ?? [], }; await updateDataset(id, requestPayload); toast({ @@ -220,26 +218,26 @@ function DatasetList({ .filter((dataset) => dataset.name.toLowerCase().includes(searchQuery.toLowerCase()), ) - .map((dataset) => ( - { - setSelectedDatasetId(dataset.id); - setIsPreviewDialogOpen(true); - }} - onEdit={() => onEditDataset(dataset)} - onDelete={async () => { - setLoading(true); - await deleteDataset(dataset.id); - await fetchDatasets(); - setLoading(false); - }} - badgeText={dataset.type} - > -

{dataset.description}

-
- ))} + .map((dataset) => ( + { + setSelectedDatasetId(dataset.id); + setIsPreviewDialogOpen(true); + }} + onEdit={() => onEditDataset(dataset)} + onDelete={async () => { + setLoading(true); + await deleteDataset(dataset.id); + await fetchDatasets(); + setLoading(false); + }} + badgeText={dataset.type} + > +

{dataset.description}

+
+ ))} ({ - GenerateOptionsDialog: vi.fn((props) => { - if (!props.isOpen) return null; - return ( -
- - -

Dataset Name: {props.datasetName}

-

Dataset Description: {props.datasetDescription}

-
- ); - }), +import { DatasetInfo } from "@/actions"; +import { Active, DragEndEvent, Over, UniqueIdentifier } from "@dnd-kit/core"; +import { act, fireEvent, render, screen } from "@testing-library/react"; // Removed waitFor +import { ReactNode } from "react"; +import { + afterEach, + beforeEach, + describe, + expect, + Mock, + test, + vi, +} from "vitest"; +import { CreateDatasetDialog, CreateDatasetDialogProps } from "./dataset"; + +vi.mock("@/actions", () => ({})); +vi.mock("@/urls", () => ({ + imageUrl: (path: string) => `mock://${path}`, })); -const mockOnCreate = vi.fn(); -const mockOnUpdate = vi.fn(); - -describe("CreateDatasetDialog", () => { - beforeEach(async () => { - vi.mock("react-router-dom"); - vi.mocked(useNavigate).mockReturnValue(vi.fn()); - render( - - {}} - onCreate={mockOnCreate} - onUpdate={mockOnUpdate} - /> - , - ); - }); - - it("should render", () => { - expect(true).toBe(true); - }); - - it("should enable Create button only when name is provided", async () => { - const createButton = screen.getByRole("button", { name: "Create" }); - expect(createButton).toBeDisabled(); - - const nameInput = screen.getByLabelText("Name"); - await userEvent.type(nameInput, "test-dataset"); - - expect(createButton).toBeEnabled(); - }); - - it("should call onCreate with correct data for list type dataset", async () => { - const nameInput = screen.getByLabelText("Name"); - await userEvent.type(nameInput, "test-list-dataset"); - - const descriptionInput = screen.getByLabelText("Description"); - await userEvent.type(descriptionInput, "This is a test list dataset."); - - const listTypeRadio = screen.getByLabelText("List"); - await userEvent.click(listTypeRadio); +type MockDatasetInfo = { + id: string; + name: string; + description: string; + type: "list" | "csv" | "image"; + data: string[]; + columns: string[]; +}; - const optionsInput = screen.getByLabelText("Options"); - await userEvent.type(optionsInput, "Option 1\nOption 2\nOption 3"); +let dndOnDragEnd: 
((event: DragEndEvent) => void) | undefined = undefined; + +vi.mock("@dnd-kit/core", async () => { + const actual = await vi.importActual("@dnd-kit/core"); + return { + ...actual, + DndContext: ({ + children, + onDragEnd, + }: { + children: ReactNode; + onDragEnd?: (event: DragEndEvent) => void; + }) => { + dndOnDragEnd = onDragEnd; + return
{children}
; + }, + useSensor: vi.fn(), + useSensors: vi.fn(), + PointerSensor: vi.fn(), + KeyboardSensor: vi.fn(), + closestCenter: vi.fn(), + }; +}); - const createButton = screen.getByRole("button", { name: "Create" }); - await userEvent.click(createButton); +vi.mock("@dnd-kit/sortable", async () => { + const actual = await vi.importActual("@dnd-kit/sortable"); + return { + ...actual, + SortableContext: ({ children }: { children: ReactNode }) => ( +
{children}
+ ), + useSortable: ({ id }: { id: string }) => ({ + attributes: { + role: "button", + "aria-roledescription": "sortable", + "data-sortable-id": id, + }, + listeners: { onMouseDown: vi.fn(), onKeyDown: vi.fn() }, + setNodeRef: vi.fn(), + transform: null, + transition: null, + isDragging: false, + }), + arrayMove: vi.fn((arr, from, to) => { + const newArray = [...arr]; + const element = newArray.splice(from, 1)[0]; + newArray.splice(to, 0, element); + return newArray; + }), + verticalListSortingStrategy: vi.fn(), + sortableKeyboardCoordinates: vi.fn(), + }; +}); - expect(mockOnCreate).toHaveBeenCalledWith({ - name: "test-list-dataset", - description: "This is a test list dataset.", - type: "list", - options: ["Option 1", "Option 2", "Option 3"], +const mockFile = ( + name: string, + type: string, + content: string = "", + size?: number, +): File => { + const blob = new Blob([content], { type }); + const file = new File([blob], name, { type, lastModified: Date.now() }); + if (size !== undefined) { + Object.defineProperty(file, "size", { + value: size, + writable: false, + configurable: true, }); - }); + } + return file; +}; - it("should call onCreate with correct data for csv type dataset", async () => { - const nameInput = screen.getByLabelText("Name"); - await userEvent.type(nameInput, "test-csv-dataset"); +describe("CreateDatasetDialog Management", () => { + let mockOnClose: Mock<() => void>; + let mockOnCreate: Mock; + let mockOnUpdate: Mock; + const OriginalImage = window.Image; - const descriptionInput = screen.getByLabelText("Description"); - await userEvent.type(descriptionInput, "This is a test csv dataset."); + const initialProps: Omit< + CreateDatasetDialogProps, + "onCreate" | "onUpdate" | "onClose" | "dataset" + > = { + isOpen: true, + }; - const csvTypeRadio = screen.getByLabelText("CSV"); - await userEvent.click(csvTypeRadio); + beforeEach(() => { + vi.clearAllMocks(); + dndOnDragEnd = undefined; + mockOnClose = vi.fn(); + mockOnCreate = vi.fn(); + 
mockOnUpdate = vi.fn(); + + const mockCtx = { + drawImage: vi.fn(), + toDataURL: vi.fn(() => "mock-data-url-canvas"), + } as Partial; + vi.spyOn(HTMLCanvasElement.prototype, "getContext").mockImplementation( + () => mockCtx as CanvasRenderingContext2D, + ); - const fileInput = screen.getByLabelText("CSV Files") as HTMLInputElement; - const testFile1 = new File(["col1,col2\nval1,val2"], "test1.csv", { - type: "text/csv", + const MockedFileReader = vi.fn((): FileReader => { + const self = { + // Properties + error: null as DOMException | null, + readyState: 0 as 0 | 1 | 2, // EMPTY + result: null as string | ArrayBuffer | null, + + // Event handlers + onabort: null as + | ((this: FileReader, ev: ProgressEvent) => void) + | null, + onerror: null as + | ((this: FileReader, ev: ProgressEvent) => void) + | null, + onload: null as + | ((this: FileReader, ev: ProgressEvent) => void) + | null, + onloadend: null as + | ((this: FileReader, ev: ProgressEvent) => void) + | null, + onloadstart: null as + | ((this: FileReader, ev: ProgressEvent) => void) + | null, + onprogress: null as + | ((this: FileReader, ev: ProgressEvent) => void) + | null, + + // Methods + abort: vi.fn<() => void>(), + readAsArrayBuffer: vi.fn<(blob: Blob) => void>(), + readAsBinaryString: vi.fn<(blob: Blob) => void>(), + readAsDataURL: vi.fn((_blob: Blob): void => { + // Ensure explicit void return for readAsDataURL + const useFake = + vi.isMockFunction(setTimeout) && + (setTimeout as unknown as { clock: unknown }).clock; + const delayFn = useFake + ? 
setTimeout + : (fn: () => void) => Promise.resolve().then(fn); + + self.readyState = 1; // LOADING + delayFn(() => { + self.result = "mock-data-url-filereader"; + self.readyState = 2; // DONE + if (self.onload) { + self.onload.call( + self as unknown as FileReader, + { + target: self, + } as unknown as ProgressEvent, + ); + } + }, 0); + }), + readAsText: vi.fn<(blob: Blob, encoding?: string) => void>(), + + // EventTarget methods + addEventListener: vi.fn(), // Simpler typing + removeEventListener: vi.fn(), // Simpler typing + dispatchEvent: vi.fn<(event: Event) => boolean>(), + }; + return self as unknown as FileReader; }); - const testFile2 = new File(["h1,h2\ndata1,data2"], "test2.csv", { - type: "text/csv", + // Attach static properties to the mock constructor + Object.defineProperty(MockedFileReader, "EMPTY", { + value: 0, + writable: false, + }); + Object.defineProperty(MockedFileReader, "LOADING", { + value: 1, + writable: false, + }); + Object.defineProperty(MockedFileReader, "DONE", { + value: 2, + writable: false, }); - await userEvent.upload(fileInput, [testFile1, testFile2]); - - // Check if files are listed (optional, good for debugging) - expect(screen.getByText(/test1.csv \(\d+\.\d{2} KB\)/)).toBeInTheDocument(); - expect(screen.getByText(/test2.csv \(\d+\.\d{2} KB\)/)).toBeInTheDocument(); - - const createButton = screen.getByRole("button", { name: "Create" }); - await userEvent.click(createButton); - expect(mockOnCreate).toHaveBeenCalledWith({ - name: "test-csv-dataset", - description: "This is a test csv dataset.", - type: "csv", - files: [testFile1, testFile2], + vi.spyOn(window, "FileReader").mockImplementation(MockedFileReader); + + window.Image = vi.fn(function () { + const img = new OriginalImage(); + let _src = ""; + Object.defineProperty(img, "src", { + get: () => _src, + set(value) { + _src = value; + img.width = 100; + img.height = 100; + const useFake = + vi.isMockFunction(setTimeout) && + (setTimeout as unknown as { clock: unknown 
}).clock; + const delayFn = useFake + ? setTimeout + : (fn: () => void) => Promise.resolve().then(fn); + delayFn(() => { + if (img.onload) { + // Null check before calling + img.onload({} as Event); + } + }, 0); + }, + }); + return img; }); }); -}); -const mockOnCloseForAIFeature: MockedFunction<() => void> = vi.fn(); -const mockOnCreateForAIFeature: MockedFunction< - (data: { - name: string; - description: string; - type: "list" | "csv"; - options?: string[]; - files?: File[]; - }) => void -> = vi.fn(); -const mockOnUpdateForAIFeature: MockedFunction< - ( - id: string, - data: { - name: string; - description: string; - type: "list" | "csv"; - options?: string[]; - files?: File[]; - }, - ) => void -> = vi.fn(); - -const defaultTestPropsForAIFeature: CreateDatasetDialogProps = { - isOpen: true, - onClose: mockOnCloseForAIFeature, - onCreate: mockOnCreateForAIFeature, - onUpdate: mockOnUpdateForAIFeature, -}; + afterEach(() => { + window.Image = OriginalImage; + // Ensure fake timers are restored if a test block used them + // Check if clock exists on setTimeout to determine if it's a Vitest fake timer + if ( + vi.isMockFunction(setTimeout) && + (setTimeout as unknown as { clock: unknown }).clock + ) { + vi.useRealTimers(); + } + }); -const renderCreateDatasetDialogForAIFeature = ( - props?: Partial, -) => { - return render( - - - , - ); -}; + const findFileItemByName = (name: string) => + screen.findByText( + (content, element) => { + return ( + element?.tagName.toLowerCase() === "span" && content.startsWith(name) + ); + }, + {}, + { timeout: 5000 }, + ); -describe("CreateDatasetDialog - AI Options Generation Feature", () => { - beforeEach(() => { - mockOnCloseForAIFeature.mockClear(); - mockOnCreateForAIFeature.mockClear(); - mockOnUpdateForAIFeature.mockClear(); - }); + describe("Dataset Creation", () => { + test('should create a "list" type dataset', async () => { + render( + , + ); + await act(async () => { + fireEvent.change(screen.getByLabelText("Name"), { + 
target: { value: "My List Dataset" }, + }); + fireEvent.change(screen.getByLabelText("Description"), { + target: { value: "A desc" }, + }); + fireEvent.change( + screen.getByPlaceholderText("Enter each option on a new line"), + { target: { value: "Option 1\nOption 2" } }, + ); + }); + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Create" })); + }); + expect(mockOnCreate).toHaveBeenCalledWith({ + name: "My List Dataset", + description: "A desc", + type: "list", + data: ["Option 1", "Option 2"], + }); + }); - const getOptionsTextarea = () => - screen.getByLabelText("Options") as HTMLTextAreaElement; + test('should create a "csv" type dataset with a file', async () => { + render( + , + ); + await act(async () => { + fireEvent.change(screen.getByLabelText("Name"), { + target: { value: "My CSV Dataset" }, + }); + fireEvent.change(screen.getByLabelText("Description"), { + target: { value: "A CSV desc" }, + }); + fireEvent.click(screen.getByLabelText("CSV")); + }); + const csvFile = mockFile("test.csv", "text/csv", "h1,h2\nv1,v2", 10); // Provide size + const fileInput = screen.getByLabelText("CSV Files") as HTMLInputElement; + await act(async () => { + fireEvent.change(fileInput, { target: { files: [csvFile] } }); + await new Promise((r) => setTimeout(r, 50)); + }); + await findFileItemByName(csvFile.name); // Will look for "test.csv (0.01 KB)" + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Create" })); + }); + expect(mockOnCreate).toHaveBeenCalledWith({ + name: "My CSV Dataset", + description: "A CSV desc", + type: "csv", + data: [csvFile.name], + files: [csvFile], + }); + }); - it('DOES show wand icon button when dataset type is "list"', async () => { - renderCreateDatasetDialogForAIFeature(); - const radioList = screen.getByLabelText("List"); - await userEvent.click(radioList); - await screen.findByText("Options"); - await screen.findByLabelText("wand-button"); + test.skip('should create an "image" type 
dataset with an image file', async () => { + /* Kept skipped */ + }); + + test("should replace a CSV file if a new file with the same name is uploaded", async () => { + render( + , + ); + + await act(async () => { + fireEvent.change(screen.getByLabelText("Name"), { + target: { value: "CSV Replacement Test" }, + }); + fireEvent.click(screen.getByLabelText("CSV")); + }); + + const fileInput = screen.getByLabelText("CSV Files") as HTMLInputElement; + const fileA_v1 = mockFile("fileA.csv", "text/csv", "version1", 1024); // 1.00 KB + const fileA_v2 = mockFile( + "fileA.csv", + "text/csv", + "version2_new_content", + 2048, + ); // 2.00 KB + + await act(async () => { + fireEvent.change(fileInput, { target: { files: [fileA_v1] } }); + await new Promise((r) => setTimeout(r, 50)); + }); + await screen.findByText( + (content) => + content.startsWith("fileA.csv") && content.includes("1.00 KB"), + ); + + await act(async () => { + fireEvent.change(fileInput, { target: { files: [fileA_v2] } }); + await new Promise((r) => setTimeout(r, 50)); + }); + + await screen.findByText( + (content) => + content.startsWith("fileA.csv") && content.includes("2.00 KB"), + ); + expect( + screen.queryByText( + (content) => + content.startsWith("fileA.csv") && content.includes("1.00 KB"), + ), + ).toBeNull(); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Create" })); + }); + expect(mockOnCreate).toHaveBeenCalledWith( + expect.objectContaining({ data: [fileA_v2.name], files: [fileA_v2] }), + ); + }); }); - it("opens GenerateOptionsDialog when wand icon is clicked", async () => { - renderCreateDatasetDialogForAIFeature(); - await userEvent.click(screen.getByLabelText("List")); + describe("Dataset Update", () => { + const existingListDataset: MockDatasetInfo = { + id: "list1", + name: "Existing List", + description: "Old list description", + type: "list", + data: ["Old Option 1"], + columns: [], + }; + test('should update a "list" type dataset', async () => { + 
render( + , + ); + await act(async () => { + fireEvent.change(screen.getByLabelText("Name"), { + target: { value: "Updated List Name" }, + }); + }); + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Update" })); + }); + expect(mockOnUpdate).toHaveBeenCalledWith( + existingListDataset.id, + expect.objectContaining({ name: "Updated List Name" }), + ); + }); - const radioList = screen.getByLabelText("List"); - await userEvent.click(radioList); - await screen.findByText("Options"); - await userEvent.click(screen.getByLabelText("wand-button")); - expect(screen.getByTestId("generate-options-dialog")).toBeInTheDocument(); - }); + const existingCsvDataset: MockDatasetInfo = { + id: "csv1", + name: "Existing CSV", + description: "Old CSV file", + type: "csv", + data: ["file1.csv", "file2.csv", "file3.csv"], + columns: [], + }; + test('should update a "csv" type dataset by adding a new file', async () => { + render( + , + ); + await findFileItemByName(existingCsvDataset.data[0]); + const newCsvFile = mockFile("new_upload.csv", "text/csv"); + const fileInput = screen.getByLabelText("CSV Files") as HTMLInputElement; + await act(async () => { + fireEvent.change(fileInput, { target: { files: [newCsvFile] } }); + await new Promise((r) => setTimeout(r, 50)); + }); + await findFileItemByName(newCsvFile.name); + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Update" })); + }); + expect(mockOnUpdate).toHaveBeenCalledWith( + existingCsvDataset.id, + expect.objectContaining({ files: [newCsvFile] }), + ); + }); - it("passes correct datasetName and datasetDescription to GenerateOptionsDialog", async () => { - const datasetName = "AI Test Name"; - const datasetDescription = "AI Test Description"; - renderCreateDatasetDialogForAIFeature(); - - const nameInput = screen.getByLabelText("Name"); - await userEvent.clear(nameInput); - await userEvent.type(nameInput, datasetName); - - const descriptionInput = 
screen.getByLabelText("Description"); - await userEvent.clear(descriptionInput); - await userEvent.type(descriptionInput, datasetDescription); - - await userEvent.click(screen.getByLabelText("List")); - await userEvent.click(screen.getByLabelText("wand-button")); - - expect(screen.getByTestId("generate-options-dialog")).toBeInTheDocument(); - expect( - screen.getByText(`Dataset Name: ${datasetName}`), - ).toBeInTheDocument(); - expect( - screen.getByText(`Dataset Description: ${datasetDescription}`), - ).toBeInTheDocument(); - }); + test.skip('should update an "image" type dataset by adding a new image', async () => { + /* Kept skipped */ + }); - it("appends generated options to textarea when onGenerationComplete is called from mock", async () => { - renderCreateDatasetDialogForAIFeature(); - await userEvent.click(screen.getByLabelText("List")); - - const optionsTextarea = getOptionsTextarea(); - await userEvent.type(optionsTextarea, "Initial Option 1\nInitial Option 2"); - await userEvent.click(screen.getByLabelText("wand-button")); - const mockGenerateButton = screen.getByTestId("generate-options-submit"); - await userEvent.click(mockGenerateButton); - - const expectedOptions = "gen_opt1_from_mock\ngen_opt2_from_mock"; - expect(optionsTextarea.value).toBe(expectedOptions); - expect( - screen.queryByTestId("generate-options-dialog"), - ).not.toBeInTheDocument(); + describe("DND Reordering", () => { + const existingCsvDatasetForDND: MockDatasetInfo = { + id: "csvDND", + name: "CSV DND", + description: "CSV DND test", + type: "csv", + data: ["fileA.csv", "fileB.csv", "fileC.csv"], + columns: [], + }; + test('should reorder files for a "csv" dataset via DND and call onUpdate', async () => { + // This test does not need fake timers as CSV file item rendering is synchronous after initial load + render( + , + ); + + await findFileItemByName(existingCsvDatasetForDND.data[0]); + await findFileItemByName(existingCsvDatasetForDND.data[1]); + await 
findFileItemByName(existingCsvDatasetForDND.data[2]); + + // IDs are: dataset.id + '-' + fileName + '-' + index + const activeItemId = `csvDND-${existingCsvDatasetForDND.data[2]}-2`; // fileC.csv (index 2) + const overItemId = `csvDND-${existingCsvDatasetForDND.data[0]}-0`; // fileA.csv (index 0) + + expect(dndOnDragEnd).toBeDefined(); + if (dndOnDragEnd) { + const dragEndEvent: DragEndEvent = { + active: { id: activeItemId as UniqueIdentifier } as Active, + over: { id: overItemId as UniqueIdentifier } as Over, + } as DragEndEvent; + + await act(async () => { + // Wrap state update in act + dndOnDragEnd!(dragEndEvent); + }); + } + + // Optional: Verify DOM order change if needed, though payload is primary check + // const fileItemsAfterDrag = await screen.findAllByRole('button', { name: /Drag to reorder/i }); + // expect(fileItemsAfterDrag[0].closest('[data-sortable-id]')?.getAttribute('data-sortable-id')).toBe(activeItemId); + + await act(async () => { + fireEvent.click(screen.getByRole("button", { name: "Update" })); + }); + + expect(mockOnUpdate).toHaveBeenCalledTimes(1); + expect(mockOnUpdate).toHaveBeenCalledWith( + existingCsvDatasetForDND.id, + expect.objectContaining({ + name: existingCsvDatasetForDND.name, + type: "csv", + data: [ + existingCsvDatasetForDND.data[2], + existingCsvDatasetForDND.data[0], + existingCsvDatasetForDND.data[1], + ], + files: undefined, + }), + ); + expect(mockOnClose).toHaveBeenCalledTimes(1); + }); + }); }); }); diff --git a/ui/src/components/dialog/dataset/dataset.tsx b/ui/src/components/dialog/dataset/dataset.tsx index 150bb5a..a9799a8 100644 --- a/ui/src/components/dialog/dataset/dataset.tsx +++ b/ui/src/components/dialog/dataset/dataset.tsx @@ -19,35 +19,127 @@ import { TooltipProvider, TooltipTrigger, } from "@/components/ui/tooltip"; -import { Wand2 } from "lucide-react"; +import { imageUrl } from "@/urls"; +import { + closestCenter, + DndContext, + DragEndEvent, + KeyboardSensor, + PointerSensor, + useSensor, + useSensors, 
+} from "@dnd-kit/core"; +import { + arrayMove, + SortableContext, + sortableKeyboardCoordinates, + useSortable, + verticalListSortingStrategy, +} from "@dnd-kit/sortable"; +import { CSS } from "@dnd-kit/utilities"; +import { GripVertical, Wand2 } from "lucide-react"; import React, { useEffect, useRef, useState } from "react"; import { GenerateOptionsDialog } from "../generate-options-dialog"; +interface FileItem { + id: string; + name: string; + file?: File; + thumbnail?: string; +} + export interface CreateDatasetDialogProps { - // Added export dataset?: DatasetInfo; isOpen: boolean; onClose: () => void; - onCreate: (data: { + onCreate: (payload: { name: string; description: string; - type: "list" | "csv"; - options?: string[]; + type: "list" | "csv" | "image"; + data?: string[]; files?: File[]; }) => void; onUpdate: ( id: string, - data: { + payload: { name: string; description: string; - type: "list" | "csv"; - options?: string[]; + type: "list" | "csv" | "image"; + data?: string[]; files?: File[]; }, ) => void; } -type DatasetType = "list" | "csv"; +interface SortableFileItemProps { + item: FileItem; + datasetType: DatasetType; + onRemove: (id: string) => void; +} + +function SortableFileItem({ + item, + datasetType, + onRemove, +}: SortableFileItemProps) { + const { + attributes, + listeners, + setNodeRef, + transform, + transition, + isDragging, + } = useSortable({ id: item.id }); + + const style = { + transform: CSS.Transform.toString(transform), + transition, + opacity: isDragging ? 0.8 : 1, + }; + + return ( +
+
+ + {item.thumbnail && datasetType === "image" && ( + {`Image + )} + + {item.name} + {item.file + ? `(${(item.file.size / 1024).toFixed(2)} KB)` + : "(existing)"} + +
+ +
+ ); +} + +type DatasetType = "list" | "csv" | "image"; export function CreateDatasetDialog({ dataset, @@ -60,7 +152,7 @@ export function CreateDatasetDialog({ const [description, setDescription] = useState(""); const [type, setType] = useState("list"); const [listOptions, setListOptions] = useState(""); - const [selectedFiles, setSelectedFiles] = useState([]); + const [fileItems, setFileItems] = useState([]); const [isGenerateOptionsDialogOpen, setIsGenerateOptionsDialogOpen] = useState(false); @@ -70,14 +162,45 @@ export function CreateDatasetDialog({ const internalCloseInitiatedRef = useRef(false); + const sensors = useSensors( + useSensor(PointerSensor), + useSensor(KeyboardSensor, { + coordinateGetter: sortableKeyboardCoordinates, + }), + ); + + function handleDragEnd(event: DragEndEvent) { + const { active, over } = event; + if (over && active.id !== over.id) { + setFileItems((items) => { + const oldIndex = items.findIndex((item) => item.id === active.id); + const newIndex = items.findIndex((item) => item.id === over.id); + return arrayMove(items, oldIndex, newIndex); + }); + } + } + useEffect(() => { resetForm(); if (dataset) { setName(dataset.name); setDescription(dataset.description); setType(dataset.type); - if (dataset.data) { - setListOptions(dataset.data.join("\n")); + switch (dataset.type) { + case "csv": + case "image": + const initialFileItems = dataset.data.map((fileName, index) => ({ + id: `${dataset.id}-${fileName}-${index}`, + name: fileName as string, + file: undefined, + thumbnail: imageUrl( + `datasets/shared/${dataset.id}/${fileName}?key=${Date.now()}`, + ), + })); + setFileItems(initialFileItems); + break; + case "list": + setListOptions(dataset.data.join("\n")); } } }, [isOpen]); @@ -87,7 +210,7 @@ export function CreateDatasetDialog({ setDescription(""); setType("list"); setListOptions(""); - setSelectedFiles([]); + setFileItems([]); setNameError(""); setListOptionsError(""); setFilesError(""); @@ -115,9 +238,16 @@ export function 
CreateDatasetDialog({ setListOptionsError(""); } - if (type === "csv" && dataset === undefined && selectedFiles.length === 0) { + if (type === "csv" && dataset === undefined && fileItems.length === 0) { setFilesError("Please select at least one CSV file"); isValid = false; + } else if ( + type === "image" && + dataset === undefined && + fileItems.length === 0 + ) { + setFilesError("Please select at least one image file"); + isValid = false; } else { setFilesError(""); } @@ -135,36 +265,74 @@ export function CreateDatasetDialog({ name, description, type, - options: listOptions + data: listOptions .split("\n") .map((opt) => opt.trim()) + .map((opt) => opt.trim()) .filter((opt) => opt), }); } else if (type === "csv") { + const orderedFileNames = fileItems.map((item) => item.name); + const newFiles = fileItems + .filter((item) => item.file) + .map((item) => item.file as File); + + onUpdate(dataset.id, { + name, + description, + type, + data: orderedFileNames, + files: newFiles.length > 0 ? newFiles : undefined, + }); + } else if (type === "image") { + const orderedFileNames = fileItems.map((item) => item.name); + const newFiles = fileItems + .filter((item) => item.file) + .map((item) => item.file as File); onUpdate(dataset.id, { name, description, type, - files: selectedFiles, + data: orderedFileNames, + files: newFiles.length > 0 ? 
newFiles : undefined, }); } } else { + // Creating a new dataset if (type === "list") { onCreate({ name, description, type, - options: listOptions + data: listOptions .split("\n") .map((opt) => opt.trim()) .filter((opt) => opt), }); } else if (type === "csv") { + const orderedFileNames = fileItems.map((item) => item.name); + const newFiles = fileItems + .filter((item) => item.file) + .map((item) => item.file as File); + onCreate({ name, description, type, - files: selectedFiles, + data: orderedFileNames, + files: newFiles, + }); + } else if (type === "image") { + const orderedFileNames = fileItems.map((item) => item.name); + const newFiles = fileItems + .filter((item) => item.file) + .map((item) => item.file as File); + onCreate({ + name, + description, + type, + data: orderedFileNames, + files: newFiles, }); } } @@ -174,25 +342,150 @@ export function CreateDatasetDialog({ const handleFileChange = (event: React.ChangeEvent) => { if (event.target.files) { const filesArray = Array.from(event.target.files); - const csvFiles = filesArray.filter( - (file) => file.type === "text/csv" || file.name.endsWith(".csv"), - ); - if (csvFiles.length !== filesArray.length) { - setFilesError("Only CSV files are allowed."); + let processedFiles: File[] = []; + let errorMessage = ""; + + if (type === "csv") { + processedFiles = filesArray.filter( + (file) => file.type === "text/csv" || file.name.endsWith(".csv"), + ); + if (processedFiles.length !== filesArray.length) { + errorMessage = "Only CSV files are allowed."; + } + } else if (type === "image") { + const imageMimeTypes = ["image/png", "image/jpeg", "image/gif"]; + const imageExtensions = [".png", ".jpg", ".jpeg", ".gif"]; + processedFiles = filesArray.filter( + (file) => + imageMimeTypes.includes(file.type) || + imageExtensions.some((ext) => + file.name.toLowerCase().endsWith(ext), + ), + ); + if (processedFiles.length !== filesArray.length) { + errorMessage = "Only image files (PNG, JPG, GIF) are allowed."; + } + } else { + 
processedFiles = filesArray; + } + + if (errorMessage) { + setFilesError(errorMessage); } else { setFilesError(""); + const preliminaryFileItems: FileItem[] = []; + const replaced = new Map(); + + // Process existing files: replace if new one with same name is uploaded + fileItems.forEach((f) => { + const newFile = processedFiles.find((pf) => pf.name === f.name); + if (newFile) { + replaced.set(newFile.name, true); + preliminaryFileItems.push({ + id: `${newFile.name}-${newFile.size}`, + name: newFile.name, + file: newFile, + thumbnail: type === "image" ? undefined : f.thumbnail, + }); + } else { + preliminaryFileItems.push(f); + } + }); + + // Add new files that weren't replacements + processedFiles.forEach((pf) => { + if (!replaced.get(pf.name)) { + preliminaryFileItems.push({ + id: `${pf.name}-${pf.size}`, + name: pf.name, + file: pf, + thumbnail: type === "image" ? undefined : undefined, + }); + } + }); + + if (type === "image") { + const thumbnailPromises = preliminaryFileItems.map((item) => { + if (item.file && item.file.type.startsWith("image/")) { + return new Promise((resolve) => { + const reader = new FileReader(); + reader.onload = (e) => { + const img = new Image(); + img.onload = () => { + const MAX_WIDTH = 50; + const MAX_HEIGHT = 50; + let width = img.width; + let height = img.height; + + if (width > height) { + if (width > MAX_WIDTH) { + height *= MAX_WIDTH / width; + width = MAX_WIDTH; + } + } else { + if (height > MAX_HEIGHT) { + width *= MAX_HEIGHT / height; + height = MAX_HEIGHT; + } + } + + const canvas = document.createElement("canvas"); + canvas.width = width; + canvas.height = height; + const ctx = canvas.getContext("2d"); + if (ctx) { + ctx.drawImage(img, 0, 0, width, height); + item.thumbnail = canvas.toDataURL( + item.file?.type || "image/png", + ); + } else { + item.thumbnail = undefined; + } + resolve(item); + }; + img.onerror = () => { + item.thumbnail = undefined; + resolve(item); + }; + img.src = e.target?.result as string; + }; + 
reader.onerror = () => { + item.thumbnail = undefined; + resolve(item); + }; + if (item.file) { + reader.readAsDataURL(item.file); + } + }); + } + return Promise.resolve(item); + }); + + Promise.all(thumbnailPromises).then((updatedFileItems) => { + setFileItems(updatedFileItems); + if (filesError === "Please select at least one image file") { + setFilesError(""); + } + }); + } else { + // For CSV or other types, set items directly + setFileItems(preliminaryFileItems); + if ( + type === "csv" && + filesError === "Please select at least one CSV file" + ) { + setFilesError(""); + } + } } - setSelectedFiles((prev) => - [...prev, ...csvFiles].filter( - (f, i, self) => - self.findIndex((t) => t.name === f.name && t.size === f.size) === i, - ), - ); } }; - const removeFile = (fileName: string) => { - setSelectedFiles((prev) => prev.filter((file) => file.name !== fileName)); + const removeFile = (idToRemove: string) => { + // Changed parameter to id + setFileItems((prevItems) => + prevItems.filter((item) => item.id !== idToRemove), + ); }; return ( @@ -209,7 +502,7 @@ export function CreateDatasetDialog({ } }} > - + {dataset ? "Update Dataset" : "Create New Dataset"} @@ -267,6 +560,10 @@ export function CreateDatasetDialog({ +
+ + +
@@ -332,38 +629,90 @@ export function CreateDatasetDialog({ {filesError && (

{filesError}

)} - {selectedFiles.length > 0 && ( - -
- {selectedFiles.map((file) => ( -
- - {file.name} ({(file.size / 1024).toFixed(2)} KB) - - + {fileItems.length > 0 && ( + + item.id)} + strategy={verticalListSortingStrategy} + > + +
+ {fileItems.map((item) => ( + + ))}
- ))} -
- + + + )}

- {dataset - ? "Select one or more CSV files to REPLACE original data or leave it empty if you don't wan t to change." + {dataset && type === "csv" + ? "Add new CSV files to replace existing ones. Leave empty to keep current files. You can reorder files by dragging." : "Select one or more CSV files."}

)} + + {type === "image" && ( +
+ +
+ + {filesError && ( +

{filesError}

+ )} + {fileItems.length > 0 && ( + + item.id)} + strategy={verticalListSortingStrategy} + > + +
+ {fileItems.map((item) => ( + + ))} +
+
+
+
+ )} +

+ {dataset && type === "image" + ? "Add new image files to replace existing ones. Leave empty to keep current files. You can reorder files by dragging." + : "Select one or more image files (PNG, JPG, GIF)."} +

+
+
+ )}