From 6eba0ebf589528d2271ca5f39aaa3a911115438a Mon Sep 17 00:00:00 2001 From: Brandon Sprague Date: Fri, 12 Jan 2024 14:36:45 -0800 Subject: [PATCH 1/6] Get the `create-report` async task in shape This PR makes assorted tweaks to the runner's `create-report` task, like: - Adding new `pacta.FileType`s for assorted HTML-adjacent types present in the report (see below) - Includes an `UNKNOWN` type in case I miss anything, we can always go back and re-label them based on file extension - Fix some issues with paths, use the portfolio UUID as the directory prefix (to avoid collisions) - Publish the event when we're done so the main app can record the info Mostly unrelated changes: - Fixes #135 by pulling `handleMissingTranslation` from the `$nuxt` object instead of `inject` - Add a helper script for iterating quickly on the runner, `build_and_load_runner.sh` Some resources that get created from this: ``` select * from blob WHERE blob_uri LIKE 'az://reports/%'; id | blob_uri | file_type | file_name | created_at ---------------------------+--------------------------------------------------------------------------------------------------------------+-----------+--------------------------+------------------------------- blob.9c6a2be6135288dc141a | az://reports/afcc58f1-486c-437c-9fce-0a4774cd4128/index.html | html | index.html | 2024-01-12 22:24:07.695008+00 blob.47508626ad7c784151d6 | az://reports/afcc58f1-486c-437c-9fce-0a4774cd4128/libs/accessible-code-block-0.0.1/empty-anchor.js | js | empty-anchor.js | 2024-01-12 22:24:07.695008+00 blob.e952045a95b60b0744fd | az://reports/afcc58f1-486c-437c-9fce-0a4774cd4128/libs/anchor-sections-1.1.0/anchor-sections-hash.css | css | anchor-sections-hash.css | 2024-01-12 22:24:07.695008+00 blob.59b3c81bad6b701bbf83 | az://reports/afcc58f1-486c-437c-9fce-0a4774cd4128/libs/anchor-sections-1.1.0/anchor-sections.js | js | anchor-sections.js | 2024-01-12 22:24:07.695008+00 blob.7edd40c453bac1c53754 | az://reports/afcc58f1-486c-437c-9fce-0a4774cd4128/libs/gitbook-2.6.7/css/fontawesome/fontawesome-webfont.ttf | ttf | fontawesome-webfont.ttf | 2024-01-12 22:24:07.695008+00 blob.7eda957bd02cbf3ab01f | az://reports/afcc58f1-486c-437c-9fce-0a4774cd4128/libs/gitbook-2.6.7/css/plugin-search.css | css | plugin-search.css | 2024-01-12 22:24:07.695008+00 ``` --- cmd/runner/README.md | 15 ++-- cmd/runner/main.go | 87 +++++++++++++++++-- cmd/server/pactasrv/analysis.go | 6 ++ .../0011_add_report_file_types.down.sql | 13 +++ .../0011_add_report_file_types.up.sql | 9 ++ frontend/i18n.config.ts | 6 +- .../plugins/handle-missing-translation.ts | 2 +- pacta/pacta.go | 22 +++++ scripts/BUILD.bazel | 5 ++ scripts/build_and_load_runner.sh | 20 +++++ 10 files changed, 166 insertions(+), 19 deletions(-) create mode 100644 db/sqldb/migrations/0011_add_report_file_types.down.sql create mode 100644 db/sqldb/migrations/0011_add_report_file_types.up.sql create mode 100755 scripts/build_and_load_runner.sh diff --git a/cmd/runner/README.md b/cmd/runner/README.md index 4b6f15b..c6277d2 100644 --- a/cmd/runner/README.md +++ b/cmd/runner/README.md @@ -15,11 +15,12 @@ bazel run //scripts:run_server -- --use_azure_runner ### Creating a new docker image to run locally +When testing locally (e.g. without `--use_azure_runner`), you can build and tag a runner image locally and use that. To do that, run `bazel run //scripts:build_and_load_runner` + +### Cleaning up old runner containers + +By default, we don't auto-remove stopped containers (i.e. 
finished runner tasks), to give developers a chance to review the logs (e.g. with `docker logs `). To clean up all completed runs at once, run: + ```bash -# Build the runner binary -bazel build --@io_bazel_rules_go//go/config:pure //cmd/runner:image_tarball -# Load the new image into docker, which will output a SHA256 value -docker load < bazel-bin/cmd/runner/image_tarball/tarball.tar -# Tag the runner image in order for it to be picked up locally. Don't push this to the registry! -docker tag rmisa.azurecr.io/runner -``` \ No newline at end of file +docker rm $(docker ps -a -q -f "status=exited" -f "ancestor=rmisa.azurecr.io/runner:latest") +``` diff --git a/cmd/runner/main.go b/cmd/runner/main.go index 9cefb09..e555c09 100644 --- a/cmd/runner/main.go +++ b/cmd/runner/main.go @@ -180,24 +180,68 @@ func parsePortfolioReq() (*task.ParsePortfolioRequest, error) { return &task, nil } -func (h *handler) uploadDirectory(ctx context.Context, dirPath, container string) error { +func (h *handler) uploadDirectory(ctx context.Context, dirPath, container string) ([]*task.AnalysisArtifact, error) { base := filepath.Base(dirPath) - return filepath.WalkDir(dirPath, func(path string, info fs.DirEntry, err error) error { + var artifacts []*task.AnalysisArtifact + err := filepath.WalkDir(dirPath, func(path string, info fs.DirEntry, err error) error { if info.IsDir() { return nil } // This is a file, let's upload it to the container - uri := blob.Join(h.blob.Scheme(), container, base, strings.TrimPrefix(path, dirPath)) + uri := blob.Join(h.blob.Scheme(), container, base, strings.TrimPrefix(path, dirPath+"/")) if err := h.uploadBlob(ctx, path, uri); err != nil { return fmt.Errorf("failed to upload blob: %w", err) } + + fn := filepath.Base(path) + // Returns pacta.FileType_UNKNOWN for unrecognized extensions, which we'll serve as binary blobs. 
+ ft := fileTypeFromExt(filepath.Ext(fn)) + if ft == pacta.FileType_UNKNOWN { + h.logger.Error("unhandled file extension", zap.String("dir", dirPath), zap.String("file_ext", filepath.Ext(fn))) + } + artifacts = append(artifacts, &task.AnalysisArtifact{ + BlobURI: pacta.BlobURI(uri), + FileName: fn, + FileType: ft, + }) return nil }) + if err != nil { + return nil, fmt.Errorf("error while walking dir/uploading blobs: %w", err) + } + return artifacts, nil +} + +func fileTypeFromExt(ext string) pacta.FileType { + switch ext { + case ".csv": + return pacta.FileType_CSV + case ".yaml": + return pacta.FileType_YAML + case ".zip": + return pacta.FileType_ZIP + case ".html": + return pacta.FileType_HTML + case ".json": + return pacta.FileType_JSON + case ".txt": + return pacta.FileType_TEXT + case ".css": + return pacta.FileType_CSS + case ".js": + return pacta.FileType_JS + case ".ttf": + return pacta.FileType_TTF + default: + return pacta.FileType_UNKNOWN + } } func (h *handler) uploadBlob(ctx context.Context, srcPath, destURI string) error { + h.logger.Info("uploading blob", zap.String("src", srcPath), zap.String("dest", destURI)) + srcF, err := os.Open(srcPath) if err != nil { return fmt.Errorf("failed to open file for upload: %w", err) @@ -388,13 +432,16 @@ func (h *handler) createAudit(ctx context.Context, taskID task.ID, req *task.Cre func (h *handler) createReport(ctx context.Context, taskID task.ID, req *task.CreateReportRequest) error { fileNames := []string{} - for i, blobURI := range req.BlobURIs { + for _, blobURI := range req.BlobURIs { // Load the parsed portfolio from blob storage, place it in /mnt/ // processed_portfolios, where the `create_report.R` script expects it // to be. - fileName := fmt.Sprintf("%d.json", i) - fileNames = append(fileNames, fileName) - destPath := filepath.Join("/", "mnt", "processed_portfolios", fileName) + fileNameWithExt := filepath.Base(string(blobURI)) + if !strings.HasSuffix(fileNameWithExt, ".json") { + return fmt.Errorf("given blob wasn't a JSON-formatted portfolio, %q", fileNameWithExt) + } + fileNames = append(fileNames, strings.TrimSuffix(fileNameWithExt, ".json")) + destPath := filepath.Join("/", "mnt", "processed_portfolios", fileNameWithExt) if err := h.downloadBlob(ctx, string(blobURI), destPath); err != nil { return fmt.Errorf("failed to download processed portfolio blob: %w", err) } @@ -423,16 +470,40 @@ func (h *handler) createReport(ctx context.Context, taskID task.ID, req *task.Cr return fmt.Errorf("failed to read report directory: %w", err) } + var artifacts []*task.AnalysisArtifact for _, dirEntry := range dirEntries { if !dirEntry.IsDir() { continue } dirPath := filepath.Join(reportDir, dirEntry.Name()) - if err := h.uploadDirectory(ctx, dirPath, h.reportContainer); err != nil { + tmp, err := h.uploadDirectory(ctx, dirPath, h.reportContainer) + if err != nil { return fmt.Errorf("failed to upload report directory: %w", err) } + artifacts = tmp + } + + events := []publisher.Event{ + { + Data: task.CreateReportResponse{ + TaskID: taskID, + Request: req, + Artifacts: artifacts, + }, + DataVersion: to.Ptr("1.0"), + EventType: to.Ptr("created-report"), + EventTime: to.Ptr(time.Now()), + ID: to.Ptr(string(taskID)), + Subject: to.Ptr(string(taskID)), + }, + } + + if _, err := h.pubsub.PublishEvents(ctx, events, nil); err != nil { + return fmt.Errorf("failed to publish event: %w", err) } + h.logger.Info("created report", zap.String("task_id", string(taskID))) + return nil } diff --git a/cmd/server/pactasrv/analysis.go 
b/cmd/server/pactasrv/analysis.go index 2d69232..b0b16c0 100644 --- a/cmd/server/pactasrv/analysis.go +++ b/cmd/server/pactasrv/analysis.go @@ -255,6 +255,12 @@ func (s *Server) RunAnalysis(ctx context.Context, request api.RunAnalysisRequest return nil, oapierr.Internal("unknown analysis type", zap.String("analysis_type", string(analysisType))) } + now := s.Now() + if err := s.DB.UpdateAnalysis(s.DB.NoTxn(ctx), analysisID, db.SetAnalysisRanAt(now)); err != nil { + // Just log the error, it's non-critical + s.Logger.Error("failed to set ranAt time on analysis", zap.String("analysis_id", string(analysisID)), zap.Time("ran_at", now)) + } + return api.RunAnalysis200JSONResponse{AnalysisId: string(analysisID)}, nil } diff --git a/db/sqldb/migrations/0011_add_report_file_types.down.sql b/db/sqldb/migrations/0011_add_report_file_types.down.sql new file mode 100644 index 0000000..c801d50 --- /dev/null +++ b/db/sqldb/migrations/0011_add_report_file_types.down.sql @@ -0,0 +1,13 @@ +BEGIN; + +-- There isn't a way to delete a value from an enum, so this is the workaround +-- https://stackoverflow.com/a/56777227/17909149 +DROP TYPE file_type; +CREATE TYPE file_type AS ENUM ( + 'csv', + 'yaml', + 'zip', + 'html', + 'json'); + +COMMIT; diff --git a/db/sqldb/migrations/0011_add_report_file_types.up.sql b/db/sqldb/migrations/0011_add_report_file_types.up.sql new file mode 100644 index 0000000..cfd26fc --- /dev/null +++ b/db/sqldb/migrations/0011_add_report_file_types.up.sql @@ -0,0 +1,9 @@ +BEGIN; + +ALTER TYPE file_type ADD VALUE 'txt'; +ALTER TYPE file_type ADD VALUE 'css'; +ALTER TYPE file_type ADD VALUE 'js'; +ALTER TYPE file_type ADD VALUE 'ttf'; +ALTER TYPE file_type ADD VALUE ''; -- Unknown file types + +COMMIT; diff --git a/frontend/i18n.config.ts b/frontend/i18n.config.ts index 04cb498..fc8442d 100644 --- a/frontend/i18n.config.ts +++ b/frontend/i18n.config.ts @@ -5,10 +5,10 @@ export default defineI18nConfig(() => ({ missing: (locale, key, vm) => { // TODO(grady) figure out how to skip this if we're in production + just log. // Consider using process.env.NODE_ENV == 'prod', etc. 
- const fn = inject('handleMissingTranslation') + const fn = vm?.appContext.app.$nuxt.$missingTranslations.handleMissingTranslation if (fn) { - const callable = fn as (locale: string, key: string) => void - callable(locale, key) + // const callable = fn as (locale: string, key: string) => void + fn(locale, key) } }, })) diff --git a/frontend/plugins/handle-missing-translation.ts b/frontend/plugins/handle-missing-translation.ts index f45f48e..430332e 100644 --- a/frontend/plugins/handle-missing-translation.ts +++ b/frontend/plugins/handle-missing-translation.ts @@ -16,13 +16,13 @@ export default defineNuxtPlugin((nuxtApp) => { } } - nuxtApp.vueApp.provide('handleMissingTranslation', handleMissingTranslation) const values = computed(() => { return missingTranslations.value }) return { provide: { missingTranslations: { + handleMissingTranslation, values, numberMissing, }, diff --git a/pacta/pacta.go b/pacta/pacta.go index 1f46959..764506a 100644 --- a/pacta/pacta.go +++ b/pacta/pacta.go @@ -206,6 +206,13 @@ const ( FileType_ZIP = "zip" FileType_HTML = "html" FileType_JSON = "json" + + // All for serving reports + FileType_TEXT = "txt" + FileType_CSS = "css" + FileType_JS = "js" + FileType_TTF = "ttf" + FileType_UNKNOWN = "" ) var FileTypeValues = []FileType{ @@ -215,6 +222,11 @@ var FileTypeValues = []FileType{ FileType_JSON, FileType_HTML, FileType_JSON, + FileType_TEXT, + FileType_CSS, + FileType_JS, + FileType_TTF, + FileType_UNKNOWN, } func ParseFileType(s string) (FileType, error) { @@ -233,6 +245,16 @@ func ParseFileType(s string) (FileType, error) { return FileType_HTML, nil case "json": return FileType_JSON, nil + case "txt": + return FileType_TEXT, nil + case "css": + return FileType_CSS, nil + case "js": + return FileType_JS, nil + case "ttf": + return FileType_TTF, nil + case "": + return FileType_UNKNOWN, nil } return "", fmt.Errorf("unknown pacta.FileType: %q", s) } diff --git a/scripts/BUILD.bazel b/scripts/BUILD.bazel index 526e14b..3d48fc9 100644 --- a/scripts/BUILD.bazel +++ b/scripts/BUILD.bazel @@ -47,3 +47,8 @@ sh_binary( "//scripts/shared:migrate", ], ) + +sh_binary( + name = "build_and_load_runner", + srcs = ["build_and_load_runner.sh"], +) diff --git a/scripts/build_and_load_runner.sh b/scripts/build_and_load_runner.sh new file mode 100755 index 0000000..2b90a29 --- /dev/null +++ b/scripts/build_and_load_runner.sh @@ -0,0 +1,20 @@ +#!/bin/bash +set -euo pipefail + +ROOT="$BUILD_WORKSPACE_DIRECTORY" +cd "$ROOT" + +# Build the image +bazel build --@io_bazel_rules_go//go/config:pure //cmd/runner:image_tarball + +# Load it into Docker, capture output +LOAD_OUTPUT=$(docker load < bazel-bin/cmd/runner/image_tarball/tarball.tar) + +# Extract the SHA +IMAGE_ID=$(echo $LOAD_OUTPUT | grep -oP 'sha256:\K\w+') + +# Tag the image +docker tag $IMAGE_ID rmisa.azurecr.io/runner:latest + +echo "Tagged $IMAGE_ID as rmisa.azurecr.io/runner:latest" + From f82cc850d203961c44e79f1932e5cbad94ac1ffd Mon Sep 17 00:00:00 2001 From: Brandon Sprague Date: Fri, 12 Jan 2024 14:41:13 -0800 Subject: [PATCH 2/6] Run DB regen --- db/sqldb/golden/human_readable_schema.sql | 7 ++++++- db/sqldb/golden/schema_dump.sql | 7 ++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/db/sqldb/golden/human_readable_schema.sql b/db/sqldb/golden/human_readable_schema.sql index 0cf7970..20283df 100644 --- a/db/sqldb/golden/human_readable_schema.sql +++ b/db/sqldb/golden/human_readable_schema.sql @@ -47,7 +47,12 @@ CREATE TYPE file_type AS ENUM ( 'yaml', 'zip', 'html', - 'json'); + 'json', + 'txt', + 
'css', + 'js', + 'ttf', + ''); CREATE TYPE language AS ENUM ( 'en', 'de', diff --git a/db/sqldb/golden/schema_dump.sql b/db/sqldb/golden/schema_dump.sql index b01a776..4a7d555 100644 --- a/db/sqldb/golden/schema_dump.sql +++ b/db/sqldb/golden/schema_dump.sql @@ -116,7 +116,12 @@ CREATE TYPE public.file_type AS ENUM ( 'yaml', 'zip', 'html', - 'json' + 'json', + 'txt', + 'css', + 'js', + 'ttf', + '' ); From 72b214303639f6d4a2d5a3363ba6b94556ecc1a8 Mon Sep 17 00:00:00 2001 From: Brandon Sprague Date: Fri, 12 Jan 2024 14:42:22 -0800 Subject: [PATCH 3/6] Update migrations test --- db/sqldb/sqldb_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/db/sqldb/sqldb_test.go b/db/sqldb/sqldb_test.go index 8bf39fc..a5628a6 100644 --- a/db/sqldb/sqldb_test.go +++ b/db/sqldb/sqldb_test.go @@ -92,6 +92,8 @@ func TestSchemaHistory(t *testing.T) { {ID: 8, Version: 8}, // 0008_indexes_on_blob_ids {ID: 9, Version: 9}, // 0009_support_user_merge {ID: 10, Version: 10}, // 0010_audit_log_enum_values + {ID: 11, Version: 11}, // 0011_add_report_file_types + } if diff := cmp.Diff(want, got); diff != "" { From 0c1b96bac7cda1e69090ce2c09dbb3ddcd85c557 Mon Sep 17 00:00:00 2001 From: Brandon Sprague Date: Fri, 12 Jan 2024 15:48:43 -0800 Subject: [PATCH 4/6] Fix SQL test --- db/sqldb/blob.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/db/sqldb/blob.go b/db/sqldb/blob.go index 2593ed2..f48f82f 100644 --- a/db/sqldb/blob.go +++ b/db/sqldb/blob.go @@ -233,11 +233,9 @@ func validateBlobForCreation(b *pacta.Blob) error { if b.BlobURI == "" { return fmt.Errorf("blob missing BlobURI") } - if b.FileType == "" { - return fmt.Errorf("blob missing FileType") - } if b.FileName == "" { return fmt.Errorf("blob missing FileName") } + // A blank FileType is valid, just means we don't recognize the file type yet. return nil } From 69a9defea769c8b23ff9b5bdcca7e67b2f38f0ba Mon Sep 17 00:00:00 2001 From: Brandon Sprague Date: Fri, 12 Jan 2024 19:40:55 -0800 Subject: [PATCH 5/6] Address review comments --- cmd/runner/README.md | 8 +++++++- frontend/i18n.config.ts | 3 ++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/cmd/runner/README.md b/cmd/runner/README.md index c6277d2..80ba6a0 100644 --- a/cmd/runner/README.md +++ b/cmd/runner/README.md @@ -15,7 +15,13 @@ bazel run //scripts:run_server -- --use_azure_runner ### Creating a new docker image to run locally -When testing locally (e.g. without `--use_azure_runner`), you can build and tag a runner image locally and use that. To do that, run `bazel run //scripts:build_and_load_runner` +When developing the runner, you have two options: + +* **Test against local Docker** - Run the server **without** the `--use_azure_runner`, which means async tasks will run locally, using `docker run ...`. To test local runner changes, you can build and tag a runner image locally with `bazel run //scripts:build_and_load_runner`. + * After running the script, the updated runner will immediately be available, no need to restart the server. + * This is the option you'll want to use most of the time. +* **Test against Azure Container Apps Jobs** - Run the server **with** the `--use_azure_runner`, which means async tasks will be run on Azure, created via the Azure API. 
To test changes here, you can build and tag a runner image locally with `bazel run //scripts:build_and_load_runner`, and then push it to Azure with `docker push rmisa.azurecr.io/runner:latest` + * You generally won't need to use this option unless you're testing something very specific about the runner's integration with Azure, as the runner code is identical whether run locally or on Azure. ### Cleaning up old runner containers diff --git a/frontend/i18n.config.ts b/frontend/i18n.config.ts index fc8442d..9c0fa03 100644 --- a/frontend/i18n.config.ts +++ b/frontend/i18n.config.ts @@ -7,8 +7,9 @@ export default defineI18nConfig(() => ({ // Consider using process.env.NODE_ENV == 'prod', etc. const fn = vm?.appContext.app.$nuxt.$missingTranslations.handleMissingTranslation if (fn) { - // const callable = fn as (locale: string, key: string) => void fn(locale, key) + } else { + console.warn(`No handleMissingTranslation function found, can't handle ${locale} ${key}`) } }, })) From 23527435e40e085b5204cf26c910dffa858f0530 Mon Sep 17 00:00:00 2001 From: Brandon Sprague Date: Mon, 15 Jan 2024 18:45:59 -0800 Subject: [PATCH 6/6] Rename `''` filetype to `'unknown'` --- db/sqldb/blob.go | 4 +++- db/sqldb/golden/human_readable_schema.sql | 2 +- db/sqldb/golden/schema_dump.sql | 2 +- db/sqldb/migrations/0011_add_report_file_types.up.sql | 2 +- pacta/pacta.go | 4 ++-- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/db/sqldb/blob.go b/db/sqldb/blob.go index f48f82f..2593ed2 100644 --- a/db/sqldb/blob.go +++ b/db/sqldb/blob.go @@ -233,9 +233,11 @@ func validateBlobForCreation(b *pacta.Blob) error { if b.BlobURI == "" { return fmt.Errorf("blob missing BlobURI") } + if b.FileType == "" { + return fmt.Errorf("blob missing FileType") + } if b.FileName == "" { return fmt.Errorf("blob missing FileName") } - // A blank FileType is valid, just means we don't recognize the file type yet. 
return nil } diff --git a/db/sqldb/golden/human_readable_schema.sql b/db/sqldb/golden/human_readable_schema.sql index 20283df..093cda3 100644 --- a/db/sqldb/golden/human_readable_schema.sql +++ b/db/sqldb/golden/human_readable_schema.sql @@ -52,7 +52,7 @@ CREATE TYPE file_type AS ENUM ( 'css', 'js', 'ttf', - ''); + 'unknown'); CREATE TYPE language AS ENUM ( 'en', 'de', diff --git a/db/sqldb/golden/schema_dump.sql b/db/sqldb/golden/schema_dump.sql index 4a7d555..a07ea70 100644 --- a/db/sqldb/golden/schema_dump.sql +++ b/db/sqldb/golden/schema_dump.sql @@ -121,7 +121,7 @@ CREATE TYPE public.file_type AS ENUM ( 'css', 'js', 'ttf', - '' + 'unknown' ); diff --git a/db/sqldb/migrations/0011_add_report_file_types.up.sql b/db/sqldb/migrations/0011_add_report_file_types.up.sql index cfd26fc..19e567e 100644 --- a/db/sqldb/migrations/0011_add_report_file_types.up.sql +++ b/db/sqldb/migrations/0011_add_report_file_types.up.sql @@ -4,6 +4,6 @@ ALTER TYPE file_type ADD VALUE 'txt'; ALTER TYPE file_type ADD VALUE 'css'; ALTER TYPE file_type ADD VALUE 'js'; ALTER TYPE file_type ADD VALUE 'ttf'; -ALTER TYPE file_type ADD VALUE ''; -- Unknown file types +ALTER TYPE file_type ADD VALUE 'unknown'; COMMIT; diff --git a/pacta/pacta.go b/pacta/pacta.go index 764506a..fd733c8 100644 --- a/pacta/pacta.go +++ b/pacta/pacta.go @@ -212,7 +212,7 @@ const ( FileType_CSS = "css" FileType_JS = "js" FileType_TTF = "ttf" - FileType_UNKNOWN = "" + FileType_UNKNOWN = "unknown" ) var FileTypeValues = []FileType{ @@ -253,7 +253,7 @@ func ParseFileType(s string) (FileType, error) { return FileType_JS, nil case "ttf": return FileType_TTF, nil - case "": + case "unknown": return FileType_UNKNOWN, nil } return "", fmt.Errorf("unknown pacta.FileType: %q", s)
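
For reference, here is a minimal consistency check that is *not* part of the patch series above: it ties the `fileTypeFromExt` helper added to `cmd/runner/main.go` in patch 1 to the `pacta.ParseFileType` changes in this final patch, so that every extension the runner recognizes (and the new `unknown` fallback) survives a round trip through the parser. The import path and the placement of the test file (in `package main` alongside `cmd/runner/main.go`) are assumptions, not something the patches establish.

```go
package main

import (
	"testing"

	// Assumed module path; use whatever import cmd/runner/main.go already declares.
	"github.com/RMI/pacta/pacta"
)

// TestFileTypeRoundTrip is an illustrative sketch: every pacta.FileType the
// runner can emit from fileTypeFromExt should be accepted by pacta.ParseFileType.
func TestFileTypeRoundTrip(t *testing.T) {
	// Extensions handled explicitly by fileTypeFromExt, plus one (.wasm) that
	// should fall through to the new 'unknown' value rather than erroring.
	exts := []string{".csv", ".yaml", ".zip", ".html", ".json", ".txt", ".css", ".js", ".ttf", ".wasm"}
	for _, ext := range exts {
		ft := fileTypeFromExt(ext)
		parsed, err := pacta.ParseFileType(string(ft))
		if err != nil {
			t.Fatalf("ParseFileType(%q) for extension %q: %v", ft, ext, err)
		}
		if parsed != ft {
			t.Errorf("round-trip mismatch for extension %q: got %q, want %q", ext, parsed, ft)
		}
	}
}
```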