Skip to content

Commit

Permalink
Some cleanup and tweaks.
Browse files Browse the repository at this point in the history
  • Loading branch information
naveensrinivasan committed Jul 21, 2022
1 parent c47e46d commit 28e68ff
Show file tree
Hide file tree
Showing 5 changed files with 59 additions and 36 deletions.
30 changes: 15 additions & 15 deletions cron/internal/config/config.go
Expand Up @@ -34,20 +34,20 @@ const (
// ShardNumFilename is the name of the file that stores the number of shards.
ShardNumFilename string = ".shard_num"
// TransferStatusFilename is the name of the file that indicates whether the shard transfer to BigQuery is complete.
TransferStatusFilename string = ".transfer_complete"
projectID string = "SCORECARD_PROJECT_ID"
requestTopicURL string = "SCORECARD_REQUEST_TOPIC_URL"
requestSubscriptionURL string = "SCORECARD_REQUEST_SUBSCRIPTION_URL"
bigqueryDataset string = "SCORECARD_BIGQUERY_DATASET"
completionThreshold string = "SCORECARD_COMPLETION_THRESHOLD"
shardSize string = "SCORECARD_SHARD_SIZE"
webhookURL string = "SCORECARD_WEBHOOK_URL"
metricExporter string = "SCORECARD_METRIC_EXPORTER"
ciiDataBucketURL string = "SCORECARD_CII_DATA_BUCKET_URL"
blacklistedChecks string = "SCORECARD_BLACKLISTED_CHECKS"
bigqueryTable string = "SCORECARD_BIGQUERY_TABLE"
resultDataBucketURL string = "SCORECARD_DATA_BUCKET_URL"
bqExportResultsBucketURL string = "SCORECARD_BQ_EXPORT_RESULTS_BUCKET_URL"
TransferStatusFilename string = ".transfer_complete"
projectID string = "SCORECARD_PROJECT_ID"
requestTopicURL string = "SCORECARD_REQUEST_TOPIC_URL"
requestSubscriptionURL string = "SCORECARD_REQUEST_SUBSCRIPTION_URL"
bigqueryDataset string = "SCORECARD_BIGQUERY_DATASET"
completionThreshold string = "SCORECARD_COMPLETION_THRESHOLD"
shardSize string = "SCORECARD_SHARD_SIZE"
webhookURL string = "SCORECARD_WEBHOOK_URL"
metricExporter string = "SCORECARD_METRIC_EXPORTER"
ciiDataBucketURL string = "SCORECARD_CII_DATA_BUCKET_URL"
blacklistedChecks string = "SCORECARD_BLACKLISTED_CHECKS"
bigqueryTable string = "SCORECARD_BIGQUERY_TABLE"
resultDataBucketURL string = "SCORECARD_DATA_BUCKET_URL"
apiResultsBucketURL string = "SCORECARD_API_RESULTS_BUCKET_URL"
// Raw results.
rawBigqueryTable string = "RAW_SCORECARD_BIGQUERY_TABLE"
rawResultDataBucketURL string = "RAW_SCORECARD_DATA_BUCKET_URL"
Expand Down Expand Up @@ -236,6 +236,6 @@ func GetMetricExporter() (string, error) {

// GetBQExportResultsBucketURL returns the bucket URL for storing cron job results.
func GetBQExportResultsBucketURL() (string, error) {
return getStringConfigValue(bqExportResultsBucketURL, configYAML,
return getStringConfigValue(apiResultsBucketURL, configYAML,
"BigQueryExportResultsBucketURL", "bq-export-results-bucket-url")
}
2 changes: 1 addition & 1 deletion cron/internal/config/config.yaml
Expand Up @@ -30,4 +30,4 @@ result-data-bucket-url: gs://ossf-scorecard-data2
raw-result-data-bucket-url: gs://ossf-scorecard-rawdata
raw-bigquery-table: scorecard-rawdata
# export-bucket
bigquery-export-results-bucket-url: gs://ossf-scorecard-bq-export-results
bigquery-export-results-bucket-url: gs://ossf-scorecard-cron-results
33 changes: 17 additions & 16 deletions cron/internal/config/config_test.go
Expand Up @@ -39,7 +39,7 @@ const (
// Raw results.
prodRawBucket = "gs://ossf-scorecard-rawdata"
prodRawBigQueryTable = "scorecard-rawdata"
prodBigQueryExportsBucketURL = "gs://ossf-scorecard-bq-export-results"
prodBigQueryExportsBucketURL = "gs://ossf-scorecard-cron-results"
)

func getByteValueFromFile(filename string) ([]byte, error) {
Expand All @@ -61,20 +61,21 @@ func TestYAMLParsing(t *testing.T) {
name: "validate",
filename: "config.yaml",
expectedConfig: config{
ProjectID: prodProjectID,
ResultDataBucketURL: prodBucket,
RequestTopicURL: prodTopic,
RequestSubscriptionURL: prodSubscription,
BigQueryDataset: prodBigQueryDataset,
BigQueryTable: prodBigQueryTable,
CompletionThreshold: prodCompletionThreshold,
WebhookURL: prodWebhookURL,
CIIDataBucketURL: prodCIIDataBucket,
BlacklistedChecks: prodBlacklistedChecks,
ShardSize: prodShardSize,
MetricExporter: prodMetricExporter,
RawResultDataBucketURL: prodRawBucket,
RawBigQueryTable: prodRawBigQueryTable,
ProjectID: prodProjectID,
ResultDataBucketURL: prodBucket,
RequestTopicURL: prodTopic,
RequestSubscriptionURL: prodSubscription,
BigQueryDataset: prodBigQueryDataset,
BigQueryTable: prodBigQueryTable,
CompletionThreshold: prodCompletionThreshold,
WebhookURL: prodWebhookURL,
CIIDataBucketURL: prodCIIDataBucket,
BlacklistedChecks: prodBlacklistedChecks,
ShardSize: prodShardSize,
MetricExporter: prodMetricExporter,
RawResultDataBucketURL: prodRawBucket,
RawBigQueryTable: prodRawBigQueryTable,
BigQueryExportResultsBucketURL: prodBigQueryExportsBucketURL,
},
},

Expand Down Expand Up @@ -349,7 +350,7 @@ func TestGetMetricExporter(t *testing.T) {
//nolint:paralleltest // Since os.Setenv is used.
func TestGetBigQueryExportsBucketURL(t *testing.T) {
t.Run("GetBigQueryExportsBucketURL", func(t *testing.T) {
bigqueryExportsBucketURL := bqExportResultsBucketURL
bigqueryExportsBucketURL := apiResultsBucketURL
os.Unsetenv(bigqueryExportsBucketURL)
bucket, err := GetBQExportResultsBucketURL()
if err != nil {
Expand Down
28 changes: 24 additions & 4 deletions cron/internal/worker/main.go
Expand Up @@ -129,20 +129,40 @@ func processRequest(ctx context.Context,
if err := format.AsJSON2(&result, true /*showDetails*/, log.InfoLevel, checkDocs, &buffer2); err != nil {
return fmt.Errorf("error during result.AsJSON2: %w", err)
}
// These buffers hold the results exported to GCS for API consumption.
var exportBuffer bytes.Buffer
var exportRawBuffer bytes.Buffer

if err := format.AsJSON2(&result, true /*showDetails*/, log.InfoLevel, checkDocs, &exportBuffer); err != nil {
return fmt.Errorf("error during result.AsJSON2 for export: %w", err)
}
if err := format.AsRawJSON(&result, &exportRawBuffer); err != nil {
return fmt.Errorf("error during result.AsRawJSON for export: %w", err)
}
exportPath := fmt.Sprintf("%s/result.json", repo.URI())
exportCommitSHAPath := fmt.Sprintf("%s/%s/result.json", repo.URI(), result.Repo.CommitSHA)
exportRawPath := fmt.Sprintf("%s/raw.json", repo.URI())
exportRawCommitSHAPath := fmt.Sprintf("%s/%s/raw.json", repo.URI(), result.Repo.CommitSHA)

// Raw result.
if err := format.AsRawJSON(&result, &rawBuffer); err != nil {
return fmt.Errorf("error during result.AsRawJSON: %w", err)
}

if err := data.WriteToBlobStore(ctx, exportBucketURL, exportPath, buffer2.Bytes()); err != nil {
return fmt.Errorf("error during WriteToBlobStore2: %w", err)
// These paths omit the commit SHA and always hold the results for the latest commit.
if err := data.WriteToBlobStore(ctx, exportBucketURL, exportPath, exportBuffer.Bytes()); err != nil {
return fmt.Errorf("error during writing to exportBucketURL: %w", err)
}
// Export result based on commitSHA.
if err := data.WriteToBlobStore(ctx, exportBucketURL, exportCommitSHAPath, buffer2.Bytes()); err != nil {
return fmt.Errorf("error during WriteToBlobStore2: %w", err)
if err := data.WriteToBlobStore(ctx, exportBucketURL, exportCommitSHAPath, exportBuffer.Bytes()); err != nil {
return fmt.Errorf("error during exportBucketURL with commit SHA: %w", err)
}
// Export raw result.
if err := data.WriteToBlobStore(ctx, exportBucketURL, exportRawPath, exportRawBuffer.Bytes()); err != nil {
return fmt.Errorf("error during writing to exportBucketURL for raw results: %w", err)
}
if err := data.WriteToBlobStore(ctx, exportBucketURL, exportRawCommitSHAPath, exportRawBuffer.Bytes()); err != nil {
return fmt.Errorf("error during exportBucketURL for raw results with commit SHA: %w", err)
}
}

Expand Down
2 changes: 2 additions & 0 deletions cron/k8s/worker.release.yaml
Expand Up @@ -42,6 +42,8 @@ spec:
value: "printer"
- name: GITHUB_AUTH_SERVER
value: "10.4.4.210:80"
- name: "SCORECARD_API_RESULTS_BUCKET_URL"
value: "gs://ossf-scorecard-cron-results"
resources:
requests:
memory: 5Gi
Expand Down

0 comments on commit 28e68ff

Please sign in to comment.