// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package main

/*
	Generate the tasks.json file.
*/

import (
	"encoding/json"
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"runtime"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/golang/glog"
	"go.skia.org/infra/go/sklog"
	"go.skia.org/infra/go/util"
	"go.skia.org/infra/task_scheduler/go/specs"
)

const (
	BUNDLE_RECIPES_NAME         = "Housekeeper-PerCommit-BundleRecipes"
	ISOLATE_GCLOUD_LINUX_NAME   = "Housekeeper-PerCommit-IsolateGCloudLinux"
	ISOLATE_GO_DEPS_NAME        = "Housekeeper-PerCommit-IsolateGoDeps"
	ISOLATE_GO_LINUX_NAME       = "Housekeeper-PerCommit-IsolateGoLinux"
	ISOLATE_SKIMAGE_NAME        = "Housekeeper-PerCommit-IsolateSkImage"
	ISOLATE_SKP_NAME            = "Housekeeper-PerCommit-IsolateSKP"
	ISOLATE_SVG_NAME            = "Housekeeper-PerCommit-IsolateSVG"
	ISOLATE_NDK_LINUX_NAME      = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
	ISOLATE_SDK_LINUX_NAME      = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
	ISOLATE_WIN_TOOLCHAIN_NAME  = "Housekeeper-PerCommit-IsolateWinToolchain"
	ISOLATE_WIN_VULKAN_SDK_NAME = "Housekeeper-PerCommit-IsolateWinVulkanSDK"

	DEFAULT_OS_DEBIAN    = "Debian-9.4"
	DEFAULT_OS_LINUX_GCE = DEFAULT_OS_DEBIAN
	DEFAULT_OS_MAC       = "Mac-10.13.6"
	DEFAULT_OS_UBUNTU    = "Ubuntu-14.04"
	DEFAULT_OS_WIN       = "Windows-2016Server-14393"

	DEFAULT_PROJECT = "skia"

	// Small is a 2-core machine.
	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
	MACHINE_TYPE_SMALL = "n1-highmem-2"
	// Medium is a 16-core machine
	MACHINE_TYPE_MEDIUM = "n1-standard-16"
	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
	// any of our tasks.)
	MACHINE_TYPE_LARGE = "n1-highcpu-64"

	// Swarming output dirs.
	OUTPUT_NONE  = "output_ignored" // This will result in outputs not being isolated.
	OUTPUT_BUILD = "build"
	OUTPUT_TEST  = "test"
	OUTPUT_PERF  = "perf"

	// Name prefix for upload jobs.
	PREFIX_UPLOAD = "Upload"

	SERVICE_ACCOUNT_BOOKMAKER          = "skia-bookmaker@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_COMPILE            = "skia-external-compile-tasks@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_HOUSEKEEPER        = "skia-external-housekeeper@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_RECREATE_SKPS      = "skia-recreate-skps@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_UPDATE_GO_DEPS     = "skia-recreate-skps@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_UPDATE_META_CONFIG = "skia-update-meta-config@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_UPLOAD_BINARY      = "skia-external-binary-uploader@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_UPLOAD_CALMBENCH   = "skia-external-calmbench-upload@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_UPLOAD_GM          = "skia-external-gm-uploader@skia-swarming-bots.iam.gserviceaccount.com"
	SERVICE_ACCOUNT_UPLOAD_NANO        = "skia-external-nano-uploader@skia-swarming-bots.iam.gserviceaccount.com"
)

var (
	// "Constants"

	// Top-level list of all jobs to run at each commit; loaded from
	// jobs.json.
	JOBS []string

	// General configuration information.
	CONFIG struct {
		GsBucketGm    string   `json:"gs_bucket_gm"`
		GoldHashesURL string   `json:"gold_hashes_url"`
		GsBucketNano  string   `json:"gs_bucket_nano"`
		GsBucketCalm  string   `json:"gs_bucket_calm"`
		NoUpload      []string `json:"no_upload"`
		Pool          string   `json:"pool"`
	}

	// alternateProject can be set in an init function to override the default project ID.
	alternateProject string

	// alternateServiceAccount can be set in an init function to override the normal service accounts.
	// Takes one of SERVICE_ACCOUNT_* constants as an argument and returns the service account that
	// should be used, or uses sklog.Fatal to indicate a problem.
	alternateServiceAccount func(serviceAccountEnum string) string

	// alternateSwarmDimensions can be set in an init function to override the default swarming bot
	// dimensions for the given task.
	alternateSwarmDimensions func(parts map[string]string) []string

	// internalHardwareLabelFn can be set in an init function to provide an
	// internal_hardware_label variable to the recipe.
	internalHardwareLabelFn func(parts map[string]string) *int

	// Defines the structure of job names.
	jobNameSchema *JobNameSchema

	// Named caches used by tasks.
	CACHES_GIT = []*specs.Cache{
		&specs.Cache{
			Name: "git",
			Path: "cache/git",
		},
		&specs.Cache{
			Name: "git_cache",
			Path: "cache/git_cache",
		},
	}
	CACHES_WORKDIR = []*specs.Cache{
		&specs.Cache{
			Name: "work",
			Path: "cache/work",
		},
	}
	CACHES_DOCKER = []*specs.Cache{
		&specs.Cache{
			Name: "docker",
			Path: "cache/docker",
		},
	}

	// Versions of the following copied from
	// https://chrome-internal.googlesource.com/infradata/config/+/master/configs/cr-buildbucket/swarming_task_template_canary.json#42
	// to test the fix for chromium:836196.
	// (In the future we may want to use versions from
	// https://chrome-internal.googlesource.com/infradata/config/+/master/configs/cr-buildbucket/swarming_task_template.json#42)
	// TODO(borenet): Roll these versions automatically!
	CIPD_PKGS_PYTHON = []*specs.CipdPackage{
		&specs.CipdPackage{
			Name:    "infra/tools/luci/vpython/${platform}",
			Path:    "cipd_bin_packages",
			Version: "git_revision:b6cdec8586c9f8d3d728b1bc0bd4331330ba66fc",
		},
	}

	CIPD_PKGS_CPYTHON = []*specs.CipdPackage{
		&specs.CipdPackage{
			Name:    "infra/python/cpython/${platform}",
			Path:    "cipd_bin_packages",
			Version: "version:2.7.14.chromium14",
		},
	}

	CIPD_PKGS_KITCHEN = append([]*specs.CipdPackage{
		&specs.CipdPackage{
			Name:    "infra/tools/luci/kitchen/${platform}",
			Path:    ".",
			Version: "git_revision:546aae39f1fb9dce9add528e2011afa574535ecd",
		},
		&specs.CipdPackage{
			Name:    "infra/tools/luci-auth/${platform}",
			Path:    "cipd_bin_packages",
			Version: "git_revision:e1abc57be62d198b5c2f487bfb2fa2d2eb0e867c",
		},
	}, CIPD_PKGS_PYTHON...)

	CIPD_PKGS_GIT = []*specs.CipdPackage{
		&specs.CipdPackage{
			Name:    "infra/git/${platform}",
			Path:    "cipd_bin_packages",
			Version: "version:2.17.1.chromium15",
		},
		&specs.CipdPackage{
			Name:    "infra/tools/git/${platform}",
			Path:    "cipd_bin_packages",
			Version: "git_revision:0ae21738597e5601ba90372315145fec18582fc4",
		},
		&specs.CipdPackage{
			Name:    "infra/tools/luci/git-credential-luci/${platform}",
			Path:    "cipd_bin_packages",
			Version: "git_revision:e1abc57be62d198b5c2f487bfb2fa2d2eb0e867c",
		},
	}

	CIPD_PKGS_GSUTIL = []*specs.CipdPackage{
		&specs.CipdPackage{
			Name:    "infra/gsutil",
			Path:    "cipd_bin_packages",
			Version: "version:4.28",
		},
	}

	CIPD_PKGS_XCODE = []*specs.CipdPackage{
		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
		// This package is really just an installer for XCode.
		&specs.CipdPackage{
			Name: "infra/tools/mac_toolchain/${platform}",
			Path: "mac_toolchain",
			// When this is updated, also update
			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
			Version: "git_revision:796d2b92cff93fc2059623ce0a66284373ceea0a",
		},
	}

	// Flags.
	builderNameSchemaFile = flag.String("builder_name_schema", "", "Path to the builder_name_schema.json file. If not specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json from this repo.")
	assetsDir             = flag.String("assets_dir", "", "Directory containing assets.")
	cfgFile               = flag.String("cfg_file", "", "JSON file containing general configuration information.")
	jobsFile              = flag.String("jobs", "", "JSON file containing jobs to run.")
)

// Build the LogDog annotation URL.
func logdogAnnotationUrl() string {
	project := DEFAULT_PROJECT
	if alternateProject != "" {
		project = alternateProject
	}
	return fmt.Sprintf("logdog://logs.chromium.org/%s/%s/+/annotations", project, specs.PLACEHOLDER_TASK_ID)
}

// Create a properties JSON string.
func props(p map[string]string) string {
	d := make(map[string]interface{}, len(p)+1)
	for k, v := range p {
		d[k] = interface{}(v)
	}
	d["$kitchen"] = struct {
		DevShell bool `json:"devshell"`
		GitAuth  bool `json:"git_auth"`
	}{
		DevShell: true,
		GitAuth:  true,
	}

	j, err := json.Marshal(d)
	if err != nil {
		sklog.Fatal(err)
	}
	return strings.Replace(string(j), "\\u003c", "<", -1)
}

// kitchenTask returns a specs.TaskSpec instance which uses Kitchen to run a
// recipe.
func kitchenTask(name, recipe, isolate, serviceAccount string, dimensions []string, extraProps map[string]string, outputDir string) *specs.TaskSpec {
	if serviceAccount != "" && alternateServiceAccount != nil {
		serviceAccount = alternateServiceAccount(serviceAccount)
	}
	cipd := append([]*specs.CipdPackage{}, CIPD_PKGS_KITCHEN...)
	if strings.Contains(name, "Win") {
		cipd = append(cipd, CIPD_PKGS_CPYTHON...)
	}
	properties := map[string]string{
		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
		"buildername":          name,
		"patch_issue":          specs.PLACEHOLDER_ISSUE,
		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
		"patch_set":            specs.PLACEHOLDER_PATCHSET,
		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
		"repository":           specs.PLACEHOLDER_REPO,
		"revision":             specs.PLACEHOLDER_REVISION,
		"swarm_out_dir":        outputDir,
	}
	for k, v := range extraProps {
		properties[k] = v
	}
	var outputs []string = nil
	if outputDir != OUTPUT_NONE {
		outputs = []string{outputDir}
	}
	task := &specs.TaskSpec{
		Caches: []*specs.Cache{
			&specs.Cache{
				Name: "vpython",
				Path: "cache/vpython",
			},
		},
		CipdPackages: cipd,
		Command: []string{
			"./kitchen${EXECUTABLE_SUFFIX}", "cook",
			"-checkout-dir", "recipe_bundle",
			"-mode", "swarming",
			"-luci-system-account", "system",
			"-cache-dir", "cache",
			"-temp-dir", "tmp",
			"-known-gerrit-host", "android.googlesource.com",
			"-known-gerrit-host", "boringssl.googlesource.com",
			"-known-gerrit-host", "chromium.googlesource.com",
			"-known-gerrit-host", "dart.googlesource.com",
			"-known-gerrit-host", "fuchsia.googlesource.com",
			"-known-gerrit-host", "go.googlesource.com",
			"-known-gerrit-host", "llvm.googlesource.com",
			"-known-gerrit-host", "skia.googlesource.com",
			"-known-gerrit-host", "webrtc.googlesource.com",
			"-output-result-json", "${ISOLATED_OUTDIR}/build_result_filename",
			"-workdir", ".",
			"-recipe", recipe,
			"-properties", props(properties),
			"-logdog-annotation-url", logdogAnnotationUrl(),
		},
		Dependencies: []string{BUNDLE_RECIPES_NAME},
		Dimensions:   dimensions,
		EnvPrefixes: map[string][]string{
			"PATH":                    []string{"cipd_bin_packages", "cipd_bin_packages/bin"},
			"VPYTHON_VIRTUALENV_ROOT": []string{"cache/vpython"},
		},
		ExtraTags: map[string]string{
			"log_location": logdogAnnotationUrl(),
		},
		Isolate:        relpath(isolate),
		Outputs:        outputs,
		ServiceAccount: serviceAccount,
	}
	timeout(task, time.Hour)
	return task
}

// internalHardwareLabel returns the internal ID for the bot, if any.
func internalHardwareLabel(parts map[string]string) *int {
	if internalHardwareLabelFn != nil {
		return internalHardwareLabelFn(parts)
	}
	return nil
}

// linuxGceDimensions are the Swarming dimensions for Linux GCE instances.
func linuxGceDimensions(machineType string) []string {
	return []string{
		// Specify CPU to avoid running builds on bots with a more unique CPU.
		"cpu:x86-64-Haswell_GCE",
		"gpu:none",
		// Currently all Linux GCE tasks run on 16-CPU machines.
		fmt.Sprintf("machine_type:%s", machineType),
		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
		fmt.Sprintf("pool:%s", CONFIG.Pool),
	}
}
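
// wasmGceDimensions are the Swarming dimensions for WASM build tasks, which run
// on Linux GCE instances with Docker installed.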
func wasmGceDimensions() []string {
	// There's limited parallelism for WASM builds, so we can get away with the medium
	// instance instead of the beefy large instance.
	// Docker being installed is the most important part.
	return append(linuxGceDimensions(MACHINE_TYPE_MEDIUM), "docker_installed:true")
}

// deriveCompileTaskName returns the name of a compile task based on the given
// job name.
func deriveCompileTaskName(jobName string, parts map[string]string) string {
	if strings.Contains(jobName, "Bookmaker") {
		return "Build-Debian9-GCC-x86_64-Release"
	} else if parts["role"] == "Test" || parts["role"] == "Perf" || parts["role"] == "Calmbench" {
		task_os := parts["os"]
		ec := []string{}
		if val := parts["extra_config"]; val != "" {
			ec = strings.Split(val, "_")
			ignore := []string{"Skpbench", "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind", "ReleaseAndAbandonGpuContext", "CCPR", "FSAA", "FAAA", "FDAA", "NativeFonts", "GDI", "NoGPUThreads", "ProcDump", "DDL1", "DDL3", "T8888", "DDLTotal", "DDLRecord", "9x9", "BonusConfigs"}
			keep := make([]string, 0, len(ec))
			for _, part := range ec {
				if !util.In(part, ignore) {
					keep = append(keep, part)
				}
			}
			ec = keep
		}
		if task_os == "Android" {
			if !util.In("Android", ec) {
				ec = append([]string{"Android"}, ec...)
			}
			task_os = "Debian9"
		} else if task_os == "Chromecast" {
			task_os = "Debian9"
			ec = append([]string{"Chromecast"}, ec...)
		} else if strings.Contains(task_os, "ChromeOS") {
			ec = append([]string{"Chromebook", "GLES"}, ec...)
			task_os = "Debian9"
		} else if task_os == "iOS" {
			ec = append([]string{task_os}, ec...)
			task_os = "Mac"
		} else if strings.Contains(task_os, "Win") {
			task_os = "Win"
		} else if strings.Contains(task_os, "Ubuntu") || strings.Contains(task_os, "Debian") {
			task_os = "Debian9"
		}
		jobNameMap := map[string]string{
			"role":          "Build",
			"os":            task_os,
			"compiler":      parts["compiler"],
			"target_arch":   parts["arch"],
			"configuration": parts["configuration"],
		}
		if strings.Contains(jobName, "PathKit") {
			ec = []string{"PathKit"}
		}
		if strings.Contains(jobName, "CanvasKit") {
			if parts["cpu_or_gpu"] == "CPU" {
				ec = []string{"CanvasKit_CPU"}
			} else {
				ec = []string{"CanvasKit"}
			}
		}
		if len(ec) > 0 {
			jobNameMap["extra_config"] = strings.Join(ec, "_")
		}
		name, err := jobNameSchema.MakeJobName(jobNameMap)
		if err != nil {
			glog.Fatal(err)
		}
		return name
	} else if parts["role"] == "BuildStats" {
		return strings.Replace(jobName, "BuildStats", "Build", 1)
	} else {
		return jobName
	}
}

// swarmDimensions generates swarming bot dimensions for the given task.
func swarmDimensions(parts map[string]string) []string {
	if alternateSwarmDimensions != nil {
		return alternateSwarmDimensions(parts)
	}
	return defaultSwarmDimensions(parts)
}

// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
func defaultSwarmDimensions(parts map[string]string) []string {
	d := map[string]string{
		"pool": CONFIG.Pool,
	}
	if os, ok := parts["os"]; ok {
		d["os"], ok = map[string]string{
			"Android":    "Android",
			"Chromecast": "Android",
			"ChromeOS":   "ChromeOS",
			"Debian9":    DEFAULT_OS_DEBIAN,
			"Mac":        DEFAULT_OS_MAC,
			"Ubuntu14":   DEFAULT_OS_UBUNTU,
			"Ubuntu17":   "Ubuntu-17.04",
			"Ubuntu18":   "Ubuntu-18.04",
			"Win":        DEFAULT_OS_WIN,
			"Win10":      "Windows-10-17134.407",
			"Win2k8":     "Windows-2008ServerR2-SP1",
			"Win2016":    DEFAULT_OS_WIN,
			"Win7":       "Windows-7-SP1",
			"Win8":       "Windows-8.1-SP0",
			"iOS":        "iOS-11.4.1",
		}[os]
		if !ok {
			glog.Fatalf("Entry %q not found in OS mapping.", os)
		}
		if os == "Win10" && parts["model"] == "Golo" {
			// ChOps-owned machines have Windows 10 v1709, but a slightly different version than Skolo.
			d["os"] = "Windows-10-16299.309"
		}
		if d["os"] == DEFAULT_OS_WIN {
			// TODO(dogben): Temporarily add image dimension during upgrade.
			d["image"] = "windows-server-2016-dc-v20180710"
		}
	} else {
		d["os"] = DEFAULT_OS_DEBIAN
	}
	if parts["role"] == "Test" || parts["role"] == "Perf" || parts["role"] == "Calmbench" {
		if strings.Contains(parts["os"], "Android") || strings.Contains(parts["os"], "Chromecast") {
			// For Android, the device type is a better dimension
			// than CPU or GPU.
			deviceInfo, ok := map[string][]string{
				"AndroidOne":      {"sprout", "MOB30Q"},
				"Chorizo":         {"chorizo", "1.30_109591"},
				"GalaxyS6":        {"zerofltetmo", "NRD90M_G920TUVU5FQK1"},
				"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
				"MotoG4":          {"athene", "NPJS25.93-14.7-8"},
				"NVIDIA_Shield":   {"foster", "OPR6.170623.010"},
				"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
				"Nexus5x":         {"bullhead", "OPR6.170623.023"},
				"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
				"NexusPlayer":     {"fugu", "OPR2.170623.027"},
				"Pixel":           {"sailfish", "PPR1.180610.009"},
				"Pixel2XL":        {"taimen", "PPR1.180610.009"},
			}[parts["model"]]
			if !ok {
				glog.Fatalf("Entry %q not found in Android mapping.", parts["model"])
			}
			d["device_type"] = deviceInfo[0]
			d["device_os"] = deviceInfo[1]
		} else if strings.Contains(parts["os"], "iOS") {
			device, ok := map[string]string{
				"iPadMini4": "iPad5,1",
				"iPhone6":   "iPhone7,2",
				"iPhone7":   "iPhone9,1",
				"iPadPro":   "iPad6,3",
			}[parts["model"]]
			if !ok {
				glog.Fatalf("Entry %q not found in iOS mapping.", parts["model"])
			}
			d["device"] = device
		} else if strings.Contains(parts["extra_config"], "SwiftShader") {
			if parts["model"] != "GCE" || d["os"] != DEFAULT_OS_DEBIAN || parts["cpu_or_gpu_value"] != "SwiftShader" {
				glog.Fatalf("Please update defaultSwarmDimensions for SwiftShader %s %s %s.", parts["os"], parts["model"], parts["cpu_or_gpu_value"])
			}
			d["cpu"] = "x86-64-Haswell_GCE"
			d["os"] = DEFAULT_OS_LINUX_GCE
			d["machine_type"] = MACHINE_TYPE_SMALL
		} else if strings.Contains(parts["extra_config"], "SKQP") && parts["cpu_or_gpu_value"] == "Emulator" {
			if parts["model"] != "NUC7i5BNK" || d["os"] != DEFAULT_OS_DEBIAN {
				glog.Fatalf("Please update defaultSwarmDimensions for SKQP::Emulator %s %s.", parts["os"], parts["model"])
			}
			d["cpu"] = "x86-64-i5-7260U"
			d["os"] = "Debian-9.4"
			// KVM means Kernel-based Virtual Machine, that is, can this vm virtualize commands
			// For us, this means, can we run an x86 android emulator on it.
			// kjlubick tried running this on GCE, but it was a bit too slow on the large install.
			// So, we run on bare metal machines in the Skolo (that should also have KVM).
			d["kvm"] = "1"
			d["docker_installed"] = "true"
		} else if parts["cpu_or_gpu"] == "CPU" {
			modelMapping, ok := map[string]map[string]string{
				"AVX": {
					"Golo": "x86-64-E5-2670",
				},
				"AVX2": {
					"GCE":            "x86-64-Haswell_GCE",
					"MacBookPro11.5": "x86-64-i7-4870HQ",
					"NUC5i7RYH":      "x86-64-i7-5557U",
				},
				"AVX512": {
					"GCE": "x86-64-Skylake_GCE",
				},
			}[parts["cpu_or_gpu_value"]]
			if !ok {
				glog.Fatalf("Entry %q not found in CPU mapping.", parts["cpu_or_gpu_value"])
			}
			cpu, ok := modelMapping[parts["model"]]
			if !ok {
				glog.Fatalf("Entry %q not found in %q model mapping.", parts["model"], parts["cpu_or_gpu_value"])
			}
			d["cpu"] = cpu
			if parts["model"] == "GCE" && d["os"] == DEFAULT_OS_DEBIAN {
				d["os"] = DEFAULT_OS_LINUX_GCE
			}
			if parts["model"] == "GCE" && d["cpu"] == "x86-64-Haswell_GCE" {
				d["machine_type"] = MACHINE_TYPE_MEDIUM
			}
		} else {
			if strings.Contains(parts["extra_config"], "CanvasKit") {
				// GPU is defined for the WebGL version of CanvasKit, but
				// it can still run on a GCE instance.
				return wasmGceDimensions()
			} else if strings.Contains(parts["os"], "Win") {
				gpu, ok := map[string]string{
					"GT610":         "10de:104a-23.21.13.9101",
					"GTX660":        "10de:11c0-25.21.14.1634",
					"GTX960":        "10de:1401-25.21.14.1634",
					"IntelHD4400":   "8086:0a16-20.19.15.4963",
					"IntelIris540":  "8086:1926-25.20.100.6326",
					"IntelIris6100": "8086:162b-20.19.15.4963",
					"RadeonHD7770":  "1002:683d-24.20.13001.1010",
					"RadeonR9M470X": "1002:6646-24.20.13001.1010",
					"QuadroP400":    "10de:1cb3-25.21.14.1678",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in Win GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else if strings.Contains(parts["os"], "Ubuntu") || strings.Contains(parts["os"], "Debian") {
				gpu, ok := map[string]string{
					// Intel drivers come from CIPD, so no need to specify the version here.
					"IntelBayTrail": "8086:0f31",
					"IntelHD2000":   "8086:0102",
					"IntelHD405":    "8086:22b1",
					"IntelIris640":  "8086:5926",
					"QuadroP400":    "10de:1cb3-384.59",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in Ubuntu GPU mapping.", parts["cpu_or_gpu_value"])
				}
				if parts["os"] == "Ubuntu18" && parts["cpu_or_gpu_value"] == "QuadroP400" {
					// Ubuntu18 has a slightly newer GPU driver.
					gpu = "10de:1cb3-390.87"
				}
				d["gpu"] = gpu
			} else if strings.Contains(parts["os"], "Mac") {
				gpu, ok := map[string]string{
					"IntelHD6000":   "8086:1626",
					"IntelHD615":    "8086:591e",
					"IntelIris5100": "8086:0a2e",
					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in Mac GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
				// Yuck. We have two different types of MacMini7,1 with the same GPU but different CPUs.
				if parts["cpu_or_gpu_value"] == "IntelIris5100" {
					// Run all tasks on Golo machines for now.
					d["cpu"] = "x86-64-i7-4578U"
				}
			} else if strings.Contains(parts["os"], "ChromeOS") {
				version, ok := map[string]string{
					"MaliT604":           "10575.22.0",
					"MaliT764":           "10575.22.0",
					"MaliT860":           "10575.22.0",
					"PowerVRGX6250":      "10575.22.0",
					"TegraK1":            "10575.22.0",
					"IntelHDGraphics615": "10575.22.0",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in ChromeOS GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = parts["cpu_or_gpu_value"]
				d["release_version"] = version
			} else {
				glog.Fatalf("Unknown GPU mapping for OS %q.", parts["os"])
			}
		}
	} else {
		d["gpu"] = "none"
		if d["os"] == DEFAULT_OS_DEBIAN {
			if strings.Contains(parts["extra_config"], "PathKit") || strings.Contains(parts["extra_config"], "CanvasKit") {
				return wasmGceDimensions()
			}
			if parts["role"] == "BuildStats" {
				// Doesn't require a lot of resources.
				return linuxGceDimensions(MACHINE_TYPE_MEDIUM)
			}
			// Use many-core machines for Build tasks.
			return linuxGceDimensions(MACHINE_TYPE_LARGE)
		} else if d["os"] == DEFAULT_OS_WIN {
			// Windows CPU bots.
			d["cpu"] = "x86-64-Haswell_GCE"
			// Use many-core machines for Build tasks.
			d["machine_type"] = MACHINE_TYPE_LARGE
		} else if d["os"] == DEFAULT_OS_MAC {
			// Mac CPU bots.
			d["cpu"] = "x86-64-E5-2697_v2"
		}
	}

	rv := make([]string, 0, len(d))
	for k, v := range d {
		rv = append(rv, fmt.Sprintf("%s:%s", k, v))
	}
	sort.Strings(rv)
	return rv
}

// relpath returns the relative path to the given file from the config file.
func relpath(f string) string {
	_, filename, _, _ := runtime.Caller(0)
	dir := path.Dir(filename)
	rel := dir
	if *cfgFile != "" {
		rel = path.Dir(*cfgFile)
	}
	rv, err := filepath.Rel(rel, path.Join(dir, f))
	if err != nil {
		sklog.Fatal(err)
	}
	return rv
}

// bundleRecipes generates the task to bundle and isolate the recipes.
func bundleRecipes(b *specs.TasksCfgBuilder) string {
	pkgs := append([]*specs.CipdPackage{}, CIPD_PKGS_GIT...)
	pkgs = append(pkgs, CIPD_PKGS_PYTHON...)
	b.MustAddTask(BUNDLE_RECIPES_NAME, &specs.TaskSpec{
		CipdPackages: pkgs,
		Command: []string{
			"/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR,
		},
		Dimensions: linuxGceDimensions(MACHINE_TYPE_SMALL),
		EnvPrefixes: map[string][]string{
			"PATH": []string{"cipd_bin_packages", "cipd_bin_packages/bin"},
		},
		Isolate: relpath("swarm_recipe.isolate"),
	})
	return BUNDLE_RECIPES_NAME
}

type isolateAssetCfg struct {
	cipdPkg string
	path    string
}
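
// ISOLATE_ASSET_MAPPING maps the Isolate-* task names above to the CIPD asset
// they isolate and the path at which the asset is installed.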
var ISOLATE_ASSET_MAPPING = map[string]isolateAssetCfg{
	ISOLATE_GCLOUD_LINUX_NAME: {
		cipdPkg: "gcloud_linux",
		path:    "gcloud_linux",
	},
	ISOLATE_GO_DEPS_NAME: {
		cipdPkg: "go_deps",
		path:    "go_deps",
	},
	ISOLATE_GO_LINUX_NAME: {
		cipdPkg: "go",
		path:    "go",
	},
	ISOLATE_SKIMAGE_NAME: {
		cipdPkg: "skimage",
		path:    "skimage",
	},
	ISOLATE_SKP_NAME: {
		cipdPkg: "skp",
		path:    "skp",
	},
	ISOLATE_SVG_NAME: {
		cipdPkg: "svg",
		path:    "svg",
	},
	ISOLATE_NDK_LINUX_NAME: {
		cipdPkg: "android_ndk_linux",
		path:    "android_ndk_linux",
	},
	ISOLATE_SDK_LINUX_NAME: {
		cipdPkg: "android_sdk_linux",
		path:    "android_sdk_linux",
	},
	ISOLATE_WIN_TOOLCHAIN_NAME: {
		cipdPkg: "win_toolchain",
		path:    "t",
	},
	ISOLATE_WIN_VULKAN_SDK_NAME: {
		cipdPkg: "win_vulkan_sdk",
		path:    "win_vulkan_sdk",
	},
}

// isolateCIPDAsset generates a task to isolate the given CIPD asset.
func isolateCIPDAsset(b *specs.TasksCfgBuilder, name string) string {
	asset := ISOLATE_ASSET_MAPPING[name]
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages: []*specs.CipdPackage{
			b.MustGetCipdPackageFromAsset(asset.cipdPkg),
		},
		Command:    []string{"/bin/cp", "-rL", asset.path, "${ISOLATED_OUTDIR}"},
		Dimensions: linuxGceDimensions(MACHINE_TYPE_SMALL),
		Isolate:    relpath("empty.isolate"),
	})
	return name
}

// getIsolatedCIPDDeps returns the slice of Isolate_* tasks a given task needs.
// This allows us to save time on I/O bound bots, like the RPIs.
func getIsolatedCIPDDeps(parts map[string]string) []string {
	deps := []string{}
	// Only do this on the RPIs for now. Other, faster machines shouldn't see much
	// benefit and we don't need the extra complexity, for now.
	rpiOS := []string{"Android", "ChromeOS", "iOS"}

	if o := parts["os"]; strings.Contains(o, "Chromecast") {
		// Chromecasts don't have enough disk space to fit all of the content,
		// so we do a subset of the skps.
		deps = append(deps, ISOLATE_SKP_NAME)
	} else if e := parts["extra_config"]; strings.Contains(e, "Skpbench") {
		// Skpbench only needs skps.
		deps = append(deps, ISOLATE_SKP_NAME)
	} else if util.In(o, rpiOS) {
		deps = append(deps, ISOLATE_SKP_NAME)
		deps = append(deps, ISOLATE_SVG_NAME)
		deps = append(deps, ISOLATE_SKIMAGE_NAME)
	}

	return deps
}

// usesGit adds attributes to tasks which use git.
func usesGit(t *specs.TaskSpec, name string) {
	t.Caches = append(t.Caches, CACHES_GIT...)
	if !strings.Contains(name, "NoDEPS") {
		t.Caches = append(t.Caches, CACHES_WORKDIR...)
	}
	t.CipdPackages = append(t.CipdPackages, CIPD_PKGS_GIT...)
}

// usesDocker adds attributes to tasks which use docker.
func usesDocker(t *specs.TaskSpec, name string) {
	// Currently, just the WASM (using EMCC) builder uses Docker.
	if strings.Contains(name, "EMCC") {
		t.Caches = append(t.Caches, CACHES_DOCKER...)
	}
}

// timeout sets the timeout(s) for this task.
func timeout(task *specs.TaskSpec, timeout time.Duration) {
	task.ExecutionTimeout = timeout
	task.IoTimeout = timeout // With kitchen, step logs don't count toward IoTimeout.
}

// compile generates a compile task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
	task := kitchenTask(name, "compile", "swarm_recipe.isolate", SERVICE_ACCOUNT_COMPILE, swarmDimensions(parts), nil, OUTPUT_BUILD)
	usesGit(task, name)
	usesDocker(task, name)

	// Android bots require a toolchain.
	if strings.Contains(name, "Android") {
		if parts["extra_config"] == "Android_Framework" {
			// Do not need a toolchain when building the
			// Android Framework.
		} else if strings.Contains(name, "Mac") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("android_ndk_darwin"))
		} else if strings.Contains(name, "Win") {
			pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
			pkg.Path = "n"
			task.CipdPackages = append(task.CipdPackages, pkg)
		} else if !strings.Contains(name, "SKQP") {
			task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_NDK_LINUX_NAME))
		}
	} else if strings.Contains(name, "Chromecast") {
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("cast_toolchain"))
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("chromebook_arm_gles"))
	} else if strings.Contains(name, "Chromebook") {
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("clang_linux"))
		if parts["target_arch"] == "x86_64" {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("chromebook_x86_64_gles"))
		} else if parts["target_arch"] == "arm" {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("armhf_sysroot"))
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("chromebook_arm_gles"))
		}
	} else if strings.Contains(name, "Debian") {
		if strings.Contains(name, "Clang") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("clang_linux"))
		}
		if strings.Contains(name, "Vulkan") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("linux_vulkan_sdk"))
		}
		if parts["target_arch"] == "mips64el" || parts["target_arch"] == "loongson3a" {
			if parts["compiler"] != "GCC" {
				glog.Fatalf("mips64el toolchain is GCC, but compiler is %q in %q", parts["compiler"], name)
			}
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("mips64el_toolchain_linux"))
		}
		if strings.Contains(name, "SwiftShader") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("cmake_linux"))
		}
		if strings.Contains(name, "OpenCL") {
			task.CipdPackages = append(task.CipdPackages,
				b.MustGetCipdPackageFromAsset("opencl_headers"),
				b.MustGetCipdPackageFromAsset("opencl_ocl_icd_linux"),
			)
		}
	} else if strings.Contains(name, "Win") {
		task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_WIN_TOOLCHAIN_NAME))
		if strings.Contains(name, "Clang") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("clang_win"))
		}
		if strings.Contains(name, "Vulkan") {
			task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_WIN_VULKAN_SDK_NAME))
		}
		if strings.Contains(name, "OpenCL") {
			task.CipdPackages = append(task.CipdPackages,
				b.MustGetCipdPackageFromAsset("opencl_headers"),
			)
		}
	} else if strings.Contains(name, "Mac") {
		task.CipdPackages = append(task.CipdPackages, CIPD_PKGS_XCODE...)
		task.Caches = append(task.Caches, &specs.Cache{
			Name: "xcode",
			Path: "cache/Xcode.app",
		})
		if strings.Contains(name, "CommandBuffer") {
			timeout(task, 2*time.Hour)
		}
		if strings.Contains(name, "MoltenVK") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("moltenvk"))
		}
	}

	task.MaxAttempts = 2

	// Add the task.
	b.MustAddTask(name, task)

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in JOBS.
	if !util.In(name, JOBS) {
		glog.Fatalf("Job %q is missing from the JOBS list!", name)
	}

	// Upload the skiaserve binary only for Linux Android compile bots.
	// See skbug.com/7399 for context.
	if parts["configuration"] == "Release" &&
		parts["extra_config"] == "Android" &&
		!strings.Contains(parts["os"], "Win") &&
		!strings.Contains(parts["os"], "Mac") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		task := kitchenTask(uploadName, "upload_skiaserve", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_BINARY, linuxGceDimensions(MACHINE_TYPE_SMALL), nil, OUTPUT_NONE)
		task.Dependencies = append(task.Dependencies, name)
		b.MustAddTask(uploadName, task)
		return uploadName
	}

	return name
}

// recreateSKPs generates a RecreateSKPs task. Returns the name of the last
// task in the generated chain of tasks, which the Job should add as a
// dependency.
func recreateSKPs(b *specs.TasksCfgBuilder, name string) string {
	dims := []string{
		"pool:SkiaCT",
		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
	}
	task := kitchenTask(name, "recreate_skps", "swarm_recipe.isolate", SERVICE_ACCOUNT_RECREATE_SKPS, dims, nil, OUTPUT_NONE)
	task.CipdPackages = append(task.CipdPackages, CIPD_PKGS_GIT...)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("go"))
	task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_GO_DEPS_NAME))
	timeout(task, 4*time.Hour)
	b.MustAddTask(name, task)
	return name
}

// updateGoDEPS generates an UpdateGoDEPS task. Returns the name of the last
// task in the generated chain of tasks, which the Job should add as a
// dependency.
func updateGoDEPS(b *specs.TasksCfgBuilder, name string) string {
	dims := linuxGceDimensions(MACHINE_TYPE_LARGE)
	task := kitchenTask(name, "update_go_deps", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPDATE_GO_DEPS, dims, nil, OUTPUT_NONE)
	task.CipdPackages = append(task.CipdPackages, CIPD_PKGS_GIT...)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("go"))
	task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_GO_DEPS_NAME))
	b.MustAddTask(name, task)
	return name
}

// checkGeneratedFiles verifies that no generated SKSL files have been edited
// by hand.
func checkGeneratedFiles(b *specs.TasksCfgBuilder, name string) string {
	task := kitchenTask(name, "check_generated_files", "swarm_recipe.isolate", SERVICE_ACCOUNT_COMPILE, linuxGceDimensions(MACHINE_TYPE_LARGE), nil, OUTPUT_NONE)
	task.Caches = append(task.Caches, CACHES_WORKDIR...)
	b.MustAddTask(name, task)
	return name
}

// housekeeper generates a Housekeeper task. Returns the name of the last task
// in the generated chain of tasks, which the Job should add as a dependency.
func housekeeper(b *specs.TasksCfgBuilder, name string) string {
	task := kitchenTask(name, "housekeeper", "swarm_recipe.isolate", SERVICE_ACCOUNT_HOUSEKEEPER, linuxGceDimensions(MACHINE_TYPE_SMALL), nil, OUTPUT_NONE)
	usesGit(task, name)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("go"))
	b.MustAddTask(name, task)
	return name
}

// bookmaker generates a Bookmaker task. Returns the name of the last task
// in the generated chain of tasks, which the Job should add as a dependency.
func bookmaker(b *specs.TasksCfgBuilder, name, compileTaskName string) string {
	task := kitchenTask(name, "bookmaker", "swarm_recipe.isolate", SERVICE_ACCOUNT_BOOKMAKER, linuxGceDimensions(MACHINE_TYPE_SMALL), nil, OUTPUT_NONE)
	task.Caches = append(task.Caches, CACHES_WORKDIR...)
	task.CipdPackages = append(task.CipdPackages, CIPD_PKGS_GIT...)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("go"))
	task.Dependencies = append(task.Dependencies, compileTaskName)
	timeout(task, 2*time.Hour)
	b.MustAddTask(name, task)
	return name
}

// androidFrameworkCompile generates an Android Framework Compile task. Returns
// the name of the last task in the generated chain of tasks, which the Job
// should add as a dependency.
func androidFrameworkCompile(b *specs.TasksCfgBuilder, name string) string {
	task := kitchenTask(name, "android_compile", "swarm_recipe.isolate", SERVICE_ACCOUNT_COMPILE, linuxGceDimensions(MACHINE_TYPE_SMALL), nil, OUTPUT_NONE)
	task.MaxAttempts = 1
	timeout(task, time.Hour)
	b.MustAddTask(name, task)
	return name
}

// infra generates an infra_tests task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func infra(b *specs.TasksCfgBuilder, name string) string {
	task := kitchenTask(name, "infra", "swarm_recipe.isolate", SERVICE_ACCOUNT_COMPILE, linuxGceDimensions(MACHINE_TYPE_SMALL), nil, OUTPUT_NONE)
	usesGit(task, name)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("go"))
	task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_GO_DEPS_NAME))
	b.MustAddTask(name, task)
	return name
}
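
// buildstats generates a task to compute statistics (e.g. binary size, via bloaty)
// for the compiled binaries. Returns the name of the last task in the generated
// chain of tasks, which the Job should add as a dependency.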
func buildstats(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string) string {
	task := kitchenTask(name, "compute_buildstats", "swarm_recipe.isolate", "", swarmDimensions(parts), nil, OUTPUT_PERF)
	task.Dependencies = append(task.Dependencies, compileTaskName)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("bloaty"))
	b.MustAddTask(name, task)

	// Upload release results (for tracking in perf)
	// We have some jobs that are FYI (e.g. Debug-CanvasKit)
	if strings.Contains(name, "Release") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		extraProps := map[string]string{
			"gs_bucket": CONFIG.GsBucketNano,
		}
		uploadTask := kitchenTask(name, "upload_buildstats_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_NANO, linuxGceDimensions(MACHINE_TYPE_SMALL), extraProps, OUTPUT_NONE)
		uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
		uploadTask.Dependencies = append(uploadTask.Dependencies, name)
		b.MustAddTask(uploadName, uploadTask)
		return uploadName
	}

	return name
}
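
// getParentRevisionName returns the name of the ParentRevision compile task
// corresponding to the given compile task.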
func getParentRevisionName(compileTaskName string, parts map[string]string) string {
	if parts["extra_config"] == "" {
		return compileTaskName + "-ParentRevision"
	} else {
		return compileTaskName + "_ParentRevision"
	}
}

// calmbench generates a calmbench task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func calmbench(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName, compileParentName string) string {
	task := kitchenTask(name, "calmbench", "calmbench.isolate", "", swarmDimensions(parts), nil, OUTPUT_PERF)
	usesGit(task, name)
	task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("go"))
	task.Dependencies = append(task.Dependencies, compileTaskName, compileParentName, ISOLATE_SKP_NAME, ISOLATE_SVG_NAME)
	task.MaxAttempts = 2
	if parts["cpu_or_gpu_value"] == "QuadroP400" {
		// Specify "rack" dimension for consistent test results.
		// See https://bugs.chromium.org/p/chromium/issues/detail?id=784662&desc=2#c34
		// for more context.
		if parts["os"] == "Ubuntu18" {
			task.Dimensions = append(task.Dimensions, "rack:2")
		} else {
			task.Dimensions = append(task.Dimensions, "rack:1")
		}
	}
	b.MustAddTask(name, task)

	// Upload results if necessary.
	if strings.Contains(name, "Release") && doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		extraProps := map[string]string{
			"gs_bucket": CONFIG.GsBucketCalm,
		}
		uploadTask := kitchenTask(name, "upload_calmbench_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_CALMBENCH, linuxGceDimensions(MACHINE_TYPE_SMALL), extraProps, OUTPUT_NONE)
		uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
		uploadTask.Dependencies = append(uploadTask.Dependencies, name)
		b.MustAddTask(uploadName, uploadTask)
		return uploadName
	}

	return name
}

// doUpload indicates whether the given Job should upload its results.
func doUpload(name string) bool {
	for _, s := range CONFIG.NoUpload {
		m, err := regexp.MatchString(s, name)
		if err != nil {
			glog.Fatal(err)
		}
		if m {
			return false
		}
	}
	return true
}
// test generates a Test task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
2016-10-20 18:04:31 +00:00
func test ( b * specs . TasksCfgBuilder , name string , parts map [ string ] string , compileTaskName string , pkgs [ ] * specs . CipdPackage ) string {
2018-03-07 19:44:44 +00:00
recipe := "test"
2018-04-17 19:55:57 +00:00
if strings . Contains ( name , "SKQP" ) {
recipe = "skqp_test"
2018-10-08 19:07:42 +00:00
if strings . Contains ( name , "Emulator" ) {
recipe = "test_skqp_emulator"
}
2018-06-28 21:43:08 +00:00
} else if strings . Contains ( name , "OpenCL" ) {
// TODO(dogben): Longer term we may not want this to be called a "Test" task, but until we start
// running hs_bench or kx, it will be easier to fit into the current job name schema.
recipe = "compute_test"
2018-08-09 14:00:02 +00:00
} else if strings . Contains ( name , "PathKit" ) {
recipe = "test_pathkit"
2018-10-09 13:36:35 +00:00
} else if strings . Contains ( name , "CanvasKit" ) {
recipe = "test_canvaskit"
2018-08-28 14:52:18 +00:00
} else if strings . Contains ( name , "LottieWeb" ) {
recipe = "test_lottie_web"
2018-04-17 19:55:57 +00:00
}
2018-08-02 06:51:38 +00:00
extraProps := map [ string ] string {
"gold_hashes_url" : CONFIG . GoldHashesURL ,
}
2018-04-19 13:36:45 +00:00
iid := internalHardwareLabel ( parts )
if iid != nil {
extraProps [ "internal_hardware_label" ] = strconv . Itoa ( * iid )
}
2018-08-09 14:00:02 +00:00
isolate := "test_skia_bundled.isolate"
2018-10-09 13:36:35 +00:00
if strings . Contains ( name , "CanvasKit" ) || strings . Contains ( name , "Emulator" ) || strings . Contains ( name , "LottieWeb" ) || strings . Contains ( name , "PathKit" ) {
2018-08-09 14:00:02 +00:00
isolate = "swarm_recipe.isolate"
}
task := kitchenTask ( name , recipe , isolate , "" , swarmDimensions ( parts ) , extraProps , OUTPUT_TEST )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , pkgs ... )
2018-07-19 17:27:49 +00:00
if strings . Contains ( name , "Lottie" ) {
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "lottie-samples" ) )
}
2018-08-28 14:52:18 +00:00
if ! strings . Contains ( name , "LottieWeb" ) {
// Test.+LottieWeb doesn't require anything in Skia to be compiled.
task . Dependencies = append ( task . Dependencies , compileTaskName )
}
2018-04-17 11:28:46 +00:00
if strings . Contains ( name , "Android_ASAN" ) {
2018-04-17 19:55:57 +00:00
task . Dependencies = append ( task . Dependencies , isolateCIPDAsset ( b , ISOLATE_NDK_LINUX_NAME ) )
2018-04-17 11:28:46 +00:00
}
2018-03-07 19:44:44 +00:00
if strings . Contains ( name , "SKQP" ) {
2018-10-08 19:07:42 +00:00
if ! strings . Contains ( name , "Emulator" ) {
task . Dependencies = append ( task . Dependencies , isolateCIPDAsset ( b , ISOLATE_GCLOUD_LINUX_NAME ) )
}
2017-04-04 13:06:16 +00:00
}
2017-05-15 12:30:27 +00:00
if deps := getIsolatedCIPDDeps ( parts ) ; len ( deps ) > 0 {
2018-04-17 19:55:57 +00:00
task . Dependencies = append ( task . Dependencies , deps ... )
2017-05-11 17:35:23 +00:00
}
2018-04-17 19:55:57 +00:00
task . Expiration = 20 * time . Hour
2018-10-22 14:55:15 +00:00
task . MaxAttempts = 2
2018-04-27 17:14:38 +00:00
timeout ( task , 4 * time . Hour )
2016-11-08 17:55:32 +00:00
	if strings.Contains(parts["extra_config"], "Valgrind") {
		timeout(task, 9*time.Hour)
		task.Expiration = 48 * time.Hour
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("valgrind"))
		task.Dimensions = append(task.Dimensions, "valgrind:1")
	} else if strings.Contains(parts["extra_config"], "MSAN") {
		timeout(task, 9*time.Hour)
	} else if parts["arch"] == "x86" && parts["configuration"] == "Debug" {
		// skia:6737
		timeout(task, 6*time.Hour)
	}
	b.MustAddTask(name, task)

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		extraProps := map[string]string{
			"gs_bucket": CONFIG.GsBucketGm,
		}
		uploadTask := kitchenTask(name, "upload_dm_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_GM, linuxGceDimensions(MACHINE_TYPE_SMALL), extraProps, OUTPUT_NONE)
		uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
		uploadTask.Dependencies = append(uploadTask.Dependencies, name)
		b.MustAddTask(uploadName, uploadTask)
		return uploadName
	}
	return name
}

// perf generates a Perf task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
	recipe := "perf"
	isolate := relpath("perf_skia_bundled.isolate")
	if strings.Contains(parts["extra_config"], "Skpbench") {
		recipe = "skpbench"
		isolate = relpath("skpbench_skia_bundled.isolate")
	} else if strings.Contains(name, "PathKit") {
		recipe = "perf_pathkit"
	} else if strings.Contains(name, "CanvasKit") {
		recipe = "perf_canvaskit"
	}
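	// Assemble the Swarming task that runs the perf recipe via kitchen; no extra
	// recipe properties are needed here.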
	task := kitchenTask(name, recipe, isolate, "", swarmDimensions(parts), nil, OUTPUT_PERF)
	task.CipdPackages = append(task.CipdPackages, pkgs...)
	task.Dependencies = append(task.Dependencies, compileTaskName)
	task.Expiration = 20 * time.Hour
	task.MaxAttempts = 2
	timeout(task, 4*time.Hour)
	if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
		task.Dependencies = append(task.Dependencies, deps...)
	}

	if strings.Contains(parts["extra_config"], "Valgrind") {
		timeout(task, 9*time.Hour)
		task.Expiration = 48 * time.Hour
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("valgrind"))
		task.Dimensions = append(task.Dimensions, "valgrind:1")
	} else if strings.Contains(parts["extra_config"], "MSAN") {
		timeout(task, 9*time.Hour)
	} else if parts["arch"] == "x86" && parts["configuration"] == "Debug" {
		// skia:6737
		timeout(task, 6*time.Hour)
	}
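
	// Pass the internal hardware label to the recipe via a command-line argument.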
	iid := internalHardwareLabel(parts)
	if iid != nil {
		task.Command = append(task.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
	}
	if parts["cpu_or_gpu_value"] == "QuadroP400" {
		// Specify "rack" dimension for consistent test results.
		// See https://bugs.chromium.org/p/chromium/issues/detail?id=784662&desc=2#c34
		// for more context.
		if parts["os"] == "Ubuntu18" {
			task.Dimensions = append(task.Dimensions, "rack:2")
		} else {
			task.Dimensions = append(task.Dimensions, "rack:1")
		}
	}
	b.MustAddTask(name, task)

	// Upload results if necessary.
	if strings.Contains(name, "Release") && doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		extraProps := map[string]string{
			"gs_bucket": CONFIG.GsBucketNano,
		}
		uploadTask := kitchenTask(name, "upload_nano_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_NANO, linuxGceDimensions(MACHINE_TYPE_SMALL), extraProps, OUTPUT_NONE)
		uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
		uploadTask.Dependencies = append(uploadTask.Dependencies, name)
		b.MustAddTask(uploadName, uploadTask)
		return uploadName
	}
	return name
}

// presubmit generates a task which runs the presubmit for this repo.
func presubmit(b *specs.TasksCfgBuilder, name string) string {
	extraProps := map[string]string{
		"category":         "cq",
		"patch_gerrit_url": "https://skia-review.googlesource.com",
		"patch_project":    "skia",
		"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
		"reason":           "CQ",
		"repo_name":        "skia",
	}
	// Use MACHINE_TYPE_LARGE because it seems to save time versus MEDIUM and we want presubmit to be
	// fast.
	task := kitchenTask(name, "run_presubmit", "empty.isolate", SERVICE_ACCOUNT_COMPILE, linuxGceDimensions(MACHINE_TYPE_LARGE), extraProps, OUTPUT_NONE)
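
	// replaceArg updates the value following an existing flag in the generated
	// command, or appends the flag/value pair if the flag is not present.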
2018-04-25 19:09:22 +00:00
replaceArg := func ( key , value string ) {
found := false
for idx , arg := range task . Command {
if arg == key {
task . Command [ idx + 1 ] = value
found = true
}
}
if ! found {
task . Command = append ( task . Command , key , value )
}
}
replaceArg ( "-repository" , "https://chromium.googlesource.com/chromium/tools/build" )
replaceArg ( "-revision" , "HEAD" )
2018-04-27 17:14:38 +00:00
usesGit ( task , name )
2018-04-25 19:09:22 +00:00
task . Dependencies = [ ] string { } // No bundled recipes for this one.
b . MustAddTask ( name , task )
return name
}
2016-09-30 19:53:12 +00:00
// process generates tasks and jobs for the given job name.
func process(b *specs.TasksCfgBuilder, name string) {
	var priority float64 // Leave as default for most jobs.
	deps := []string{}

	// Bundle Recipes.
	if name == BUNDLE_RECIPES_NAME {
		deps = append(deps, bundleRecipes(b))
	}

	// Isolate CIPD assets.
	if _, ok := ISOLATE_ASSET_MAPPING[name]; ok {
		deps = append(deps, isolateCIPDAsset(b, name))
	}

	parts, err := jobNameSchema.ParseJobName(name)
	if err != nil {
		glog.Fatal(err)
	}

	// RecreateSKPs.
	if strings.Contains(name, "RecreateSKPs") {
		deps = append(deps, recreateSKPs(b, name))
	}

	// Update Go DEPS.
	if strings.Contains(name, "UpdateGoDEPS") {
		deps = append(deps, updateGoDEPS(b, name))
	}

	// Infra tests.
	if name == "Housekeeper-PerCommit-InfraTests" {
		deps = append(deps, infra(b, name))
	}

	// Compile bots.
	if parts["role"] == "Build" {
		if parts["extra_config"] == "Android_Framework" {
			// Android Framework compile tasks use a different recipe.
			deps = append(deps, androidFrameworkCompile(b, name))
		} else {
			deps = append(deps, compile(b, name, parts))
		}
	}

	// Most remaining bots need a compile task.
	compileTaskName := deriveCompileTaskName(name, parts)
	compileTaskParts, err := jobNameSchema.ParseJobName(compileTaskName)
	if err != nil {
		glog.Fatal(err)
	}
	compileParentName := getParentRevisionName(compileTaskName, compileTaskParts)
	compileParentParts, err := jobNameSchema.ParseJobName(compileParentName)
	if err != nil {
		glog.Fatal(err)
	}

	// These bots do not need a compile task.
	if parts["role"] != "Build" &&
		name != "Housekeeper-Nightly-UpdateGoDEPS" &&
		name != "Housekeeper-PerCommit-BundleRecipes" &&
		name != "Housekeeper-PerCommit-InfraTests" &&
		name != "Housekeeper-PerCommit-CheckGeneratedFiles" &&
		name != "Housekeeper-OnDemand-Presubmit" &&
		name != "Housekeeper-PerCommit" &&
		!strings.Contains(name, "Android_Framework") &&
		!strings.Contains(name, "RecreateSKPs") &&
		!strings.Contains(name, "Housekeeper-PerCommit-Isolate") &&
		!strings.Contains(name, "LottieWeb") {
		compile(b, compileTaskName, compileTaskParts)
		if parts["role"] == "Calmbench" {
			compile(b, compileParentName, compileParentParts)
		}
	}

	// Housekeepers.
	if name == "Housekeeper-PerCommit" {
		deps = append(deps, housekeeper(b, name))
	}
	if name == "Housekeeper-PerCommit-CheckGeneratedFiles" {
		deps = append(deps, checkGeneratedFiles(b, name))
	}
	if name == "Housekeeper-OnDemand-Presubmit" {
		priority = 1
		deps = append(deps, presubmit(b, name))
	}
	if strings.Contains(name, "Bookmaker") {
		deps = append(deps, bookmaker(b, name, compileTaskName))
	}

	// Common assets needed by the remaining bots.
	pkgs := []*specs.CipdPackage{}
	if deps := getIsolatedCIPDDeps(parts); len(deps) == 0 {
		pkgs = []*specs.CipdPackage{
			b.MustGetCipdPackageFromAsset("skimage"),
			b.MustGetCipdPackageFromAsset("skp"),
			b.MustGetCipdPackageFromAsset("svg"),
		}
	}

	if strings.Contains(name, "Ubuntu") || strings.Contains(name, "Debian") {
		if strings.Contains(name, "SAN") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("clang_linux"))
		}
		if strings.Contains(name, "Vulkan") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_sdk"))
		}
		if strings.Contains(name, "Intel") && strings.Contains(name, "GPU") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("mesa_intel_driver_linux"))
		}
		if strings.Contains(name, "OpenCL") {
			pkgs = append(pkgs,
				b.MustGetCipdPackageFromAsset("opencl_ocl_icd_linux"),
				b.MustGetCipdPackageFromAsset("opencl_intel_neo_linux"),
			)
		}
	}

	if strings.Contains(name, "ProcDump") {
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("procdump_win"))
	}
	if strings.Contains(name, "CanvasKit") || strings.Contains(name, "LottieWeb") || strings.Contains(name, "PathKit") {
		// Docker-based tests that don't need the standard CIPD assets.
		pkgs = []*specs.CipdPackage{}
	}

	// Test bots.
	if parts["role"] == "Test" {
		deps = append(deps, test(b, name, parts, compileTaskName, pkgs))
	}

	// Perf bots.
	if parts["role"] == "Perf" {
		deps = append(deps, perf(b, name, parts, compileTaskName, pkgs))
	}

	// Calmbench bots.
	if parts["role"] == "Calmbench" {
		deps = append(deps, calmbench(b, name, parts, compileTaskName, compileParentName))
	}

	// BuildStats bots. This computes things like binary size.
	if parts["role"] == "BuildStats" {
		deps = append(deps, buildstats(b, name, parts, compileTaskName))
	}

	// Add the Job spec.
	j := &specs.JobSpec{
		Priority:  priority,
		TaskSpecs: deps,
		Trigger:   specs.TRIGGER_ANY_BRANCH,
	}
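
	// Adjust the trigger based on the job name: nightly and weekly jobs run on a
	// schedule, Flutter and CommandBuffer jobs run only on the master branch, and
	// on-demand jobs (including Android Framework) run only when requested.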
	if strings.Contains(name, "-Nightly-") {
		j.Trigger = specs.TRIGGER_NIGHTLY
	} else if strings.Contains(name, "-Weekly-") {
		j.Trigger = specs.TRIGGER_WEEKLY
	} else if strings.Contains(name, "Flutter") || strings.Contains(name, "CommandBuffer") {
		j.Trigger = specs.TRIGGER_MASTER_ONLY
	} else if strings.Contains(name, "-OnDemand-") || strings.Contains(name, "Android_Framework") {
		j.Trigger = specs.TRIGGER_ON_DEMAND
	}
	b.MustAddJob(name, j)
}

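// loadJson reads the JSON file named by flag (falling back to defaultFlag if the
// flag is empty) and unmarshals it into val.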
func loadJson(flag *string, defaultFlag string, val interface{}) {
	if *flag == "" {
		*flag = defaultFlag
	}
	b, err := ioutil.ReadFile(*flag)
	if err != nil {
		glog.Fatal(err)
	}
	if err := json.Unmarshal(b, val); err != nil {
		glog.Fatal(err)
	}
}

// Regenerate the tasks.json file.
func main() {
	b := specs.MustNewTasksCfgBuilder()
	b.SetAssetsDir(*assetsDir)
	infraBots := path.Join(b.CheckoutRoot(), "infra", "bots")

	// Load the jobs from a JSON file.
	loadJson(jobsFile, path.Join(infraBots, "jobs.json"), &JOBS)

	// Load general config information from a JSON file.
	loadJson(cfgFile, path.Join(infraBots, "cfg.json"), &CONFIG)

	// Create the JobNameSchema.
	if *builderNameSchemaFile == "" {
		*builderNameSchemaFile = path.Join(b.CheckoutRoot(), "infra", "bots", "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	}
	schema, err := NewJobNameSchema(*builderNameSchemaFile)
	if err != nil {
		glog.Fatal(err)
	}
	jobNameSchema = schema

	// Create Tasks and Jobs.
	for _, name := range JOBS {
		process(b, name)
	}

	b.MustFinish()
}

// TODO(borenet): The below really belongs in its own file, probably next to the
// builder_name_schema.json file.

// schema is a sub-struct of JobNameSchema.
type schema struct {
	Keys         []string `json:"keys"`
	OptionalKeys []string `json:"optional_keys"`
	RecurseRoles []string `json:"recurse_roles"`
}

// JobNameSchema is a struct used for (de)constructing Job names in a
// predictable format.
type JobNameSchema struct {
	Schema map[string]*schema `json:"builder_name_schema"`
	Sep    string             `json:"builder_name_sep"`
}

// NewJobNameSchema returns a JobNameSchema instance based on the given JSON
// file.
func NewJobNameSchema(jsonFile string) (*JobNameSchema, error) {
	var rv JobNameSchema
	f, err := os.Open(jsonFile)
	if err != nil {
		return nil, err
	}
	defer util.Close(f)
	if err := json.NewDecoder(f).Decode(&rv); err != nil {
		return nil, err
	}
	return &rv, nil
}

// ParseJobName splits the given Job name into its component parts, according
// to the schema.
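//
// For example (a hypothetical illustration; the actual keys are defined in
// builder_name_schema.json), a Test job name of the form
// "Test-<os>-...-<arch>-<configuration>-<extra_config>" is split on Sep and
// mapped onto the role's keys, yielding entries such as parts["role"],
// parts["os"], parts["arch"], parts["configuration"], and parts["extra_config"].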
func (s *JobNameSchema) ParseJobName(n string) (map[string]string, error) {
	popFront := func(items []string) (string, []string, error) {
		if len(items) == 0 {
			return "", nil, fmt.Errorf("Invalid job name: %s (not enough parts)", n)
		}
		return items[0], items[1:], nil
	}

	result := map[string]string{}

	var parse func(int, string, []string) ([]string, error)
	parse = func(depth int, role string, parts []string) ([]string, error) {
		s, ok := s.Schema[role]
		if !ok {
			return nil, fmt.Errorf("Invalid job name; %q is not a valid role.", role)
		}
		if depth == 0 {
			result["role"] = role
		} else {
			result[fmt.Sprintf("sub-role-%d", depth)] = role
		}
		var err error
		for _, key := range s.Keys {
			var value string
			value, parts, err = popFront(parts)
			if err != nil {
				return nil, err
			}
			result[key] = value
		}
		for _, subRole := range s.RecurseRoles {
			if len(parts) > 0 && parts[0] == subRole {
				parts, err = parse(depth+1, parts[0], parts[1:])
				if err != nil {
					return nil, err
				}
			}
		}
		for _, key := range s.OptionalKeys {
			if len(parts) > 0 {
				var value string
				value, parts, err = popFront(parts)
				if err != nil {
					return nil, err
				}
				result[key] = value
			}
		}
		if len(parts) > 0 {
			return nil, fmt.Errorf("Invalid job name: %s (too many parts)", n)
		}
		return parts, nil
	}

	split := strings.Split(n, s.Sep)
	if len(split) < 2 {
		return nil, fmt.Errorf("Invalid job name: %s (not enough parts)", n)
	}
	role := split[0]
	split = split[1:]
	_, err := parse(0, role, split)
	return result, err
}

// MakeJobName assembles the given parts of a Job name, according to the schema.
func (s *JobNameSchema) MakeJobName(parts map[string]string) (string, error) {
	rvParts := make([]string, 0, len(parts))

	var process func(int, map[string]string) (map[string]string, error)
	process = func(depth int, parts map[string]string) (map[string]string, error) {
		roleKey := "role"
		if depth != 0 {
			roleKey = fmt.Sprintf("sub-role-%d", depth)
		}
		role, ok := parts[roleKey]
		if !ok {
			return nil, fmt.Errorf("Invalid job parts; missing key %q", roleKey)
		}
		s, ok := s.Schema[role]
		if !ok {
			return nil, fmt.Errorf("Invalid job parts; unknown role %q", role)
		}
		rvParts = append(rvParts, role)
		delete(parts, roleKey)
		for _, key := range s.Keys {
			value, ok := parts[key]
			if !ok {
				return nil, fmt.Errorf("Invalid job parts; missing %q", key)
			}
			rvParts = append(rvParts, value)
			delete(parts, key)
		}
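
		// If the schema recurses into a sub-role, emit the sub-role name and then
		// its own keys in turn.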
		if len(s.RecurseRoles) > 0 {
			subRoleKey := fmt.Sprintf("sub-role-%d", depth+1)
			subRole, ok := parts[subRoleKey]
			if !ok {
				return nil, fmt.Errorf("Invalid job parts; missing %q", subRoleKey)
			}
			rvParts = append(rvParts, subRole)
			delete(parts, subRoleKey)
			found := false
			for _, recurseRole := range s.RecurseRoles {
				if recurseRole == subRole {
					found = true
					var err error
					parts, err = process(depth+1, parts)
					if err != nil {
						return nil, err
					}
					break
				}
			}
			if !found {
				return nil, fmt.Errorf("Invalid job parts; unknown sub-role %q", subRole)
			}
		}
		for _, key := range s.OptionalKeys {
			if value, ok := parts[key]; ok {
				rvParts = append(rvParts, value)
				delete(parts, key)
			}
		}
		if len(parts) > 0 {
			return nil, fmt.Errorf("Invalid job parts: too many parts: %v", parts)
		}
		return parts, nil
	}

	// Copy the parts map, so that we can modify at will.
	partsCpy := make(map[string]string, len(parts))
	for k, v := range parts {
		partsCpy[k] = v
	}
	if _, err := process(0, partsCpy); err != nil {
		return "", err
	}
	return strings.Join(rvParts, s.Sep), nil
}