[infra] Move commands from isolates to gen_tasks.go

This has the side effect of bundling recipes for all tasks,
which will be required anyway to run recipes in the future.

Bug: skia:7050
Change-Id: Ia30a95c750f2a237a8bf60263b2981682673c043
Reviewed-on: https://skia-review.googlesource.com/81300
Commit-Queue: Eric Boren <borenet@google.com>
Reviewed-by: Kevin Lubick <kjlubick@google.com>
Eric Boren 2017-12-07 09:21:07 -05:00 committed by Skia Commit-Bot
parent 37cc2253ff
commit 66db75da67
39 changed files with 11053 additions and 8569 deletions
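
For context, the core of the change is that each task's command line moves out of the per-task .isolate files and into the TaskSpecs emitted by gen_tasks.go. The following is a minimal, illustrative Go sketch of that shape only; the taskSpec struct is a simplified stand-in for the real specs.TaskSpec, and the bundle paths are copied from the diff below.

package main

import (
	"fmt"
	"strings"
)

// taskSpec is a simplified stand-in for specs.TaskSpec, included only to
// show where the command now lives; it is not the real type.
type taskSpec struct {
	Command []string
	Isolate string
}

// recipes mirrors the helper added in gen_tasks.go below: pick the bundled
// recipes entry point based on the task's OS.
func recipes(parts map[string]string) string {
	if strings.Contains(parts["os"], "Win") {
		return "recipe_bundle/recipes.bat"
	}
	return "recipe_bundle/recipes"
}

func main() {
	parts := map[string]string{"os": "Debian-9.1"}
	// Before this change, the .isolate file carried the command, e.g.:
	//   'command': ['python', 'recipes.py', '--package', '../config/recipes.cfg', 'run', '--timestamps']
	// After it, gen_tasks.go writes the command onto the TaskSpec itself.
	t := taskSpec{
		Command: []string{recipes(parts), "run", "--timestamps", "--workdir", ".", "compile"},
		Isolate: "swarm_recipe.isolate",
	}
	fmt.Println(strings.Join(t.Command, " "))
}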


@ -1,14 +0,0 @@
{
'includes': [
'infrabots.isolate',
],
'variables': {
'command': [
'python', 'recipes.py', '--package', '../config/recipes.cfg', 'run', '--timestamps',
],
'files': [
'../../../.gclient',
'../config/recipes.cfg',
],
},
}

infra/bots/bundle_recipes.sh Executable file

@ -0,0 +1,13 @@
#!/bin/bash
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -x -e
cd skia
git init
git add .
git commit -m "Commit Recipes"
python infra/bots/recipes.py bundle --destination ${1}/recipe_bundle


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -1,6 +0,0 @@
{
'includes': [
'skia_repo.isolate',
'test_skia.isolate',
],
}


@ -1,6 +0,0 @@
{
'includes': [
'skia_repo.isolate',
'swarm_recipe.isolate',
],
}

infra/bots/empty.isolate Normal file

@ -0,0 +1 @@
{}


@ -77,17 +77,27 @@ var (
jobNameSchema *JobNameSchema
// Git 2.13.
cipdGit1 = &specs.CipdPackage{
Name: fmt.Sprintf("infra/git/${platform}"),
Path: "git",
Version: fmt.Sprintf("version:2.13.0.chromium9"),
}
cipdGit2 = &specs.CipdPackage{
Name: fmt.Sprintf("infra/tools/git/${platform}"),
Path: "git",
Version: fmt.Sprintf("git_revision:a78b5f3658c0578a017db48df97d20ac09822bcd"),
CIPD_PKGS_GIT = []*specs.CipdPackage{
&specs.CipdPackage{
Name: "infra/git/${platform}",
Path: "cipd_bin_packages",
Version: "version:2.14.1.chromium10",
},
&specs.CipdPackage{
Name: "infra/tools/git/${platform}",
Path: "cipd_bin_packages",
Version: "git_revision:fa7a52f4741f5e04bba0dfccc9b8456dc572c60b",
},
&specs.CipdPackage{
Name: "infra/tools/luci/git-credential-luci/${platform}",
Path: "cipd_bin_packages",
Version: "git_revision:fa7a52f4741f5e04bba0dfccc9b8456dc572c60b",
},
}
RECIPE_BUNDLE_UNIX = "recipe_bundle/recipes"
RECIPE_BUNDLE_WIN = "recipe_bundle/recipes.bat"
// Flags.
builderNameSchemaFile = flag.String("builder_name_schema", "", "Path to the builder_name_schema.json file. If not specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json from this repo.")
assetsDir = flag.String("assets_dir", "", "Directory containing assets.")
@ -364,6 +374,14 @@ func defaultSwarmDimensions(parts map[string]string) []string {
return rv
}
// recipes returns the path to the "recipes" executable.
func recipes(parts map[string]string) string {
if strings.Contains(parts["os"], "Win") {
return RECIPE_BUNDLE_WIN
}
return RECIPE_BUNDLE_UNIX
}
// relpath returns the relative path to the given file from the config file.
func relpath(f string) string {
_, filename, _, _ := runtime.Caller(0)
@ -382,14 +400,15 @@ func relpath(f string) string {
// bundleRecipes generates the task to bundle and isolate the recipes.
func bundleRecipes(b *specs.TasksCfgBuilder) string {
b.MustAddTask(BUNDLE_RECIPES_NAME, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{cipdGit1, cipdGit2},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "bundle_recipes",
fmt.Sprintf("buildername=%s", BUNDLE_RECIPES_NAME),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
CipdPackages: CIPD_PKGS_GIT,
Command: []string{
"/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR,
},
Isolate: relpath("bundle_recipes.isolate"),
Dimensions: linuxGceDimensions(),
EnvPrefixes: map[string][]string{
"PATH": []string{"cipd_bin_packages", "cipd_bin_packages/bin"},
},
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.7,
})
return BUNDLE_RECIPES_NAME
@ -403,45 +422,47 @@ func useBundledRecipes(parts map[string]string) bool {
}
type isolateAssetCfg struct {
isolateFile string
cipdPkg string
cipdPkg string
path string
}
var ISOLATE_ASSET_MAPPING = map[string]isolateAssetCfg{
ISOLATE_SKIMAGE_NAME: {
isolateFile: "isolate_skimage.isolate",
cipdPkg: "skimage",
cipdPkg: "skimage",
path: "skimage",
},
ISOLATE_SKP_NAME: {
isolateFile: "isolate_skp.isolate",
cipdPkg: "skp",
cipdPkg: "skp",
path: "skp",
},
ISOLATE_SVG_NAME: {
isolateFile: "isolate_svg.isolate",
cipdPkg: "svg",
cipdPkg: "svg",
path: "svg",
},
ISOLATE_NDK_LINUX_NAME: {
isolateFile: "isolate_ndk_linux.isolate",
cipdPkg: "android_ndk_linux",
cipdPkg: "android_ndk_linux",
path: "android_ndk_linux",
},
ISOLATE_WIN_TOOLCHAIN_NAME: {
isolateFile: "isolate_win_toolchain.isolate",
cipdPkg: "win_toolchain",
cipdPkg: "win_toolchain",
path: "t",
},
ISOLATE_WIN_VULKAN_SDK_NAME: {
isolateFile: "isolate_win_vulkan_sdk.isolate",
cipdPkg: "win_vulkan_sdk",
cipdPkg: "win_vulkan_sdk",
path: "win_vulkan_sdk",
},
}
// bundleRecipes generates the task to bundle and isolate the recipes.
// isolateCIPDAsset generates a task to isolate the given CIPD asset.
func isolateCIPDAsset(b *specs.TasksCfgBuilder, name string) string {
asset := ISOLATE_ASSET_MAPPING[name]
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{
b.MustGetCipdPackageFromAsset(ISOLATE_ASSET_MAPPING[name].cipdPkg),
b.MustGetCipdPackageFromAsset(asset.cipdPkg),
},
Command: []string{"/bin/cp", "-rL", asset.path, "${ISOLATED_OUTDIR}"},
Dimensions: linuxGceDimensions(),
Isolate: relpath(ISOLATE_ASSET_MAPPING[name].isolateFile),
Isolate: "empty.isolate",
Priority: 0.7,
})
return name
@ -476,7 +497,7 @@ func getIsolatedCIPDDeps(parts map[string]string) []string {
func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
// Collect the necessary CIPD packages.
pkgs := []*specs.CipdPackage{}
deps := []string{}
deps := []string{BUNDLE_RECIPES_NAME}
// Android bots require a toolchain.
if strings.Contains(name, "Android") {
@ -525,10 +546,9 @@ func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) str
// Add the task.
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: pkgs,
Dimensions: dimensions,
Dependencies: deps,
ExtraArgs: []string{
"--workdir", "../../..", "compile",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "compile",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -538,8 +558,10 @@ func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) str
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
Isolate: relpath("compile_skia.isolate"),
Priority: 0.8,
Dependencies: deps,
Dimensions: dimensions,
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
// All compile tasks are runnable as their own Job. Assert that the Job
// is listed in JOBS.
@ -552,13 +574,12 @@ func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) str
// recreateSKPs generates a RecreateSKPs task. Returns the name of the last
// task in the generated chain of tasks, which the Job should add as a
// dependency.
func recreateSKPs(b *specs.TasksCfgBuilder, name string) string {
func recreateSKPs(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
Dimensions: linuxGceDimensions(),
ExecutionTimeout: 4 * time.Hour,
ExtraArgs: []string{
"--workdir", "../../..", "recreate_skps",
CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "recreate_skps",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -568,9 +589,12 @@ func recreateSKPs(b *specs.TasksCfgBuilder, name string) string {
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
IoTimeout: 40 * time.Minute,
Isolate: relpath("compile_skia.isolate"),
Priority: 0.8,
Dependencies: []string{BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
ExecutionTimeout: 4 * time.Hour,
IoTimeout: 40 * time.Minute,
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return name
}
@ -578,12 +602,12 @@ func recreateSKPs(b *specs.TasksCfgBuilder, name string) string {
// updateMetaConfig generates a UpdateMetaConfig task. Returns the name of the
// last task in the generated chain of tasks, which the Job should add as a
// dependency.
func updateMetaConfig(b *specs.TasksCfgBuilder, name string) string {
func updateMetaConfig(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "update_meta_config",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "update_meta_config",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -593,24 +617,22 @@ func updateMetaConfig(b *specs.TasksCfgBuilder, name string) string {
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
Isolate: relpath("meta_config.isolate"),
Priority: 0.8,
Dependencies: []string{BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return name
}
// ctSKPs generates a CT SKPs task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func ctSKPs(b *specs.TasksCfgBuilder, name string) string {
func ctSKPs(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{},
Dimensions: []string{
"pool:SkiaCT",
fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
},
ExecutionTimeout: 24 * time.Hour,
ExtraArgs: []string{
"--workdir", "../../..", "ct_skps",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "ct_skps",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -620,21 +642,27 @@ func ctSKPs(b *specs.TasksCfgBuilder, name string) string {
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
IoTimeout: time.Hour,
Isolate: relpath("ct_skps_skia.isolate"),
Priority: 0.8,
Dependencies: []string{BUNDLE_RECIPES_NAME},
Dimensions: []string{
"pool:SkiaCT",
fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
},
ExecutionTimeout: 24 * time.Hour,
IoTimeout: time.Hour,
Isolate: relpath("skia_repo.isolate"),
Priority: 0.8,
})
return name
}
// checkGeneratedFiles verifies that no generated SKSL files have been edited
// by hand.
func checkGeneratedFiles(b *specs.TasksCfgBuilder, name string) string {
func checkGeneratedFiles(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "check_generated_files",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "check_generated_files",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -644,21 +672,22 @@ func checkGeneratedFiles(b *specs.TasksCfgBuilder, name string) string {
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
Isolate: relpath("compile_skia.isolate"),
Priority: 0.8,
Dependencies: []string{BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return name
}
// housekeeper generates a Housekeeper task. Returns the name of the last task
// in the generated chain of tasks, which the Job should add as a dependency.
func housekeeper(b *specs.TasksCfgBuilder, name, compileTaskName string) string {
func housekeeper(b *specs.TasksCfgBuilder, name, compileTaskName string, parts map[string]string) string {
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
Dependencies: []string{compileTaskName},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "housekeeper",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "housekeeper",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -668,20 +697,21 @@ func housekeeper(b *specs.TasksCfgBuilder, name, compileTaskName string) string
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
Isolate: relpath("housekeeper_skia.isolate"),
Priority: 0.8,
Dependencies: []string{compileTaskName, BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return name
}
// infra generates an infra_tests task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func infra(b *specs.TasksCfgBuilder, name string) string {
func infra(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
b.MustAddTask(name, &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "infra",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "infra",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -691,8 +721,11 @@ func infra(b *specs.TasksCfgBuilder, name string) string {
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
Isolate: relpath("infra_skia.isolate"),
Priority: 0.8,
CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
Dependencies: []string{BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return name
}
@ -702,9 +735,9 @@ func infra(b *specs.TasksCfgBuilder, name string) string {
func calmbench(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
s := &specs.TaskSpec{
CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("clang_linux")},
Dimensions: swarmDimensions(parts),
ExtraArgs: []string{
"--workdir", "../../..", "calmbench",
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "calmbench",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -714,8 +747,9 @@ func calmbench(b *specs.TasksCfgBuilder, name string, parts map[string]string) s
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
Isolate: relpath("infra_skia.isolate"),
Priority: 0.8,
Dimensions: swarmDimensions(parts),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
}
s.Dependencies = append(s.Dependencies, ISOLATE_SKP_NAME, ISOLATE_SVG_NAME)
@ -726,10 +760,9 @@ func calmbench(b *specs.TasksCfgBuilder, name string, parts map[string]string) s
if strings.Contains(name, "Release") && doUpload(name) {
uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
b.MustAddTask(uploadName, &specs.TaskSpec{
Dependencies: []string{name},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "upload_calmbench_results",
Command: []string{
RECIPE_BUNDLE_UNIX, "run", "--timestamps",
"--workdir", ".", "upload_calmbench_results",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -740,9 +773,10 @@ func calmbench(b *specs.TasksCfgBuilder, name string, parts map[string]string) s
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
fmt.Sprintf("gs_bucket=%s", CONFIG.GsBucketCalm),
},
// We're using the same isolate as upload_nano_results
Isolate: relpath("upload_nano_results.isolate"),
Priority: 0.8,
Dependencies: []string{name},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return uploadName
}
@ -768,13 +802,10 @@ func doUpload(name string) bool {
// generated chain of tasks, which the Job should add as a dependency.
func test(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
s := &specs.TaskSpec{
CipdPackages: pkgs,
Dependencies: []string{compileTaskName},
Dimensions: swarmDimensions(parts),
ExecutionTimeout: 4 * time.Hour,
Expiration: 20 * time.Hour,
ExtraArgs: []string{
"--workdir", "../../..", "test",
CipdPackages: pkgs,
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "test",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildbucket_build_id=%s", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID),
fmt.Sprintf("buildername=%s", name),
@ -785,18 +816,14 @@ func test(b *specs.TasksCfgBuilder, name string, parts map[string]string, compil
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
IoTimeout: 40 * time.Minute,
Isolate: relpath("test_skia.isolate"),
MaxAttempts: 1,
Priority: 0.8,
}
if useBundledRecipes(parts) {
s.Dependencies = append(s.Dependencies, BUNDLE_RECIPES_NAME)
if strings.Contains(parts["os"], "Win") {
s.Isolate = relpath("test_skia_bundled_win.isolate")
} else {
s.Isolate = relpath("test_skia_bundled_unix.isolate")
}
Dependencies: []string{compileTaskName, BUNDLE_RECIPES_NAME},
Dimensions: swarmDimensions(parts),
ExecutionTimeout: 4 * time.Hour,
Expiration: 20 * time.Hour,
IoTimeout: 40 * time.Minute,
Isolate: relpath("test_skia_bundled.isolate"),
MaxAttempts: 1,
Priority: 0.8,
}
if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
s.Dependencies = append(s.Dependencies, deps...)
@ -824,10 +851,9 @@ func test(b *specs.TasksCfgBuilder, name string, parts map[string]string, compil
if doUpload(name) {
uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
b.MustAddTask(uploadName, &specs.TaskSpec{
Dependencies: []string{name},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "upload_dm_results",
Command: []string{
RECIPE_BUNDLE_UNIX, "run", "--timestamps",
"--workdir", ".", "upload_dm_results",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -838,8 +864,10 @@ func test(b *specs.TasksCfgBuilder, name string, parts map[string]string, compil
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
fmt.Sprintf("gs_bucket=%s", CONFIG.GsBucketGm),
},
Isolate: relpath("upload_dm_results.isolate"),
Priority: 0.8,
Dependencies: []string{name, BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return uploadName
}
@ -864,13 +892,10 @@ func coverage(b *specs.TasksCfgBuilder, name string, parts map[string]string, co
for i := 0; i < shards; i++ {
n := strings.Replace(name, tf, fmt.Sprintf("shard_%02d_%02d", i, shards), 1)
s := &specs.TaskSpec{
CipdPackages: pkgs,
Dependencies: []string{compileTaskName},
Dimensions: swarmDimensions(parts),
ExecutionTimeout: 4 * time.Hour,
Expiration: 20 * time.Hour,
ExtraArgs: []string{
"--workdir", "../../..", "test",
CipdPackages: pkgs,
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", "test",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", n),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -880,18 +905,14 @@ func coverage(b *specs.TasksCfgBuilder, name string, parts map[string]string, co
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
IoTimeout: 40 * time.Minute,
Isolate: relpath("test_skia.isolate"),
MaxAttempts: 1,
Priority: 0.8,
}
if useBundledRecipes(parts) {
s.Dependencies = append(s.Dependencies, BUNDLE_RECIPES_NAME)
if strings.Contains(parts["os"], "Win") {
s.Isolate = relpath("test_skia_bundled_win.isolate")
} else {
s.Isolate = relpath("test_skia_bundled_unix.isolate")
}
Dependencies: []string{compileTaskName, BUNDLE_RECIPES_NAME},
Dimensions: swarmDimensions(parts),
ExecutionTimeout: 4 * time.Hour,
Expiration: 20 * time.Hour,
IoTimeout: 40 * time.Minute,
Isolate: relpath("test_skia_bundled.isolate"),
MaxAttempts: 1,
Priority: 0.8,
}
if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
s.Dependencies = append(s.Dependencies, deps...)
@ -914,8 +935,9 @@ func coverage(b *specs.TasksCfgBuilder, name string, parts map[string]string, co
Dependencies: deps,
Dimensions: linuxGceDimensions(),
CipdPackages: pkgs,
ExtraArgs: []string{
"--workdir", "../../..", "upload_coverage_results",
Command: []string{
RECIPE_BUNDLE_UNIX, "run", "--timestamps",
"--workdir", ".", "upload_coverage_results",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -926,7 +948,7 @@ func coverage(b *specs.TasksCfgBuilder, name string, parts map[string]string, co
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
fmt.Sprintf("gs_bucket=%s", CONFIG.GsBucketCoverage),
},
Isolate: relpath("upload_coverage_results.isolate"),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return uploadName
@ -936,32 +958,16 @@ func coverage(b *specs.TasksCfgBuilder, name string, parts map[string]string, co
// generated chain of tasks, which the Job should add as a dependency.
func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
recipe := "perf"
isolate := relpath("perf_skia.isolate")
isolate := relpath("perf_skia_bundled.isolate")
if strings.Contains(parts["extra_config"], "Skpbench") {
recipe = "skpbench"
isolate = relpath("skpbench_skia.isolate")
if useBundledRecipes(parts) {
if strings.Contains(parts["os"], "Win") {
isolate = relpath("skpbench_skia_bundled_win.isolate")
} else {
isolate = relpath("skpbench_skia_bundled_unix.isolate")
}
}
} else if useBundledRecipes(parts) {
if strings.Contains(parts["os"], "Win") {
isolate = relpath("perf_skia_bundled_win.isolate")
} else {
isolate = relpath("perf_skia_bundled_unix.isolate")
}
isolate = relpath("skpbench_skia_bundled.isolate")
}
s := &specs.TaskSpec{
CipdPackages: pkgs,
Dependencies: []string{compileTaskName},
Dimensions: swarmDimensions(parts),
ExecutionTimeout: 4 * time.Hour,
Expiration: 20 * time.Hour,
ExtraArgs: []string{
"--workdir", "../../..", recipe,
CipdPackages: pkgs,
Command: []string{
recipes(parts), "run", "--timestamps",
"--workdir", ".", recipe,
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -971,13 +977,14 @@ func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compil
fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
},
IoTimeout: 40 * time.Minute,
Isolate: isolate,
MaxAttempts: 1,
Priority: 0.8,
}
if useBundledRecipes(parts) {
s.Dependencies = append(s.Dependencies, BUNDLE_RECIPES_NAME)
Dependencies: []string{compileTaskName, BUNDLE_RECIPES_NAME},
Dimensions: swarmDimensions(parts),
ExecutionTimeout: 4 * time.Hour,
Expiration: 20 * time.Hour,
IoTimeout: 40 * time.Minute,
Isolate: isolate,
MaxAttempts: 1,
Priority: 0.8,
}
if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
s.Dependencies = append(s.Dependencies, deps...)
@ -1005,10 +1012,9 @@ func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compil
if strings.Contains(name, "Release") && doUpload(name) {
uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
b.MustAddTask(uploadName, &specs.TaskSpec{
Dependencies: []string{name},
Dimensions: linuxGceDimensions(),
ExtraArgs: []string{
"--workdir", "../../..", "upload_nano_results",
Command: []string{
RECIPE_BUNDLE_UNIX, "run", "--timestamps",
"--workdir", ".", "upload_nano_results",
fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
fmt.Sprintf("buildername=%s", name),
fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
@ -1019,8 +1025,10 @@ func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compil
fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
fmt.Sprintf("gs_bucket=%s", CONFIG.GsBucketNano),
},
Isolate: relpath("upload_nano_results.isolate"),
Priority: 0.8,
Dependencies: []string{name, BUNDLE_RECIPES_NAME},
Dimensions: linuxGceDimensions(),
Isolate: relpath("swarm_recipe.isolate"),
Priority: 0.8,
})
return uploadName
}
@ -1048,22 +1056,22 @@ func process(b *specs.TasksCfgBuilder, name string) {
// RecreateSKPs.
if strings.Contains(name, "RecreateSKPs") {
deps = append(deps, recreateSKPs(b, name))
deps = append(deps, recreateSKPs(b, name, parts))
}
// UpdateMetaConfig bot.
if strings.Contains(name, "UpdateMetaConfig") {
deps = append(deps, updateMetaConfig(b, name))
deps = append(deps, updateMetaConfig(b, name, parts))
}
// CT bots.
if strings.Contains(name, "-CT_") {
deps = append(deps, ctSKPs(b, name))
deps = append(deps, ctSKPs(b, name, parts))
}
// Infra tests.
if name == "Housekeeper-PerCommit-InfraTests" {
deps = append(deps, infra(b, name))
deps = append(deps, infra(b, name, parts))
}
// Compile bots.
@ -1096,10 +1104,10 @@ func process(b *specs.TasksCfgBuilder, name string) {
// Housekeepers.
if name == "Housekeeper-PerCommit" {
deps = append(deps, housekeeper(b, name, compileTaskName))
deps = append(deps, housekeeper(b, name, compileTaskName, parts))
}
if name == "Housekeeper-PerCommit-CheckGeneratedFiles" {
deps = append(deps, checkGeneratedFiles(b, name))
deps = append(deps, checkGeneratedFiles(b, name, parts))
}
// Common assets needed by the remaining bots.


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -1,7 +1,6 @@
{
'variables': {
'files': [
'../../tools/valgrind.supp',
'./',
],
},


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'/bin/cp', '-rL', 'android_ndk_linux', '${ISOLATED_OUTDIR}',
],
},
}


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'/bin/cp', '-rL', 'skimage', '${ISOLATED_OUTDIR}',
],
},
}


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'/bin/cp', '-rL', 'skp', '${ISOLATED_OUTDIR}',
],
},
}


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'/bin/cp', '-rL', 'svg', '${ISOLATED_OUTDIR}',
],
},
}


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'/bin/cp', '-rL', 't', '${ISOLATED_OUTDIR}',
],
},
}


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'/bin/cp', '-rL', 'win_vulkan_sdk', '${ISOLATED_OUTDIR}',
],
},
}


@ -1,5 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
}


@ -1,13 +0,0 @@
{
'includes': [
'android_bin.isolate',
'ios_bin.isolate',
'resources.isolate',
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -4,10 +4,10 @@
'assets.isolate',
'ios_bin.isolate',
'resources.isolate',
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
'../../tools/valgrind.supp',
],
},


@ -1,6 +0,0 @@
{
'includes': [
'perf_skia_bundled.isolate',
'swarm_recipe_bundled_unix.isolate',
],
}


@ -1,6 +0,0 @@
{
'includes': [
'perf_skia_bundled.isolate',
'swarm_recipe_bundled_win.isolate',
],
}


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -1,61 +0,0 @@
[
{
"cmd": [
"git",
"init"
],
"cwd": "[START_DIR]/skia",
"env": {
"PATH": "[START_DIR]/git:[START_DIR]/git/bin:<PATH>"
},
"infra_step": true,
"name": "git init"
},
{
"cmd": [
"git",
"add",
"."
],
"cwd": "[START_DIR]/skia",
"env": {
"PATH": "[START_DIR]/git:[START_DIR]/git/bin:<PATH>"
},
"infra_step": true,
"name": "git add"
},
{
"cmd": [
"git",
"commit",
"-m",
"commit recipes"
],
"cwd": "[START_DIR]/skia",
"env": {
"PATH": "[START_DIR]/git:[START_DIR]/git/bin:<PATH>"
},
"infra_step": true,
"name": "git commit"
},
{
"cmd": [
"python",
"[START_DIR]/skia/infra/bots/recipes.py",
"bundle",
"--destination",
"[SWARM_OUT_DIR]/recipe_bundle"
],
"cwd": "[START_DIR]/skia",
"env": {
"PATH": "[START_DIR]/git:[START_DIR]/git/bin:<PATH>"
},
"infra_step": true,
"name": "Bundle Recipes"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]


@ -1,40 +0,0 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe module for Skia Swarming compile.
DEPS = [
'git',
'recipe_engine/context',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/step',
]
def RunSteps(api):
bundle_dir = api.properties['swarm_out_dir'] + '/recipe_bundle'
skia_dir = api.path['start_dir'].join('skia')
recipes_py = api.path['start_dir'].join('skia', 'infra', 'bots', 'recipes.py')
with api.git.env():
with api.context(cwd=skia_dir):
api.step('git init', infra_step=True,
cmd=['git', 'init'])
api.step('git add', infra_step=True,
cmd=['git', 'add', '.'])
api.step('git commit', infra_step=True,
cmd=['git', 'commit', '-m', 'commit recipes'])
api.step('Bundle Recipes', infra_step=True,
cmd=['python', recipes_py, 'bundle',
'--destination', bundle_dir])
def GenTests(api):
yield (
api.test('BundleRecipes') +
api.properties(buildername='Housekeeper-PerCommit-BundleRecipes',
swarm_out_dir='[SWARM_OUT_DIR]')
)


@ -1,11 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
'../../tools/skpbench/',
],
},
}


@ -1,10 +1,10 @@
{
'includes': [
'assets.isolate',
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
'../../tools/valgrind.supp',
'../../tools/skpbench/',
],


@ -1,6 +0,0 @@
{
'includes': [
'skpbench_skia_bundled.isolate',
'swarm_recipe_bundled_unix.isolate',
],
}


@ -1,6 +0,0 @@
{
'includes': [
'skpbench_skia_bundled.isolate',
'swarm_recipe_bundled_win.isolate',
],
}


@ -3,10 +3,8 @@
'infrabots.isolate',
],
'variables': {
'command': [
'python', 'recipes.py', '--package', '../config/recipes.cfg', 'run', '--timestamps',
],
'files': [
'../../../.gclient',
'../config/recipes.cfg',
],
},


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'../../../recipe_bundle/recipes', 'run', '--timestamps',
],
},
}


@ -1,7 +0,0 @@
{
'variables': {
'command': [
'../../../recipe_bundle/recipes.bat', 'run', '--timestamps',
],
},
}

File diff suppressed because it is too large


@ -1,13 +0,0 @@
{
'includes': [
'android_bin.isolate',
'ios_bin.isolate',
'resources.isolate',
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -4,10 +4,10 @@
'assets.isolate',
'ios_bin.isolate',
'resources.isolate',
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
'../../tools/valgrind.supp',
],
},


@ -1,6 +0,0 @@
{
'includes': [
'test_skia_bundled.isolate',
'swarm_recipe_bundled_unix.isolate',
],
}


@ -1,6 +0,0 @@
{
'includes': [
'test_skia_bundled.isolate',
'swarm_recipe_bundled_win.isolate',
],
}


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}


@ -1,10 +0,0 @@
{
'includes': [
'swarm_recipe.isolate',
],
'variables': {
'files': [
'../../../.gclient',
],
},
}