Update recipes.py to f605e5c0bc2e748170ca851383cb552c7225316b

This will enable it to be compatible with both the textpb and the
upcoming jsonpb forms of recipes.cfg.
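
For reference, a minimal hypothetical sketch (not part of this change; field
names follow the parser in the diff below, while URLs, revisions, and paths are
placeholders) of the two recipes.cfg forms and how the new parse() tells them
apart:

# Hypothetical minimal recipes.cfg contents in each form; a real file carries
# more fields and real values.
JSONPB_EXAMPLE = ('{"recipes_path": "path/to/recipes", "deps": ['
                  '{"project_id": "recipe_engine",'
                  ' "url": "<engine url>", "revision": "<sha1>"}]}')
TEXTPB_EXAMPLE = '''recipes_path: "path/to/recipes"
deps {
  project_id: "recipe_engine"
  url: "<engine url>"
  revision: "<sha1>"
}'''
# parse() chooses the decoder by peeking at the first non-blank character:
# a leading '{' means jsonpb, anything else falls through to the textpb parser.
assert JSONPB_EXAMPLE.lstrip().startswith('{')
assert not TEXTPB_EXAMPLE.lstrip().startswith('{')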

BUG=chromium:699379

Change-Id: Ieabf2b0c3e41bbb250e987a90902ee1bdb71b73d
Reviewed-on: https://skia-review.googlesource.com/9749
Reviewed-by: Eric Boren <borenet@google.com>
Commit-Queue: Eric Boren <borenet@google.com>
Authored by Robert Iannucci on 2017-03-15 18:04:05 -07:00; committed by Skia Commit-Bot
parent b66fa526b8
commit 2ba659e85f


@@ -28,6 +28,7 @@ BOOTSTRAP_VERSION = 1
import argparse
import ast
import json
import logging
import random
import re
@@ -36,8 +37,71 @@ import sys
import time
import traceback
from cStringIO import StringIO
def parse_protobuf(fh):
def parse(repo_root, recipes_cfg_path):
"""Parse is transitional code which parses a recipes.cfg file as either jsonpb
or as textpb.
Args:
repo_root (str) - native path to the root of the repo we're trying to run
recipes for.
recipes_cfg_path (str) - native path to the recipes.cfg file to process.
Returns (as tuple):
engine_url (str) - the url to the engine repo we want to use.
engine_revision (str) - the git revision for the engine to get.
engine_subpath (str) - the subdirectory in the engine repo we should use to
find it's recipes.py entrypoint. This is here for completeness, but will
essentially always be empty. It would be used if the recipes-py repo was
merged as a subdirectory of some other repo and you depended on that
subdirectory.
recipes_path (str) - native path to where the recipes live inside of the
current repo (i.e. the folder containing `recipes/` and/or
`recipe_modules`)
"""
  with open(recipes_cfg_path, 'rU') as fh:
    data = fh.read()

  if data.lstrip().startswith('{'):
    pb = json.loads(data)
    engine = next(
      (d for d in pb['deps'] if d['project_id'] == 'recipe_engine'), None)
    if engine is None:
      raise ValueError('could not find recipe_engine dep in %r'
                       % recipes_cfg_path)
    engine_url = engine['url']
    engine_revision = engine['revision']
    engine_subpath = engine.get('path_override', '')
    recipes_path = pb.get('recipes_path', '')
  else:
    def get_unique(things):
      if len(things) == 1:
        return things[0]
      elif len(things) == 0:
        raise ValueError("Expected to get one thing, but dinna get none.")
      else:
        logging.warn('Expected to get one thing, but got a bunch: %s\n%s' %
                     (things, traceback.format_stack()))
        return things[0]

    protobuf = parse_textpb(StringIO(data))
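    # Note added for clarity (not part of the commit): parse_textpb wraps
    # every parsed value in a list, which is why the lookups below compare
    # against single-element lists like ['recipe_engine'] and unwrap values
    # with get_unique().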
    engine_buf = get_unique([
        b for b in protobuf.get('deps', [])
        if b.get('project_id') == ['recipe_engine'] ])
    engine_url = get_unique(engine_buf['url'])
    engine_revision = get_unique(engine_buf['revision'])
    engine_subpath = (get_unique(engine_buf.get('path_override', ['']))
                      .replace('/', os.path.sep))
    recipes_path = get_unique(protobuf.get('recipes_path', ['']))

  recipes_path = os.path.join(repo_root, recipes_path.replace('/', os.path.sep))
  return engine_url, engine_revision, engine_subpath, recipes_path
def parse_textpb(fh):
"""Parse the protobuf text format just well enough to understand recipes.cfg.
We don't use the protobuf library because we want to be as self-contained
@@ -75,7 +139,7 @@ def parse_protobuf(fh):
    m = re.match(r'(\w+)\s*{', line)
    if m:
      subparse = parse_protobuf(fh)
      subparse = parse_textpb(fh)
      ret.setdefault(m.group(1), []).append(subparse)
      continue
@@ -89,17 +153,6 @@ def parse_protobuf(fh):
  return ret
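# Illustrative example (added for clarity, not part of the commit): for an
# input fragment such as
#   deps {
#     project_id: "recipe_engine"
#     revision: "<sha1>"
#   }
# parse_textpb returns roughly
#   {'deps': [{'project_id': ['recipe_engine'], 'revision': ['<sha1>']}]}
# i.e. every scalar value ends up wrapped in a list.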

def get_unique(things):
  if len(things) == 1:
    return things[0]
  elif len(things) == 0:
    raise ValueError("Expected to get one thing, but dinna get none.")
  else:
    logging.warn('Expected to get one thing, but got a bunch: %s\n%s' %
                 (things, traceback.format_stack()))
    return things[0]
def _subprocess_call(argv, **kwargs):
  logging.info('Running %r', argv)
  return subprocess.call(argv, **kwargs)
@@ -146,37 +199,28 @@ def main():
      os.path.join(os.path.dirname(__file__), REPO_ROOT))
  recipes_cfg_path = os.path.join(repo_root, RECIPES_CFG)
  with open(recipes_cfg_path, 'rU') as fh:
    protobuf = parse_protobuf(fh)
  engine_url, engine_revision, engine_subpath, recipes_path = parse(
      repo_root, recipes_cfg_path)
  engine_buf = get_unique([
      b for b in protobuf.get('deps', [])
      if b.get('project_id') == ['recipe_engine'] ])
  engine_url = get_unique(engine_buf['url'])
  engine_revision = get_unique(engine_buf['revision'])
  engine_subpath = (get_unique(engine_buf.get('path_override', ['']))
                    .replace('/', os.path.sep))
  recipes_path = os.path.join(repo_root,
      get_unique(protobuf.get('recipes_path', [''])).replace('/', os.path.sep))
  deps_path = os.path.join(recipes_path, '.recipe_deps')
  engine_path = find_engine_override(sys.argv[1:])
  if not engine_path:
    # Ensure that we have the recipe engine cloned.
    engine_path = os.path.join(deps_path, 'recipe_engine')
    engine_root_path = os.path.join(deps_path, 'recipe_engine')
    engine_path = os.path.join(engine_root_path, engine_subpath)
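    # Note added for clarity (not part of the commit): the engine repo is
    # cloned at engine_root_path, while engine_path points at the directory
    # containing its recipes.py. Because engine_subpath is essentially always
    # empty, the two normally name the same directory.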
    def ensure_engine():
      if not os.path.exists(deps_path):
        os.makedirs(deps_path)
      if not os.path.exists(engine_path):
        _subprocess_check_call([git, 'clone', engine_url, engine_path])
      if not os.path.exists(engine_root_path):
        _subprocess_check_call([git, 'clone', engine_url, engine_root_path])
      needs_fetch = _subprocess_call(
          [git, 'rev-parse', '--verify', '%s^{commit}' % engine_revision],
          cwd=engine_path, stdout=open(os.devnull, 'w'))
          cwd=engine_root_path, stdout=open(os.devnull, 'w'))
      if needs_fetch:
        _subprocess_check_call([git, 'fetch'], cwd=engine_path)
        _subprocess_check_call([git, 'fetch'], cwd=engine_root_path)
      _subprocess_check_call(
          [git, 'checkout', '--quiet', engine_revision], cwd=engine_path)
          [git, 'checkout', '--quiet', engine_revision], cwd=engine_root_path)
    try:
      ensure_engine()
@@ -190,7 +234,7 @@ def main():
  args = ['--package', recipes_cfg_path] + sys.argv[1:]
  return _subprocess_call([
      sys.executable, '-u',
      os.path.join(engine_path, engine_subpath, 'recipes.py')] + args)
      os.path.join(engine_path, 'recipes.py')] + args)
if __name__ == '__main__':
  sys.exit(main())