Commit e6e6f931 authored by ale

Refactor code to wait for builds recursively

This ensures images are built in the right dependency order.
parent 0482b4c2
Pipeline #1264 failed in 60 minutes and 4 seconds
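The idea behind the refactor, in a nutshell: build_dependency_tree() maps each base image to the projects built on top of it, and the rebuild loop walks that map breadth-first, so an image is always rebuilt before the images that depend on it. A toy sketch of that ordering, using made-up image names and independent of the script below:

# Toy dependency map: base image name -> projects built directly on it
# (names are made up for illustration only).
deps = {
    'registry.example.com/infra/base': ['infra/python', 'infra/go'],
    'registry.example.com/infra/python': ['apps/webapp'],
}

# Breadth-first walk starting from the root image: a parent is always
# printed (i.e. rebuilt) before anything that depends on it.
stack = list(deps['registry.example.com/infra/base'])
while stack:
    path = stack.pop(0)
    print 'rebuild', path
    stack.extend(deps.get('registry.example.com/' + path, []))

The printed order is infra/python, infra/go, apps/webapp: the direct children of the base image first, then their own dependents.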
import gitlab
import optparse
import re
import os
import sys
import time
import urlparse
def _parse_dockerfile(df):
    # Return the base image named by the first FROM line, if any.
    for line in df.split('\n'):
        if line.startswith('FROM '):
            return line[5:].strip()


def _fetch_dockerfile(gl, project):
    # Fetch the Dockerfile from the master branch of a project.
    try:
        f = project.files.get(file_path='Dockerfile', ref='master')
        return f.decode()
    except Exception:
        # NOTE: the exact exception type is not visible in this diff hunk;
        # any error fetching the file is treated as "no usable Dockerfile".
        return None


def _remove_image_tag(name):
    # Strip the ':tag' suffix from an image name, if present.
    if ':' in name:
        return name.split(':')[0]
    return name


def build_dependency_tree(gl, search_pattern=None):
    """Build the project dependency map based on Dockerfiles.

    Returns a dict mapping base image names (without tags) to the list
    of projects whose Dockerfile builds on top of that image.
    """
    deps = {}
    # Use a generator to scan over the full list of projects
    # (potentially large).
    projects = gl.projects.list(all=True, search=search_pattern, as_list=False)
    for project in projects:
        df = _fetch_dockerfile(gl, project)
        if not df:
            continue
        base_image = _parse_dockerfile(df)
        if not base_image:
            print >>sys.stderr, 'ERROR: could not find base image for %s' % (
                project.path_with_namespace,)
            continue
        deps.setdefault(_remove_image_tag(base_image), []).append(project)
    return deps


def rebuild(project, wait=False):
    # Trigger a new pipeline on master; optionally poll until it finishes.
    pipeline = project.pipelines.create({'ref': 'master'})
    if wait:
        while pipeline.finished_at is None:
            pipeline.refresh()
            time.sleep(3)
    return pipeline


def rebuild_deps(gitlab_url, registry_hostname, gitlab_token,
                 search_pattern, image_name,
                 dry_run=False, recurse=False, wait=False):
    """Rebuild dependencies of the given image."""
    gl = gitlab.Gitlab(gitlab_url, private_token=gitlab_token)
    if gitlab_token:
        gl.auth()
    deps = build_dependency_tree(gl, search_pattern)

    # Breadth-first walk of the dependency map, starting with the
    # projects built directly on top of image_name.
    stack = deps.get(_remove_image_tag(image_name), [])
    while stack:
        project = stack.pop(0)
        print 'rebuilding %s' % project.path_with_namespace
        if not dry_run:
            pipeline = rebuild(project, wait)
            if pipeline.status != 'success':
                print >>sys.stderr, 'ERROR: build failed for %s' % (
                    project.path_with_namespace,)
                return
        if recurse:
            # Queue the projects that depend on the image we just rebuilt.
            image_name = '%s/%s' % (
                registry_hostname, project.path_with_namespace)
            stack.extend(deps.get(image_name, []))


def main():
    parser = optparse.OptionParser(usage='%prog [<options>] <image_name>')
    parser.add_option('--token', help='Authentication token')
    parser.add_option('--registry',
                      help='Docker registry hostname (if empty, it will be '
                      'automatically derived from --url)')
    parser.add_option('--url', help='Gitlab URL')
    parser.add_option('-n', '--dry-run', action='store_true', dest='dry_run',
                      help='Only show what would be done')
    parser.add_option('--recurse', action='store_true',
                      help='Include all dependencies recursively '
                      'and wait for completion of the pipelines')
    parser.add_option('--match',
                      default='/docker-',
                      help='Search keyword(s) to filter project list')
    opts, args = parser.parse_args()

    if not opts.url:
        parser.error('Must specify --url')
    if len(args) != 1:
        parser.error('Bad number of arguments')
    # If --registry is not specified, make an educated guess.
    registry_hostname = opts.registry
    if not registry_hostname:
        registry_hostname = 'registry.' + urlparse.urlsplit(opts.url).netloc
        print >>sys.stderr, 'using %s as Docker registry' % (registry_hostname,)

    rebuild_deps(
        opts.url,
        registry_hostname,
        opts.token,
        opts.match,
        args[0],
        opts.dry_run,
        opts.recurse,
        # --recurse also implies waiting for each pipeline to complete.
        opts.recurse,
    )


if __name__ == '__main__':
    main()
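For reference, a minimal sketch of driving the new entry point directly from Python instead of going through main(); the URL, token and image name are placeholders, and it assumes the script is importable as a module:

# Hypothetical values; dry_run=True only prints the rebuild order.
rebuild_deps(
    gitlab_url='https://gitlab.example.com',
    registry_hostname='registry.gitlab.example.com',
    gitlab_token='SECRET-TOKEN',
    search_pattern='docker-',
    image_name='registry.gitlab.example.com/infra/docker-base',
    dry_run=True,
    recurse=True,  # walk the dependency tree transitively...
    wait=True,     # ...and wait for each pipeline, as --recurse does
)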