diff --git a/module_build_service/builder/KojiContentGenerator.py b/module_build_service/builder/KojiContentGenerator.py
index 078cc1f..4c9ee67 100644
--- a/module_build_service/builder/KojiContentGenerator.py
+++ b/module_build_service/builder/KojiContentGenerator.py
@@ -32,6 +32,7 @@ import platform
import shutil
import subprocess
import tempfile
+import time
import koji
@@ -273,7 +274,7 @@ class KojiContentGenerator(object):
u'filesize': len(self.mmd),
u'checksum_type': u'md5',
u'checksum': unicode(hashlib.md5(self.mmd).hexdigest()),
- u'filename': u'modulemd.yaml',
+ u'filename': u'modulemd.txt',
u'components': components
}
)
@@ -317,7 +318,7 @@ class KojiContentGenerator(object):
Returns path to the temporary directory
"""
prepdir = tempfile.mkdtemp(prefix="koji-cg-import")
- mmd_path = os.path.join(prepdir, "modulemd.yaml")
+ mmd_path = os.path.join(prepdir, "modulemd.txt")
with open(mmd_path, "w") as mmd_f:
mmd_f.write(self.mmd)
@@ -328,6 +329,32 @@
return prepdir
+ def _upload_outputs(self, session, metadata, file_dir):
+ """
+ Uploads output files to Koji hub.
+ """
+ to_upload = []
+ for info in metadata['output']:
+ if info.get('metadata_only', False):
+ continue
+ localpath = os.path.join(file_dir, info['filename'])
+ if not os.path.exists(localpath):
+ err = "Cannot upload %s to Koji. No such file." % localpath
+ log.error(err)
+ raise RuntimeError(err)
+
+ to_upload.append([localpath, info])
+
+ # Create unique server directory.
+ serverdir = 'mbs/%r.%d' % (time.time(), self.module.id)
+
+ for localpath, info in to_upload:
+ log.info("Uploading %s to Koji" % localpath)
+ session.uploadWrapper(localpath, serverdir, callback=None)
+ log.info("Upload of %s to Koji done" % localpath)
+
+ return serverdir
+
def koji_import(self):
"""This method imports given module into the configured koji instance as
a content generator based build
@@ -338,7 +365,8 @@
file_dir = self._prepare_file_directory()
metadata = self._get_content_generator_metadata(file_dir)
try:
- build_info = session.CGImport(metadata, file_dir)
+ serverdir = self._upload_outputs(session, metadata, file_dir)
+ build_info = session.CGImport(metadata, serverdir)
log.debug("Content generator import done: %s",
json.dumps(build_info, sort_keys=True, indent=4))
except Exception, e:
diff --git a/module_build_service/scheduler/handlers/repos.py b/module_build_service/scheduler/handlers/repos.py
index 7a6f7d8..56c8668 100644
--- a/module_build_service/scheduler/handlers/repos.py
+++ b/module_build_service/scheduler/handlers/repos.py
@@ -89,13 +89,6 @@ def done(config, session, msg):
tag_name=tag, components=[c.package for c in module_build.component_builds])
builder.buildroot_connect(groups)
- # Ok, for the subset of builds that did complete successfully, check to
- # see if they are in the buildroot.
- artifacts = [component_build.nvr for component_build in good]
- if not builder.buildroot_ready(artifacts):
- log.info("Not all of %r are in the buildroot. Waiting." % artifacts)
- return
-
# If we have reached here then we know the following things:
#
# - All components in this batch have finished (failed or succeeded)
@@ -116,6 +109,13 @@ def done(config, session, msg):
further_work = []
if has_unbuilt_components and not has_failed_components:
+ # Ok, for the subset of builds that did complete successfully, check to
+ # see if they are in the buildroot before starting new batch.
+ artifacts = [component_build.nvr for component_build in good]
+ if not builder.buildroot_ready(artifacts):
+ log.info("Not all of %r are in the buildroot. Waiting." % artifacts)
+ return
+
# Try to start next batch build, because there are still unbuilt
# components in a module.
further_work += start_next_batch_build(
diff --git a/module_build_service/scheduler/handlers/tags.py b/module_build_service/scheduler/handlers/tags.py
index 493a5da..bdf7d78 100644
--- a/module_build_service/scheduler/handlers/tags.py
+++ b/module_build_service/scheduler/handlers/tags.py
@@ -27,7 +27,7 @@ import module_build_service.builder
import module_build_service.pdc
import logging
import koji
-from module_build_service import models, log
+from module_build_service import models, log, messaging
from module_build_service.utils import start_next_batch_build
logging.basicConfig(level=logging.DEBUG)
@@ -78,13 +78,30 @@ def tagged(config, session, msg):
if not c.tagged and c.state == koji.BUILD_STATES['COMPLETE']
]
+ further_work = []
+
# If all components are tagged, start newRepo task.
if not untagged_components:
- log.info("All components tagged, regenerating repo for tag %s", tag)
builder = module_build_service.builder.GenericBuilder.create_from_module(
session, module_build, config)
- task_id = builder.koji_session.newRepo(tag)
- module_build.new_repo_task_id = task_id
+
+ unbuilt_components = [
+ c for c in module_build.component_builds
+ if c.state == koji.BUILD_STATES['BUILDING'] or not c.state
+ ]
+ if unbuilt_components:
+ log.info("All components in batch tagged, regenerating repo for tag %s", tag)
+ task_id = builder.koji_session.newRepo(tag)
+ module_build.new_repo_task_id = task_id
+ else:
+ # In case this is the last batch, we do not need to regenerate the
+ # buildroot, because we will not build anything else in it. It
+ # would be useless to wait for a repository we will not use anyway.
+ log.info("All components in module tagged and built, skipping the "
+ "last repo regeneration")
+ further_work += [messaging.KojiRepoChange(
+ 'components::_finalize: fake msg',
+ builder.module_build_tag['name'])]
session.commit()
- return []
+ return further_work