Always set workflow:rules in spack ci pipelines (#38921)
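Previously the generated child pipeline only received a workflow: rules: [{"when": "always"}] entry on the code path that produced build jobs; the no-op branches built their own output object and never set it. This commit moves the assignment after the if/else so every generated pipeline, including no-op ones, carries the rule and the child pipeline always runs. A minimal sketch of the resulting shape (the job name and script below are illustrative assumptions, not taken from the commit):

# Minimal sketch: workflow:rules is attached unconditionally, so even a no-op
# child pipeline is emitted with "when: always". Job name and script are hypothetical.
import json

output_object = {"no-specs-to-rebuild": {"script": ['echo "No specs to rebuild"']}}

# Ensure the child pipeline always runs
output_object["workflow"] = {"rules": [{"when": "always"}]}

sorted_output = {key: value for key, value in sorted(output_object.items())}
print(json.dumps(sorted_output, indent=2))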
committed by GitHub
parent f017f586df
commit 3ad65bbfc1
@@ -1287,9 +1287,6 @@ def main_script_replacements(cmd):
         if spack_stack_name:
             output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
 
-        # Ensure the child pipeline always runs
-        output_object["workflow"] = {"rules": [{"when": "always"}]}
-
         if spack_buildcache_copy:
             # Write out the file describing specs that should be copied
             copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy")
@@ -1305,21 +1302,17 @@ def main_script_replacements(cmd):
             with open(copy_specs_file, "w") as fd:
                 fd.write(json.dumps(buildcache_copies))
 
-        sorted_output = {}
-        for output_key, output_value in sorted(output_object.items()):
-            sorted_output[output_key] = output_value
-
         # TODO(opadron): remove this or refactor
         if run_optimizer:
             import spack.ci_optimization as ci_opt
 
-            sorted_output = ci_opt.optimizer(sorted_output)
+            output_object = ci_opt.optimizer(output_object)
 
         # TODO(opadron): remove this or refactor
         if use_dependencies:
             import spack.ci_needs_workaround as cinw
 
-            sorted_output = cinw.needs_to_dependencies(sorted_output)
+            output_object = cinw.needs_to_dependencies(output_object)
     else:
         # No jobs were generated
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1330,10 +1323,17 @@ def main_script_replacements(cmd):
             noop_job["script"] = [
                 'echo "copy-only pipelines are not supported with deprecated ci configs"'
             ]
-            sorted_output = {"unsupported-copy": noop_job}
+            output_object = {"unsupported-copy": noop_job}
         else:
             tty.debug("No specs to rebuild, generating no-op job")
-            sorted_output = {"no-specs-to-rebuild": noop_job}
+            output_object = {"no-specs-to-rebuild": noop_job}
 
+    # Ensure the child pipeline always runs
+    output_object["workflow"] = {"rules": [{"when": "always"}]}
+
+    sorted_output = {}
+    for output_key, output_value in sorted(output_object.items()):
+        sorted_output[output_key] = output_value
+
     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")

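For reference, a small sketch of how that workflow entry serializes into the child pipeline YAML (plain PyYAML is used here as an assumption; the tests below load the generated file with spack's syaml wrapper):

import yaml

print(yaml.safe_dump({"workflow": {"rules": [{"when": "always"}]}}, sort_keys=False))
# workflow:
#   rules:
#   - when: always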
@@ -215,6 +215,10 @@ def test_ci_generate_with_env(
         with open(outputfile) as f:
             contents = f.read()
             yaml_contents = syaml.load(contents)
+            assert "workflow" in yaml_contents
+            assert "rules" in yaml_contents["workflow"]
+            assert yaml_contents["workflow"]["rules"] == [{"when": "always"}]
+
             assert "stages" in yaml_contents
             assert len(yaml_contents["stages"]) == 5
             assert yaml_contents["stages"][0] == "stage-0"
@@ -1102,9 +1106,9 @@ def test_push_mirror_contents(
             with open(outputfile_pruned) as f:
                 contents = f.read()
                 yaml_contents = syaml.load(contents)
-                assert "no-specs-to-rebuild" in yaml_contents
                 # Make sure there are no other spec jobs or rebuild-index
-                assert len(yaml_contents.keys()) == 1
+                assert set(yaml_contents.keys()) == {"no-specs-to-rebuild", "workflow"}
+
                 the_elt = yaml_contents["no-specs-to-rebuild"]
                 assert "tags" in the_elt
                 assert "nonbuildtag" in the_elt["tags"]
@@ -1112,6 +1116,9 @@ def test_push_mirror_contents(
                 assert the_elt["image"] == "basicimage"
                 assert the_elt["custom_attribute"] == "custom!"
 
+                assert "rules" in yaml_contents["workflow"]
+                assert yaml_contents["workflow"]["rules"] == [{"when": "always"}]
+
             outputfile_not_pruned = str(tmpdir.join("unpruned_pipeline.yml"))
             ci_cmd("generate", "--no-prune-dag", "--output-file", outputfile_not_pruned)
 
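A quick way to double-check a locally generated pipeline for the new entry, in the same spirit as the assertions above, e.g. after spack ci generate --output-file generated_pipeline.yml (the output path is an assumption, and plain PyYAML stands in for spack's syaml):

import yaml

with open("generated_pipeline.yml") as f:
    pipeline = yaml.safe_load(f)

assert "workflow" in pipeline
assert pipeline["workflow"]["rules"] == [{"when": "always"}]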