Commit

TechnologyBrewery#149 implement update dockerfile to create virtual environment
csun-cpointe committed Jun 26, 2024
1 parent 34ad85d commit f079d57
Showing 10 changed files with 643 additions and 264 deletions.
498 changes: 264 additions & 234 deletions README.md

Large diffs are not rendered by default.

ContainerizeDepsMojo.java
@@ -14,12 +14,16 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.technologybrewery.habushu.util.HabushuUtil;
import org.technologybrewery.habushu.util.ContainerizeDepsDockerfileHelper;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.stream.Collectors;

@@ -99,7 +103,7 @@ public class ContainerizeDepsMojo extends AbstractHabushuMojo {
* Location of where containerization files will be placed.
*/
@Parameter(defaultValue = "${project.build.directory}/containerize-support", readonly = true)
protected File containerizeSupportDirectory;
protected String containerizeSupportDirectory;

/**
* Upstream directory that houses all necessary monorepo dependencies.
@@ -108,12 +112,22 @@ public class ContainerizeDepsMojo extends AbstractHabushuMojo {
@Parameter(readonly = true)
protected File anchorSourceDirectory;

private File anchorOutputDirectory;
/**
* Whether to update the configured Dockerfile with the generated virtual environment stage content
*/
@Parameter(defaultValue = "true", property = "habushu.updateDockerfile")
protected boolean updateDockerfile;

/**
* Dockerfile to be updated with the stage content
*/
@Parameter(property = "habushu.dockerfile")
protected File dockerfile;
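// As with any Maven @Parameter that declares a property, these can be set either in the
// plugin's <configuration> block (<updateDockerfile>, <dockerfile>) or on the command
// line via -Dhabushu.updateDockerfile and -Dhabushu.dockerfile.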

private Path anchorOutputDirectory;

protected final String HABUSHU = "habushu";
protected final String FORWARD_SLASH = "/";
protected final String GLOB_RECURSIVE_ALL = "/**";

/**
* Overriding to allow execution in non-habushu projects.
*/
@@ -132,33 +146,45 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException {
this.workingDirectoryRelativeToBasedir = "";
}

Set<MavenProject> habushuProjects = getHabushuProjects();
ProjectCollectionResult result = getHabushuProjects();
try {
copySourceCode(habushuProjects);
Path targetProjectPath = copySourceCode(result);
if (this.updateDockerfile) {
if (this.dockerfile == null) {
throw new HabushuException("`updateDockerfile` is set to true but `dockerfile` is not specified");
}
performDockerfileUpdateForVirtualEnvironment(targetProjectPath);
}
} catch (IOException e) {
throw new HabushuException(e);
}
}

/**
* Copies the relevant source files by leveraging {@link FileSet}s to filter appropriately.
* @param habushuProjects corresponding projects of the pom's habushu-type dependencies
* @param projectCollection corresponding projects of the pom's habushu-type dependencies
* @return the relative path from the staging root to the primary project being containerized
* @throws IOException
* @throws MojoExecutionException
*/
protected void copySourceCode(Set<MavenProject> habushuProjects) throws IOException, MojoExecutionException {
this.anchorOutputDirectory = new File(containerizeSupportDirectory
+ FORWARD_SLASH + this.anchorSourceDirectory.getName());
protected Path copySourceCode(ProjectCollectionResult projectCollection) throws IOException, MojoExecutionException {
this.anchorOutputDirectory = Path.of(containerizeSupportDirectory, this.anchorSourceDirectory.getName());
Path srcRoot = this.anchorSourceDirectory.toPath();
Path destRoot = this.anchorOutputDirectory.toPath();
Path primaryProjectPath = null;

Map<Path, FileSet> dependencyFileSets = new HashMap<>();
for (MavenProject project : habushuProjects) {
for (MavenProject project : projectCollection.getAllProjects()) {
Path projectPath = getWorkingDirectoryPath(project);
Path relativeProjectPath = srcRoot.relativize(projectPath);
FileSet fileSet = getDefaultFileSet(project);
fileSet.setDirectory(projectPath.toString());
dependencyFileSets.put(relativeProjectPath, fileSet);
if (project.equals(projectCollection.getPrimaryProject())) {
primaryProjectPath = relativeProjectPath;
}
}
if (primaryProjectPath == null) {
throw new HabushuException("Primary project was not included in the set of projects. Ensure the habushu project is in the build and the pom dependencies are configured correctly.");
}

FileSetManager fileSetManager = new FileSetManager();
@@ -170,10 +196,11 @@ protected void copySourceCode(Set<MavenProject> habushuProjects) throws IOException, MojoExecutionException {
);
for (String includedFile : fileSetManager.getIncludedFiles(fileSet)) {
Path relativePath = project.resolve(includedFile);
Files.createDirectories(destRoot.resolve(relativePath).getParent());
Files.copy(srcRoot.resolve(relativePath), destRoot.resolve(relativePath));
Files.createDirectories(this.anchorOutputDirectory.resolve(relativePath).getParent());
Files.copy(srcRoot.resolve(relativePath), this.anchorOutputDirectory.resolve(relativePath), StandardCopyOption.REPLACE_EXISTING);
}
}
return primaryProjectPath;
}

private Path getWorkingDirectoryPath(MavenProject project) {
@@ -211,39 +238,43 @@ && new File(virtualEnvironmentPath).isDirectory()) {
* Checks listed habushu-type dependencies against the set of projects included in the Maven build's session
* @return the corresponding Maven projects that match the habushu-type dependencies
*/
protected Set<MavenProject> getHabushuProjects() {
Set<MavenProject> habushuProjects = new HashSet<>();
protected ProjectCollectionResult getHabushuProjects() {
ProjectCollectionResult collectionResult;
Set<Dependency> directHabushuDeps = session.getCurrentProject().getDependencies().stream()
.filter(d -> HABUSHU.equals(d.getType()))
.collect(Collectors.toSet());
// TODO: modify this exception throw, once support for
// more than one direct monorepo dep specification is implemented
if (directHabushuDeps.size() > 1) {
throw new HabushuException("More than one habushu-type dependency was found."
throw new HabushuException("More than one `habushu` packaged dependency was found."
+ "Only one habushu-type dependency should be specified.");
}

collectHabushuDependenciesAsProjects(project, habushuProjects);
return habushuProjects;
} else if (directHabushuDeps.size() == 1) {
collectionResult = new ProjectCollectionResult(directHabushuDeps.iterator().next());
} else {
throw new HabushuException("No `habushu` packaged dependencies were found to containerize.");
}
return collectHabushuDependenciesAsProjects(project, collectionResult);
}

/**
* Collects the projects matching habushu-type dependencies and adds them to the given collection result
* @param currentProject the project to interrogate the habushu-type dependencies against
* @param habushuProjects the set to append to with matching projects
* @param collectionResult the result object to add the projects to
* @return the populated collection result
*/
protected void collectHabushuDependenciesAsProjects(MavenProject currentProject, Set<MavenProject> habushuProjects) {
protected ProjectCollectionResult collectHabushuDependenciesAsProjects(MavenProject currentProject, ProjectCollectionResult collectionResult) {
Set<String> habushuDeps = currentProject.getDependencies().stream()
.filter(d -> HABUSHU.equals(d.getType()))
.map(ContainerizeDepsMojo::toGav)
.collect(Collectors.toSet());
for (MavenProject project : getSession().getProjects()) {
if (habushuDeps.contains(toGav(project))) {
logger.info("Found project {} as habushu-type dependency.", project);
habushuProjects.add(project);
collectHabushuDependenciesAsProjects(project, habushuProjects);
collectionResult.addProject(project);
collectHabushuDependenciesAsProjects(project, collectionResult);
}
}
return collectionResult;
}

protected static String toGav(Dependency dependency) {
@@ -261,7 +292,60 @@ protected void setAnchorSourceDirectory(File anchorSourceDirectory) {
this.anchorSourceDirectory = anchorSourceDirectory;
}

public File getAnchorOutputDirectory() {
public Path getAnchorOutputDirectory() {
return anchorOutputDirectory;
}

protected void setDockerfile(File dockerfile) {
this.dockerfile = dockerfile;
}

protected void setUpdateDockerfile(boolean update) {
this.updateDockerfile = update;
}

protected void performDockerfileUpdateForVirtualEnvironment(Path targetProjectPath) {
Path outputDir = session.getCurrentProject().getBasedir().toPath().relativize(this.anchorOutputDirectory);
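// For illustration (hypothetical paths): if the module's basedir is /repo/my-service and
// anchorOutputDirectory is /repo/my-service/target/containerize-support/src, the
// relativize call above yields target/containerize-support/src, a path the generated
// Dockerfile stages can use relative to the build context.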
String updatedDockerfile =
ContainerizeDepsDockerfileHelper.updateDockerfileWithContainerStageLogic(
this.dockerfile, outputDir.toString(), targetProjectPath.toString());

try (Writer writer = new FileWriter(this.dockerfile)) {
writer.write(updatedDockerfile);

} catch (IOException e) {
throw new HabushuException("Unable to update Dockerfile.", e);
}
}

/**
* Result object for collecting Maven projects that are required to containerize a given Habushu project. There is
* one "primary" project that is the direct target of containerization. Other Habushu projects are included when
* they are monorepo dependencies of the primary project.
*/
protected static class ProjectCollectionResult {
private final Dependency directDependency;
private final Set<MavenProject> habushuProjects; // includes the primary project
private MavenProject primaryProject;
public ProjectCollectionResult(Dependency directDependency) {
this.directDependency = directDependency;
this.habushuProjects = new HashSet<>();
}
public void addProject(MavenProject project) {
this.habushuProjects.add(project);
if (toGav(directDependency).equals(toGav(project))) {
primaryProject = project;
}
}
/**
* @return all projects including the primary project
*/
public Set<MavenProject> getAllProjects() {
return habushuProjects;
}
public MavenProject getPrimaryProject() {
return primaryProject;
}
}

}
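
For reference, a minimal sketch of how ProjectCollectionResult behaves (the sketch class and GAVs are hypothetical, and it assumes toGav compares groupId:artifactId:version; it sits in the same package so the protected nested class is visible): the project whose GAV matches the single direct habushu-type dependency becomes the primary project, while other collected monorepo dependencies only join the overall set.

package org.technologybrewery.habushu;

import org.apache.maven.model.Dependency;
import org.apache.maven.project.MavenProject;

public class ProjectCollectionResultSketch {

    private static MavenProject project(String artifactId) {
        MavenProject project = new MavenProject();
        project.setGroupId("org.example");
        project.setArtifactId(artifactId);
        project.setVersion("1.0.0");
        return project;
    }

    public static void main(String[] args) {
        // the single direct habushu-type dependency declared in the consuming pom
        Dependency direct = new Dependency();
        direct.setGroupId("org.example");
        direct.setArtifactId("monorepo-service");
        direct.setVersion("1.0.0");
        direct.setType("habushu");

        ContainerizeDepsMojo.ProjectCollectionResult result =
                new ContainerizeDepsMojo.ProjectCollectionResult(direct);
        result.addProject(project("shared-python-lib")); // transitive monorepo dependency
        result.addProject(project("monorepo-service"));  // GAV matches, becomes primary

        System.out.println(result.getPrimaryProject().getArtifactId()); // monorepo-service
        System.out.println(result.getAllProjects().size());             // 2
    }
}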
119 changes: 119 additions & 0 deletions ContainerizeDepsDockerfileHelper.java
@@ -0,0 +1,119 @@
package org.technologybrewery.habushu.util;

import org.technologybrewery.habushu.HabushuException;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class ContainerizeDepsDockerfileHelper {
public static final String HABUSHU_FINAL_STAGE = "#HABUSHU_FINAL_STAGE";
public static final String HABUSHU_BUILDER_STAGE = "#HABUSHU_BUILDER_STAGE";
public static final String HABUSHU_COMMENT_START = " - HABUSHU GENERATED CODE (DO NOT MODIFY)";
public static final String HABUSHU_COMMENT_END = " - HABUSHU GENERATED CODE (END)";
public static final String REPLACE_WITH_SINGLE_REPO_PROJECT_DIR = "REPLACE_WITH_SINGLE_REPO_PROJECT_DIR";
public static final String ANCHOR_DIRECTORY = "ANCHOR_DIRECTORY";
public static final String FINAL_STAGE_TEMPLATE = "dockerfile_final_stage_template";
public static final String BUILDER_STAGE_TEMPLATE = "dockerfile_builder_stage_template";

/**
* Creates the container stage content from the given template, using the anchor directory and the single module's base directory
* @param template container stage template name
* @param anchorDirectory the anchor directory
* @param moduleBaseDir the module base directory
* @return container stage content
*/
public static String createContainerStageContentFrom(String template, String anchorDirectory, String moduleBaseDir) {
StringBuilder content = new StringBuilder();
InputStream inputStream = ContainerizeDepsDockerfileHelper.class.getClassLoader().getResourceAsStream(template);
if (inputStream == null) {
throw new HabushuException("Could not find template " + template + " on the classpath.");
}

try (InputStreamReader inputStreamReader = new InputStreamReader(inputStream); BufferedReader buffer = new BufferedReader(inputStreamReader)) {
String line = buffer.readLine();

while (line != null) {
line = line.strip();
// use replace (literal) rather than replaceAll (regex) so directory values containing
// regex metacharacters or backslashes are substituted safely; the contains guards also
// keep null arguments (e.g. for the final stage template) from being dereferenced
if (line.contains(ANCHOR_DIRECTORY)) {
line = line.replace(ANCHOR_DIRECTORY, anchorDirectory);
}
if (line.contains(REPLACE_WITH_SINGLE_REPO_PROJECT_DIR)) {
line = line.replace(REPLACE_WITH_SINGLE_REPO_PROJECT_DIR, moduleBaseDir);
}
content.append(line).append("\n");
line = buffer.readLine();
}
} catch (IOException e) {
throw new HabushuException("Could not read from file.", e);
}
return content.toString();
}
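// Example (hypothetical values): with anchorDirectory "target/containerize-support/src"
// and moduleBaseDir "my-service", the builder template line
//   COPY --chown=1001 ANCHOR_DIRECTORY ./containerize-support/
// becomes
//   COPY --chown=1001 target/containerize-support/src ./containerize-support/
// and its WORKDIR resolves to /work-dir/containerize-support/my-service.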

/**
* Update the Dockerfile with container stage logic
* @param dockerFile the Dockerfile to be updated
* @param anchorDirectory the anchor directory
* @param moduleBaseDir the module base directory
* @return updated Dockerfile content
*/
public static String updateDockerfileWithContainerStageLogic(File dockerFile, String anchorDirectory, String moduleBaseDir) {
String builderStageContent = ContainerizeDepsDockerfileHelper.createContainerStageContentFrom(BUILDER_STAGE_TEMPLATE, anchorDirectory, moduleBaseDir);
String finalStageContent = ContainerizeDepsDockerfileHelper.createContainerStageContentFrom(FINAL_STAGE_TEMPLATE, null, null);
StringBuilder content = new StringBuilder();
boolean builderStageContentIncluded = false;
boolean finalStageContentIncluded = false;

boolean skipLine = false;
try (BufferedReader buffer = new BufferedReader(new FileReader(dockerFile))) {
String line = buffer.readLine();

while (line != null) {
line = line.strip();

// start skipping lines once HABUSHU_COMMENT_START is read
if (!skipLine && line.contains(HABUSHU_COMMENT_START)) {
skipLine = true;
}

// stop skipping lines once HABUSHU_COMMENT_END is read
if (skipLine && line.contains(HABUSHU_COMMENT_END)) {
skipLine = false;
}

if (!skipLine) {
if (line.contains(HABUSHU_BUILDER_STAGE)) {
line = wrapWithHabushuComment(builderStageContent, HABUSHU_BUILDER_STAGE);
builderStageContentIncluded = true;
}

if (line.contains(HABUSHU_FINAL_STAGE)) {
line = wrapWithHabushuComment(finalStageContent, HABUSHU_FINAL_STAGE);
finalStageContentIncluded = true;
}
content.append(line).append("\n");
}
line = buffer.readLine();
}
if (!builderStageContentIncluded) {
content.insert(0, wrapWithHabushuComment(builderStageContent, HABUSHU_BUILDER_STAGE) + "\n\n");
}
if (!finalStageContentIncluded) {
content.append("\n");
content.append(wrapWithHabushuComment(finalStageContent, HABUSHU_FINAL_STAGE)).append("\n");
}
} catch (IOException e) {
throw new HabushuException("Could not update Dockerfile with container stage logic.", e);
}
return content.toString();
}

private static String wrapWithHabushuComment(String content, String stage) {
StringBuilder contentBuilder = new StringBuilder();
contentBuilder.append(stage).append(HABUSHU_COMMENT_START).append("\n");
contentBuilder.append(content);
contentBuilder.append(stage).append(HABUSHU_COMMENT_END);
return contentBuilder.toString();
}
}
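
To see the marker mechanism end to end, here is a minimal sketch (the class name, file contents, and directory arguments are hypothetical; it assumes the plugin jar, which bundles the two stage templates as classpath resources, is on the classpath): each marker line in the Dockerfile is expanded into the corresponding generated stage, wrapped in the DO NOT MODIFY comment pair so a rerun can find and regenerate the block in place.

import org.technologybrewery.habushu.util.ContainerizeDepsDockerfileHelper;

import java.io.File;
import java.nio.file.Files;

public class DockerfileUpdateSketch {
    public static void main(String[] args) throws Exception {
        File dockerfile = File.createTempFile("Dockerfile", null);
        Files.writeString(dockerfile.toPath(), String.join("\n",
                "#HABUSHU_BUILDER_STAGE",
                "FROM registry.access.redhat.com/ubi9/python-311:1-66",
                "#HABUSHU_FINAL_STAGE",
                "CMD [\"python\", \"-m\", \"my_service\"]"));

        // the returned content has each marker replaced by a generated stage wrapped in
        // HABUSHU GENERATED CODE (DO NOT MODIFY) ... (END) comments; the mojo, not the
        // helper, writes the result back to the Dockerfile
        String updated = ContainerizeDepsDockerfileHelper.updateDockerfileWithContainerStageLogic(
                dockerfile, "target/containerize-support/src", "my-service");
        System.out.println(updated);
    }
}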
20 changes: 20 additions & 0 deletions dockerfile_builder_stage_template
@@ -0,0 +1,20 @@
FROM registry.access.redhat.com/ubi9/python-311:1-66 as habushu_builder
# Poetry and supporting plugin installations
RUN pip install poetry && \
poetry self add poetry-monorepo-dependency-plugin && \
poetry self add poetry-plugin-bundle

WORKDIR /work-dir
COPY --chown=1001 ANCHOR_DIRECTORY ./containerize-support/
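# flip editable (develop = true) path dependencies to develop = false so monorepo
# dependencies are installed into the bundled venv as built packages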
RUN find . -type f -name pyproject.toml -exec sed -i 's|develop[[:blank:]]*=[[:blank:]]*true|develop = false|g' {} \;

USER root
WORKDIR /work-dir/containerize-support/REPLACE_WITH_SINGLE_REPO_PROJECT_DIR
# ensure Poetry's cache directory is properly set
ENV POETRY_CACHE_DIR="/.cache/pypoetry"
# instruct Docker to persistently store the container's Poetry cache while
# resolving dependencies during the lock process of the venv-specifier and
# building/exporting the virtual environment of the venv-specifier to /venv
RUN --mount=type=cache,target=/.cache/pypoetry/ \
poetry lock && \
poetry bundle venv /venv
6 changes: 6 additions & 0 deletions dockerfile_final_stage_template
@@ -0,0 +1,6 @@
# copy the pre-built venv from the builder stage
COPY --from=habushu_builder /venv /opt/venv
# configure the container to use the venv
ENV PATH="/opt/venv/bin:$PATH"
# update the venv python symlink to point to the default python executable
RUN ln -sf $(which python) /opt/venv/bin/python