Replace the locking spy with locking in the MojoExecutor (#523)

This is a port of https://github.com/apache/maven/pull/628 to mvnd; it should go away when we upgrade to Maven 3.8.4, which should include the fix.
This commit is contained in:
Guillaume Nodet
2021-12-06 18:00:33 +01:00
committed by GitHub
parent 00a404c2c1
commit 588e87993d
2 changed files with 414 additions and 85 deletions

View File

@@ -0,0 +1,414 @@
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.maven.lifecycle.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.artifact.resolver.filter.CumulativeScopeArtifactFilter;
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.lifecycle.LifecycleExecutionException;
import org.apache.maven.lifecycle.MissingProjectException;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.MavenPluginManager;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.PluginConfigurationException;
import org.apache.maven.plugin.PluginIncompatibleException;
import org.apache.maven.plugin.PluginManagerException;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.util.StringUtils;
import org.eclipse.aether.SessionData;
/**
* <p>
* Executes an individual mojo
* </p>
* <strong>NOTE:</strong> This class is not part of any public api and can be changed or deleted without prior notice.
*
* @author Jason van Zyl
* @author Benjamin Bentmann
* @author Kristian Rosenvold
* @since 3.0
*/
@Component(role = MojoExecutor.class)
public class MojoExecutor {
@Requirement
private BuildPluginManager pluginManager;
@Requirement
private MavenPluginManager mavenPluginManager;
@Requirement
private LifecycleDependencyResolver lifeCycleDependencyResolver;
@Requirement
private ExecutionEventCatapult eventCatapult;
private final ReadWriteLock aggregatorLock = new ReentrantReadWriteLock();
public MojoExecutor() {
}
public DependencyContext newDependencyContext(MavenSession session, List<MojoExecution> mojoExecutions) {
Set<String> scopesToCollect = new TreeSet<>();
Set<String> scopesToResolve = new TreeSet<>();
collectDependencyRequirements(scopesToResolve, scopesToCollect, mojoExecutions);
return new DependencyContext(session.getCurrentProject(), scopesToCollect, scopesToResolve);
}
private void collectDependencyRequirements(Set<String> scopesToResolve, Set<String> scopesToCollect,
Collection<MojoExecution> mojoExecutions) {
for (MojoExecution mojoExecution : mojoExecutions) {
MojoDescriptor mojoDescriptor = mojoExecution.getMojoDescriptor();
scopesToResolve.addAll(toScopes(mojoDescriptor.getDependencyResolutionRequired()));
scopesToCollect.addAll(toScopes(mojoDescriptor.getDependencyCollectionRequired()));
}
}
private Collection<String> toScopes(String classpath) {
Collection<String> scopes = Collections.emptyList();
if (StringUtils.isNotEmpty(classpath)) {
if (Artifact.SCOPE_COMPILE.equals(classpath)) {
scopes = Arrays.asList(Artifact.SCOPE_COMPILE, Artifact.SCOPE_SYSTEM, Artifact.SCOPE_PROVIDED);
} else if (Artifact.SCOPE_RUNTIME.equals(classpath)) {
scopes = Arrays.asList(Artifact.SCOPE_COMPILE, Artifact.SCOPE_RUNTIME);
} else if (Artifact.SCOPE_COMPILE_PLUS_RUNTIME.equals(classpath)) {
scopes = Arrays.asList(Artifact.SCOPE_COMPILE, Artifact.SCOPE_SYSTEM, Artifact.SCOPE_PROVIDED,
Artifact.SCOPE_RUNTIME);
} else if (Artifact.SCOPE_RUNTIME_PLUS_SYSTEM.equals(classpath)) {
scopes = Arrays.asList(Artifact.SCOPE_COMPILE, Artifact.SCOPE_SYSTEM, Artifact.SCOPE_RUNTIME);
} else if (Artifact.SCOPE_TEST.equals(classpath)) {
scopes = Arrays.asList(Artifact.SCOPE_COMPILE, Artifact.SCOPE_SYSTEM, Artifact.SCOPE_PROVIDED,
Artifact.SCOPE_RUNTIME, Artifact.SCOPE_TEST);
}
}
return Collections.unmodifiableCollection(scopes);
}
public void execute(MavenSession session, List<MojoExecution> mojoExecutions, ProjectIndex projectIndex)
throws LifecycleExecutionException
{
DependencyContext dependencyContext = newDependencyContext(session, mojoExecutions);
PhaseRecorder phaseRecorder = new PhaseRecorder(session.getCurrentProject());
for (MojoExecution mojoExecution : mojoExecutions) {
execute(session, mojoExecution, projectIndex, dependencyContext, phaseRecorder);
}
}
public void execute(MavenSession session, MojoExecution mojoExecution, ProjectIndex projectIndex,
DependencyContext dependencyContext, PhaseRecorder phaseRecorder)
throws LifecycleExecutionException {
execute(session, mojoExecution, projectIndex, dependencyContext);
phaseRecorder.observeExecution(mojoExecution);
}
private void execute(MavenSession session, MojoExecution mojoExecution, ProjectIndex projectIndex,
DependencyContext dependencyContext)
throws LifecycleExecutionException {
MojoDescriptor mojoDescriptor = mojoExecution.getMojoDescriptor();
try {
mavenPluginManager.checkRequiredMavenVersion(mojoDescriptor.getPluginDescriptor());
} catch (PluginIncompatibleException e) {
throw new LifecycleExecutionException(mojoExecution, session.getCurrentProject(), e);
}
if (mojoDescriptor.isProjectRequired() && !session.getRequest().isProjectPresent()) {
Throwable cause = new MissingProjectException(
"Goal requires a project to execute" + " but there is no POM in this directory ("
+ session.getExecutionRootDirectory() + ")."
+ " Please verify you invoked Maven from the correct directory.");
throw new LifecycleExecutionException(mojoExecution, null, cause);
}
if (mojoDescriptor.isOnlineRequired() && session.isOffline()) {
if (MojoExecution.Source.CLI.equals(mojoExecution.getSource())) {
Throwable cause = new IllegalStateException(
"Goal requires online mode for execution" + " but Maven is currently offline.");
throw new LifecycleExecutionException(mojoExecution, session.getCurrentProject(), cause);
} else {
eventCatapult.fire(ExecutionEvent.Type.MojoSkipped, session, mojoExecution);
return;
}
}
try (ProjectLock lock = new ProjectLock(session, mojoDescriptor, aggregatorLock)) {
doExecute(session, mojoExecution, projectIndex, dependencyContext);
}
}
/**
* Aggregating mojo executions (possibly) modify all MavenProjects, including those that are currently in use
* by concurrently running mojo executions. To prevent race conditions, an aggregating execution will block
* all other executions until finished.
* We also lock on a given project to forbid a forked lifecycle to be executed concurrently with the project.
* TODO: ideally, the builder should take care of the ordering in a smarter way
* TODO: and concurrency issues fixed with MNG-7157
*/
private static class ProjectLock implements AutoCloseable {
final Lock acquiredAggregatorLock;
final Lock acquiredProjectLock;
ProjectLock(MavenSession session, MojoDescriptor mojoDescriptor, ReadWriteLock aggregatorLock) {
if (session.getRequest().getDegreeOfConcurrency() > 1) {
boolean aggregator = mojoDescriptor.isAggregator();
acquiredAggregatorLock = aggregator ? aggregatorLock.writeLock() : aggregatorLock.readLock();
acquiredProjectLock = getProjectLock(session);
acquiredAggregatorLock.lock();
acquiredProjectLock.lock();
} else {
acquiredAggregatorLock = null;
acquiredProjectLock = null;
}
}
@Override
public void close() {
// release the lock in the reverse order of the acquisition
if (acquiredProjectLock != null) {
acquiredProjectLock.unlock();
}
if (acquiredAggregatorLock != null) {
acquiredAggregatorLock.unlock();
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private Lock getProjectLock(MavenSession session) {
SessionData data = session.getRepositorySession().getData();
ConcurrentMap<MavenProject, Lock> locks = (ConcurrentMap) data.get(ProjectLock.class);
// initialize the value if not already done (in case of a concurrent access) to the method
if (locks == null) {
// the call to data.set(k, null, v) is effectively a call to data.putIfAbsent(k, v)
data.set(ProjectLock.class, null, new ConcurrentHashMap<>());
locks = (ConcurrentMap) data.get(ProjectLock.class);
}
Lock acquiredProjectLock = locks.get(session.getCurrentProject());
if (acquiredProjectLock == null) {
acquiredProjectLock = new ReentrantLock();
Lock prev = locks.putIfAbsent(session.getCurrentProject(), acquiredProjectLock);
if (prev != null) {
acquiredProjectLock = prev;
}
}
return acquiredProjectLock;
}
}
private void doExecute(MavenSession session, MojoExecution mojoExecution, ProjectIndex projectIndex,
DependencyContext dependencyContext)
throws LifecycleExecutionException {
MojoDescriptor mojoDescriptor = mojoExecution.getMojoDescriptor();
List<MavenProject> forkedProjects = executeForkedExecutions(mojoExecution, session, projectIndex);
ensureDependenciesAreResolved(mojoDescriptor, session, dependencyContext);
eventCatapult.fire(ExecutionEvent.Type.MojoStarted, session, mojoExecution);
try {
try {
pluginManager.executeMojo(session, mojoExecution);
} catch (MojoFailureException | PluginManagerException | PluginConfigurationException
| MojoExecutionException e) {
throw new LifecycleExecutionException(mojoExecution, session.getCurrentProject(), e);
}
eventCatapult.fire(ExecutionEvent.Type.MojoSucceeded, session, mojoExecution);
} catch (LifecycleExecutionException e) {
eventCatapult.fire(ExecutionEvent.Type.MojoFailed, session, mojoExecution, e);
throw e;
} finally {
for (MavenProject forkedProject : forkedProjects) {
forkedProject.setExecutionProject(null);
}
}
}
public void ensureDependenciesAreResolved(MojoDescriptor mojoDescriptor, MavenSession session,
DependencyContext dependencyContext)
throws LifecycleExecutionException
{
MavenProject project = dependencyContext.getProject();
boolean aggregating = mojoDescriptor.isAggregator();
if (dependencyContext.isResolutionRequiredForCurrentProject()) {
Collection<String> scopesToCollect = dependencyContext.getScopesToCollectForCurrentProject();
Collection<String> scopesToResolve = dependencyContext.getScopesToResolveForCurrentProject();
lifeCycleDependencyResolver.resolveProjectDependencies(project, scopesToCollect, scopesToResolve, session,
aggregating, Collections.<Artifact> emptySet());
dependencyContext.synchronizeWithProjectState();
}
if (aggregating) {
Collection<String> scopesToCollect = toScopes(mojoDescriptor.getDependencyCollectionRequired());
Collection<String> scopesToResolve = toScopes(mojoDescriptor.getDependencyResolutionRequired());
if (dependencyContext.isResolutionRequiredForAggregatedProjects(scopesToCollect, scopesToResolve)) {
for (MavenProject aggregatedProject : session.getProjects()) {
if (aggregatedProject != project) {
lifeCycleDependencyResolver.resolveProjectDependencies(aggregatedProject, scopesToCollect,
scopesToResolve, session, aggregating,
Collections.<Artifact> emptySet());
}
}
}
}
ArtifactFilter artifactFilter = getArtifactFilter(mojoDescriptor);
List<MavenProject> projectsToResolve = LifecycleDependencyResolver.getProjects(session.getCurrentProject(), session,
mojoDescriptor.isAggregator());
for (MavenProject projectToResolve : projectsToResolve) {
projectToResolve.setArtifactFilter(artifactFilter);
}
}
private ArtifactFilter getArtifactFilter(MojoDescriptor mojoDescriptor) {
String scopeToResolve = mojoDescriptor.getDependencyResolutionRequired();
String scopeToCollect = mojoDescriptor.getDependencyCollectionRequired();
List<String> scopes = new ArrayList<>(2);
if (StringUtils.isNotEmpty(scopeToCollect)) {
scopes.add(scopeToCollect);
}
if (StringUtils.isNotEmpty(scopeToResolve)) {
scopes.add(scopeToResolve);
}
if (scopes.isEmpty()) {
return null;
} else {
return new CumulativeScopeArtifactFilter(scopes);
}
}
public List<MavenProject> executeForkedExecutions(MojoExecution mojoExecution, MavenSession session,
ProjectIndex projectIndex)
throws LifecycleExecutionException {
List<MavenProject> forkedProjects = Collections.emptyList();
Map<String, List<MojoExecution>> forkedExecutions = mojoExecution.getForkedExecutions();
if (!forkedExecutions.isEmpty()) {
eventCatapult.fire(ExecutionEvent.Type.ForkStarted, session, mojoExecution);
MavenProject project = session.getCurrentProject();
forkedProjects = new ArrayList<>(forkedExecutions.size());
try {
for (Map.Entry<String, List<MojoExecution>> fork : forkedExecutions.entrySet()) {
String projectId = fork.getKey();
int index = projectIndex.getIndices().get(projectId);
MavenProject forkedProject = projectIndex.getProjects().get(projectId);
forkedProjects.add(forkedProject);
MavenProject executedProject = forkedProject.clone();
forkedProject.setExecutionProject(executedProject);
List<MojoExecution> mojoExecutions = fork.getValue();
if (mojoExecutions.isEmpty()) {
continue;
}
try {
session.setCurrentProject(executedProject);
session.getProjects().set(index, executedProject);
projectIndex.getProjects().put(projectId, executedProject);
eventCatapult.fire(ExecutionEvent.Type.ForkedProjectStarted, session, mojoExecution);
execute(session, mojoExecutions, projectIndex);
eventCatapult.fire(ExecutionEvent.Type.ForkedProjectSucceeded, session, mojoExecution);
} catch (LifecycleExecutionException e) {
eventCatapult.fire(ExecutionEvent.Type.ForkedProjectFailed, session, mojoExecution, e);
throw e;
} finally {
projectIndex.getProjects().put(projectId, forkedProject);
session.getProjects().set(index, forkedProject);
session.setCurrentProject(project);
}
}
eventCatapult.fire(ExecutionEvent.Type.ForkSucceeded, session, mojoExecution);
} catch (LifecycleExecutionException e) {
eventCatapult.fire(ExecutionEvent.Type.ForkFailed, session, mojoExecution, e);
throw e;
}
}
return forkedProjects;
}
}

View File

@@ -1,85 +0,0 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mvndaemon.mvnd.execution;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.maven.eventspy.AbstractEventSpy;
import org.apache.maven.eventspy.EventSpy;
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.project.MavenProject;
import org.eclipse.aether.SessionData;
import org.eclipse.sisu.Typed;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * EventSpy implementation that provides a per-project locking mechanism
 * to make sure a given project cannot be built twice concurrently.
 * This case can happen when running parallel builds with forked lifecycles.
 */
@Singleton
@Named
@Typed(EventSpy.class)
public class LockingEventSpy extends AbstractEventSpy {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Key under which the project-to-lock map is stored in the repository session data.
    private static final Object LOCKS_KEY = new Object();

    /**
     * Returns the lock associated with the event's project, lazily creating the
     * project-to-lock map in the repository session's {@link SessionData} on first use.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private Lock getLock(ExecutionEvent event) {
        SessionData data = event.getSession().getRepositorySession().getData();
        Map<MavenProject, Lock> locks = (Map) data.get(LOCKS_KEY);
        // initialize the value if not already done (in case of a concurrent access) to the method
        if (locks == null) {
            // the call to data.set(k, null, v) is effectively a call to data.putIfAbsent(k, v)
            data.set(LOCKS_KEY, null, new ConcurrentHashMap<>());
            locks = (Map) data.get(LOCKS_KEY);
        }
        return locks.computeIfAbsent(event.getProject(), p -> new ReentrantLock());
    }

    /**
     * Acquires the project lock when a (forked) project build starts and releases it when the
     * build succeeds or fails. If the lock is already held, logs a warning and blocks
     * (interruptibly) until the concurrent build of the same project finishes.
     */
    @Override
    public void onEvent(Object event) throws Exception {
        if (event instanceof ExecutionEvent) {
            ExecutionEvent executionEvent = (ExecutionEvent) event;
            switch (executionEvent.getType()) {
            case ProjectStarted:
            case ForkedProjectStarted: {
                Lock lock = getLock(executionEvent);
                // Fast path: uncontended acquisition; otherwise warn and wait.
                if (!lock.tryLock()) {
                    logger.warn("Suspending concurrent execution of project '{}'", executionEvent.getProject());
                    lock.lockInterruptibly();
                    logger.warn("Resuming execution of project '{}'", executionEvent.getProject());
                }
                break;
            }
            case ProjectSucceeded:
            case ProjectFailed:
            case ForkedProjectSucceeded:
            case ForkedProjectFailed:
                // Matching unlock for the lock taken on the corresponding *Started event.
                getLock(executionEvent).unlock();
                break;
            }
        }
    }
}