/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.eclipse.aether.internal.impl.checksum;

import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;

import org.eclipse.aether.MultiRuntimeException;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.impl.RepositorySystemLifecycle;
import org.eclipse.aether.internal.impl.LocalPathComposer;
import org.eclipse.aether.repository.ArtifactRepository;
import org.eclipse.aether.spi.connector.checksum.ChecksumAlgorithmFactory;
import org.eclipse.aether.util.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;

/**
 * Compact file {@link FileTrustedChecksumsSourceSupport} implementation that uses a specified directory as its base
 * directory, where it expects a "summary" file named "checksums.${checksumExt}" for each checksum algorithm.
 * The file format is GNU Coreutils compatible: each line holds a checksum followed by two spaces and the artifact
 * relative path (from the local repository root, without a leading "./"). This means that the trusted checksums
 * summary file can be used to validate artifacts, or be generated, with standard GNU tools such as GNU
 * {@code sha1sum} (for BSD derivatives the same file can be used with the {@code -r} switch).
 * <p>
 * The format supports comments ("#", hash) and empty lines for easier structuring of the file content; both are
 * ignored. Note that their presence makes the summary file incompatible with the GNU Coreutils format. When the
 * summary file is saved, comments and empty lines are lost, and the file is sorted by path name (the second column
 * in the file) for easier diffing.
 * <p>
 * The source is "origin aware" by default, and then also factors the origin repository ID into the summary file
 * name, for example "checksums-central.sha256".
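 * <p>
 * For illustration only (the paths and checksum values below are made up), a "checksums-central.sha256" summary
 * file could contain lines such as:
 * <pre>{@code
 * 2c8f...01ab  org/apache/maven/maven-core/3.9.6/maven-core-3.9.6.jar
 * 91d4...77fe  org/apache/maven/maven-core/3.9.6/maven-core-3.9.6.pom
 * }</pre>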
 * <p>
 * Example commands for managing the summary file (the examples use repository ID "central"):
 * <ul>
 *     <li>To create the summary file: {@code find * -not -name "checksums-central.sha256" -type f -print0 |
 *       xargs -0 sha256sum | sort -k 2 > checksums-central.sha256}</li>
 *     <li>To verify artifacts using the summary file: {@code sha256sum --quiet -c checksums-central.sha256}</li>
 * </ul>
 * <p>
 * The checksums summary file is loaded lazily and remains cached for the lifetime of the component, so file changes
 * made during the component's lifecycle are not picked up. This implementation can simultaneously be used to look up
 * and to record checksums. The recorded checksums become visible to every session and are flushed at repository
 * system shutdown, merged with the existing ones on disk.
 * <p>
 * The name of this implementation is "summaryFile".
 *
 * @see <a href="https://man7.org/linux/man-pages/man1/sha1sum.1.html">sha1sum man page</a>
 * @see <a href="https://www.gnu.org/software/coreutils/manual/coreutils.html#md5sum-invocation">GNU Coreutils: md5sum</a>
 * @since 1.9.0
 */
@Singleton
@Named(SummaryFileTrustedChecksumsSource.NAME)
public final class SummaryFileTrustedChecksumsSource extends FileTrustedChecksumsSourceSupport {
    public static final String NAME = "summaryFile";

    private static final String CHECKSUMS_FILE_PREFIX = "checksums";

    private static final Logger LOGGER = LoggerFactory.getLogger(SummaryFileTrustedChecksumsSource.class);

    private final LocalPathComposer localPathComposer;

    private final RepositorySystemLifecycle repositorySystemLifecycle;

    private final ConcurrentHashMap<Path, ConcurrentHashMap<String, String>> checksums;

    private final ConcurrentHashMap<Path, Boolean> changedChecksums;

    private final AtomicBoolean onShutdownHandlerRegistered;

    @Inject
    public SummaryFileTrustedChecksumsSource(
            LocalPathComposer localPathComposer, RepositorySystemLifecycle repositorySystemLifecycle) {
        super(NAME);
        this.localPathComposer = requireNonNull(localPathComposer);
        this.repositorySystemLifecycle = requireNonNull(repositorySystemLifecycle);
        this.checksums = new ConcurrentHashMap<>();
        this.changedChecksums = new ConcurrentHashMap<>();
        this.onShutdownHandlerRegistered = new AtomicBoolean(false);
    }

    @Override
    protected Map<String, String> doGetTrustedArtifactChecksums(
            RepositorySystemSession session,
            Artifact artifact,
            ArtifactRepository artifactRepository,
            List<ChecksumAlgorithmFactory> checksumAlgorithmFactories) {
        final HashMap<String, String> result = new HashMap<>();
        final Path basedir = getBasedir(session, false);
        if (Files.isDirectory(basedir)) {
            final String artifactPath = localPathComposer.getPathForArtifact(artifact, false);
            final boolean originAware = isOriginAware(session);
            for (ChecksumAlgorithmFactory checksumAlgorithmFactory : checksumAlgorithmFactories) {
                Path summaryFile = summaryFile(
                        basedir, originAware, artifactRepository.getId(), checksumAlgorithmFactory.getFileExtension());
                ConcurrentHashMap<String, String> algorithmChecksums = checksums.computeIfAbsent(summaryFile, f -> {
                    ConcurrentHashMap<String, String> loaded = loadProvidedChecksums(summaryFile);
                    if (Files.isRegularFile(summaryFile)) {
                        LOGGER.info(
                                "Loaded {} {} trusted checksums for remote repository {}",
                                loaded.size(),
                                checksumAlgorithmFactory.getName(),
                                artifactRepository.getId());
                    }
                    return loaded;
                });
                String checksum = algorithmChecksums.get(artifactPath);
                if (checksum != null) {
                    result.put(checksumAlgorithmFactory.getName(), checksum);
                }
            }
        }
        return result;
    }

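    /**
     * Returns a writer that records checksums into the in-memory cache. On the first invocation it also registers
     * an on-system-ended handler with the {@link RepositorySystemLifecycle}, so that the recorded checksums are
     * flushed to the summary files at repository system shutdown (see {@code saveRecordedLines()}).
     */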
    @Override
    protected SummaryFileWriter doGetTrustedArtifactChecksumsWriter(RepositorySystemSession session) {
        if (onShutdownHandlerRegistered.compareAndSet(false, true)) {
            repositorySystemLifecycle.addOnSystemEndedHandler(this::saveRecordedLines);
        }
        return new SummaryFileWriter(checksums, getBasedir(session, true), isOriginAware(session));
    }

    /**
     * Returns the summary file path. The file itself and its parent directories may not exist; this method merely
     * calculates the path.
     */
    private Path summaryFile(Path basedir, boolean originAware, String repositoryId, String checksumExtension) {
        String fileName = CHECKSUMS_FILE_PREFIX;
        if (originAware) {
            fileName += "-" + repositoryId;
        }
        return basedir.resolve(fileName + "." + checksumExtension);
    }

    private ConcurrentHashMap<String, String> loadProvidedChecksums(Path summaryFile) {
        ConcurrentHashMap<String, String> result = new ConcurrentHashMap<>();
        if (Files.isRegularFile(summaryFile)) {
            try (BufferedReader reader = Files.newBufferedReader(summaryFile, StandardCharsets.UTF_8)) {
                String line;
                while ((line = reader.readLine()) != null) {
                    if (!line.startsWith("#") && !line.isEmpty()) {
                        String[] parts = line.split("  ", 2); // checksum, two spaces, artifact relative path
                        if (parts.length == 2) {
                            String newChecksum = parts[0];
                            String artifactPath = parts[1];
                            String oldChecksum = result.put(artifactPath, newChecksum);
                            if (oldChecksum != null) {
                                if (Objects.equals(oldChecksum, newChecksum)) {
                                    LOGGER.warn(
                                            "Checksums file '{}' contains duplicate checksums for artifact {}: {}",
                                            summaryFile,
                                            artifactPath,
                                            oldChecksum);
                                } else {
                                    LOGGER.warn(
                                            "Checksums file '{}' contains different checksums for artifact {}: "
                                                    + "old '{}' replaced by new '{}'",
                                            summaryFile,
                                            artifactPath,
                                            oldChecksum,
                                            newChecksum);
                                }
                            }
                        } else {
                            LOGGER.warn("Checksums file '{}' ignored malformed line '{}'", summaryFile, line);
                        }
                    }
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
        return result;
    }

    private class SummaryFileWriter implements Writer {
        private final ConcurrentHashMap<Path, ConcurrentHashMap<String, String>> cache;

        private final Path basedir;

        private final boolean originAware;

        private SummaryFileWriter(
                ConcurrentHashMap<Path, ConcurrentHashMap<String, String>> cache, Path basedir, boolean originAware) {
            this.cache = cache;
            this.basedir = basedir;
            this.originAware = originAware;
        }

        @Override
        public void addTrustedArtifactChecksums(
                Artifact artifact,
                ArtifactRepository artifactRepository,
                List<ChecksumAlgorithmFactory> checksumAlgorithmFactories,
                Map<String, String> trustedArtifactChecksums) {
            String artifactPath = localPathComposer.getPathForArtifact(artifact, false);
            for (ChecksumAlgorithmFactory checksumAlgorithmFactory : checksumAlgorithmFactories) {
                Path summaryFile = summaryFile(
                        basedir, originAware, artifactRepository.getId(), checksumAlgorithmFactory.getFileExtension());
                String checksum = requireNonNull(trustedArtifactChecksums.get(checksumAlgorithmFactory.getName()));

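                // Record the checksum in the per-summary-file in-memory cache; on-disk entries are loaded lazily on
                // first touch so that replacements of existing checksums can be detected and logged below.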
                String oldChecksum = cache.computeIfAbsent(summaryFile, k -> loadProvidedChecksums(summaryFile))
                        .put(artifactPath, checksum);

                if (oldChecksum == null) {
                    changedChecksums.put(summaryFile, Boolean.TRUE); // new
                } else if (!Objects.equals(oldChecksum, checksum)) {
                    changedChecksums.put(summaryFile, Boolean.TRUE); // replaced
                    LOGGER.info(
                            "Trusted checksum for artifact {} replaced: old {}, new {}",
                            artifact,
                            oldChecksum,
                            checksum);
                }
            }
        }
    }

    /**
     * On-close handler that saves recorded checksums, if any.
     */
    private void saveRecordedLines() {
        if (changedChecksums.isEmpty()) {
            return;
        }

        ArrayList<Exception> exceptions = new ArrayList<>();
        for (Map.Entry<Path, ConcurrentHashMap<String, String>> entry : checksums.entrySet()) {
            Path summaryFile = entry.getKey();
            if (changedChecksums.get(summaryFile) != Boolean.TRUE) {
                continue;
            }
            ConcurrentHashMap<String, String> recordedLines = entry.getValue();
            if (!recordedLines.isEmpty()) {
                try {
                    ConcurrentHashMap<String, String> result = new ConcurrentHashMap<>();
                    result.putAll(loadProvidedChecksums(summaryFile));
                    result.putAll(recordedLines);

                    LOGGER.info("Saving {} checksums to '{}'", result.size(), summaryFile);
                    FileUtils.writeFileWithBackup(
                            summaryFile,
                            p -> Files.write(
                                    p,
                                    result.entrySet().stream()
                                            .sorted(Map.Entry.comparingByKey()) // sort by artifact path (2nd column)
                                            .map(e -> e.getValue() + "  " + e.getKey())
                                            .collect(toList())));
                } catch (IOException e) {
                    exceptions.add(e);
                }
            }
        }
        MultiRuntimeException.mayThrow("session save checksums failure", exceptions);
    }
}