mirror of
https://github.com/github/codeql.git
synced 2026-05-01 03:35:13 +02:00
JS: Replace yarn with manual dependency resolution
This commit is contained in:
@@ -0,0 +1,84 @@
|
||||
package com.semmle.js.dependencies;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import com.semmle.js.dependencies.packument.Packument;
|
||||
|
||||
/**
|
||||
* Asynchronous I/O operations needed for dependency installation.
|
||||
* <p>
|
||||
* The methods in this class are non-blocking, that is, they return more or less immediately, always scheduling the work
|
||||
* in the provided executor service. Requests are cached where it makes sense.
|
||||
*/
|
||||
public class AsyncFetcher {
|
||||
private Fetcher fetcher = new Fetcher();
|
||||
private ExecutorService executor;
|
||||
private Consumer<CompletionException> errorReporter;
|
||||
|
||||
/**
|
||||
* @param executor thread pool to perform I/O tasks
|
||||
* @param errorReporter called once for each error from the underlying I/O tasks
|
||||
*/
|
||||
public AsyncFetcher(ExecutorService executor, Consumer<CompletionException> errorReporter) {
|
||||
this.executor = executor;
|
||||
this.errorReporter = errorReporter;
|
||||
}
|
||||
|
||||
private CompletionException makeError(String message, Exception cause) {
|
||||
CompletionException ex = new CompletionException(message, cause);
|
||||
errorReporter.accept(ex); // Handle here to ensure each exception is logged at most once, not once per consumer
|
||||
throw ex;
|
||||
}
|
||||
|
||||
class CachedOperation<K, V> {
|
||||
private Map<K, CompletableFuture<V>> cache = new LinkedHashMap<>();
|
||||
|
||||
public synchronized CompletableFuture<V> get(K key, Supplier<V> builder) {
|
||||
CompletableFuture<V> future = cache.get(key);
|
||||
if (future == null) {
|
||||
future = CompletableFuture.supplyAsync(() -> builder.get(), executor);
|
||||
cache.put(key, future);
|
||||
}
|
||||
return future;
|
||||
}
|
||||
}
|
||||
|
||||
private CachedOperation<String, Packument> packuments = new CachedOperation<>();
|
||||
|
||||
/**
|
||||
* Returns a future that completes with the packument for the given package.
|
||||
* <p>
|
||||
* At most one fetch will be performed.
|
||||
*/
|
||||
public CompletableFuture<Packument> getPackument(String packageName) {
|
||||
return packuments.get(packageName, () -> {
|
||||
try {
|
||||
return fetcher.getPackument(packageName);
|
||||
} catch (IOException e) {
|
||||
throw makeError("Could not fetch packument for " + packageName, e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the relevant contents of the given tarball URL in the given folder;
|
||||
* the returned future completes when done.
|
||||
*/
|
||||
public CompletableFuture<Void> installFromTarballUrl(String tarballUrl, Path destDir) {
|
||||
return CompletableFuture.runAsync(() -> {
|
||||
try {
|
||||
fetcher.extractFromTarballUrl(tarballUrl, destDir);
|
||||
} catch (IOException e) {
|
||||
throw makeError("Could not install package from " + tarballUrl, e);
|
||||
}
|
||||
}, executor);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,221 @@
|
||||
package com.semmle.js.dependencies;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.semmle.js.dependencies.packument.PackageJson;
|
||||
import com.semmle.util.data.Pair;
|
||||
|
||||
/**
 * Resolves and installs the npm dependencies of a package in a deterministic way,
 * without shelling out to an external package manager.
 * <p>
 * Packument and tarball fetches are delegated to an {@link AsyncFetcher}; this class
 * collects version constraints from the dependency graph and solves them greedily.
 */
public class DependencyResolver {
    private AsyncFetcher fetcher;
    // Accumulated during fetchRelevantPackages; guarded by its own monitor (see addConstraint).
    private List<Constraint> constraints = new ArrayList<>();

    /** Packages we don't try to install because it is part of the same monorepo. */
    private Set<String> packagesInRepo;

    /**
     * A request, made by {@code demandingPackage}, to install {@code targetPackage}
     * at {@code targetPackageVersion}.
     */
    private static class Constraint {
        final PackageJson targetPackage;
        final SemVer targetPackageVersion;
        final PackageJson demandingPackage;
        // Number of dependency edges between the root package and demandingPackage.
        final int depth;

        Constraint(PackageJson targetPackage, SemVer targetPackageVersion, PackageJson demandingPackage, int depth) {
            this.targetPackage = targetPackage;
            this.targetPackageVersion = targetPackageVersion;
            this.demandingPackage = demandingPackage;
            this.depth = depth;
        }

        String getTargetPackageName() {
            return targetPackage.getName(); // Must exist as you can't depend on a package without a name
        }
    }

    /**
     * @param threadPool thread pool used for all network I/O
     * @param packagesInRepo names of packages that live in the repo itself and must not be downloaded
     */
    public DependencyResolver(ExecutorService threadPool, Set<String> packagesInRepo) {
        this.fetcher = new AsyncFetcher(threadPool, this::reportError);
        this.packagesInRepo = packagesInRepo;
    }

    /** Called once per failed I/O task; see {@link AsyncFetcher}. Failures are logged, not fatal. */
    private void reportError(CompletionException ex) {
        System.err.println(ex);
    }

    /** Records a constraint. Synchronized because fetch callbacks run concurrently on the thread pool. */
    private void addConstraint(Constraint constraint) {
        synchronized(constraints) {
            constraints.add(constraint);
        }
    }

    /**
     * Returns the first version number mentioned in the given constraints, excluding upper bounds such as `< 2.0.0`,
     * or `null` if no such version number was found.
     * <p>
     * To help ensure deterministic version resolution, we prefer the version mentioned in the constraint, rather than
     * the latest version satisfying the constraint (as the latter can change in time).
     */
    private SemVer getPreferredVersionFromConstraints(List<VersionConstraint> constraints) {
        for (VersionConstraint constraint : constraints) {
            // Skip upper bounds ("<") and wildcard constraints (which carry no version).
            if (!constraint.getOperator().equals("<") && constraint.getVersion() != null) {
                return constraint.getVersion();
            }
        }
        return null;
    }

    /**
     * Given a set of available versions, pick the oldest version no older than <code>preferredVersion</code>.
     *
     * @return the chosen version and its package.json; both components are null if none qualified
     */
    private Pair<SemVer, PackageJson> getTargetVersion(Map<String, PackageJson> versions, SemVer preferredVersion) {
        // Fast path: the preferred version is published verbatim.
        PackageJson result = versions.get(preferredVersion.toString());
        if (result != null) return Pair.make(preferredVersion, result);
        SemVer bestVersion = null;
        for (Map.Entry<String, PackageJson> entry : versions.entrySet()) {
            SemVer version = SemVer.tryParse(entry.getKey());
            if (version == null) continue; // Could not parse version
            if (version.compareTo(preferredVersion) < 0) continue; // Version is older than preferred version, ignore
            if (bestVersion != null && bestVersion.compareTo(version) < 0) continue; // We already found an older version
            bestVersion = version;
            result = entry.getValue();
        }
        return Pair.make(bestVersion, result);
    }

    /**
     * Fetches all packages and builds up the constraint system needed for resolving.
     *
     * @param pack the package whose dependencies should be followed
     * @param depth distance from the root package (0 for the root itself)
     * @return a future that completes when every transitively reachable packument has been fetched
     */
    private CompletableFuture<Void> fetchRelevantPackages(PackageJson pack, int depth) {
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        List<Map<String, String>> dependencyMaps = depth == 0
                ? Arrays.asList(pack.getDependencies(), pack.getPeerDependencies(), pack.getDevDependencies())
                : Arrays.asList(pack.getDependencies()); // for transitive dependencies, only follow explicit dependencies
        for (Map<String, String> dependencies : dependencyMaps) {
            if (dependencies == null) continue;
            dependencies.forEach((targetName, targetVersions) -> {
                // Packages from the same monorepo are never downloaded.
                if (packagesInRepo.contains(targetName)) {
                    return;
                }
                List<VersionConstraint> constraints = VersionConstraint.parseVersionConstraints(targetVersions);
                SemVer preferredVersion = getPreferredVersionFromConstraints(constraints);
                if (preferredVersion == null) return;
                // Fetch failures are reported elsewhere; `exceptionally` converts them to null so one
                // missing package does not fail the whole resolution.
                futures.add(fetcher.getPackument(targetName).exceptionally(ex -> null).thenCompose(targetPackument -> {
                    if (targetPackument == null) {
                        return CompletableFuture.completedFuture(null);
                    }
                    Map<String, PackageJson> versions = targetPackument.getVersions();
                    if (versions == null) return CompletableFuture.completedFuture(null);

                    // Pick the matching version
                    Pair<SemVer, PackageJson> targetVersionAndPackage = getTargetVersion(versions, preferredVersion);
                    SemVer targetVersion = targetVersionAndPackage.fst();
                    PackageJson targetPackage = targetVersionAndPackage.snd();
                    if (targetPackage == null) return CompletableFuture.completedFuture(null);

                    if (targetName.startsWith("@types/")) {
                        // Deeply install dependencies in `@types`
                        addConstraint(new Constraint(targetPackage, targetVersion, pack, depth));
                        return fetchRelevantPackages(targetPackage, depth + 1);
                    } else if (dependencies != pack.getDevDependencies() && (targetPackage.getTypes() != null || targetPackage.getTypings() != null)) {
                        // If a non-dev dependency contains its own typings, do a shallow install of that package
                        addConstraint(new Constraint(targetPackage, targetVersion, pack, depth));
                    }
                    return CompletableFuture.completedFuture(null);
                }));
            });
        }
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
    }

    /**
     * Resolves the dependencies of the given package in a deterministic way.
     *
     * @return a future mapping each selected package name to the package.json of the chosen version
     */
    private CompletableFuture<Map<String, PackageJson>> resolvePackages(PackageJson rootPackage) {
        // NOTE(review): `constraints` is read below without its lock; this assumes all fetch tasks
        // have completed once the allOf future fires — confirm no stragglers can still add.
        return fetchRelevantPackages(rootPackage, 0).thenApply(void_ -> {
            // Compute the minimum depth from which each dependency is requested.
            Map<String, Integer> packageDepth = new LinkedHashMap<>();
            for (Constraint constraint : constraints) {
                Integer currentDepth = packageDepth.get(constraint.getTargetPackageName());
                if (currentDepth == null || currentDepth > constraint.depth) {
                    packageDepth.put(constraint.getTargetPackageName(), constraint.depth);
                }
            }

            // We use a greedy solver: sort the constraints and then satisfy them eagerly in that order.
            // Constraints closer to the root win; ties broken by name, then by newest version.
            constraints.sort((c1, c2) -> {
                int cmp;

                cmp = Integer.compare(packageDepth.get(c1.getTargetPackageName()), packageDepth.get(c2.getTargetPackageName()));
                if (cmp != 0) return cmp;

                cmp = c1.getTargetPackageName().compareTo(c2.getTargetPackageName());
                if (cmp != 0) return cmp;

                // Pick the most recent version, so reverse-sort by package version.
                cmp = -c1.targetPackageVersion.compareTo(c2.targetPackageVersion);
                if (cmp != 0) return cmp;

                return 0;
            });

            Map<String, PackageJson> selectedPackages = new LinkedHashMap<>();
            for (Constraint constraint : constraints) {
                if (selectedPackages.containsKey(constraint.getTargetPackageName())) {
                    // Too bad, we already picked a version for this package. Ignore the constraint.
                    continue;
                }
                if (constraint.demandingPackage != rootPackage) {
                    PackageJson selectedDemander = selectedPackages.get(constraint.demandingPackage.getName());
                    if (selectedDemander != null && selectedDemander != constraint.demandingPackage) {
                        // The constraint comes from a package version we already decided not to install (a different version was picked).
                        // There is no need to try to satisfy this constraint, so ignore it.
                        continue;
                    }
                }
                System.out.println("Picked " + constraint.getTargetPackageName() + "@" + constraint.targetPackageVersion);
                selectedPackages.put(constraint.getTargetPackageName(), constraint.targetPackage);
            }

            return selectedPackages;
        });
    }

    /**
     * Resolves and downloads the dependencies of {@code rootPackage} into {@code nodeModulesDir};
     * the returned future completes when all tarballs have been extracted.
     */
    public CompletableFuture<Void> installDependencies(PackageJson rootPackage, Path nodeModulesDir) {
        return resolvePackages(rootPackage).thenCompose(resolvedPackages -> {
            List<CompletableFuture<Void>> futures = new ArrayList<>();
            resolvedPackages.forEach((name, targetPackage) -> {
                // toSafePath guards against package names escaping nodeModulesDir.
                Path destinationDir = nodeModulesDir.resolve(Fetcher.toSafePath(name));
                futures.add(fetcher.installFromTarballUrl(targetPackage.getDist().getTarball(), destinationDir));
            });
            return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
        });
    }

    /** Entry point which installs dependencies from a given `package.json`, used for testing and benchmarking. */
    public static void main(String[] args) throws IOException {
        ExecutorService executors = Executors.newFixedThreadPool(50);
        try {
            DependencyResolver resolver = new DependencyResolver(executors, Collections.emptySet());
            for (String packageJsonPath : args) {
                Path path = Paths.get(packageJsonPath).toAbsolutePath();
                PackageJson packageJson = new Gson().fromJson(Files.newBufferedReader(path), PackageJson.class);
                resolver.installDependencies(packageJson, path.getParent().resolve("node_modules")).join();
            }
            System.out.println("Done");
        } finally {
            executors.shutdown();
        }
    }
}
|
||||
140
javascript/extractor/src/com/semmle/js/dependencies/Fetcher.java
Normal file
140
javascript/extractor/src/com/semmle/js/dependencies/Fetcher.java
Normal file
@@ -0,0 +1,140 @@
|
||||
package com.semmle.js.dependencies;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.BufferedOutputStream;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.PushbackInputStream;
|
||||
import java.io.Reader;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.semmle.js.dependencies.packument.Packument;
|
||||
|
||||
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
|
||||
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
|
||||
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
|
||||
import org.apache.commons.compress.utils.IOUtils;
|
||||
|
||||
/**
|
||||
* Synchronous I/O operations needed for dependency installation.
|
||||
* <p>
|
||||
* By design, the methods in this class are thread-safe, synchronous (blocking), and do not cache anything.
|
||||
* <p>
|
||||
* Should only be used through the {@link AsyncFetcher} class, which wraps this class with scheduling and caching.
|
||||
*/
|
||||
public class Fetcher {
|
||||
private Object mkdirpLock = new Object();
|
||||
|
||||
/** Creates the given directory and its parent directories. Only one thread is allowed to create directories at once. */
|
||||
private void mkdirp(Path dir) throws IOException {
|
||||
synchronized (mkdirpLock) {
|
||||
Files.createDirectories(dir);
|
||||
}
|
||||
}
|
||||
|
||||
private static final Pattern validPackageName = Pattern.compile("(?:@[\\w.-]+\\/)?\\w[\\w.-]*");
|
||||
|
||||
private boolean isValidPackageName(String name) {
|
||||
return validPackageName.matcher(name).matches();
|
||||
}
|
||||
|
||||
public static Path toSafePath(String string) {
|
||||
if (string == null) return null;
|
||||
Path path = Paths.get(string).normalize();
|
||||
if (path.startsWith("..") || path.isAbsolute()) {
|
||||
return null;
|
||||
}
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Submits a GET request to the given URL and returns an input with the response.
|
||||
*/
|
||||
private InputStream fetch(String url) throws IOException {
|
||||
URLConnection connection = new URL(url).openConnection();
|
||||
connection.setRequestProperty("Accept-Encoding", "gzip, identity, *");
|
||||
connection.setDoInput(true);
|
||||
connection.connect();
|
||||
InputStream input = connection.getInputStream();
|
||||
String encoding = connection.getContentEncoding();
|
||||
if ("gzip".equals(encoding)) {
|
||||
return new GzipCompressorInputStream(new BufferedInputStream(input));
|
||||
} else {
|
||||
return input;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the packument for the given package (containing all versions of the package.json).
|
||||
*/
|
||||
public Packument getPackument(String packageName) throws IOException {
|
||||
if (!isValidPackageName(packageName)) {
|
||||
throw new IOException("Package name contains unexpected characters:" + packageName);
|
||||
}
|
||||
System.out.println("Fetching package metadata for " + packageName);
|
||||
try (Reader reader = new BufferedReader(new InputStreamReader(fetch("https://registry.npmjs.org/" + packageName)))) {
|
||||
return new Gson().fromJson(reader, Packument.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the package at the given tarball URL into the given directory.
|
||||
*
|
||||
* Only `package.json` and `.d.ts` files are extracted.
|
||||
*/
|
||||
public void extractFromTarballUrl(String tarballUrl, Path destDir) throws IOException {
|
||||
if (!tarballUrl.startsWith("https://registry.npmjs.org/") || !tarballUrl.endsWith(".tgz")) { // Paranoid check
|
||||
throw new IOException("Tarball URL has unexpected format: " + tarballUrl);
|
||||
}
|
||||
System.out.println("Unpacking " + tarballUrl + " to " + destDir);
|
||||
try (InputStream rawStream = new URL(tarballUrl).openStream()) {
|
||||
// Despite having the .tgz extension, the file is not always gzipped, sometimes it's just a raw tar archive,
|
||||
// regardless of what Accept-Encoding header we send.
|
||||
// Sniff the header to detect which is the case.
|
||||
PushbackInputStream pushback = new PushbackInputStream(rawStream, 2);
|
||||
int byte1 = pushback.read();
|
||||
int byte2 = pushback.read();
|
||||
pushback.unread(byte2);
|
||||
pushback.unread(byte1);
|
||||
InputStream decompressedStream = (byte1 == 31 && byte2 == 139)
|
||||
? new GzipCompressorInputStream(new BufferedInputStream(pushback))
|
||||
: pushback;
|
||||
TarArchiveInputStream stream = new TarArchiveInputStream(new BufferedInputStream(decompressedStream));
|
||||
TarArchiveEntry tarEntry;
|
||||
while ((tarEntry = stream.getNextTarEntry()) != null) {
|
||||
if (!stream.canReadEntryData(tarEntry)) {
|
||||
continue;
|
||||
}
|
||||
if (tarEntry.isDirectory()) {
|
||||
continue; // We create directories on demand.
|
||||
}
|
||||
Path entryPath = toSafePath(tarEntry.getName());
|
||||
if (entryPath == null) continue;
|
||||
|
||||
// Strip off the leading folder name.
|
||||
// The entire package is inside a folder, but the name of that folder is unspecified and its name varies.
|
||||
if (entryPath.getNameCount() < 2) continue;
|
||||
entryPath = entryPath.subpath(1, entryPath.getNameCount());
|
||||
|
||||
String filename = entryPath.getFileName().toString();
|
||||
if (!filename.endsWith(".d.ts") && !filename.equals("package.json")) {
|
||||
continue; // Only extract .d.ts files and package.json
|
||||
}
|
||||
Path outputFile = destDir.resolve(entryPath);
|
||||
mkdirp(outputFile.getParent());
|
||||
try (OutputStream output = new BufferedOutputStream(Files.newOutputStream(outputFile))) {
|
||||
IOUtils.copy(stream, output);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
106
javascript/extractor/src/com/semmle/js/dependencies/SemVer.java
Normal file
106
javascript/extractor/src/com/semmle/js/dependencies/SemVer.java
Normal file
@@ -0,0 +1,106 @@
|
||||
package com.semmle.js.dependencies;
|
||||
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
 * A parsed semantic version number ("1.2.3", "2.0.0-beta.1") with an optional pre-release tag.
 * <p>
 * Instances are immutable in practice (no mutators) and safe to use as map keys.
 */
public class SemVer implements Comparable<SemVer> {
    private int major, minor, patch;
    // Never null; the empty string means "no pre-release tag". Stored WITHOUT the leading '-'.
    private String preRelease;

    /**
     * @param preRelease the pre-release tag without its leading '-', or {@code null} for none
     */
    public SemVer(int major, int minor, int patch, String preRelease) {
        this.major = major;
        this.minor = minor;
        this.patch = patch;
        if (preRelease == null) {
            preRelease = "";
        }
        this.preRelease = preRelease;
    }

    public int getMajor() {
        return major;
    }

    public int getMinor() {
        return minor;
    }

    public int getPatch() {
        return patch;
    }

    /** The pre-release tag without its leading '-', or the empty string if there is none. */
    public String getPreRelease() {
        return preRelease;
    }

    // Group 4 captures the pre-release tag including its leading '-';
    // group 5 swallows build metadata ("+..."), which is ignored.
    private static final Pattern pattern = Pattern.compile("(\\d+)(?:\\.(\\d+)(?:\\.(\\d+))?)?(-[0-9A-Za-z.-]*)?(\\+.*)?");

    /**
     * Parses {@code str} as a version number, or returns {@code null} if it could not be parsed.
     * Missing minor/patch components default to 0; build metadata is discarded.
     */
    public static SemVer tryParse(String str) {
        Matcher m = pattern.matcher(str);
        if (!m.matches()) {
            return null;
        }
        int major = Integer.parseInt(m.group(1));
        int minor = m.group(2) == null ? 0 : Integer.parseInt(m.group(2));
        int patch = m.group(3) == null ? 0 : Integer.parseInt(m.group(3));
        // Strip the leading '-' captured by the regex. Previously it was retained, which made
        // toString() emit a double dash ("1.2.3--beta") and broke round-tripping of versions.
        String preRelease = m.group(4) == null ? null : m.group(4).substring(1);
        return new SemVer(major, minor, patch, preRelease);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + major;
        result = prime * result + minor;
        result = prime * result + patch;
        result = prime * result + preRelease.hashCode(); // preRelease is never null (see constructor)
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        SemVer other = (SemVer) obj;
        return major == other.major
                && minor == other.minor
                && patch == other.patch
                && preRelease.equals(other.preRelease);
    }

    /**
     * Compares by major, minor, patch, then pre-release tag.
     * <p>
     * NOTE(review): the pre-release comparison is plain lexicographic; in particular a release
     * ("" tag) sorts BEFORE its pre-releases, unlike full SemVer precedence. Kept as-is since
     * version selection elsewhere depends on this ordering.
     */
    @Override
    public int compareTo(SemVer other) {
        if (major != other.major) {
            return Integer.compare(major, other.major);
        }
        if (minor != other.minor) {
            return Integer.compare(minor, other.minor);
        }
        if (patch != other.patch) {
            return Integer.compare(patch, other.patch);
        }
        if (!preRelease.equals(other.preRelease)) {
            return preRelease.compareTo(other.preRelease);
        }
        return 0;
    }

    /** Renders the version in canonical form, e.g. "1.2.0" or "1.2.3-beta"; round-trips through {@link #tryParse}. */
    @Override
    public String toString() {
        return major + "." + minor + "." + patch + (preRelease.isEmpty() ? "" : "-" + preRelease);
    }
}
|
||||
@@ -0,0 +1,57 @@
|
||||
package com.semmle.js.dependencies;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class VersionConstraint {
|
||||
private String operator;
|
||||
private SemVer version;
|
||||
|
||||
public VersionConstraint(String operator, SemVer version) {
|
||||
this.operator = operator;
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public static VersionConstraint exact(String version) {
|
||||
return new VersionConstraint("", SemVer.tryParse(version));
|
||||
}
|
||||
|
||||
public String getOperator() {
|
||||
return operator;
|
||||
}
|
||||
|
||||
public SemVer getVersion() {
|
||||
return version;
|
||||
}
|
||||
|
||||
private static final Pattern pattern = Pattern.compile("([~^<>=]*)\\s*(\\d.*)");
|
||||
|
||||
public static List<VersionConstraint> parseVersionConstraints(String str) {
|
||||
String[] parts = str.split(",");
|
||||
List<VersionConstraint> constraints = new ArrayList<>();
|
||||
for (String part : parts) {
|
||||
part = part.trim();
|
||||
if (part.equals("*")) {
|
||||
constraints.add(new VersionConstraint("*", null));
|
||||
continue;
|
||||
}
|
||||
Matcher matcher = pattern.matcher(str);
|
||||
if (matcher.matches()) {
|
||||
String operator = matcher.group(1);
|
||||
String versionStr = matcher.group(2);
|
||||
if (operator.isEmpty() && versionStr.contains("x")) {
|
||||
// Normalize "1.x" to ">= 1.0"
|
||||
operator = ">=";
|
||||
versionStr = versionStr.replaceAll("x", "0");
|
||||
}
|
||||
SemVer version = SemVer.tryParse(versionStr);
|
||||
if (version != null) {
|
||||
constraints.add(new VersionConstraint(operator, version));
|
||||
}
|
||||
}
|
||||
}
|
||||
return constraints;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
package com.semmle.js.dependencies.packument;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * The subset of a {@code package.json} document needed for dependency installation.
 * <p>
 * Instances are populated from JSON via Gson reflection (see {@code DependencyResolver.main}
 * and {@code Fetcher.getPackument}), so field names must match the JSON property names exactly.
 * Any field may be {@code null} if the corresponding property is absent.
 */
public class PackageJson {
    // NOTE: field names are part of the JSON (de)serialization contract; do not rename.
    private String name;
    private String version;
    private Map<String, String> dependencies;      // package name -> version range
    private Map<String, String> devDependencies;   // package name -> version range
    private Map<String, String> peerDependencies;  // package name -> version range
    private String types;    // set if the package ships its own TypeScript typings
    private String typings;  // alternative spelling of `types`
    private String main;
    private Dist dist;

    /** The registry's distribution metadata; only the tarball URL is needed here. */
    public static class Dist {
        private String tarball;

        public String getTarball() {
            return tarball;
        }
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    /** Regular dependencies, or {@code null} if absent. */
    public Map<String, String> getDependencies() {
        return dependencies;
    }

    public void setDependencies(Map<String, String> dependencies) {
        this.dependencies = dependencies;
    }

    /** Development-only dependencies, or {@code null} if absent. */
    public Map<String, String> getDevDependencies() {
        return devDependencies;
    }

    public void setDevDependencies(Map<String, String> devDependencies) {
        this.devDependencies = devDependencies;
    }

    /** Peer dependencies, or {@code null} if absent. */
    public Map<String, String> getPeerDependencies() {
        return peerDependencies;
    }

    public void setPeerDependencies(Map<String, String> peerDependencies) {
        this.peerDependencies = peerDependencies;
    }

    public String getTypes() {
        return types;
    }

    public void setTypes(String types) {
        this.types = types;
    }

    public String getTypings() {
        return typings;
    }

    public void setTypings(String typings) {
        this.typings = typings;
    }

    public String getMain() {
        return main;
    }

    public void setMain(String main) {
        this.main = main;
    }

    public Dist getDist() {
        return dist;
    }

    public void setDist(Dist dist) {
        this.dist = dist;
    }
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.semmle.js.dependencies.packument;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * The npm registry's "packument" for a package: its name together with the
 * {@code package.json} of every published version.
 * <p>
 * Populated from JSON via Gson reflection (see {@code Fetcher.getPackument});
 * field names must match the registry's JSON property names.
 */
public class Packument {
    private String name;
    // Maps version strings (e.g. "1.2.3") to that version's package.json; may be null.
    private Map<String, PackageJson> versions;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Map<String, PackageJson> getVersions() {
        return versions;
    }

    public void setVersions(Map<String, PackageJson> versions) {
        this.versions = versions;
    }
}
|
||||
@@ -1,11 +1,8 @@
|
||||
package com.semmle.js.extractor;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.io.Writer;
|
||||
import java.lang.ProcessBuilder.Redirect;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
@@ -38,8 +35,9 @@ import com.google.gson.Gson;
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
import com.google.gson.JsonParseException;
|
||||
import com.google.gson.JsonParser;
|
||||
import com.google.gson.JsonPrimitive;
|
||||
import com.semmle.js.dependencies.DependencyResolver;
|
||||
import com.semmle.js.dependencies.packument.PackageJson;
|
||||
import com.semmle.js.extractor.ExtractorConfig.SourceType;
|
||||
import com.semmle.js.extractor.FileExtractor.FileType;
|
||||
import com.semmle.js.extractor.trapcache.DefaultTrapCache;
|
||||
@@ -213,11 +211,10 @@ public class AutoBuild {
|
||||
private volatile boolean seenCode = false;
|
||||
private volatile boolean seenFiles = false;
|
||||
private boolean installDependencies = false;
|
||||
private int installDependenciesTimeout;
|
||||
private final VirtualSourceRoot virtualSourceRoot;
|
||||
private ExtractorState state;
|
||||
|
||||
/** The default timeout when running <code>yarn</code>, in milliseconds. */
|
||||
/** The default timeout when installing dependencies, in milliseconds. */
|
||||
public static final int INSTALL_DEPENDENCIES_DEFAULT_TIMEOUT = 10 * 60 * 1000; // 10 minutes
|
||||
|
||||
public AutoBuild() {
|
||||
@@ -229,10 +226,6 @@ public class AutoBuild {
|
||||
getEnumFromEnvVar("LGTM_INDEX_TYPESCRIPT", TypeScriptMode.class, TypeScriptMode.FULL);
|
||||
this.defaultEncoding = getEnvVar("LGTM_INDEX_DEFAULT_ENCODING");
|
||||
this.installDependencies = Boolean.valueOf(getEnvVar("LGTM_INDEX_TYPESCRIPT_INSTALL_DEPS"));
|
||||
this.installDependenciesTimeout =
|
||||
Env.systemEnv()
|
||||
.getInt(
|
||||
"LGTM_INDEX_TYPESCRIPT_INSTALL_DEPS_TIMEOUT", INSTALL_DEPENDENCIES_DEFAULT_TIMEOUT);
|
||||
this.virtualSourceRoot = makeVirtualSourceRoot();
|
||||
setupFileTypes();
|
||||
setupXmlMode();
|
||||
@@ -690,28 +683,6 @@ public class AutoBuild {
|
||||
return false;
|
||||
}
|
||||
|
||||
/** Returns true if yarn is installed, otherwise prints a warning and returns false. */
|
||||
private boolean verifyYarnInstallation() {
|
||||
ProcessBuilder pb = new ProcessBuilder(Arrays.asList("yarn", "-v"));
|
||||
try {
|
||||
Process process = pb.start();
|
||||
boolean completed = process.waitFor(this.installDependenciesTimeout, TimeUnit.MILLISECONDS);
|
||||
if (!completed) {
|
||||
System.err.println("Yarn could not be launched. Timeout during 'yarn -v'.");
|
||||
return false;
|
||||
}
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
|
||||
String version = reader.readLine();
|
||||
System.out.println("Found yarn version: " + version);
|
||||
return true;
|
||||
} catch (IOException | InterruptedException ex) {
|
||||
System.err.println(
|
||||
"Yarn not found. Please put 'yarn' on the PATH for automatic dependency installation.");
|
||||
Exceptions.ignore(ex, "Continue without dependency installation");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an existing file named <code>dir/stem.ext</code> where <code>.ext</code> is any
|
||||
* of the given extensions, or <code>null</code> if no such file exists.
|
||||
@@ -736,17 +707,6 @@ public class AutoBuild {
|
||||
return tryResolveWithExtensions(dir, stem, FileType.JS.getExtensions());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a child of a JSON object as a string, or <code>null</code>.
|
||||
*/
|
||||
private String getChildAsString(JsonObject obj, String name) {
|
||||
JsonElement child = obj.get(name);
|
||||
if (child instanceof JsonPrimitive && ((JsonPrimitive)child).isString()) {
|
||||
return child.getAsString();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a relative path from <code>from</code> to <code>to</code> provided
|
||||
* the latter is contained in the former. Otherwise returns <code>null</code>.
|
||||
@@ -769,11 +729,8 @@ public class AutoBuild {
|
||||
* <p>
|
||||
* Downloaded packages are intalled under <tt>SCRATCH_DIR</tt>, in a mirrored directory hierarchy
|
||||
* we call the "virtual source root".
|
||||
* Each <tt>package.json</tt> file is rewritten and copied to the virtual source root,
|
||||
* where <tt>yarn install</tt> is invoked.
|
||||
* <p>
|
||||
* Packages that exists within the repo are stripped from the dependencies
|
||||
* before installation, so they are not downloaded. Since they are part of the main source tree,
|
||||
* Packages that exists within the repo are not downloaded. Since they are part of the main source tree,
|
||||
* these packages are not mirrored under the virtual source root.
|
||||
* Instead, an explicit package location mapping is passed to the TypeScript parser wrapper.
|
||||
* <p>
|
||||
@@ -784,23 +741,20 @@ protected DependencyInstallationResult preparePackagesAndDependencies(Set<Path>
|
||||
final Path sourceRoot = LGTM_SRC;
|
||||
|
||||
// Read all package.json files and index them by name.
|
||||
Map<Path, JsonObject> packageJsonFiles = new LinkedHashMap<>();
|
||||
Map<Path, PackageJson> packageJsonFiles = new LinkedHashMap<>();
|
||||
Map<String, Path> packagesInRepo = new LinkedHashMap<>();
|
||||
Map<String, Path> packageMainFile = new LinkedHashMap<>();
|
||||
for (Path file : filesToExtract) {
|
||||
if (file.getFileName().toString().equals("package.json")) {
|
||||
try {
|
||||
String text = new WholeIO().read(file);
|
||||
JsonElement json = new JsonParser().parse(text);
|
||||
if (!(json instanceof JsonObject)) continue;
|
||||
JsonObject jsonObject = (JsonObject) json;
|
||||
PackageJson packageJson = new Gson().fromJson(new WholeIO().read(file), PackageJson.class);
|
||||
file = file.toAbsolutePath();
|
||||
if (tryRelativize(sourceRoot, file) == null) {
|
||||
continue; // Ignore package.json files outside the source root.
|
||||
}
|
||||
packageJsonFiles.put(file, jsonObject);
|
||||
packageJsonFiles.put(file, packageJson);
|
||||
|
||||
String name = getChildAsString(jsonObject, "name");
|
||||
String name = packageJson.getName();
|
||||
if (name != null) {
|
||||
packagesInRepo.put(name, file);
|
||||
}
|
||||
@@ -812,45 +766,12 @@ protected DependencyInstallationResult preparePackagesAndDependencies(Set<Path>
|
||||
}
|
||||
}
|
||||
|
||||
// Process all package.json files now that we know the names of all local packages.
|
||||
// - remove dependencies on local packages
|
||||
// - guess the main file for each package
|
||||
// Note that we ignore optional dependencies during installation, so "optionalDependencies"
|
||||
// is ignored here as well.
|
||||
final List<String> dependencyFields =
|
||||
Arrays.asList("dependencies", "devDependencies", "peerDependencies");
|
||||
// Guess the main file for each package.
|
||||
packageJsonFiles.forEach(
|
||||
(path, packageJson) -> {
|
||||
(path, packageJson) -> {
|
||||
Path relativePath = sourceRoot.relativize(path);
|
||||
for (String dependencyField : dependencyFields) {
|
||||
JsonElement dependencyElm = packageJson.get(dependencyField);
|
||||
if (!(dependencyElm instanceof JsonObject)) continue;
|
||||
JsonObject dependencyObj = (JsonObject) dependencyElm;
|
||||
List<String> propsToRemove = new ArrayList<>();
|
||||
for (String packageName : dependencyObj.keySet()) {
|
||||
if (packagesInRepo.containsKey(packageName)) {
|
||||
// Remove dependency on local package
|
||||
propsToRemove.add(packageName);
|
||||
} else {
|
||||
// Remove file dependency on a package that doesn't exist in the checkout.
|
||||
String dependency = getChildAsString(dependencyObj, packageName);
|
||||
if (dependency != null && (dependency.startsWith("file:") || dependency.startsWith("./") || dependency.startsWith("../"))) {
|
||||
if (dependency.startsWith("file:")) {
|
||||
dependency = dependency.substring("file:".length());
|
||||
}
|
||||
Path resolvedPackage = path.getParent().resolve(dependency + "/package.json");
|
||||
if (!Files.exists(resolvedPackage)) {
|
||||
propsToRemove.add(packageName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (String prop : propsToRemove) {
|
||||
dependencyObj.remove(prop);
|
||||
}
|
||||
}
|
||||
// For named packages, find the main file.
|
||||
String name = getChildAsString(packageJson, "name");
|
||||
String name = packageJson.getName();
|
||||
if (name != null) {
|
||||
Path entryPoint = guessPackageMainFile(path, packageJson, FileType.TYPESCRIPT.getExtensions());
|
||||
if (entryPoint == null) {
|
||||
@@ -866,45 +787,24 @@ protected DependencyInstallationResult preparePackagesAndDependencies(Set<Path>
|
||||
}
|
||||
});
|
||||
|
||||
// Write the new package.json files to disk
|
||||
for (Path file : packageJsonFiles.keySet()) {
|
||||
Path virtualFile = virtualSourceRoot.toVirtualFile(file);
|
||||
|
||||
if (installDependencies) {
|
||||
// Use more threads for dependency installation than for extraction, as this is mainly I/O bound and we want
|
||||
// many concurrent HTTP requests.
|
||||
ExecutorService installationThreadPool = Executors.newFixedThreadPool(50);
|
||||
try {
|
||||
Files.createDirectories(virtualFile.getParent());
|
||||
try (Writer writer = Files.newBufferedWriter(virtualFile)) {
|
||||
new Gson().toJson(packageJsonFiles.get(file), writer);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new ResourceError("Could not rewrite package.json file: " + virtualFile, e);
|
||||
}
|
||||
}
|
||||
|
||||
// Install dependencies
|
||||
if (this.installDependencies && verifyYarnInstallation()) {
|
||||
for (Path file : packageJsonFiles.keySet()) {
|
||||
Path virtualFile = virtualSourceRoot.toVirtualFile(file);
|
||||
System.out.println("Installing dependencies from " + virtualFile);
|
||||
ProcessBuilder pb =
|
||||
new ProcessBuilder(
|
||||
Arrays.asList(
|
||||
"yarn",
|
||||
"install",
|
||||
"--non-interactive",
|
||||
"--ignore-scripts",
|
||||
"--ignore-platform",
|
||||
"--ignore-engines",
|
||||
"--ignore-optional",
|
||||
"--no-default-rc",
|
||||
"--no-bin-links",
|
||||
"--pure-lockfile"));
|
||||
pb.directory(virtualFile.getParent().toFile());
|
||||
pb.redirectOutput(Redirect.INHERIT);
|
||||
pb.redirectError(Redirect.INHERIT);
|
||||
List<CompletableFuture<Void>> futures = new ArrayList<>();
|
||||
packageJsonFiles.forEach((file, packageJson) -> {
|
||||
Path virtualFile = virtualSourceRoot.toVirtualFile(file);
|
||||
Path nodeModulesDir = virtualFile.getParent().resolve("node_modules");
|
||||
futures.add(new DependencyResolver(installationThreadPool, packagesInRepo.keySet()).installDependencies(packageJson, nodeModulesDir));
|
||||
});
|
||||
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
|
||||
} finally {
|
||||
installationThreadPool.shutdown();
|
||||
try {
|
||||
pb.start().waitFor(this.installDependenciesTimeout, TimeUnit.MILLISECONDS);
|
||||
} catch (IOException | InterruptedException ex) {
|
||||
throw new ResourceError("Could not install dependencies from " + file, ex);
|
||||
installationThreadPool.awaitTermination(1, TimeUnit.HOURS);
|
||||
} catch (InterruptedException e) {
|
||||
Exceptions.ignore(e, "Awaiting termination is not essential.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -917,7 +817,7 @@ protected DependencyInstallationResult preparePackagesAndDependencies(Set<Path>
|
||||
* given package - that is, the file you get when importing the package by name
|
||||
* without any path suffix.
|
||||
*/
|
||||
private Path guessPackageMainFile(Path packageJsonFile, JsonObject packageJson, Iterable<String> extensions) {
|
||||
private Path guessPackageMainFile(Path packageJsonFile, PackageJson packageJson, Iterable<String> extensions) {
|
||||
Path packageDir = packageJsonFile.getParent();
|
||||
|
||||
// Try <package_dir>/index.ts.
|
||||
@@ -929,7 +829,7 @@ protected DependencyInstallationResult preparePackagesAndDependencies(Set<Path>
|
||||
// Get the "main" property from the package.json
|
||||
// This usually refers to the compiled output, such as `./out/foo.js` but may hint as to
|
||||
// the name of main file ("foo" in this case).
|
||||
String mainStr = getChildAsString(packageJson, "main");
|
||||
String mainStr = packageJson.getMain();
|
||||
|
||||
// Look for source files `./src` if it exists
|
||||
Path sourceDir = packageDir.resolve("src");
|
||||
|
||||
Reference in New Issue
Block a user